Commit 0f5e437680: Merge branch 'master' of github.com:waku-org/js-waku into weboko/webrtc
(file header not captured)

@@ -55,6 +55,7 @@
     "fontsource",
     "globby",
     "gossipsub",
+    "hackathons",
     "huilong",
     "iasked",
     "ihave",
@@ -62,6 +63,7 @@
     "ineed",
     "IPAM",
     "ipfs",
+    "isready",
     "iwant",
     "jdev",
     "jswaku",
@@ -165,6 +167,7 @@
     "gen",
     "proto",
     "*.spec.ts",
+    "*.log",
     "CHANGELOG.md"
   ],
   "patterns": [
.github/workflows/ci.yml (55 lines changed)

@@ -15,7 +15,7 @@ on:
         type: string

 env:
-  NODE_JS: "22"
+  NODE_JS: "24"

 jobs:
   check:
@@ -57,7 +57,7 @@ jobs:
   browser:
     runs-on: ubuntu-latest
     container:
-      image: mcr.microsoft.com/playwright:v1.53.1-jammy
+      image: mcr.microsoft.com/playwright:v1.56.1-jammy
       env:
         HOME: "/root"
     steps:
@@ -71,65 +71,18 @@ jobs:
       - run: npm run build:esm
       - run: npm run test:browser

-  build_rln_tree:
-    if: false # This condition disables the job
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v3
-        with:
-          repository: waku-org/js-waku
-      - uses: actions/setup-node@v3
-        with:
-          node-version: ${{ env.NODE_JS }}
-      - name: Check for existing RLN tree artifact
-        id: check-artifact
-        uses: actions/github-script@v6
-        with:
-          script: |
-            const artifact = await github.rest.actions.listWorkflowRunArtifacts({
-              owner: context.repo.owner,
-              repo: context.repo.repo,
-              run_id: context.runId
-            });
-            console.log(artifact);
-            const foundArtifact = artifact.data.artifacts.find(art => art.name === 'rln_tree.tar.gz');
-            if (foundArtifact) {
-              core.setOutput('artifact_id', foundArtifact.id);
-              core.setOutput('artifact_found', 'true');
-            } else {
-              core.setOutput('artifact_found', 'false');
-            }
-      - name: Download RLN tree artifact
-        if: steps.check-artifact.outputs.artifact_found == 'true'
-        uses: actions/download-artifact@v4
-        with:
-          name: rln_tree.tar.gz
-          path: /tmp
-      - uses: ./.github/actions/npm
-      - name: Sync rln tree and save artifact
-        run: |
-          mkdir -p /tmp/rln_tree.db
-          npm run build:esm
-          npm run sync-rln-tree
-          tar -czf rln_tree.tar.gz -C /tmp/rln_tree.db .
-      - name: Upload artifact
-        uses: actions/upload-artifact@v4
-        with:
-          name: rln_tree.tar.gz
-          path: rln_tree.tar.gz
-
   node:
     uses: ./.github/workflows/test-node.yml
     secrets: inherit
     with:
-      nim_wakunode_image: ${{ inputs.nim_wakunode_image || 'wakuorg/nwaku:v0.35.1' }}
+      nim_wakunode_image: ${{ inputs.nim_wakunode_image || 'wakuorg/nwaku:v0.36.0' }}
       test_type: node
       allure_reports: true

   node_optional:
     uses: ./.github/workflows/test-node.yml
     with:
-      nim_wakunode_image: ${{ inputs.nim_wakunode_image || 'wakuorg/nwaku:v0.35.1' }}
+      nim_wakunode_image: ${{ inputs.nim_wakunode_image || 'wakuorg/nwaku:v0.36.0' }}
       test_type: node-optional

   node_with_nwaku_master:
.github/workflows/playwright.yml (2 lines changed)

@@ -17,7 +17,7 @@ jobs:
     timeout-minutes: 60
     runs-on: ubuntu-latest
     container:
-      image: mcr.microsoft.com/playwright:v1.53.1-jammy
+      image: mcr.microsoft.com/playwright:v1.56.1-jammy
     steps:
       - uses: actions/checkout@v3
       - uses: actions/setup-node@v3
.github/workflows/pre-release.yml (20 lines changed)

@@ -2,7 +2,11 @@ on:
   workflow_dispatch:

 env:
-  NODE_JS: "22"
+  NODE_JS: "24"

+permissions:
+  id-token: write
+  contents: read
+
 jobs:
   pre-release:
@@ -10,19 +14,19 @@ jobs:
     runs-on: ubuntu-latest
     if: github.event_name == 'workflow_dispatch'
     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4
        with:
          repository: waku-org/js-waku

-      - uses: actions/setup-node@v3
+      - uses: actions/setup-node@v4
        with:
          node-version: ${{ env.NODE_JS }}
          registry-url: "https://registry.npmjs.org"

      - run: npm install

      - run: npm run build

      - run: npm run publish -- --tag next
        env:
          NODE_AUTH_TOKEN: ${{ secrets.NPM_JS_WAKU_PUBLISH }}
.github/workflows/test-node.yml (8 lines changed)

@@ -24,7 +24,7 @@ on:
       default: false

 env:
-  NODE_JS: "22"
+  NODE_JS: "24"
   # Ensure test type conditions remain consistent.
   WAKU_SERVICE_NODE_PARAMS: ${{ (inputs.test_type == 'go-waku-master') && '--min-relay-peers-to-publish=0' || '' }}
   DEBUG: ${{ inputs.debug }}
@@ -42,7 +42,7 @@ jobs:
       checks: write
     steps:
       - uses: actions/checkout@v3
         with:
           repository: waku-org/js-waku

       - name: Remove unwanted software
@@ -62,7 +62,7 @@ jobs:

       - name: Merge allure reports
         if: always() && env.ALLURE_REPORTS == 'true'
         run: node ci/mergeAllureResults.cjs

       - name: Get allure history
         if: always() && env.ALLURE_REPORTS == 'true'
@@ -125,4 +125,4 @@ jobs:
           echo "## Run Information" >> $GITHUB_STEP_SUMMARY
           echo "- **NWAKU**: ${{ env.WAKUNODE_IMAGE }}" >> $GITHUB_STEP_SUMMARY
           echo "## Test Results" >> $GITHUB_STEP_SUMMARY
           echo "Allure report will be available at: https://waku-org.github.io/allure-jswaku/${{ github.run_number }}" >> $GITHUB_STEP_SUMMARY
.github/workflows/test-reliability.yml (6 lines changed)

@@ -18,7 +18,7 @@ on:
           - all

 env:
-  NODE_JS: "22"
+  NODE_JS: "24"

 jobs:
   test:
@@ -34,7 +34,7 @@ jobs:
     if: ${{ github.event.inputs.test_type == 'all' }}
     steps:
       - uses: actions/checkout@v3
         with:
           repository: waku-org/js-waku

       - name: Remove unwanted software
@@ -74,7 +74,7 @@ jobs:
     if: ${{ github.event.inputs.test_type != 'all' }}
     steps:
       - uses: actions/checkout@v3
         with:
           repository: waku-org/js-waku

       - name: Remove unwanted software
.gitignore (4 lines changed)

@@ -17,4 +17,6 @@ packages/discovery/mock_local_storage
 .giga
 .cursor
 .DS_Store
 CLAUDE.md
+.env
+postgres-data/
(file header not captured)

@@ -10,5 +10,6 @@
   "packages/discovery": "0.0.12",
   "packages/sds": "0.0.7",
   "packages/rln": "0.1.9",
-  "packages/react": "0.0.7"
+  "packages/react": "0.0.7",
+  "packages/run": "0.0.1"
 }
(file header not captured)

@@ -23,6 +23,15 @@ npm install
 npm run doc
 ```

+# Using Nix shell
+```shell
+git clone https://github.com/waku-org/js-waku.git
+cd js-waku
+nix develop
+npm install
+npm run doc
+```
+
 ## Bugs, Questions & Features

 If you encounter any bug or would like to propose new features, feel free to [open an issue](https://github.com/waku-org/js-waku/issues/new/).
ci/Jenkinsfile (26 lines changed)

@@ -1,5 +1,13 @@
 pipeline {
-  agent { label 'linux' }
+  agent {
+    docker {
+      label 'linuxcontainer'
+      image 'harbor.status.im/infra/ci-build-containers:linux-base-1.0.0'
+      args '--volume=/nix:/nix ' +
+           '--volume=/etc/nix:/etc/nix ' +
+           '--user jenkins'
+    }
+  }

   options {
     disableConcurrentBuilds()
@@ -21,29 +29,35 @@ pipeline {
   stages {
     stage('Deps') {
       steps {
-        sh 'npm install'
+        script {
+          nix.develop('npm install', pure: true)
+        }
       }
     }

     stage('Packages') {
       steps {
-        sh 'npm run build'
+        script {
+          nix.develop('npm run build', pure: true)
+        }
       }
     }

     stage('Build') {
       steps {
-        sh 'npm run doc'
+        script {
+          nix.develop('npm run doc', pure: true)
+        }
       }
     }

@@ -41,7 +55,9 @@ pipeline {
       when { expression { GIT_BRANCH.endsWith('master') } }
       steps {
         sshagent(credentials: ['status-im-auto-ssh']) {
-          sh 'npm run deploy'
+          script {
+            nix.develop('npm run deploy', pure: true)
+          }
         }
       }
     }
flake.lock (new file, generated, 26 lines)

{
  "nodes": {
    "nixpkgs": {
      "locked": {
        "lastModified": 1761016216,
        "narHash": "sha256-G/iC4t/9j/52i/nm+0/4ybBmAF4hzR8CNHC75qEhjHo=",
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "481cf557888e05d3128a76f14c76397b7d7cc869",
        "type": "github"
      },
      "original": {
        "id": "nixpkgs",
        "ref": "nixos-25.05",
        "type": "indirect"
      }
    },
    "root": {
      "inputs": {
        "nixpkgs": "nixpkgs"
      }
    }
  },
  "root": "root",
  "version": 7
}
flake.nix (new file, 33 lines)

{
  description = "Nix flake development shell.";

  inputs = {
    nixpkgs.url = "nixpkgs/nixos-25.05";
  };

  outputs =
    { self, nixpkgs }:
    let
      supportedSystems = [
        "x86_64-linux"
        "aarch64-linux"
        "x86_64-darwin"
        "aarch64-darwin"
      ];
      forEachSystem = nixpkgs.lib.genAttrs supportedSystems;
      pkgsFor = forEachSystem (system: import nixpkgs { inherit system; });
    in
    rec {
      formatter = forEachSystem (system: pkgsFor.${system}.nixpkgs-fmt);

      devShells = forEachSystem (system: {
        default = pkgsFor.${system}.mkShellNoCC {
          packages = with pkgsFor.${system}.buildPackages; [
            git # 2.44.1
            openssh # 9.7p1
            nodejs_20 # v20.15.1
          ];
        };
      });
    };
}
package-lock.json (generated; 1529 lines changed, diff suppressed because it is too large)
(file header not captured)

@@ -14,6 +14,7 @@
     "packages/rln",
     "packages/sdk",
     "packages/relay",
+    "packages/run",
     "packages/tests",
     "packages/reliability-tests",
     "packages/browser-tests",
@@ -44,8 +45,7 @@
     "doc": "run-s doc:*",
     "doc:html": "typedoc --options typedoc.cjs",
     "doc:cname": "echo 'js.waku.org' > docs/CNAME",
-    "publish": "node ./ci/publish.js",
-    "sync-rln-tree": "node ./packages/tests/src/sync-rln-tree.js"
+    "publish": "node ./ci/publish.js"
   },
   "devDependencies": {
     "@size-limit/preset-big-lib": "^11.0.2",
@@ -78,5 +78,6 @@
     "*.{ts,js}": [
       "eslint --fix"
     ]
-  }
+  },
+  "version": ""
 }
(file headers not captured: the same repository-URL fix across six package manifests, each identified by its homepage field)

@@ -28,7 +28,7 @@
   "homepage": "https://github.com/waku-org/js-waku/tree/master/packages/core#readme",
   "repository": {
     "type": "git",
-    "url": "https://github.com/waku-org/js-waku.git"
+    "url": "git+https://github.com/waku-org/js-waku.git"
   },
   "bugs": {
     "url": "https://github.com/waku-org/js-waku/issues"

@@ -15,7 +15,7 @@
   "homepage": "https://github.com/waku-org/js-waku/tree/master/packages/discovery#readme",
   "repository": {
     "type": "git",
-    "url": "https://github.com/waku-org/js-waku.git"
+    "url": "git+https://github.com/waku-org/js-waku.git"
   },
   "bugs": {
     "url": "https://github.com/waku-org/js-waku/issues"

@@ -15,7 +15,7 @@
   "homepage": "https://github.com/waku-org/js-waku/tree/master/packages/enr#readme",
   "repository": {
     "type": "git",
-    "url": "https://github.com/waku-org/js-waku.git"
+    "url": "git+https://github.com/waku-org/js-waku.git"
   },
   "bugs": {
     "url": "https://github.com/waku-org/js-waku/issues"

@@ -15,7 +15,7 @@
   "homepage": "https://github.com/waku-org/js-waku/tree/master/packages/interfaces#readme",
   "repository": {
     "type": "git",
-    "url": "https://github.com/waku-org/js-waku.git"
+    "url": "git+https://github.com/waku-org/js-waku.git"
   },
   "bugs": {
     "url": "https://github.com/waku-org/js-waku/issues"

@@ -36,7 +36,7 @@
   "homepage": "https://github.com/waku-org/js-waku/tree/master/packages/message-encryption#readme",
   "repository": {
     "type": "git",
-    "url": "https://github.com/waku-org/js-waku.git"
+    "url": "git+https://github.com/waku-org/js-waku.git"
   },
   "bugs": {
     "url": "https://github.com/waku-org/js-waku/issues"

@@ -15,7 +15,7 @@
   "homepage": "https://github.com/waku-org/js-waku/tree/master/packages/proto#readme",
   "repository": {
     "type": "git",
-    "url": "https://github.com/waku-org/js-waku.git"
+    "url": "git+https://github.com/waku-org/js-waku.git"
   },
   "bugs": {
     "url": "https://github.com/waku-org/js-waku/issues"
(file header not captured: the generated SDS codec in packages/proto)

@@ -13,6 +13,7 @@ import type { Uint8ArrayList } from 'uint8arraylist'
 export interface HistoryEntry {
   messageId: string
   retrievalHint?: Uint8Array
+  senderId?: string
 }

 export namespace HistoryEntry {
@@ -35,6 +36,11 @@ export namespace HistoryEntry {
       w.bytes(obj.retrievalHint)
     }

+    if (obj.senderId != null) {
+      w.uint32(26)
+      w.string(obj.senderId)
+    }
+
     if (opts.lengthDelimited !== false) {
       w.ldelim()
     }
@@ -57,6 +63,10 @@ export namespace HistoryEntry {
           obj.retrievalHint = reader.bytes()
           break
         }
+        case 3: {
+          obj.senderId = reader.string()
+          break
+        }
         default: {
           reader.skipType(tag & 7)
           break
@@ -87,6 +97,7 @@ export interface SdsMessage {
   lamportTimestamp?: bigint
   causalHistory: HistoryEntry[]
   bloomFilter?: Uint8Array
+  repairRequest: HistoryEntry[]
   content?: Uint8Array
 }

@@ -132,6 +143,13 @@ export namespace SdsMessage {
       w.bytes(obj.bloomFilter)
     }

+    if (obj.repairRequest != null) {
+      for (const value of obj.repairRequest) {
+        w.uint32(106)
+        HistoryEntry.codec().encode(value, w)
+      }
+    }
+
     if (obj.content != null) {
       w.uint32(162)
       w.bytes(obj.content)
@@ -145,7 +163,8 @@ export namespace SdsMessage {
       senderId: '',
       messageId: '',
       channelId: '',
-      causalHistory: []
+      causalHistory: [],
+      repairRequest: []
     }

     const end = length == null ? reader.len : reader.pos + length
@@ -184,6 +203,16 @@ export namespace SdsMessage {
           obj.bloomFilter = reader.bytes()
           break
         }
+        case 13: {
+          if (opts.limits?.repairRequest != null && obj.repairRequest.length === opts.limits.repairRequest) {
+            throw new MaxLengthError('Decode error - map field "repairRequest" had too many elements')
+          }
+
+          obj.repairRequest.push(HistoryEntry.codec().decode(reader, reader.uint32(), {
+            limits: opts.limits?.repairRequest$
+          }))
+          break
+        }
         case 20: {
           obj.content = reader.bytes()
           break
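The generated writer above pivots on two magic numbers: `w.uint32(26)` for the new `senderId` and `w.uint32(106)` for each `repairRequest` entry. Both follow the protobuf key rule `(fieldNumber << 3) | wireType`, with wire type 2 for length-delimited values (strings, bytes, embedded messages). A self-contained sketch of that arithmetic:

```typescript
// Protobuf encodes each field key as (fieldNumber << 3) | wireType.
// Strings, bytes and embedded messages use wire type 2 (length-delimited).
const LENGTH_DELIMITED = 2;

function fieldTag(fieldNumber: number): number {
  return (fieldNumber << 3) | LENGTH_DELIMITED;
}

console.log(fieldTag(3)); // 26  -> HistoryEntry.sender_id (new in this commit)
console.log(fieldTag(13)); // 106 -> SdsMessage.repair_request (new in this commit)
console.log(fieldTag(20)); // 162 -> SdsMessage.content (pre-existing, for comparison)
```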
(file header not captured: the SDS protobuf schema)

@@ -3,6 +3,8 @@ syntax = "proto3";
 message HistoryEntry {
   string message_id = 1; // Unique identifier of the SDS message, as defined in `Message`
   optional bytes retrieval_hint = 2; // Optional information to help remote parties retrieve this SDS message; For example, A Waku deterministic message hash or routing payload hash
+
+  optional string sender_id = 3; // Participant ID of original message sender. Only populated if using optional SDS Repair extension
 }

 message SdsMessage {
@@ -12,5 +14,8 @@ message SdsMessage {
   optional uint64 lamport_timestamp = 10; // Logical timestamp for causal ordering in channel
   repeated HistoryEntry causal_history = 11; // List of preceding message IDs that this message causally depends on. Generally 2 or 3 message IDs are included.
   optional bytes bloom_filter = 12; // Bloom filter representing received message IDs in channel
+
+  repeated HistoryEntry repair_request = 13; // Capped list of history entries missing from sender's causal history. Only populated if using the optional SDS Repair extension.
+
   optional bytes content = 20; // Actual content of the message
 }
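A minimal round-trip sketch of the two new fields. It assumes the protons-style `encode`/`decode` helpers that generated modules like this conventionally expose, and an `SdsMessage` export from `@waku/proto`; neither name is shown in the diff, so treat both as assumptions:

```typescript
import { SdsMessage } from "@waku/proto"; // assumed export path, not shown in the diff

// sender_id (field 3) and repair_request (field 13) are only meaningful
// when the optional SDS Repair extension is in use.
const msg = {
  senderId: "alice",
  messageId: "m-42",
  channelId: "general",
  causalHistory: [],
  repairRequest: [{ messageId: "m-40", senderId: "bob" }]
};

const bytes = SdsMessage.encode(msg);
const decoded = SdsMessage.decode(bytes);
console.log(decoded.repairRequest[0].messageId); // "m-40"
```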
(file header not captured)

@@ -21,7 +21,7 @@
   "homepage": "https://github.com/waku-org/js-waku/tree/master/packages/react#readme",
   "repository": {
     "type": "git",
-    "url": "https://github.com/waku-org/js-waku.git"
+    "url": "git+https://github.com/waku-org/js-waku.git"
   },
   "bugs": {
     "url": "https://github.com/waku-org/js-waku/issues"
(file header not captured: the @waku/react package entry point)

@@ -1,2 +1,4 @@
+export * from "@waku/sdk";
+
 export type { CreateNodeOptions } from "./types.js";
 export { WakuProvider, useWaku } from "./WakuProvider.js";
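With the new `export * from "@waku/sdk"` re-export, React consumers can depend on a single package; a small sketch, assuming `createLightNode` remains an `@waku/sdk` export:

```typescript
// Previously this needed imports from both @waku/react and @waku/sdk;
// @waku/react now re-exports the SDK surface alongside its own hooks.
import { createLightNode, useWaku, WakuProvider } from "@waku/react";
```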
(file headers not captured)

@@ -14,7 +14,7 @@
   "homepage": "https://github.com/waku-org/js-waku/tree/master/packages/relay#readme",
   "repository": {
     "type": "git",
-    "url": "https://github.com/waku-org/js-waku.git"
+    "url": "git+https://github.com/waku-org/js-waku.git"
   },
   "bugs": {
     "url": "https://github.com/waku-org/js-waku/issues"

@@ -16,7 +16,7 @@
   "homepage": "https://github.com/waku-org/js-waku/tree/master/packages/reliability-tests#readme",
   "repository": {
     "type": "git",
-    "url": "https://github.com/waku-org/js-waku.git"
+    "url": "git+https://github.com/waku-org/js-waku.git"
   },
   "bugs": {
     "url": "https://github.com/waku-org/js-waku/issues"
(file header not captured)

@@ -3,7 +3,7 @@ import { promisify } from "util";

 const execAsync = promisify(exec);

-const WAKUNODE_IMAGE = process.env.WAKUNODE_IMAGE || "wakuorg/nwaku:v0.35.1";
+const WAKUNODE_IMAGE = process.env.WAKUNODE_IMAGE || "wakuorg/nwaku:v0.36.0";

 async function main() {
   try {
(file header not captured)

@@ -14,7 +14,7 @@
   "homepage": "https://github.com/waku-org/js-waku/tree/master/packages/rln#readme",
   "repository": {
     "type": "git",
-    "url": "https://github.com/waku-org/js-waku.git"
+    "url": "git+https://github.com/waku-org/js-waku.git"
   },
   "bugs": {
     "url": "https://github.com/waku-org/js-waku/issues"
packages/run/.eslintrc.cjs (new file, 20 lines)

module.exports = {
  parserOptions: {
    tsconfigRootDir: __dirname,
    project: "./tsconfig.dev.json"
  },
  rules: {
    "@typescript-eslint/no-non-null-assertion": "off"
  },
  globals: {
    process: true
  },
  overrides: [
    {
      files: ["*.js"],
      rules: {
        "no-console": "error"
      }
    }
  ]
};
packages/run/.mocharc.cjs (new file, 11 lines)

module.exports = {
  extension: ['ts'],
  require: ['ts-node/register'],
  loader: 'ts-node/esm',
  'node-option': [
    'experimental-specifier-resolution=node',
    'loader=ts-node/esm'
  ],
  timeout: 90000,
  exit: true
};
packages/run/README.md (new file, 148 lines)

# @waku/run

> **Spin up a local Waku network for development without relying on external infrastructure**

Perfect for hackathons, offline development, or when you need a controlled testing environment for your js-waku application.

## What's Included

- **2 nwaku nodes** connected to each other with all protocols enabled
- **PostgreSQL database** for message persistence
- **Isolated network** - nodes only connect to each other

## Requirements

- [Docker Desktop](https://www.docker.com/products/docker-desktop/) or Docker Engine with Compose plugin

## Quick Start

### 1. Start the Network

```bash
npx @waku/run start
```

This will:
- Start 2 nwaku nodes and a PostgreSQL database
- Run in the background (detached mode)
- Display connection information you need for your app

**Example output:**
```typescript
import { createLightNode } from "@waku/sdk";

const waku = await createLightNode({
  defaultBootstrap: false,
  bootstrapPeers: [
    "/ip4/127.0.0.1/tcp/60000/ws/p2p/16Uiu2HAmF6oAsd23RMAnZb3NJgxXrExxBTPMdEoih232iAZkviU2",
    "/ip4/127.0.0.1/tcp/60001/ws/p2p/16Uiu2HAm5aZU47YkiUoARqivbCXwuFPzFFXXiURAorySqAQbL6EQ"
  ],
  numPeersToUse: 2,
  libp2p: {
    filterMultiaddrs: false
  },
  networkConfig: {
    clusterId: 0,
    numShardsInCluster: 8
  }
});
```

### 2. Connect Your js-waku App

Copy the configuration from the output above and paste it into your application. Then start your node:

```typescript
await waku.start();

// Your app is now connected to your local Waku network!
```

### 3. Stop When Done

```bash
npx @waku/run stop
```

## Available Commands

### Using npx (published package)

| Command | Description |
|---------|-------------|
| `npx @waku/run start` | Start the network (detached) and show connection info |
| `npx @waku/run stop` | Stop the network and clean up |
| `npx @waku/run info` | Show connection info for running network |
| `npx @waku/run logs` | View and follow logs from all nodes |
| `npx @waku/run test` | Test the network by sending a message |

## Configuration

All configuration is done via environment variables passed to the command.

### Custom Ports

If the default ports are in use, specify custom ports:

```bash
NODE1_WS_PORT=50000 NODE2_WS_PORT=50001 npx @waku/run start
```

Available port configuration:
- `NODE1_WS_PORT` (default: 60000)
- `NODE2_WS_PORT` (default: 60001)
- `NODE1_REST_PORT` (default: 8646)
- `NODE2_REST_PORT` (default: 8647)

### Cluster Configuration

The default configuration uses:
- Cluster ID: 0
- Number of shards: 8

To test with a different cluster:

```bash
CLUSTER_ID=16 npx @waku/run start
```

### Custom nwaku Version

To use a different nwaku image version:

```bash
NWAKU_IMAGE=wakuorg/nwaku:v0.35.0 npx @waku/run start
```

## Debugging

### View Node Logs

```bash
npx @waku/run logs
```

### Check Node Health

```bash
# Node 1
curl http://127.0.0.1:8646/health

# Node 2
curl http://127.0.0.1:8647/health
```

### Check Peer Connections

```bash
# Node 1 debug info
curl http://127.0.0.1:8646/debug/v1/info

# Node 2 debug info
curl http://127.0.0.1:8647/debug/v1/info
```

## License

MIT OR Apache-2.0
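Beyond `npx @waku/run test`, a minimal hand-rolled check is to push one message through the local network. This sketch reuses the bootstrap configuration printed by `start` and the `Protocols` import the package's own test script uses; the content topic and payload are arbitrary placeholders:

```typescript
import { createEncoder, createLightNode, Protocols, utf8ToBytes } from "@waku/sdk";

const waku = await createLightNode({
  defaultBootstrap: false,
  bootstrapPeers: [
    "/ip4/127.0.0.1/tcp/60000/ws/p2p/16Uiu2HAmF6oAsd23RMAnZb3NJgxXrExxBTPMdEoih232iAZkviU2"
  ],
  libp2p: { filterMultiaddrs: false },
  networkConfig: { clusterId: 0, numShardsInCluster: 8 }
});

await waku.start();
await waku.waitForPeers([Protocols.LightPush]);

// Arbitrary demo topic; any /app/version/name/encoding content topic works locally.
const encoder = createEncoder({ contentTopic: "/waku-run-demo/1/chat/proto" });
await waku.lightPush.send(encoder, { payload: utf8ToBytes("hello local Waku") });

await waku.stop();
```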
packages/run/docker-compose.yml (new file, 142 lines)

# Environment variable definitions
x-pg-pass: &pg_pass ${POSTGRES_PASSWORD:-test123}
x-pg-user: &pg_user ${POSTGRES_USER:-postgres}

x-pg-environment: &pg_env
  POSTGRES_USER: *pg_user
  POSTGRES_PASSWORD: *pg_pass

# Shared nwaku configuration
x-nwaku-base: &nwaku-base
  image: ${NWAKU_IMAGE:-wakuorg/nwaku:v0.36.0}
  pull_policy: if_not_present
  restart: on-failure
  logging:
    driver: json-file
    options:
      max-size: "10m"
      max-file: "3"

services:
  postgres:
    image: postgres:15.4-alpine3.18
    pull_policy: if_not_present
    restart: on-failure
    environment:
      <<: *pg_env
      POSTGRES_DB: postgres
    volumes:
      - postgres-data:/var/lib/postgresql/data
      - ./init-db.sh:/docker-entrypoint-initdb.d/init-db.sh
    healthcheck:
      test: ["CMD-SHELL", "pg_isready -U postgres"]
      interval: 10s
      timeout: 5s
      retries: 5
      start_period: 10s

  nwaku-1:
    <<: *nwaku-base
    container_name: ${COMPOSE_PROJECT_NAME:-waku-run-0-0-1}-node-1
    networks:
      default:
        ipv4_address: 172.20.0.10
    ports:
      - "${NODE1_TCP_PORT:-30303}:30303/tcp"
      - "${NODE1_WS_PORT:-60000}:60000/tcp"
      - "${NODE1_REST_PORT:-8646}:8646/tcp"
    environment:
      <<: *pg_env
    depends_on:
      postgres:
        condition: service_healthy
    command:
      - --nodekey=e419c3cf4f09ac3babdf61856e6faa0e0c6a7d97674d5401a0114616549c7632
      - --staticnode=/ip4/172.20.0.11/tcp/60001/ws/p2p/16Uiu2HAm5aZU47YkiUoARqivbCXwuFPzFFXXiURAorySqAQbL6EQ
      - --relay=true
      - --filter=true
      - --lightpush=true
      - --store=true
      - --peer-exchange=true
      - --discv5-discovery=true
      - --cluster-id=0
      - --shard=0
      - --shard=1
      - --shard=2
      - --shard=3
      - --shard=4
      - --shard=5
      - --shard=6
      - --shard=7
      - --listen-address=0.0.0.0
      - --tcp-port=30303
      - --websocket-support=true
      - --websocket-port=60000
      - --ext-multiaddr=/dns4/nwaku-1/tcp/60000/ws
      - --ext-multiaddr=/ip4/127.0.0.1/tcp/60000/ws
      - --rest=true
      - --rest-address=0.0.0.0
      - --rest-port=8646
      - --rest-admin=true
      - --store-message-db-url=postgresql://${POSTGRES_USER:-postgres}:${POSTGRES_PASSWORD:-test123}@postgres:5432/nwaku1
      - --log-level=${LOG_LEVEL:-INFO}
      - --max-connections=150

  nwaku-2:
    <<: *nwaku-base
    container_name: ${COMPOSE_PROJECT_NAME:-waku-run-0-0-1}-node-2
    networks:
      default:
        ipv4_address: 172.20.0.11
    ports:
      - "${NODE2_TCP_PORT:-30304}:30304/tcp"
      - "${NODE2_WS_PORT:-60001}:60001/tcp"
      - "${NODE2_REST_PORT:-8647}:8647/tcp"
    environment:
      <<: *pg_env
    depends_on:
      postgres:
        condition: service_healthy
      nwaku-1:
        condition: service_started
    command:
      - --nodekey=50632ab0efd313bfb4aa842de716f03dacd181c863770abd145e3409290fdaa7
      - --staticnode=/ip4/172.20.0.10/tcp/60000/ws/p2p/16Uiu2HAmF6oAsd23RMAnZb3NJgxXrExxBTPMdEoih232iAZkviU2
      - --relay=true
      - --filter=true
      - --lightpush=true
      - --store=true
      - --peer-exchange=true
      - --discv5-discovery=true
      - --cluster-id=0
      - --shard=0
      - --shard=1
      - --shard=2
      - --shard=3
      - --shard=4
      - --shard=5
      - --shard=6
      - --shard=7
      - --listen-address=0.0.0.0
      - --tcp-port=30304
      - --websocket-support=true
      - --websocket-port=60001
      - --ext-multiaddr=/dns4/nwaku-2/tcp/60001/ws
      - --ext-multiaddr=/ip4/127.0.0.1/tcp/60001/ws
      - --rest=true
      - --rest-address=0.0.0.0
      - --rest-port=8647
      - --rest-admin=true
      - --store-message-db-url=postgresql://${POSTGRES_USER:-postgres}:${POSTGRES_PASSWORD:-test123}@postgres:5432/nwaku2
      - --log-level=${LOG_LEVEL:-INFO}
      - --max-connections=150

volumes:
  postgres-data:

networks:
  default:
    name: ${COMPOSE_PROJECT_NAME:-waku-run-0-0-1}-network
    ipam:
      config:
        - subnet: 172.20.0.0/16
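The README's curl checks translate directly to the built-in `fetch` of Node 18+; a tiny sketch polling both nodes' REST health endpoints on the default ports mapped above:

```typescript
// REST ports as mapped in docker-compose.yml (NODE1_REST_PORT / NODE2_REST_PORT defaults).
const restPorts = [8646, 8647];

for (const port of restPorts) {
  try {
    const res = await fetch(`http://127.0.0.1:${port}/health`);
    console.log(`node on :${port} -> HTTP ${res.status}`);
  } catch {
    console.log(`node on :${port} -> not reachable yet`);
  }
}
```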
packages/run/init-db.sh (new executable file, 8 lines)

#!/bin/bash
set -e

# Create separate databases for each nwaku node
psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" <<-EOSQL
	CREATE DATABASE nwaku1;
	CREATE DATABASE nwaku2;
EOSQL
packages/run/package.json (new file, 68 lines)

{
  "name": "@waku/run",
  "version": "0.0.1",
  "description": "Run a local Waku network for development and testing",
  "type": "module",
  "author": "Waku Team",
  "homepage": "https://github.com/waku-org/js-waku/tree/master/packages/run#readme",
  "repository": {
    "type": "git",
    "url": "git+https://github.com/waku-org/js-waku.git"
  },
  "bugs": {
    "url": "https://github.com/waku-org/js-waku/issues"
  },
  "license": "MIT OR Apache-2.0",
  "keywords": [
    "waku",
    "decentralized",
    "communication",
    "web3",
    "testing",
    "development"
  ],
  "bin": {
    "waku-run": "./dist/src/cli.js"
  },
  "files": [
    "dist",
    "docker-compose.yml",
    "init-db.sh",
    "README.md"
  ],
  "scripts": {
    "build": "tsc",
    "prepublishOnly": "npm run build",
    "start": "node dist/scripts/start.js",
    "stop": "node dist/scripts/stop.js",
    "restart": "npm run stop && npm run start",
    "logs": "node dist/scripts/logs.js",
    "info": "node dist/scripts/info.js",
    "test": "if [ \"$CI\" = \"true\" ]; then echo 'Skipping tests in CI'; exit 0; fi && NODE_ENV=test node ./src/run-tests.js \"tests/basic.spec.ts\"",
    "fix": "run-s fix:*",
    "fix:lint": "eslint src scripts tests --fix",
    "check": "run-s check:*",
    "check:tsc": "tsc -p tsconfig.dev.json",
    "check:lint": "eslint src scripts tests",
    "check:spelling": "cspell \"{README.md,src/**/*.ts,scripts/**/*.ts,tests/**/*.ts}\""
  },
  "engines": {
    "node": ">=22"
  },
  "dependencies": {
    "@waku/core": "*",
    "@waku/interfaces": "*",
    "@waku/sdk": "*",
    "@waku/utils": "*"
  },
  "devDependencies": {
    "@types/chai": "^4.3.11",
    "@types/mocha": "^10.0.6",
    "chai": "^4.3.10",
    "cspell": "^8.6.1",
    "mocha": "^10.3.0",
    "npm-run-all": "^4.1.5",
    "ts-node": "^10.9.2",
    "typescript": "^5.3.3"
  }
}
packages/run/scripts/info.ts (new executable file, 83 lines)

#!/usr/bin/env node

import { execSync } from "child_process";
import { dirname, join } from "path";
import { fileURLToPath } from "url";

import {
  DEFAULT_CLUSTER_ID,
  DEFAULT_NODE1_WS_PORT,
  DEFAULT_NODE2_WS_PORT,
  NODE1_PEER_ID,
  NODE2_PEER_ID
} from "../src/constants.js";
import { getProjectName, printWakuConfig } from "../src/utils.js";

const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
const packageRoot = __dirname.includes("dist")
  ? join(__dirname, "..", "..")
  : join(__dirname, "..");

interface Colors {
  reset: string;
  cyan: string;
  blue: string;
  gray: string;
  yellow: string;
}

// ANSI color codes
const colors: Colors = {
  reset: "\x1b[0m",
  cyan: "\x1b[36m",
  blue: "\x1b[34m",
  gray: "\x1b[90m",
  yellow: "\x1b[33m"
};

try {
  // Check if containers are running
  const projectName = getProjectName(packageRoot);
  const output: string = execSync(
    `docker compose --project-name ${projectName} ps --quiet`,
    {
      cwd: packageRoot,
      encoding: "utf-8",
      env: { ...process.env, COMPOSE_PROJECT_NAME: projectName }
    }
  ).trim();

  if (!output) {
    process.stdout.write(
      `${colors.gray}No nodes running. Start with: ${colors.cyan}npm run start${colors.reset}\n`
    );
    process.exit(0);
  }

  // Get cluster config from env or defaults
  const clusterId: string = process.env.CLUSTER_ID || DEFAULT_CLUSTER_ID;
  const node1Port: string = process.env.NODE1_WS_PORT || DEFAULT_NODE1_WS_PORT;
  const node2Port: string = process.env.NODE2_WS_PORT || DEFAULT_NODE2_WS_PORT;

  // Static peer IDs from --nodekey configuration
  // cspell:ignore nodekey
  const peer1: string = NODE1_PEER_ID;
  const peer2: string = NODE2_PEER_ID;

  // Print TypeScript-style config
  printWakuConfig(colors, node1Port, node2Port, peer1, peer2, clusterId);
} catch (error: unknown) {
  const err = error as { cause?: { code?: string }; message?: string };
  if (err.cause?.code === "ECONNREFUSED") {
    process.stderr.write(
      `${colors.yellow}⚠${colors.reset} Nodes are still starting. Try again in a few seconds.\n`
    );
    process.exit(1);
  } else {
    process.stderr.write(
      `${colors.yellow}✗${colors.reset} Error: ${err.message || String(error)}\n`
    );
    process.exit(1);
  }
}
packages/run/scripts/logs.ts (new file, 26 lines)

#!/usr/bin/env node

import { execSync } from "child_process";
import { dirname, join } from "path";
import { fileURLToPath } from "url";

import { getProjectName } from "../src/utils.js";

const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
const packageRoot = __dirname.includes("dist")
  ? join(__dirname, "..", "..")
  : join(__dirname, "..");

try {
  const projectName = getProjectName(packageRoot);
  execSync(`docker compose --project-name ${projectName} logs -f`, {
    cwd: packageRoot,
    stdio: "inherit",
    env: { ...process.env, COMPOSE_PROJECT_NAME: projectName }
  });
} catch (error: unknown) {
  const err = error as { message?: string };
  process.stderr.write(`Error viewing logs: ${err.message || String(error)}\n`);
  process.exit(1);
}
packages/run/scripts/start.ts (new executable file, 172 lines)

#!/usr/bin/env node

import { execSync } from "child_process";
import { dirname, join } from "path";
import { fileURLToPath } from "url";

import {
  DEFAULT_CLUSTER_ID,
  DEFAULT_NODE1_WS_PORT,
  DEFAULT_NODE2_WS_PORT,
  DEFAULT_NWAKU_IMAGE,
  NODE1_PEER_ID,
  NODE2_PEER_ID,
  POSTGRES_IMAGE,
  STARTUP_WAIT_MS
} from "../src/constants.js";
import { getProjectName, printWakuConfig } from "../src/utils.js";

const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
const packageRoot = __dirname.includes("dist")
  ? join(__dirname, "..", "..")
  : join(__dirname, "..");

interface Colors {
  reset: string;
  cyan: string;
  green: string;
  blue: string;
  gray: string;
  yellow: string;
}

// ANSI color codes
const colors: Colors = {
  reset: "\x1b[0m",
  cyan: "\x1b[36m",
  green: "\x1b[32m",
  blue: "\x1b[34m",
  gray: "\x1b[90m",
  yellow: "\x1b[33m"
};

function checkAndPullImages(): void {
  const nwakuImage = process.env.NWAKU_IMAGE || DEFAULT_NWAKU_IMAGE;
  const postgresImage = POSTGRES_IMAGE;
  const images = [
    { name: nwakuImage, label: "nwaku" },
    { name: postgresImage, label: "postgres" }
  ];

  for (const { name, label } of images) {
    try {
      // Check if image exists locally
      const imageId = execSync(`docker images -q ${name}`, {
        encoding: "utf-8"
      }).trim();

      if (!imageId) {
        // Image doesn't exist, pull it
        process.stdout.write(
          `${colors.cyan}Pulling ${label} image (${name})...${colors.reset}\n`
        );
        execSync(`docker pull ${name}`, { stdio: "inherit" });
        process.stdout.write(
          `${colors.green}✓${colors.reset} ${label} image ready\n`
        );
      }
    } catch (error) {
      process.stderr.write(
        `${colors.yellow}⚠${colors.reset} Failed to check/pull ${label} image. Continuing anyway...\n`
      );
    }
  }
}

async function waitWithProgress(ms: number): Promise<void> {
  const frames = ["⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏"];
  const startTime = Date.now();
  let frameIndex = 0;

  return new Promise((resolve) => {
    const interval = setInterval(() => {
      const elapsed = Date.now() - startTime;

      if (elapsed >= ms) {
        clearInterval(interval);
        process.stdout.write("\r" + " ".repeat(50) + "\r");
        resolve();
        return;
      }

      const frame = frames[frameIndex % frames.length];
      process.stdout.write(
        `\r${colors.cyan}${frame}${colors.reset} Waiting for nodes to start...`
      );
      frameIndex++;
    }, 100);
  });
}

process.stdout.write(
  `${colors.cyan}Starting local Waku development environment...${colors.reset}\n`
);

try {
  // Check and pull images if needed
  checkAndPullImages();

  // Start docker compose from package root
  const projectName = getProjectName(packageRoot);
  execSync(`docker compose --project-name ${projectName} up -d`, {
    cwd: packageRoot,
    stdio: ["ignore", "ignore", "pipe"],
    encoding: "utf-8",
    env: { ...process.env, COMPOSE_PROJECT_NAME: projectName }
  });

  // Wait for nodes to be ready
  await waitWithProgress(STARTUP_WAIT_MS);

  // Get cluster config from env or defaults
  const clusterId: string = process.env.CLUSTER_ID || DEFAULT_CLUSTER_ID;
  const node1Port: string = process.env.NODE1_WS_PORT || DEFAULT_NODE1_WS_PORT;
  const node2Port: string = process.env.NODE2_WS_PORT || DEFAULT_NODE2_WS_PORT;

  // Static peer IDs from --nodekey configuration
  // cspell:ignore nodekey
  const peer1: string = NODE1_PEER_ID;
  const peer2: string = NODE2_PEER_ID;

  // Print TypeScript-style config
  process.stdout.write(
    `${colors.green}✓${colors.reset} Network started successfully!\n\n`
  );
  process.stdout.write(
    `${colors.gray}Copy this into your application:${colors.reset}\n\n`
  );

  printWakuConfig(colors, node1Port, node2Port, peer1, peer2, clusterId);
  process.stdout.write(`\n`);
  process.stdout.write(`${colors.gray}Management:${colors.reset}\n`);

  // Detect if running via npx (published package) or npm run (development)
  const isPublished = __dirname.includes("dist");
  const cmdPrefix = isPublished ? "npx @waku/run" : "npm run";

  process.stdout.write(
    `  ${colors.cyan}${cmdPrefix} test${colors.reset} - Test network with a message\n`
  );
  process.stdout.write(
    `  ${colors.cyan}${cmdPrefix} logs${colors.reset} - View logs\n`
  );
  process.stdout.write(
    `  ${colors.cyan}${cmdPrefix} info${colors.reset} - Show config again\n`
  );
  process.stdout.write(
    `  ${colors.cyan}${cmdPrefix} stop${colors.reset} - Stop network\n`
  );
} catch (error: unknown) {
  const err = error as { cause?: { code?: string }; message?: string };
  if (err.cause?.code === "ECONNREFUSED") {
    process.stderr.write(
      `${colors.yellow}⚠${colors.reset} Nodes are still starting up. Run ${colors.cyan}npm run info${colors.reset} in a few seconds.\n`
    );
  } else {
    process.stderr.write(
      `${colors.yellow}✗${colors.reset} Error: ${err.message || String(error)}\n`
    );
  }
  process.exit(1);
}
packages/run/scripts/stop.ts (new file, 28 lines)

#!/usr/bin/env node

import { execSync } from "child_process";
import { dirname, join } from "path";
import { fileURLToPath } from "url";

import { getProjectName } from "../src/utils.js";

const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
const packageRoot = __dirname.includes("dist")
  ? join(__dirname, "..", "..")
  : join(__dirname, "..");

try {
  const projectName = getProjectName(packageRoot);
  execSync(`docker compose --project-name ${projectName} down`, {
    cwd: packageRoot,
    stdio: "inherit",
    env: { ...process.env, COMPOSE_PROJECT_NAME: projectName }
  });
} catch (error: unknown) {
  const err = error as { message?: string };
  process.stderr.write(
    `Error stopping network: ${err.message || String(error)}\n`
  );
  process.exit(1);
}
packages/run/scripts/test.ts (new file, 122 lines)

#!/usr/bin/env node

import { execSync } from "child_process";
import { dirname, join } from "path";
import { fileURLToPath } from "url";

import { Protocols } from "@waku/sdk";

import { WakuTestClient } from "../src/test-client.js";
import { getProjectName } from "../src/utils.js";

const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
const packageRoot = __dirname.includes("dist")
  ? join(__dirname, "..", "..")
  : join(__dirname, "..");

interface Colors {
  reset: string;
  cyan: string;
  green: string;
  red: string;
  yellow: string;
}

// ANSI color codes
const colors: Colors = {
  reset: "\x1b[0m",
  cyan: "\x1b[36m",
  green: "\x1b[32m",
  red: "\x1b[31m",
  yellow: "\x1b[33m"
};

async function main(): Promise<void> {
  let client: WakuTestClient | null = null;

  try {
    // Check if containers are running
    const projectName = getProjectName(packageRoot);
    const output: string = execSync(
      `docker compose --project-name ${projectName} ps --quiet`,
      {
        cwd: packageRoot,
        encoding: "utf-8",
        env: { ...process.env, COMPOSE_PROJECT_NAME: projectName }
      }
    ).trim();

    if (!output) {
      process.stderr.write(
        `${colors.red}✗${colors.reset} No nodes running. Start with: ${colors.cyan}npx @waku/run start${colors.reset}\n`
      );
      process.exit(1);
    }

    process.stdout.write(
      `${colors.cyan}Testing local Waku network...${colors.reset}\n\n`
    );

    // Step 1: Create client
    process.stdout.write(
      `${colors.cyan}→${colors.reset} Creating Waku light node...\n`
    );
    client = new WakuTestClient();

    // Step 2: Start and connect
    process.stdout.write(`${colors.cyan}→${colors.reset} Starting node...\n`);
    await client.start();

    // Step 3: Wait for peers
    process.stdout.write(
      `${colors.cyan}→${colors.reset} Waiting for peers...\n`
    );
    await client.waku!.waitForPeers([Protocols.LightPush]);
    const connectedPeers = client.waku!.libp2p.getPeers().length;
    process.stdout.write(
      `${colors.green}✓${colors.reset} Connected to ${connectedPeers} peer(s)\n`
    );

    // Step 4: Send test message
    process.stdout.write(
      `${colors.cyan}→${colors.reset} Sending lightpush message...\n`
    );
    const result = await client.sendTestMessage("Test from @waku/run");

    if (result.success) {
      process.stdout.write(
        `${colors.green}✓${colors.reset} Message sent successfully to ${result.messagesSent} peer(s)\n`
      );
      process.stdout.write(
        `\n${colors.green}✓ All tests passed!${colors.reset}\n`
      );
      process.stdout.write(
        `${colors.cyan}The local Waku network is working correctly.${colors.reset}\n`
      );
    } else {
      process.stderr.write(
        `${colors.red}✗${colors.reset} Failed to send message: ${result.error || "Unknown error"}\n`
      );
      process.stderr.write(
        `  Sent: ${result.messagesSent}, Failed: ${result.failures}\n`
      );
      process.exit(1);
    }
  } catch (error: unknown) {
    const err = error as { message?: string };
    process.stderr.write(
      `${colors.red}✗${colors.reset} Test failed: ${err.message || String(error)}\n`
    );
    process.exit(1);
  } finally {
    if (client) {
      await client.stop();
    }
  }
}

main().catch((error) => {
  process.stderr.write(`Unexpected error: ${String(error)}\n`);
  process.exit(1);
});
40 packages/run/src/cli.ts Normal file
@@ -0,0 +1,40 @@
#!/usr/bin/env node

import { spawn } from "child_process";
import { dirname, join } from "path";
import { fileURLToPath } from "url";

const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);

const command = process.argv[2];

const scriptMap: Record<string, string> = {
  start: join(__dirname, "..", "scripts", "start.js"),
  stop: join(__dirname, "..", "scripts", "stop.js"),
  info: join(__dirname, "..", "scripts", "info.js"),
  logs: join(__dirname, "..", "scripts", "logs.js"),
  test: join(__dirname, "..", "scripts", "test.js")
};

if (!command || !scriptMap[command]) {
  process.stderr.write("Usage: @waku/run <command>\n");
  process.stderr.write("\n");
  process.stderr.write("Commands:\n");
  process.stderr.write("  start   Start the local Waku network\n");
  process.stderr.write("  stop    Stop the local Waku network\n");
  process.stderr.write("  info    Show connection info for running network\n");
  process.stderr.write("  logs    View logs from running network\n");
  process.stderr.write("  test    Test the network by sending a message\n");
  process.exit(1);
}

const scriptPath = scriptMap[command];
const child = spawn("node", [scriptPath], {
  stdio: "inherit",
  env: process.env
});

child.on("exit", (code) => {
  process.exit(code || 0);
});
40 packages/run/src/constants.ts Normal file
@@ -0,0 +1,40 @@
/**
 * Static configuration constants for the local Waku development environment.
 * These values are derived from the --nodekey configuration in docker-compose.yml
 * cspell:ignore nodekey
 */

// Node private keys (from docker-compose.yml --nodekey)
export const NODE1_PRIVATE_KEY =
  "e419c3cf4f09ac3babdf61856e6faa0e0c6a7d97674d5401a0114616549c7632";
export const NODE2_PRIVATE_KEY =
  "50632ab0efd313bfb4aa842de716f03dacd181c863770abd145e3409290fdaa7";

// Derived peer IDs (libp2p identities from the private keys)
export const NODE1_PEER_ID =
  "16Uiu2HAmF6oAsd23RMAnZb3NJgxXrExxBTPMdEoih232iAZkviU2";
export const NODE2_PEER_ID =
  "16Uiu2HAm5aZU47YkiUoARqivbCXwuFPzFFXXiURAorySqAQbL6EQ";

// Static IP addresses (from docker-compose.yml network configuration)
export const NODE1_IP = "172.20.0.10";
export const NODE2_IP = "172.20.0.11";

// Default WebSocket ports for local nodes
export const DEFAULT_NODE1_WS_PORT = "60000";
export const DEFAULT_NODE2_WS_PORT = "60001";

// Default REST API ports for local nodes
export const DEFAULT_NODE1_REST_PORT = "8646";
export const DEFAULT_NODE2_REST_PORT = "8647";

// Docker images
export const DEFAULT_NWAKU_IMAGE = "wakuorg/nwaku:v0.36.0";
export const POSTGRES_IMAGE = "postgres:15.4-alpine3.18";

// Timing configuration
export const STARTUP_WAIT_MS = 20000; // Time to wait for nodes to start

// Network configuration
export const DEFAULT_CLUSTER_ID = "0";
export const DEFAULT_NUM_SHARDS_IN_CLUSTER = 8;
30 packages/run/src/run-tests.js Normal file
@@ -0,0 +1,30 @@
#!/usr/bin/env node

import { spawn } from "child_process";

const mochaArgs = [
  "mocha",
  "--require",
  "ts-node/register",
  "--project",
  "./tsconfig.json",
  ...process.argv.slice(2)
];

// Run mocha tests
const mocha = spawn("npx", mochaArgs, {
  stdio: "inherit",
  env: {
    ...process.env,
    NODE_ENV: "test"
  }
});

mocha.on("error", (error) => {
  console.log(`Error running mocha tests: ${error.message}`); // eslint-disable-line no-console
  process.exit(1);
});

mocha.on("exit", (code) => {
  process.exit(code || 0);
});
126 packages/run/src/test-client.ts Normal file
@@ -0,0 +1,126 @@
import { createEncoder } from "@waku/core";
import type { LightNode } from "@waku/interfaces";
import { createLightNode } from "@waku/sdk";
import { createRoutingInfo } from "@waku/utils";

import {
  DEFAULT_CLUSTER_ID,
  DEFAULT_NODE1_WS_PORT,
  DEFAULT_NODE2_WS_PORT,
  DEFAULT_NUM_SHARDS_IN_CLUSTER,
  NODE1_PEER_ID,
  NODE2_PEER_ID
} from "./constants.js";

export interface WakuTestClientOptions {
  node1Port?: string;
  node2Port?: string;
  clusterId?: number;
  numShardsInCluster?: number;
  contentTopic?: string;
}

export interface TestResult {
  success: boolean;
  connectedPeers: number;
  messagesSent: number;
  failures: number;
  error?: string;
}

export class WakuTestClient {
  public waku: LightNode | null = null;
  private options: Required<WakuTestClientOptions>;

  public constructor(options: WakuTestClientOptions = {}) {
    this.options = {
      node1Port:
        options.node1Port || process.env.NODE1_WS_PORT || DEFAULT_NODE1_WS_PORT,
      node2Port:
        options.node2Port || process.env.NODE2_WS_PORT || DEFAULT_NODE2_WS_PORT,
      clusterId: options.clusterId ?? parseInt(DEFAULT_CLUSTER_ID),
      numShardsInCluster:
        options.numShardsInCluster ?? DEFAULT_NUM_SHARDS_IN_CLUSTER,
      contentTopic: options.contentTopic || "/waku-run/1/test/proto"
    };
  }

  /**
   * Create and start the Waku light node
   */
  public async start(): Promise<void> {
    const { node1Port, node2Port, clusterId, numShardsInCluster } =
      this.options;

    const networkConfig = {
      clusterId,
      numShardsInCluster
    };

    this.waku = await createLightNode({
      defaultBootstrap: false,
      bootstrapPeers: [
        `/ip4/127.0.0.1/tcp/${node1Port}/ws/p2p/${NODE1_PEER_ID}`,
        `/ip4/127.0.0.1/tcp/${node2Port}/ws/p2p/${NODE2_PEER_ID}`
      ],
      networkConfig,
      numPeersToUse: 2,
      libp2p: {
        filterMultiaddrs: false
      }
    });

    await this.waku.start();
  }

  /**
   * Send a test message via lightpush
   */
  public async sendTestMessage(
    payload: string = "Hello Waku!"
  ): Promise<TestResult> {
    if (!this.waku) {
      throw new Error("Waku node not started. Call start() first.");
    }

    try {
      const { contentTopic, clusterId, numShardsInCluster } = this.options;
      const networkConfig = { clusterId, numShardsInCluster };

      const routingInfo = createRoutingInfo(networkConfig, { contentTopic });
      const encoder = createEncoder({ contentTopic, routingInfo });

      const result = await this.waku.lightPush.send(encoder, {
        payload: new TextEncoder().encode(payload)
      });

      const connectedPeers = this.waku.libp2p.getPeers().length;

      return {
        success:
          result.successes.length > 0 && (result.failures?.length || 0) === 0,
        connectedPeers,
        messagesSent: result.successes.length,
        failures: result.failures?.length || 0
      };
    } catch (error) {
      return {
        success: false,
        connectedPeers: this.waku.libp2p.getPeers().length,
        messagesSent: 0,
        failures: 0,
        error: error instanceof Error ? error.message : String(error)
      };
    }
  }

  /**
   * Stop the Waku node
   */
  public async stop(): Promise<void> {
    if (this.waku) {
      await this.waku.stop();
      this.waku = null;
    }
  }
}
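For orientation (not part of the diff): a minimal sketch of driving the client above from a script in this package. The relative import path and the content topic are assumptions based on the scripts/ layout.

// Sketch only - exercises WakuTestClient against the running local network.
import { WakuTestClient } from "../src/test-client.js";

async function demo(): Promise<void> {
  const client = new WakuTestClient({ contentTopic: "/my-app/1/demo/proto" });
  try {
    await client.start(); // dials both local nwaku nodes over WebSocket
    const result = await client.sendTestMessage("hello from a sketch");
    if (!result.success) {
      throw new Error(result.error ?? `sent to only ${result.messagesSent} peer(s)`);
    }
  } finally {
    await client.stop(); // always release the light node
  }
}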
63 packages/run/src/utils.ts Normal file
@@ -0,0 +1,63 @@
import { readFileSync } from "fs";
import { join } from "path";

import { DEFAULT_NUM_SHARDS_IN_CLUSTER } from "./constants.js";

export function getProjectName(packageRoot: string): string {
  const packageJsonPath = join(packageRoot, "package.json");
  const packageJson = JSON.parse(readFileSync(packageJsonPath, "utf-8"));
  // Docker Compose project names must consist only of lowercase alphanumeric characters, hyphens, and underscores
  const name = packageJson.name.replace("@", "").replace("/", "-");
  const version = packageJson.version.replace(/\./g, "-");
  return `${name}-${version}`;
}

interface Colors {
  reset: string;
  cyan: string;
  blue: string;
  yellow: string;
}

export function printWakuConfig(
  colors: Colors,
  node1Port: string,
  node2Port: string,
  peer1: string,
  peer2: string,
  clusterId: string
): void {
  process.stdout.write(
    `${colors.blue}import${colors.reset} { createLightNode } ${colors.blue}from${colors.reset} ${colors.yellow}"@waku/sdk"${colors.reset};\n`
  );
  process.stdout.write(`\n`);
  process.stdout.write(
    `${colors.blue}const${colors.reset} waku = ${colors.blue}await${colors.reset} createLightNode({\n`
  );
  process.stdout.write(
    `  defaultBootstrap: ${colors.cyan}false${colors.reset},\n`
  );
  process.stdout.write(`  bootstrapPeers: [\n`);
  process.stdout.write(
    `    ${colors.yellow}"/ip4/127.0.0.1/tcp/${node1Port}/ws/p2p/${peer1}"${colors.reset},\n`
  );
  process.stdout.write(
    `    ${colors.yellow}"/ip4/127.0.0.1/tcp/${node2Port}/ws/p2p/${peer2}"${colors.reset}\n`
  );
  process.stdout.write(`  ],\n`);
  process.stdout.write(`  numPeersToUse: ${colors.cyan}2${colors.reset},\n`);
  process.stdout.write(`  libp2p: {\n`);
  process.stdout.write(
    `    filterMultiaddrs: ${colors.cyan}false${colors.reset}\n`
  );
  process.stdout.write(`  },\n`);
  process.stdout.write(`  networkConfig: {\n`);
  process.stdout.write(
    `    clusterId: ${colors.cyan}${clusterId}${colors.reset},\n`
  );
  process.stdout.write(
    `    numShardsInCluster: ${colors.cyan}${DEFAULT_NUM_SHARDS_IN_CLUSTER}${colors.reset}\n`
  );
  process.stdout.write(`  }\n`);
  process.stdout.write(`});\n`);
}
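For context (not part of the diff): fed the defaults from constants.ts, the snippet printWakuConfig writes to stdout, stripped of the ANSI color codes, reads as follows.

import { createLightNode } from "@waku/sdk";

const waku = await createLightNode({
  defaultBootstrap: false,
  bootstrapPeers: [
    "/ip4/127.0.0.1/tcp/60000/ws/p2p/16Uiu2HAmF6oAsd23RMAnZb3NJgxXrExxBTPMdEoih232iAZkviU2",
    "/ip4/127.0.0.1/tcp/60001/ws/p2p/16Uiu2HAm5aZU47YkiUoARqivbCXwuFPzFFXXiURAorySqAQbL6EQ"
  ],
  numPeersToUse: 2,
  libp2p: {
    filterMultiaddrs: false
  },
  networkConfig: {
    clusterId: 0,
    numShardsInCluster: 8
  }
});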
120 packages/run/tests/basic.spec.ts Normal file
@@ -0,0 +1,120 @@
import { execSync } from "child_process";
import { dirname, join } from "path";
import { fileURLToPath } from "url";

import { Protocols } from "@waku/sdk";
import { expect } from "chai";

import {
  DEFAULT_NODE1_REST_PORT,
  DEFAULT_NODE2_REST_PORT
} from "../src/constants.js";
import { WakuTestClient } from "../src/test-client.js";
import { getProjectName } from "../src/utils.js";

const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
const packageRoot = join(__dirname, "..");

describe("Waku Run - Basic Test", function () {
  this.timeout(90000);

  let client: WakuTestClient;

  before(async function () {
    // Step 1: Start the nodes
    const projectName = getProjectName(packageRoot);
    execSync(`docker compose --project-name ${projectName} up -d`, {
      cwd: packageRoot,
      stdio: "inherit",
      env: { ...process.env, COMPOSE_PROJECT_NAME: projectName }
    });

    // Wait for nodes to be ready
    const maxRetries = 30;
    const retryDelay = 2000;
    let ready = false;

    for (let i = 0; i < maxRetries; i++) {
      try {
        await fetch(
          `http://127.0.0.1:${DEFAULT_NODE1_REST_PORT}/debug/v1/info`
        );
        await fetch(
          `http://127.0.0.1:${DEFAULT_NODE2_REST_PORT}/debug/v1/info`
        );
        ready = true;
        break;
      } catch {
        await new Promise((resolve) => setTimeout(resolve, retryDelay));
      }
    }

    if (!ready) {
      throw new Error("Nodes failed to start within expected time");
    }

    // Nodes automatically connect via --staticnode configuration
    // cspell:ignore staticnode
    // Wait for nwaku nodes to connect to each other
    let connected = false;
    for (let i = 0; i < 15; i++) {
      try {
        const peers = await fetch(
          `http://127.0.0.1:${DEFAULT_NODE1_REST_PORT}/admin/v1/peers`
        ).then((r) => r.json());
        if (peers.length > 0 && peers[0].connected === "Connected") {
          connected = true;
          break;
        }
      } catch {
        // Ignore errors
      }
      await new Promise((resolve) => setTimeout(resolve, 1000));
    }

    if (!connected) {
      throw new Error("Nwaku nodes failed to connect to each other");
    }
  });

  after(async function () {
    // Step 4: Stop the nodes
    if (client) {
      await client.stop();
    }
    const projectName = getProjectName(packageRoot);
    execSync(`docker compose --project-name ${projectName} down`, {
      cwd: packageRoot,
      stdio: "inherit",
      env: { ...process.env, COMPOSE_PROJECT_NAME: projectName }
    });
  });

  it("should connect to both nodes and send lightpush message to both peers", async function () {
    // Step 2: Connect to nodes via js-waku using WakuTestClient
    client = new WakuTestClient({
      contentTopic: "/test/1/basic/proto"
    });

    await client.start();

    // Wait for both peers to be connected
    await client.waku!.waitForPeers([Protocols.LightPush]);
    const connectedPeers = client.waku!.libp2p.getPeers().length;
    expect(connectedPeers).to.equal(
      2,
      "Should be connected to both nwaku nodes"
    );

    // Step 3: Send lightpush message - it should be sent to both peers
    const result = await client.sendTestMessage("Hello Waku!");

    expect(result.success).to.be.true;
    expect(result.messagesSent).to.equal(
      2,
      "Message should be sent to both peers"
    );
    expect(result.failures).to.equal(0, "Should have no failures");
  });
});
7 packages/run/tsconfig.dev.json Normal file
@@ -0,0 +1,7 @@
{
  "extends": "../../tsconfig.dev",
  "compilerOptions": {
    "rootDir": "."
  },
  "include": ["src", "scripts", "tests"]
}
10 packages/run/tsconfig.json Normal file
@@ -0,0 +1,10 @@
{
  "extends": "../../tsconfig",
  "compilerOptions": {
    "outDir": "dist/",
    "rootDir": ".",
    "tsBuildInfoFile": "dist/.tsbuildinfo"
  },
  "include": ["src", "scripts"],
  "exclude": ["tests", "dist", "node_modules"]
}
@@ -24,7 +24,7 @@
   "homepage": "https://github.com/waku-org/js-waku/tree/master/packages/sdk#readme",
   "repository": {
     "type": "git",
-    "url": "https://github.com/waku-org/js-waku.git"
+    "url": "git+https://github.com/waku-org/js-waku.git"
   },
   "bugs": {
     "url": "https://github.com/waku-org/js-waku/issues"
@@ -24,7 +24,7 @@
   "homepage": "https://github.com/waku-org/js-waku/tree/master/packages/scalable-data-sync#readme",
   "repository": {
     "type": "git",
-    "url": "https://github.com/waku-org/js-waku.git"
+    "url": "git+https://github.com/waku-org/js-waku.git"
  },
   "bugs": {
     "url": "https://github.com/waku-org/js-waku/issues"
@@ -14,8 +14,14 @@ export {
   type HistoryEntry,
   type ChannelId,
   type MessageChannelEvents,
-  type SenderId,
+  type ParticipantId,
   type MessageId
 } from "./message_channel/index.js";

+/**
+ * @deprecated Use ParticipantId instead. SenderId has been renamed to ParticipantId
+ * to better reflect that it represents a channel participant, not just a message sender.
+ */
+export type { ParticipantId as SenderId } from "./message_channel/index.js";
+
 export { BloomFilter };
@@ -1,4 +1,4 @@
-import { HistoryEntry, Message, MessageId } from "./message.js";
+import { HistoryEntry, Message, MessageId, ParticipantId } from "./message.js";

 export enum MessageChannelEvent {
   OutMessageSent = "sds:out:message-sent",
@@ -10,7 +10,13 @@ export enum MessageChannelEvent {
   OutSyncSent = "sds:out:sync-sent",
   InSyncReceived = "sds:in:sync-received",
   InMessageLost = "sds:in:message-irretrievably-lost",
-  ErrorTask = "sds:error-task"
+  ErrorTask = "sds:error-task",
+  // SDS-R Repair Events
+  RepairRequestQueued = "sds:repair:request-queued",
+  RepairRequestSent = "sds:repair:request-sent",
+  RepairRequestReceived = "sds:repair:request-received",
+  RepairResponseQueued = "sds:repair:response-queued",
+  RepairResponseSent = "sds:repair:response-sent"
 }

 export type MessageChannelEvents = {
@@ -26,5 +32,24 @@ export type MessageChannelEvents = {
   [MessageChannelEvent.InMessageLost]: CustomEvent<HistoryEntry[]>;
   [MessageChannelEvent.OutSyncSent]: CustomEvent<Message>;
   [MessageChannelEvent.InSyncReceived]: CustomEvent<Message>;
-  [MessageChannelEvent.ErrorTask]: CustomEvent<any>;
+  [MessageChannelEvent.ErrorTask]: CustomEvent<unknown>;
+  [MessageChannelEvent.RepairRequestQueued]: CustomEvent<{
+    messageId: MessageId;
+    tReq: number;
+  }>;
+  [MessageChannelEvent.RepairRequestSent]: CustomEvent<{
+    messageIds: MessageId[];
+    carrierMessageId: MessageId;
+  }>;
+  [MessageChannelEvent.RepairRequestReceived]: CustomEvent<{
+    messageIds: MessageId[];
+    fromSenderId?: ParticipantId;
+  }>;
+  [MessageChannelEvent.RepairResponseQueued]: CustomEvent<{
+    messageId: MessageId;
+    tResp: number;
+  }>;
+  [MessageChannelEvent.RepairResponseSent]: CustomEvent<{
+    messageId: MessageId;
+  }>;
 };
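A short sketch (not from the diff) of observing the new SDS-R events; `channel` is assumed to be a MessageChannel instance, which extends TypedEventEmitter<MessageChannelEvents>, and MessageChannelEvent is assumed to be in scope.

// Log repair traffic flowing through a channel.
channel.addEventListener(MessageChannelEvent.RepairRequestSent, (event) => {
  const { messageIds, carrierMessageId } = event.detail;
  // A sync or content message is carrying repair requests for these IDs.
  console.log(`requested repair of ${messageIds.length} message(s) via ${carrierMessageId}`);
});

channel.addEventListener(MessageChannelEvent.RepairResponseSent, (event) => {
  // A locally held message was rebroadcast in response to a peer's request.
  console.log(`repair response sent for ${event.detail.messageId}`);
});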
@@ -8,7 +8,7 @@ export {
   HistoryEntry,
   Message,
   MessageId,
-  SenderId,
+  ParticipantId,
   SyncMessage,
   isContentMessage,
   isEphemeralMessage,
@@ -44,6 +44,7 @@ describe("Message serialization", () => {
       [{ messageId: depMessageId, retrievalHint: depRetrievalHint }],
       0n,
       undefined,
+      undefined,
       undefined
     );

@@ -54,6 +55,39 @@ describe("Message serialization", () => {
         { messageId: depMessageId, retrievalHint: depRetrievalHint }
       ]);
   });
+
+  it("Repair Request", () => {
+    const repairMessageId = "missing-message";
+    const repairRetrievalHint = utf8ToBytes("missing-retrieval");
+    const repairSenderId = "original-sender";
+    const message = new Message(
+      "123",
+      "my-channel",
+      "me",
+      [],
+      0n,
+      undefined,
+      undefined,
+      [
+        {
+          messageId: repairMessageId,
+          retrievalHint: repairRetrievalHint,
+          senderId: repairSenderId
+        }
+      ]
+    );
+
+    const bytes = message.encode();
+    const decMessage = Message.decode(bytes);
+
+    expect(decMessage!.repairRequest).to.deep.equal([
+      {
+        messageId: repairMessageId,
+        retrievalHint: repairRetrievalHint,
+        senderId: repairSenderId
+      }
+    ]);
+  });
 });

 describe("ContentMessage comparison with < operator", () => {
@@ -4,19 +4,20 @@ import { Logger } from "@waku/utils";
 export type MessageId = string;
 export type HistoryEntry = proto_sds_message.HistoryEntry;
 export type ChannelId = string;
-export type SenderId = string;
+export type ParticipantId = string;

 const log = new Logger("sds:message");

 export class Message implements proto_sds_message.SdsMessage {
   public constructor(
-    public messageId: string,
+    public messageId: MessageId,
     public channelId: string,
-    public senderId: string,
+    public senderId: ParticipantId,
     public causalHistory: proto_sds_message.HistoryEntry[],
     public lamportTimestamp?: bigint | undefined,
     public bloomFilter?: Uint8Array<ArrayBufferLike> | undefined,
     public content?: Uint8Array<ArrayBufferLike> | undefined,
+    public repairRequest: proto_sds_message.HistoryEntry[] = [],
     /**
      * Not encoded, set after it is sent, used to include in follow-up messages
      */
@@ -38,7 +39,8 @@ export class Message implements proto_sds_message.SdsMessage {
       causalHistory,
       lamportTimestamp,
       bloomFilter,
-      content
+      content,
+      repairRequest
     } = proto_sds_message.SdsMessage.decode(data);

     if (testContentMessage({ lamportTimestamp, content })) {
@@ -49,7 +51,8 @@ export class Message implements proto_sds_message.SdsMessage {
         causalHistory,
         lamportTimestamp!,
         bloomFilter,
-        content!
+        content!,
+        repairRequest
       );
     }

@@ -61,7 +64,8 @@ export class Message implements proto_sds_message.SdsMessage {
         causalHistory,
         undefined,
         bloomFilter,
-        content!
+        content!,
+        repairRequest
       );
     }

@@ -73,7 +77,8 @@ export class Message implements proto_sds_message.SdsMessage {
         causalHistory,
         lamportTimestamp!,
         bloomFilter,
-        undefined
+        undefined,
+        repairRequest
       );
     }
     log.error(
@@ -90,13 +95,14 @@ export class Message implements proto_sds_message.SdsMessage {

 export class SyncMessage extends Message {
   public constructor(
-    public messageId: string,
+    public messageId: MessageId,
     public channelId: string,
-    public senderId: string,
+    public senderId: ParticipantId,
     public causalHistory: proto_sds_message.HistoryEntry[],
     public lamportTimestamp: bigint,
     public bloomFilter: Uint8Array<ArrayBufferLike> | undefined,
     public content: undefined,
+    public repairRequest: proto_sds_message.HistoryEntry[] = [],
     /**
      * Not encoded, set after it is sent, used to include in follow-up messages
      */
@@ -110,6 +116,7 @@ export class SyncMessage extends Message {
       lamportTimestamp,
       bloomFilter,
       content,
+      repairRequest,
       retrievalHint
     );
   }
@@ -134,13 +141,14 @@ export function isSyncMessage(

 export class EphemeralMessage extends Message {
   public constructor(
-    public messageId: string,
+    public messageId: MessageId,
     public channelId: string,
-    public senderId: string,
+    public senderId: ParticipantId,
     public causalHistory: proto_sds_message.HistoryEntry[],
     public lamportTimestamp: undefined,
     public bloomFilter: Uint8Array<ArrayBufferLike> | undefined,
     public content: Uint8Array<ArrayBufferLike>,
+    public repairRequest: proto_sds_message.HistoryEntry[] = [],
     /**
      * Not encoded, set after it is sent, used to include in follow-up messages
      */
@@ -157,6 +165,7 @@ export class EphemeralMessage extends Message {
       lamportTimestamp,
       bloomFilter,
       content,
+      repairRequest,
       retrievalHint
     );
   }
@@ -182,13 +191,14 @@ function testEphemeralMessage(message: {

 export class ContentMessage extends Message {
   public constructor(
-    public messageId: string,
+    public messageId: MessageId,
     public channelId: string,
-    public senderId: string,
+    public senderId: ParticipantId,
     public causalHistory: proto_sds_message.HistoryEntry[],
     public lamportTimestamp: bigint,
     public bloomFilter: Uint8Array<ArrayBufferLike> | undefined,
     public content: Uint8Array<ArrayBufferLike>,
+    public repairRequest: proto_sds_message.HistoryEntry[] = [],
     /**
      * Not encoded, set after it is sent, used to include in follow-up messages
      */
@@ -205,6 +215,7 @@ export class ContentMessage extends Message {
       lamportTimestamp,
       bloomFilter,
       content,
+      repairRequest,
       retrievalHint
     );
   }
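To make the new constructor shape concrete, a sketch (not from the diff) of round-tripping a repair request; repairRequest is the new trailing encoded field, placed after content and before the non-encoded retrievalHint, mirroring the "Repair Request" spec test above.

// Sketch only: repairRequest survives an encode/decode round trip.
const msg = new Message(
  "id-1", // messageId
  "my-channel", // channelId
  "me", // senderId
  [], // causalHistory
  0n, // lamportTimestamp
  undefined, // bloomFilter
  undefined, // content
  [{ messageId: "missing-1", senderId: "original-sender" }] // repairRequest
);
const decoded = Message.decode(msg.encode());
// decoded?.repairRequest[0].messageId === "missing-1"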
@@ -162,7 +162,8 @@ describe("MessageChannel", function () {
       .slice(-causalHistorySize - 1, -1)
       .map((message) => ({
         messageId: MessageChannel.getMessageId(utf8ToBytes(message)),
-        retrievalHint: undefined
+        retrievalHint: undefined,
+        senderId: "alice"
       }));
     expect(causalHistory).to.deep.equal(expectedCausalHistory);
   });
@@ -298,6 +299,7 @@ describe("MessageChannel", function () {
         1n,
         undefined,
         payload,
+        undefined,
         testRetrievalHint
       ),
       testRetrievalHint
@@ -18,15 +18,21 @@ import {
   isSyncMessage,
   Message,
   MessageId,
-  SenderId,
+  ParticipantId,
   SyncMessage
 } from "./message.js";
+import { RepairConfig, RepairManager } from "./repair/repair.js";

 export const DEFAULT_BLOOM_FILTER_OPTIONS = {
   capacity: 10000,
   errorRate: 0.001
 };

+/**
+ * Maximum number of repair requests to include in a single message
+ */
+const MAX_REPAIR_REQUESTS_PER_MESSAGE = 3;
+
 const DEFAULT_CAUSAL_HISTORY_SIZE = 200;
 const DEFAULT_POSSIBLE_ACKS_THRESHOLD = 2;
@@ -46,6 +52,15 @@ export interface MessageChannelOptions {
    * How many possible acks does it take to consider it a definitive ack.
    */
   possibleAcksThreshold?: number;
+  /**
+   * Whether to enable SDS-R repair protocol.
+   * @default true
+   */
+  enableRepair?: boolean;
+  /**
+   * SDS-R repair configuration. Only used if enableRepair is true.
+   */
+  repairConfig?: RepairConfig;
 }

 export type ILocalHistory = Pick<
@@ -55,7 +70,7 @@ export type ILocalHistory = Pick<

 export class MessageChannel extends TypedEventEmitter<MessageChannelEvents> {
   public readonly channelId: ChannelId;
-  public readonly senderId: SenderId;
+  public readonly senderId: ParticipantId;
   private lamportTimestamp: bigint;
   private filter: DefaultBloomFilter;
   private outgoingBuffer: ContentMessage[];
@@ -66,6 +81,7 @@ export class MessageChannel extends TypedEventEmitter<MessageChannelEvents> {
   private readonly causalHistorySize: number;
   private readonly possibleAcksThreshold: number;
   private readonly timeoutForLostMessagesMs?: number;
+  private readonly repairManager?: RepairManager;

   private tasks: Task[] = [];
   private handlers: Handlers = {
@@ -88,7 +104,7 @@ export class MessageChannel extends TypedEventEmitter<MessageChannelEvents> {

   public constructor(
     channelId: ChannelId,
-    senderId: SenderId,
+    senderId: ParticipantId,
     options: MessageChannelOptions = {},
     localHistory: ILocalHistory = new MemLocalHistory()
   ) {
@@ -109,6 +125,17 @@ export class MessageChannel extends TypedEventEmitter<MessageChannelEvents> {
       options.possibleAcksThreshold ?? DEFAULT_POSSIBLE_ACKS_THRESHOLD;
     this.timeReceived = new Map();
     this.timeoutForLostMessagesMs = options.timeoutForLostMessagesMs;
+
+    // Only construct RepairManager if repair is enabled (default: true)
+    if (options.enableRepair ?? true) {
+      this.repairManager = new RepairManager(
+        senderId,
+        options.repairConfig,
+        (event: string, detail: unknown) => {
+          this.safeSendEvent(event as MessageChannelEvent, { detail });
+        }
+      );
+    }
   }

   public static getMessageId(payload: Uint8Array): MessageId {
@@ -272,9 +299,7 @@ export class MessageChannel extends TypedEventEmitter<MessageChannelEvents> {
     );
     const missingDependencies = message.causalHistory.filter(
       (messageHistoryEntry) =>
-        !this.localHistory.some(
-          ({ messageId }) => messageId === messageHistoryEntry.messageId
-        )
+        !this.isMessageAvailable(messageHistoryEntry.messageId)
     );
     if (missingDependencies.length === 0) {
       if (isContentMessage(message) && this.deliverMessage(message)) {
@@ -355,6 +380,44 @@ export class MessageChannel extends TypedEventEmitter<MessageChannelEvents> {
     );
   }

+  /**
+   * Sweep repair incoming buffer and rebroadcast messages ready for repair.
+   * Per SDS-R spec: periodically check for repair responses that are due.
+   *
+   * @param callback - callback to rebroadcast the message
+   * @returns Promise that resolves when all ready repairs have been sent
+   */
+  public async sweepRepairIncomingBuffer(
+    callback?: (message: Message) => Promise<boolean>
+  ): Promise<Message[]> {
+    const repairsToSend =
+      this.repairManager?.sweepIncomingBuffer(this.localHistory) ?? [];
+
+    if (callback) {
+      for (const message of repairsToSend) {
+        try {
+          await callback(message);
+          log.info(
+            this.senderId,
+            "repair message rebroadcast",
+            message.messageId
+          );
+
+          // Emit RepairResponseSent event
+          this.safeSendEvent(MessageChannelEvent.RepairResponseSent, {
+            detail: {
+              messageId: message.messageId
+            }
+          });
+        } catch (error) {
+          log.error("Failed to rebroadcast repair message:", error);
+        }
+      }
+    }
+
+    return repairsToSend;
+  }
+
   /**
    * Send a sync message to the SDS channel.
    *
@@ -369,6 +432,12 @@ export class MessageChannel extends TypedEventEmitter<MessageChannelEvents> {
     callback?: (message: SyncMessage) => Promise<boolean>
   ): Promise<boolean> {
     this.lamportTimestamp = lamportTimestampIncrement(this.lamportTimestamp);
+
+    // Get repair requests to include in sync message (SDS-R)
+    const repairRequests =
+      this.repairManager?.getRepairRequests(MAX_REPAIR_REQUESTS_PER_MESSAGE) ??
+      [];
+
     const message = new SyncMessage(
       // does not need to be secure randomness
       `sync-${Math.random().toString(36).substring(2)}`,
@@ -376,18 +445,22 @@ export class MessageChannel extends TypedEventEmitter<MessageChannelEvents> {
       this.senderId,
       this.localHistory
         .slice(-this.causalHistorySize)
-        .map(({ messageId, retrievalHint }) => {
-          return { messageId, retrievalHint };
+        .map(({ messageId, retrievalHint, senderId }) => {
+          return { messageId, retrievalHint, senderId };
         }),
       this.lamportTimestamp,
       this.filter.toBytes(),
-      undefined
+      undefined,
+      repairRequests
     );

-    if (!message.causalHistory || message.causalHistory.length === 0) {
+    if (
+      (!message.causalHistory || message.causalHistory.length === 0) &&
+      repairRequests.length === 0
+    ) {
       log.info(
         this.senderId,
-        "no causal history in sync message, aborting sending"
+        "no causal history and no repair requests in sync message, aborting sending"
       );
       return false;
     }
@@ -399,6 +472,17 @@ export class MessageChannel extends TypedEventEmitter<MessageChannelEvents> {
       this.safeSendEvent(MessageChannelEvent.OutSyncSent, {
         detail: message
       });
+
+      // Emit RepairRequestSent event if repair requests were included
+      if (repairRequests.length > 0) {
+        this.safeSendEvent(MessageChannelEvent.RepairRequestSent, {
+          detail: {
+            messageIds: repairRequests.map((r) => r.messageId),
+            carrierMessageId: message.messageId
+          }
+        });
+      }
+
       return true;
     } catch (error) {
       log.error(
@@ -464,6 +548,26 @@ export class MessageChannel extends TypedEventEmitter<MessageChannelEvents> {
         detail: message
       });
     }
+
+    // SDS-R: Handle received message in repair manager
+    this.repairManager?.markMessageReceived(message.messageId);
+
+    // SDS-R: Process incoming repair requests
+    if (message.repairRequest && message.repairRequest.length > 0) {
+      // Emit RepairRequestReceived event
+      this.safeSendEvent(MessageChannelEvent.RepairRequestReceived, {
+        detail: {
+          messageIds: message.repairRequest.map((r) => r.messageId),
+          fromSenderId: message.senderId
+        }
+      });
+
+      this.repairManager?.processIncomingRepairRequests(
+        message.repairRequest,
+        this.localHistory
+      );
+    }
+
     this.reviewAckStatus(message);
     if (isContentMessage(message)) {
       this.filter.insert(message.messageId);
@@ -471,9 +575,7 @@ export class MessageChannel extends TypedEventEmitter<MessageChannelEvents> {

     const missingDependencies = message.causalHistory.filter(
       (messageHistoryEntry) =>
-        !this.localHistory.some(
-          ({ messageId }) => messageId === messageHistoryEntry.messageId
-        )
+        !this.isMessageAvailable(messageHistoryEntry.messageId)
     );

     if (missingDependencies.length > 0) {
@@ -487,6 +589,9 @@ export class MessageChannel extends TypedEventEmitter<MessageChannelEvents> {
         missingDependencies.map((ch) => ch.messageId)
       );
+
+      // SDS-R: Track missing dependencies in repair manager
+      this.repairManager?.markDependenciesMissing(missingDependencies);

       this.safeSendEvent(MessageChannelEvent.InMessageMissing, {
         detail: Array.from(missingDependencies)
       });
@@ -549,18 +654,26 @@ export class MessageChannel extends TypedEventEmitter<MessageChannelEvents> {
     // It's a new message
     if (!message) {
       log.info(this.senderId, "sending new message", messageId);
+
+      // Get repair requests to include in the message (SDS-R)
+      const repairRequests =
+        this.repairManager?.getRepairRequests(
+          MAX_REPAIR_REQUESTS_PER_MESSAGE
+        ) ?? [];
+
       message = new ContentMessage(
         messageId,
         this.channelId,
         this.senderId,
         this.localHistory
           .slice(-this.causalHistorySize)
-          .map(({ messageId, retrievalHint }) => {
-            return { messageId, retrievalHint };
+          .map(({ messageId, retrievalHint, senderId }) => {
+            return { messageId, retrievalHint, senderId };
           }),
         this.lamportTimestamp,
         this.filter.toBytes(),
-        payload
+        payload,
+        repairRequests
       );

       this.outgoingBuffer.push(message);
@@ -616,6 +729,26 @@ export class MessageChannel extends TypedEventEmitter<MessageChannelEvents> {
     }
   }

+  /**
+   * Check if a message is available (either in localHistory or incomingBuffer)
+   * This prevents treating messages as "missing" when they've already been received
+   * but are waiting in the incoming buffer for their dependencies.
+   *
+   * @param messageId - The ID of the message to check
+   * @private
+   */
+  private isMessageAvailable(messageId: MessageId): boolean {
+    // Check if in local history
+    if (this.localHistory.some((m) => m.messageId === messageId)) {
+      return true;
+    }
+    // Check if in incoming buffer (already received, waiting for dependencies)
+    if (this.incomingBuffer.some((m) => m.messageId === messageId)) {
+      return true;
+    }
+    return false;
+  }
+
   /**
    * Return true if the message was "delivered"
    *
@@ -657,6 +790,7 @@ export class MessageChannel extends TypedEventEmitter<MessageChannelEvents> {
     }

     this.localHistory.push(message);
+
     return true;
   }

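A sketch (not from the diff) of how an application might drive the new repair path: poll sweepRepairIncomingBuffer so queued repair responses are rebroadcast once their T_resp is due. The 5-second interval and the sendOverWaku transport function are assumptions, not part of this change.

const sweepIntervalMs = 5000; // assumption - tune for the transport
setInterval(() => {
  void channel.sweepRepairIncomingBuffer(async (message) => {
    // Rebroadcast over the app's transport; return true on success.
    await sendOverWaku(message.encode()); // sendOverWaku is hypothetical
    return true;
  });
}, sweepIntervalMs);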
239 packages/sds/src/message_channel/repair/buffers.spec.ts Normal file
@@ -0,0 +1,239 @@
import { expect } from "chai";

import type { HistoryEntry } from "../message.js";

import { IncomingRepairBuffer, OutgoingRepairBuffer } from "./buffers.js";

describe("OutgoingRepairBuffer", () => {
  let buffer: OutgoingRepairBuffer;

  beforeEach(() => {
    buffer = new OutgoingRepairBuffer(3); // Small buffer for testing
  });

  it("should add entries and maintain sorted order", () => {
    const entry1: HistoryEntry = { messageId: "msg1" };
    const entry2: HistoryEntry = { messageId: "msg2" };
    const entry3: HistoryEntry = { messageId: "msg3" };

    buffer.add(entry2, 2000);
    buffer.add(entry1, 1000);
    buffer.add(entry3, 3000);

    const items = buffer.getItems();
    expect(items).to.have.lengthOf(3);
    expect(items[0].tReq).to.equal(1000);
    expect(items[1].tReq).to.equal(2000);
    expect(items[2].tReq).to.equal(3000);
  });

  it("should not update T_req if message already exists", () => {
    const entry: HistoryEntry = { messageId: "msg1" };

    buffer.add(entry, 1000);
    buffer.add(entry, 2000); // Try to add again with different T_req

    const items = buffer.getItems();
    expect(items).to.have.lengthOf(1);
    expect(items[0].tReq).to.equal(1000); // Should keep original
  });

  it("should evict furthest entry when buffer is full", () => {
    const entry1: HistoryEntry = { messageId: "msg1" };
    const entry2: HistoryEntry = { messageId: "msg2" };
    const entry3: HistoryEntry = { messageId: "msg3" };
    const entry4: HistoryEntry = { messageId: "msg4" };

    buffer.add(entry2, 2000);
    buffer.add(entry1, 1000);
    buffer.add(entry3, 3000);
    buffer.add(entry4, 1500); // Should evict msg3 (furthest T_req = 3000)

    const items = buffer.getItems();
    expect(items).to.have.lengthOf(3);
    expect(buffer.has("msg3")).to.be.false; // msg3 should be evicted (furthest T_req)
    expect(buffer.has("msg1")).to.be.true;
    expect(buffer.has("msg2")).to.be.true;
    expect(buffer.has("msg4")).to.be.true;
  });

  it("should get eligible entries based on current time", () => {
    const entry1: HistoryEntry = { messageId: "msg1" };
    const entry2: HistoryEntry = { messageId: "msg2" };
    const entry3: HistoryEntry = { messageId: "msg3" };

    buffer.add(entry1, 1000);
    buffer.add(entry2, 2000);
    buffer.add(entry3, 3000);

    const eligible = buffer.getEligible(1500, 3);
    expect(eligible).to.have.lengthOf(1);
    expect(eligible[0].messageId).to.equal("msg1");
  });

  it("should get multiple eligible entries at later time", () => {
    const entry1: HistoryEntry = { messageId: "msg1" };
    const entry2: HistoryEntry = { messageId: "msg2" };
    const entry3: HistoryEntry = { messageId: "msg3" };

    // Create new buffer for second test since getEligible marks entries as requested
    const buffer2 = new OutgoingRepairBuffer(3);
    buffer2.add(entry1, 1000);
    buffer2.add(entry2, 2000);
    buffer2.add(entry3, 3000);

    const eligible = buffer2.getEligible(2500, 3);
    expect(eligible).to.have.lengthOf(2);
    expect(eligible[0].messageId).to.equal("msg1");
    expect(eligible[1].messageId).to.equal("msg2");
  });

  it("should respect maxRequests limit", () => {
    const entry1: HistoryEntry = { messageId: "msg1" };
    const entry2: HistoryEntry = { messageId: "msg2" };
    const entry3: HistoryEntry = { messageId: "msg3" };

    buffer.add(entry1, 1000);
    buffer.add(entry2, 2000);
    buffer.add(entry3, 3000);

    const eligible = buffer.getEligible(5000, 2); // All are eligible but limit to 2
    expect(eligible).to.have.lengthOf(2);
    expect(eligible[0].messageId).to.equal("msg1");
    expect(eligible[1].messageId).to.equal("msg2");
  });

  it("should remove entries", () => {
    const entry1: HistoryEntry = { messageId: "msg1" };
    const entry2: HistoryEntry = { messageId: "msg2" };

    buffer.add(entry1, 1000);
    buffer.add(entry2, 2000);

    expect(buffer.size).to.equal(2);
    buffer.remove("msg1");
    expect(buffer.size).to.equal(1);
    expect(buffer.has("msg1")).to.be.false;
    expect(buffer.has("msg2")).to.be.true;
  });

  it("should handle retrieval hint and sender_id", () => {
    const hint = new Uint8Array([1, 2, 3]);
    const entry: HistoryEntry = {
      messageId: "msg1",
      retrievalHint: hint,
      senderId: "sender1"
    };

    buffer.add(entry, 1000);
    const all = buffer.getAll();
    expect(all[0].retrievalHint).to.deep.equal(hint);
    expect(all[0].senderId).to.equal("sender1");
  });
});

describe("IncomingRepairBuffer", () => {
  let buffer: IncomingRepairBuffer;

  beforeEach(() => {
    buffer = new IncomingRepairBuffer(3); // Small buffer for testing
  });

  it("should add entries and maintain sorted order", () => {
    const entry1: HistoryEntry = { messageId: "msg1" };
    const entry2: HistoryEntry = { messageId: "msg2" };
    const entry3: HistoryEntry = { messageId: "msg3" };

    buffer.add(entry2, 2000);
    buffer.add(entry1, 1000);
    buffer.add(entry3, 3000);

    const items = buffer.getItems();
    expect(items).to.have.lengthOf(3);
    expect(items[0].tResp).to.equal(1000);
    expect(items[1].tResp).to.equal(2000);
    expect(items[2].tResp).to.equal(3000);
  });

  it("should ignore duplicate entries", () => {
    const entry: HistoryEntry = { messageId: "msg1" };

    buffer.add(entry, 1000);
    buffer.add(entry, 500); // Try to add again with earlier T_resp

    const items = buffer.getItems();
    expect(items).to.have.lengthOf(1);
    expect(items[0].tResp).to.equal(1000); // Should keep original
  });

  it("should evict furthest entry when buffer is full", () => {
    const entry1: HistoryEntry = { messageId: "msg1" };
    const entry2: HistoryEntry = { messageId: "msg2" };
    const entry3: HistoryEntry = { messageId: "msg3" };
    const entry4: HistoryEntry = { messageId: "msg4" };

    buffer.add(entry1, 1000);
    buffer.add(entry2, 2000);
    buffer.add(entry3, 3000);
    buffer.add(entry4, 1500); // Should evict msg3 (furthest T_resp)

    const items = buffer.getItems();
    expect(items).to.have.lengthOf(3);
    expect(buffer.has("msg3")).to.be.false; // msg3 should be evicted
    expect(buffer.has("msg1")).to.be.true;
    expect(buffer.has("msg2")).to.be.true;
    expect(buffer.has("msg4")).to.be.true;
  });

  it("should get and remove ready entries", () => {
    const entry1: HistoryEntry = { messageId: "msg1" };
    const entry2: HistoryEntry = { messageId: "msg2" };
    const entry3: HistoryEntry = { messageId: "msg3" };

    buffer.add(entry1, 1000);
    buffer.add(entry2, 2000);
    buffer.add(entry3, 3000);

    const ready = buffer.getReady(1500);
    expect(ready).to.have.lengthOf(1);
    expect(ready[0].messageId).to.equal("msg1");

    // Entry should be removed from buffer
    expect(buffer.size).to.equal(2);
    expect(buffer.has("msg1")).to.be.false;

    const ready2 = buffer.getReady(2500);
    expect(ready2).to.have.lengthOf(1);
    expect(ready2[0].messageId).to.equal("msg2");

    expect(buffer.size).to.equal(1);
    expect(buffer.has("msg2")).to.be.false;
    expect(buffer.has("msg3")).to.be.true;
  });

  it("should remove entries", () => {
    const entry1: HistoryEntry = { messageId: "msg1" };
    const entry2: HistoryEntry = { messageId: "msg2" };

    buffer.add(entry1, 1000);
    buffer.add(entry2, 2000);

    expect(buffer.size).to.equal(2);
    buffer.remove("msg1");
    expect(buffer.size).to.equal(1);
    expect(buffer.has("msg1")).to.be.false;
    expect(buffer.has("msg2")).to.be.true;
  });

  it("should clear all entries", () => {
    const entry1: HistoryEntry = { messageId: "msg1" };
    const entry2: HistoryEntry = { messageId: "msg2" };

    buffer.add(entry1, 1000);
    buffer.add(entry2, 2000);

    expect(buffer.size).to.equal(2);
    buffer.clear();
    expect(buffer.size).to.equal(0);
  });
});
277 packages/sds/src/message_channel/repair/buffers.ts Normal file
@ -0,0 +1,277 @@
import { Logger } from "@waku/utils";

import type { HistoryEntry, MessageId } from "../message.js";

const log = new Logger("sds:repair:buffers");

/**
 * Entry in the outgoing repair buffer with request timing
 */
interface OutgoingBufferEntry {
  entry: HistoryEntry;
  tReq: number; // Timestamp when this repair request should be sent
  requested: boolean; // Whether this repair has already been requested by the local node
}

/**
 * Entry in the incoming repair buffer with response timing
 */
interface IncomingBufferEntry {
  entry: HistoryEntry;
  tResp: number; // Timestamp when we should respond with this repair
}

/**
 * Buffer for outgoing repair requests (messages we need)
 * Maintains a sorted array by T_req for efficient retrieval of eligible entries
 */
export class OutgoingRepairBuffer {
  // Sorted array by T_req (ascending - earliest first)
  private items: OutgoingBufferEntry[] = [];
  private readonly maxSize: number;

  public constructor(maxSize = 1000) {
    this.maxSize = maxSize;
  }

  /**
   * Add a missing message to the outgoing repair request buffer
   * If message already exists, it is not updated (keeps original T_req)
   * @returns true if the entry was added, false if it already existed
   */
  public add(entry: HistoryEntry, tReq: number): boolean {
    const messageId = entry.messageId;

    // Check if already exists - do NOT update T_req per spec
    if (this.has(messageId)) {
      log.info(
        `Message ${messageId} already in outgoing buffer, keeping original T_req`
      );
      return false;
    }

    // Check buffer size limit
    if (this.items.length >= this.maxSize) {
      // Evict furthest T_req entry (last in sorted array) to preserve repairs that need to be sent the soonest
      const evicted = this.items.pop()!;
      log.warn(
        `Buffer full, evicted furthest entry ${evicted.entry.messageId} with T_req ${evicted.tReq}`
      );
    }

    // Add new entry and re-sort
    const newEntry: OutgoingBufferEntry = { entry, tReq, requested: false };
    const combined = [...this.items, newEntry];

    // Sort by T_req (ascending)
    combined.sort((a, b) => a.tReq - b.tReq);

    this.items = combined;
    log.info(`Added ${messageId} to outgoing buffer with T_req: ${tReq}`);
    return true;
  }

  /**
   * Remove a message from the buffer (e.g., when received)
   */
  public remove(messageId: MessageId): void {
    this.items = this.items.filter(
      (item) => item.entry.messageId !== messageId
    );
  }

  /**
   * Get eligible repair requests (where T_req <= currentTime)
   * Returns up to maxRequests entries from the front of the sorted array
   * Marks returned entries as requested but keeps them in buffer until received
   */
  public getEligible(
    currentTime: number = Date.now(),
    maxRequests = 3
  ): HistoryEntry[] {
    const eligible: HistoryEntry[] = [];

    // Iterate from front of sorted array (earliest T_req first)
    for (const item of this.items) {
      // Since array is sorted, once we hit an item with tReq > currentTime,
      // all remaining items also have tReq > currentTime
      if (item.tReq > currentTime) {
        break;
      }

      // Only return items that haven't been requested yet
      if (!item.requested && eligible.length < maxRequests) {
        eligible.push(item.entry);
        // Mark as requested so we don't request it again
        item.requested = true;
        log.info(
          `Repair request for ${item.entry.messageId} is eligible and marked as requested`
        );
      }

      // If we've found enough eligible items, exit early
      if (eligible.length >= maxRequests) {
        break;
      }
    }

    return eligible;
  }

  /**
   * Check if a message is in the buffer
   */
  public has(messageId: MessageId): boolean {
    return this.items.some((item) => item.entry.messageId === messageId);
  }

  /**
   * Get the current buffer size
   */
  public get size(): number {
    return this.items.length;
  }

  /**
   * Clear all entries
   */
  public clear(): void {
    this.items = [];
  }

  /**
   * Get all entries (for testing/debugging)
   */
  public getAll(): HistoryEntry[] {
    return this.items.map((item) => item.entry);
  }

  /**
   * Get items array directly (for testing)
   */
  public getItems(): OutgoingBufferEntry[] {
    return [...this.items];
  }
}

/**
 * Buffer for incoming repair requests (repairs we need to send)
 * Maintains a sorted array by T_resp for efficient retrieval of ready entries
 */
export class IncomingRepairBuffer {
  // Sorted array by T_resp (ascending - earliest first)
  private items: IncomingBufferEntry[] = [];
  private readonly maxSize: number;

  public constructor(maxSize = 1000) {
    this.maxSize = maxSize;
  }

  /**
   * Add a repair request that we can fulfill
   * If message already exists, it is ignored (not updated)
   * @returns true if the entry was added, false if it already existed
   */
  public add(entry: HistoryEntry, tResp: number): boolean {
    const messageId = entry.messageId;

    // Check if already exists - ignore per spec
    if (this.has(messageId)) {
      log.info(`Message ${messageId} already in incoming buffer, ignoring`);
      return false;
    }

    // Check buffer size limit
    if (this.items.length >= this.maxSize) {
      // Evict furthest T_resp entry (last in sorted array)
      const evicted = this.items.pop()!;
      log.warn(
        `Buffer full, evicted furthest entry ${evicted.entry.messageId} with T_resp ${evicted.tResp}`
      );
    }

    // Add new entry and re-sort
    const newEntry: IncomingBufferEntry = { entry, tResp };
    const combined = [...this.items, newEntry];

    // Sort by T_resp (ascending)
    combined.sort((a, b) => a.tResp - b.tResp);

    this.items = combined;
    log.info(`Added ${messageId} to incoming buffer with T_resp: ${tResp}`);
    return true;
  }

  /**
   * Remove a message from the buffer
   */
  public remove(messageId: MessageId): void {
    this.items = this.items.filter(
      (item) => item.entry.messageId !== messageId
    );
  }

  /**
   * Get repairs ready to be sent (where T_resp <= currentTime)
   * Removes and returns ready entries
   */
  public getReady(currentTime: number): HistoryEntry[] {
    // Find cutoff point - first item with tResp > currentTime
    // Since array is sorted, all items before this are ready
    let cutoff = 0;
    for (let i = 0; i < this.items.length; i++) {
      if (this.items[i].tResp > currentTime) {
        cutoff = i;
        break;
      }
      // If we reach the end, all items are ready
      cutoff = i + 1;
    }

    // Extract ready items and log them
    const ready = this.items.slice(0, cutoff).map((item) => {
      log.info(`Repair for ${item.entry.messageId} is ready to be sent`);
      return item.entry;
    });

    // Keep only items after cutoff
    this.items = this.items.slice(cutoff);

    return ready;
  }

  /**
   * Check if a message is in the buffer
   */
  public has(messageId: MessageId): boolean {
    return this.items.some((item) => item.entry.messageId === messageId);
  }

  /**
   * Get the current buffer size
   */
  public get size(): number {
    return this.items.length;
  }

  /**
   * Clear all entries
   */
  public clear(): void {
    this.items = [];
  }

  /**
   * Get all entries (for testing/debugging)
   */
  public getAll(): HistoryEntry[] {
    return this.items.map((item) => item.entry);
  }

  /**
   * Get items array directly (for testing)
   */
  public getItems(): IncomingBufferEntry[] {
    return [...this.items];
  }
}
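For orientation, a minimal usage sketch of the buffer API above (illustrative only, not part of this commit; the message id and the 45 s delay are made up):

import type { HistoryEntry } from "../message.js";

import { OutgoingRepairBuffer } from "./buffers.js";

const outgoing = new OutgoingRepairBuffer(1000);
const missing: HistoryEntry = { messageId: "msg-abc" }; // hypothetical id

// Schedule a repair request for ~45 s from now; re-adding the same
// message later keeps the original T_req.
outgoing.add(missing, Date.now() + 45_000);

// A periodic sweep returns up to 3 entries whose T_req has elapsed.
// They stay buffered (marked as requested) until the message arrives.
const toRequest = outgoing.getEligible(Date.now(), 3);
// ... send repair requests for `toRequest` via the channel ...

// Once the message is received, drop it from the buffer.
outgoing.remove(missing.messageId);

Note the eviction policy when the buffer is full: the entry with the furthest T_req is dropped, so the repairs due soonest always survive.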
331 packages/sds/src/message_channel/repair/repair.ts Normal file
@ -0,0 +1,331 @@
import { Logger } from "@waku/utils";

import type { HistoryEntry, MessageId } from "../message.js";
import { Message } from "../message.js";
import type { ILocalHistory } from "../message_channel.js";

import { IncomingRepairBuffer, OutgoingRepairBuffer } from "./buffers.js";
import {
  bigintToNumber,
  calculateXorDistance,
  combinedHash,
  hashString,
  ParticipantId
} from "./utils.js";

const log = new Logger("sds:repair:manager");

/**
 * Per SDS-R spec: One response group per 128 participants
 */
const PARTICIPANTS_PER_RESPONSE_GROUP = 128;

/**
 * Event emitter callback for repair events
 */
export type RepairEventEmitter = (event: string, detail: unknown) => void;

/**
 * Configuration for SDS-R repair protocol
 */
export interface RepairConfig {
  /** Minimum wait time before requesting repair (milliseconds) */
  tMin?: number;
  /** Maximum wait time for repair window (milliseconds) */
  tMax?: number;
  /** Number of response groups for load distribution */
  numResponseGroups?: number;
  /** Maximum buffer size for repair requests */
  bufferSize?: number;
}

/**
 * Default configuration values based on spec recommendations
 */
export const DEFAULT_REPAIR_CONFIG: Required<RepairConfig> = {
  tMin: 30000, // 30 seconds
  tMax: 120000, // 120 seconds
  numResponseGroups: 1, // Recommendation is 1 group per PARTICIPANTS_PER_RESPONSE_GROUP participants
  bufferSize: 1000
};

/**
 * Manager for SDS-R repair protocol
 * Handles repair request/response timing and coordination
 */
export class RepairManager {
  private readonly participantId: ParticipantId;
  private readonly config: Required<RepairConfig>;
  private readonly outgoingBuffer: OutgoingRepairBuffer;
  private readonly incomingBuffer: IncomingRepairBuffer;
  private readonly eventEmitter?: RepairEventEmitter;

  public constructor(
    participantId: ParticipantId,
    config: RepairConfig = {},
    eventEmitter?: RepairEventEmitter
  ) {
    this.participantId = participantId;
    this.config = { ...DEFAULT_REPAIR_CONFIG, ...config };
    this.eventEmitter = eventEmitter;

    this.outgoingBuffer = new OutgoingRepairBuffer(this.config.bufferSize);
    this.incomingBuffer = new IncomingRepairBuffer(this.config.bufferSize);

    log.info(`RepairManager initialized for participant ${participantId}`);
  }

  /**
   * Calculate T_req - when to request repair for a missing message
   * Per spec: T_req = current_time + hash(participant_id, message_id) % (T_max - T_min) + T_min
   */
  public calculateTReq(messageId: MessageId, currentTime = Date.now()): number {
    const hash = combinedHash(this.participantId, messageId);
    const range = BigInt(this.config.tMax - this.config.tMin);
    const offset = bigintToNumber(hash % range) + this.config.tMin;
    return currentTime + offset;
  }

  /**
   * Calculate T_resp - when to respond with a repair
   * Per spec: T_resp = current_time + (distance * hash(message_id)) % T_max
   * where distance = participant_id XOR sender_id
   */
  public calculateTResp(
    senderId: ParticipantId,
    messageId: MessageId,
    currentTime = Date.now()
  ): number {
    const distance = calculateXorDistance(this.participantId, senderId);
    const messageHash = hashString(messageId);
    const product = distance * messageHash;
    const offset = bigintToNumber(product % BigInt(this.config.tMax));
    return currentTime + offset;
  }

  /**
   * Determine if this participant is in the response group for a message
   * Per spec: (hash(participant_id, message_id) % num_response_groups) ==
   * (hash(sender_id, message_id) % num_response_groups)
   */
  public isInResponseGroup(
    senderId: ParticipantId,
    messageId: MessageId
  ): boolean {
    if (!senderId) {
      // Cannot determine response group without sender_id
      return false;
    }

    const numGroups = BigInt(this.config.numResponseGroups);
    if (numGroups <= BigInt(1)) {
      // Single group, everyone is in it
      return true;
    }

    const participantGroup =
      combinedHash(this.participantId, messageId) % numGroups;
    const senderGroup = combinedHash(senderId, messageId) % numGroups;

    return participantGroup === senderGroup;
  }

  /**
   * Handle missing dependencies by adding them to outgoing repair buffer
   * Called when causal dependencies are detected as missing
   */
  public markDependenciesMissing(
    missingEntries: HistoryEntry[],
    currentTime = Date.now()
  ): void {
    for (const entry of missingEntries) {
      // Calculate when to request this repair
      const tReq = this.calculateTReq(entry.messageId, currentTime);

      // Add to outgoing buffer - only log and emit event if actually added
      const wasAdded = this.outgoingBuffer.add(entry, tReq);

      if (wasAdded) {
        log.info(
          `Added missing dependency ${entry.messageId} to repair buffer with T_req=${tReq}`
        );

        // Emit event
        this.eventEmitter?.("RepairRequestQueued", {
          messageId: entry.messageId,
          tReq
        });
      }
    }
  }

  /**
   * Handle receipt of a message - remove from repair buffers
   * Called when a message is successfully received
   */
  public markMessageReceived(messageId: MessageId): void {
    // Remove from both buffers as we no longer need to request or respond
    const wasInOutgoing = this.outgoingBuffer.has(messageId);
    const wasInIncoming = this.incomingBuffer.has(messageId);

    if (wasInOutgoing) {
      this.outgoingBuffer.remove(messageId);
      log.info(
        `Removed ${messageId} from outgoing repair buffer after receipt`
      );
    }

    if (wasInIncoming) {
      this.incomingBuffer.remove(messageId);
      log.info(
        `Removed ${messageId} from incoming repair buffer after receipt`
      );
    }
  }

  /**
   * Get repair requests that are eligible to be sent
   * Returns up to maxRequests entries where T_req <= currentTime
   */
  public getRepairRequests(
    maxRequests = 3,
    currentTime = Date.now()
  ): HistoryEntry[] {
    return this.outgoingBuffer.getEligible(currentTime, maxRequests);
  }

  /**
   * Process incoming repair requests from other participants
   * Adds to incoming buffer if we can fulfill and are in response group
   */
  public processIncomingRepairRequests(
    requests: HistoryEntry[],
    localHistory: ILocalHistory,
    currentTime = Date.now()
  ): void {
    for (const request of requests) {
      // Remove from our own outgoing buffer (someone else is requesting it)
      this.outgoingBuffer.remove(request.messageId);

      // Check if we have this message
      const message = localHistory.find(
        (m) => m.messageId === request.messageId
      );
      if (!message) {
        log.info(
          `Cannot fulfill repair for ${request.messageId} - not in local history`
        );
        continue;
      }

      // Check if we're in the response group
      if (!request.senderId) {
        log.warn(
          `Cannot determine response group for ${request.messageId} - missing sender_id`
        );
        continue;
      }

      if (!this.isInResponseGroup(request.senderId, request.messageId)) {
        log.info(`Not in response group for ${request.messageId}`);
        continue;
      }

      // Calculate when to respond
      const tResp = this.calculateTResp(
        request.senderId,
        request.messageId,
        currentTime
      );

      // Add to incoming buffer - only log and emit event if actually added
      const wasAdded = this.incomingBuffer.add(request, tResp);

      if (wasAdded) {
        log.info(
          `Will respond to repair request for ${request.messageId} at T_resp=${tResp}`
        );

        // Emit event
        this.eventEmitter?.("RepairResponseQueued", {
          messageId: request.messageId,
          tResp
        });
      }
    }
  }

  /**
   * Sweep outgoing buffer for repairs that should be requested
   * Returns entries where T_req <= currentTime
   */
  public sweepOutgoingBuffer(
    maxRequests = 3,
    currentTime = Date.now()
  ): HistoryEntry[] {
    return this.getRepairRequests(maxRequests, currentTime);
  }

  /**
   * Sweep incoming buffer for repairs ready to be sent
   * Returns messages that should be rebroadcast
   */
  public sweepIncomingBuffer(
    localHistory: ILocalHistory,
    currentTime = Date.now()
  ): Message[] {
    const ready = this.incomingBuffer.getReady(currentTime);
    const messages: Message[] = [];

    for (const entry of ready) {
      const message = localHistory.find((m) => m.messageId === entry.messageId);
      if (message) {
        messages.push(message);
        log.info(`Sending repair for ${entry.messageId}`);
      } else {
        log.warn(`Message ${entry.messageId} no longer in local history`);
      }
    }

    return messages;
  }

  /**
   * Clear all buffers
   */
  public clear(): void {
    this.outgoingBuffer.clear();
    this.incomingBuffer.clear();
  }

  /**
   * Update number of response groups (e.g., when participants change)
   */
  public updateResponseGroups(numParticipants: number): void {
    if (
      numParticipants < 0 ||
      !Number.isFinite(numParticipants) ||
      !Number.isInteger(numParticipants)
    ) {
      throw new Error(
        `Invalid numParticipants: ${numParticipants}. Must be a non-negative integer.`
      );
    }

    if (numParticipants > Number.MAX_SAFE_INTEGER) {
      log.warn(
        `numParticipants ${numParticipants} exceeds MAX_SAFE_INTEGER, using MAX_SAFE_INTEGER`
      );
      numParticipants = Number.MAX_SAFE_INTEGER;
    }

    // Per spec: num_response_groups = max(1, num_participants / PARTICIPANTS_PER_RESPONSE_GROUP)
    this.config.numResponseGroups = Math.max(
      1,
      Math.floor(numParticipants / PARTICIPANTS_PER_RESPONSE_GROUP)
    );
    log.info(
      `Updated response groups to ${this.config.numResponseGroups} for ${numParticipants} participants`
    );
  }
}
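Again as a reading aid (a sketch under assumptions, not part of the diff): a plausible sweep cycle around RepairManager. `localHistory` is anything satisfying ILocalHistory; `sendRequests`, `rebroadcast`, and `peerRequests` are hypothetical application-side hooks and inputs.

const repair = new RepairManager("participant-1", {
  tMin: 30_000,
  tMax: 120_000
});

// When causal dependencies turn up missing (senderId lets receivers
// decide response-group membership):
repair.markDependenciesMissing([
  { messageId: "msg-abc", senderId: "sender-1" } // hypothetical ids
]);

// On each heartbeat/tick:
const requests = repair.sweepOutgoingBuffer(3); // repair requests to piggyback
sendRequests(requests); // hypothetical transport hook

const repairs = repair.sweepIncomingBuffer(localHistory); // messages due now
repairs.forEach(rebroadcast); // hypothetical transport hook

// When peers' repair requests arrive:
repair.processIncomingRepairRequests(peerRequests, localHistory);

// And whenever any message is received:
repair.markMessageReceived("msg-abc");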
80 packages/sds/src/message_channel/repair/utils.ts Normal file
@ -0,0 +1,80 @@
import { sha256 } from "@noble/hashes/sha2";
import { bytesToHex } from "@waku/utils/bytes";

import type { MessageId } from "../message.js";

/**
 * ParticipantId can be a string or converted to a numeric representation for XOR operations
 */
export type ParticipantId = string;

/**
 * Compute SHA256 hash and convert to integer for modulo operations
 * Uses first 8 bytes of hash for the integer conversion
 */
export function hashToInteger(input: string): bigint {
  const hashBytes = sha256(new TextEncoder().encode(input));
  // Use first 8 bytes for a 64-bit integer
  const view = new DataView(hashBytes.buffer, 0, 8);
  return view.getBigUint64(0, false); // big-endian
}

/**
 * Compute combined hash for (participantId, messageId) and convert to integer
 * This is used for T_req calculations and response group membership
 */
export function combinedHash(
  participantId: ParticipantId,
  messageId: MessageId
): bigint {
  const combined = `${participantId}${messageId}`;
  return hashToInteger(combined);
}

/**
 * Convert ParticipantId to numeric representation for XOR operations
 * TODO: Not per spec, further review needed
 * The spec assumes participant IDs support XOR natively, but we're using
 * SHA256 hash to ensure consistent numeric representation for string IDs
 */
export function participantIdToNumeric(participantId: ParticipantId): bigint {
  return hashToInteger(participantId);
}

/**
 * Calculate XOR distance between two participant IDs
 * Used for T_resp calculations where distance affects response timing
 */
export function calculateXorDistance(
  participantId1: ParticipantId,
  participantId2: ParticipantId
): bigint {
  const numeric1 = participantIdToNumeric(participantId1);
  const numeric2 = participantIdToNumeric(participantId2);
  return numeric1 ^ numeric2;
}

/**
 * Helper to convert bigint to number for timing calculations
 * Ensures the result fits in JavaScript's number range
 */
export function bigintToNumber(value: bigint): number {
  // For timing calculations, we modulo by MAX_SAFE_INTEGER to ensure it fits
  const maxSafe = BigInt(Number.MAX_SAFE_INTEGER);
  return Number(value % maxSafe);
}

/**
 * Calculate hash for a single string (used for message_id in T_resp)
 */
export function hashString(input: string): bigint {
  return hashToInteger(input);
}

/**
 * Convert a hash result to hex string for debugging/logging
 */
export function hashToHex(input: string): string {
  const hashBytes = sha256(new TextEncoder().encode(input));
  return bytesToHex(hashBytes);
}
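To make the timing math concrete (a sketch; the actual bigint depends on the SHA-256 of the concatenated IDs, so the numbers below are illustrative): calculateTReq spreads each participant's request uniformly over [T_min, T_max) past the current time, which desynchronises peers that all miss the same message.

const tMin = 30_000; // ms
const tMax = 120_000; // ms

// 64-bit bigint derived from sha256("participant-1" + "msg-abc")
const h = combinedHash("participant-1", "msg-abc");

// h % 90_000n lands in [0, 90_000); say it is 12_345n.
// The offset is then 42_345 ms, i.e. inside [tMin, tMax).
const offset = bigintToNumber(h % BigInt(tMax - tMin)) + tMin;
const tReq = Date.now() + offset;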
packages/tests/package.json
@ -16,7 +16,7 @@
   "homepage": "https://github.com/waku-org/js-waku/tree/master/packages/tests#readme",
   "repository": {
     "type": "git",
-    "url": "https://github.com/waku-org/js-waku.git"
+    "url": "git+https://github.com/waku-org/js-waku.git"
   },
   "bugs": {
     "url": "https://github.com/waku-org/js-waku/issues"
@ -34,7 +34,7 @@ const WAKU_SERVICE_NODE_PARAMS =
 const NODE_READY_LOG_LINE = "Node setup complete";

 export const DOCKER_IMAGE_NAME =
-  process.env.WAKUNODE_IMAGE || "wakuorg/nwaku:v0.35.1";
+  process.env.WAKUNODE_IMAGE || "wakuorg/nwaku:v0.36.0";

 const LOG_DIR = "./log";
@ -3,7 +3,7 @@ import { promisify } from "util";
|
|||||||
|
|
||||||
const execAsync = promisify(exec);
|
const execAsync = promisify(exec);
|
||||||
|
|
||||||
const WAKUNODE_IMAGE = process.env.WAKUNODE_IMAGE || "wakuorg/nwaku:v0.35.1";
|
const WAKUNODE_IMAGE = process.env.WAKUNODE_IMAGE || "wakuorg/nwaku:v0.36.0";
|
||||||
|
|
||||||
async function main() {
|
async function main() {
|
||||||
try {
|
try {
|
||||||
|
|||||||
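Note that both version bumps keep the same escape hatch: because of the `process.env.WAKUNODE_IMAGE ||` fallback, exporting WAKUNODE_IMAGE in the environment still pins any other nwaku image for a given run; v0.36.0 is only the new default.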
@ -1,56 +0,0 @@
import { exec } from "child_process";
import { setTimeout } from "timers";
import { promisify } from "util";

import { SEPOLIA_RPC_URL } from "./constants.js";
import { ServiceNode } from "./lib/index.js";

const execAsync = promisify(exec);

const WAKUNODE_IMAGE = process.env.WAKUNODE_IMAGE || "wakuorg/nwaku:v0.35.1";
const containerName = "rln_tree";

async function syncRlnTree() {
  try {
    await execAsync(`docker inspect ${WAKUNODE_IMAGE}`);
    console.log(`Using local image ${WAKUNODE_IMAGE}`);
  } catch (error) {
    console.log(`Pulling image ${WAKUNODE_IMAGE}`);
    await execAsync(`docker pull ${WAKUNODE_IMAGE}`);
    console.log("Image pulled");
  }

  const nwaku = new ServiceNode(containerName);
  await nwaku.start(
    {
      store: false,
      lightpush: false,
      relay: true,
      filter: false,
      rest: true,
      clusterId: 1,
      rlnRelayEthClientAddress: SEPOLIA_RPC_URL
    },
    { retries: 3 }
  );
  let healthy = false;
  while (!healthy) {
    healthy = await nwaku.healthy();
    await new Promise((resolve) => setTimeout(resolve, 500));
  }

  await execAsync(
    `docker cp ${nwaku.containerName}:/rln_tree.db /tmp/rln_tree.db`
  );
  await nwaku.stop();
}

syncRlnTree()
  .then(() => {
    console.log("Synced RLN tree");
    process.exit(0);
  })
  .catch((err) => {
    console.error(`Error syncing RLN tree: ${err}`);
    process.exit(1);
  });
packages/utils/package.json
@ -28,7 +28,7 @@
   "homepage": "https://github.com/waku-org/js-waku/tree/master/packages/utils#readme",
   "repository": {
     "type": "git",
-    "url": "https://github.com/waku-org/js-waku.git"
+    "url": "git+https://github.com/waku-org/js-waku.git"
   },
   "bugs": {
     "url": "https://github.com/waku-org/js-waku/issues"
@ -24,6 +24,7 @@
   "packages/discovery": {},
   "packages/sds": {},
   "packages/rln": {},
-  "packages/react": {}
+  "packages/react": {},
+  "packages/run": {}
   }
 }