Merge 9c06c62c500324f6bb3a33a1f4ec9aa161fb0720 into ba85873f03a1da6ab04287949849815fd97b7bfd

This commit is contained in:
Darshan 2026-02-26 17:59:31 +00:00 committed by GitHub
commit 90210fe2ca
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
165 changed files with 5664 additions and 6397 deletions

View File

@ -41,7 +41,7 @@ jobs:
- name: 'Run Nix build for {{ matrix.nixpkg }}'
shell: bash
run: nix build -L '.?submodules=1#${{ matrix.nixpkg }}'
run: nix build -L '.#${{ matrix.nixpkg }}'
- name: 'Show result contents'
shell: bash

View File

@ -56,31 +56,45 @@ jobs:
matrix:
os: [ubuntu-22.04, macos-15]
runs-on: ${{ matrix.os }}
timeout-minutes: 45
timeout-minutes: 90
name: build-${{ matrix.os }}
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Get submodules hash
id: submodules
run: |
echo "hash=$(git submodule status | awk '{print $1}' | sort | shasum -a 256 | sed 's/[ -]*//g')" >> $GITHUB_OUTPUT
- name: Cache submodules
uses: actions/cache@v3
- name: Install Nim (pinned)
uses: iffy/install-nim@v5
with:
path: |
vendor/
.git/modules
key: ${{ runner.os }}-vendor-modules-${{ steps.submodules.outputs.hash }}
version: binary:2.2.6
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Install Nimble (pinned)
uses: nim-lang/setup-nimble-action@v1
with:
nimble-version: '0.20.1'
repo-token: ${{ secrets.GITHUB_TOKEN }}
- name: Make update
run: make update
- name: Debug nimble.paths
run: |
echo "=== Nim version ==="
nim --version
echo "=== nimble version ==="
nimble --version
echo "=== nimble.paths content ==="
cat nimble.paths || echo "nimble.paths not found"
echo "=== Check for empty paths ==="
grep -n '""' nimble.paths || echo "No empty paths found"
grep -n '^--path:$' nimble.paths || echo "No empty --path: entries"
echo "=== Verify nimcrypto is in paths ==="
grep -c nimcrypto nimble.paths || echo "WARNING: nimcrypto not found in nimble.paths!"
- name: Build binaries
run: make V=1 QUICK_AND_DIRTY_COMPILER=1 all
run: make V=1 QUICK_AND_DIRTY_COMPILER=1 USE_LIBBACKTRACE=0 all
build-windows:
needs: changes
@ -97,39 +111,50 @@ jobs:
matrix:
os: [ubuntu-22.04, macos-15]
runs-on: ${{ matrix.os }}
timeout-minutes: 45
timeout-minutes: 90
name: test-${{ matrix.os }}
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Get submodules hash
id: submodules
run: |
echo "hash=$(git submodule status | awk '{print $1}' | sort | shasum -a 256 | sed 's/[ -]*//g')" >> $GITHUB_OUTPUT
- name: Cache submodules
uses: actions/cache@v3
- name: Install Nim (pinned)
uses: iffy/install-nim@v5
with:
path: |
vendor/
.git/modules
key: ${{ runner.os }}-vendor-modules-${{ steps.submodules.outputs.hash }}
version: binary:2.2.6
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Install Nimble (pinned)
uses: nim-lang/setup-nimble-action@v1
with:
nimble-version: '0.20.1'
repo-token: ${{ secrets.GITHUB_TOKEN }}
- name: Make update
run: make update
- name: Verify nimble.paths
run: |
echo "=== nimble.paths content ==="
cat nimble.paths || { echo "ERROR: nimble.paths not found!"; exit 1; }
echo "=== Verify critical dependencies ==="
grep -q nimcrypto nimble.paths || { echo "ERROR: nimcrypto not in paths!"; exit 1; }
grep -q libp2p nimble.paths || { echo "ERROR: libp2p not in paths!"; exit 1; }
- name: Run tests
run: |
postgres_enabled=0
if [ ${{ runner.os }} == "Linux" ]; then
sudo docker run --rm -d -e POSTGRES_PASSWORD=test123 -p 5432:5432 postgres:15.4-alpine3.18
postgres_enabled=1
# Disable march=native on Linux to avoid potential nimcrypto SHA2 issues
export NIMFLAGS="--colors:off -d:chronicles_colors:none -d:disableMarchNative"
else
export NIMFLAGS="--colors:off -d:chronicles_colors:none"
fi
export MAKEFLAGS="-j1"
export NIMFLAGS="--colors:off -d:chronicles_colors:none"
export USE_LIBBACKTRACE=0
make V=1 LOG_LEVEL=DEBUG QUICK_AND_DIRTY_COMPILER=1 POSTGRES=$postgres_enabled test
@ -138,7 +163,7 @@ jobs:
build-docker-image:
needs: changes
if: ${{ needs.changes.outputs.v2 == 'true' || needs.changes.outputs.common == 'true' || needs.changes.outputs.docker == 'true' }}
uses: logos-messaging/logos-delivery/.github/workflows/container-image.yml@10dc3d3eb4b6a3d4313f7b2cc4a85a925e9ce039
uses: ./.github/workflows/container-image.yml
secrets: inherit
nwaku-nwaku-interop-tests:
@ -171,18 +196,18 @@ jobs:
- name: Checkout code
uses: actions/checkout@v4
- name: Get submodules hash
id: submodules
run: |
echo "hash=$(git submodule status | awk '{print $1}' | sort | shasum -a 256 | sed 's/[ -]*//g')" >> $GITHUB_OUTPUT
- name: Cache submodules
uses: actions/cache@v3
- name: Install Nim (pinned)
uses: iffy/install-nim@v5
with:
path: |
vendor/
.git/modules
key: ${{ runner.os }}-vendor-modules-${{ steps.submodules.outputs.hash }}
version: binary:2.2.6
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Install Nimble (pinned)
uses: nim-lang/setup-nimble-action@v1
with:
nimble-version: '0.20.1'
repo-token: ${{ secrets.GITHUB_TOKEN }}
- name: Build nph
run: |

View File

@ -46,20 +46,18 @@ jobs:
if: ${{ steps.secrets.outcome == 'success' }}
uses: actions/checkout@v4
- name: Get submodules hash
id: submodules
if: ${{ steps.secrets.outcome == 'success' }}
run: |
echo "hash=$(git submodule status | awk '{print $1}' | sort | shasum -a 256 | sed 's/[ -]*//g')" >> $GITHUB_OUTPUT
- name: Cache submodules
if: ${{ steps.secrets.outcome == 'success' }}
uses: actions/cache@v3
- name: Install Nim (pinned)
uses: iffy/install-nim@v5
with:
path: |
vendor/
.git/modules
key: ${{ runner.os }}-vendor-modules-${{ steps.submodules.outputs.hash }}
version: binary:2.2.6
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Install Nimble (pinned)
uses: nim-lang/setup-nimble-action@v1
with:
nimble-version: '0.20.1'
repo-token: ${{ secrets.GITHUB_TOKEN }}
- name: Build binaries
id: build
@ -67,7 +65,7 @@ jobs:
run: |
make update
make -j${NPROC} V=1 QUICK_AND_DIRTY_COMPILER=1 NIMFLAGS="-d:disableMarchNative -d:postgres -d:chronicles_colors:none" wakunode2
make -j${NPROC} V=1 QUICK_AND_DIRTY_COMPILER=1 USE_LIBBACKTRACE=0 NIMFLAGS="-d:disableMarchNative -d:postgres -d:chronicles_colors:none" wakunode2
SHORT_REF=$(git rev-parse --short HEAD)

View File

@ -44,6 +44,11 @@ jobs:
- name: Checkout code
uses: actions/checkout@v4
- name: Install Nim
uses: iffy/install-nim@v5
with:
version: binary:2.2.4
- name: prep variables
id: vars
run: |
@ -66,7 +71,7 @@ jobs:
make QUICK_AND_DIRTY_COMPILER=1 V=1 CI=false NIMFLAGS="-d:disableMarchNative --os:${OS} --cpu:${{matrix.arch}}" \
update
make QUICK_AND_DIRTY_COMPILER=1 V=1 CI=false\
make QUICK_AND_DIRTY_COMPILER=1 V=1 CI=false USE_LIBBACKTRACE=0\
NIMFLAGS="-d:disableMarchNative --os:${OS} --cpu:${{matrix.arch}} -d:postgres" \
wakunode2\
chat2\

View File

@ -28,18 +28,10 @@ jobs:
- name: Checkout code
uses: actions/checkout@v2
- name: Get submodules hash
id: submodules
run: |
echo "hash=$(git submodule status | awk '{print $1}' | sort | shasum -a 256 | sed 's/[ -]*//g')" >> $GITHUB_OUTPUT
- name: Cache submodules
uses: actions/cache@v3
- name: Install Nim
uses: iffy/install-nim@v5
with:
path: |
vendor/
.git/modules
key: ${{ runner.os }}-${{matrix.arch}}-submodules-${{ steps.submodules.outputs.hash }}
version: binary:2.2.4
- name: Get tag
id: version
@ -76,12 +68,12 @@ jobs:
OS=$([[ "${{runner.os}}" == "macOS" ]] && echo "macosx" || echo "linux")
make -j${NPROC} NIMFLAGS="--parallelBuild:${NPROC} -d:disableMarchNative --os:${OS} --cpu:${{matrix.arch}}" V=1 update
make -j${NPROC} NIMFLAGS="--parallelBuild:${NPROC} -d:disableMarchNative --os:${OS} --cpu:${{matrix.arch}} -d:postgres" CI=false wakunode2
make -j${NPROC} NIMFLAGS="--parallelBuild:${NPROC} -d:disableMarchNative --os:${OS} --cpu:${{matrix.arch}}" CI=false chat2
make -j${NPROC} NIMFLAGS="--parallelBuild:${NPROC} -d:disableMarchNative --os:${OS} --cpu:${{matrix.arch}} -d:postgres" CI=false USE_LIBBACKTRACE=0 wakunode2
make -j${NPROC} NIMFLAGS="--parallelBuild:${NPROC} -d:disableMarchNative --os:${OS} --cpu:${{matrix.arch}}" CI=false USE_LIBBACKTRACE=0 chat2
tar -cvzf ${{steps.vars.outputs.waku}} ./build/
make -j${NPROC} NIMFLAGS="--parallelBuild:${NPROC} -d:disableMarchNative --os:${OS} --cpu:${{matrix.arch}} -d:postgres" CI=false libwaku
make -j${NPROC} NIMFLAGS="--parallelBuild:${NPROC} -d:disableMarchNative --os:${OS} --cpu:${{matrix.arch}} -d:postgres" CI=false STATIC=1 libwaku
make -j${NPROC} NIMFLAGS="--parallelBuild:${NPROC} -d:disableMarchNative --os:${OS} --cpu:${{matrix.arch}} -d:postgres" CI=false USE_LIBBACKTRACE=0 libwaku
make -j${NPROC} NIMFLAGS="--parallelBuild:${NPROC} -d:disableMarchNative --os:${OS} --cpu:${{matrix.arch}} -d:postgres" CI=false USE_LIBBACKTRACE=0 STATIC=1 libwaku
- name: Create distributable libwaku package
run: |

View File

@ -45,14 +45,22 @@ jobs:
mingw-w64-x86_64-cmake
mingw-w64-x86_64-llvm
mingw-w64-x86_64-clang
mingw-w64-x86_64-nimble
mingw-w64-x86_64-nasm
- name: Manually install nasm
run: |
bash scripts/install_nasm_in_windows.sh
source $HOME/.bashrc
- name: Add UPX to PATH
- name: Install Nim
uses: iffy/install-nim@v5
with:
version: binary:2.2.4
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Add paths
run: |
echo "/usr/bin:$PATH" >> $GITHUB_PATH
echo "/mingw64/bin:$PATH" >> $GITHUB_PATH
@ -61,31 +69,10 @@ jobs:
- name: Verify dependencies
run: |
which upx gcc g++ make cmake cargo rustc python nasm
which upx gcc g++ make cmake cargo rustc python nasm nimble
- name: Updating submodules
run: git submodule update --init --recursive
- name: Creating tmp directory
run: mkdir -p tmp
- name: Building Nim
run: |
cd vendor/nimbus-build-system/vendor/Nim
./build_all.bat
cd ../../../..
- name: Building miniupnpc
run: |
cd vendor/nim-nat-traversal/vendor/miniupnp/miniupnpc
make -f Makefile.mingw CC=gcc CXX=g++ libminiupnpc.a V=1
cd ../../../../..
- name: Building libnatpmp
run: |
cd ./vendor/nim-nat-traversal/vendor/libnatpmp-upstream
make CC="gcc -fPIC -D_WIN32_WINNT=0x0600 -DNATPMP_STATICLIB" libnatpmp.a V=1
cd ../../../../
- name: Make update
run: make update
- name: Building wakunode2.exe
run: |

187
.gitmodules vendored
View File

@ -1,197 +1,10 @@
[submodule "vendor/nim-eth"]
path = vendor/nim-eth
url = https://github.com/status-im/nim-eth.git
ignore = dirty
branch = master
[submodule "vendor/nim-secp256k1"]
path = vendor/nim-secp256k1
url = https://github.com/status-im/nim-secp256k1.git
ignore = dirty
branch = master
[submodule "vendor/nim-libp2p"]
path = vendor/nim-libp2p
url = https://github.com/vacp2p/nim-libp2p.git
ignore = dirty
branch = master
[submodule "vendor/nim-stew"]
path = vendor/nim-stew
url = https://github.com/status-im/nim-stew.git
ignore = dirty
branch = master
[submodule "vendor/nimbus-build-system"]
path = vendor/nimbus-build-system
url = https://github.com/status-im/nimbus-build-system.git
ignore = dirty
branch = master
[submodule "vendor/nim-nat-traversal"]
path = vendor/nim-nat-traversal
url = https://github.com/status-im/nim-nat-traversal.git
ignore = dirty
branch = master
[submodule "vendor/nim-libbacktrace"]
path = vendor/nim-libbacktrace
url = https://github.com/status-im/nim-libbacktrace.git
ignore = dirty
branch = master
[submodule "vendor/nim-confutils"]
path = vendor/nim-confutils
url = https://github.com/status-im/nim-confutils.git
ignore = dirty
branch = master
[submodule "vendor/nim-chronicles"]
path = vendor/nim-chronicles
url = https://github.com/status-im/nim-chronicles.git
ignore = dirty
branch = master
[submodule "vendor/nim-faststreams"]
path = vendor/nim-faststreams
url = https://github.com/status-im/nim-faststreams.git
ignore = dirty
branch = master
[submodule "vendor/nim-chronos"]
path = vendor/nim-chronos
url = https://github.com/status-im/nim-chronos.git
ignore = dirty
branch = master
[submodule "vendor/nim-json-serialization"]
path = vendor/nim-json-serialization
url = https://github.com/status-im/nim-json-serialization.git
ignore = dirty
branch = master
[submodule "vendor/nim-serialization"]
path = vendor/nim-serialization
url = https://github.com/status-im/nim-serialization.git
ignore = dirty
branch = master
[submodule "vendor/nimcrypto"]
path = vendor/nimcrypto
url = https://github.com/cheatfate/nimcrypto.git
ignore = dirty
branch = master
[submodule "vendor/nim-metrics"]
path = vendor/nim-metrics
url = https://github.com/status-im/nim-metrics.git
ignore = dirty
branch = master
[submodule "vendor/nim-stint"]
path = vendor/nim-stint
url = https://github.com/status-im/nim-stint.git
ignore = dirty
branch = master
[submodule "vendor/nim-json-rpc"]
path = vendor/nim-json-rpc
url = https://github.com/status-im/nim-json-rpc.git
ignore = dirty
branch = master
[submodule "vendor/nim-http-utils"]
path = vendor/nim-http-utils
url = https://github.com/status-im/nim-http-utils.git
ignore = dirty
branch = master
[submodule "vendor/nim-bearssl"]
path = vendor/nim-bearssl
url = https://github.com/status-im/nim-bearssl.git
ignore = dirty
branch = master
[submodule "vendor/nim-sqlite3-abi"]
path = vendor/nim-sqlite3-abi
url = https://github.com/arnetheduck/nim-sqlite3-abi.git
ignore = dirty
branch = master
[submodule "vendor/nim-web3"]
path = vendor/nim-web3
url = https://github.com/status-im/nim-web3.git
[submodule "vendor/nim-testutils"]
path = vendor/nim-testutils
url = https://github.com/status-im/nim-testutils.git
ignore = untracked
branch = master
[submodule "vendor/nim-unittest2"]
path = vendor/nim-unittest2
url = https://github.com/status-im/nim-unittest2.git
ignore = untracked
branch = master
[submodule "vendor/nim-websock"]
path = vendor/nim-websock
url = https://github.com/status-im/nim-websock.git
ignore = untracked
branch = main
[submodule "vendor/nim-zlib"]
path = vendor/nim-zlib
url = https://github.com/status-im/nim-zlib.git
ignore = untracked
branch = master
[submodule "vendor/nim-dnsdisc"]
path = vendor/nim-dnsdisc
url = https://github.com/status-im/nim-dnsdisc.git
ignore = untracked
branch = main
[submodule "vendor/dnsclient.nim"]
path = vendor/dnsclient.nim
url = https://github.com/ba0f3/dnsclient.nim.git
ignore = untracked
branch = master
[submodule "vendor/nim-toml-serialization"]
path = vendor/nim-toml-serialization
url = https://github.com/status-im/nim-toml-serialization.git
[submodule "vendor/nim-presto"]
path = vendor/nim-presto
url = https://github.com/status-im/nim-presto.git
ignore = untracked
branch = master
[submodule "vendor/zerokit"]
path = vendor/zerokit
url = https://github.com/vacp2p/zerokit.git
ignore = dirty
branch = v0.5.1
[submodule "vendor/nim-regex"]
path = vendor/nim-regex
url = https://github.com/nitely/nim-regex.git
ignore = untracked
branch = master
[submodule "vendor/nim-unicodedb"]
path = vendor/nim-unicodedb
url = https://github.com/nitely/nim-unicodedb.git
ignore = untracked
branch = master
[submodule "vendor/nim-taskpools"]
path = vendor/nim-taskpools
url = https://github.com/status-im/nim-taskpools.git
ignore = untracked
branch = stable
[submodule "vendor/nim-results"]
ignore = untracked
branch = master
path = vendor/nim-results
url = https://github.com/arnetheduck/nim-results.git
[submodule "vendor/db_connector"]
path = vendor/db_connector
url = https://github.com/nim-lang/db_connector.git
ignore = untracked
branch = devel
[submodule "vendor/nph"]
ignore = untracked
branch = master
path = vendor/nph
url = https://github.com/arnetheduck/nph.git
[submodule "vendor/nim-minilru"]
path = vendor/nim-minilru
url = https://github.com/status-im/nim-minilru.git
ignore = untracked
branch = master
[submodule "vendor/waku-rlnv2-contract"]
path = vendor/waku-rlnv2-contract
url = https://github.com/logos-messaging/waku-rlnv2-contract.git
ignore = untracked
branch = master
[submodule "vendor/nim-lsquic"]
path = vendor/nim-lsquic
url = https://github.com/vacp2p/nim-lsquic
[submodule "vendor/nim-jwt"]
path = vendor/nim-jwt
url = https://github.com/vacp2p/nim-jwt.git
[submodule "vendor/nim-ffi"]
path = vendor/nim-ffi
url = https://github.com/logos-messaging/nim-ffi/
ignore = untracked
branch = master

348
Makefile
View File

@ -4,28 +4,10 @@
# - MIT license
# at your option. This file may not be copied, modified, or distributed except
# according to those terms.
export BUILD_SYSTEM_DIR := vendor/nimbus-build-system
export EXCLUDED_NIM_PACKAGES := vendor/nim-dnsdisc/vendor
LINK_PCRE := 0
FORMAT_MSG := "\\x1B[95mFormatting:\\x1B[39m"
# we don't want an error here, so we can handle things later, in the ".DEFAULT" target
-include $(BUILD_SYSTEM_DIR)/makefiles/variables.mk
ifeq ($(NIM_PARAMS),)
# "variables.mk" was not included, so we update the submodules.
GIT_SUBMODULE_UPDATE := git submodule update --init --recursive
.DEFAULT:
+@ echo -e "Git submodules not found. Running '$(GIT_SUBMODULE_UPDATE)'.\n"; \
$(GIT_SUBMODULE_UPDATE); \
echo
# Now that the included *.mk files appeared, and are newer than this file, Make will restart itself:
# https://www.gnu.org/software/make/manual/make.html#Remaking-Makefiles
#
# After restarting, it will execute its original goal, so we don't have to start a child Make here
# with "$(MAKE) $(MAKECMDGOALS)". Isn't hidden control flow great?
else # "variables.mk" was included. Business as usual until the end of this file.
BUILD_MSG := "\\x1B[92mBuilding:\\x1B[39m"
# Determine the OS
detected_OS := $(shell uname -s)
@ -33,28 +15,29 @@ ifneq (,$(findstring MINGW,$(detected_OS)))
detected_OS := Windows
endif
# NIM binary location
NIM_BINARY := $(shell which nim)
NPH := $(shell dirname $(NIM_BINARY))/nph
# Compilation parameters
NIM_PARAMS ?=
ifeq ($(detected_OS),Windows)
# Update MINGW_PATH to standard MinGW location
MINGW_PATH = /mingw64
NIM_PARAMS += --passC:"-I$(MINGW_PATH)/include"
NIM_PARAMS += --passL:"-L$(MINGW_PATH)/lib"
NIM_PARAMS += --passL:"-Lvendor/nim-nat-traversal/vendor/miniupnp/miniupnpc"
NIM_PARAMS += --passL:"-Lvendor/nim-nat-traversal/vendor/libnatpmp-upstream"
LIBS = -lws2_32 -lbcrypt -liphlpapi -luserenv -lntdll -lminiupnpc -lnatpmp -lpq
LIBS = -lws2_32 -lbcrypt -liphlpapi -luserenv -lntdll -lpq
NIM_PARAMS += $(foreach lib,$(LIBS),--passL:"$(lib)")
NIM_PARAMS += --passL:"-Wl,--allow-multiple-definition"
export PATH := /c/msys64/usr/bin:/c/msys64/mingw64/bin:/c/msys64/usr/lib:/c/msys64/mingw64/lib:$(PATH)
endif
##########
## Main ##
##########
.PHONY: all test update clean examples
.PHONY: all test update clean examples deps nimble
# default target, because it's the first one that doesn't start with '.'
# default target
all: | wakunode2 libwaku
examples: | example2 chat2 chat2bridge
@ -71,102 +54,76 @@ ifeq ($(strip $(test_file)),)
else
$(MAKE) compile-test TEST_FILE="$(test_file)" TEST_NAME="$(call test_name)"
endif
# this prevents make from erroring on unknown targets like "Index"
# this prevents make from erroring on unknown targets
%:
@true
waku.nims:
ln -s waku.nimble $@
update: | update-common
rm -rf waku.nims && \
$(MAKE) waku.nims $(HANDLE_OUTPUT)
update: | waku.nims
git submodule update --init --recursive
nimble setup --localdeps
nimble install --depsOnly
$(MAKE) build-nph
clean:
rm -rf build
rm -rf nimbledeps
# must be included after the default target
-include $(BUILD_SYSTEM_DIR)/makefiles/targets.mk
build:
mkdir -p build
ifeq ($(OS),Windows_NT)
NIMBLE_DIR ?= $(USERPROFILE)/.nimble
else
NIMBLE_DIR ?= $(HOME)/.nimble
endif
export NIMBLE_DIR
nimble:
echo "Inside nimble target, checking for nimble..." && \
command -v nimble >/dev/null 2>&1 || { \
mv nimbledeps nimbledeps_backup 2>/dev/null || true; \
echo "choosenim not found, installing into $(NIMBLE_DIR)..."; \
curl -sSf https://nim-lang.org/choosenim/init.sh | sh; \
mv nimbledeps_backup nimbledeps 2>/dev/null || true; \
}
## Possible values: prod; debug
TARGET ?= prod
## Git version
GIT_VERSION ?= $(shell git describe --abbrev=6 --always --tags)
## Compilation parameters. If defined in the CLI the assignments won't be executed
NIM_PARAMS := $(NIM_PARAMS) -d:git_version=\"$(GIT_VERSION)\"
## Heaptracker options
HEAPTRACKER ?= 0
HEAPTRACKER_INJECT ?= 0
ifeq ($(HEAPTRACKER), 1)
# Assumes Nim's lib/system/alloc.nim is patched!
TARGET := debug-with-heaptrack
ifeq ($(HEAPTRACKER_INJECT), 1)
# the Nim compiler will load 'libheaptrack_inject.so'
HEAPTRACK_PARAMS := -d:heaptracker -d:heaptracker_inject
NIM_PARAMS := $(NIM_PARAMS) -d:heaptracker -d:heaptracker_inject
else
# the Nim compiler will load 'libheaptrack_preload.so'
HEAPTRACK_PARAMS := -d:heaptracker
NIM_PARAMS := $(NIM_PARAMS) -d:heaptracker
endif
endif
## end of Heaptracker options
##################
## Dependencies ##
##################
.PHONY: deps libbacktrace
FOUNDRY_VERSION := 1.5.0
PNPM_VERSION := 10.23.0
rustup:
ifeq (, $(shell which cargo))
# Install Rustup if it's not installed
# -y: Assume "yes" for all prompts
# --default-toolchain stable: Install the stable toolchain
curl https://sh.rustup.rs -sSf | sh -s -- -y --default-toolchain stable
endif
rln-deps: rustup
./scripts/install_rln_tests_dependencies.sh $(FOUNDRY_VERSION) $(PNPM_VERSION)
deps: | deps-common nat-libs waku.nims
### nim-libbacktrace
# "-d:release" implies "--stacktrace:off" and it cannot be added to config.nims
# Debug/Release mode
ifeq ($(DEBUG), 0)
NIM_PARAMS := $(NIM_PARAMS) -d:release
else
NIM_PARAMS := $(NIM_PARAMS) -d:debug
endif
ifeq ($(USE_LIBBACKTRACE), 0)
NIM_PARAMS := $(NIM_PARAMS) -d:disable_libbacktrace
endif
# enable experimental exit is dest feature in libp2p mix
NIM_PARAMS := $(NIM_PARAMS) -d:libp2p_mix_experimental_exit_is_dest
libbacktrace:
+ $(MAKE) -C vendor/nim-libbacktrace --no-print-directory BUILD_CXX_LIB=0
clean-libbacktrace:
+ $(MAKE) -C vendor/nim-libbacktrace clean $(HANDLE_OUTPUT)
# Extend deps and clean targets
ifneq ($(USE_LIBBACKTRACE), 0)
deps: | libbacktrace
endif
ifeq ($(POSTGRES), 1)
NIM_PARAMS := $(NIM_PARAMS) -d:postgres -d:nimDebugDlOpen
endif
@ -175,14 +132,26 @@ ifeq ($(DEBUG_DISCV5), 1)
NIM_PARAMS := $(NIM_PARAMS) -d:debugDiscv5
endif
clean: | clean-libbacktrace
# Export NIM_PARAMS so nimble can access it
export NIM_PARAMS
### Create nimble links (used when building with Nix)
##################
## Dependencies ##
##################
.PHONY: deps
nimbus-build-system-nimble-dir:
NIMBLE_DIR="$(CURDIR)/$(NIMBLE_DIR)" \
PWD_CMD="$(PWD)" \
$(CURDIR)/scripts/generate_nimble_links.sh
FOUNDRY_VERSION := 1.5.0
PNPM_VERSION := 10.23.0
rustup:
ifeq (, $(shell which cargo))
curl https://sh.rustup.rs -sSf | sh -s -- -y --default-toolchain stable
endif
rln-deps: rustup
./scripts/install_rln_tests_dependencies.sh $(FOUNDRY_VERSION) $(PNPM_VERSION)
deps: | nimble
##################
## RLN ##
@ -200,7 +169,7 @@ endif
$(LIBRLN_FILE):
echo -e $(BUILD_MSG) "$@" && \
./scripts/build_rln.sh $(LIBRLN_BUILDDIR) $(LIBRLN_VERSION) $(LIBRLN_FILE)
bash scripts/build_rln.sh $(LIBRLN_BUILDDIR) $(LIBRLN_VERSION) $(LIBRLN_FILE)
librln: | $(LIBRLN_FILE)
$(eval NIM_PARAMS += --passL:$(LIBRLN_FILE) --passL:-lm)
@ -209,7 +178,6 @@ clean-librln:
cargo clean --manifest-path vendor/zerokit/rln/Cargo.toml
rm -f $(LIBRLN_FILE)
# Extend clean target
clean: | clean-librln
#################
@ -217,61 +185,58 @@ clean: | clean-librln
#################
.PHONY: testcommon
testcommon: | build deps
testcommon: | build
echo -e $(BUILD_MSG) "build/$@" && \
$(ENV_SCRIPT) nim testcommon $(NIM_PARAMS) waku.nims
nimble testcommon
##########
## Waku ##
##########
.PHONY: testwaku wakunode2 testwakunode2 example2 chat2 chat2bridge liteprotocoltester
# install rln-deps only for the testwaku target
testwaku: | build deps rln-deps librln
testwaku: | build rln-deps librln
echo -e $(BUILD_MSG) "build/$@" && \
$(ENV_SCRIPT) nim test -d:os=$(shell uname) $(NIM_PARAMS) waku.nims
nimble test
wakunode2: | build deps librln
echo -e $(BUILD_MSG) "build/$@" && \
\
$(ENV_SCRIPT) nim wakunode2 $(NIM_PARAMS) waku.nims
nimble wakunode2
benchmarks: | build deps librln
echo -e $(BUILD_MSG) "build/$@" && \
$(ENV_SCRIPT) nim benchmarks $(NIM_PARAMS) waku.nims
nimble benchmarks
testwakunode2: | build deps librln
echo -e $(BUILD_MSG) "build/$@" && \
$(ENV_SCRIPT) nim testwakunode2 $(NIM_PARAMS) waku.nims
nimble testwakunode2
example2: | build deps librln
echo -e $(BUILD_MSG) "build/$@" && \
$(ENV_SCRIPT) nim example2 $(NIM_PARAMS) waku.nims
nimble example2
chat2: | build deps librln
echo -e $(BUILD_MSG) "build/$@" && \
$(ENV_SCRIPT) nim chat2 $(NIM_PARAMS) waku.nims
nimble chat2
chat2mix: | build deps librln
echo -e $(BUILD_MSG) "build/$@" && \
$(ENV_SCRIPT) nim chat2mix $(NIM_PARAMS) waku.nims
nimble chat2mix
rln-db-inspector: | build deps librln
echo -e $(BUILD_MSG) "build/$@" && \
$(ENV_SCRIPT) nim rln_db_inspector $(NIM_PARAMS) waku.nims
nimble rln_db_inspector
chat2bridge: | build deps librln
echo -e $(BUILD_MSG) "build/$@" && \
$(ENV_SCRIPT) nim chat2bridge $(NIM_PARAMS) waku.nims
nimble chat2bridge
liteprotocoltester: | build deps librln
echo -e $(BUILD_MSG) "build/$@" && \
$(ENV_SCRIPT) nim liteprotocoltester $(NIM_PARAMS) waku.nims
nimble liteprotocoltester
lightpushwithmix: | build deps librln
echo -e $(BUILD_MSG) "build/$@" && \
$(ENV_SCRIPT) nim lightpushwithmix $(NIM_PARAMS) waku.nims
nimble lightpushwithmix
api_example: | build deps librln
echo -e $(BUILD_MSG) "build/$@" && \
@ -279,12 +244,12 @@ api_example: | build deps librln
build/%: | build deps librln
echo -e $(BUILD_MSG) "build/$*" && \
$(ENV_SCRIPT) nim buildone $(NIM_PARAMS) waku.nims $*
nimble buildone $*
compile-test: | build deps librln
echo -e $(BUILD_MSG) "$(TEST_FILE)" "\"$(TEST_NAME)\"" && \
$(ENV_SCRIPT) nim buildTest $(NIM_PARAMS) waku.nims $(TEST_FILE) && \
$(ENV_SCRIPT) nim execTest $(NIM_PARAMS) waku.nims $(TEST_FILE) "\"$(TEST_NAME)\""; \
nimble buildTest $(TEST_FILE) && \
nimble execTest $(TEST_FILE) "\"$(TEST_NAME)\""
################
## Waku tools ##
@ -295,28 +260,21 @@ tools: networkmonitor wakucanary
wakucanary: | build deps librln
echo -e $(BUILD_MSG) "build/$@" && \
$(ENV_SCRIPT) nim wakucanary $(NIM_PARAMS) waku.nims
nimble wakucanary
networkmonitor: | build deps librln
echo -e $(BUILD_MSG) "build/$@" && \
$(ENV_SCRIPT) nim networkmonitor $(NIM_PARAMS) waku.nims
nimble networkmonitor
############
## Format ##
############
.PHONY: build-nph install-nph clean-nph print-nph-path
# Default location for nph binary shall be next to nim binary to make it available on the path.
NPH:=$(shell dirname $(NIM_BINARY))/nph
build-nph: | build deps
ifeq ("$(wildcard $(NPH))","")
$(ENV_SCRIPT) nim c --skipParentCfg:on vendor/nph/src/nph.nim && \
mv vendor/nph/src/nph $(shell dirname $(NPH))
echo "nph utility is available at " $(NPH)
else
echo "nph utility already exists at " $(NPH)
endif
nimble install nph@0.7.0 -y
echo "Check if nph utility is available:"
command -v nph
GIT_PRE_COMMIT_HOOK := .git/hooks/pre-commit
@ -335,9 +293,8 @@ nph/%: | build-nph
clean-nph:
rm -f $(NPH)
# To avoid hardcoding nph binary location in several places
print-nph-path:
echo "$(NPH)"
@echo "$(NPH)"
clean: | clean-nph
@ -346,25 +303,20 @@ clean: | clean-nph
###################
.PHONY: docs coverage
# TODO: Remove unused target
docs: | build deps
echo -e $(BUILD_MSG) "build/$@" && \
$(ENV_SCRIPT) nim doc --run --index:on --project --out:.gh-pages waku/waku.nim waku.nims
nimble doc --run --index:on --project --out:.gh-pages waku/waku.nim waku.nims
coverage:
echo -e $(BUILD_MSG) "build/$@" && \
$(ENV_SCRIPT) ./scripts/run_cov.sh -y
./scripts/run_cov.sh -y
#####################
## Container image ##
#####################
# -d:insecure - Necessary to enable Prometheus HTTP endpoint for metrics
# -d:chronicles_colors:none - Necessary to disable colors in logs for Docker
DOCKER_IMAGE_NIMFLAGS ?= -d:chronicles_colors:none -d:insecure -d:postgres
DOCKER_IMAGE_NIMFLAGS := $(DOCKER_IMAGE_NIMFLAGS) $(HEAPTRACK_PARAMS)
# build a docker image for the fleet
docker-image: MAKE_TARGET ?= wakunode2
docker-image: DOCKER_IMAGE_TAG ?= $(MAKE_TARGET)-$(GIT_VERSION)
docker-image: DOCKER_IMAGE_NAME ?= wakuorg/nwaku:$(DOCKER_IMAGE_TAG)
@ -372,7 +324,6 @@ docker-image:
docker build \
--build-arg="MAKE_TARGET=$(MAKE_TARGET)" \
--build-arg="NIMFLAGS=$(DOCKER_IMAGE_NIMFLAGS)" \
--build-arg="NIM_COMMIT=$(DOCKER_NIM_COMMIT)" \
--build-arg="LOG_LEVEL=$(LOG_LEVEL)" \
--build-arg="HEAPTRACK_BUILD=$(HEAPTRACKER)" \
--label="commit=$(shell git rev-parse HEAD)" \
@ -384,7 +335,7 @@ docker-quick-image: MAKE_TARGET ?= wakunode2
docker-quick-image: DOCKER_IMAGE_TAG ?= $(MAKE_TARGET)-$(GIT_VERSION)
docker-quick-image: DOCKER_IMAGE_NAME ?= wakuorg/nwaku:$(DOCKER_IMAGE_TAG)
docker-quick-image: NIM_PARAMS := $(NIM_PARAMS) -d:chronicles_colors:none -d:insecure -d:postgres --passL:$(LIBRLN_FILE) --passL:-lm
docker-quick-image: | build deps librln wakunode2
docker-quick-image: | build librln wakunode2
docker build \
--build-arg="MAKE_TARGET=$(MAKE_TARGET)" \
--tag $(DOCKER_IMAGE_NAME) \
@ -398,19 +349,14 @@ docker-push:
####################################
## Container lite-protocol-tester ##
####################################
# -d:insecure - Necessary to enable Prometheus HTTP endpoint for metrics
# -d:chronicles_colors:none - Necessary to disable colors in logs for Docker
DOCKER_LPT_NIMFLAGS ?= -d:chronicles_colors:none -d:insecure
# build a docker image for the fleet
docker-liteprotocoltester: DOCKER_LPT_TAG ?= latest
docker-liteprotocoltester: DOCKER_LPT_NAME ?= wakuorg/liteprotocoltester:$(DOCKER_LPT_TAG)
# --no-cache
docker-liteprotocoltester:
docker build \
--build-arg="MAKE_TARGET=liteprotocoltester" \
--build-arg="NIMFLAGS=$(DOCKER_LPT_NIMFLAGS)" \
--build-arg="NIM_COMMIT=$(DOCKER_NIM_COMMIT)" \
--build-arg="LOG_LEVEL=TRACE" \
--label="commit=$(shell git rev-parse HEAD)" \
--label="version=$(GIT_VERSION)" \
@ -430,39 +376,38 @@ docker-quick-liteprotocoltester: | liteprotocoltester
docker-liteprotocoltester-push:
docker push $(DOCKER_LPT_NAME)
################
## C Bindings ##
################
.PHONY: cbindings cwaku_example libwaku liblogosdelivery liblogosdelivery_example
detected_OS ?= Linux
ifeq ($(OS),Windows_NT)
detected_OS := Windows
else
detected_OS := $(shell uname -s)
endif
BUILD_COMMAND ?= Dynamic
STATIC ?= 0
LIBWAKU_BUILD_COMMAND ?= libwakuDynamic
LIBLOGOSDELIVERY_BUILD_COMMAND ?= liblogosdeliveryDynamic
ifeq ($(STATIC), 1)
BUILD_COMMAND = Static
endif
ifeq ($(detected_OS),Windows)
LIB_EXT_DYNAMIC = dll
LIB_EXT_STATIC = lib
BUILD_COMMAND := $(BUILD_COMMAND)Windows
else ifeq ($(detected_OS),Darwin)
LIB_EXT_DYNAMIC = dylib
LIB_EXT_STATIC = a
BUILD_COMMAND := $(BUILD_COMMAND)Mac
export IOS_SDK_PATH := $(shell xcrun --sdk iphoneos --show-sdk-path)
else ifeq ($(detected_OS),Linux)
LIB_EXT_DYNAMIC = so
LIB_EXT_STATIC = a
BUILD_COMMAND := $(BUILD_COMMAND)Linux
endif
LIB_EXT := $(LIB_EXT_DYNAMIC)
ifeq ($(STATIC), 1)
LIB_EXT = $(LIB_EXT_STATIC)
LIBWAKU_BUILD_COMMAND = libwakuStatic
LIBLOGOSDELIVERY_BUILD_COMMAND = liblogosdeliveryStatic
endif
libwaku: |
nimble --verbose libwaku$(BUILD_COMMAND) $(NIM_PARAMS) waku.nimble
libwaku: | build deps librln
echo -e $(BUILD_MSG) "build/$@.$(LIB_EXT)" && $(ENV_SCRIPT) nim $(LIBWAKU_BUILD_COMMAND) $(NIM_PARAMS) waku.nims $@.$(LIB_EXT)
liblogosdelivery: | build deps librln
echo -e $(BUILD_MSG) "build/$@.$(LIB_EXT)" && $(ENV_SCRIPT) nim $(LIBLOGOSDELIVERY_BUILD_COMMAND) $(NIM_PARAMS) waku.nims $@.$(LIB_EXT)
liblogosdelivery: |
nimble --verbose liblogosdelivery$(BUILD_COMMAND) $(NIM_PARAMS) waku.nimble
logosdelivery_example: | build liblogosdelivery
@echo -e $(BUILD_MSG) "build/$@"
@ -489,17 +434,35 @@ else ifeq ($(detected_OS),Windows)
-lws2_32
endif
cwaku_example: | build libwaku
echo -e $(BUILD_MSG) "build/$@" && \
cc -o "build/$@" \
./examples/cbindings/waku_example.c \
./examples/cbindings/base64.c \
-lwaku -Lbuild/ \
-pthread -ldl -lm
cppwaku_example: | build libwaku
echo -e $(BUILD_MSG) "build/$@" && \
g++ -o "build/$@" \
./examples/cpp/waku.cpp \
./examples/cpp/base64.cpp \
-lwaku -Lbuild/ \
-pthread -ldl -lm
nodejswaku: | build deps
echo -e $(BUILD_MSG) "build/$@" && \
node-gyp build --directory=examples/nodejs/
#####################
## Mobile Bindings ##
#####################
.PHONY: libwaku-android \
libwaku-android-precheck \
libwaku-android-arm64 \
libwaku-android-amd64 \
libwaku-android-x86 \
libwaku-android-arm \
rebuild-nat-libs \
build-libwaku-for-android-arch
libwaku-android-precheck \
libwaku-android-arm64 \
libwaku-android-amd64 \
libwaku-android-x86 \
libwaku-android-arm
ANDROID_TARGET ?= 30
ifeq ($(detected_OS),Darwin)
@ -508,22 +471,19 @@ else
ANDROID_TOOLCHAIN_DIR := $(ANDROID_NDK_HOME)/toolchains/llvm/prebuilt/linux-x86_64
endif
rebuild-nat-libs: | clean-cross nat-libs
libwaku-android-precheck:
ifndef ANDROID_NDK_HOME
$(error ANDROID_NDK_HOME is not set)
$(error ANDROID_NDK_HOME is not set)
endif
build-libwaku-for-android-arch:
ifneq ($(findstring /nix/store,$(LIBRLN_FILE)),)
mkdir -p $(CURDIR)/build/android/$(ABIDIR)/
cp $(LIBRLN_FILE) $(CURDIR)/build/android/$(ABIDIR)/
CPU=$(CPU) ABIDIR=$(ABIDIR) ANDROID_ARCH=$(ANDROID_ARCH) ANDROID_COMPILER=$(ANDROID_COMPILER) ANDROID_TOOLCHAIN_DIR=$(ANDROID_TOOLCHAIN_DIR) nimble libWakuAndroid
else
./scripts/build_rln_android.sh $(CURDIR)/build $(LIBRLN_BUILDDIR) $(LIBRLN_VERSION) $(CROSS_TARGET) $(ABIDIR)
endif
$(MAKE) rebuild-nat-libs CC=$(ANDROID_TOOLCHAIN_DIR)/bin/$(ANDROID_COMPILER)
CPU=$(CPU) ABIDIR=$(ABIDIR) ANDROID_ARCH=$(ANDROID_ARCH) ANDROID_COMPILER=$(ANDROID_COMPILER) ANDROID_TOOLCHAIN_DIR=$(ANDROID_TOOLCHAIN_DIR) $(ENV_SCRIPT) nim libWakuAndroid $(NIM_PARAMS) waku.nims
libwaku-android-arm64: ANDROID_ARCH=aarch64-linux-android
libwaku-android-arm64: CPU=arm64
@ -547,29 +507,23 @@ libwaku-android-arm: ANDROID_ARCH=armv7a-linux-androideabi
libwaku-android-arm: CPU=arm
libwaku-android-arm: ABIDIR=armeabi-v7a
libwaku-android-arm: | libwaku-android-precheck build deps
# cross-rs target architecture name does not match the one used in android
$(MAKE) build-libwaku-for-android-arch ANDROID_ARCH=$(ANDROID_ARCH) CROSS_TARGET=armv7-linux-androideabi CPU=$(CPU) ABIDIR=$(ABIDIR) ANDROID_COMPILER=$(ANDROID_ARCH)$(ANDROID_TARGET)-clang
libwaku-android:
$(MAKE) libwaku-android-amd64
$(MAKE) libwaku-android-arm64
$(MAKE) libwaku-android-x86
# This target is disabled because on recent versions of cross-rs complain with the following error
# relocation R_ARM_THM_ALU_PREL_11_0 cannot be used against symbol 'stack_init_trampoline_return'; recompile with -fPIC
# It's likely this architecture is not used so we might just not support it.
# $(MAKE) libwaku-android-arm
#################
## iOS Bindings #
#################
.PHONY: libwaku-ios-precheck \
libwaku-ios-device \
libwaku-ios-simulator \
libwaku-ios
libwaku-ios-device \
libwaku-ios-simulator \
libwaku-ios
IOS_DEPLOYMENT_TARGET ?= 18.0
# Get SDK paths dynamically using xcrun
define get_ios_sdk_path
$(shell xcrun --sdk $(1) --show-sdk-path 2>/dev/null)
endef
@ -581,59 +535,25 @@ else
$(error iOS builds are only supported on macOS)
endif
# Build for iOS architecture
build-libwaku-for-ios-arch:
IOS_SDK=$(IOS_SDK) IOS_ARCH=$(IOS_ARCH) IOS_SDK_PATH=$(IOS_SDK_PATH) $(ENV_SCRIPT) nim libWakuIOS $(NIM_PARAMS) waku.nims
IOS_SDK=$(IOS_SDK) IOS_ARCH=$(IOS_ARCH) IOS_SDK_PATH=$(IOS_SDK_PATH) nimble libWakuIOS
# iOS device (arm64)
libwaku-ios-device: IOS_ARCH=arm64
libwaku-ios-device: IOS_SDK=iphoneos
libwaku-ios-device: IOS_SDK_PATH=$(call get_ios_sdk_path,iphoneos)
libwaku-ios-device: | libwaku-ios-precheck build deps
$(MAKE) build-libwaku-for-ios-arch IOS_ARCH=$(IOS_ARCH) IOS_SDK=$(IOS_SDK) IOS_SDK_PATH=$(IOS_SDK_PATH)
# iOS simulator (arm64 - Apple Silicon Macs)
libwaku-ios-simulator: IOS_ARCH=arm64
libwaku-ios-simulator: IOS_SDK=iphonesimulator
libwaku-ios-simulator: IOS_SDK_PATH=$(call get_ios_sdk_path,iphonesimulator)
libwaku-ios-simulator: | libwaku-ios-precheck build deps
$(MAKE) build-libwaku-for-ios-arch IOS_ARCH=$(IOS_ARCH) IOS_SDK=$(IOS_SDK) IOS_SDK_PATH=$(IOS_SDK_PATH)
# Build all iOS targets
libwaku-ios:
$(MAKE) libwaku-ios-device
$(MAKE) libwaku-ios-simulator
cwaku_example: | build libwaku
echo -e $(BUILD_MSG) "build/$@" && \
cc -o "build/$@" \
./examples/cbindings/waku_example.c \
./examples/cbindings/base64.c \
-lwaku -Lbuild/ \
-pthread -ldl -lm \
-lminiupnpc -Lvendor/nim-nat-traversal/vendor/miniupnp/miniupnpc/build/ \
-lnatpmp -Lvendor/nim-nat-traversal/vendor/libnatpmp-upstream/ \
vendor/nim-libbacktrace/libbacktrace_wrapper.o \
vendor/nim-libbacktrace/install/usr/lib/libbacktrace.a
cppwaku_example: | build libwaku
echo -e $(BUILD_MSG) "build/$@" && \
g++ -o "build/$@" \
./examples/cpp/waku.cpp \
./examples/cpp/base64.cpp \
-lwaku -Lbuild/ \
-pthread -ldl -lm \
-lminiupnpc -Lvendor/nim-nat-traversal/vendor/miniupnp/miniupnpc/build/ \
-lnatpmp -Lvendor/nim-nat-traversal/vendor/libnatpmp-upstream/ \
vendor/nim-libbacktrace/libbacktrace_wrapper.o \
vendor/nim-libbacktrace/install/usr/lib/libbacktrace.a
nodejswaku: | build deps
echo -e $(BUILD_MSG) "build/$@" && \
node-gyp build --directory=examples/nodejs/
endif # "variables.mk" was not included
###################
# Release Targets #
###################
@ -647,5 +567,3 @@ release-notes:
docker.io/wakuorg/sv4git:latest \
release-notes |\
sed -E 's@#([0-9]+)@[#\1](https://github.com/waku-org/nwaku/issues/\1)@g'
# I could not get the tool to replace issue ids with links, so using sed for now,
# asked here: https://github.com/bvieira/sv4git/discussions/101

View File

@ -13,7 +13,8 @@ import
chronos,
eth/keys,
bearssl,
stew/[byteutils, results],
stew/byteutils,
results,
metrics,
metrics/chronos_httpserver
import
@ -50,8 +51,7 @@ import
import libp2p/protocols/pubsub/rpc/messages, libp2p/protocols/pubsub/pubsub
import ../../waku/waku_rln_relay
const Help =
"""
const Help = """
Commands: /[?|help|connect|nick|exit]
help: Prints this help
connect: dials a remote peer
@ -337,16 +337,16 @@ proc processInput(rfd: AsyncFD, rng: ref HmacDrbgContext) {.async.} =
builder.withRecord(record)
builder
.withNetworkConfigurationDetails(
conf.listenAddress,
Port(uint16(conf.tcpPort) + conf.portsShift),
extIp,
extTcpPort,
wsBindPort = Port(uint16(conf.websocketPort) + conf.portsShift),
wsEnabled = conf.websocketSupport,
wssEnabled = conf.websocketSecureSupport,
)
.tryGet()
.withNetworkConfigurationDetails(
conf.listenAddress,
Port(uint16(conf.tcpPort) + conf.portsShift),
extIp,
extTcpPort,
wsBindPort = Port(uint16(conf.websocketPort) + conf.portsShift),
wsEnabled = conf.websocketSupport,
wssEnabled = conf.websocketSecureSupport,
)
.tryGet()
builder.build().tryGet()
await node.start()

View File

@ -127,8 +127,10 @@ proc toMatterbridge(
assert chat2Msg.isOk
if not cmb.mbClient
.postMessage(text = string.fromBytes(chat2Msg[].payload), username = chat2Msg[].nick)
.containsValue(true):
.postMessage(
text = string.fromBytes(chat2Msg[].payload), username = chat2Msg[].nick
)
.containsValue(true):
chat2_mb_dropped.inc(labelValues = ["duplicate"])
error "Matterbridge host unreachable. Dropping message."
@ -175,10 +177,10 @@ proc new*(
builder.withNodeKey(nodev2Key)
builder
.withNetworkConfigurationDetails(
nodev2BindIp, nodev2BindPort, nodev2ExtIp, nodev2ExtPort
)
.tryGet()
.withNetworkConfigurationDetails(
nodev2BindIp, nodev2BindPort, nodev2ExtIp, nodev2ExtPort
)
.tryGet()
builder.build().tryGet()
return Chat2MatterBridge(

View File

@ -57,8 +57,7 @@ import ../../waku/waku_rln_relay
logScope:
topics = "chat2 mix"
const Help =
"""
const Help = """
Commands: /[?|help|connect|nick|exit]
help: Prints this help
connect: dials a remote peer
@ -429,16 +428,16 @@ proc processInput(rfd: AsyncFD, rng: ref HmacDrbgContext) {.async.} =
builder.withRecord(record)
builder
.withNetworkConfigurationDetails(
conf.listenAddress,
Port(uint16(conf.tcpPort) + conf.portsShift),
extIp,
extTcpPort,
wsBindPort = Port(uint16(conf.websocketPort) + conf.portsShift),
wsEnabled = conf.websocketSupport,
wssEnabled = conf.websocketSecureSupport,
)
.tryGet()
.withNetworkConfigurationDetails(
conf.listenAddress,
Port(uint16(conf.tcpPort) + conf.portsShift),
extIp,
extTcpPort,
wsBindPort = Port(uint16(conf.websocketPort) + conf.portsShift),
wsEnabled = conf.websocketSupport,
wssEnabled = conf.websocketSecureSupport,
)
.tryGet()
builder.build().tryGet()
node.mountAutoSharding(conf.clusterId, conf.numShardsInNetwork).isOkOr:

View File

@ -113,17 +113,16 @@ type
shards* {.
desc:
"Shards index to subscribe to [0..NUM_SHARDS_IN_NETWORK-1]. Argument may be repeated.",
defaultValue:
@[
uint16(0),
uint16(1),
uint16(2),
uint16(3),
uint16(4),
uint16(5),
uint16(6),
uint16(7),
],
defaultValue: @[
uint16(0),
uint16(1),
uint16(2),
uint16(3),
uint16(4),
uint16(5),
uint16(6),
uint16(7),
],
name: "shard"
.}: seq[uint16]

View File

@ -161,11 +161,10 @@ proc main(rng: ref HmacDrbgContext): Future[int] {.async.} =
# create dns resolver
let
nameServers =
@[
initTAddress(parseIpAddress("1.1.1.1"), Port(53)),
initTAddress(parseIpAddress("1.0.0.1"), Port(53)),
]
nameServers = @[
initTAddress(parseIpAddress("1.1.1.1"), Port(53)),
initTAddress(parseIpAddress("1.0.0.1"), Port(53)),
]
resolver: DnsResolver = DnsResolver.new(nameServers)
if conf.logLevel != LogLevel.NONE:

View File

@ -5,7 +5,6 @@ import
chronicles,
chronos,
metrics,
libbacktrace,
system/ansi_c,
libp2p/crypto/crypto
import
@ -88,7 +87,7 @@ when isMainModule:
when defined(posix):
proc handleSigsegv(signal: cint) {.noconv.} =
# Require --debugger:native
fatal "Shutting down after receiving SIGSEGV", stacktrace = getBacktrace()
fatal "Shutting down after receiving SIGSEGV"
# Not available in -d:release mode
writeStackTrace()

View File

@ -1,20 +1,13 @@
import os
if defined(release):
switch("nimcache", "nimcache/release/$projectName")
else:
switch("nimcache", "nimcache/debug/$projectName")
--noNimblePath
when withDir(thisDir(), system.fileExists("nimble.paths")):
include "nimble.paths"
if defined(windows):
switch("passL", "rln.lib")
switch("define", "postgres=false")
# Automatically add all vendor subdirectories
for dir in walkDir("./vendor"):
if dir.kind == pcDir:
switch("path", dir.path)
switch("path", dir.path / "src")
# disable timestamps in Windows PE headers - https://wiki.debian.org/ReproducibleBuilds/TimestampsInPEBinaries
switch("passL", "-Wl,--no-insert-timestamp")
# increase stack size
@ -63,9 +56,6 @@ elif defined(macosx) and defined(arm64):
switch("passC", "-mcpu=apple-m1")
switch("passL", "-mcpu=apple-m1")
else:
if not defined(android):
switch("passC", "-march=native")
switch("passL", "-march=native")
if defined(windows):
# https://gcc.gnu.org/bugzilla/show_bug.cgi?id=65782
# ("-fno-asynchronous-unwind-tables" breaks Nim's exception raising, sometimes)
@ -87,18 +77,6 @@ else:
switch("define", "withoutPCRE")
# the default open files limit is too low on macOS (512), breaking the
# "--debugger:native" build. It can be increased with `ulimit -n 1024`.
if not defined(macosx) and not defined(android):
# add debugging symbols and original files and line numbers
--debugger:
native
if not (defined(windows) and defined(i386)) and not defined(disable_libbacktrace):
# light-weight stack traces using libbacktrace and libunwind
--define:
nimStackTraceOverride
switch("import", "libbacktrace")
--define:
nimOldCaseObjects
# https://github.com/status-im/nim-confutils/issues/9

57
flake.lock generated
View File

@ -2,27 +2,48 @@
"nodes": {
"nixpkgs": {
"locked": {
"lastModified": 1757590060,
"narHash": "sha256-EWwwdKLMZALkgHFyKW7rmyhxECO74+N+ZO5xTDnY/5c=",
"lastModified": 1770464364,
"narHash": "sha256-z5NJPSBwsLf/OfD8WTmh79tlSU8XgIbwmk6qB1/TFzY=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "0ef228213045d2cdb5a169a95d63ded38670b293",
"rev": "23d72dabcb3b12469f57b37170fcbc1789bd7457",
"type": "github"
},
"original": {
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "0ef228213045d2cdb5a169a95d63ded38670b293",
"rev": "23d72dabcb3b12469f57b37170fcbc1789bd7457",
"type": "github"
}
},
"root": {
"inputs": {
"nixpkgs": "nixpkgs",
"rust-overlay": "rust-overlay",
"zerokit": "zerokit"
}
},
"rust-overlay": {
"inputs": {
"nixpkgs": [
"nixpkgs"
]
},
"locked": {
"lastModified": 1771211437,
"narHash": "sha256-lcNK438i4DGtyA+bPXXyVLHVmJjYpVKmpux9WASa3ro=",
"owner": "oxalica",
"repo": "rust-overlay",
"rev": "c62195b3d6e1bb11e0c2fb2a494117d3b55d410f",
"type": "github"
},
"original": {
"owner": "oxalica",
"repo": "rust-overlay",
"type": "github"
}
},
"rust-overlay_2": {
"inputs": {
"nixpkgs": [
"zerokit",
@ -30,11 +51,11 @@
]
},
"locked": {
"lastModified": 1748399823,
"narHash": "sha256-kahD8D5hOXOsGbNdoLLnqCL887cjHkx98Izc37nDjlA=",
"lastModified": 1771211437,
"narHash": "sha256-lcNK438i4DGtyA+bPXXyVLHVmJjYpVKmpux9WASa3ro=",
"owner": "oxalica",
"repo": "rust-overlay",
"rev": "d68a69dc71bc19beb3479800392112c2f6218159",
"rev": "c62195b3d6e1bb11e0c2fb2a494117d3b55d410f",
"type": "github"
},
"original": {
@ -48,21 +69,21 @@
"nixpkgs": [
"nixpkgs"
],
"rust-overlay": "rust-overlay"
"rust-overlay": "rust-overlay_2"
},
"locked": {
"lastModified": 1762211504,
"narHash": "sha256-SbDoBElFYJ4cYebltxlO2lYnz6qOaDAVY6aNJ5bqHDE=",
"ref": "refs/heads/master",
"rev": "3160d9504d07791f2fc9b610948a6cf9a58ed488",
"revCount": 342,
"type": "git",
"url": "https://github.com/vacp2p/zerokit"
"lastModified": 1771279884,
"narHash": "sha256-tzkQPwSl4vPTUo1ixHh6NCENjsBDroMKTjifg2q8QX8=",
"owner": "vacp2p",
"repo": "zerokit",
"rev": "53b18098e6d5d046e3eb1ac338a8f4f651432477",
"type": "github"
},
"original": {
"rev": "3160d9504d07791f2fc9b610948a6cf9a58ed488",
"type": "git",
"url": "https://github.com/vacp2p/zerokit"
"owner": "vacp2p",
"repo": "zerokit",
"rev": "53b18098e6d5d046e3eb1ac338a8f4f651432477",
"type": "github"
}
}
},

View File

@ -1,5 +1,5 @@
{
description = "Logos Messaging Nim build flake";
description = "Logos-message-delivery build flake";
nixConfig = {
extra-substituters = [ "https://nix-cache.status.im/" ];
@ -7,17 +7,23 @@
};
inputs = {
# We are pinning the commit because ultimately we want to use same commit across different projects.
# A commit from nixpkgs 24.11 release : https://github.com/NixOS/nixpkgs/tree/release-24.11
nixpkgs.url = "github:NixOS/nixpkgs/0ef228213045d2cdb5a169a95d63ded38670b293";
# WARNING: Remember to update commit and use 'nix flake update' to update flake.lock.
# Pinning the commit to use same commit across different projects.
# A commit from nixpkgs 25.11 release : https://github.com/NixOS/nixpkgs/tree/release-25.11
nixpkgs.url = "github:NixOS/nixpkgs?rev=23d72dabcb3b12469f57b37170fcbc1789bd7457";
rust-overlay = {
url = "github:oxalica/rust-overlay";
inputs.nixpkgs.follows = "nixpkgs";
};
# External flake input: Zerokit pinned to a specific commit
zerokit = {
url = "git+https://github.com/vacp2p/zerokit?rev=3160d9504d07791f2fc9b610948a6cf9a58ed488";
url = "github:vacp2p/zerokit/53b18098e6d5d046e3eb1ac338a8f4f651432477";
inputs.nixpkgs.follows = "nixpkgs";
};
};
outputs = { self, nixpkgs, zerokit }:
outputs = { self, nixpkgs, rust-overlay, zerokit }:
let
stableSystems = [
"x86_64-linux" "aarch64-linux"
@ -35,7 +41,8 @@
android_sdk.accept_license = true;
allowUnfree = true;
};
overlays = [
overlays = [
(import rust-overlay)
(final: prev: {
androidEnvCustom = prev.callPackage ./nix/pkgs/android-sdk { };
androidPkgs = final.androidEnvCustom.pkgs;
@ -46,40 +53,43 @@
);
in rec {
packages = forAllSystems (system: let
pkgs = pkgsFor.${system};
in rec {
libwaku-android-arm64 = pkgs.callPackage ./nix/default.nix {
inherit stableSystems;
src = self;
targets = ["libwaku-android-arm64"];
abidir = "arm64-v8a";
zerokitRln = zerokit.packages.${system}.rln-android-arm64;
};
packages = forAllSystems (system:
let pkgs = pkgsFor.${system};
libwaku = pkgs.callPackage ./nix/default.nix {
inherit stableSystems;
src = self;
targets = ["libwaku"];
zerokitRln = zerokit.packages.${system}.rln;
};
in rec {
# Consumer packages (src = self)
libwaku-android-arm64 = pkgs.callPackage ./nix/default.nix {
inherit stableSystems;
src = self;
targets = ["libwaku-android-arm64"];
abidir = "arm64-v8a";
zerokitRln = zerokit.packages.${system}.rln-android-arm64;
};
wakucanary = pkgs.callPackage ./nix/default.nix {
inherit stableSystems;
src = self;
targets = ["wakucanary"];
zerokitRln = zerokit.packages.${system}.rln;
};
libwaku = pkgs.callPackage ./nix/default.nix {
inherit stableSystems;
src = self;
targets = ["libwaku"];
zerokitRln = zerokit.packages.${system}.rln;
};
liblogosdelivery = pkgs.callPackage ./nix/default.nix {
inherit stableSystems;
src = self;
targets = ["liblogosdelivery"];
zerokitRln = zerokit.packages.${system}.rln;
};
wakucanary = pkgs.callPackage ./nix/default.nix {
inherit stableSystems;
src = self;
targets = ["wakucanary"];
zerokitRln = zerokit.packages.${system}.rln;
};
default = libwaku;
});
liblogosdelivery = pkgs.callPackage ./nix/default.nix {
inherit stableSystems;
src = self;
targets = ["liblogosdelivery"];
zerokitRln = zerokit.packages.${system}.rln;
};
default = libwaku;
}
);
devShells = forAllSystems (system: {
default = pkgsFor.${system}.callPackage ./nix/shell.nix {};

View File

@ -7,13 +7,9 @@ import ../../waku/api/types
type JsonConnectionStatusChangeEvent* = ref object of JsonEvent
status*: ConnectionStatus
proc new*(
T: type JsonConnectionStatusChangeEvent, status: ConnectionStatus
): T =
return JsonConnectionStatusChangeEvent(
eventType: "node_health_change",
status: status
)
proc new*(T: type JsonConnectionStatusChangeEvent, status: ConnectionStatus): T =
return
JsonConnectionStatusChangeEvent(eventType: "node_health_change", status: status)
method `$`*(event: JsonConnectionStatusChangeEvent): string =
$(%*event)

View File

@ -9,7 +9,7 @@ import
metrics,
ffi
import
waku/factory/waku, waku/node/waku_node, waku/node/health_monitor, library/declare_lib
waku/factory/waku, waku/node/waku_node, waku/node/health_monitor, libwaku/declare_lib
proc getMultiaddresses(node: WakuNode): seq[string] =
return node.info().listenAddresses

View File

@ -7,7 +7,7 @@ import
waku/waku_core/peers,
waku/node/waku_node,
waku/node/kernel_api,
library/declare_lib
libwaku/declare_lib
proc retrieveBootstrapNodes(
enrTreeUrl: string, ipDnsServer: string

View File

@ -8,7 +8,7 @@ import
waku/factory/node_factory,
waku/factory/app_callbacks,
waku/rest_api/endpoint/builder,
library/declare_lib
libwaku/declare_lib
proc createWaku(
configJson: cstring, appCallbacks: AppCallbacks = nil

View File

@ -1,7 +1,7 @@
import std/[json, strutils]
import chronos, results, ffi
import libp2p/[protocols/ping, switch, multiaddress, multicodec]
import waku/[factory/waku, waku_core/peers, node/waku_node], library/declare_lib
import waku/[factory/waku, waku_core/peers, node/waku_node], libwaku/declare_lib
proc waku_ping_peer(
ctx: ptr FFIContext[Waku],

View File

@ -12,8 +12,8 @@ import
waku/node/kernel_api,
waku/waku_core/topics/pubsub_topic,
waku/waku_core/topics/content_topic,
library/events/json_message_event,
library/declare_lib
libwaku/events/json_message_event,
libwaku/declare_lib
const FilterOpTimeout = 5.seconds

View File

@ -8,8 +8,8 @@ import
waku/waku_core/topics/pubsub_topic,
waku/waku_lightpush_legacy/client,
waku/node/peer_manager/peer_manager,
library/events/json_message_event,
library/declare_lib
libwaku/events/json_message_event,
libwaku/declare_lib
proc waku_lightpush_publish(
ctx: ptr FFIContext[Waku],

View File

@ -10,8 +10,8 @@ import
waku/node/kernel_api/relay,
waku/waku_relay/protocol,
waku/node/peer_manager,
library/events/json_message_event,
library/declare_lib
libwaku/events/json_message_event,
libwaku/declare_lib
proc waku_relay_get_peers_in_mesh(
ctx: ptr FFIContext[Waku],

View File

@ -2,13 +2,13 @@ import std/[json, sugar, strutils, options]
import chronos, chronicles, results, stew/byteutils, ffi
import
waku/factory/waku,
library/utils,
libwaku/utils,
waku/waku_core/peers,
waku/waku_core/message/digest,
waku/waku_store/common,
waku/waku_store/client,
waku/common/paging,
library/declare_lib
libwaku/declare_lib
func fromJsonNode(jsonContent: JsonNode): Result[StoreQueryRequest, string] =
var contentTopics: seq[string]

View File

@ -72,7 +72,7 @@ proc waku_new(
relayHandler: onReceivedMessage(ctx),
topicHealthChangeHandler: onTopicHealthChange(ctx),
connectionChangeHandler: onConnectionChange(ctx),
connectionStatusChangeHandler: onConnectionStatusChange(ctx)
connectionStatusChangeHandler: onConnectionStatusChange(ctx),
)
ffi.sendRequestToFFIThread(

View File

@ -1,5 +1,4 @@
const ContentScriptVersion_1* =
"""
const ContentScriptVersion_1* = """
CREATE TABLE IF NOT EXISTS messages (
pubsubTopic VARCHAR NOT NULL,
contentTopic VARCHAR NOT NULL,

View File

@ -1,5 +1,4 @@
const ContentScriptVersion_2* =
"""
const ContentScriptVersion_2* = """
ALTER TABLE IF EXISTS messages_backup RENAME TO messages;
ALTER TABLE messages RENAME TO messages_backup;
ALTER TABLE messages_backup DROP CONSTRAINT messageIndex;

View File

@ -1,5 +1,4 @@
const ContentScriptVersion_3* =
"""
const ContentScriptVersion_3* = """
CREATE INDEX IF NOT EXISTS i_query ON messages
(contentTopic, pubsubTopic, storedAt, id);

View File

@ -1,5 +1,4 @@
const ContentScriptVersion_4* =
"""
const ContentScriptVersion_4* = """
ALTER TABLE messages ADD meta VARCHAR default null;
CREATE INDEX IF NOT EXISTS i_query ON messages (contentTopic, pubsubTopic, storedAt, id);

View File

@ -1,5 +1,4 @@
const ContentScriptVersion_5* =
"""
const ContentScriptVersion_5* = """
CREATE INDEX IF NOT EXISTS i_query_storedAt ON messages (storedAt, id);
UPDATE version SET version = 5 WHERE version = 4;

View File

@ -1,5 +1,4 @@
const ContentScriptVersion_6* =
"""
const ContentScriptVersion_6* = """
-- we can drop the timestamp column because this data is also kept in the storedAt column
ALTER TABLE messages DROP COLUMN timestamp;

View File

@ -1,5 +1,4 @@
const ContentScriptVersion_7* =
"""
const ContentScriptVersion_7* = """
-- Create lookup table
CREATE TABLE IF NOT EXISTS messages_lookup (

View File

@ -10,16 +10,15 @@ type MigrationScript* = object
proc init*(T: type MigrationScript, targetVersion: int, scriptContent: string): T =
return MigrationScript(targetVersion: targetVersion, scriptContent: scriptContent)
const PgMigrationScripts* =
@[
MigrationScript(version: 1, scriptContent: ContentScriptVersion_1),
MigrationScript(version: 2, scriptContent: ContentScriptVersion_2),
MigrationScript(version: 3, scriptContent: ContentScriptVersion_3),
MigrationScript(version: 4, scriptContent: ContentScriptVersion_4),
MigrationScript(version: 5, scriptContent: ContentScriptVersion_5),
MigrationScript(version: 6, scriptContent: ContentScriptVersion_6),
MigrationScript(version: 7, scriptContent: ContentScriptVersion_7),
]
const PgMigrationScripts* = @[
MigrationScript(version: 1, scriptContent: ContentScriptVersion_1),
MigrationScript(version: 2, scriptContent: ContentScriptVersion_2),
MigrationScript(version: 3, scriptContent: ContentScriptVersion_3),
MigrationScript(version: 4, scriptContent: ContentScriptVersion_4),
MigrationScript(version: 5, scriptContent: ContentScriptVersion_5),
MigrationScript(version: 6, scriptContent: ContentScriptVersion_6),
MigrationScript(version: 7, scriptContent: ContentScriptVersion_7),
]
proc getMigrationScripts*(currentVersion: int64, targetVersion: int64): seq[string] =
var ret = newSeq[string]()

551
nimble.lock Normal file
View File

@ -0,0 +1,551 @@
{
"version": 2,
"packages": {
"libp2p": {
"version": "1.15.2",
"vcsRevision": "ff8d51857b4b79a68468e7bcc27b2026cca02996",
"url": "https://github.com/status-im/nim-libp2p.git",
"downloadMethod": "git",
"dependencies": [
"nimcrypto",
"dnsclient",
"bearssl",
"chronicles",
"chronos",
"metrics",
"secp256k1",
"stew",
"websock",
"unittest2",
"results",
"serialization",
"lsquic",
"jwt"
],
"checksums": {
"sha1": "fa2a7552c6ec860717b77ce34cf0b7afe4570234"
}
},
"unittest2": {
"version": "0.2.5",
"vcsRevision": "26f2ef3ae0ec72a2a75bfe557e02e88f6a31c189",
"url": "https://github.com/status-im/nim-unittest2",
"downloadMethod": "git",
"dependencies": [],
"checksums": {
"sha1": "02bb3751ba9ddc3c17bfd89f2e41cb6bfb8fc0c9"
}
},
"bearssl": {
"version": "0.2.6",
"vcsRevision": "11e798b62b8e6beabe958e048e9e24c7e0f9ee63",
"url": "https://github.com/status-im/nim-bearssl",
"downloadMethod": "git",
"dependencies": [
"unittest2"
],
"checksums": {
"sha1": "7e068f119664cf47ad0cfb74ef4c56fb6b616523"
}
},
"bearssl_pkey_decoder": {
"version": "0.1.0",
"vcsRevision": "21dd3710df9345ed2ad8bf8f882761e07863b8e0",
"url": "https://github.com/vacp2p/bearssl_pkey_decoder",
"downloadMethod": "git",
"dependencies": [
"bearssl"
],
"checksums": {
"sha1": "21b42e2e6ddca6c875d3fc50f36a5115abf51714"
}
},
"jwt": {
"version": "0.2",
"vcsRevision": "18f8378de52b241f321c1f9ea905456e89b95c6f",
"url": "https://github.com/vacp2p/nim-jwt.git",
"downloadMethod": "git",
"dependencies": [
"bearssl",
"bearssl_pkey_decoder"
],
"checksums": {
"sha1": "bcfd6fc9c5e10a52b87117219b7ab5c98136bc8e"
}
},
"testutils": {
"version": "0.8.0",
"vcsRevision": "e4d37dc1652d5c63afb89907efb5a5e812261797",
"url": "https://github.com/status-im/nim-testutils",
"downloadMethod": "git",
"dependencies": [
"unittest2"
],
"checksums": {
"sha1": "d1678f50aa47d113b4e77d41eec2190830b523fa"
}
},
"db_connector": {
"version": "0.1.0",
"vcsRevision": "29450a2063970712422e1ab857695c12d80112a6",
"url": "https://github.com/nim-lang/db_connector",
"downloadMethod": "git",
"dependencies": [],
"checksums": {
"sha1": "4f2e67d0e4b61af9ac5575509305660b473f01a4"
}
},
"results": {
"version": "0.5.1",
"vcsRevision": "df8113dda4c2d74d460a8fa98252b0b771bf1f27",
"url": "https://github.com/arnetheduck/nim-results",
"downloadMethod": "git",
"dependencies": [],
"checksums": {
"sha1": "a9c011f74bc9ed5c91103917b9f382b12e82a9e7"
}
},
"nat_traversal": {
"version": "0.0.1",
"vcsRevision": "860e18c37667b5dd005b94c63264560c35d88004",
"url": "https://github.com/status-im/nim-nat-traversal",
"downloadMethod": "git",
"dependencies": [
"results"
],
"checksums": {
"sha1": "1a376d3e710590ef2c48748a546369755f0a7c97"
}
},
"stew": {
"version": "0.4.2",
"vcsRevision": "b66168735d6f3841c5239c3169d3fe5fe98b1257",
"url": "https://github.com/status-im/nim-stew",
"downloadMethod": "git",
"dependencies": [
"results",
"unittest2"
],
"checksums": {
"sha1": "928e82cb8d2f554e8f10feb2349ee9c32fee3a8c"
}
},
"zlib": {
"version": "0.1.0",
"vcsRevision": "e680f269fb01af2c34a2ba879ff281795a5258fe",
"url": "https://github.com/status-im/nim-zlib",
"downloadMethod": "git",
"dependencies": [
"stew",
"results"
],
"checksums": {
"sha1": "bbde4f5a97a84b450fef7d107461e5f35cf2b47f"
}
},
"httputils": {
"version": "0.4.0",
"vcsRevision": "c53852d9e24205b6363bba517fa8ee7bde823691",
"url": "https://github.com/status-im/nim-http-utils",
"downloadMethod": "git",
"dependencies": [
"stew",
"results",
"unittest2"
],
"checksums": {
"sha1": "298bc5b6fe4e5aa9c3b7e2ebfa17191675020f10"
}
},
"chronos": {
"version": "4.2.0",
"vcsRevision": "0d00279e67ad9fadeb944944449adc89f052b8bd",
"url": "https://github.com/status-im/nim-chronos",
"downloadMethod": "git",
"dependencies": [
"results",
"stew",
"bearssl",
"httputils",
"unittest2"
],
"checksums": {
"sha1": "add14e711abc98b2203e7c5a35c860c7b86f15b5"
}
},
"metrics": {
"version": "0.1.2",
"vcsRevision": "11d0cddfb0e711aa2a8c75d1892ae24a64c299fc",
"url": "https://github.com/status-im/nim-metrics",
"downloadMethod": "git",
"dependencies": [
"chronos",
"results",
"stew"
],
"checksums": {
"sha1": "5cdac99d85d3c146d170e85064c88fb28f377842"
}
},
"faststreams": {
"version": "0.5.0",
"vcsRevision": "ce27581a3e881f782f482cb66dc5b07a02bd615e",
"url": "https://github.com/status-im/nim-faststreams",
"downloadMethod": "git",
"dependencies": [
"stew",
"unittest2"
],
"checksums": {
"sha1": "ee61e507b805ae1df7ec936f03f2d101b0d72383"
}
},
"snappy": {
"version": "0.1.0",
"vcsRevision": "00bfcef94f8ef6981df5d5b994897f6695badfb2",
"url": "https://github.com/status-im/nim-snappy",
"downloadMethod": "git",
"dependencies": [
"faststreams",
"unittest2",
"results",
"stew"
],
"checksums": {
"sha1": "e572d60d6a3178c5b1cde2400c51ad771812cd3d"
}
},
"serialization": {
"version": "0.5.2",
"vcsRevision": "b0f2fa32960ea532a184394b0f27be37bd80248b",
"url": "https://github.com/status-im/nim-serialization",
"downloadMethod": "git",
"dependencies": [
"faststreams",
"unittest2",
"stew"
],
"checksums": {
"sha1": "fa35c1bb76a0a02a2379fe86eaae0957c7527cb8"
}
},
"toml_serialization": {
"version": "0.2.18",
"vcsRevision": "b5b387e6fb2a7cc75d54a269b07cc6218361bd46",
"url": "https://github.com/status-im/nim-toml-serialization",
"downloadMethod": "git",
"dependencies": [
"faststreams",
"serialization",
"stew"
],
"checksums": {
"sha1": "76ae1c2af5dd092849b41750ff29217980dc9ca3"
}
},
"confutils": {
"version": "0.1.0",
"vcsRevision": "f684e55d56ba4016e2add64f74c4840476aa493d",
"url": "https://github.com/status-im/nim-confutils",
"downloadMethod": "git",
"dependencies": [
"stew",
"serialization",
"results"
],
"checksums": {
"sha1": "1bef15b34686adf71e88883cfc2452afe9fa095f"
}
},
"json_serialization": {
"version": "0.4.4",
"vcsRevision": "c343b0e243d9e17e2c40f3a8a24340f7c4a71d44",
"url": "https://github.com/status-im/nim-json-serialization",
"downloadMethod": "git",
"dependencies": [
"faststreams",
"serialization",
"stew",
"results"
],
"checksums": {
"sha1": "8b3115354104858a0ac9019356fb29720529c2bd"
}
},
"chronicles": {
"version": "0.12.2",
"vcsRevision": "27ec507429a4eb81edc20f28292ee8ec420be05b",
"url": "https://github.com/status-im/nim-chronicles",
"downloadMethod": "git",
"dependencies": [
"faststreams",
"serialization",
"json_serialization",
"testutils"
],
"checksums": {
"sha1": "02febb20d088120b2836d3306cfa21f434f88f65"
}
},
"presto": {
"version": "0.1.1",
"vcsRevision": "d66043dd7ede146442e6c39720c76a20bde5225f",
"url": "https://github.com/status-im/nim-presto",
"downloadMethod": "git",
"dependencies": [
"chronos",
"chronicles",
"metrics",
"results",
"stew"
],
"checksums": {
"sha1": "8df97c45683abe2337bdff43b844c4fbcc124ca2"
}
},
"stint": {
"version": "0.8.2",
"vcsRevision": "470b7892561b5179ab20bd389a69217d6213fe58",
"url": "https://github.com/status-im/nim-stint",
"downloadMethod": "git",
"dependencies": [
"stew",
"unittest2"
],
"checksums": {
"sha1": "d8f871fd617e7857192d4609fe003b48942a8ae5"
}
},
"minilru": {
"version": "0.1.0",
"vcsRevision": "6dd93feb60f4cded3c05e7af7209cf63fb677893",
"url": "https://github.com/status-im/nim-minilru",
"downloadMethod": "git",
"dependencies": [
"results",
"unittest2"
],
"checksums": {
"sha1": "0be03a5da29fdd4409ea74a60fd0ccce882601b4"
}
},
"sqlite3_abi": {
"version": "3.51.2.0",
"vcsRevision": "89ba51f557414d3a3e17ab3df8270e1bdaa3ca2a",
"url": "https://github.com/arnetheduck/nim-sqlite3-abi",
"downloadMethod": "git",
"dependencies": [],
"checksums": {
"sha1": "921e733e4e8ebadc7fd06660716be8821df384ba"
}
},
"dnsclient": {
"version": "0.3.4",
"vcsRevision": "23214235d4784d24aceed99bbfe153379ea557c8",
"url": "https://github.com/ba0f3/dnsclient.nim",
"downloadMethod": "git",
"dependencies": [],
"checksums": {
"sha1": "65262c7e533ff49d6aca5539da4bc6c6ce132f40"
}
},
"unicodedb": {
"version": "0.13.2",
"vcsRevision": "66f2458710dc641dd4640368f9483c8a0ec70561",
"url": "https://github.com/nitely/nim-unicodedb",
"downloadMethod": "git",
"dependencies": [],
"checksums": {
"sha1": "739102d885d99bb4571b1955f5f12aee423c935b"
}
},
"regex": {
"version": "0.26.3",
"vcsRevision": "4593305ed1e49731fc75af1dc572dd2559aad19c",
"url": "https://github.com/nitely/nim-regex",
"downloadMethod": "git",
"dependencies": [
"unicodedb"
],
"checksums": {
"sha1": "4d24e7d7441137cd202e16f2359a5807ddbdc31f"
}
},
"nimcrypto": {
"version": "0.7.3",
"vcsRevision": "b3dbc9c4d08e58c5b7bfad6dc7ef2ee52f2f4c08",
"url": "https://github.com/cheatfate/nimcrypto",
"downloadMethod": "git",
"dependencies": [],
"checksums": {
"sha1": "f72b90fe3f4da09efa482de4f8729e7ee4abea2f"
}
},
"websock": {
"version": "0.2.1",
"vcsRevision": "21a7c1982d1524081f29f7fcfab0850b80708db5",
"url": "https://github.com/status-im/nim-websock",
"downloadMethod": "git",
"dependencies": [
"chronos",
"httputils",
"chronicles",
"stew",
"nimcrypto",
"bearssl",
"results",
"zlib"
],
"checksums": {
"sha1": "50744c55ca69a01332d38825a478e3cb13935b60"
}
},
"json_rpc": {
"version": "0.5.4",
"vcsRevision": "b6e40a776fa2d00b97a9366761fb7da18f31ae5c",
"url": "https://github.com/status-im/nim-json-rpc",
"downloadMethod": "git",
"dependencies": [
"stew",
"nimcrypto",
"stint",
"chronos",
"httputils",
"chronicles",
"websock",
"serialization",
"json_serialization",
"unittest2"
],
"checksums": {
"sha1": "d8e8be795fcf098f4ce03b5826f6b3153f6a6e07"
}
},
"lsquic": {
"version": "0.0.1",
"vcsRevision": "4fb03ee7bfb39aecb3316889fdcb60bec3d0936f",
"url": "https://github.com/vacp2p/nim-lsquic",
"downloadMethod": "git",
"dependencies": [
"zlib",
"stew",
"chronos",
"nimcrypto",
"unittest2",
"chronicles"
],
"checksums": {
"sha1": "f465fa994346490d0924d162f53d9b5aec62f948"
}
},
"secp256k1": {
"version": "0.6.0.3.2",
"vcsRevision": "d8f1288b7c72f00be5fc2c5ea72bf5cae1eafb15",
"url": "https://github.com/status-im/nim-secp256k1",
"downloadMethod": "git",
"dependencies": [
"stew",
"results",
"nimcrypto"
],
"checksums": {
"sha1": "6618ef9de17121846a8c1d0317026b0ce8584e10"
}
},
"eth": {
"version": "0.9.0",
"vcsRevision": "d9135e6c3c5d6d819afdfb566aa8d958756b73a8",
"url": "https://github.com/status-im/nim-eth",
"downloadMethod": "git",
"dependencies": [
"nimcrypto",
"stint",
"secp256k1",
"chronos",
"chronicles",
"stew",
"nat_traversal",
"metrics",
"sqlite3_abi",
"confutils",
"testutils",
"unittest2",
"results",
"minilru",
"snappy"
],
"checksums": {
"sha1": "2e01b0cfff9523d110562af70d19948280f8013e"
}
},
"web3": {
"version": "0.8.0",
"vcsRevision": "cdfe5601d2812a58e54faf53ee634452d01e5918",
"url": "https://github.com/status-im/nim-web3",
"downloadMethod": "git",
"dependencies": [
"chronicles",
"chronos",
"bearssl",
"eth",
"faststreams",
"json_rpc",
"serialization",
"json_serialization",
"nimcrypto",
"stew",
"stint",
"results"
],
"checksums": {
"sha1": "26a112af032ef1536f97da2ca7364af618a11b80"
}
},
"dnsdisc": {
"version": "0.1.0",
"vcsRevision": "203abd2b3e758e0ea3ae325769b20a7e1bcd1010",
"url": "https://github.com/status-im/nim-dnsdisc",
"downloadMethod": "git",
"dependencies": [
"bearssl",
"chronicles",
"chronos",
"eth",
"secp256k1",
"stew",
"testutils",
"unittest2",
"nimcrypto",
"results"
],
"checksums": {
"sha1": "c8aeb7a29b378d9ed5201c8f0273000b46552e26"
}
},
"taskpools": {
"version": "0.1.0",
"vcsRevision": "9e8ccc754631ac55ac2fd495e167e74e86293edb",
"url": "https://github.com/status-im/nim-taskpools",
"downloadMethod": "git",
"dependencies": [],
"checksums": {
"sha1": "09e1b2fdad55b973724d61227971afc0df0b7a81"
}
},
"ffi": {
"version": "0.1.3",
"vcsRevision": "06111de155253b34e47ed2aaed1d61d08d62cc1b",
"url": "https://github.com/logos-messaging/nim-ffi",
"downloadMethod": "git",
"dependencies": [
"chronos",
"chronicles",
"taskpools"
],
"checksums": {
"sha1": "6f9d49375ea1dc71add55c72ac80a808f238e5b0"
}
}
},
"tasks": {}
}

View File

@ -1,35 +0,0 @@
# Usage
## Shell
A development shell can be started using:
```sh
nix develop
```
## Building
To build nwaku you can use:
```sh
nix build '.?submodules=1#default'
```
The `?submodules=1` part should eventually not be necessary.
For more details see:
https://github.com/NixOS/nix/issues/4423
It can be also done without even cloning the repo:
```sh
nix build 'git+https://github.com/waku-org/nwaku?submodules=1#'
```
## Running
```sh
nix run 'git+https://github.com/waku-org/nwaku?submodules=1#'
```
## Testing
```sh
nix flake check ".?submodules=1#"
```

View File

@ -1,12 +0,0 @@
{ pkgs ? import <nixpkgs> { } }:
let
  nbsTools = pkgs.callPackage ./tools.nix { };
  # koch.nim inside the vendored Nim compiler pins the checksums commit.
  kochFile = ../vendor/nimbus-build-system/vendor/Nim/koch.nim;
in
pkgs.fetchFromGitHub {
  owner = "nim-lang";
  repo = "checksums";
  # Extract the pinned commit hash straight from the compiler sources.
  rev = nbsTools.findKeyValue "^ +ChecksumsStableCommit = \"([a-f0-9]+)\".*$" kochFile;
  # WARNING: Requires manual updates when Nim compiler version changes.
  hash = "sha256-JZhWqn4SrAgNw/HLzBK0rrj3WzvJ3Tv1nuDMn83KoYY=";
}

View File

@ -1,12 +0,0 @@
{ pkgs ? import <nixpkgs> { } }:
let
  nbsTools = pkgs.callPackage ./tools.nix { };
  # build_config.txt of the vendored Nim compiler pins the csources commit.
  buildConfigFile = ../vendor/nimbus-build-system/vendor/Nim/config/build_config.txt;
in
pkgs.fetchFromGitHub {
  owner = "nim-lang";
  repo = "csources_v2";
  # Extract the pinned commit hash straight from the compiler sources.
  rev = nbsTools.findKeyValue "^nim_csourcesHash=([a-f0-9]+)$" buildConfigFile;
  # WARNING: Requires manual updates when Nim compiler version changes.
  hash = "sha256-UCLtoxOcGYjBdvHx7A47x6FjLMi6VZqpSs65MN7fpBs=";
}

View File

@ -3,8 +3,6 @@
src ? ../.,
targets ? ["libwaku-android-arm64"],
verbosity ? 1,
useSystemNim ? true,
quickAndDirty ? true,
stableSystems ? [
"x86_64-linux" "aarch64-linux"
],
@ -12,9 +10,6 @@
zerokitRln,
}:
assert pkgs.lib.assertMsg ((src.submodules or true) == true)
"Unable to build without submodules. Append '?submodules=1#' to the URI.";
let
inherit (pkgs) stdenv lib writeScriptBin callPackage;
@ -28,73 +23,50 @@ let
copyWakunode2 = lib.elem "wakunode2" targets;
hasKnownInstallTarget = copyLibwaku || copyLiblogosdelivery || copyWakunode2;
nimbleDeps = callPackage ./deps.nix {
inherit src version revision;
};
in stdenv.mkDerivation {
pname = "logos-messaging-nim";
pname = "logos-delivery";
inherit src;
version = "${version}-${revision}";
inherit src;
env = {
ANDROID_SDK_ROOT="${pkgs.androidPkgs.sdk}";
ANDROID_NDK_HOME="${pkgs.androidPkgs.ndk}";
NIMFLAGS = "-d:disableMarchNative -d:git_revision_override=${revision}";
};
# Runtime dependencies
buildInputs = with pkgs; [
openssl gmp zip
openssl gmp zip bash nim nimble cacert
];
# Dependencies that should only exist in the build environment.
nativeBuildInputs = let
# Fix for Nim compiler calling 'git rev-parse' and 'lsb_release'.
fakeGit = writeScriptBin "git" "echo ${version}";
in with pkgs; [
cmake which zerokitRln nim-unwrapped-2_2 fakeGit
cmake which zerokitRln fakeGit nimbleDeps cargo nimble nim cacert
] ++ lib.optionals stdenv.isDarwin [
pkgs.darwin.cctools gcc # Necessary for libbacktrace
];
# Environment variables required for Android builds
ANDROID_SDK_ROOT = "${pkgs.androidPkgs.sdk}";
ANDROID_NDK_HOME = "${pkgs.androidPkgs.ndk}";
NIMFLAGS = "-d:disableMarchNative -d:git_revision_override=${revision}";
XDG_CACHE_HOME = "/tmp";
makeFlags = targets ++ [
"V=${toString verbosity}"
"QUICK_AND_DIRTY_COMPILER=${if quickAndDirty then "1" else "0"}"
"QUICK_AND_DIRTY_NIMBLE=${if quickAndDirty then "1" else "0"}"
"USE_SYSTEM_NIM=${if useSystemNim then "1" else "0"}"
"LIBRLN_FILE=${zerokitRln}/lib/librln.${if abidir != null then "so" else "a"}"
"POSTGRES=1"
];
configurePhase = ''
patchShebangs . vendor/nimbus-build-system > /dev/null
export HOME=$TMPDIR/myhome
mkdir -p $HOME
export NIMBLE_DIR=$NIX_BUILD_TOP/nimbledeps
cp -r ${nimbleDeps}/nimbledeps $NIMBLE_DIR
cp ${nimbleDeps}/nimble.paths ./
chmod 775 -R $NIMBLE_DIR
# Fix relative paths to absolute paths
sed -i "s|./nimbledeps|$NIMBLE_DIR|g" nimble.paths
# build_nim.sh guards "rm -rf dist/checksums" with NIX_BUILD_TOP != "/build",
# but on macOS the nix sandbox uses /private/tmp/... so the check fails and
# dist/checksums (provided via preBuild) gets deleted. Fix the check to skip
# the removal whenever NIX_BUILD_TOP is set (i.e. any nix build).
substituteInPlace vendor/nimbus-build-system/scripts/build_nim.sh \
--replace 'if [[ "''${NIX_BUILD_TOP}" != "/build" ]]; then' \
'if [[ -z "''${NIX_BUILD_TOP}" ]]; then'
make nimbus-build-system-paths
make nimbus-build-system-nimble-dir
'';
# For the Nim v2.2.4 built with NBS we added sat and zippy
preBuild = lib.optionalString (!useSystemNim) ''
pushd vendor/nimbus-build-system/vendor/Nim
mkdir dist
mkdir -p dist/nimble/vendor/sat
mkdir -p dist/nimble/vendor/checksums
mkdir -p dist/nimble/vendor/zippy
cp -r ${callPackage ./nimble.nix {}}/. dist/nimble
cp -r ${callPackage ./checksums.nix {}}/. dist/checksums
cp -r ${callPackage ./csources.nix {}}/. csources_v2
cp -r ${callPackage ./sat.nix {}}/. dist/nimble/vendor/sat
cp -r ${callPackage ./checksums.nix {}}/. dist/nimble/vendor/checksums
cp -r ${callPackage ./zippy.nix {}}/. dist/nimble/vendor/zippy
chmod 777 -R dist/nimble csources_v2
popd
'';
installPhase = if abidir != null then ''
@ -141,8 +113,8 @@ in stdenv.mkDerivation {
'';
meta = with pkgs.lib; {
description = "NWaku derivation to build libwaku for mobile targets using Android NDK and Rust.";
homepage = "https://github.com/status-im/nwaku";
description = "Logos-message-delivery derivation.";
homepage = "https://github.com/logos-messaging/logos-messaging-nim";
license = licenses.mit;
platforms = stableSystems;
};

61
nix/deps.nix Normal file
View File

@ -0,0 +1,61 @@
{ pkgs, stdenv, src, version, revision }:
# Fixed-output derivation that pre-fetches all Nimble dependencies so the
# main build can run offline. NOTE(review): the output is hash-pinned;
# any dependency change requires updating outputHash below.
stdenv.mkDerivation {
pname = "logos-delivery-nimble-deps";
version = "${version}-${revision}";
inherit src;
# Tools needed only while resolving/installing Nimble packages.
nativeBuildInputs = with pkgs; [
jq rsync git nim nimble cacert moreutils xz
];
# Point Nimble's cache and package dir inside the sandboxed build tree.
configurePhase = ''
export XDG_CACHE_HOME=$TMPDIR
export NIMBLE_DIR=$NIX_BUILD_TOP/nimbledir
export HOME=$TMPDIR
'';
buildPhase = ''
nimble --version
nimble --verbose --localdeps setup
nimble --silent --localdeps install -y --depsOnly
'';
# Copy only the file types the compiler needs; keeps the output (and so
# the fixed hash) independent of incidental files in fetched packages.
installPhase = ''
mkdir -p $out/nimbledeps
cp nimble.paths $out/nimble.paths
rsync -ra \
--prune-empty-dirs \
--include='*/' \
--include='*.json' \
--include='*.nim' \
--include='*.nimble' \
--include='*.c' \
--include='*.h' \
--include='*.S' \
--include='*.cc' \
--include='*.inc' \
--include='*.a' \
--exclude='*' \
$NIMBLE_DIR/pkgs2 $out/nimbledeps
'';
# Normalize nondeterministic bits so the fixed-output hash is reproducible.
fixupPhase = ''
# Replace build path with deterministic $out.
sed "s|$NIMBLE_DIR|./nimbledeps|g" $out/nimble.paths \
| sort | sponge $out/nimble.paths
# Nimble does not maintain order of files list.
for META_FILE in $(find $out -name nimblemeta.json); do
jq '.metaData.files |= sort' $META_FILE | sponge $META_FILE
done
'';
# Make this a fixed-output derivation to allow internet access for Nimble.
outputHash = "sha256-hyH53xPwTJlAwjTVpuZkV4nwLfrKY7BXnZkjsAyt/1w=";
outputHashAlgo = "sha256";
outputHashMode = "recursive";
}

View File

@ -1,12 +0,0 @@
{ pkgs ? import <nixpkgs> { } }:
let
  nbsTools = pkgs.callPackage ./tools.nix { };
  # koch.nim inside the vendored Nim compiler pins the nimble commit.
  kochFile = ../vendor/nimbus-build-system/vendor/Nim/koch.nim;
in
pkgs.fetchFromGitHub {
  owner = "nim-lang";
  repo = "nimble";
  # Extract the pinned commit hash straight from the compiler sources.
  rev = nbsTools.findKeyValue "^ +NimbleStableCommit = \"([a-f0-9]+)\".*$" kochFile;
  # WARNING: Requires manual updates when Nim compiler version changes.
  hash = "sha256-8iutVgNzDtttZ7V+7S11KfLEuwhKA9TsgS51mlUI08k=";
}

View File

@ -10,8 +10,8 @@
androidenv.composeAndroidPackages {
cmdLineToolsVersion = "9.0";
toolsVersion = "26.1.1";
platformToolsVersion = "34.0.5";
buildToolsVersions = [ "34.0.0" ];
platformToolsVersion = "36.0.2";
buildToolsVersions = [ "36.0.0" ];
platformVersions = [ "34" ];
cmakeVersions = [ "3.22.1" ];
ndkVersion = "27.2.12479018";

View File

@ -1,13 +0,0 @@
{ pkgs ? import <nixpkgs> { } }:
let
  tools = pkgs.callPackage ./tools.nix {};
  # koch.nim inside the vendored Nim compiler pins the sat commit.
  sourceFile = ../vendor/nimbus-build-system/vendor/Nim/koch.nim;
in pkgs.fetchFromGitHub {
  owner = "nim-lang";
  repo = "sat";
  # Extract the pinned commit hash straight from the compiler sources.
  rev = tools.findKeyValue "^ +SatStableCommit = \"([a-f0-9]+)\".*$" sourceFile;
  # WARNING: Requires manual updates when Nim compiler version changes.
  hash = "sha256-JFrrSV+mehG0gP7NiQ8hYthL0cjh44HNbXfuxQNhq7c=";
}

View File

@ -1,9 +0,0 @@
{ pkgs }:
let
  # Pinned commit of guzba/zippy vendored into nimble's build.
  zippyRev = "a99f6a7d8a8e3e0213b3cad0daf0ea974bf58e3f";
in
pkgs.fetchFromGitHub {
  owner = "guzba";
  repo = "zippy";
  rev = zippyRev;
  # WARNING: Requires manual updates when Nim compiler version changes.
  hash = "sha256-e2ma2Oyp0dlNx8pJsdZl5o5KnaoAX87tqfY0RLG3DZs=";
}

View File

@ -1,82 +0,0 @@
#!/usr/bin/env bash
# Generates nix/submodules.json from .gitmodules and git ls-tree.
# This allows Nix to fetch all git submodules without requiring
# locally initialized submodules or the '?submodules=1' URI flag.
#
# Usage: ./scripts/generate_nix_submodules.sh
#
# Run this script after:
#  - Adding/removing submodules
#  - Updating submodule commits (e.g. after 'make update')
#  - Any change to .gitmodules
#
# Compatible with macOS bash 3.x (no associative arrays).
set -euo pipefail

REPO_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
OUTPUT="${REPO_ROOT}/nix/submodules.json"
cd "$REPO_ROOT"

# Both temp files hold tab-separated (path, value) records.
TMP_URLS=$(mktemp)
TMP_REVS=$(mktemp)
trap 'rm -f "$TMP_URLS" "$TMP_REVS"' EXIT

# Parse .gitmodules: extract (path, url) pairs.
current_path=""
while IFS= read -r line; do
  case "$line" in
    *"path = "*)
      current_path="${line#*path = }"
      ;;
    *"url = "*)
      if [ -n "$current_path" ]; then
        url="${line#*url = }"
        url="${url%/}"
        printf '%s\t%s\n' "$current_path" "$url" >> "$TMP_URLS"
        current_path=""
      fi
      ;;
  esac
done < .gitmodules

# Get pinned commit hashes from the git tree ('commit' = gitlink entry).
git ls-tree HEAD vendor/ | while IFS= read -r tree_line; do
  type=$(echo "$tree_line" | awk '{print $2}')
  hash=$(echo "$tree_line" | awk '{print $3}')
  path=$(echo "$tree_line" | awk '{print $4}')
  if [ "$type" = "commit" ]; then
    path="${path%/}"
    printf '%s\t%s\n' "$path" "$hash" >> "$TMP_REVS"
  fi
done

# Generate JSON by joining urls and revs on path.
printf '[\n' > "$OUTPUT"
first=true
sort "$TMP_URLS" | while IFS="$(printf '\t')" read -r path url; do
  # BUGFIX: records in TMP_REVS are tab-separated, but the lookup used to
  # grep for "path<space>" and therefore never matched, so every submodule
  # was skipped. Use an exact first-field awk match instead (also immune
  # to regex metacharacters in the path).
  rev=$(awk -F'\t' -v p="$path" '$1 == p { print $2; exit }' "$TMP_REVS")
  if [ -z "$rev" ]; then
    echo "WARNING: No commit hash found for submodule '$path', skipping" >&2
    continue
  fi
  if [ "$first" = true ]; then
    first=false
  else
    printf '  ,\n' >> "$OUTPUT"
  fi
  printf '  {\n    "path": "%s",\n    "url": "%s",\n    "rev": "%s"\n  }\n' \
    "$path" "$url" "$rev" >> "$OUTPUT"
done
printf ']\n' >> "$OUTPUT"

# BUGFIX: 'grep -c' already prints 0 (while exiting non-zero) on no match,
# so '|| echo 0' appended a second "0" line to $count; '|| true' only
# placates 'set -e'.
count=$(grep -c '"path"' "$OUTPUT" || true)
echo "Generated $OUTPUT with $count submodule entries"

View File

@ -87,7 +87,7 @@ proc waitForEvents(
return await allFutures(
manager.sentFuture, manager.propagatedFuture, manager.errorFuture
)
.withTimeout(timeout)
.withTimeout(timeout)
proc outcomes(manager: SendEventListenerManager): set[SendEventOutcome] =
if manager.sentFuture.completed():

View File

@ -126,12 +126,11 @@ suite "Entry Nodes Classification":
suite "Entry Nodes Processing":
test "Process mixed entry nodes":
let entryNodes =
@[
"enrtree://AIRVQ5DDA4FFWLRBCHJWUWOO6X6S4ZTZ5B667LQ6AJU6PEYDLRD5O@sandbox.waku.nodes.status.im",
"/ip4/127.0.0.1/tcp/60000/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYc",
"enr:-QESuEC1p_s3xJzAC_XlOuuNrhVUETmfhbm1wxRGis0f7DlqGSw2FM-p2Vn7gmfkTTnAe8Ys2cgGBN8ufJnvzKQFZqFMBgmlkgnY0iXNlY3AyNTZrMaEDS8-D878DrdbNwcuY-3p1qdDp5MOoCurhdsNPJTXZ3c5g3RjcIJ2X4N1ZHCCd2g",
]
let entryNodes = @[
"enrtree://AIRVQ5DDA4FFWLRBCHJWUWOO6X6S4ZTZ5B667LQ6AJU6PEYDLRD5O@sandbox.waku.nodes.status.im",
"/ip4/127.0.0.1/tcp/60000/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYc",
"enr:-QESuEC1p_s3xJzAC_XlOuuNrhVUETmfhbm1wxRGis0f7DlqGSw2FM-p2Vn7gmfkTTnAe8Ys2cgGBN8ufJnvzKQFZqFMBgmlkgnY0iXNlY3AyNTZrMaEDS8-D878DrdbNwcuY-3p1qdDp5MOoCurhdsNPJTXZ3c5g3RjcIJ2X4N1ZHCCd2g",
]
let result = processEntryNodes(entryNodes)
check:
@ -147,11 +146,10 @@ suite "Entry Nodes Processing":
staticNodes[0] == entryNodes[1] # multiaddr added to static
test "Process only ENRTree nodes":
let entryNodes =
@[
"enrtree://AIRVQ5DDA4FFWLRBCHJWUWOO6X6S4ZTZ5B667LQ6AJU6PEYDLRD5O@sandbox.waku.nodes.status.im",
"enrtree://ANOTHER_TREE@example.com",
]
let entryNodes = @[
"enrtree://AIRVQ5DDA4FFWLRBCHJWUWOO6X6S4ZTZ5B667LQ6AJU6PEYDLRD5O@sandbox.waku.nodes.status.im",
"enrtree://ANOTHER_TREE@example.com",
]
let result = processEntryNodes(entryNodes)
check:
@ -165,11 +163,10 @@ suite "Entry Nodes Processing":
enrTreeUrls == entryNodes
test "Process only multiaddresses":
let entryNodes =
@[
"/ip4/127.0.0.1/tcp/60000/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYc",
"/ip4/192.168.1.1/tcp/60001/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYd",
]
let entryNodes = @[
"/ip4/127.0.0.1/tcp/60000/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYc",
"/ip4/192.168.1.1/tcp/60001/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYd",
]
let result = processEntryNodes(entryNodes)
check:
@ -183,11 +180,10 @@ suite "Entry Nodes Processing":
staticNodes == entryNodes
test "Process only ENR nodes":
let entryNodes =
@[
"enr:-QESuEC1p_s3xJzAC_XlOuuNrhVUETmfhbm1wxRGis0f7DlqGSw2FM-p2Vn7gmfkTTnAe8Ys2cgGBN8ufJnvzKQFZqFMBgmlkgnY0iXNlY3AyNTZrMaEDS8-D878DrdbNwcuY-3p1qdDp5MOoCurhdsNPJTXZ3c5g3RjcIJ2X4N1ZHCCd2g",
"enr:-QEkuECnZ3IbVAgkOzv-QLnKC4dRKAPRY80m1-R7G8jZ7yfT3ipEfBrhKN7ARcQgQ-vg-h40AQzyvAkPYlHPaFKk6u9MBgmlkgnY0iXNlY3AyNTZrMaEDk49D8JjMSns4p1XVNBvJquOUzT4PENSJknkROspfAFGg3RjcIJ2X4N1ZHCCd2g",
]
let entryNodes = @[
"enr:-QESuEC1p_s3xJzAC_XlOuuNrhVUETmfhbm1wxRGis0f7DlqGSw2FM-p2Vn7gmfkTTnAe8Ys2cgGBN8ufJnvzKQFZqFMBgmlkgnY0iXNlY3AyNTZrMaEDS8-D878DrdbNwcuY-3p1qdDp5MOoCurhdsNPJTXZ3c5g3RjcIJ2X4N1ZHCCd2g",
"enr:-QEkuECnZ3IbVAgkOzv-QLnKC4dRKAPRY80m1-R7G8jZ7yfT3ipEfBrhKN7ARcQgQ-vg-h40AQzyvAkPYlHPaFKk6u9MBgmlkgnY0iXNlY3AyNTZrMaEDk49D8JjMSns4p1XVNBvJquOUzT4PENSJknkROspfAFGg3RjcIJ2X4N1ZHCCd2g",
]
let result = processEntryNodes(entryNodes)
check:
@ -224,13 +220,12 @@ suite "Entry Nodes Processing":
"Entry node error: Unrecognized entry node format. Must start with 'enrtree:', 'enr:', or '/'"
test "Process different multiaddr formats":
let entryNodes =
@[
"/ip4/127.0.0.1/tcp/60000/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYc",
"/ip6/::1/tcp/60000/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYd",
"/dns4/example.com/tcp/60000/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYe",
"/dns/node.example.org/tcp/443/wss/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYf",
]
let entryNodes = @[
"/ip4/127.0.0.1/tcp/60000/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYc",
"/ip6/::1/tcp/60000/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYd",
"/dns4/example.com/tcp/60000/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYe",
"/dns/node.example.org/tcp/443/wss/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYf",
]
let result = processEntryNodes(entryNodes)
check:
@ -244,13 +239,12 @@ suite "Entry Nodes Processing":
staticNodes == entryNodes
test "Process with duplicate entries":
let entryNodes =
@[
"/ip4/127.0.0.1/tcp/60000/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYc",
"/ip4/127.0.0.1/tcp/60000/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYc",
"enrtree://AIRVQ5DDA4FFWLRBCHJWUWOO6X6S4ZTZ5B667LQ6AJU6PEYDLRD5O@sandbox.waku.nodes.status.im",
"enrtree://AIRVQ5DDA4FFWLRBCHJWUWOO6X6S4ZTZ5B667LQ6AJU6PEYDLRD5O@sandbox.waku.nodes.status.im",
]
let entryNodes = @[
"/ip4/127.0.0.1/tcp/60000/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYc",
"/ip4/127.0.0.1/tcp/60000/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYc",
"enrtree://AIRVQ5DDA4FFWLRBCHJWUWOO6X6S4ZTZ5B667LQ6AJU6PEYDLRD5O@sandbox.waku.nodes.status.im",
"enrtree://AIRVQ5DDA4FFWLRBCHJWUWOO6X6S4ZTZ5B667LQ6AJU6PEYDLRD5O@sandbox.waku.nodes.status.im",
]
let result = processEntryNodes(entryNodes)
check:

View File

@ -88,11 +88,10 @@ suite "LibWaku Conf - toWakuConf":
test "Bootstrap nodes configuration":
## Given
let entryNodes =
@[
"enr:-QESuEC1p_s3xJzAC_XlOuuNrhVUETmfhbm1wxRGis0f7DlqGSw2FM-p2Vn7gmfkTTnAe8Ys2cgGBN8ufJnvzKQFZqFMBgmlkgnY0iXNlY3AyNTZrMaEDS8-D878DrdbNwcuY-3p1qdDp5MOoCurhdsNPJTXZ3c5g3RjcIJ2X4N1ZHCCd2g",
"enr:-QEkuECnZ3IbVAgkOzv-QLnKC4dRKAPRY80m1-R7G8jZ7yfT3ipEfBrhKN7ARcQgQ-vg-h40AQzyvAkPYlHPaFKk6u9MBgmlkgnY0iXNlY3AyNTZrMaEDk49D8JjMSns4p1XVNBvJquOUzT4PENSJknkROspfAFGg3RjcIJ2X4N1ZHCCd2g",
]
let entryNodes = @[
"enr:-QESuEC1p_s3xJzAC_XlOuuNrhVUETmfhbm1wxRGis0f7DlqGSw2FM-p2Vn7gmfkTTnAe8Ys2cgGBN8ufJnvzKQFZqFMBgmlkgnY0iXNlY3AyNTZrMaEDS8-D878DrdbNwcuY-3p1qdDp5MOoCurhdsNPJTXZ3c5g3RjcIJ2X4N1ZHCCd2g",
"enr:-QEkuECnZ3IbVAgkOzv-QLnKC4dRKAPRY80m1-R7G8jZ7yfT3ipEfBrhKN7ARcQgQ-vg-h40AQzyvAkPYlHPaFKk6u9MBgmlkgnY0iXNlY3AyNTZrMaEDk49D8JjMSns4p1XVNBvJquOUzT4PENSJknkROspfAFGg3RjcIJ2X4N1ZHCCd2g",
]
let libConf = NodeConfig.init(
mode = Core,
protocolsConfig = ProtocolsConfig.init(
@ -113,11 +112,10 @@ suite "LibWaku Conf - toWakuConf":
test "Static store nodes configuration":
## Given
let staticStoreNodes =
@[
"/ip4/127.0.0.1/tcp/60000/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYc",
"/ip4/192.168.1.1/tcp/60001/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYd",
]
let staticStoreNodes = @[
"/ip4/127.0.0.1/tcp/60000/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYc",
"/ip4/192.168.1.1/tcp/60001/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYd",
]
let nodeConf = NodeConfig.init(
protocolsConfig = ProtocolsConfig.init(
entryNodes = @[], staticStoreNodes = staticStoreNodes, clusterId = 1
@ -199,14 +197,12 @@ suite "LibWaku Conf - toWakuConf":
let nodeConfig = NodeConfig.init(
mode = Core,
protocolsConfig = ProtocolsConfig.init(
entryNodes =
@[
"enr:-QESuEC1p_s3xJzAC_XlOuuNrhVUETmfhbm1wxRGis0f7DlqGSw2FM-p2Vn7gmfkTTnAe8Ys2cgGBN8ufJnvzKQFZqFMBgmlkgnY0iXNlY3AyNTZrMaEDS8-D878DrdbNwcuY-3p1qdDp5MOoCurhdsNPJTXZ3c5g3RjcIJ2X4N1ZHCCd2g"
],
staticStoreNodes =
@[
"/ip4/127.0.0.1/tcp/60000/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYc"
],
entryNodes = @[
"enr:-QESuEC1p_s3xJzAC_XlOuuNrhVUETmfhbm1wxRGis0f7DlqGSw2FM-p2Vn7gmfkTTnAe8Ys2cgGBN8ufJnvzKQFZqFMBgmlkgnY0iXNlY3AyNTZrMaEDS8-D878DrdbNwcuY-3p1qdDp5MOoCurhdsNPJTXZ3c5g3RjcIJ2X4N1ZHCCd2g"
],
staticStoreNodes = @[
"/ip4/127.0.0.1/tcp/60000/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYc"
],
clusterId = 99,
autoShardingConfig = AutoShardingConfig(numShardsInCluster: 12),
messageValidation = MessageValidation(
@ -270,11 +266,10 @@ suite "LibWaku Conf - toWakuConf":
test "NodeConfig with mixed entry nodes (integration test)":
## Given
let entryNodes =
@[
"enrtree://AIRVQ5DDA4FFWLRBCHJWUWOO6X6S4ZTZ5B667LQ6AJU6PEYDLRD5O@sandbox.waku.nodes.status.im",
"/ip4/127.0.0.1/tcp/60000/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYc",
]
let entryNodes = @[
"enrtree://AIRVQ5DDA4FFWLRBCHJWUWOO6X6S4ZTZ5B667LQ6AJU6PEYDLRD5O@sandbox.waku.nodes.status.im",
"/ip4/127.0.0.1/tcp/60000/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYc",
]
let nodeConfig = NodeConfig.init(
mode = Core,
@ -304,8 +299,7 @@ suite "LibWaku Conf - toWakuConf":
suite "NodeConfig JSON - complete format":
test "Full NodeConfig from complete JSON with field validation":
## Given
let jsonStr =
"""
let jsonStr = """
{
"mode": "Core",
"protocolsConfig": {
@ -362,8 +356,7 @@ suite "NodeConfig JSON - complete format":
test "Full NodeConfig with RlnConfig present":
## Given
let jsonStr =
"""
let jsonStr = """
{
"mode": "Edge",
"protocolsConfig": {
@ -408,10 +401,9 @@ suite "NodeConfig JSON - complete format":
mode = Edge,
protocolsConfig = ProtocolsConfig.init(
entryNodes = @["enrtree://TREE@example.com"],
staticStoreNodes =
@[
"/ip4/1.2.3.4/tcp/80/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYc"
],
staticStoreNodes = @[
"/ip4/1.2.3.4/tcp/80/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYc"
],
clusterId = 42,
autoShardingConfig = AutoShardingConfig(numShardsInCluster: 16),
messageValidation = MessageValidation(
@ -515,8 +507,7 @@ suite "NodeConfig JSON - partial format with defaults":
test "ProtocolsConfig partial - optional fields get defaults":
## Given — only entryNodes and clusterId provided
let jsonStr =
"""
let jsonStr = """
{
"protocolsConfig": {
"entryNodes": ["enrtree://X@y.com"],
@ -546,8 +537,7 @@ suite "NodeConfig JSON - partial format with defaults":
test "MessageValidation partial - rlnConfig omitted defaults to none":
## Given
let jsonStr =
"""
let jsonStr = """
{
"protocolsConfig": {
"entryNodes": [],
@ -574,8 +564,7 @@ suite "NodeConfig JSON - partial format with defaults":
test "logLevel and logFormat omitted use defaults":
## Given
let jsonStr =
"""
let jsonStr = """
{
"mode": "Core",
"protocolsConfig": {
@ -600,8 +589,7 @@ suite "NodeConfig JSON - partial format with defaults":
suite "NodeConfig JSON - unsupported fields raise errors":
test "Unknown field at NodeConfig level raises":
let jsonStr =
"""
let jsonStr = """
{
"mode": "Core",
"unknownTopLevel": true
@ -616,8 +604,7 @@ suite "NodeConfig JSON - unsupported fields raise errors":
check raised
test "Typo in NodeConfig field name raises":
let jsonStr =
"""
let jsonStr = """
{
"modes": "Core"
}
@ -631,8 +618,7 @@ suite "NodeConfig JSON - unsupported fields raise errors":
check raised
test "Unknown field in ProtocolsConfig raises":
let jsonStr =
"""
let jsonStr = """
{
"protocolsConfig": {
"entryNodes": [],
@ -655,8 +641,7 @@ suite "NodeConfig JSON - unsupported fields raise errors":
check raised
test "Unknown field in NetworkingConfig raises":
let jsonStr =
"""
let jsonStr = """
{
"protocolsConfig": {
"entryNodes": [],
@ -679,8 +664,7 @@ suite "NodeConfig JSON - unsupported fields raise errors":
check raised
test "Unknown field in MessageValidation raises":
let jsonStr =
"""
let jsonStr = """
{
"protocolsConfig": {
"entryNodes": [],
@ -706,8 +690,7 @@ suite "NodeConfig JSON - unsupported fields raise errors":
check raised
test "Unknown field in RlnConfig raises":
let jsonStr =
"""
let jsonStr = """
{
"protocolsConfig": {
"entryNodes": [],
@ -738,8 +721,7 @@ suite "NodeConfig JSON - unsupported fields raise errors":
check raised
test "Unknown field in AutoShardingConfig raises":
let jsonStr =
"""
let jsonStr = """
{
"protocolsConfig": {
"entryNodes": [],
@ -766,8 +748,7 @@ suite "NodeConfig JSON - unsupported fields raise errors":
suite "NodeConfig JSON - missing required fields":
test "Missing 'entryNodes' in ProtocolsConfig":
let jsonStr =
"""
let jsonStr = """
{
"protocolsConfig": {
"clusterId": 1
@ -788,8 +769,7 @@ suite "NodeConfig JSON - missing required fields":
check raised
test "Missing 'clusterId' in ProtocolsConfig":
let jsonStr =
"""
let jsonStr = """
{
"protocolsConfig": {
"entryNodes": []
@ -810,8 +790,7 @@ suite "NodeConfig JSON - missing required fields":
check raised
test "Missing required fields in NetworkingConfig":
let jsonStr =
"""
let jsonStr = """
{
"protocolsConfig": {
"entryNodes": [],
@ -831,8 +810,7 @@ suite "NodeConfig JSON - missing required fields":
check raised
test "Missing 'numShardsInCluster' in AutoShardingConfig":
let jsonStr =
"""
let jsonStr = """
{
"protocolsConfig": {
"entryNodes": [],
@ -855,8 +833,7 @@ suite "NodeConfig JSON - missing required fields":
check raised
test "Missing required fields in RlnConfig":
let jsonStr =
"""
let jsonStr = """
{
"protocolsConfig": {
"entryNodes": [],
@ -884,8 +861,7 @@ suite "NodeConfig JSON - missing required fields":
check raised
test "Missing 'maxMessageSize' in MessageValidation":
let jsonStr =
"""
let jsonStr = """
{
"protocolsConfig": {
"entryNodes": [],
@ -927,8 +903,7 @@ suite "NodeConfig JSON - invalid values":
check raised
test "Wrong type for clusterId (string instead of number)":
let jsonStr =
"""
let jsonStr = """
{
"protocolsConfig": {
"entryNodes": [],
@ -960,8 +935,7 @@ suite "NodeConfig JSON - invalid values":
suite "NodeConfig JSON -> WakuConf integration":
test "Decoded config translates to valid WakuConf":
## Given
let jsonStr =
"""
let jsonStr = """
{
"mode": "Core",
"protocolsConfig": {

View File

@ -4,23 +4,22 @@ import std/strutils, results, stew/byteutils, testutils/unittests
import waku/common/base64
suite "Waku Common - stew base64 wrapper":
const TestData =
@[
# Test vectors from RFC 4648
# See: https://datatracker.ietf.org/doc/html/rfc4648#section-10
("", Base64String("")),
("f", Base64String("Zg==")),
("fo", Base64String("Zm8=")),
("foo", Base64String("Zm9v")),
("foob", Base64String("Zm9vYg==")),
("fooba", Base64String("Zm9vYmE=")),
("foobar", Base64String("Zm9vYmFy")),
const TestData = @[
# Test vectors from RFC 4648
# See: https://datatracker.ietf.org/doc/html/rfc4648#section-10
("", Base64String("")),
("f", Base64String("Zg==")),
("fo", Base64String("Zm8=")),
("foo", Base64String("Zm9v")),
("foob", Base64String("Zm9vYg==")),
("fooba", Base64String("Zm9vYmE=")),
("foobar", Base64String("Zm9vYmFy")),
# Custom test vectors
("\x01", Base64String("AQ==")),
("\x13", Base64String("Ew==")),
("\x01\x02\x03\x04", Base64String("AQIDBA==")),
]
# Custom test vectors
("\x01", Base64String("AQ==")),
("\x13", Base64String("Ew==")),
("\x01\x02\x03\x04", Base64String("AQIDBA==")),
]
for (plaintext, encoded) in TestData:
test "encode into base64 (" & escape(plaintext) & " -> \"" & string(encoded) & "\")":

View File

@ -45,11 +45,11 @@ static:
suite "RequestBroker macro (async mode)":
test "serves zero-argument providers":
check SimpleResponse
.setProvider(
proc(): Future[Result[SimpleResponse, string]] {.async.} =
ok(SimpleResponse(value: "hi"))
)
.isOk()
.setProvider(
proc(): Future[Result[SimpleResponse, string]] {.async.} =
ok(SimpleResponse(value: "hi"))
)
.isOk()
let res = waitFor SimpleResponse.request()
check res.isOk()
@ -65,12 +65,14 @@ suite "RequestBroker macro (async mode)":
test "serves input-based providers":
var seen: seq[string] = @[]
check KeyedResponse
.setProvider(
proc(key: string, subKey: int): Future[Result[KeyedResponse, string]] {.async.} =
seen.add(key)
ok(KeyedResponse(key: key, payload: key & "-payload+" & $subKey))
)
.isOk()
.setProvider(
proc(
key: string, subKey: int
): Future[Result[KeyedResponse, string]] {.async.} =
seen.add(key)
ok(KeyedResponse(key: key, payload: key & "-payload+" & $subKey))
)
.isOk()
let res = waitFor KeyedResponse.request("topic", 1)
check res.isOk()
@ -82,11 +84,13 @@ suite "RequestBroker macro (async mode)":
test "catches provider exception":
check KeyedResponse
.setProvider(
proc(key: string, subKey: int): Future[Result[KeyedResponse, string]] {.async.} =
raise newException(ValueError, "simulated failure")
)
.isOk()
.setProvider(
proc(
key: string, subKey: int
): Future[Result[KeyedResponse, string]] {.async.} =
raise newException(ValueError, "simulated failure")
)
.isOk()
let res = waitFor KeyedResponse.request("neglected", 11)
check res.isErr()
@ -101,18 +105,18 @@ suite "RequestBroker macro (async mode)":
test "supports both provider types simultaneously":
check DualResponse
.setProvider(
proc(): Future[Result[DualResponse, string]] {.async.} =
ok(DualResponse(note: "base", count: 1))
)
.isOk()
.setProvider(
proc(): Future[Result[DualResponse, string]] {.async.} =
ok(DualResponse(note: "base", count: 1))
)
.isOk()
check DualResponse
.setProvider(
proc(suffix: string): Future[Result[DualResponse, string]] {.async.} =
ok(DualResponse(note: "base" & suffix, count: suffix.len))
)
.isOk()
.setProvider(
proc(suffix: string): Future[Result[DualResponse, string]] {.async.} =
ok(DualResponse(note: "base" & suffix, count: suffix.len))
)
.isOk()
let noInput = waitFor DualResponse.request()
check noInput.isOk()
@ -127,11 +131,11 @@ suite "RequestBroker macro (async mode)":
test "clearProvider resets both entries":
check DualResponse
.setProvider(
proc(): Future[Result[DualResponse, string]] {.async.} =
ok(DualResponse(note: "temp", count: 0))
)
.isOk()
.setProvider(
proc(): Future[Result[DualResponse, string]] {.async.} =
ok(DualResponse(note: "temp", count: 0))
)
.isOk()
DualResponse.clearProvider()
let res = waitFor DualResponse.request()
@ -139,11 +143,11 @@ suite "RequestBroker macro (async mode)":
test "implicit zero-argument provider works by default":
check ImplicitResponse
.setProvider(
proc(): Future[Result[ImplicitResponse, string]] {.async.} =
ok(ImplicitResponse(note: "auto"))
)
.isOk()
.setProvider(
proc(): Future[Result[ImplicitResponse, string]] {.async.} =
ok(ImplicitResponse(note: "auto"))
)
.isOk()
let res = waitFor ImplicitResponse.request()
check res.isOk()
@ -158,18 +162,18 @@ suite "RequestBroker macro (async mode)":
test "no provider override":
check DualResponse
.setProvider(
proc(): Future[Result[DualResponse, string]] {.async.} =
ok(DualResponse(note: "base", count: 1))
)
.isOk()
.setProvider(
proc(): Future[Result[DualResponse, string]] {.async.} =
ok(DualResponse(note: "base", count: 1))
)
.isOk()
check DualResponse
.setProvider(
proc(suffix: string): Future[Result[DualResponse, string]] {.async.} =
ok(DualResponse(note: "base" & suffix, count: suffix.len))
)
.isOk()
.setProvider(
proc(suffix: string): Future[Result[DualResponse, string]] {.async.} =
ok(DualResponse(note: "base" & suffix, count: suffix.len))
)
.isOk()
let overrideProc = proc(): Future[Result[DualResponse, string]] {.async.} =
ok(DualResponse(note: "something else", count: 1))
@ -207,27 +211,27 @@ suite "RequestBroker macro (async mode)":
SimpleResponse.clearProvider()
check SimpleResponse
.setProvider(
proc(): Future[Result[SimpleResponse, string]] {.async.} =
ok(SimpleResponse(value: "default"))
)
.isOk()
.setProvider(
proc(): Future[Result[SimpleResponse, string]] {.async.} =
ok(SimpleResponse(value: "default"))
)
.isOk()
check SimpleResponse
.setProvider(
BrokerContext(0x11111111'u32),
proc(): Future[Result[SimpleResponse, string]] {.async.} =
ok(SimpleResponse(value: "one")),
)
.isOk()
.setProvider(
BrokerContext(0x11111111'u32),
proc(): Future[Result[SimpleResponse, string]] {.async.} =
ok(SimpleResponse(value: "one")),
)
.isOk()
check SimpleResponse
.setProvider(
BrokerContext(0x22222222'u32),
proc(): Future[Result[SimpleResponse, string]] {.async.} =
ok(SimpleResponse(value: "two")),
)
.isOk()
.setProvider(
BrokerContext(0x22222222'u32),
proc(): Future[Result[SimpleResponse, string]] {.async.} =
ok(SimpleResponse(value: "two")),
)
.isOk()
let defaultRes = waitFor SimpleResponse.request()
check defaultRes.isOk()
@ -246,12 +250,12 @@ suite "RequestBroker macro (async mode)":
check missing.error.contains("no provider registered for broker context")
check SimpleResponse
.setProvider(
BrokerContext(0x11111111'u32),
proc(): Future[Result[SimpleResponse, string]] {.async.} =
ok(SimpleResponse(value: "dup")),
)
.isErr()
.setProvider(
BrokerContext(0x11111111'u32),
proc(): Future[Result[SimpleResponse, string]] {.async.} =
ok(SimpleResponse(value: "dup")),
)
.isErr()
SimpleResponse.clearProvider()
@ -259,27 +263,33 @@ suite "RequestBroker macro (async mode)":
KeyedResponse.clearProvider()
check KeyedResponse
.setProvider(
proc(key: string, subKey: int): Future[Result[KeyedResponse, string]] {.async.} =
ok(KeyedResponse(key: "default-" & key, payload: $subKey))
)
.isOk()
.setProvider(
proc(
key: string, subKey: int
): Future[Result[KeyedResponse, string]] {.async.} =
ok(KeyedResponse(key: "default-" & key, payload: $subKey))
)
.isOk()
check KeyedResponse
.setProvider(
BrokerContext(0xABCDEF01'u32),
proc(key: string, subKey: int): Future[Result[KeyedResponse, string]] {.async.} =
ok(KeyedResponse(key: "k1-" & key, payload: "p" & $subKey)),
)
.isOk()
.setProvider(
BrokerContext(0xABCDEF01'u32),
proc(
key: string, subKey: int
): Future[Result[KeyedResponse, string]] {.async.} =
ok(KeyedResponse(key: "k1-" & key, payload: "p" & $subKey)),
)
.isOk()
check KeyedResponse
.setProvider(
BrokerContext(0xABCDEF02'u32),
proc(key: string, subKey: int): Future[Result[KeyedResponse, string]] {.async.} =
ok(KeyedResponse(key: "k2-" & key, payload: "q" & $subKey)),
)
.isOk()
.setProvider(
BrokerContext(0xABCDEF02'u32),
proc(
key: string, subKey: int
): Future[Result[KeyedResponse, string]] {.async.} =
ok(KeyedResponse(key: "k2-" & key, payload: "q" & $subKey)),
)
.isOk()
let d = waitFor KeyedResponse.request("topic", 7)
check d.isOk()
@ -343,11 +353,11 @@ static:
suite "RequestBroker macro (sync mode)":
test "serves zero-argument providers (sync)":
check SimpleResponseSync
.setProvider(
proc(): Result[SimpleResponseSync, string] =
ok(SimpleResponseSync(value: "hi"))
)
.isOk()
.setProvider(
proc(): Result[SimpleResponseSync, string] =
ok(SimpleResponseSync(value: "hi"))
)
.isOk()
let res = SimpleResponseSync.request()
check res.isOk()
@ -363,12 +373,12 @@ suite "RequestBroker macro (sync mode)":
test "serves input-based providers (sync)":
var seen: seq[string] = @[]
check KeyedResponseSync
.setProvider(
proc(key: string, subKey: int): Result[KeyedResponseSync, string] =
seen.add(key)
ok(KeyedResponseSync(key: key, payload: key & "-payload+" & $subKey))
)
.isOk()
.setProvider(
proc(key: string, subKey: int): Result[KeyedResponseSync, string] =
seen.add(key)
ok(KeyedResponseSync(key: key, payload: key & "-payload+" & $subKey))
)
.isOk()
let res = KeyedResponseSync.request("topic", 1)
check res.isOk()
@ -380,11 +390,11 @@ suite "RequestBroker macro (sync mode)":
test "catches provider exception (sync)":
check KeyedResponseSync
.setProvider(
proc(key: string, subKey: int): Result[KeyedResponseSync, string] =
raise newException(ValueError, "simulated failure")
)
.isOk()
.setProvider(
proc(key: string, subKey: int): Result[KeyedResponseSync, string] =
raise newException(ValueError, "simulated failure")
)
.isOk()
let res = KeyedResponseSync.request("neglected", 11)
check res.isErr()
@ -399,18 +409,18 @@ suite "RequestBroker macro (sync mode)":
test "supports both provider types simultaneously (sync)":
check DualResponseSync
.setProvider(
proc(): Result[DualResponseSync, string] =
ok(DualResponseSync(note: "base", count: 1))
)
.isOk()
.setProvider(
proc(): Result[DualResponseSync, string] =
ok(DualResponseSync(note: "base", count: 1))
)
.isOk()
check DualResponseSync
.setProvider(
proc(suffix: string): Result[DualResponseSync, string] =
ok(DualResponseSync(note: "base" & suffix, count: suffix.len))
)
.isOk()
.setProvider(
proc(suffix: string): Result[DualResponseSync, string] =
ok(DualResponseSync(note: "base" & suffix, count: suffix.len))
)
.isOk()
let noInput = DualResponseSync.request()
check noInput.isOk()
@ -425,11 +435,11 @@ suite "RequestBroker macro (sync mode)":
test "clearProvider resets both entries (sync)":
check DualResponseSync
.setProvider(
proc(): Result[DualResponseSync, string] =
ok(DualResponseSync(note: "temp", count: 0))
)
.isOk()
.setProvider(
proc(): Result[DualResponseSync, string] =
ok(DualResponseSync(note: "temp", count: 0))
)
.isOk()
DualResponseSync.clearProvider()
let res = DualResponseSync.request()
@ -437,11 +447,11 @@ suite "RequestBroker macro (sync mode)":
test "implicit zero-argument provider works by default (sync)":
check ImplicitResponseSync
.setProvider(
proc(): Result[ImplicitResponseSync, string] =
ok(ImplicitResponseSync(note: "auto"))
)
.isOk()
.setProvider(
proc(): Result[ImplicitResponseSync, string] =
ok(ImplicitResponseSync(note: "auto"))
)
.isOk()
let res = ImplicitResponseSync.request()
check res.isOk()
@ -456,11 +466,11 @@ suite "RequestBroker macro (sync mode)":
test "implicit zero-argument provider raises error (sync)":
check ImplicitResponseSync
.setProvider(
proc(): Result[ImplicitResponseSync, string] =
raise newException(ValueError, "simulated failure")
)
.isOk()
.setProvider(
proc(): Result[ImplicitResponseSync, string] =
raise newException(ValueError, "simulated failure")
)
.isOk()
let res = ImplicitResponseSync.request()
check res.isErr()
@ -472,19 +482,19 @@ suite "RequestBroker macro (sync mode)":
SimpleResponseSync.clearProvider()
check SimpleResponseSync
.setProvider(
proc(): Result[SimpleResponseSync, string] =
ok(SimpleResponseSync(value: "default"))
)
.isOk()
.setProvider(
proc(): Result[SimpleResponseSync, string] =
ok(SimpleResponseSync(value: "default"))
)
.isOk()
check SimpleResponseSync
.setProvider(
BrokerContext(0x10101010'u32),
proc(): Result[SimpleResponseSync, string] =
ok(SimpleResponseSync(value: "ten")),
)
.isOk()
.setProvider(
BrokerContext(0x10101010'u32),
proc(): Result[SimpleResponseSync, string] =
ok(SimpleResponseSync(value: "ten")),
)
.isOk()
let defaultRes = SimpleResponseSync.request()
check defaultRes.isOk()
@ -504,19 +514,19 @@ suite "RequestBroker macro (sync mode)":
KeyedResponseSync.clearProvider()
check KeyedResponseSync
.setProvider(
proc(key: string, subKey: int): Result[KeyedResponseSync, string] =
ok(KeyedResponseSync(key: "default-" & key, payload: $subKey))
)
.isOk()
.setProvider(
proc(key: string, subKey: int): Result[KeyedResponseSync, string] =
ok(KeyedResponseSync(key: "default-" & key, payload: $subKey))
)
.isOk()
check KeyedResponseSync
.setProvider(
BrokerContext(0xA0A0A0A0'u32),
proc(key: string, subKey: int): Result[KeyedResponseSync, string] =
ok(KeyedResponseSync(key: "k-" & key, payload: "p" & $subKey)),
)
.isOk()
.setProvider(
BrokerContext(0xA0A0A0A0'u32),
proc(key: string, subKey: int): Result[KeyedResponseSync, string] =
ok(KeyedResponseSync(key: "k-" & key, payload: "p" & $subKey)),
)
.isOk()
let d = KeyedResponseSync.request("topic", 2)
check d.isOk()
@ -576,11 +586,11 @@ RequestBroker(sync):
suite "RequestBroker macro (POD/external types)":
test "supports non-object response types (async)":
check PodResponse
.setProvider(
proc(): Future[Result[PodResponse, string]] {.async.} =
ok(PodResponse(123))
)
.isOk()
.setProvider(
proc(): Future[Result[PodResponse, string]] {.async.} =
ok(PodResponse(123))
)
.isOk()
let res = waitFor PodResponse.request()
check res.isOk()
@ -590,11 +600,11 @@ suite "RequestBroker macro (POD/external types)":
test "supports aliased external types (async)":
check ExternalAliasedResponse
.setProvider(
proc(): Future[Result[ExternalAliasedResponse, string]] {.async.} =
ok(ExternalAliasedResponse(ExternalDefinedTypeAsync(label: "ext")))
)
.isOk()
.setProvider(
proc(): Future[Result[ExternalAliasedResponse, string]] {.async.} =
ok(ExternalAliasedResponse(ExternalDefinedTypeAsync(label: "ext")))
)
.isOk()
let res = waitFor ExternalAliasedResponse.request()
check res.isOk()
@ -604,11 +614,11 @@ suite "RequestBroker macro (POD/external types)":
test "supports aliased external types (sync)":
check ExternalAliasedResponseSync
.setProvider(
proc(): Result[ExternalAliasedResponseSync, string] =
ok(ExternalAliasedResponseSync(ExternalDefinedTypeSync(label: "ext")))
)
.isOk()
.setProvider(
proc(): Result[ExternalAliasedResponseSync, string] =
ok(ExternalAliasedResponseSync(ExternalDefinedTypeSync(label: "ext")))
)
.isOk()
let res = ExternalAliasedResponseSync.request()
check res.isOk()
@ -618,32 +628,32 @@ suite "RequestBroker macro (POD/external types)":
test "distinct response types avoid overload ambiguity (sync)":
check DistinctStringResponseA
.setProvider(
proc(): Result[DistinctStringResponseA, string] =
ok(DistinctStringResponseA("a"))
)
.isOk()
.setProvider(
proc(): Result[DistinctStringResponseA, string] =
ok(DistinctStringResponseA("a"))
)
.isOk()
check DistinctStringResponseB
.setProvider(
proc(): Result[DistinctStringResponseB, string] =
ok(DistinctStringResponseB("b"))
)
.isOk()
.setProvider(
proc(): Result[DistinctStringResponseB, string] =
ok(DistinctStringResponseB("b"))
)
.isOk()
check ExternalDistinctResponseA
.setProvider(
proc(): Result[ExternalDistinctResponseA, string] =
ok(ExternalDistinctResponseA(ExternalDefinedTypeShared(label: "ea")))
)
.isOk()
.setProvider(
proc(): Result[ExternalDistinctResponseA, string] =
ok(ExternalDistinctResponseA(ExternalDefinedTypeShared(label: "ea")))
)
.isOk()
check ExternalDistinctResponseB
.setProvider(
proc(): Result[ExternalDistinctResponseB, string] =
ok(ExternalDistinctResponseB(ExternalDefinedTypeShared(label: "eb")))
)
.isOk()
.setProvider(
proc(): Result[ExternalDistinctResponseB, string] =
ok(ExternalDistinctResponseB(ExternalDefinedTypeShared(label: "eb")))
)
.isOk()
let resA = DistinctStringResponseA.request()
let resB = DistinctStringResponseB.request()

View File

@ -29,17 +29,16 @@ suite "SQLite - migrations":
test "filter and order migration script file paths":
## Given
let paths =
@[
sourceDir / "00001_valid.up.sql",
sourceDir / "00002_alsoValidWithUpperCaseExtension.UP.SQL",
sourceDir / "00007_unorderedValid.up.sql",
sourceDir / "00003_validRepeated.up.sql",
sourceDir / "00003_validRepeated.up.sql",
sourceDir / "00666_noMigrationScript.bmp",
sourceDir / "00X00_invalidVersion.down.sql",
sourceDir / "00008_notWithinVersionRange.up.sql",
]
let paths = @[
sourceDir / "00001_valid.up.sql",
sourceDir / "00002_alsoValidWithUpperCaseExtension.UP.SQL",
sourceDir / "00007_unorderedValid.up.sql",
sourceDir / "00003_validRepeated.up.sql",
sourceDir / "00003_validRepeated.up.sql",
sourceDir / "00666_noMigrationScript.bmp",
sourceDir / "00X00_invalidVersion.down.sql",
sourceDir / "00008_notWithinVersionRange.up.sql",
]
let
lowerVersion = 0
@ -64,16 +63,14 @@ suite "SQLite - migrations":
test "break migration scripts into queries":
## Given
let statement1 =
"""CREATE TABLE contacts1 (
let statement1 = """CREATE TABLE contacts1 (
contact_id INTEGER PRIMARY KEY,
first_name TEXT NOT NULL,
last_name TEXT NOT NULL,
email TEXT NOT NULL UNIQUE,
phone TEXT NOT NULL UNIQUE
);"""
let statement2 =
"""CREATE TABLE contacts2 (
let statement2 = """CREATE TABLE contacts2 (
contact_id INTEGER PRIMARY KEY,
first_name TEXT NOT NULL,
last_name TEXT NOT NULL,
@ -91,16 +88,14 @@ suite "SQLite - migrations":
test "break statements script into queries - empty statements":
## Given
let statement1 =
"""CREATE TABLE contacts1 (
let statement1 = """CREATE TABLE contacts1 (
contact_id INTEGER PRIMARY KEY,
first_name TEXT NOT NULL,
last_name TEXT NOT NULL,
email TEXT NOT NULL UNIQUE,
phone TEXT NOT NULL UNIQUE
);"""
let statement2 =
"""CREATE TABLE contacts2 (
let statement2 = """CREATE TABLE contacts2 (
contact_id INTEGER PRIMARY KEY,
first_name TEXT NOT NULL,
last_name TEXT NOT NULL,

View File

@ -10,22 +10,21 @@ import
import waku/waku_core/peers, waku/node/peer_manager/peer_store/waku_peer_storage
proc `==`(a, b: RemotePeerInfo): bool =
let comparisons =
@[
a.peerId == b.peerId,
a.addrs == b.addrs,
a.enr == b.enr,
a.protocols == b.protocols,
a.agent == b.agent,
a.protoVersion == b.protoVersion,
a.publicKey == b.publicKey,
a.connectedness == b.connectedness,
a.disconnectTime == b.disconnectTime,
a.origin == b.origin,
a.direction == b.direction,
a.lastFailedConn == b.lastFailedConn,
a.numberFailedConn == b.numberFailedConn,
]
let comparisons = @[
a.peerId == b.peerId,
a.addrs == b.addrs,
a.enr == b.enr,
a.protocols == b.protocols,
a.agent == b.agent,
a.protoVersion == b.protoVersion,
a.publicKey == b.publicKey,
a.connectedness == b.connectedness,
a.disconnectTime == b.disconnectTime,
a.origin == b.origin,
a.direction == b.direction,
a.lastFailedConn == b.lastFailedConn,
a.numberFailedConn == b.numberFailedConn,
]
allIt(comparisons, it == true)
@ -61,18 +60,17 @@ suite "Protobuf Serialisation":
suite "encode":
test "simple":
# Given the expected bytes representation of a valid RemotePeerInfo
let expectedBuffer: seq[byte] =
@[
10, 39, 0, 37, 8, 2, 18, 33, 3, 43, 246, 238, 219, 109, 147, 79, 129, 40, 145,
217, 209, 109, 105, 185, 186, 200, 180, 203, 72, 166, 220, 196, 232, 170, 74,
141, 125, 255, 112, 238, 204, 18, 8, 4, 192, 168, 0, 1, 6, 31, 144, 34, 95, 8,
3, 18, 91, 48, 89, 48, 19, 6, 7, 42, 134, 72, 206, 61, 2, 1, 6, 8, 42, 134,
72, 206, 61, 3, 1, 7, 3, 66, 0, 4, 222, 61, 48, 15, 163, 106, 224, 232, 245,
213, 48, 137, 157, 131, 171, 171, 68, 171, 243, 22, 31, 22, 42, 75, 201, 1,
216, 230, 236, 218, 2, 14, 139, 109, 95, 141, 163, 5, 37, 231, 29, 104, 81,
81, 12, 9, 142, 92, 71, 198, 70, 165, 151, 251, 77, 206, 192, 52, 233, 247,
124, 64, 158, 98, 40, 0, 48, 0,
]
let expectedBuffer: seq[byte] = @[
10, 39, 0, 37, 8, 2, 18, 33, 3, 43, 246, 238, 219, 109, 147, 79, 129, 40, 145,
217, 209, 109, 105, 185, 186, 200, 180, 203, 72, 166, 220, 196, 232, 170, 74,
141, 125, 255, 112, 238, 204, 18, 8, 4, 192, 168, 0, 1, 6, 31, 144, 34, 95, 8,
3, 18, 91, 48, 89, 48, 19, 6, 7, 42, 134, 72, 206, 61, 2, 1, 6, 8, 42, 134, 72,
206, 61, 3, 1, 7, 3, 66, 0, 4, 222, 61, 48, 15, 163, 106, 224, 232, 245, 213,
48, 137, 157, 131, 171, 171, 68, 171, 243, 22, 31, 22, 42, 75, 201, 1, 216, 230,
236, 218, 2, 14, 139, 109, 95, 141, 163, 5, 37, 231, 29, 104, 81, 81, 12, 9,
142, 92, 71, 198, 70, 165, 151, 251, 77, 206, 192, 52, 233, 247, 124, 64, 158,
98, 40, 0, 48, 0,
]
# When converting a valid RemotePeerInfo to a ProtoBuffer
let encodedRemotePeerInfo = encode(remotePeerInfo).get()
@ -87,18 +85,17 @@ suite "Protobuf Serialisation":
suite "decode":
test "simple":
# Given the bytes representation of a valid RemotePeerInfo
let buffer: seq[byte] =
@[
10, 39, 0, 37, 8, 2, 18, 33, 3, 43, 246, 238, 219, 109, 147, 79, 129, 40, 145,
217, 209, 109, 105, 185, 186, 200, 180, 203, 72, 166, 220, 196, 232, 170, 74,
141, 125, 255, 112, 238, 204, 18, 8, 4, 192, 168, 0, 1, 6, 31, 144, 34, 95, 8,
3, 18, 91, 48, 89, 48, 19, 6, 7, 42, 134, 72, 206, 61, 2, 1, 6, 8, 42, 134,
72, 206, 61, 3, 1, 7, 3, 66, 0, 4, 222, 61, 48, 15, 163, 106, 224, 232, 245,
213, 48, 137, 157, 131, 171, 171, 68, 171, 243, 22, 31, 22, 42, 75, 201, 1,
216, 230, 236, 218, 2, 14, 139, 109, 95, 141, 163, 5, 37, 231, 29, 104, 81,
81, 12, 9, 142, 92, 71, 198, 70, 165, 151, 251, 77, 206, 192, 52, 233, 247,
124, 64, 158, 98, 40, 0, 48, 0,
]
let buffer: seq[byte] = @[
10, 39, 0, 37, 8, 2, 18, 33, 3, 43, 246, 238, 219, 109, 147, 79, 129, 40, 145,
217, 209, 109, 105, 185, 186, 200, 180, 203, 72, 166, 220, 196, 232, 170, 74,
141, 125, 255, 112, 238, 204, 18, 8, 4, 192, 168, 0, 1, 6, 31, 144, 34, 95, 8,
3, 18, 91, 48, 89, 48, 19, 6, 7, 42, 134, 72, 206, 61, 2, 1, 6, 8, 42, 134, 72,
206, 61, 3, 1, 7, 3, 66, 0, 4, 222, 61, 48, 15, 163, 106, 224, 232, 245, 213,
48, 137, 157, 131, 171, 171, 68, 171, 243, 22, 31, 22, 42, 75, 201, 1, 216, 230,
236, 218, 2, 14, 139, 109, 95, 141, 163, 5, 37, 231, 29, 104, 81, 81, 12, 9,
142, 92, 71, 198, 70, 165, 151, 251, 77, 206, 192, 52, 233, 247, 124, 64, 158,
98, 40, 0, 48, 0,
]
# When converting a valid buffer to RemotePeerInfo
let decodedRemotePeerInfo = RemotePeerInfo.decode(buffer).get()

View File

@ -35,13 +35,12 @@ proc protoHealthMock(kind: WakuProtocol, health: HealthStatus): ProtocolHealth =
suite "Health Monitor - health state calculation":
test "Disconnected, zero peers":
let protocols =
@[
protoHealthMock(RelayProtocol, HealthStatus.NOT_READY),
protoHealthMock(StoreClientProtocol, HealthStatus.NOT_READY),
protoHealthMock(FilterClientProtocol, HealthStatus.NOT_READY),
protoHealthMock(LightpushClientProtocol, HealthStatus.NOT_READY),
]
let protocols = @[
protoHealthMock(RelayProtocol, HealthStatus.NOT_READY),
protoHealthMock(StoreClientProtocol, HealthStatus.NOT_READY),
protoHealthMock(FilterClientProtocol, HealthStatus.NOT_READY),
protoHealthMock(LightpushClientProtocol, HealthStatus.NOT_READY),
]
let strength = initTable[WakuProtocol, int]()
let state = calculateConnectionState(protocols, strength, some(MockDLow))
check state == ConnectionStatus.Disconnected
@ -64,13 +63,12 @@ suite "Health Monitor - health state calculation":
check state == ConnectionStatus.Connected
test "Connected, robust edge":
let protocols =
@[
protoHealthMock(RelayProtocol, HealthStatus.NOT_MOUNTED),
protoHealthMock(LightpushClientProtocol, HealthStatus.READY),
protoHealthMock(FilterClientProtocol, HealthStatus.READY),
protoHealthMock(StoreClientProtocol, HealthStatus.READY),
]
let protocols = @[
protoHealthMock(RelayProtocol, HealthStatus.NOT_MOUNTED),
protoHealthMock(LightpushClientProtocol, HealthStatus.READY),
protoHealthMock(FilterClientProtocol, HealthStatus.READY),
protoHealthMock(StoreClientProtocol, HealthStatus.READY),
]
var strength = initTable[WakuProtocol, int]()
strength[LightpushClientProtocol] = HealthyThreshold
strength[FilterClientProtocol] = HealthyThreshold
@ -79,12 +77,11 @@ suite "Health Monitor - health state calculation":
check state == ConnectionStatus.Connected
test "Disconnected, edge missing store":
let protocols =
@[
protoHealthMock(LightpushClientProtocol, HealthStatus.READY),
protoHealthMock(FilterClientProtocol, HealthStatus.READY),
protoHealthMock(StoreClientProtocol, HealthStatus.NOT_READY),
]
let protocols = @[
protoHealthMock(LightpushClientProtocol, HealthStatus.READY),
protoHealthMock(FilterClientProtocol, HealthStatus.READY),
protoHealthMock(StoreClientProtocol, HealthStatus.NOT_READY),
]
var strength = initTable[WakuProtocol, int]()
strength[LightpushClientProtocol] = HealthyThreshold
strength[FilterClientProtocol] = HealthyThreshold
@ -94,12 +91,11 @@ suite "Health Monitor - health state calculation":
test "PartiallyConnected, edge meets minimum failover requirement":
let weakCount = max(1, HealthyThreshold - 1)
let protocols =
@[
protoHealthMock(LightpushClientProtocol, HealthStatus.READY),
protoHealthMock(FilterClientProtocol, HealthStatus.READY),
protoHealthMock(StoreClientProtocol, HealthStatus.READY),
]
let protocols = @[
protoHealthMock(LightpushClientProtocol, HealthStatus.READY),
protoHealthMock(FilterClientProtocol, HealthStatus.READY),
protoHealthMock(StoreClientProtocol, HealthStatus.READY),
]
var strength = initTable[WakuProtocol, int]()
strength[LightpushClientProtocol] = weakCount
strength[FilterClientProtocol] = weakCount
@ -108,11 +104,10 @@ suite "Health Monitor - health state calculation":
check state == ConnectionStatus.PartiallyConnected
test "Connected, robust relay ignores store server":
let protocols =
@[
protoHealthMock(RelayProtocol, HealthStatus.READY),
protoHealthMock(StoreProtocol, HealthStatus.READY),
]
let protocols = @[
protoHealthMock(RelayProtocol, HealthStatus.READY),
protoHealthMock(StoreProtocol, HealthStatus.READY),
]
var strength = initTable[WakuProtocol, int]()
strength[RelayProtocol] = MockDLow
strength[StoreProtocol] = 0
@ -120,12 +115,11 @@ suite "Health Monitor - health state calculation":
check state == ConnectionStatus.Connected
test "Connected, robust relay ignores store client":
let protocols =
@[
protoHealthMock(RelayProtocol, HealthStatus.READY),
protoHealthMock(StoreProtocol, HealthStatus.READY),
protoHealthMock(StoreClientProtocol, HealthStatus.NOT_READY),
]
let protocols = @[
protoHealthMock(RelayProtocol, HealthStatus.READY),
protoHealthMock(StoreProtocol, HealthStatus.READY),
protoHealthMock(StoreClientProtocol, HealthStatus.NOT_READY),
]
var strength = initTable[WakuProtocol, int]()
strength[RelayProtocol] = MockDLow
strength[StoreProtocol] = 0

View File

@ -37,19 +37,18 @@ suite "Waku Store - End to End - Sorted Archive":
contentTopicSeq = @[contentTopic]
let timeOrigin = now()
archiveMessages =
@[
fakeWakuMessage(@[byte 00], ts = ts(00, timeOrigin)),
fakeWakuMessage(@[byte 01], ts = ts(10, timeOrigin)),
fakeWakuMessage(@[byte 02], ts = ts(20, timeOrigin)),
fakeWakuMessage(@[byte 03], ts = ts(30, timeOrigin)),
fakeWakuMessage(@[byte 04], ts = ts(40, timeOrigin)),
fakeWakuMessage(@[byte 05], ts = ts(50, timeOrigin)),
fakeWakuMessage(@[byte 06], ts = ts(60, timeOrigin)),
fakeWakuMessage(@[byte 07], ts = ts(70, timeOrigin)),
fakeWakuMessage(@[byte 08], ts = ts(80, timeOrigin)),
fakeWakuMessage(@[byte 09], ts = ts(90, timeOrigin)),
]
archiveMessages = @[
fakeWakuMessage(@[byte 00], ts = ts(00, timeOrigin)),
fakeWakuMessage(@[byte 01], ts = ts(10, timeOrigin)),
fakeWakuMessage(@[byte 02], ts = ts(20, timeOrigin)),
fakeWakuMessage(@[byte 03], ts = ts(30, timeOrigin)),
fakeWakuMessage(@[byte 04], ts = ts(40, timeOrigin)),
fakeWakuMessage(@[byte 05], ts = ts(50, timeOrigin)),
fakeWakuMessage(@[byte 06], ts = ts(60, timeOrigin)),
fakeWakuMessage(@[byte 07], ts = ts(70, timeOrigin)),
fakeWakuMessage(@[byte 08], ts = ts(80, timeOrigin)),
fakeWakuMessage(@[byte 09], ts = ts(90, timeOrigin)),
]
historyQuery = HistoryQuery(
pubsubTopic: some(pubsubTopic),
@ -500,19 +499,18 @@ suite "Waku Store - End to End - Unsorted Archive":
)
let timeOrigin = now()
unsortedArchiveMessages =
@[ # SortIndex (by timestamp and digest)
fakeWakuMessage(@[byte 00], ts = ts(00, timeOrigin)), # 1
fakeWakuMessage(@[byte 03], ts = ts(00, timeOrigin)), # 2
fakeWakuMessage(@[byte 08], ts = ts(00, timeOrigin)), # 0
fakeWakuMessage(@[byte 07], ts = ts(10, timeOrigin)), # 4
fakeWakuMessage(@[byte 02], ts = ts(10, timeOrigin)), # 3
fakeWakuMessage(@[byte 09], ts = ts(10, timeOrigin)), # 5
fakeWakuMessage(@[byte 06], ts = ts(20, timeOrigin)), # 6
fakeWakuMessage(@[byte 01], ts = ts(20, timeOrigin)), # 9
fakeWakuMessage(@[byte 04], ts = ts(20, timeOrigin)), # 7
fakeWakuMessage(@[byte 05], ts = ts(20, timeOrigin)), # 8
]
unsortedArchiveMessages = @[ # SortIndex (by timestamp and digest)
fakeWakuMessage(@[byte 00], ts = ts(00, timeOrigin)), # 1
fakeWakuMessage(@[byte 03], ts = ts(00, timeOrigin)), # 2
fakeWakuMessage(@[byte 08], ts = ts(00, timeOrigin)), # 0
fakeWakuMessage(@[byte 07], ts = ts(10, timeOrigin)), # 4
fakeWakuMessage(@[byte 02], ts = ts(10, timeOrigin)), # 3
fakeWakuMessage(@[byte 09], ts = ts(10, timeOrigin)), # 5
fakeWakuMessage(@[byte 06], ts = ts(20, timeOrigin)), # 6
fakeWakuMessage(@[byte 01], ts = ts(20, timeOrigin)), # 9
fakeWakuMessage(@[byte 04], ts = ts(20, timeOrigin)), # 7
fakeWakuMessage(@[byte 05], ts = ts(20, timeOrigin)), # 8
]
let
serverKey = generateSecp256k1Key()
@ -654,21 +652,20 @@ suite "Waku Store - End to End - Archive with Multiple Topics":
originTs = proc(offset = 0): Timestamp {.gcsafe, raises: [].} =
ts(offset, timeOrigin)
archiveMessages =
@[
fakeWakuMessage(@[byte 00], ts = originTs(00), contentTopic = contentTopic),
fakeWakuMessage(@[byte 01], ts = originTs(10), contentTopic = contentTopicB),
fakeWakuMessage(@[byte 02], ts = originTs(20), contentTopic = contentTopicC),
fakeWakuMessage(@[byte 03], ts = originTs(30), contentTopic = contentTopic),
fakeWakuMessage(@[byte 04], ts = originTs(40), contentTopic = contentTopicB),
fakeWakuMessage(@[byte 05], ts = originTs(50), contentTopic = contentTopicC),
fakeWakuMessage(@[byte 06], ts = originTs(60), contentTopic = contentTopic),
fakeWakuMessage(@[byte 07], ts = originTs(70), contentTopic = contentTopicB),
fakeWakuMessage(@[byte 08], ts = originTs(80), contentTopic = contentTopicC),
fakeWakuMessage(
@[byte 09], ts = originTs(90), contentTopic = contentTopicSpecials
),
]
archiveMessages = @[
fakeWakuMessage(@[byte 00], ts = originTs(00), contentTopic = contentTopic),
fakeWakuMessage(@[byte 01], ts = originTs(10), contentTopic = contentTopicB),
fakeWakuMessage(@[byte 02], ts = originTs(20), contentTopic = contentTopicC),
fakeWakuMessage(@[byte 03], ts = originTs(30), contentTopic = contentTopic),
fakeWakuMessage(@[byte 04], ts = originTs(40), contentTopic = contentTopicB),
fakeWakuMessage(@[byte 05], ts = originTs(50), contentTopic = contentTopicC),
fakeWakuMessage(@[byte 06], ts = originTs(60), contentTopic = contentTopic),
fakeWakuMessage(@[byte 07], ts = originTs(70), contentTopic = contentTopicB),
fakeWakuMessage(@[byte 08], ts = originTs(80), contentTopic = contentTopicC),
fakeWakuMessage(
@[byte 09], ts = originTs(90), contentTopic = contentTopicSpecials
),
]
let
serverKey = generateSecp256k1Key()
@ -910,12 +907,11 @@ suite "Waku Store - End to End - Archive with Multiple Topics":
xasyncTest "Only ephemeral Messages:":
# Given an archive with only ephemeral messages
let
ephemeralMessages =
@[
fakeWakuMessage(@[byte 00], ts = ts(00), ephemeral = true),
fakeWakuMessage(@[byte 01], ts = ts(10), ephemeral = true),
fakeWakuMessage(@[byte 02], ts = ts(20), ephemeral = true),
]
ephemeralMessages = @[
fakeWakuMessage(@[byte 00], ts = ts(00), ephemeral = true),
fakeWakuMessage(@[byte 01], ts = ts(10), ephemeral = true),
fakeWakuMessage(@[byte 02], ts = ts(20), ephemeral = true),
]
ephemeralArchiveDriver =
newSqliteArchiveDriver().put(pubsubTopic, ephemeralMessages)
@ -946,18 +942,16 @@ suite "Waku Store - End to End - Archive with Multiple Topics":
xasyncTest "Mixed messages":
# Given an archive with both ephemeral and non-ephemeral messages
let
ephemeralMessages =
@[
fakeWakuMessage(@[byte 00], ts = ts(00), ephemeral = true),
fakeWakuMessage(@[byte 01], ts = ts(10), ephemeral = true),
fakeWakuMessage(@[byte 02], ts = ts(20), ephemeral = true),
]
nonEphemeralMessages =
@[
fakeWakuMessage(@[byte 03], ts = ts(30), ephemeral = false),
fakeWakuMessage(@[byte 04], ts = ts(40), ephemeral = false),
fakeWakuMessage(@[byte 05], ts = ts(50), ephemeral = false),
]
ephemeralMessages = @[
fakeWakuMessage(@[byte 00], ts = ts(00), ephemeral = true),
fakeWakuMessage(@[byte 01], ts = ts(10), ephemeral = true),
fakeWakuMessage(@[byte 02], ts = ts(20), ephemeral = true),
]
nonEphemeralMessages = @[
fakeWakuMessage(@[byte 03], ts = ts(30), ephemeral = false),
fakeWakuMessage(@[byte 04], ts = ts(40), ephemeral = false),
fakeWakuMessage(@[byte 05], ts = ts(50), ephemeral = false),
]
mixedArchiveDriver = newSqliteArchiveDriver()
.put(pubsubTopic, ephemeralMessages)
.put(pubsubTopic, nonEphemeralMessages)

View File

@ -283,31 +283,31 @@ suite "Waku RlnRelay - End to End - Static":
doAssert(
client.wakuRlnRelay
.appendRLNProof(
message1b, epoch + float64(client.wakuRlnRelay.rlnEpochSizeSec * 0)
)
.isOk()
.appendRLNProof(
message1b, epoch + float64(client.wakuRlnRelay.rlnEpochSizeSec * 0)
)
.isOk()
)
doAssert(
client.wakuRlnRelay
.appendRLNProof(
message1kib, epoch + float64(client.wakuRlnRelay.rlnEpochSizeSec * 1)
)
.isOk()
.appendRLNProof(
message1kib, epoch + float64(client.wakuRlnRelay.rlnEpochSizeSec * 1)
)
.isOk()
)
doAssert(
client.wakuRlnRelay
.appendRLNProof(
message150kib, epoch + float64(client.wakuRlnRelay.rlnEpochSizeSec * 2)
)
.isOk()
.appendRLNProof(
message150kib, epoch + float64(client.wakuRlnRelay.rlnEpochSizeSec * 2)
)
.isOk()
)
doAssert(
client.wakuRlnRelay
.appendRLNProof(
message151kibPlus, epoch + float64(client.wakuRlnRelay.rlnEpochSizeSec * 3)
)
.isOk()
.appendRLNProof(
message151kibPlus, epoch + float64(client.wakuRlnRelay.rlnEpochSizeSec * 3)
)
.isOk()
)
# When sending the 1B message
@ -372,10 +372,10 @@ suite "Waku RlnRelay - End to End - Static":
doAssert(
client.wakuRlnRelay
.appendRLNProof(
message151kibPlus, epoch + float64(client.wakuRlnRelay.rlnEpochSizeSec * 3)
)
.isOk()
.appendRLNProof(
message151kibPlus, epoch + float64(client.wakuRlnRelay.rlnEpochSizeSec * 3)
)
.isOk()
)
# When sending the 150KiB plus message
@ -496,11 +496,11 @@ suite "Waku RlnRelay - End to End - OnChain":
# However, it doesn't reduce the retries against the blockchain that the mounting rln process attempts (until it accepts failure).
# Note: These retries might be an unintended library issue.
discard await server
.setupRelayWithOnChainRln(@[pubsubTopic], wakuRlnConfig1)
.withTimeout(FUTURE_TIMEOUT)
.setupRelayWithOnChainRln(@[pubsubTopic], wakuRlnConfig1)
.withTimeout(FUTURE_TIMEOUT)
discard await client
.setupRelayWithOnChainRln(@[pubsubTopic], wakuRlnConfig2)
.withTimeout(FUTURE_TIMEOUT)
.setupRelayWithOnChainRln(@[pubsubTopic], wakuRlnConfig2)
.withTimeout(FUTURE_TIMEOUT)
check:
(await serverErrorFuture.waitForResult()).get() ==

View File

@ -434,18 +434,16 @@ suite "Sharding":
contentTopicShort = "/toychat/2/huilong/proto"
contentTopicFull = "/0/toychat/2/huilong/proto"
pubsubTopic = "/waku/2/rs/0/58355"
archiveMessages1 =
@[
fakeWakuMessage(
@[byte 00], ts = ts(00, timeOrigin), contentTopic = contentTopicShort
)
]
archiveMessages2 =
@[
fakeWakuMessage(
@[byte 01], ts = ts(10, timeOrigin), contentTopic = contentTopicFull
)
]
archiveMessages1 = @[
fakeWakuMessage(
@[byte 00], ts = ts(00, timeOrigin), contentTopic = contentTopicShort
)
]
archiveMessages2 = @[
fakeWakuMessage(
@[byte 01], ts = ts(10, timeOrigin), contentTopic = contentTopicFull
)
]
archiveDriver = newArchiveDriverWithMessages(pubsubTopic, archiveMessages1)
discard archiveDriver.put(pubsubTopic, archiveMessages2)
let mountArchiveResult = server.mountArchive(archiveDriver)
@ -597,18 +595,16 @@ suite "Sharding":
contentTopic2 = "/0/toychat2/2/huilong/proto"
pubsubTopic2 = "/waku/2/rs/0/23286"
# Automatically generated from the contentTopic above
archiveMessages1 =
@[
fakeWakuMessage(
@[byte 00], ts = ts(00, timeOrigin), contentTopic = contentTopic1
)
]
archiveMessages2 =
@[
fakeWakuMessage(
@[byte 01], ts = ts(10, timeOrigin), contentTopic = contentTopic2
)
]
archiveMessages1 = @[
fakeWakuMessage(
@[byte 00], ts = ts(00, timeOrigin), contentTopic = contentTopic1
)
]
archiveMessages2 = @[
fakeWakuMessage(
@[byte 01], ts = ts(10, timeOrigin), contentTopic = contentTopic2
)
]
archiveDriver = newArchiveDriverWithMessages(pubsubTopic1, archiveMessages1)
discard archiveDriver.put(pubsubTopic2, archiveMessages2)
let mountArchiveResult = server.mountArchive(archiveDriver)

View File

@ -38,19 +38,18 @@ suite "Waku Store - End to End - Sorted Archive":
contentTopicSeq = @[contentTopic]
let timeOrigin = now()
let messages =
@[
fakeWakuMessage(@[byte 00], ts = ts(00, timeOrigin)),
fakeWakuMessage(@[byte 01], ts = ts(10, timeOrigin)),
fakeWakuMessage(@[byte 02], ts = ts(20, timeOrigin)),
fakeWakuMessage(@[byte 03], ts = ts(30, timeOrigin)),
fakeWakuMessage(@[byte 04], ts = ts(40, timeOrigin)),
fakeWakuMessage(@[byte 05], ts = ts(50, timeOrigin)),
fakeWakuMessage(@[byte 06], ts = ts(60, timeOrigin)),
fakeWakuMessage(@[byte 07], ts = ts(70, timeOrigin)),
fakeWakuMessage(@[byte 08], ts = ts(80, timeOrigin)),
fakeWakuMessage(@[byte 09], ts = ts(90, timeOrigin)),
]
let messages = @[
fakeWakuMessage(@[byte 00], ts = ts(00, timeOrigin)),
fakeWakuMessage(@[byte 01], ts = ts(10, timeOrigin)),
fakeWakuMessage(@[byte 02], ts = ts(20, timeOrigin)),
fakeWakuMessage(@[byte 03], ts = ts(30, timeOrigin)),
fakeWakuMessage(@[byte 04], ts = ts(40, timeOrigin)),
fakeWakuMessage(@[byte 05], ts = ts(50, timeOrigin)),
fakeWakuMessage(@[byte 06], ts = ts(60, timeOrigin)),
fakeWakuMessage(@[byte 07], ts = ts(70, timeOrigin)),
fakeWakuMessage(@[byte 08], ts = ts(80, timeOrigin)),
fakeWakuMessage(@[byte 09], ts = ts(90, timeOrigin)),
]
archiveMessages = messages.mapIt(
WakuMessageKeyValue(
messageHash: computeMessageHash(pubsubTopic, it),
@ -542,19 +541,18 @@ suite "Waku Store - End to End - Unsorted Archive":
)
let timeOrigin = now()
let messages =
@[
fakeWakuMessage(@[byte 00], ts = ts(00, timeOrigin)),
fakeWakuMessage(@[byte 03], ts = ts(00, timeOrigin)),
fakeWakuMessage(@[byte 08], ts = ts(00, timeOrigin)),
fakeWakuMessage(@[byte 07], ts = ts(10, timeOrigin)),
fakeWakuMessage(@[byte 02], ts = ts(10, timeOrigin)),
fakeWakuMessage(@[byte 09], ts = ts(10, timeOrigin)),
fakeWakuMessage(@[byte 06], ts = ts(20, timeOrigin)),
fakeWakuMessage(@[byte 01], ts = ts(20, timeOrigin)),
fakeWakuMessage(@[byte 04], ts = ts(20, timeOrigin)),
fakeWakuMessage(@[byte 05], ts = ts(20, timeOrigin)),
]
let messages = @[
fakeWakuMessage(@[byte 00], ts = ts(00, timeOrigin)),
fakeWakuMessage(@[byte 03], ts = ts(00, timeOrigin)),
fakeWakuMessage(@[byte 08], ts = ts(00, timeOrigin)),
fakeWakuMessage(@[byte 07], ts = ts(10, timeOrigin)),
fakeWakuMessage(@[byte 02], ts = ts(10, timeOrigin)),
fakeWakuMessage(@[byte 09], ts = ts(10, timeOrigin)),
fakeWakuMessage(@[byte 06], ts = ts(20, timeOrigin)),
fakeWakuMessage(@[byte 01], ts = ts(20, timeOrigin)),
fakeWakuMessage(@[byte 04], ts = ts(20, timeOrigin)),
fakeWakuMessage(@[byte 05], ts = ts(20, timeOrigin)),
]
unsortedArchiveMessages = messages.mapIt(
WakuMessageKeyValue(
messageHash: computeMessageHash(pubsubTopic, it),
@ -759,19 +757,19 @@ suite "Waku Store - End to End - Unsorted Archive without provided Timestamp":
paginationLimit: some(uint64(5)),
)
let messages =
@[ # Not providing explicit timestamp means it will be set in "arrive" order
fakeWakuMessage(@[byte 09]),
fakeWakuMessage(@[byte 07]),
fakeWakuMessage(@[byte 05]),
fakeWakuMessage(@[byte 03]),
fakeWakuMessage(@[byte 01]),
fakeWakuMessage(@[byte 00]),
fakeWakuMessage(@[byte 02]),
fakeWakuMessage(@[byte 04]),
fakeWakuMessage(@[byte 06]),
fakeWakuMessage(@[byte 08]),
]
let messages = @[
# Not providing explicit timestamp means it will be set in "arrive" order
fakeWakuMessage(@[byte 09]),
fakeWakuMessage(@[byte 07]),
fakeWakuMessage(@[byte 05]),
fakeWakuMessage(@[byte 03]),
fakeWakuMessage(@[byte 01]),
fakeWakuMessage(@[byte 00]),
fakeWakuMessage(@[byte 02]),
fakeWakuMessage(@[byte 04]),
fakeWakuMessage(@[byte 06]),
fakeWakuMessage(@[byte 08]),
]
unsortedArchiveMessages = messages.mapIt(
WakuMessageKeyValue(
messageHash: computeMessageHash(pubsubTopic, it),
@ -900,21 +898,20 @@ suite "Waku Store - End to End - Archive with Multiple Topics":
originTs = proc(offset = 0): Timestamp {.gcsafe, raises: [].} =
ts(offset, timeOrigin)
let messages =
@[
fakeWakuMessage(@[byte 00], ts = originTs(00), contentTopic = contentTopic),
fakeWakuMessage(@[byte 01], ts = originTs(10), contentTopic = contentTopicB),
fakeWakuMessage(@[byte 02], ts = originTs(20), contentTopic = contentTopicC),
fakeWakuMessage(@[byte 03], ts = originTs(30), contentTopic = contentTopic),
fakeWakuMessage(@[byte 04], ts = originTs(40), contentTopic = contentTopicB),
fakeWakuMessage(@[byte 05], ts = originTs(50), contentTopic = contentTopicC),
fakeWakuMessage(@[byte 06], ts = originTs(60), contentTopic = contentTopic),
fakeWakuMessage(@[byte 07], ts = originTs(70), contentTopic = contentTopicB),
fakeWakuMessage(@[byte 08], ts = originTs(80), contentTopic = contentTopicC),
fakeWakuMessage(
@[byte 09], ts = originTs(90), contentTopic = contentTopicSpecials
),
]
let messages = @[
fakeWakuMessage(@[byte 00], ts = originTs(00), contentTopic = contentTopic),
fakeWakuMessage(@[byte 01], ts = originTs(10), contentTopic = contentTopicB),
fakeWakuMessage(@[byte 02], ts = originTs(20), contentTopic = contentTopicC),
fakeWakuMessage(@[byte 03], ts = originTs(30), contentTopic = contentTopic),
fakeWakuMessage(@[byte 04], ts = originTs(40), contentTopic = contentTopicB),
fakeWakuMessage(@[byte 05], ts = originTs(50), contentTopic = contentTopicC),
fakeWakuMessage(@[byte 06], ts = originTs(60), contentTopic = contentTopic),
fakeWakuMessage(@[byte 07], ts = originTs(70), contentTopic = contentTopicB),
fakeWakuMessage(@[byte 08], ts = originTs(80), contentTopic = contentTopicC),
fakeWakuMessage(
@[byte 09], ts = originTs(90), contentTopic = contentTopicSpecials
),
]
archiveMessages = messages.mapIt(
WakuMessageKeyValue(
@ -1172,12 +1169,11 @@ suite "Waku Store - End to End - Archive with Multiple Topics":
xasyncTest "Only ephemeral Messages:":
# Given an archive with only ephemeral messages
let
ephemeralMessages =
@[
fakeWakuMessage(@[byte 00], ts = ts(00), ephemeral = true),
fakeWakuMessage(@[byte 01], ts = ts(10), ephemeral = true),
fakeWakuMessage(@[byte 02], ts = ts(20), ephemeral = true),
]
ephemeralMessages = @[
fakeWakuMessage(@[byte 00], ts = ts(00), ephemeral = true),
fakeWakuMessage(@[byte 01], ts = ts(10), ephemeral = true),
fakeWakuMessage(@[byte 02], ts = ts(20), ephemeral = true),
]
ephemeralArchiveDriver =
newSqliteArchiveDriver().put(pubsubTopic, ephemeralMessages)
@ -1207,18 +1203,16 @@ suite "Waku Store - End to End - Archive with Multiple Topics":
xasyncTest "Mixed messages":
# Given an archive with both ephemeral and non-ephemeral messages
let
ephemeralMessages =
@[
fakeWakuMessage(@[byte 00], ts = ts(00), ephemeral = true),
fakeWakuMessage(@[byte 01], ts = ts(10), ephemeral = true),
fakeWakuMessage(@[byte 02], ts = ts(20), ephemeral = true),
]
nonEphemeralMessages =
@[
fakeWakuMessage(@[byte 03], ts = ts(30), ephemeral = false),
fakeWakuMessage(@[byte 04], ts = ts(40), ephemeral = false),
fakeWakuMessage(@[byte 05], ts = ts(50), ephemeral = false),
]
ephemeralMessages = @[
fakeWakuMessage(@[byte 00], ts = ts(00), ephemeral = true),
fakeWakuMessage(@[byte 01], ts = ts(10), ephemeral = true),
fakeWakuMessage(@[byte 02], ts = ts(20), ephemeral = true),
]
nonEphemeralMessages = @[
fakeWakuMessage(@[byte 03], ts = ts(30), ephemeral = false),
fakeWakuMessage(@[byte 04], ts = ts(40), ephemeral = false),
fakeWakuMessage(@[byte 05], ts = ts(50), ephemeral = false),
]
mixedArchiveDriver = newSqliteArchiveDriver()
.put(pubsubTopic, ephemeralMessages)
.put(pubsubTopic, nonEphemeralMessages)

View File

@ -8,8 +8,7 @@ const
EMOJI* =
"😀 😃 😄 😁 😆 😅 🤣 😂 🙂 🙃 😉 😊 😇 🥰 😍 🤩 😘 😗 😚 😙"
CODE* = "def main():\n\tprint('Hello, world!')"
QUERY* =
"""
QUERY* = """
SELECT
u.id,
u.name,
@ -30,8 +29,7 @@ const
u.id = 1
"""
TEXT_SMALL* = "Lorem ipsum dolor sit amet, consectetur adipiscing elit."
TEXT_LARGE* =
"""
TEXT_LARGE* = """
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Cras gravida vulputate semper. Proin
eleifend varius cursus. Morbi lacinia posuere quam sit amet pretium. Sed non metus fermentum,
venenatis nisl id, vestibulum eros. Quisque non lorem sit amet lectus faucibus elementum eu

View File

@ -1216,30 +1216,29 @@ procSuite "Peer Manager":
shardId1 = 1.uint16
# Create 3 nodes with different shards
let nodes =
@[
newTestWakuNode(
generateSecp256k1Key(),
parseIpAddress("0.0.0.0"),
Port(0),
clusterId = clusterId,
subscribeShards = @[shardId0],
),
newTestWakuNode(
generateSecp256k1Key(),
parseIpAddress("0.0.0.0"),
Port(0),
clusterId = clusterId,
subscribeShards = @[shardId1],
),
newTestWakuNode(
generateSecp256k1Key(),
parseIpAddress("0.0.0.0"),
Port(0),
clusterId = clusterId,
subscribeShards = @[shardId0],
),
]
let nodes = @[
newTestWakuNode(
generateSecp256k1Key(),
parseIpAddress("0.0.0.0"),
Port(0),
clusterId = clusterId,
subscribeShards = @[shardId0],
),
newTestWakuNode(
generateSecp256k1Key(),
parseIpAddress("0.0.0.0"),
Port(0),
clusterId = clusterId,
subscribeShards = @[shardId1],
),
newTestWakuNode(
generateSecp256k1Key(),
parseIpAddress("0.0.0.0"),
Port(0),
clusterId = clusterId,
subscribeShards = @[shardId0],
),
]
await allFutures(nodes.mapIt(it.start()))
for node in nodes:
@ -1364,13 +1363,12 @@ procSuite "Peer Manager":
node.peerManager.switch.peerStore[ProtoBook][peerInfo.peerId] = @[WakuRelayCodec]
## When: selectPeer is called with malformed pubsub topic
let invalidTopics =
@[
some(PubsubTopic("invalid-topic")),
some(PubsubTopic("/waku/2/invalid")),
some(PubsubTopic("/waku/2/rs/abc/0")), # non-numeric cluster
some(PubsubTopic("")), # empty topic
]
let invalidTopics = @[
some(PubsubTopic("invalid-topic")),
some(PubsubTopic("/waku/2/invalid")),
some(PubsubTopic("/waku/2/rs/abc/0")), # non-numeric cluster
some(PubsubTopic("")), # empty topic
]
## Then: Returns none(RemotePeerInfo) without crashing
for invalidTopic in invalidTopics:

View File

@ -29,14 +29,12 @@ suite "Waku API - Create node":
let nodeConfig = NodeConfig.init(
mode = Core,
protocolsConfig = ProtocolsConfig.init(
entryNodes =
@[
"enr:-QESuEC1p_s3xJzAC_XlOuuNrhVUETmfhbm1wxRGis0f7DlqGSw2FM-p2Vn7gmfkTTnAe8Ys2cgGBN8ufJnvzKQFZqFMBgmlkgnY0iXNlY3AyNTZrMaEDS8-D878DrdbNwcuY-3p1qdDp5MOoCurhdsNPJTXZ3c5g3RjcIJ2X4N1ZHCCd2g"
],
staticStoreNodes =
@[
"/ip4/127.0.0.1/tcp/60000/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYc"
],
entryNodes = @[
"enr:-QESuEC1p_s3xJzAC_XlOuuNrhVUETmfhbm1wxRGis0f7DlqGSw2FM-p2Vn7gmfkTTnAe8Ys2cgGBN8ufJnvzKQFZqFMBgmlkgnY0iXNlY3AyNTZrMaEDS8-D878DrdbNwcuY-3p1qdDp5MOoCurhdsNPJTXZ3c5g3RjcIJ2X4N1ZHCCd2g"
],
staticStoreNodes = @[
"/ip4/127.0.0.1/tcp/60000/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYc"
],
clusterId = 99,
autoShardingConfig = AutoShardingConfig(numShardsInCluster: 16),
messageValidation =
@ -65,11 +63,10 @@ suite "Waku API - Create node":
let nodeConfig = NodeConfig.init(
mode = Core,
protocolsConfig = ProtocolsConfig.init(
entryNodes =
@[
"enrtree://AIRVQ5DDA4FFWLRBCHJWUWOO6X6S4ZTZ5B667LQ6AJU6PEYDLRD5O@sandbox.waku.nodes.status.im",
"/ip4/127.0.0.1/tcp/60000/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYc",
],
entryNodes = @[
"enrtree://AIRVQ5DDA4FFWLRBCHJWUWOO6X6S4ZTZ5B667LQ6AJU6PEYDLRD5O@sandbox.waku.nodes.status.im",
"/ip4/127.0.0.1/tcp/60000/p2p/16Uuu2HBmAcHvhLqQKwSSbX6BG5JLWUDRcaLVrehUVqpw7fz1hbYc",
],
clusterId = 42,
),
)

View File

@ -307,30 +307,29 @@ suite "KeyFile test suite (adapted from nim-eth keyfile tests)":
# but the last byte of mac is changed to 00.
# While ciphertext is the correct encryption of priv under password,
# mac verfication should fail and nothing will be decrypted
let keyfileWrongMac =
%*{
"keyfile": {
"crypto": {
"cipher": "aes-128-ctr",
"cipherparams": {"iv": "6087dab2f9fdbbfaddc31a909735c1e6"},
"ciphertext":
"5318b4d5bcd28de64ee5559e671353e16f075ecae9f99c7a79a38af5f869aa46",
"kdf": "pbkdf2",
"kdfparams": {
"c": 262144,
"dklen": 32,
"prf": "hmac-sha256",
"salt": "ae3cd4e7013836a3df6bd7241b12db061dbe2c6785853cce422d148a624ce0bd",
},
"mac": "517ead924a9d0dc3124507e3393d175ce3ff7c1e96529c6c555ce9e51205e900",
let keyfileWrongMac = %*{
"keyfile": {
"crypto": {
"cipher": "aes-128-ctr",
"cipherparams": {"iv": "6087dab2f9fdbbfaddc31a909735c1e6"},
"ciphertext":
"5318b4d5bcd28de64ee5559e671353e16f075ecae9f99c7a79a38af5f869aa46",
"kdf": "pbkdf2",
"kdfparams": {
"c": 262144,
"dklen": 32,
"prf": "hmac-sha256",
"salt": "ae3cd4e7013836a3df6bd7241b12db061dbe2c6785853cce422d148a624ce0bd",
},
"id": "3198bc9c-6672-5ab3-d995-4942343ae5b6",
"version": 3,
"mac": "517ead924a9d0dc3124507e3393d175ce3ff7c1e96529c6c555ce9e51205e900",
},
"name": "test1",
"password": "testpassword",
"priv": "7a28b5ba57c53603b0b07b56bba752f7784bf506fa95edc395f5cf6c7514fe9d",
}
"id": "3198bc9c-6672-5ab3-d995-4942343ae5b6",
"version": 3,
},
"name": "test1",
"password": "testpassword",
"priv": "7a28b5ba57c53603b0b07b56bba752f7784bf506fa95edc395f5cf6c7514fe9d",
}
# Decryption with correct password
let expectedSecret = decodeHex(keyfileWrongMac.getOrDefault("priv").getStr())

View File

@ -669,11 +669,10 @@ procSuite "Waku Noise":
# <- s
# ...
# So we define accordingly the sequence of the pre-message public keys
let preMessagePKs: seq[NoisePublicKey] =
@[
toNoisePublicKey(getPublicKey(aliceStaticKey)),
toNoisePublicKey(getPublicKey(bobStaticKey)),
]
let preMessagePKs: seq[NoisePublicKey] = @[
toNoisePublicKey(getPublicKey(aliceStaticKey)),
toNoisePublicKey(getPublicKey(bobStaticKey)),
]
var aliceHS = initialize(
hsPattern = hsPattern,

View File

@ -117,11 +117,10 @@ procSuite "Waku Rendezvous":
## Given: A light client node with no relay protocol
let
clusterId = 10.uint16
configuredShards =
@[
RelayShard(clusterId: clusterId, shardId: 0),
RelayShard(clusterId: clusterId, shardId: 1),
]
configuredShards = @[
RelayShard(clusterId: clusterId, shardId: 0),
RelayShard(clusterId: clusterId, shardId: 1),
]
let lightClient = newTestWakuNode(
generateSecp256k1Key(), parseIpAddress("0.0.0.0"), Port(0), clusterId = clusterId

View File

@ -12,14 +12,14 @@ import waku/node/waku_switch, ./testlib/common, ./testlib/wakucore
proc newCircuitRelayClientSwitch(relayClient: RelayClient): Switch =
SwitchBuilder
.new()
.withRng(rng())
.withAddresses(@[MultiAddress.init("/ip4/0.0.0.0/tcp/0").tryGet()])
.withTcpTransport()
.withMplex()
.withNoise()
.withCircuitRelay(relayClient)
.build()
.new()
.withRng(rng())
.withAddresses(@[MultiAddress.init("/ip4/0.0.0.0/tcp/0").tryGet()])
.withTcpTransport()
.withMplex()
.withNoise()
.withCircuitRelay(relayClient)
.build()
suite "Waku Switch":
asyncTest "Waku Switch works with AutoNat":

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -121,16 +121,15 @@ suite "Waku Archive - Retention policy":
retentionPolicy: RetentionPolicy =
CapacityRetentionPolicy.new(capacity = capacity)
let messages =
@[
fakeWakuMessage(contentTopic = DefaultContentTopic, ts = ts(0)),
fakeWakuMessage(contentTopic = DefaultContentTopic, ts = ts(1)),
fakeWakuMessage(contentTopic = contentTopic, ts = ts(2)),
fakeWakuMessage(contentTopic = contentTopic, ts = ts(3)),
fakeWakuMessage(contentTopic = contentTopic, ts = ts(4)),
fakeWakuMessage(contentTopic = contentTopic, ts = ts(5)),
fakeWakuMessage(contentTopic = contentTopic, ts = ts(6)),
]
let messages = @[
fakeWakuMessage(contentTopic = DefaultContentTopic, ts = ts(0)),
fakeWakuMessage(contentTopic = DefaultContentTopic, ts = ts(1)),
fakeWakuMessage(contentTopic = contentTopic, ts = ts(2)),
fakeWakuMessage(contentTopic = contentTopic, ts = ts(3)),
fakeWakuMessage(contentTopic = contentTopic, ts = ts(4)),
fakeWakuMessage(contentTopic = contentTopic, ts = ts(5)),
fakeWakuMessage(contentTopic = contentTopic, ts = ts(6)),
]
## When
for msg in messages:

View File

@ -36,14 +36,13 @@ suite "Waku Archive - message handling":
let archive = newWakuArchive(driver)
## Given
let msgList =
@[
fakeWakuMessage(ephemeral = false, payload = "1"),
fakeWakuMessage(ephemeral = true, payload = "2"),
fakeWakuMessage(ephemeral = true, payload = "3"),
fakeWakuMessage(ephemeral = true, payload = "4"),
fakeWakuMessage(ephemeral = false, payload = "5"),
]
let msgList = @[
fakeWakuMessage(ephemeral = false, payload = "1"),
fakeWakuMessage(ephemeral = true, payload = "2"),
fakeWakuMessage(ephemeral = true, payload = "3"),
fakeWakuMessage(ephemeral = true, payload = "4"),
fakeWakuMessage(ephemeral = false, payload = "5"),
]
## When
for msg in msgList:
@ -127,39 +126,38 @@ suite "Waku Archive - message handling":
procSuite "Waku Archive - find messages":
## Fixtures
let timeOrigin = now()
let msgListA =
@[
fakeWakuMessage(
@[byte 00], contentTopic = ContentTopic("2"), ts = ts(00, timeOrigin)
),
fakeWakuMessage(
@[byte 01], contentTopic = ContentTopic("1"), ts = ts(10, timeOrigin)
),
fakeWakuMessage(
@[byte 02], contentTopic = ContentTopic("2"), ts = ts(20, timeOrigin)
),
fakeWakuMessage(
@[byte 03], contentTopic = ContentTopic("1"), ts = ts(30, timeOrigin)
),
fakeWakuMessage(
@[byte 04], contentTopic = ContentTopic("2"), ts = ts(40, timeOrigin)
),
fakeWakuMessage(
@[byte 05], contentTopic = ContentTopic("1"), ts = ts(50, timeOrigin)
),
fakeWakuMessage(
@[byte 06], contentTopic = ContentTopic("2"), ts = ts(60, timeOrigin)
),
fakeWakuMessage(
@[byte 07], contentTopic = ContentTopic("1"), ts = ts(70, timeOrigin)
),
fakeWakuMessage(
@[byte 08], contentTopic = ContentTopic("2"), ts = ts(80, timeOrigin)
),
fakeWakuMessage(
@[byte 09], contentTopic = ContentTopic("1"), ts = ts(90, timeOrigin)
),
]
let msgListA = @[
fakeWakuMessage(
@[byte 00], contentTopic = ContentTopic("2"), ts = ts(00, timeOrigin)
),
fakeWakuMessage(
@[byte 01], contentTopic = ContentTopic("1"), ts = ts(10, timeOrigin)
),
fakeWakuMessage(
@[byte 02], contentTopic = ContentTopic("2"), ts = ts(20, timeOrigin)
),
fakeWakuMessage(
@[byte 03], contentTopic = ContentTopic("1"), ts = ts(30, timeOrigin)
),
fakeWakuMessage(
@[byte 04], contentTopic = ContentTopic("2"), ts = ts(40, timeOrigin)
),
fakeWakuMessage(
@[byte 05], contentTopic = ContentTopic("1"), ts = ts(50, timeOrigin)
),
fakeWakuMessage(
@[byte 06], contentTopic = ContentTopic("2"), ts = ts(60, timeOrigin)
),
fakeWakuMessage(
@[byte 07], contentTopic = ContentTopic("1"), ts = ts(70, timeOrigin)
),
fakeWakuMessage(
@[byte 08], contentTopic = ContentTopic("2"), ts = ts(80, timeOrigin)
),
fakeWakuMessage(
@[byte 09], contentTopic = ContentTopic("1"), ts = ts(90, timeOrigin)
),
]
let archiveA = block:
let
@ -446,19 +444,18 @@ procSuite "Waku Archive - find messages":
driver = newSqliteArchiveDriver()
archive = newWakuArchive(driver)
let msgList =
@[
fakeWakuMessage(@[byte 0], contentTopic = ContentTopic("2")),
fakeWakuMessage(@[byte 1], contentTopic = DefaultContentTopic),
fakeWakuMessage(@[byte 2], contentTopic = DefaultContentTopic),
fakeWakuMessage(@[byte 3], contentTopic = DefaultContentTopic),
fakeWakuMessage(@[byte 4], contentTopic = DefaultContentTopic),
fakeWakuMessage(@[byte 5], contentTopic = DefaultContentTopic),
fakeWakuMessage(@[byte 6], contentTopic = DefaultContentTopic),
fakeWakuMessage(@[byte 7], contentTopic = DefaultContentTopic),
fakeWakuMessage(@[byte 8], contentTopic = DefaultContentTopic),
fakeWakuMessage(@[byte 9], contentTopic = ContentTopic("2")),
]
let msgList = @[
fakeWakuMessage(@[byte 0], contentTopic = ContentTopic("2")),
fakeWakuMessage(@[byte 1], contentTopic = DefaultContentTopic),
fakeWakuMessage(@[byte 2], contentTopic = DefaultContentTopic),
fakeWakuMessage(@[byte 3], contentTopic = DefaultContentTopic),
fakeWakuMessage(@[byte 4], contentTopic = DefaultContentTopic),
fakeWakuMessage(@[byte 5], contentTopic = DefaultContentTopic),
fakeWakuMessage(@[byte 6], contentTopic = DefaultContentTopic),
fakeWakuMessage(@[byte 7], contentTopic = DefaultContentTopic),
fakeWakuMessage(@[byte 8], contentTopic = DefaultContentTopic),
fakeWakuMessage(@[byte 9], contentTopic = ContentTopic("2")),
]
for msg in msgList:
require (

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -33,14 +33,13 @@ suite "Waku Archive - message handling":
let archive = newWakuArchive(driver)
## Given
let msgList =
@[
fakeWakuMessage(ephemeral = false, payload = "1"),
fakeWakuMessage(ephemeral = true, payload = "2"),
fakeWakuMessage(ephemeral = true, payload = "3"),
fakeWakuMessage(ephemeral = true, payload = "4"),
fakeWakuMessage(ephemeral = false, payload = "5"),
]
let msgList = @[
fakeWakuMessage(ephemeral = false, payload = "1"),
fakeWakuMessage(ephemeral = true, payload = "2"),
fakeWakuMessage(ephemeral = true, payload = "3"),
fakeWakuMessage(ephemeral = true, payload = "4"),
fakeWakuMessage(ephemeral = false, payload = "5"),
]
## When
for msg in msgList:
@ -108,39 +107,38 @@ suite "Waku Archive - message handling":
procSuite "Waku Archive - find messages":
## Fixtures
let timeOrigin = now()
let msgListA =
@[
fakeWakuMessage(
@[byte 00], contentTopic = ContentTopic("2"), ts = ts(00, timeOrigin)
),
fakeWakuMessage(
@[byte 01], contentTopic = ContentTopic("1"), ts = ts(10, timeOrigin)
),
fakeWakuMessage(
@[byte 02], contentTopic = ContentTopic("2"), ts = ts(20, timeOrigin)
),
fakeWakuMessage(
@[byte 03], contentTopic = ContentTopic("1"), ts = ts(30, timeOrigin)
),
fakeWakuMessage(
@[byte 04], contentTopic = ContentTopic("2"), ts = ts(40, timeOrigin)
),
fakeWakuMessage(
@[byte 05], contentTopic = ContentTopic("1"), ts = ts(50, timeOrigin)
),
fakeWakuMessage(
@[byte 06], contentTopic = ContentTopic("2"), ts = ts(60, timeOrigin)
),
fakeWakuMessage(
@[byte 07], contentTopic = ContentTopic("1"), ts = ts(70, timeOrigin)
),
fakeWakuMessage(
@[byte 08], contentTopic = ContentTopic("2"), ts = ts(80, timeOrigin)
),
fakeWakuMessage(
@[byte 09], contentTopic = ContentTopic("1"), ts = ts(90, timeOrigin)
),
]
let msgListA = @[
fakeWakuMessage(
@[byte 00], contentTopic = ContentTopic("2"), ts = ts(00, timeOrigin)
),
fakeWakuMessage(
@[byte 01], contentTopic = ContentTopic("1"), ts = ts(10, timeOrigin)
),
fakeWakuMessage(
@[byte 02], contentTopic = ContentTopic("2"), ts = ts(20, timeOrigin)
),
fakeWakuMessage(
@[byte 03], contentTopic = ContentTopic("1"), ts = ts(30, timeOrigin)
),
fakeWakuMessage(
@[byte 04], contentTopic = ContentTopic("2"), ts = ts(40, timeOrigin)
),
fakeWakuMessage(
@[byte 05], contentTopic = ContentTopic("1"), ts = ts(50, timeOrigin)
),
fakeWakuMessage(
@[byte 06], contentTopic = ContentTopic("2"), ts = ts(60, timeOrigin)
),
fakeWakuMessage(
@[byte 07], contentTopic = ContentTopic("1"), ts = ts(70, timeOrigin)
),
fakeWakuMessage(
@[byte 08], contentTopic = ContentTopic("2"), ts = ts(80, timeOrigin)
),
fakeWakuMessage(
@[byte 09], contentTopic = ContentTopic("1"), ts = ts(90, timeOrigin)
),
]
let archiveA = block:
let
@ -433,19 +431,18 @@ procSuite "Waku Archive - find messages":
driver = newSqliteArchiveDriver()
archive = newWakuArchive(driver)
let msgList =
@[
fakeWakuMessage(@[byte 0], contentTopic = ContentTopic("2")),
fakeWakuMessage(@[byte 1], contentTopic = DefaultContentTopic),
fakeWakuMessage(@[byte 2], contentTopic = DefaultContentTopic),
fakeWakuMessage(@[byte 3], contentTopic = DefaultContentTopic),
fakeWakuMessage(@[byte 4], contentTopic = DefaultContentTopic),
fakeWakuMessage(@[byte 5], contentTopic = DefaultContentTopic),
fakeWakuMessage(@[byte 6], contentTopic = DefaultContentTopic),
fakeWakuMessage(@[byte 7], contentTopic = DefaultContentTopic),
fakeWakuMessage(@[byte 8], contentTopic = DefaultContentTopic),
fakeWakuMessage(@[byte 9], contentTopic = ContentTopic("2")),
]
let msgList = @[
fakeWakuMessage(@[byte 0], contentTopic = ContentTopic("2")),
fakeWakuMessage(@[byte 1], contentTopic = DefaultContentTopic),
fakeWakuMessage(@[byte 2], contentTopic = DefaultContentTopic),
fakeWakuMessage(@[byte 3], contentTopic = DefaultContentTopic),
fakeWakuMessage(@[byte 4], contentTopic = DefaultContentTopic),
fakeWakuMessage(@[byte 5], contentTopic = DefaultContentTopic),
fakeWakuMessage(@[byte 6], contentTopic = DefaultContentTopic),
fakeWakuMessage(@[byte 7], contentTopic = DefaultContentTopic),
fakeWakuMessage(@[byte 8], contentTopic = DefaultContentTopic),
fakeWakuMessage(@[byte 9], contentTopic = ContentTopic("2")),
]
for msg in msgList:
require (

View File

@ -1,5 +1,6 @@
{.used.}
import std/options
import
results,
testutils/unittests,
@ -155,9 +156,9 @@ suite "Waku Core - Peers":
## When
var builder = EnrBuilder.init(enrPrivKey, seqNum = enrSeqNum)
builder.withIpAddressAndPorts(
ipAddr = some(parseIpAddress("127.0.0.1")),
tcpPort = some(Port(0)),
udpPort = some(Port(0)),
ipAddr = options.some(parseIpAddress("127.0.0.1")),
tcpPort = options.some(Port(0)),
udpPort = options.some(Port(0)),
)
builder.withWakuCapabilities(Capabilities.Relay, Capabilities.Store)

View File

@ -140,14 +140,13 @@ suite "Discovery Mechanisms for Shards":
test "Bit Vector Representation":
# Given a valid bit vector and its representation
let
bitVector: seq[byte] =
@[
0, 73, 2, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
]
bitVector: seq[byte] = @[
0, 73, 2, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
]
clusterId: uint16 = 73 # bitVector's clusterId
shardIds: seq[uint16] = @[1u16, 10u16] # bitVector's shardIds

View File

@ -79,11 +79,10 @@ suite "Waku rln relay":
let rln = rlnInstance.get()
# prepare the input
let msg =
@[
"126f4c026cd731979365f79bd345a46d673c5a3f6f588bdc718e6356d02b6fdc".toBytes(),
"1f0e5db2b69d599166ab16219a97b82b662085c93220382b39f9f911d3b943b1".toBytes(),
]
let msg = @[
"126f4c026cd731979365f79bd345a46d673c5a3f6f588bdc718e6356d02b6fdc".toBytes(),
"1f0e5db2b69d599166ab16219a97b82b662085c93220382b39f9f911d3b943b1".toBytes(),
]
let hashRes = poseidon(msg)
@ -457,7 +456,7 @@ suite "Waku rln relay":
password = password,
appInfo = RLNAppInfo,
)
.isOk()
.isOk()
let readKeystoreRes = getMembershipCredentials(
path = filepath,

View File

@ -138,8 +138,10 @@ procSuite "WakuNode - RLN relay":
WakuMessage(payload: @payload, contentTopic: contentTopic, timestamp: now())
doAssert(
node1.wakuRlnRelay
.unsafeAppendRLNProof(message, node1.wakuRlnRelay.getCurrentEpoch(), MessageId(0))
.isOk()
.unsafeAppendRLNProof(
message, node1.wakuRlnRelay.getCurrentEpoch(), MessageId(0)
)
.isOk()
)
info " Nodes participating in the test",
@ -223,11 +225,10 @@ procSuite "WakuNode - RLN relay":
let shards =
@[RelayShard(clusterId: 0, shardId: 0), RelayShard(clusterId: 0, shardId: 1)]
let contentTopics =
@[
ContentTopic("/waku/2/content-topic-a/proto"),
ContentTopic("/waku/2/content-topic-b/proto"),
]
let contentTopics = @[
ContentTopic("/waku/2/content-topic-a/proto"),
ContentTopic("/waku/2/content-topic-b/proto"),
]
# connect them together
await node1.connectToNodes(@[node2.switch.peerInfo.toRemotePeerInfo()])

View File

@ -535,22 +535,21 @@ proc runAnvil*(
let anvilPath = getAnvilPath()
info "Anvil path", anvilPath
var args =
@[
"--port",
$port,
"--gas-limit",
"30000000",
"--gas-price",
"7",
"--base-fee",
"7",
"--balance",
"10000000000",
"--chain-id",
$chainId,
"--disable-min-priority-fee",
]
var args = @[
"--port",
$port,
"--gas-limit",
"30000000",
"--gas-price",
"7",
"--base-fee",
"7",
"--balance",
"10000000000",
"--chain-id",
$chainId,
"--disable-min-priority-fee",
]
# Add state file argument if provided
if stateFile.isSome():

View File

@ -35,24 +35,23 @@ suite "Store Client":
hash1 = computeMessageHash(DefaultPubsubTopic, message1)
hash2 = computeMessageHash(DefaultPubsubTopic, message2)
hash3 = computeMessageHash(DefaultPubsubTopic, message3)
messageSeq =
@[
WakuMessageKeyValue(
messageHash: hash1,
message: some(message1),
pubsubTopic: some(DefaultPubsubTopic),
),
WakuMessageKeyValue(
messageHash: hash2,
message: some(message2),
pubsubTopic: some(DefaultPubsubTopic),
),
WakuMessageKeyValue(
messageHash: hash3,
message: some(message3),
pubsubTopic: some(DefaultPubsubTopic),
),
]
messageSeq = @[
WakuMessageKeyValue(
messageHash: hash1,
message: some(message1),
pubsubTopic: some(DefaultPubsubTopic),
),
WakuMessageKeyValue(
messageHash: hash2,
message: some(message2),
pubsubTopic: some(DefaultPubsubTopic),
),
WakuMessageKeyValue(
messageHash: hash3,
message: some(message3),
pubsubTopic: some(DefaultPubsubTopic),
),
]
handlerFuture = newHistoryFuture()
handler = proc(req: StoreQueryRequest): Future[StoreQueryResult] {.async, gcsafe.} =
var request = req

View File

@ -50,19 +50,18 @@ suite "Store Resume - End to End":
var clientDriver {.threadvar.}: ArchiveDriver
asyncSetup:
let messages =
@[
fakeWakuMessage(@[byte 00]),
fakeWakuMessage(@[byte 01]),
fakeWakuMessage(@[byte 02]),
fakeWakuMessage(@[byte 03]),
fakeWakuMessage(@[byte 04]),
fakeWakuMessage(@[byte 05]),
fakeWakuMessage(@[byte 06]),
fakeWakuMessage(@[byte 07]),
fakeWakuMessage(@[byte 08]),
fakeWakuMessage(@[byte 09]),
]
let messages = @[
fakeWakuMessage(@[byte 00]),
fakeWakuMessage(@[byte 01]),
fakeWakuMessage(@[byte 02]),
fakeWakuMessage(@[byte 03]),
fakeWakuMessage(@[byte 04]),
fakeWakuMessage(@[byte 05]),
fakeWakuMessage(@[byte 06]),
fakeWakuMessage(@[byte 07]),
fakeWakuMessage(@[byte 08]),
fakeWakuMessage(@[byte 09]),
]
let
serverKey = generateSecp256k1Key()

View File

@ -32,19 +32,18 @@ import
procSuite "WakuNode - Store":
## Fixtures
let timeOrigin = now()
let msgListA =
@[
fakeWakuMessage(@[byte 00], ts = ts(00, timeOrigin)),
fakeWakuMessage(@[byte 01], ts = ts(10, timeOrigin)),
fakeWakuMessage(@[byte 02], ts = ts(20, timeOrigin)),
fakeWakuMessage(@[byte 03], ts = ts(30, timeOrigin)),
fakeWakuMessage(@[byte 04], ts = ts(40, timeOrigin)),
fakeWakuMessage(@[byte 05], ts = ts(50, timeOrigin)),
fakeWakuMessage(@[byte 06], ts = ts(60, timeOrigin)),
fakeWakuMessage(@[byte 07], ts = ts(70, timeOrigin)),
fakeWakuMessage(@[byte 08], ts = ts(80, timeOrigin)),
fakeWakuMessage(@[byte 09], ts = ts(90, timeOrigin)),
]
let msgListA = @[
fakeWakuMessage(@[byte 00], ts = ts(00, timeOrigin)),
fakeWakuMessage(@[byte 01], ts = ts(10, timeOrigin)),
fakeWakuMessage(@[byte 02], ts = ts(20, timeOrigin)),
fakeWakuMessage(@[byte 03], ts = ts(30, timeOrigin)),
fakeWakuMessage(@[byte 04], ts = ts(40, timeOrigin)),
fakeWakuMessage(@[byte 05], ts = ts(50, timeOrigin)),
fakeWakuMessage(@[byte 06], ts = ts(60, timeOrigin)),
fakeWakuMessage(@[byte 07], ts = ts(70, timeOrigin)),
fakeWakuMessage(@[byte 08], ts = ts(80, timeOrigin)),
fakeWakuMessage(@[byte 09], ts = ts(90, timeOrigin)),
]
let hashes = msgListA.mapIt(computeMessageHash(DefaultPubsubTopic, it))

View File

@ -29,93 +29,91 @@ when defined(waku_exp_store_resume):
## Fixtures
let storeA = block:
let store = newTestMessageStore()
let msgList =
@[
fakeWakuMessage(
payload = @[byte 0], contentTopic = ContentTopic("2"), ts = ts(0)
),
fakeWakuMessage(
payload = @[byte 1], contentTopic = ContentTopic("1"), ts = ts(1)
),
fakeWakuMessage(
payload = @[byte 2], contentTopic = ContentTopic("2"), ts = ts(2)
),
fakeWakuMessage(
payload = @[byte 3], contentTopic = ContentTopic("1"), ts = ts(3)
),
fakeWakuMessage(
payload = @[byte 4], contentTopic = ContentTopic("2"), ts = ts(4)
),
fakeWakuMessage(
payload = @[byte 5], contentTopic = ContentTopic("1"), ts = ts(5)
),
fakeWakuMessage(
payload = @[byte 6], contentTopic = ContentTopic("2"), ts = ts(6)
),
fakeWakuMessage(
payload = @[byte 7], contentTopic = ContentTopic("1"), ts = ts(7)
),
fakeWakuMessage(
payload = @[byte 8], contentTopic = ContentTopic("2"), ts = ts(8)
),
fakeWakuMessage(
payload = @[byte 9], contentTopic = ContentTopic("1"), ts = ts(9)
),
]
let msgList = @[
fakeWakuMessage(
payload = @[byte 0], contentTopic = ContentTopic("2"), ts = ts(0)
),
fakeWakuMessage(
payload = @[byte 1], contentTopic = ContentTopic("1"), ts = ts(1)
),
fakeWakuMessage(
payload = @[byte 2], contentTopic = ContentTopic("2"), ts = ts(2)
),
fakeWakuMessage(
payload = @[byte 3], contentTopic = ContentTopic("1"), ts = ts(3)
),
fakeWakuMessage(
payload = @[byte 4], contentTopic = ContentTopic("2"), ts = ts(4)
),
fakeWakuMessage(
payload = @[byte 5], contentTopic = ContentTopic("1"), ts = ts(5)
),
fakeWakuMessage(
payload = @[byte 6], contentTopic = ContentTopic("2"), ts = ts(6)
),
fakeWakuMessage(
payload = @[byte 7], contentTopic = ContentTopic("1"), ts = ts(7)
),
fakeWakuMessage(
payload = @[byte 8], contentTopic = ContentTopic("2"), ts = ts(8)
),
fakeWakuMessage(
payload = @[byte 9], contentTopic = ContentTopic("1"), ts = ts(9)
),
]
for msg in msgList:
require store
.put(
DefaultPubsubTopic,
msg,
computeDigest(msg),
computeMessageHash(DefaultPubsubTopic, msg),
msg.timestamp,
)
.isOk()
.put(
DefaultPubsubTopic,
msg,
computeDigest(msg),
computeMessageHash(DefaultPubsubTopic, msg),
msg.timestamp,
)
.isOk()
store
let storeB = block:
let store = newTestMessageStore()
let msgList2 =
@[
fakeWakuMessage(
payload = @[byte 0], contentTopic = ContentTopic("2"), ts = ts(0)
),
fakeWakuMessage(
payload = @[byte 11], contentTopic = ContentTopic("1"), ts = ts(1)
),
fakeWakuMessage(
payload = @[byte 12], contentTopic = ContentTopic("2"), ts = ts(2)
),
fakeWakuMessage(
payload = @[byte 3], contentTopic = ContentTopic("1"), ts = ts(3)
),
fakeWakuMessage(
payload = @[byte 4], contentTopic = ContentTopic("2"), ts = ts(4)
),
fakeWakuMessage(
payload = @[byte 5], contentTopic = ContentTopic("1"), ts = ts(5)
),
fakeWakuMessage(
payload = @[byte 13], contentTopic = ContentTopic("2"), ts = ts(6)
),
fakeWakuMessage(
payload = @[byte 14], contentTopic = ContentTopic("1"), ts = ts(7)
),
]
let msgList2 = @[
fakeWakuMessage(
payload = @[byte 0], contentTopic = ContentTopic("2"), ts = ts(0)
),
fakeWakuMessage(
payload = @[byte 11], contentTopic = ContentTopic("1"), ts = ts(1)
),
fakeWakuMessage(
payload = @[byte 12], contentTopic = ContentTopic("2"), ts = ts(2)
),
fakeWakuMessage(
payload = @[byte 3], contentTopic = ContentTopic("1"), ts = ts(3)
),
fakeWakuMessage(
payload = @[byte 4], contentTopic = ContentTopic("2"), ts = ts(4)
),
fakeWakuMessage(
payload = @[byte 5], contentTopic = ContentTopic("1"), ts = ts(5)
),
fakeWakuMessage(
payload = @[byte 13], contentTopic = ContentTopic("2"), ts = ts(6)
),
fakeWakuMessage(
payload = @[byte 14], contentTopic = ContentTopic("1"), ts = ts(7)
),
]
for msg in msgList2:
require store
.put(
DefaultPubsubTopic,
msg,
computeDigest(msg),
computeMessageHash(DefaultPubsubTopic, msg),
msg.timestamp,
)
.isOk()
.put(
DefaultPubsubTopic,
msg,
computeDigest(msg),
computeMessageHash(DefaultPubsubTopic, msg),
msg.timestamp,
)
.isOk()
store
@ -136,11 +134,10 @@ when defined(waku_exp_store_resume):
client = newTestWakuStoreClient(clientSwitch)
## Given
let peers =
@[
serverSwitchA.peerInfo.toRemotePeerInfo(),
serverSwitchB.peerInfo.toRemotePeerInfo(),
]
let peers = @[
serverSwitchA.peerInfo.toRemotePeerInfo(),
serverSwitchB.peerInfo.toRemotePeerInfo(),
]
let req = HistoryQuery(contentTopics: @[DefaultContentTopic], pageSize: 5)
## When
@ -226,12 +223,11 @@ when defined(waku_exp_store_resume):
client = await newTestWakuStore(clientSwitch)
## Given
let peers =
@[
offlineSwitch.peerInfo.toRemotePeerInfo(),
serverASwitch.peerInfo.toRemotePeerInfo(),
serverBSwitch.peerInfo.toRemotePeerInfo(),
]
let peers = @[
offlineSwitch.peerInfo.toRemotePeerInfo(),
serverASwitch.peerInfo.toRemotePeerInfo(),
serverBSwitch.peerInfo.toRemotePeerInfo(),
]
## When
let res = await client.resume(some(peers))
@ -323,11 +319,11 @@ when defined(waku_exp_store_resume):
receivedTime3 = now() + getNanosecondTime(10)
digest3 = computeDigest(msg3)
require server.wakuStore.store
.put(DefaultPubsubTopic, msg3, digest3, receivedTime3)
.isOk()
.put(DefaultPubsubTopic, msg3, digest3, receivedTime3)
.isOk()
require client.wakuStore.store
.put(DefaultPubsubTopic, msg3, digest3, receivedTime3)
.isOk()
.put(DefaultPubsubTopic, msg3, digest3, receivedTime3)
.isOk()
let serverPeer = server.peerInfo.toRemotePeerInfo()

View File

@ -100,11 +100,10 @@ procSuite "Waku Store - RPC codec":
direction: some(PagingDirection.BACKWARD),
)
query = HistoryQueryRPC(
contentFilters:
@[
HistoryContentFilterRPC(contentTopic: DefaultContentTopic),
HistoryContentFilterRPC(contentTopic: DefaultContentTopic),
],
contentFilters: @[
HistoryContentFilterRPC(contentTopic: DefaultContentTopic),
HistoryContentFilterRPC(contentTopic: DefaultContentTopic),
],
pagingInfo: some(pagingInfo),
startTime: some(Timestamp(10)),
endTime: some(Timestamp(11)),

View File

@ -30,19 +30,18 @@ import
procSuite "WakuNode - Store Legacy":
## Fixtures
let timeOrigin = now()
let msgListA =
@[
fakeWakuMessage(@[byte 00], ts = ts(00, timeOrigin)),
fakeWakuMessage(@[byte 01], ts = ts(10, timeOrigin)),
fakeWakuMessage(@[byte 02], ts = ts(20, timeOrigin)),
fakeWakuMessage(@[byte 03], ts = ts(30, timeOrigin)),
fakeWakuMessage(@[byte 04], ts = ts(40, timeOrigin)),
fakeWakuMessage(@[byte 05], ts = ts(50, timeOrigin)),
fakeWakuMessage(@[byte 06], ts = ts(60, timeOrigin)),
fakeWakuMessage(@[byte 07], ts = ts(70, timeOrigin)),
fakeWakuMessage(@[byte 08], ts = ts(80, timeOrigin)),
fakeWakuMessage(@[byte 09], ts = ts(90, timeOrigin)),
]
let msgListA = @[
fakeWakuMessage(@[byte 00], ts = ts(00, timeOrigin)),
fakeWakuMessage(@[byte 01], ts = ts(10, timeOrigin)),
fakeWakuMessage(@[byte 02], ts = ts(20, timeOrigin)),
fakeWakuMessage(@[byte 03], ts = ts(30, timeOrigin)),
fakeWakuMessage(@[byte 04], ts = ts(40, timeOrigin)),
fakeWakuMessage(@[byte 05], ts = ts(50, timeOrigin)),
fakeWakuMessage(@[byte 06], ts = ts(60, timeOrigin)),
fakeWakuMessage(@[byte 07], ts = ts(70, timeOrigin)),
fakeWakuMessage(@[byte 08], ts = ts(80, timeOrigin)),
fakeWakuMessage(@[byte 09], ts = ts(90, timeOrigin)),
]
let archiveA = block:
let driver = newSqliteArchiveDriver()

View File

@ -119,12 +119,11 @@ suite "Waku Sync reconciliation":
pubsubTopics: @[DefaultPubsubTopic],
contentTopics: @[DefaultContentTopic],
ranges: @[(wholeRange, RangeType.Fingerprint)],
fingerprints:
@[
remote.computeFingerprint(
wholeRange, @[DefaultPubsubTopic], @[DefaultContentTopic]
)
],
fingerprints: @[
remote.computeFingerprint(
wholeRange, @[DefaultPubsubTopic], @[DefaultContentTopic]
)
],
itemSets: @[],
)
@ -180,12 +179,11 @@ suite "Waku Sync reconciliation":
pubsubTopics: @[DefaultPubsubTopic],
contentTopics: @[DefaultContentTopic],
ranges: @[(sliceWhole, RangeType.Fingerprint)],
fingerprints:
@[
remote.computeFingerprint(
sliceWhole, @[DefaultPubsubTopic], @[DefaultContentTopic]
)
],
fingerprints: @[
remote.computeFingerprint(
sliceWhole, @[DefaultPubsubTopic], @[DefaultContentTopic]
)
],
itemSets: @[],
)
@ -207,12 +205,11 @@ suite "Waku Sync reconciliation":
pubsubTopics: @[DefaultPubsubTopic],
contentTopics: @[DefaultContentTopic],
ranges: @[(subSlice, RangeType.Fingerprint)],
fingerprints:
@[
remote.computeFingerprint(
subSlice, @[DefaultPubsubTopic], @[DefaultContentTopic]
)
],
fingerprints: @[
remote.computeFingerprint(
subSlice, @[DefaultPubsubTopic], @[DefaultContentTopic]
)
],
itemSets: @[],
)
@ -272,12 +269,9 @@ suite "Waku Sync reconciliation":
pubsubTopics: @[DefaultPubsubTopic],
contentTopics: @[DefaultContentTopic],
ranges: @[(slice, RangeType.Fingerprint)],
fingerprints:
@[
remote.computeFingerprint(
slice, @[DefaultPubsubTopic], @[DefaultContentTopic]
)
],
fingerprints: @[
remote.computeFingerprint(slice, @[DefaultPubsubTopic], @[DefaultContentTopic])
],
itemSets: @[],
)

View File

@ -44,12 +44,9 @@ suite "Waku Sync reconciliation":
pubsubTopics: @[DefaultPubsubTopic],
contentTopics: @[DefaultContentTopic],
ranges: @[(whole, RangeType.Fingerprint)],
fingerprints:
@[
remote.computeFingerprint(
whole, @[DefaultPubsubTopic], @[DefaultContentTopic]
)
],
fingerprints: @[
remote.computeFingerprint(whole, @[DefaultPubsubTopic], @[DefaultContentTopic])
],
itemSets: @[],
)
let rep1 = local.processPayload(p1, s1, r1)
@ -131,15 +128,10 @@ suite "Waku Sync reconciliation":
pubsubTopics: @[DefaultPubsubTopic],
contentTopics: @[DefaultContentTopic],
ranges: @[(sliceA, RangeType.Fingerprint), (sliceB, RangeType.Fingerprint)],
fingerprints:
@[
remote.computeFingerprint(
sliceA, @[DefaultPubsubTopic], @[DefaultContentTopic]
),
remote.computeFingerprint(
sliceB, @[DefaultPubsubTopic], @[DefaultContentTopic]
),
],
fingerprints: @[
remote.computeFingerprint(sliceA, @[DefaultPubsubTopic], @[DefaultContentTopic]),
remote.computeFingerprint(sliceB, @[DefaultPubsubTopic], @[DefaultContentTopic]),
],
itemSets: @[],
)
let reply = local.processPayload(payload, s, r)
@ -180,12 +172,9 @@ suite "Waku Sync reconciliation":
pubsubTopics: @[DefaultPubsubTopic],
contentTopics: @[DefaultContentTopic],
ranges: @[(slice, RangeType.Fingerprint)],
fingerprints:
@[
remote.computeFingerprint(
slice, @[DefaultPubsubTopic], @[DefaultContentTopic]
)
],
fingerprints: @[
remote.computeFingerprint(slice, @[DefaultPubsubTopic], @[DefaultContentTopic])
],
itemSets: @[],
)
let reply = local.processPayload(p, toS, toR)
@ -236,12 +225,9 @@ suite "Waku Sync reconciliation":
pubsubTopics: @[DefaultPubsubTopic],
contentTopics: @[DefaultContentTopic],
ranges: @[(s, RangeType.Fingerprint)],
fingerprints:
@[
remote.computeFingerprint(
s, @[DefaultPubsubTopic], @[DefaultContentTopic]
)
],
fingerprints: @[
remote.computeFingerprint(s, @[DefaultPubsubTopic], @[DefaultContentTopic])
],
itemSets: @[],
),
sendQ,

View File

@ -176,12 +176,11 @@ suite "Waku v2 Rest API - Filter V2":
)
discard await restFilterTest.client.filterPostSubscriptions(requestBody)
let contentFilters =
@[
ContentTopic("1"),
ContentTopic("2"),
ContentTopic("3"), # ,ContentTopic("4") # Keep this subscription for check
]
let contentFilters = @[
ContentTopic("1"),
ContentTopic("2"),
ContentTopic("3"), # ,ContentTopic("4") # Keep this subscription for check
]
let requestBodyUnsub = FilterUnsubscribeRequest(
requestId: "4321",

View File

@ -193,15 +193,14 @@ suite "Waku v2 Rest API - Relay":
let pubSubTopic = "/waku/2/rs/0/0"
var messages =
@[
fakeWakuMessage(
contentTopic = "content-topic-x",
payload = toBytes("TEST-1"),
meta = toBytes("test-meta"),
ephemeral = true,
)
]
var messages = @[
fakeWakuMessage(
contentTopic = "content-topic-x",
payload = toBytes("TEST-1"),
meta = toBytes("test-meta"),
ephemeral = true,
)
]
# Prevent duplicate messages
for i in 0 ..< 2:
@ -348,12 +347,11 @@ suite "Waku v2 Rest API - Relay":
installRelayApiHandlers(restServer.router, node, cache)
restServer.start()
let contentTopics =
@[
ContentTopic("/app-1/2/default-content/proto"),
ContentTopic("/app-2/2/default-content/proto"),
ContentTopic("/app-3/2/default-content/proto"),
]
let contentTopics = @[
ContentTopic("/app-1/2/default-content/proto"),
ContentTopic("/app-2/2/default-content/proto"),
ContentTopic("/app-3/2/default-content/proto"),
]
# When
let client = newRestHttpClient(initTAddress(restAddress, restPort))
@ -394,13 +392,12 @@ suite "Waku v2 Rest API - Relay":
restPort = restServer.httpServer.address.port # update with bound port for client use
let contentTopics =
@[
ContentTopic("/waku/2/default-content1/proto"),
ContentTopic("/waku/2/default-content2/proto"),
ContentTopic("/waku/2/default-content3/proto"),
ContentTopic("/waku/2/default-contentX/proto"),
]
let contentTopics = @[
ContentTopic("/waku/2/default-content1/proto"),
ContentTopic("/waku/2/default-content2/proto"),
ContentTopic("/waku/2/default-content3/proto"),
ContentTopic("/waku/2/default-contentX/proto"),
]
let cache = MessageCache.init()
cache.contentSubscribe(contentTopics[0])
@ -454,10 +451,9 @@ suite "Waku v2 Rest API - Relay":
let contentTopic = DefaultContentTopic
var messages =
@[
fakeWakuMessage(contentTopic = DefaultContentTopic, payload = toBytes("TEST-1"))
]
var messages = @[
fakeWakuMessage(contentTopic = DefaultContentTopic, payload = toBytes("TEST-1"))
]
# Prevent duplicate messages
for i in 0 ..< 2:

Some files were not shown because too many files have changed in this diff Show More