chore: bump v0.38.0 (#3712)

This commit is contained in:
Darshan 2026-02-11 03:00:57 +05:30 committed by GitHub
parent 2c2d8e1c15
commit 6421685eca
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
33 changed files with 225 additions and 85 deletions

View File

@ -33,6 +33,7 @@ jobs:
make
cmake
upx
unzip
mingw-w64-x86_64-rust
mingw-w64-x86_64-postgresql
mingw-w64-x86_64-gcc
@ -44,6 +45,12 @@ jobs:
mingw-w64-x86_64-cmake
mingw-w64-x86_64-llvm
mingw-w64-x86_64-clang
mingw-w64-x86_64-nasm
- name: Manually install nasm
run: |
bash scripts/install_nasm_in_windows.sh
source $HOME/.bashrc
- name: Add UPX to PATH
run: |
@ -54,7 +61,7 @@ jobs:
- name: Verify dependencies
run: |
which upx gcc g++ make cmake cargo rustc python
which upx gcc g++ make cmake cargo rustc python nasm
- name: Updating submodules
run: git submodule update --init --recursive

View File

@ -43,6 +43,7 @@ ifeq ($(detected_OS),Windows)
LIBS = -lws2_32 -lbcrypt -liphlpapi -luserenv -lntdll -lminiupnpc -lnatpmp -lpq
NIM_PARAMS += $(foreach lib,$(LIBS),--passL:"$(lib)")
NIM_PARAMS += --passL:"-Wl,--allow-multiple-definition"
export PATH := /c/msys64/usr/bin:/c/msys64/mingw64/bin:/c/msys64/usr/lib:/c/msys64/mingw64/lib:$(PATH)

View File

@ -1,7 +1,8 @@
#!/usr/bin/env bash
# This script is used to build the rln library for the current platform, or download it from the
# release page if it is available.
# This script is used to build the rln library for the current platform.
# Previously downloaded prebuilt binaries, but due to compatibility issues
# we now always build from source.
set -e
@ -14,41 +15,26 @@ output_filename=$3
[[ -z "${rln_version}" ]] && { echo "No rln version specified"; exit 1; }
[[ -z "${output_filename}" ]] && { echo "No output filename specified"; exit 1; }
# Get the host triplet
host_triplet=$(rustc --version --verbose | awk '/host:/{print $2}')
echo "Building RLN library from source (version ${rln_version})..."
tarball="${host_triplet}"
tarball+="-stateless"
tarball+="-rln.tar.gz"
# Check if submodule version = version in Makefile
cargo metadata --format-version=1 --no-deps --manifest-path "${build_dir}/rln/Cargo.toml"
# Download the prebuilt rln library if it is available
if curl --silent --fail-with-body -L \
"https://github.com/vacp2p/zerokit/releases/download/$rln_version/$tarball" \
-o "${tarball}";
then
echo "Downloaded ${tarball}"
tar -xzf "${tarball}"
mv "release/librln.a" "${output_filename}"
rm -rf "${tarball}" release
detected_OS=$(uname -s)
if [[ "$detected_OS" == MINGW* || "$detected_OS" == MSYS* ]]; then
submodule_version=$(cargo metadata --format-version=1 --no-deps --manifest-path "${build_dir}/rln/Cargo.toml" | sed -n 's/.*"name":"rln","version":"\([^"]*\)".*/\1/p')
else
echo "Failed to download ${tarball}"
# Build rln instead
# first, check if submodule version = version in Makefile
cargo metadata --format-version=1 --no-deps --manifest-path "${build_dir}/rln/Cargo.toml"
detected_OS=$(uname -s)
if [[ "$detected_OS" == MINGW* || "$detected_OS" == MSYS* ]]; then
submodule_version=$(cargo metadata --format-version=1 --no-deps --manifest-path "${build_dir}/rln/Cargo.toml" | sed -n 's/.*"name":"rln","version":"\([^"]*\)".*/\1/p')
else
submodule_version=$(cargo metadata --format-version=1 --no-deps --manifest-path "${build_dir}/rln/Cargo.toml" | jq -r '.packages[] | select(.name == "rln") | .version')
fi
if [[ "v${submodule_version}" != "${rln_version}" ]]; then
echo "Submodule version (v${submodule_version}) does not match version in Makefile (${rln_version})"
echo "Please update the submodule to ${rln_version}"
exit 1
fi
# if submodule version = version in Makefile, build rln
cargo build --release -p rln --manifest-path "${build_dir}/rln/Cargo.toml"
cp "${build_dir}/target/release/librln.a" "${output_filename}"
submodule_version=$(cargo metadata --format-version=1 --no-deps --manifest-path "${build_dir}/rln/Cargo.toml" | jq -r '.packages[] | select(.name == "rln") | .version')
fi
if [[ "v${submodule_version}" != "${rln_version}" ]]; then
echo "Submodule version (v${submodule_version}) does not match version in Makefile (${rln_version})"
echo "Please update the submodule to ${rln_version}"
exit 1
fi
# Build rln from source
cargo build --release -p rln --manifest-path "${build_dir}/rln/Cargo.toml"
cp "${build_dir}/target/release/librln.a" "${output_filename}"
echo "Successfully built ${output_filename}"

View File

@ -0,0 +1,37 @@
#!/usr/bin/env sh
# Install a pinned NASM release into $HOME/.local/nasm and expose it on PATH
# via ~/.bashrc. Intended for Windows MSYS2/MinGW CI runners where the
# packaged nasm is unavailable or the wrong version.
set -e

NASM_VERSION="2.16.01"
# Exact versioned directory produced by the zip — referenced explicitly below
# instead of a `nasm-*` glob so stale extractions of other versions can never
# be picked up by accident.
NASM_DIR="nasm-${NASM_VERSION}"
NASM_ZIP="${NASM_DIR}-win64.zip"
NASM_URL="https://www.nasm.us/pub/nasm/releasebuilds/${NASM_VERSION}/win64/${NASM_ZIP}"
INSTALL_DIR="$HOME/.local/nasm"
BIN_DIR="$INSTALL_DIR/bin"

echo "Installing NASM ${NASM_VERSION}..."

# Create directories (mkdir -p also creates INSTALL_DIR for the cd below)
mkdir -p "$BIN_DIR"
cd "$INSTALL_DIR"

# Download — skipped when the archive is already cached from a previous run
if [ ! -f "$NASM_ZIP" ]; then
  echo "Downloading NASM..."
  curl -LO "$NASM_URL"
fi

# Extract (-o: overwrite without prompting, keeps the script non-interactive)
echo "Extracting..."
unzip -o "$NASM_ZIP"

# Move binaries from the versioned extraction directory
cp "${NASM_DIR}/nasm.exe" "$BIN_DIR/"
cp "${NASM_DIR}/ndisasm.exe" "$BIN_DIR/"

# Add to PATH in bashrc (idempotent). 2>/dev/null silences grep's error when
# ~/.bashrc does not exist yet; the `if !` guard still takes the append branch
# in that case and the >> redirections create the file.
if ! grep -q 'nasm/bin' "$HOME/.bashrc" 2>/dev/null; then
  echo '' >> "$HOME/.bashrc"
  echo '# NASM' >> "$HOME/.bashrc"
  echo 'export PATH="$HOME/.local/nasm/bin:$PATH"' >> "$HOME/.bashrc"
fi

104
scripts/regenerate_anvil_state.sh Executable file
View File

@ -0,0 +1,104 @@
#!/usr/bin/env bash
# Simple script to regenerate the Anvil state file.
# This creates a state file compatible with the current Foundry version by
# running a contract-deploying test with state dump enabled, then gzipping
# the resulting JSON next to the tracked fixture path.
set -e

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
STATE_DIR="$PROJECT_ROOT/tests/waku_rln_relay/anvil_state"
STATE_FILE="$STATE_DIR/state-deployed-contracts-mint-and-approved.json"
STATE_FILE_GZ="${STATE_FILE}.gz"

echo "==================================="
echo "Anvil State File Regeneration Tool"
echo "==================================="
echo ""

# Check if Foundry is installed
if ! command -v anvil &> /dev/null; then
    echo "ERROR: anvil is not installed!"
    echo "Please run: make rln-deps"
    exit 1
fi

ANVIL_VERSION=$(anvil --version 2>/dev/null | head -n1)
echo "Using Foundry: $ANVIL_VERSION"
echo ""

# Backup existing state file
if [ -f "$STATE_FILE_GZ" ]; then
    BACKUP_FILE="${STATE_FILE_GZ}.backup-$(date +%Y%m%d-%H%M%S)"
    # basename argument quoted so paths containing spaces survive splitting
    echo "Backing up existing state file to: $(basename "$BACKUP_FILE")"
    cp "$STATE_FILE_GZ" "$BACKUP_FILE"
fi

# Remove old state files
rm -f "$STATE_FILE" "$STATE_FILE_GZ"

echo ""
echo "Running test to generate fresh state file..."
echo "This will:"
echo "  1. Build RLN library"
echo "  2. Start Anvil with state dump enabled"
echo "  3. Deploy contracts"
echo "  4. Save state and compress it"
echo ""

cd "$PROJECT_ROOT"

# Run a single test that deploys contracts.
# The test framework will handle the state dump. A non-zero exit is tolerated
# here — what matters is whether the state file materialized (checked below).
make test tests/waku_rln_relay/test_rln_group_manager_onchain.nim "RLN instances" || {
    # Capture make's exit status FIRST: any intervening command (even echo)
    # overwrites $?, which previously made this always report 0.
    status=$?
    echo ""
    echo "Test execution completed (exit status: $status)"
    echo "Checking if state file was generated..."
}

# Check if state file was created
if [ -f "$STATE_FILE" ]; then
    echo ""
    echo "✓ State file generated: $STATE_FILE"

    # Compress it (gzip -c keeps the original so we can report both sizes)
    gzip -c "$STATE_FILE" > "$STATE_FILE_GZ"
    echo "✓ Compressed: $STATE_FILE_GZ"

    # File sizes
    STATE_SIZE=$(du -h "$STATE_FILE" | cut -f1)
    GZ_SIZE=$(du -h "$STATE_FILE_GZ" | cut -f1)
    echo ""
    echo "File sizes:"
    echo "  Uncompressed: $STATE_SIZE"
    echo "  Compressed:   $GZ_SIZE"

    # Optionally remove uncompressed.
    # `|| REPLY=""` prevents `set -e` from aborting the script when stdin is
    # closed (non-interactive runs): read returns non-zero on EOF, which
    # previously killed the script right before the SUCCESS banner.
    echo ""
    read -p "Remove uncompressed state file? [y/N] " -n 1 -r || REPLY=""
    echo
    if [[ $REPLY =~ ^[Yy]$ ]]; then
        rm "$STATE_FILE"
        echo "✓ Removed uncompressed file"
    fi

    echo ""
    echo "============================================"
    echo "✓ SUCCESS! State file regenerated"
    echo "============================================"
    echo ""
    echo "Next steps:"
    echo "  1. Test locally: make test tests/node/test_wakunode_lightpush.nim"
    echo "  2. If tests pass, commit: git add $STATE_FILE_GZ"
    echo "  3. Push and verify CI passes"
    echo ""
else
    echo ""
    echo "============================================"
    echo "✗ ERROR: State file was not generated"
    echo "============================================"
    echo ""
    echo "The state file should have been created at: $STATE_FILE"
    echo "Please check the test output above for errors."
    exit 1
fi

View File

@ -27,7 +27,7 @@ import
# TODO: migrate to usage of a test cluster conf
proc defaultTestWakuConfBuilder*(): WakuConfBuilder =
var builder = WakuConfBuilder.init()
builder.withP2pTcpPort(Port(60000))
builder.withP2pTcpPort(Port(0))
builder.withP2pListenAddress(parseIpAddress("0.0.0.0"))
builder.restServerConf.withListenAddress(parseIpAddress("127.0.0.1"))
builder.withDnsAddrsNameServers(
@ -80,7 +80,7 @@ proc newTestWakuNode*(
# Update extPort to default value if it's missing and there's an extIp or a DNS domain
let extPort =
if (extIp.isSome() or dns4DomainName.isSome()) and extPort.isNone():
some(Port(60000))
some(Port(0))
else:
extPort

View File

@ -35,7 +35,7 @@ suite "Waku Message - Deterministic hashing":
byteutils.toHex(message.payload) == "010203045445535405060708"
byteutils.toHex(message.meta) == ""
byteutils.toHex(toBytesBE(uint64(message.timestamp))) == "175789bfa23f8400"
messageHash.toHex() ==
byteutils.toHex(messageHash) ==
"cccab07fed94181c83937c8ca8340c9108492b7ede354a6d95421ad34141fd37"
test "digest computation - meta field (12 bytes)":
@ -69,7 +69,7 @@ suite "Waku Message - Deterministic hashing":
byteutils.toHex(message.payload) == "010203045445535405060708"
byteutils.toHex(message.meta) == "73757065722d736563726574"
byteutils.toHex(toBytesBE(uint64(message.timestamp))) == "175789bfa23f8400"
messageHash.toHex() ==
byteutils.toHex(messageHash) ==
"b9b4852f9d8c489846e8bfc6c5ca6a1a8d460a40d28832a966e029eb39619199"
test "digest computation - meta field (64 bytes)":
@ -104,7 +104,7 @@ suite "Waku Message - Deterministic hashing":
byteutils.toHex(message.meta) ==
"000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f202122232425262728292a2b2c2d2e2f303132333435363738393a3b3c3d3e3f"
byteutils.toHex(toBytesBE(uint64(message.timestamp))) == "175789bfa23f8400"
messageHash.toHex() ==
byteutils.toHex(messageHash) ==
"653460d04f66c5b11814d235152f4f246e6f03ef80a305a825913636fbafd0ba"
test "digest computation - zero length payload":
@ -132,7 +132,7 @@ suite "Waku Message - Deterministic hashing":
## Then
check:
messageHash.toHex() ==
byteutils.toHex(messageHash) ==
"0f6448cc23b2db6c696aa6ab4b693eff4cf3549ff346fe1dbeb281697396a09f"
test "waku message - check meta size is enforced":

View File

@ -386,7 +386,7 @@ procSuite "WakuNode - Store":
let mountArchiveRes = server.mountArchive(archiveA)
assert mountArchiveRes.isOk(), mountArchiveRes.error
waitFor server.mountStore((3, 500.millis))
waitFor server.mountStore((3, 200.millis))
client.mountStoreClient()
@ -413,11 +413,11 @@ procSuite "WakuNode - Store":
for count in 0 ..< 3:
waitFor successProc()
waitFor sleepAsync(5.millis)
waitFor sleepAsync(1.millis)
waitFor failsProc()
waitFor sleepAsync(500.millis)
waitFor sleepAsync(200.millis)
for count in 0 ..< 3:
waitFor successProc()

2
vendor/nim-dnsdisc vendored

@ -1 +1 @@
Subproject commit b71d029f4da4ec56974d54c04518bada00e1b623
Subproject commit 203abd2b3e758e0ea3ae325769b20a7e1bcd1010

@ -1 +1 @@
Subproject commit c3ac3f639ed1d62f59d3077d376a29c63ac9750c
Subproject commit ce27581a3e881f782f482cb66dc5b07a02bd615e

@ -1 +1 @@
Subproject commit 79cbab1460f4c0cdde2084589d017c43a3d7b4f1
Subproject commit c53852d9e24205b6363bba517fa8ee7bde823691

@ -1 +1 @@
Subproject commit b65fd6a7e64c864dabe40e7dfd6c7d07db0014ac
Subproject commit c343b0e243d9e17e2c40f3a8a24340f7c4a71d44

2
vendor/nim-libp2p vendored

@ -1 +1 @@
Subproject commit eb7e6ff89889e41b57515f891ba82986c54809fb
Subproject commit ca48c3718246bb411ff0e354a70cb82d9a28de0d

2
vendor/nim-lsquic vendored

@ -1 +1 @@
Subproject commit f3fe33462601ea34eb2e8e9c357c92e61f8d121b
Subproject commit 4fb03ee7bfb39aecb3316889fdcb60bec3d0936f

2
vendor/nim-metrics vendored

@ -1 +1 @@
Subproject commit ecf64c6078d1276d3b7d9b3d931fbdb70004db11
Subproject commit 11d0cddfb0e711aa2a8c75d1892ae24a64c299fc

2
vendor/nim-presto vendored

@ -1 +1 @@
Subproject commit 92b1c7ff141e6920e1f8a98a14c35c1fa098e3be
Subproject commit d66043dd7ede146442e6c39720c76a20bde5225f

@ -1 +1 @@
Subproject commit 6f525d5447d97256750ca7856faead03e562ed20
Subproject commit b0f2fa32960ea532a184394b0f27be37bd80248b

@ -1 +1 @@
Subproject commit bdf01cf4236fb40788f0733466cdf6708783cbac
Subproject commit 89ba51f557414d3a3e17ab3df8270e1bdaa3ca2a

2
vendor/nim-stew vendored

@ -1 +1 @@
Subproject commit e5740014961438610d336cd81706582dbf2c96f0
Subproject commit b66168735d6f3841c5239c3169d3fe5fe98b1257

@ -1 +1 @@
Subproject commit 94d68e796c045d5b37cabc6be32d7bfa168f8857
Subproject commit e4d37dc1652d5c63afb89907efb5a5e812261797

@ -1 +1 @@
Subproject commit fea85b27f0badcf617033ca1bc05444b5fd8aa7a
Subproject commit b5b387e6fb2a7cc75d54a269b07cc6218361bd46

@ -1 +1 @@
Subproject commit 8b51e99b4a57fcfb31689230e75595f024543024
Subproject commit 26f2ef3ae0ec72a2a75bfe557e02e88f6a31c189

2
vendor/nim-websock vendored

@ -1 +1 @@
Subproject commit ebe308a79a7b440a11dfbe74f352be86a3883508
Subproject commit 35ae76f1559e835c80f9c1a3943bf995d3dd9eb5

@ -1 +1 @@
Subproject commit 8a338f354481e8a3f3d64a72e38fad4c62e32dcd
Subproject commit d9906ef40f1e113fcf51de4ad27c61aa45375c2d

2
vendor/zerokit vendored

@ -1 +1 @@
Subproject commit 70c79fbc989d4f87d9352b2f4bddcb60ebe55b19
Subproject commit a4bb3feb5054e6fd24827adf204493e6e173437b

View File

@ -70,7 +70,9 @@ method sendImpl*(self: RelaySendProcessor, task: DeliveryTask) {.async.} =
if noOfPublishedPeers > 0:
info "Message propagated via Relay",
requestId = task.requestId, msgHash = task.msgHash.to0xHex(), noOfPeers = noOfPublishedPeers
requestId = task.requestId,
msgHash = task.msgHash.to0xHex(),
noOfPeers = noOfPublishedPeers
task.state = DeliveryState.SuccessfullyPropagated
task.deliveryTime = Moment.now()
else:

View File

@ -7,4 +7,4 @@ import bearssl/rand, stew/byteutils
proc generateRequestId*(rng: ref HmacDrbgContext): string =
var bytes: array[10, byte]
hmacDrbgGenerate(rng[], bytes)
return toHex(bytes)
return byteutils.toHex(bytes)

View File

@ -297,13 +297,13 @@ method put*(
pubsubTopic: PubsubTopic,
message: WakuMessage,
): Future[ArchiveDriverResult[void]] {.async.} =
let messageHash = toHex(messageHash)
let messageHash = byteutils.toHex(messageHash)
let contentTopic = message.contentTopic
let payload = toHex(message.payload)
let payload = byteutils.toHex(message.payload)
let version = $message.version
let timestamp = $message.timestamp
let meta = toHex(message.meta)
let meta = byteutils.toHex(message.meta)
trace "put PostgresDriver",
messageHash, contentTopic, payload, version, timestamp, meta
@ -439,7 +439,7 @@ proc getMessagesArbitraryQuery(
var args: seq[string]
if cursor.isSome():
let hashHex = toHex(cursor.get())
let hashHex = byteutils.toHex(cursor.get())
let timeCursor = ?await s.getTimeCursor(hashHex)
@ -520,7 +520,7 @@ proc getMessageHashesArbitraryQuery(
var args: seq[string]
if cursor.isSome():
let hashHex = toHex(cursor.get())
let hashHex = byteutils.toHex(cursor.get())
let timeCursor = ?await s.getTimeCursor(hashHex)
@ -630,7 +630,7 @@ proc getMessagesPreparedStmt(
return ok(rows)
let hashHex = toHex(cursor.get())
let hashHex = byteutils.toHex(cursor.get())
let timeCursor = ?await s.getTimeCursor(hashHex)
@ -723,7 +723,7 @@ proc getMessageHashesPreparedStmt(
return ok(rows)
let hashHex = toHex(cursor.get())
let hashHex = byteutils.toHex(cursor.get())
let timeCursor = ?await s.getTimeCursor(hashHex)

View File

@ -213,13 +213,13 @@ method put*(
messageHash: WakuMessageHash,
receivedTime: Timestamp,
): Future[ArchiveDriverResult[void]] {.async.} =
let digest = toHex(digest.data)
let messageHash = toHex(messageHash)
let digest = byteutils.toHex(digest.data)
let messageHash = byteutils.toHex(messageHash)
let contentTopic = message.contentTopic
let payload = toHex(message.payload)
let payload = byteutils.toHex(message.payload)
let version = $message.version
let timestamp = $message.timestamp
let meta = toHex(message.meta)
let meta = byteutils.toHex(message.meta)
trace "put PostgresDriver", timestamp = timestamp
@ -312,7 +312,7 @@ proc getMessagesArbitraryQuery(
args.add(pubsubTopic.get())
if cursor.isSome():
let hashHex = toHex(cursor.get().hash)
let hashHex = byteutils.toHex(cursor.get().hash)
var entree: seq[(PubsubTopic, WakuMessage, seq[byte], Timestamp, WakuMessageHash)]
proc entreeCallback(pqResult: ptr PGresult) =
@ -463,7 +463,7 @@ proc getMessagesPreparedStmt(
let limit = $maxPageSize
if cursor.isSome():
let hash = toHex(cursor.get().hash)
let hash = byteutils.toHex(cursor.get().hash)
var entree: seq[(PubsubTopic, WakuMessage, seq[byte], Timestamp, WakuMessageHash)]
@ -576,7 +576,7 @@ proc getMessagesV2PreparedStmt(
var stmtDef =
if ascOrder: SelectWithCursorV2AscStmtDef else: SelectWithCursorV2DescStmtDef
let digest = toHex(cursor.get().digest.data)
let digest = byteutils.toHex(cursor.get().digest.data)
let timestamp = $cursor.get().storeTime
(

View File

@ -29,7 +29,7 @@ type WakuFilterClient* = ref object of LPProtocol
func generateRequestId(rng: ref HmacDrbgContext): string =
var bytes: array[10, byte]
hmacDrbgGenerate(rng[], bytes)
return toHex(bytes)
return byteutils.toHex(bytes)
proc sendSubscribeRequest(
wfc: WakuFilterClient,

View File

@ -346,7 +346,7 @@ proc generateRlnValidator*(
let validationRes = wakuRlnRelay.validateMessageAndUpdateLog(message)
let
proof = toHex(msgProof.proof)
proof = byteutils.toHex(msgProof.proof)
epoch = fromEpoch(msgProof.epoch)
root = inHex(msgProof.merkleRoot)
shareX = inHex(msgProof.shareX)

View File

@ -79,7 +79,8 @@ proc messageIngress*(
let id = SyncID(time: msg.timestamp, hash: msgHash)
self.storage.insert(id, pubsubTopic, msg.contentTopic).isOkOr:
error "failed to insert new message", msg_hash = $id.hash.toHex(), error = $error
error "failed to insert new message",
msg_hash = byteutils.toHex(id.hash), error = $error
proc messageIngress*(
self: SyncReconciliation,
@ -87,7 +88,7 @@ proc messageIngress*(
pubsubTopic: PubsubTopic,
msg: WakuMessage,
) =
trace "message ingress", msg_hash = msgHash.toHex(), msg = msg
trace "message ingress", msg_hash = byteutils.toHex(msgHash), msg = msg
if msg.ephemeral:
return
@ -95,7 +96,8 @@ proc messageIngress*(
let id = SyncID(time: msg.timestamp, hash: msgHash)
self.storage.insert(id, pubsubTopic, msg.contentTopic).isOkOr:
error "failed to insert new message", msg_hash = $id.hash.toHex(), error = $error
error "failed to insert new message",
msg_hash = byteutils.toHex(id.hash), error = $error
proc messageIngress*(
self: SyncReconciliation,
@ -104,7 +106,8 @@ proc messageIngress*(
contentTopic: ContentTopic,
) =
self.storage.insert(id, pubsubTopic, contentTopic).isOkOr:
error "failed to insert new message", msg_hash = $id.hash.toHex(), error = $error
error "failed to insert new message",
msg_hash = byteutils.toHex(id.hash), error = $error
proc preProcessPayload(
self: SyncReconciliation, payload: RangesData