chore: bump dependencies to v0.37.0 (#3536)

Darshan K 2025-09-10 13:20:37 +05:30 committed by GitHub
parent a36601ab0d
commit 5fc8c59f54
37 changed files with 76 additions and 57 deletions

View File

@@ -71,7 +71,6 @@ jobs:
- name: Building miniupnpc
run: |
cd vendor/nim-nat-traversal/vendor/miniupnp/miniupnpc
git checkout little_chore_windows_support
make -f Makefile.mingw CC=gcc CXX=g++ libminiupnpc.a V=1
cd ../../../../..

View File

@@ -41,7 +41,7 @@ ifeq ($(detected_OS),Windows)
NIM_PARAMS += --passL:"-Lvendor/nim-nat-traversal/vendor/miniupnp/miniupnpc"
NIM_PARAMS += --passL:"-Lvendor/nim-nat-traversal/vendor/libnatpmp-upstream"
LIBS = -static -lws2_32 -lbcrypt -liphlpapi -luserenv -lntdll -lminiupnpc -lnatpmp -lpq
LIBS = -lws2_32 -lbcrypt -liphlpapi -luserenv -lntdll -lminiupnpc -lnatpmp -lpq
NIM_PARAMS += $(foreach lib,$(LIBS),--passL:"$(lib)")
endif

View File

@@ -7,8 +7,13 @@ import
results,
regex
const git_version* {.strdefine.} = "n/a"
type EthRpcUrl* = distinct string
proc `$`*(u: EthRpcUrl): string =
string(u)
type NetworkMonitorConf* = object
logLevel* {.
desc: "Sets the log level",

View File

@@ -31,7 +31,7 @@ proc decodeBytes*(
try:
let jsonContent = parseJson(res)
if $jsonContent["status"].getStr() != "success":
error "query failed", result = jsonContent
error "query failed", result = $jsonContent
return err("query failed")
return ok(
NodeLocation(

View File

@@ -1,5 +1,5 @@
# Dockerfile to build a distributable container image from pre-existing binaries
FROM debian:stable-slim AS prod
FROM debian:bookworm-slim AS prod
ARG MAKE_TARGET=wakunode2

View File

@@ -1,4 +1,4 @@
import std/[sequtils, strutils]
import std/[sequtils, strutils, tables]
import chronicles, chronos, results, options, json
import
../../../waku/factory/waku,

View File

@@ -37,13 +37,10 @@ cd ../../../..
echo "6. -.-.-.- Building libunwind -.-.-.-"
cd vendor/nim-libbacktrace
execute_command "make all V=1 -j8"
execute_command "make install/usr/lib/libunwind.a V=1 -j8"
cp ./vendor/libunwind/build/lib/libunwind.a install/usr/lib
cd ../../
echo "7. -.-.-.- Building miniupnpc -.-.-.- "
cd vendor/nim-nat-traversal/vendor/miniupnp/miniupnpc
execute_command "git checkout little_chore_windows_support"
execute_command "make -f Makefile.mingw CC=gcc CXX=g++ libminiupnpc.a V=1 -j8"
cd ../../../../..

View File

@@ -405,9 +405,9 @@ suite "Waku Sync: reconciliation":
let (_, deliveredHash) = await remoteNeeds.get()
check deliveredHash in diffMsgHashes
asyncTest "sync 2 nodes, 40 msgs: 20 in-window diff, 20 out-window ignored":
asyncTest "sync 2 nodes, 40 msgs: 18 in-window diff, 20 out-window ignored":
const
diffInWin = 20
diffInWin = 18
diffOutWin = 20
stepOutNs = 100_000_000'u64
outOffsetNs = 2_300_000_000'u64 # the 20 out-of-window msgs are sent ~2 seconds earlier
@@ -424,7 +424,7 @@ suite "Waku Sync: reconciliation":
var inWinHashes, outWinHashes: HashSet[WakuMessageHash]
var ts = sliceStart
var ts = sliceStart + (Timestamp(stepIn) * 2)
for _ in 0 ..< diffInWin:
let msg = fakeWakuMessage(ts = Timestamp ts, contentTopic = DefaultContentTopic)
let hash = computeMessageHash(DefaultPubsubTopic, msg)
@@ -462,7 +462,7 @@ suite "Waku Sync: reconciliation":
check remoteNeeds.len == diffInWin
for _ in 0 ..< diffInWin:
let (_, deliveredHashes) = await remoteNeeds.get()
let (_, deliveredHashes) = await remoteNeeds.popFirst()
check deliveredHashes in inWinHashes
check deliveredHashes notin outWinHashes
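For context on the popFirst call above: assuming remoteNeeds is a chronos AsyncQueue (an assumption, not stated in this hunk), popFirst awaits until the queue is non-empty and removes its first element. A minimal standalone sketch using only the chronos API:

import chronos

proc demo() {.async.} =
  let queue = newAsyncQueue[int]()
  await queue.put(42) # enqueue at the back
  let first = await queue.popFirst() # await and remove the front element
  doAssert first == 42

waitFor demo()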

vendor/nim-bearssl vendored

@@ -1 +1 @@
Subproject commit 667b40440a53a58e9f922e29e20818720c62d9ac
Subproject commit 11e798b62b8e6beabe958e048e9e24c7e0f9ee63

@@ -1 +1 @@
Subproject commit a8fb38a10bcb548df78e9a70bd77b26bb50abd12
Subproject commit 54f5b726025e8c7385e3a6529d3aa27454c6e6ff

vendor/nim-dnsdisc vendored

@@ -1 +1 @@
Subproject commit c3d37c2860bcef9e3e2616ee4c53100fe7f0e845
Subproject commit b71d029f4da4ec56974d54c04518bada00e1b623

vendor/nim-eth vendored

@@ -1 +1 @@
Subproject commit a1f7d63ababa6ce90798e16a110fc4e43ac93f03
Subproject commit d9135e6c3c5d6d819afdfb566aa8d958756b73a8

@@ -1 +1 @@
Subproject commit c51315d0ae5eb2594d0bf41181d0e1aca1b3c01d
Subproject commit c3ac3f639ed1d62f59d3077d376a29c63ac9750c

vendor/nim-json-rpc vendored

@@ -1 +1 @@
Subproject commit cbe8edf69d743a787b76b1cd25bfc4eae89927f7
Subproject commit 9665c265035f49f5ff94bbffdeadde68e19d6221

@@ -1 +1 @@
Subproject commit 2b1c5eb11df3647a2cee107cd4cce3593cbb8bcf
Subproject commit 0640259af2fad330ea28e77359c0d0cefac5a361

@@ -1 +1 @@
Subproject commit 822849874926ba3849a86cb3eafdf017bd11bd2d
Subproject commit d8bd4ce5c46bb6d2f984f6b3f3d7380897d95ecb

vendor/nim-libp2p vendored

@@ -1 +1 @@
Subproject commit cd60b254a0700b0daac7a6cb2c0c48860b57c539
Subproject commit 5eaa43b8608221a615e442587f27520a49a56460

vendor/nim-minilru vendored

@@ -1 +1 @@
Subproject commit 2682cffa8733f3b61751c65a963941315e887bac
Subproject commit 0c4b2bce959591f0a862e9b541ba43c6d0cf3476

@@ -1 +1 @@
Subproject commit dfbf8c9ad3655f238b350f690bbfce5ec34d25fb
Subproject commit 860e18c37667b5dd005b94c63264560c35d88004

vendor/nim-presto vendored

@@ -1 +1 @@
Subproject commit 3ccb356220b70f7d9eb0fbd58b674c4080f78014
Subproject commit 92b1c7ff141e6920e1f8a98a14c35c1fa098e3be

@@ -1 +1 @@
Subproject commit f808ed5e7a7bfc42204ec7830f14b7a42b63c284
Subproject commit 9dd3df62124aae79d564da636bb22627c53c7676

@@ -1 +1 @@
Subproject commit 2086c99608b4bf472e1ef5fe063710f280243396
Subproject commit 6f525d5447d97256750ca7856faead03e562ed20

@@ -1 +1 @@
Subproject commit d08e964872271e83fb1b6de67ad57c2d0fcdfe63
Subproject commit bdf01cf4236fb40788f0733466cdf6708783cbac

vendor/nim-stew vendored

@@ -1 +1 @@
Subproject commit 58abb4891f97c6cdc07335e868414e0c7b736c68
Subproject commit e5740014961438610d336cd81706582dbf2c96f0

vendor/nim-stint vendored

@@ -1 +1 @@
Subproject commit 1a2c661e3f50ff696b0b6692fab0d7bb2abf10cc
Subproject commit 470b7892561b5179ab20bd389a69217d6213fe58

vendor/nim-web3 vendored

@@ -1 +1 @@
Subproject commit 3ef986c9d93604775595f116a35c6ac0bf5257fc
Subproject commit 81ee8ce479d86acb73be7c4f365328e238d9b4a3

@@ -1 +1 @@
Subproject commit 0be0663e1af76e869837226a4ef3e586fcc737d3
Subproject commit e6c2c9da39c2d368d9cf420ac22692e99715d22c

vendor/nimcrypto vendored

@@ -1 +1 @@
Subproject commit 19c41d6be4c00b4a2c8000583bd30cf8ceb5f4b1
Subproject commit 721fb99ee099b632eb86dfad1f0d96ee87583774

@@ -1 +1 @@
Subproject commit b7e9a9b1bc69256a2a3076c1f099b50ce84e7eff
Subproject commit 900d4f95e0e618bdeb4c241f7a4b6347df6bb950

View File

@@ -1,5 +1,5 @@
import
std/[times, strutils, os, sets, strformat],
std/[times, strutils, os, sets, strformat, tables],
results,
chronos,
chronos/threadsync,

View File

@@ -96,7 +96,12 @@ proc readValue*[T](r: var EnvvarReader, value: var T) {.raises: [SerializationEr
var reader = findFieldReader(fields[], fieldName, expectedFieldPos)
if reader != nil:
try:
reader(value, r)
except ValueError, IOError:
raise newException(
SerializationError, "Couldn't read field: " & getCurrentExceptionMsg()
)
discard r.key.pop()
else:
const typeName = typetraits.name(T)

View File

@@ -1,7 +1,7 @@
## This module reinforces the publish operation with regular store-v3 requests.
##
import std/sequtils
import std/[sequtils, tables]
import chronos, chronicles, libp2p/utility
import
./delivery_callback,

View File

@@ -1,7 +1,7 @@
{.push raises: [].}
import
std/[options, sets, sequtils, times, strformat, strutils, math, random],
std/[options, sets, sequtils, times, strformat, strutils, math, random, tables],
chronos,
chronicles,
metrics,

View File

@@ -53,7 +53,8 @@ proc withWssTransport*(
upgr,
tlsPrivateKey = key,
tlsCertificate = cert,
{TLSFlags.NoVerifyHost, TLSFlags.NoVerifyServerName},
autotls = nil, # required 5th param
tlsFlags = {TLSFlags.NoVerifyHost, TLSFlags.NoVerifyServerName},
)
)

View File

@@ -40,28 +40,40 @@ proc readValue*(
value = Base64String(reader.readValue(string))
proc decodeFromJsonString*[T](
t: typedesc[T], data: JsonString, requireAllFields = true
t: typedesc[T], data: JsonString, requireAllFields: bool = true
): SerdesResult[T] =
try:
if requireAllFields:
ok(
RestJson.decode(
string(data), T, requireAllFields = requireAllFields, allowUnknownFields = true
string(data), T, requireAllFields = true, allowUnknownFields = true
)
)
else:
ok(
RestJson.decode(
string(data), T, requireAllFields = false, allowUnknownFields = true
)
)
except SerializationError:
# TODO: Do better error reporting here
err("Unable to deserialize data")
# Internal static implementation
proc decodeFromJsonBytes*[T](
t: typedesc[T], data: openArray[byte], requireAllFields = true
t: typedesc[T], data: openArray[byte], requireAllFields: bool = true
): SerdesResult[T] =
try:
if requireAllFields:
ok(
RestJson.decode(
string.fromBytes(data),
T,
requireAllFields = requireAllFields,
allowUnknownFields = true,
string.fromBytes(data), T, requireAllFields = true, allowUnknownFields = true
)
)
else:
ok(
RestJson.decode(
string.fromBytes(data), T, requireAllFields = false, allowUnknownFields = true
)
)
except SerializationError:
@@ -95,16 +107,16 @@ proc encodeIntoJsonBytes*(value: auto): SerdesResult[seq[byte]] =
#### helpers
proc encodeString*(value: string): RestResult[string] =
proc encodeString*(value: string): SerdesResult[string] =
ok(value)
proc decodeString*(t: typedesc[string], value: string): RestResult[string] =
proc decodeString*(t: typedesc[string], value: string): SerdesResult[string] =
ok(value)
proc encodeString*(value: SomeUnsignedInt): RestResult[string] =
proc encodeString*(value: SomeUnsignedInt): SerdesResult[string] =
ok(Base10.toString(value))
proc decodeString*(T: typedesc[SomeUnsignedInt], value: string): RestResult[T] =
proc decodeString*(T: typedesc[SomeUnsignedInt], value: string): SerdesResult[T] =
let v = Base10.decode(T, value)
if v.isErr():
return err(v.error())
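To make the reworked decoder at the top of this file's hunk concrete, here is a hypothetical caller exercising both requireAllFields branches. The ExampleConf type and JSON payload are invented for illustration, and the sketch assumes JsonString converts directly from a string (a distinct string type); only the decodeFromJsonString signature comes from the diff:

type ExampleConf = object
  host: string
  port: int

# Strict decoding fails because "port" is absent from the payload.
let strict =
  decodeFromJsonString(ExampleConf, JsonString("""{"host":"node-1"}"""), requireAllFields = true)
doAssert strict.isErr()

# Lenient decoding succeeds and leaves the missing field at its default value.
let lenient =
  decodeFromJsonString(ExampleConf, JsonString("""{"host":"node-1"}"""), requireAllFields = false)
doAssert lenient.isOk()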

View File

@@ -1,6 +1,6 @@
{.push raises: [].}
import std/options, results, chronicles, chronos, metrics, bearssl/rand
import std/[options, tables], results, chronicles, chronos, metrics, bearssl/rand
import
../node/peer_manager, ../utils/requests, ./protocol_metrics, ./common, ./rpc_codec

View File

@@ -1,7 +1,7 @@
{.push raises: [].}
import
std/sets,
std/[sets, tables],
results,
chronicles,
chronos,