mirror of https://github.com/waku-org/nwaku.git

chore: use submodule nph in CI to check lint (#3027)

This commit is contained in:
parent a3cd2a1a92
commit ce9a8c468a
@@ -145,21 +145,32 @@ jobs:
   lint:
     name: "Lint"
     runs-on: ubuntu-latest
+    needs: build
     steps:
-      - name: Checkout
-        uses: actions/checkout@v4
-        with:
-          fetch-depth: 2 # In PR, has extra merge commit: ^1 = PR, ^2 = base
+      - name: Checkout code
+        uses: actions/checkout@v3
+
+      - name: Get submodules hash
+        id: submodules
+        run: |
+          echo "hash=$(git submodule status | awk '{print $1}' | sort | shasum -a 256 | sed 's/[ -]*//g')" >> $GITHUB_OUTPUT
+
+      - name: Cache submodules
+        uses: actions/cache@v3
+        with:
+          path: |
+            vendor/
+            .git/modules
+          key: ${{ runner.os }}-vendor-modules-${{ steps.submodules.outputs.hash }}
+
+      - name: Build nph
+        run: |
+          make build-nph
 
       - name: Check nph formatting
-        # Pin nph to a specific version to avoid sudden style differences.
-        # Updating nph version should be accompanied with running the new
-        # version on the fluffy directory.
         run: |
-          VERSION="v0.5.1"
-          ARCHIVE="nph-linux_x64.tar.gz"
-          curl -L "https://github.com/arnetheduck/nph/releases/download/${VERSION}/${ARCHIVE}" -o ${ARCHIVE}
-          tar -xzf ${ARCHIVE}
           shopt -s extglob # Enable extended globbing
-          ./nph examples waku tests tools apps *.@(nim|nims|nimble)
+          NPH=$(make print-nph-path)
+          echo "using nph at ${NPH}"
+          "${NPH}" examples waku tests tools apps *.@(nim|nims|nimble)
           git diff --exit-code
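The cache key above is derived from the commits the submodules are pinned to, so bumping any submodule (including vendor/nph) invalidates the cache and forces a rebuild. A minimal sketch of what the "Get submodules hash" step computes, runnable locally from a clone:

# Sketch: reproduce the CI cache key by hand.
# `git submodule status` prints one "<commit> <path> (<ref>)" line per
# submodule; hashing the sorted commit column yields a digest that changes
# whenever any submodule pointer moves.
hash=$(git submodule status | awk '{print $1}' | sort | shasum -a 256 | sed 's/[ -]*//g')
echo "$hash"  # CI exposes this as steps.submodules.outputs.hash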
Makefile
@@ -270,12 +270,10 @@ networkmonitor: | build deps librln
 ############
 ## Format ##
 ############
-.PHONY: build-nph clean-nph install-nph
+.PHONY: build-nph install-nph clean-nph print-nph-path
 
 build-nph:
-ifeq ("$(wildcard $(NPH))","")
 	$(ENV_SCRIPT) nim c vendor/nph/src/nph.nim
-endif
 
 GIT_PRE_COMMIT_HOOK := .git/hooks/pre-commit
 
@@ -294,6 +292,10 @@ nph/%: build-nph
 clean-nph:
 	rm -f $(NPH)
 
+# To avoid hardcoding nph binary location in several places
+print-nph-path:
+	echo "$(NPH)"
+
 clean: | clean-nph
 
 ###################
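The print-nph-path target added above exists so that CI (and any other tooling) can locate the freshly built formatter without hardcoding where the binary lands. A usage sketch, mirroring the CI steps; the target file is a hypothetical example:

# Sketch: build the vendored formatter, then format one file in place.
make build-nph
NPH=$(make print-nph-path)
"${NPH}" waku/waku_core.nim  # hypothetical example path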
@@ -75,8 +75,7 @@ when isMainModule:
         wnconf: WakuNodeConf, sources: auto
       ) {.gcsafe, raises: [ConfigurationError].} =
         echo "Loading secondary configuration file into WakuNodeConf"
-        sources.addConfigFile(Toml, configFile)
-      ,
+        sources.addConfigFile(Toml, configFile),
     )
   except CatchableError:
     error "Loading Waku configuration failed", error = getCurrentExceptionMsg()
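This hunk and every remaining Nim hunk in the commit are the same mechanical change: the bumped nph folds a trailing comma that previously sat on a line of its own back onto the preceding expression. Rather than hand-editing, the churn can be regenerated with the same commands the CI job runs — a sketch, assuming a bash shell at the repository root:

# Sketch: re-run the bumped formatter over the trees CI checks.
shopt -s extglob  # enable the *.@(nim|nims|nimble) pattern
make build-nph
"$(make print-nph-path)" examples waku tests tools apps *.@(nim|nims|nimble)
git diff --exit-code  # non-zero exit while any file is still unformatted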
@@ -159,8 +159,7 @@ proc load*(T: type LiteProtocolTesterConf, version = ""): ConfResult[T] =
       secondarySources = proc(
         conf: LiteProtocolTesterConf, sources: auto
       ) {.gcsafe, raises: [ConfigurationError].} =
-        sources.addConfigFile(Envvar, InputFile("liteprotocoltester"))
-      ,
+        sources.addConfigFile(Envvar, InputFile("liteprotocoltester")),
     )
     ok(conf)
   except CatchableError:
@@ -137,14 +137,12 @@ proc newTestWakuNode*(
       if secureKey != "":
         some(secureKey)
       else:
-        none(string)
-      ,
+        none(string),
     secureCert =
       if secureCert != "":
         some(secureCert)
       else:
-        none(string)
-      ,
+        none(string),
     agentString = agentString,
   )
 
@@ -297,8 +297,7 @@ procSuite "Waku Rest API - Store v3":
         if reqHash.isSome():
           reqHash.get().toRestStringWakuMessageHash()
         else:
-          ""
-        , # base64-encoded digest. Empty ignores the field.
+          "", # base64-encoded digest. Empty ignores the field.
         "true", # ascending
         "7", # page size. Empty implies default page size.
       )
@@ -790,8 +789,7 @@ procSuite "Waku Rest API - Store v3":
         if reqHash.isSome():
           reqHash.get().toRestStringWakuMessageHash()
         else:
-          ""
-        , # base64-encoded digest. Empty ignores the field.
+          "", # base64-encoded digest. Empty ignores the field.
         "true", # ascending
         "3", # page size. Empty implies default page size.
       )
@@ -827,8 +825,7 @@ procSuite "Waku Rest API - Store v3":
         if reqHash.isSome():
           reqHash.get().toRestStringWakuMessageHash()
         else:
-          ""
-        , # base64-encoded digest. Empty ignores the field.
+          "", # base64-encoded digest. Empty ignores the field.
       )
 
     check:
@@ -850,8 +847,7 @@ procSuite "Waku Rest API - Store v3":
         if reqHash.isSome():
           reqHash.get().toRestStringWakuMessageHash()
         else:
-          ""
-        , # base64-encoded digest. Empty ignores the field.
+          "", # base64-encoded digest. Empty ignores the field.
         "true", # ascending
         "5", # page size. Empty implies default page size.
       )
@@ -1 +1 @@
-Subproject commit de5cd4823e63424adb58ef3717524348ae6c4d87
+Subproject commit 31bdced07d3dc3d254669bd94210101c701deeda
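This one-line pointer bump is what actually changes the formatter's behavior; the page does not name the file, but it is presumably the vendor/nph gitlink. A sketch for inspecting what the new pin pulls in, using the two hashes from the hunk above (assumes the submodule is initialized and its history fetched):

# Sketch: inspect the submodule bump locally.
git submodule status vendor/nph  # shows the currently pinned commit
git -C vendor/nph log --oneline de5cd4823e..31bdced07d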
@@ -55,8 +55,7 @@ proc get*(peerStore: PeerStore, peerId: PeerID): RemotePeerInfo =
       if peerStore[ENRBook][peerId] != default(enr.Record):
         some(peerStore[ENRBook][peerId])
       else:
-        none(enr.Record)
-      ,
+        none(enr.Record),
     protocols: peerStore[ProtoBook][peerId],
     agent: peerStore[AgentBook][peerId],
     protoVersion: peerStore[ProtoVersionBook][peerId],
@@ -58,8 +58,7 @@ proc toFilterWakuMessage*(msg: WakuMessage): FilterWakuMessage =
       if msg.meta.len > 0:
         some(base64.encode(msg.meta))
       else:
-        none(Base64String)
-      ,
+        none(Base64String),
     ephemeral: some(msg.ephemeral),
   )
 
@@ -239,8 +238,7 @@ proc readValue*(
       if pubsubTopic.isNone() or pubsubTopic.get() == "":
         none(string)
       else:
-        some(pubsubTopic.get())
-      ,
+        some(pubsubTopic.get()),
     contentFilters: contentFilters.get(),
   )
 
@@ -315,8 +313,7 @@ proc readValue*(
       if pubsubTopic.isNone() or pubsubTopic.get() == "":
         none(string)
       else:
-        some(pubsubTopic.get())
-      ,
+        some(pubsubTopic.get()),
     contentFilters: contentFilters.get(),
   )
 
@@ -364,8 +361,7 @@ proc readValue*(
       if pubsubTopic.isNone() or pubsubTopic.get() == "":
         none(string)
       else:
-        some(pubsubTopic.get())
-      ,
+        some(pubsubTopic.get()),
     contentFilters: contentFilters.get(),
   )
 
@@ -110,8 +110,7 @@ proc toStoreResponseRest*(histResp: HistoryResponse): StoreResponseRest =
       if message.meta.len > 0:
         some(base64.encode(message.meta))
       else:
-        none(Base64String)
-      ,
+        none(Base64String),
   )
 
   var storeWakuMsgs: seq[StoreWakuMessage]
@@ -62,7 +62,6 @@ proc readValue*(
     if pubsubTopic.isNone() or pubsubTopic.get() == "":
       none(string)
     else:
-      some(pubsubTopic.get())
-    ,
+      some(pubsubTopic.get()),
   message: message.get(),
 )
@@ -34,8 +34,7 @@ proc toRelayWakuMessage*(msg: WakuMessage): RelayWakuMessage =
       if msg.meta.len > 0:
         some(base64.encode(msg.meta))
       else:
-        none(Base64String)
-      ,
+        none(Base64String),
     ephemeral: some(msg.ephemeral),
   )
 
@@ -82,8 +82,7 @@ proc createTable*(db: SqliteDatabase): DatabaseResult[void] =
   ?db.query(
     query,
     proc(s: ptr sqlite3_stmt) =
-      discard
-    ,
+      discard,
   )
   return ok()
 
@@ -98,8 +97,7 @@ proc createOldestMessageTimestampIndex*(db: SqliteDatabase): DatabaseResult[void] =
   ?db.query(
     query,
     proc(s: ptr sqlite3_stmt) =
-      discard
-    ,
+      discard,
   )
   return ok()
 
@@ -184,8 +182,7 @@ proc deleteMessagesOlderThanTimestamp*(
   ?db.query(
     query,
     proc(s: ptr sqlite3_stmt) =
-      discard
-    ,
+      discard,
   )
   return ok()
 
@@ -206,8 +203,7 @@ proc deleteOldestMessagesNotWithinLimit*(
   ?db.query(
     query,
     proc(s: ptr sqlite3_stmt) =
-      discard
-    ,
+      discard,
   )
   return ok()
 
@@ -96,8 +96,7 @@ proc createTable*(db: SqliteDatabase): DatabaseResult[void] =
   ?db.query(
     query,
     proc(s: ptr sqlite3_stmt) =
-      discard
-    ,
+      discard,
   )
   return ok()
 
@@ -112,8 +111,7 @@ proc createOldestMessageTimestampIndex*(db: SqliteDatabase): DatabaseResult[void] =
   ?db.query(
     query,
     proc(s: ptr sqlite3_stmt) =
-      discard
-    ,
+      discard,
   )
   return ok()
 
@@ -127,8 +125,7 @@ proc createHistoryQueryIndex*(db: SqliteDatabase): DatabaseResult[void] =
   ?db.query(
     query,
     proc(s: ptr sqlite3_stmt) =
-      discard
-    ,
+      discard,
   )
   return ok()
 
@@ -226,8 +223,7 @@ proc deleteMessagesOlderThanTimestamp*(
   ?db.query(
     query,
     proc(s: ptr sqlite3_stmt) =
-      discard
-    ,
+      discard,
   )
   return ok()
 
@@ -248,8 +244,7 @@ proc deleteOldestMessagesNotWithinLimit*(
   ?db.query(
     query,
     proc(s: ptr sqlite3_stmt) =
-      discard
-    ,
+      discard,
   )
   return ok()
 
@@ -9,8 +9,7 @@ proc hasKeys*(data: JsonNode, keys: openArray[string]): bool =
   return all(
     keys,
     proc(key: string): bool =
-      return data.hasKey(key)
-    ,
+      return data.hasKey(key),
   )
 
 # Safely saves a Keystore's JsonNode to disk.