chore: use submodule nph in CI to check lint (#3027)

This commit is contained in:
fryorcraken 2024-09-11 11:51:42 +10:00 committed by GitHub
parent a3cd2a1a92
commit ce9a8c468a
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
15 changed files with 57 additions and 70 deletions

View File

@@ -117,7 +117,7 @@ jobs:
export MAKEFLAGS="-j1"
export NIMFLAGS="--colors:off -d:chronicles_colors:none"
make V=1 LOG_LEVEL=DEBUG QUICK_AND_DIRTY_COMPILER=1 POSTGRES=$postgres_enabled test testwakunode2
build-docker-image:
@@ -141,25 +141,36 @@ jobs:
nim_wakunode_image: ${{ needs.build-docker-image.outputs.image }}
test_type: node-optional
debug: waku*
lint:
name: "Lint"
runs-on: ubuntu-latest
needs: build
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Checkout code
uses: actions/checkout@v3
- name: Get submodules hash
id: submodules
run: |
echo "hash=$(git submodule status | awk '{print $1}' | sort | shasum -a 256 | sed 's/[ -]*//g')" >> $GITHUB_OUTPUT
- name: Cache submodules
uses: actions/cache@v3
with:
fetch-depth: 2 # In PR, has extra merge commit: ^1 = PR, ^2 = base
path: |
vendor/
.git/modules
key: ${{ runner.os }}-vendor-modules-${{ steps.submodules.outputs.hash }}
- name: Build nph
run: |
make build-nph
- name: Check nph formatting
# Pin nph to a specific version to avoid sudden style differences.
# Updating nph version should be accompanied with running the new
# version on the fluffy directory.
run: |
VERSION="v0.5.1"
ARCHIVE="nph-linux_x64.tar.gz"
curl -L "https://github.com/arnetheduck/nph/releases/download/${VERSION}/${ARCHIVE}" -o ${ARCHIVE}
tar -xzf ${ARCHIVE}
shopt -s extglob # Enable extended globbing
./nph examples waku tests tools apps *.@(nim|nims|nimble)
NPH=$(make print-nph-path)
echo "using nph at ${NPH}"
"${NPH}" examples waku tests tools apps *.@(nim|nims|nimble)
git diff --exit-code

View File

@@ -270,12 +270,10 @@ networkmonitor: | build deps librln
############
## Format ##
############
.PHONY: build-nph clean-nph install-nph
.PHONY: build-nph install-nph clean-nph print-nph-path
build-nph:
ifeq ("$(wildcard $(NPH))","")
$(ENV_SCRIPT) nim c vendor/nph/src/nph.nim
endif
$(ENV_SCRIPT) nim c vendor/nph/src/nph.nim
GIT_PRE_COMMIT_HOOK := .git/hooks/pre-commit
@@ -294,6 +292,10 @@ nph/%: build-nph
clean-nph:
rm -f $(NPH)
# To avoid hardcoding nph binary location in several places
print-nph-path:
echo "$(NPH)"
clean: | clean-nph
###################

View File

@@ -75,8 +75,7 @@ when isMainModule:
wnconf: WakuNodeConf, sources: auto
) {.gcsafe, raises: [ConfigurationError].} =
echo "Loading secondary configuration file into WakuNodeConf"
sources.addConfigFile(Toml, configFile)
,
sources.addConfigFile(Toml, configFile),
)
except CatchableError:
error "Loading Waku configuration failed", error = getCurrentExceptionMsg()

View File

@@ -159,8 +159,7 @@ proc load*(T: type LiteProtocolTesterConf, version = ""): ConfResult[T] =
secondarySources = proc(
conf: LiteProtocolTesterConf, sources: auto
) {.gcsafe, raises: [ConfigurationError].} =
sources.addConfigFile(Envvar, InputFile("liteprotocoltester"))
,
sources.addConfigFile(Envvar, InputFile("liteprotocoltester")),
)
ok(conf)
except CatchableError:

View File

@@ -137,14 +137,12 @@ proc newTestWakuNode*(
if secureKey != "":
some(secureKey)
else:
none(string)
,
none(string),
secureCert =
if secureCert != "":
some(secureCert)
else:
none(string)
,
none(string),
agentString = agentString,
)

View File

@ -297,8 +297,7 @@ procSuite "Waku Rest API - Store v3":
if reqHash.isSome():
reqHash.get().toRestStringWakuMessageHash()
else:
""
, # base64-encoded digest. Empty ignores the field.
"", # base64-encoded digest. Empty ignores the field.
"true", # ascending
"7", # page size. Empty implies default page size.
)
@ -790,8 +789,7 @@ procSuite "Waku Rest API - Store v3":
if reqHash.isSome():
reqHash.get().toRestStringWakuMessageHash()
else:
""
, # base64-encoded digest. Empty ignores the field.
"", # base64-encoded digest. Empty ignores the field.
"true", # ascending
"3", # page size. Empty implies default page size.
)
@ -827,8 +825,7 @@ procSuite "Waku Rest API - Store v3":
if reqHash.isSome():
reqHash.get().toRestStringWakuMessageHash()
else:
""
, # base64-encoded digest. Empty ignores the field.
"", # base64-encoded digest. Empty ignores the field.
)
check:
@ -850,8 +847,7 @@ procSuite "Waku Rest API - Store v3":
if reqHash.isSome():
reqHash.get().toRestStringWakuMessageHash()
else:
""
, # base64-encoded digest. Empty ignores the field.
"", # base64-encoded digest. Empty ignores the field.
"true", # ascending
"5", # page size. Empty implies default page size.
)

2
vendor/nph vendored

@@ -1 +1 @@
Subproject commit de5cd4823e63424adb58ef3717524348ae6c4d87
Subproject commit 31bdced07d3dc3d254669bd94210101c701deeda

View File

@@ -55,8 +55,7 @@ proc get*(peerStore: PeerStore, peerId: PeerID): RemotePeerInfo =
if peerStore[ENRBook][peerId] != default(enr.Record):
some(peerStore[ENRBook][peerId])
else:
none(enr.Record)
,
none(enr.Record),
protocols: peerStore[ProtoBook][peerId],
agent: peerStore[AgentBook][peerId],
protoVersion: peerStore[ProtoVersionBook][peerId],

View File

@@ -58,8 +58,7 @@ proc toFilterWakuMessage*(msg: WakuMessage): FilterWakuMessage =
if msg.meta.len > 0:
some(base64.encode(msg.meta))
else:
none(Base64String)
,
none(Base64String),
ephemeral: some(msg.ephemeral),
)
@@ -239,8 +238,7 @@ proc readValue*(
if pubsubTopic.isNone() or pubsubTopic.get() == "":
none(string)
else:
some(pubsubTopic.get())
,
some(pubsubTopic.get()),
contentFilters: contentFilters.get(),
)
@@ -315,8 +313,7 @@ proc readValue*(
if pubsubTopic.isNone() or pubsubTopic.get() == "":
none(string)
else:
some(pubsubTopic.get())
,
some(pubsubTopic.get()),
contentFilters: contentFilters.get(),
)
@@ -364,8 +361,7 @@ proc readValue*(
if pubsubTopic.isNone() or pubsubTopic.get() == "":
none(string)
else:
some(pubsubTopic.get())
,
some(pubsubTopic.get()),
contentFilters: contentFilters.get(),
)

View File

@@ -110,8 +110,7 @@ proc toStoreResponseRest*(histResp: HistoryResponse): StoreResponseRest =
if message.meta.len > 0:
some(base64.encode(message.meta))
else:
none(Base64String)
,
none(Base64String),
)
var storeWakuMsgs: seq[StoreWakuMessage]

View File

@@ -62,7 +62,6 @@ proc readValue*(
if pubsubTopic.isNone() or pubsubTopic.get() == "":
none(string)
else:
some(pubsubTopic.get())
,
some(pubsubTopic.get()),
message: message.get(),
)

View File

@@ -34,8 +34,7 @@ proc toRelayWakuMessage*(msg: WakuMessage): RelayWakuMessage =
if msg.meta.len > 0:
some(base64.encode(msg.meta))
else:
none(Base64String)
,
none(Base64String),
ephemeral: some(msg.ephemeral),
)

View File

@@ -82,8 +82,7 @@ proc createTable*(db: SqliteDatabase): DatabaseResult[void] =
?db.query(
query,
proc(s: ptr sqlite3_stmt) =
discard
,
discard,
)
return ok()
@@ -98,8 +97,7 @@ proc createOldestMessageTimestampIndex*(db: SqliteDatabase): DatabaseResult[void
?db.query(
query,
proc(s: ptr sqlite3_stmt) =
discard
,
discard,
)
return ok()
@@ -184,8 +182,7 @@ proc deleteMessagesOlderThanTimestamp*(
?db.query(
query,
proc(s: ptr sqlite3_stmt) =
discard
,
discard,
)
return ok()
@@ -206,8 +203,7 @@ proc deleteOldestMessagesNotWithinLimit*(
?db.query(
query,
proc(s: ptr sqlite3_stmt) =
discard
,
discard,
)
return ok()

View File

@@ -96,8 +96,7 @@ proc createTable*(db: SqliteDatabase): DatabaseResult[void] =
?db.query(
query,
proc(s: ptr sqlite3_stmt) =
discard
,
discard,
)
return ok()
@@ -112,8 +111,7 @@ proc createOldestMessageTimestampIndex*(db: SqliteDatabase): DatabaseResult[void
?db.query(
query,
proc(s: ptr sqlite3_stmt) =
discard
,
discard,
)
return ok()
@@ -127,8 +125,7 @@ proc createHistoryQueryIndex*(db: SqliteDatabase): DatabaseResult[void] =
?db.query(
query,
proc(s: ptr sqlite3_stmt) =
discard
,
discard,
)
return ok()
@@ -226,8 +223,7 @@ proc deleteMessagesOlderThanTimestamp*(
?db.query(
query,
proc(s: ptr sqlite3_stmt) =
discard
,
discard,
)
return ok()
@@ -248,8 +244,7 @@ proc deleteOldestMessagesNotWithinLimit*(
?db.query(
query,
proc(s: ptr sqlite3_stmt) =
discard
,
discard,
)
return ok()

View File

@@ -9,8 +9,7 @@ proc hasKeys*(data: JsonNode, keys: openArray[string]): bool =
return all(
keys,
proc(key: string): bool =
return data.hasKey(key)
,
return data.hasKey(key),
)
# Safely saves a Keystore's JsonNode to disk.