Bump deps take2 (#492)

* extra utilities and tweaks
* add atlas lock
* update ignores
* break build into its own script
* update url rules
* base off codexdht's
* compile fixes for Nim 1.6.14
* update submodules
* convert mapFailure to procs to work around type resolution issues
* add toml parser for multiaddress
* change error type on keyutils
* bump nimbus build to use 1.6.14
* update gitignore
* adding new deps submodules
* bump nim ci version
* even more fixes
* more libp2p changes
* update keys
* fix eventually function
* adding coverage test file
* move coverage to build.nims
* use nimcache/coverage
* move libp2p import for tests into helper.nim
* remove named bin
* bug fixes for networkpeers (from Dmitriy)

---------

Co-authored-by: Dmitriy Ryajov <dryajov@gmail.com>
This commit is contained in:
Jaremy Creechley 2023-08-01 16:47:57 -07:00 committed by GitHub
parent 3e80de3454
commit 7efa9177df
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
84 changed files with 1931 additions and 255 deletions

View File

@ -7,7 +7,7 @@ on:
workflow_dispatch: workflow_dispatch:
env: env:
cache_nonce: 0 # Allows for easily busting actions/cache caches cache_nonce: 0 # Allows for easily busting actions/cache caches
nim_version: v1.6.10 nim_version: v1.6.14
jobs: jobs:
build: build:
strategy: strategy:
@ -105,7 +105,9 @@ jobs:
nim_version: ${{ env.nim_version }} nim_version: ${{ env.nim_version }}
- name: Generate coverage data - name: Generate coverage data
run: make -j${ncpu} coverage run: |
# make -j${ncpu} coverage
make -j${ncpu} coverage-script
shell: bash shell: bash
- name: Upload coverage data to Codecov - name: Upload coverage data to Codecov

3
.gitignore vendored
View File

@ -15,6 +15,8 @@ coverage/
# Nimble packages # Nimble packages
/vendor/.nimble /vendor/.nimble
/vendor/packages/
# /vendor/*/
# Nimble user files # Nimble user files
nimble.develop nimble.develop
@ -36,3 +38,4 @@ nimbus-build-system.paths
docker/hostdatadir docker/hostdatadir
docker/prometheus-data docker/prometheus-data
.DS_Store .DS_Store
nim.cfg

12
.gitmodules vendored
View File

@ -181,3 +181,15 @@
[submodule "vendor/codex-contracts-eth"] [submodule "vendor/codex-contracts-eth"]
path = vendor/codex-contracts-eth path = vendor/codex-contracts-eth
url = https://github.com/status-im/codex-contracts-eth url = https://github.com/status-im/codex-contracts-eth
[submodule "vendor/nim-protobuf-serialization"]
path = vendor/nim-protobuf-serialization
url = https://github.com/status-im/nim-protobuf-serialization
[submodule "vendor/nim-results"]
path = vendor/nim-results
url = https://github.com/arnetheduck/nim-results
[submodule "vendor/nim-testutils"]
path = vendor/nim-testutils
url = https://github.com/status-im/nim-testutils
[submodule "vendor/npeg"]
path = vendor/npeg
url = https://github.com/zevv/npeg

View File

@ -48,7 +48,7 @@ else # "variables.mk" was included. Business as usual until the end of this file
# Builds the codex binary # Builds the codex binary
all: | build deps all: | build deps
echo -e $(BUILD_MSG) "build/$@" && \ echo -e $(BUILD_MSG) "build/$@" && \
$(ENV_SCRIPT) nim codex $(NIM_PARAMS) codex.nims $(ENV_SCRIPT) nim codex $(NIM_PARAMS) build.nims
# must be included after the default target # must be included after the default target
-include $(BUILD_SYSTEM_DIR)/makefiles/targets.mk -include $(BUILD_SYSTEM_DIR)/makefiles/targets.mk
@ -60,15 +60,12 @@ else
NIM_PARAMS := $(NIM_PARAMS) -d:release NIM_PARAMS := $(NIM_PARAMS) -d:release
endif endif
deps: | deps-common nat-libs codex.nims deps: | deps-common nat-libs
ifneq ($(USE_LIBBACKTRACE), 0) ifneq ($(USE_LIBBACKTRACE), 0)
deps: | libbacktrace deps: | libbacktrace
endif endif
#- deletes and recreates "codex.nims" which on Windows is a copy instead of a proper symlink
update: | update-common update: | update-common
rm -rf codex.nims && \
$(MAKE) codex.nims $(HANDLE_OUTPUT)
# detecting the os # detecting the os
ifeq ($(OS),Windows_NT) # is Windows_NT on XP, 2000, 7, Vista, 10... ifeq ($(OS),Windows_NT) # is Windows_NT on XP, 2000, 7, Vista, 10...
@ -83,26 +80,22 @@ endif
# Builds and run a part of the test suite # Builds and run a part of the test suite
test: | build deps test: | build deps
echo -e $(BUILD_MSG) "build/$@" && \ echo -e $(BUILD_MSG) "build/$@" && \
$(ENV_SCRIPT) nim test $(NIM_PARAMS) codex.nims $(ENV_SCRIPT) nim test $(NIM_PARAMS) build.nims
# Builds and runs the smart contract tests # Builds and runs the smart contract tests
testContracts: | build deps testContracts: | build deps
echo -e $(BUILD_MSG) "build/$@" && \ echo -e $(BUILD_MSG) "build/$@" && \
$(ENV_SCRIPT) nim testContracts $(NIM_PARAMS) codex.nims $(ENV_SCRIPT) nim testContracts $(NIM_PARAMS) build.nims
# Builds and runs the integration tests # Builds and runs the integration tests
testIntegration: | build deps testIntegration: | build deps
echo -e $(BUILD_MSG) "build/$@" && \ echo -e $(BUILD_MSG) "build/$@" && \
$(ENV_SCRIPT) nim testIntegration $(NIM_PARAMS) codex.nims $(ENV_SCRIPT) nim testIntegration $(NIM_PARAMS) build.nims
# Builds and runs all tests # Builds and runs all tests
testAll: | build deps testAll: | build deps
echo -e $(BUILD_MSG) "build/$@" && \ echo -e $(BUILD_MSG) "build/$@" && \
$(ENV_SCRIPT) nim testAll $(NIM_PARAMS) codex.nims $(ENV_SCRIPT) nim testAll $(NIM_PARAMS) build.nims
# symlink
codex.nims:
ln -s codex.nimble $@
# nim-libbacktrace # nim-libbacktrace
LIBBACKTRACE_MAKE_FLAGS := -C vendor/nim-libbacktrace --no-print-directory BUILD_CXX_LIB=0 LIBBACKTRACE_MAKE_FLAGS := -C vendor/nim-libbacktrace --no-print-directory BUILD_CXX_LIB=0
@ -127,8 +120,15 @@ coverage:
shopt -s globstar && lcov --extract coverage/coverage.info $$(pwd)/codex/{*,**/*}.nim --output-file coverage/coverage.f.info shopt -s globstar && lcov --extract coverage/coverage.info $$(pwd)/codex/{*,**/*}.nim --output-file coverage/coverage.f.info
echo -e $(BUILD_MSG) "coverage/report/index.html" echo -e $(BUILD_MSG) "coverage/report/index.html"
genhtml coverage/coverage.f.info --output-directory coverage/report genhtml coverage/coverage.f.info --output-directory coverage/report
show-coverage:
if which open >/dev/null; then (echo -e "\e[92mOpening\e[39m HTML coverage report in browser..." && open coverage/report/index.html) || true; fi if which open >/dev/null; then (echo -e "\e[92mOpening\e[39m HTML coverage report in browser..." && open coverage/report/index.html) || true; fi
coverage-script: build deps
echo -e $(BUILD_MSG) "build/$@" && \
$(ENV_SCRIPT) nim coverage $(NIM_PARAMS) build.nims
echo "Run `make show-coverage` to view coverage results"
# usual cleaning # usual cleaning
clean: | clean-common clean: | clean-common
rm -rf build rm -rf build

209
atlas.lock Normal file
View File

@ -0,0 +1,209 @@
{
"clangVersion": "",
"gccVersion": "",
"hostCPU": "arm64",
"hostOS": "macosx",
"items": {
"asynctest": {
"commit": "fe1a34caf572b05f8bdba3b650f1871af9fce31e",
"dir": "vendor/asynctest",
"url": "https://github.com/codex-storage/asynctest"
},
"dnsclient.nim": {
"commit": "23214235d4784d24aceed99bbfe153379ea557c8",
"dir": "vendor/dnsclient.nim",
"url": "https://github.com/ba0f3/dnsclient.nim"
},
"lrucache.nim": {
"commit": "8767ade0b76ea5b5d4ce24a52d0c58a6ebeb66cd",
"dir": "vendor/lrucache.nim",
"url": "https://github.com/status-im/lrucache.nim"
},
"nim-bearssl": {
"commit": "99fcb3405c55b27cfffbf60f5368c55da7346f23",
"dir": "vendor/nim-bearssl",
"url": "https://github.com/status-im/nim-bearssl"
},
"nim-blscurve": {
"commit": "48d8668c5a9a350d3a7ee0c3713ef9a11980a40d",
"dir": "vendor/nim-blscurve",
"url": "https://github.com/status-im/nim-blscurve"
},
"nim-chronicles": {
"commit": "c9c8e58ec3f89b655a046c485f622f9021c68b61",
"dir": "vendor/nim-chronicles",
"url": "https://github.com/status-im/nim-chronicles"
},
"nim-chronos": {
"commit": "0277b65be2c7a365ac13df002fba6e172be55537",
"dir": "vendor/nim-chronos",
"url": "https://github.com/status-im/nim-chronos"
},
"nim-confutils": {
"commit": "2028b41602b3abf7c9bf450744efde7b296707a2",
"dir": "vendor/nim-confutils",
"url": "https://github.com/status-im/nim-confutils"
},
"nim-contract-abi": {
"commit": "61f8f59b3917d8e27c6eb4330a6d8cf428e98b2d",
"dir": "vendor/nim-contract-abi",
"url": "https://github.com/status-im/nim-contract-abi"
},
"nim-datastore": {
"commit": "0cde8aeb67c59fd0ac95496dc6b5e1168d6632aa",
"dir": "vendor/nim-datastore",
"url": "https://github.com/codex-storage/nim-datastore"
},
"nim-faststreams": {
"commit": "720fc5e5c8e428d9d0af618e1e27c44b42350309",
"dir": "vendor/nim-faststreams",
"url": "https://github.com/status-im/nim-faststreams"
},
"nim-http-utils": {
"commit": "3b491a40c60aad9e8d3407443f46f62511e63b18",
"dir": "vendor/nim-http-utils",
"url": "https://github.com/status-im/nim-http-utils"
},
"nim-json-rpc": {
"commit": "0bf2bcbe74a18a3c7a709d57108bb7b51e748a92",
"dir": "vendor/nim-json-rpc",
"url": "https://github.com/status-im/nim-json-rpc"
},
"nim-json-serialization": {
"commit": "bb53d49caf2a6c6cf1df365ba84af93cdcfa7aa3",
"dir": "vendor/nim-json-serialization",
"url": "https://github.com/status-im/nim-json-serialization"
},
"nim-leopard": {
"commit": "1a6f2ab7252426a6ac01482a68b75d0c3b134cf0",
"dir": "vendor/nim-leopard",
"url": "https://github.com/status-im/nim-leopard"
},
"nim-libbacktrace": {
"commit": "b29c22ba0ef13de50b779c776830dbea1d50cd33",
"dir": "vendor/nim-libbacktrace",
"url": "https://github.com/status-im/nim-libbacktrace"
},
"nim-libp2p": {
"commit": "440461b24b9e66542b34d26a0b908c17f6549d05",
"dir": "vendor/nim-libp2p",
"url": "https://github.com/status-im/nim-libp2p"
},
"nim-libp2p-dht": {
"commit": "fdd02450aa6979add7dabd29a3ba0f8738bf89f8",
"dir": "vendor/nim-libp2p-dht",
"url": "https://github.com/status-im/nim-libp2p-dht"
},
"nim-metrics": {
"commit": "6142e433fc8ea9b73379770a788017ac528d46ff",
"dir": "vendor/nim-metrics",
"url": "https://github.com/status-im/nim-metrics"
},
"nim-nat-traversal": {
"commit": "27d314d65c9078924b3239fe4e2f5af0c512b28c",
"dir": "vendor/nim-nat-traversal",
"url": "https://github.com/status-im/nim-nat-traversal"
},
"nim-nitro": {
"commit": "6b4c455bf4dad7449c1580055733a1738fcd5aec",
"dir": "vendor/nim-nitro",
"url": "https://github.com/status-im/nim-nitro"
},
"nim-presto": {
"commit": "3984431dc0fc829eb668e12e57e90542b041d298",
"dir": "vendor/nim-presto",
"url": "https://github.com/status-im/nim-presto"
},
"nim-protobuf-serialization": {
"commit": "28214b3e40c755a9886d2ec8f261ec48fbb6bec6",
"dir": "vendor/nim-protobuf-serialization",
"url": "https://github.com/status-im/nim-protobuf-serialization"
},
"nim-results": {
"commit": "f3c666a272c69d70cb41e7245e7f6844797303ad",
"dir": "vendor/nim-results",
"url": "https://github.com/arnetheduck/nim-results"
},
"nim-secp256k1": {
"commit": "2acbbdcc0e63002a013fff49f015708522875832",
"dir": "vendor/nim-secp256k1",
"url": "https://github.com/status-im/nim-secp256k1"
},
"nim-serialization": {
"commit": "384eb2561ee755446cff512a8e057325848b86a7",
"dir": "vendor/nim-serialization",
"url": "https://github.com/status-im/nim-serialization"
},
"nim-sqlite3-abi": {
"commit": "362e1bd9f689ad9f5380d9d27f0705b3d4dfc7d3",
"dir": "vendor/nim-sqlite3-abi",
"url": "https://github.com/arnetheduck/nim-sqlite3-abi"
},
"nim-stew": {
"commit": "7afe7e3c070758cac1f628e4330109f3ef6fc853",
"dir": "vendor/nim-stew",
"url": "https://github.com/status-im/nim-stew"
},
"nim-taskpools": {
"commit": "b3673c7a7a959ccacb393bd9b47e997bbd177f5a",
"dir": "vendor/nim-taskpools",
"url": "https://github.com/status-im/nim-taskpools"
},
"nim-testutils": {
"commit": "b56a5953e37fc5117bd6ea6dfa18418c5e112815",
"dir": "vendor/nim-testutils",
"url": "https://github.com/status-im/nim-testutils"
},
"nim-toml-serialization": {
"commit": "86d477136f105f04bfd0dd7c0e939593d81fc581",
"dir": "vendor/nim-toml-serialization",
"url": "https://github.com/status-im/nim-toml-serialization"
},
"nim-unittest2": {
"commit": "b178f47527074964f76c395ad0dfc81cf118f379",
"dir": "vendor/nim-unittest2",
"url": "https://github.com/status-im/nim-unittest2"
},
"nim-websock": {
"commit": "2c3ae3137f3c9cb48134285bd4a47186fa51f0e8",
"dir": "vendor/nim-websock",
"url": "https://github.com/status-im/nim-websock"
},
"nim-zlib": {
"commit": "f34ca261efd90f118dc1647beefd2f7a69b05d93",
"dir": "vendor/nim-zlib",
"url": "https://github.com/status-im/nim-zlib"
},
"nim-stint": {
"dir": "vendor/stint",
"url": "https://github.com/status-im/nim-stint",
"commit": "86621eced1dcfb5e25903019ebcfc76ed9128ec5"
},
"nimcrypto": {
"commit": "24e006df85927f64916e60511620583b11403178",
"dir": "vendor/nimcrypto",
"url": "https://github.com/status-im/nimcrypto"
},
"npeg": {
"commit": "b15a10e388b91b898c581dbbcb6a718d46b27d2f",
"dir": "vendor/npeg",
"url": "https://github.com/zevv/npeg"
},
"questionable": {
"commit": "b3cf35ac450fd42c9ea83dc084f5cba2efc55da3",
"dir": "vendor/questionable",
"url": "https://github.com/codex-storage/questionable"
},
"upraises": {
"commit": "ff4f8108e44fba9b35cac535ab63d3927e8fd3c2",
"dir": "vendor/upraises",
"url": "https://github.com/markspanbroek/upraises"
}
},
"nimVersion": "1.6.14",
"nimbleFile": {
"content": "# Package\n\nversion = \"0.3.2\"\nauthor = \"Status Research & Development GmbH\"\ndescription = \"DHT based on the libp2p Kademlia spec\"\nlicense = \"MIT\"\nskipDirs = @[\"tests\"]\n\n\n# Dependencies\nrequires \"nim >= 1.2.0\"\nrequires \"secp256k1#2acbbdcc0e63002a013fff49f015708522875832\" # >= 0.5.2 & < 0.6.0\nrequires \"protobuf_serialization\" # >= 0.2.0 & < 0.3.0\nrequires \"nimcrypto == 0.5.4\"\nrequires \"bearssl#head\"\nrequires \"chronicles >= 0.10.2 & < 0.11.0\"\nrequires \"chronos == 3.2.0\" # >= 3.0.11 & < 3.1.0\nrequires \"libp2p#unstable\"\nrequires \"metrics\"\nrequires \"stew#head\"\nrequires \"stint\"\nrequires \"asynctest >= 0.3.1 & < 0.4.0\"\nrequires \"https://github.com/codex-storage/nim-datastore#head\"\nrequires \"questionable\"\n\ninclude \"build.nims\"\n\n",
"filename": ""
},
"nimcfg": "############# begin Atlas config section ##########\n--noNimblePath\n--path:\"vendor/nim-secp256k1\"\n--path:\"vendor/nim-protobuf-serialization\"\n--path:\"vendor/nimcrypto\"\n--path:\"vendor/nim-bearssl\"\n--path:\"vendor/nim-chronicles\"\n--path:\"vendor/nim-chronos\"\n--path:\"vendor/nim-libp2p\"\n--path:\"vendor/nim-metrics\"\n--path:\"vendor/nim-stew\"\n--path:\"vendor/nim-stint\"\n--path:\"vendor/asynctest\"\n--path:\"vendor/nim-datastore\"\n--path:\"vendor/questionable\"\n--path:\"vendor/nim-faststreams\"\n--path:\"vendor/nim-serialization\"\n--path:\"vendor/npeg/src\"\n--path:\"vendor/nim-unittest2\"\n--path:\"vendor/nim-testutils\"\n--path:\"vendor/nim-json-serialization\"\n--path:\"vendor/nim-http-utils\"\n--path:\"vendor/dnsclient.nim/src\"\n--path:\"vendor/nim-websock\"\n--path:\"vendor/nim-results\"\n--path:\"vendor/nim-sqlite3-abi\"\n--path:\"vendor/upraises\"\n--path:\"vendor/nim-zlib\"\n############# end Atlas config section ##########\n"
}

87
build.nims Normal file
View File

@ -0,0 +1,87 @@
mode = ScriptMode.Verbose
### Helper functions
proc buildBinary(name: string, srcDir = "./", params = "", lang = "c") =
if not dirExists "build":
mkDir "build"
# allow something like "nim nimbus --verbosity:0 --hints:off nimbus.nims"
var extra_params = params
when compiles(commandLineParams):
for param in commandLineParams():
extra_params &= " " & param
else:
for i in 2..<paramCount():
extra_params &= " " & paramStr(i)
let cmd = "nim " & lang & " --out:build/" & name & " " & extra_params & " " & srcDir & name & ".nim"
exec(cmd)
proc test(name: string, srcDir = "tests/", params = "", lang = "c") =
buildBinary name, srcDir, params
exec "build/" & name
task codex, "build codex binary":
buildBinary "codex", params = "-d:chronicles_runtime_filtering -d:chronicles_log_level=TRACE"
task testCodex, "Build & run Codex tests":
test "testCodex", params = "-d:codex_enable_proof_failures=true"
task testContracts, "Build & run Codex Contract tests":
test "testContracts"
task testIntegration, "Run integration tests":
buildBinary "codex", params = "-d:chronicles_runtime_filtering -d:chronicles_log_level=TRACE -d:codex_enable_proof_failures=true"
test "testIntegration"
task build, "build codex binary":
codexTask()
task test, "Run tests":
testCodexTask()
task testAll, "Run all tests":
testCodexTask()
testContractsTask()
testIntegrationTask()
import strutils
import os
task coverage, "generates code coverage report":
var (output, exitCode) = gorgeEx("which lcov")
if exitCode != 0:
echo " ************************** ⛔️ ERROR ⛔️ **************************"
echo " ** ERROR: lcov not found, it must be installed to run code **"
echo " ** coverage locally **"
echo " *****************************************************************"
quit 1
(output, exitCode) = gorgeEx("gcov --version")
if output.contains("Apple LLVM"):
echo " ************************* ⚠️ WARNING ⚠️ *************************"
echo " ** WARNING: Using Apple's llvm-cov in place of gcov, which **"
echo " ** emulates an old version of gcov (4.2.0) and therefore **"
echo " ** coverage results will differ than those on CI (which **"
echo " ** uses a much newer version of gcov). **"
echo " *****************************************************************"
var nimSrcs = " "
for f in walkDirRec("codex", {pcFile}):
if f.endswith(".nim"): nimSrcs.add " " & f.absolutePath.quoteShell()
echo "======== Running Tests ======== "
test "coverage", srcDir = "tests/", params = " --nimcache:nimcache/coverage -d:release "
exec("rm nimcache/coverage/*.c")
rmDir("coverage"); mkDir("coverage")
echo " ======== Running LCOV ======== "
exec("lcov --capture --directory nimcache/coverage --output-file coverage/coverage.info")
exec("lcov --extract coverage/coverage.info --output-file coverage/coverage.f.info " & nimSrcs)
echo " ======== Generating HTML coverage report ======== "
exec("genhtml coverage/coverage.f.info --output-directory coverage/report ")
echo " ======== Coverage report Done ======== "
task showCoverage, "open coverage html":
echo " ======== Opening HTML coverage report in browser... ======== "
if findExe("open") != "":
exec("open coverage/report/index.html")

View File

@ -1,11 +1,10 @@
mode = ScriptMode.Verbose
version = "0.1.0" version = "0.1.0"
author = "Codex Team" author = "Codex Team"
description = "p2p data durability engine" description = "p2p data durability engine"
license = "MIT" license = "MIT"
binDir = "build" binDir = "build"
srcDir = "." srcDir = "."
installFiles = @["build.nims"]
requires "nim >= 1.2.0" requires "nim >= 1.2.0"
requires "asynctest >= 0.3.2 & < 0.4.0" requires "asynctest >= 0.3.2 & < 0.4.0"
@ -32,47 +31,4 @@ requires "blscurve"
requires "libp2pdht" requires "libp2pdht"
requires "eth" requires "eth"
when declared(namedBin): include "build.nims"
namedBin = {
"codex/codex": "codex"
}.toTable()
### Helper functions
proc buildBinary(name: string, srcDir = "./", params = "", lang = "c") =
if not dirExists "build":
mkDir "build"
# allow something like "nim nimbus --verbosity:0 --hints:off nimbus.nims"
var extra_params = params
when compiles(commandLineParams):
for param in commandLineParams:
extra_params &= " " & param
else:
for i in 2..<paramCount():
extra_params &= " " & paramStr(i)
exec "nim " & lang & " --out:build/" & name & " " & extra_params & " " & srcDir & name & ".nim"
proc test(name: string, srcDir = "tests/", params = "", lang = "c") =
buildBinary name, srcDir, params
exec "build/" & name
task codex, "build codex binary":
buildBinary "codex", params = "-d:chronicles_runtime_filtering -d:chronicles_log_level=TRACE"
task testCodex, "Build & run Codex tests":
test "testCodex", params = "-d:codex_enable_proof_failures=true"
task testContracts, "Build & run Codex Contract tests":
test "testContracts"
task testIntegration, "Run integration tests":
buildBinary "codex", params = "-d:chronicles_runtime_filtering -d:chronicles_log_level=TRACE -d:codex_enable_proof_failures=true"
test "testIntegration"
task test, "Run tests":
testCodexTask()
task testAll, "Run all tests":
testCodexTask()
testContractsTask()
testIntegrationTask()

View File

@ -11,7 +11,7 @@ import std/sequtils
import pkg/chronos import pkg/chronos
import pkg/chronicles import pkg/chronicles
import pkg/libp2p import pkg/libp2p/cid
import pkg/metrics import pkg/metrics
import pkg/questionable import pkg/questionable
import pkg/questionable/results import pkg/questionable/results

View File

@ -14,7 +14,7 @@ import std/algorithm
import pkg/chronos import pkg/chronos
import pkg/chronicles import pkg/chronicles
import pkg/libp2p import pkg/libp2p/[cid, switch]
import pkg/metrics import pkg/metrics
import pkg/stint import pkg/stint

View File

@ -285,7 +285,7 @@ proc getOrCreatePeer(b: BlockExcNetwork, peer: PeerId): NetworkPeer =
if peer in b.peers: if peer in b.peers:
return b.peers.getOrDefault(peer, nil) return b.peers.getOrDefault(peer, nil)
var getConn = proc(): Future[Connection] {.async.} = var getConn: ConnProvider = proc(): Future[Connection] {.async, gcsafe, closure.} =
try: try:
return await b.switch.dial(peer, Codec) return await b.switch.dial(peer, Codec)
except CatchableError as exc: except CatchableError as exc:

View File

@ -14,7 +14,7 @@ import pkg/upraises
push: {.upraises: [].} push: {.upraises: [].}
import pkg/libp2p import pkg/libp2p/[cid, multicodec]
import pkg/stew/byteutils import pkg/stew/byteutils
import pkg/questionable import pkg/questionable
import pkg/questionable/results import pkg/questionable/results

View File

@ -106,6 +106,7 @@ type
defaultValue: noCommand }: StartUpCommand defaultValue: noCommand }: StartUpCommand
of noCommand: of noCommand:
listenAddrs* {. listenAddrs* {.
desc: "Multi Addresses to listen on" desc: "Multi Addresses to listen on"
defaultValue: @[ defaultValue: @[
@ -292,9 +293,17 @@ proc defaultDataDir*(): string =
getHomeDir() / dataDir getHomeDir() / dataDir
proc parseCmdArg*(T: type MultiAddress, input: string): T proc parseCmdArg*(T: typedesc[MultiAddress],
input: string): MultiAddress
{.upraises: [ValueError, LPError].} = {.upraises: [ValueError, LPError].} =
MultiAddress.init($input).tryGet() var ma: MultiAddress
let res = MultiAddress.init(input)
if res.isOk:
ma = res.get()
else:
warn "Invalid MultiAddress", input=input, error=res.error()
quit QuitFailure
ma
proc parseCmdArg*(T: type SignedPeerRecord, uri: string): T = proc parseCmdArg*(T: type SignedPeerRecord, uri: string): T =
var res: SignedPeerRecord var res: SignedPeerRecord
@ -337,6 +346,18 @@ proc readValue*(r: var TomlReader, val: var SignedPeerRecord) =
val = SignedPeerRecord.parseCmdArg(uri) val = SignedPeerRecord.parseCmdArg(uri)
proc readValue*(r: var TomlReader, val: var MultiAddress) =
without input =? r.readValue(string).catch, err:
error "invalid MultiAddress configuration value", error = err.msg
quit QuitFailure
let res = MultiAddress.init(input)
if res.isOk:
val = res.get()
else:
warn "Invalid MultiAddress", input=input, error=res.error()
quit QuitFailure
proc readValue*(r: var TomlReader, val: var NBytes) proc readValue*(r: var TomlReader, val: var NBytes)
{.upraises: [SerializationError, IOError].} = {.upraises: [SerializationError, IOError].} =
var value = 0'i64 var value = 0'i64

View File

@ -11,14 +11,12 @@ import std/algorithm
import pkg/chronos import pkg/chronos
import pkg/chronicles import pkg/chronicles
import pkg/libp2p import pkg/libp2p/[cid, multicodec, routing_record, signed_envelope]
import pkg/libp2p/routing_record
import pkg/libp2p/signed_envelope
import pkg/questionable import pkg/questionable
import pkg/questionable/results import pkg/questionable/results
import pkg/stew/shims/net import pkg/stew/shims/net
import pkg/contractabi/address as ca import pkg/contractabi/address as ca
import pkg/libp2pdht/discv5/protocol as discv5 import pkg/codexdht/discv5/protocol as discv5
import ./rng import ./rng
import ./errors import ./errors

View File

@ -9,15 +9,20 @@
import pkg/stew/results import pkg/stew/results
export results
type type
CodexError* = object of CatchableError # base codex error CodexError* = object of CatchableError # base codex error
CodexResult*[T] = Result[T, ref CodexError] CodexResult*[T] = Result[T, ref CodexError]
template mapFailure*( template mapFailure*[T, V, E](
exp: untyped, exp: Result[T, V],
exc: typed = type CodexError exc: typedesc[E],
): untyped = ): Result[T, ref CatchableError] =
## Convert `Result[T, E]` to `Result[E, ref CatchableError]` ## Convert `Result[T, E]` to `Result[E, ref CatchableError]`
## ##
((exp.mapErr do (e: auto) -> ref CatchableError: (ref exc)(msg: $e))) exp.mapErr(proc (e: V): ref CatchableError = (ref exc)(msg: $e))
template mapFailure*[T, V](exp: Result[T, V]): Result[T, ref CatchableError] =
mapFailure(exp, CodexError)

View File

@ -10,7 +10,7 @@
import std/strutils import std/strutils
import pkg/chronicles import pkg/chronicles
import pkg/libp2p import pkg/libp2p/cid
func shortLog*(cid: Cid): string = func shortLog*(cid: Cid): string =
## Returns compact string representation of ``pid``. ## Returns compact string representation of ``pid``.

View File

@ -101,7 +101,8 @@ func `[]=`*(self: Manifest, i: BackwardsIndex, item: Cid) =
self.blocks[self.len - i.int] = item self.blocks[self.len - i.int] = item
func isManifest*(cid: Cid): ?!bool = func isManifest*(cid: Cid): ?!bool =
($(?cid.contentType().mapFailure) in ManifestContainers).success let res = ?cid.contentType().mapFailure(CodexError)
($(res) in ManifestContainers).success
func isManifest*(mc: MultiCodec): ?!bool = func isManifest*(mc: MultiCodec): ?!bool =
($mc in ManifestContainers).success ($mc in ManifestContainers).success
@ -189,11 +190,8 @@ proc makeRoot*(self: Manifest): ?!void =
stack.add(mh) stack.add(mh)
if stack.len == 1: if stack.len == 1:
let cid = ? Cid.init( let digest = ? EmptyDigests[self.version][self.hcodec].catch
self.version, let cid = ? Cid.init(self.version, self.codec, digest).mapFailure
self.codec,
(? EmptyDigests[self.version][self.hcodec].catch))
.mapFailure
self.rootHash = cid.some self.rootHash = cid.some
@ -225,8 +223,8 @@ proc new*(
## Create a manifest using an array of `Cid`s ## Create a manifest using an array of `Cid`s
## ##
if hcodec notin EmptyDigests[version]: # if hcodec notin EmptyDigests[version]:
return failure("Unsupported manifest hash codec!") # return failure("Unsupported manifest hash codec!")
T( T(
blocks: @blocks, blocks: @blocks,

View File

@ -16,7 +16,9 @@ import pkg/questionable
import pkg/questionable/results import pkg/questionable/results
import pkg/chronicles import pkg/chronicles
import pkg/chronos import pkg/chronos
import pkg/libp2p
import pkg/libp2p/switch
import pkg/libp2p/stream/bufferstream
# TODO: remove once exported by libp2p # TODO: remove once exported by libp2p
import pkg/libp2p/routing_record import pkg/libp2p/routing_record
@ -60,23 +62,21 @@ type
proc findPeer*( proc findPeer*(
node: CodexNodeRef, node: CodexNodeRef,
peerId: PeerId peerId: PeerId): Future[?PeerRecord] {.async.} =
): Future[?PeerRecord] {.async.} =
## Find peer using the discovery service from the given CodexNode ## Find peer using the discovery service from the given CodexNode
## ##
return await node.discovery.findPeer(peerId) return await node.discovery.findPeer(peerId)
proc connect*( proc connect*(
node: CodexNodeRef, node: CodexNodeRef,
peerId: PeerId, peerId: PeerId,
addrs: seq[MultiAddress] addrs: seq[MultiAddress]
): Future[void] = ): Future[void] =
node.switch.connect(peerId, addrs) node.switch.connect(peerId, addrs)
proc fetchManifest*( proc fetchManifest*(
node: CodexNodeRef, node: CodexNodeRef,
cid: Cid cid: Cid): Future[?!Manifest] {.async.} =
): Future[?!Manifest] {.async.} =
## Fetch and decode a manifest block ## Fetch and decode a manifest block
## ##
@ -100,11 +100,10 @@ proc fetchManifest*(
return manifest.success return manifest.success
proc fetchBatched*( proc fetchBatched*(
node: CodexNodeRef, node: CodexNodeRef,
manifest: Manifest, manifest: Manifest,
batchSize = FetchBatch, batchSize = FetchBatch,
onBatch: BatchProc = nil onBatch: BatchProc = nil): Future[?!void] {.async, gcsafe.} =
): Future[?!void] {.async, gcsafe.} =
## Fetch manifest in batches of `batchSize` ## Fetch manifest in batches of `batchSize`
## ##
@ -130,9 +129,8 @@ proc fetchBatched*(
return success() return success()
proc retrieve*( proc retrieve*(
node: CodexNodeRef, node: CodexNodeRef,
cid: Cid cid: Cid): Future[?!LPStream] {.async.} =
): Future[?!LPStream] {.async.} =
## Retrieve by Cid a single block or an entire dataset described by manifest ## Retrieve by Cid a single block or an entire dataset described by manifest
## ##
@ -147,47 +145,35 @@ proc retrieve*(
trace "Unable to erasure decode manifest", cid, exc = error.msg trace "Unable to erasure decode manifest", cid, exc = error.msg
except CatchableError as exc: except CatchableError as exc:
trace "Exception decoding manifest", cid, exc = exc.msg trace "Exception decoding manifest", cid, exc = exc.msg
#
asyncSpawn erasureJob() asyncSpawn erasureJob()
# else:
# # Prefetch the entire dataset into the local store
# proc prefetchBlocks() {.async, raises: [Defect].} =
# try:
# discard await node.fetchBatched(manifest)
# except CatchableError as exc:
# trace "Exception prefetching blocks", exc = exc.msg
# #
# # asyncSpawn prefetchBlocks() - temporarily commented out
#
# Retrieve all blocks of the dataset sequentially from the local store or network # Retrieve all blocks of the dataset sequentially from the local store or network
trace "Creating store stream for manifest", cid trace "Creating store stream for manifest", cid
return LPStream(StoreStream.new(node.blockStore, manifest, pad = false)).success LPStream(StoreStream.new(node.blockStore, manifest, pad = false)).success
else:
let
stream = BufferStream.new()
let without blk =? (await node.blockStore.getBlock(cid)), err:
stream = BufferStream.new() return failure(err)
without blk =? (await node.blockStore.getBlock(cid)), err: proc streamOneBlock(): Future[void] {.async.} =
return failure(err) try:
await stream.pushData(blk.data)
except CatchableError as exc:
trace "Unable to send block", cid, exc = exc.msg
discard
finally:
await stream.pushEof()
proc streamOneBlock(): Future[void] {.async.} = asyncSpawn streamOneBlock()
try: LPStream(stream).success()
await stream.pushData(blk.data)
except CatchableError as exc:
trace "Unable to send block", cid, exc = exc.msg
discard
finally:
await stream.pushEof()
asyncSpawn streamOneBlock()
return LPStream(stream).success()
return failure("Unable to retrieve Cid!")
proc store*( proc store*(
self: CodexNodeRef, self: CodexNodeRef,
stream: LPStream, stream: LPStream,
blockSize = DefaultBlockSize blockSize = DefaultBlockSize): Future[?!Cid] {.async.} =
): Future[?!Cid] {.async.} =
## Save stream contents as dataset with given blockSize ## Save stream contents as dataset with given blockSize
## to nodes's BlockStore, and return Cid of its manifest ## to nodes's BlockStore, and return Cid of its manifest
## ##
@ -249,16 +235,15 @@ proc store*(
return manifest.cid.success return manifest.cid.success
proc requestStorage*( proc requestStorage*(
self: CodexNodeRef, self: CodexNodeRef,
cid: Cid, cid: Cid,
duration: UInt256, duration: UInt256,
proofProbability: UInt256, proofProbability: UInt256,
nodes: uint, nodes: uint,
tolerance: uint, tolerance: uint,
reward: UInt256, reward: UInt256,
collateral: UInt256, collateral: UInt256,
expiry = UInt256.none expiry = UInt256.none): Future[?!PurchaseId] {.async.} =
): Future[?!PurchaseId] {.async.} =
## Initiate a request for storage sequence, this might ## Initiate a request for storage sequence, this might
## be a multistep procedure. ## be a multistep procedure.
## ##
@ -323,14 +308,13 @@ proc requestStorage*(
return success purchase.id return success purchase.id
proc new*( proc new*(
T: type CodexNodeRef, T: type CodexNodeRef,
switch: Switch, switch: Switch,
store: BlockStore, store: BlockStore,
engine: BlockExcEngine, engine: BlockExcEngine,
erasure: Erasure, erasure: Erasure,
discovery: Discovery, discovery: Discovery,
contracts = Contracts.default contracts = Contracts.default): CodexNodeRef =
): CodexNodeRef =
## Create new instance of a Codex node, call `start` to run it ## Create new instance of a Codex node, call `start` to run it
## ##
CodexNodeRef( CodexNodeRef(

View File

@ -27,9 +27,9 @@ import pkg/confutils
import pkg/libp2p import pkg/libp2p
import pkg/libp2p/routing_record import pkg/libp2p/routing_record
import pkg/libp2pdht/discv5/spr as spr import pkg/codexdht/discv5/spr as spr
import pkg/libp2pdht/discv5/routing_table as rt import pkg/codexdht/discv5/routing_table as rt
import pkg/libp2pdht/discv5/node as dn import pkg/codexdht/discv5/node as dn
import ../node import ../node
import ../blocktype import ../blocktype

View File

@ -47,7 +47,7 @@ proc retrieve*(
trace "Cannot retrieve storage proof data from fs", path , error trace "Cannot retrieve storage proof data from fs", path , error
return failure("Cannot retrieve storage proof data from fs") return failure("Cannot retrieve storage proof data from fs")
return PorMessage.decode(data).mapFailure return PorMessage.decode(data).mapFailure(CatchableError)
proc store*( proc store*(
self: StpStore, self: StpStore,

View File

@ -18,7 +18,7 @@ import pkg/questionable/results
import ../blocktype import ../blocktype
export blocktype, libp2p export blocktype
type type
BlockNotFoundError* = object of CodexError BlockNotFoundError* = object of CodexError

View File

@ -13,7 +13,7 @@ push: {.upraises: [].}
import pkg/chronos import pkg/chronos
import pkg/chronicles import pkg/chronicles
import pkg/libp2p import pkg/libp2p/cid
import pkg/metrics import pkg/metrics
import pkg/questionable import pkg/questionable
import pkg/questionable/results import pkg/questionable/results
@ -26,7 +26,7 @@ import ../blocktype
import ../clock import ../clock
import ../systemclock import ../systemclock
export blocktype, libp2p export blocktype, cid
logScope: logScope:
topics = "codex repostore" topics = "codex repostore"

View File

@ -7,11 +7,11 @@
## This file may not be copied, modified, or distributed except according to ## This file may not be copied, modified, or distributed except according to
## those terms. ## those terms.
import pkg/libp2p import pkg/libp2p/stream/lpstream
import pkg/chronos import pkg/chronos
import pkg/chronicles import pkg/chronicles
export libp2p, chronos, chronicles export lpstream, chronos, chronicles
logScope: logScope:
topics = "codex seekablestream" topics = "codex seekablestream"

View File

@ -13,7 +13,6 @@ import pkg/upraises
push: {.upraises: [].} push: {.upraises: [].}
import pkg/libp2p
import pkg/chronos import pkg/chronos
import pkg/chronicles import pkg/chronicles
import pkg/stew/ptrops import pkg/stew/ptrops

View File

@ -12,12 +12,14 @@ push: {.upraises: [].}
import pkg/chronicles import pkg/chronicles
import pkg/questionable/results import pkg/questionable/results
import pkg/libp2p import pkg/libp2p/crypto/crypto
import ./fileutils import ./fileutils
import ../errors import ../errors
import ../rng import ../rng
export crypto
type type
CodexKeyError = object of CodexError CodexKeyError = object of CodexError
CodexKeyUnsafeError = object of CodexKeyError CodexKeyUnsafeError = object of CodexKeyError
@ -38,6 +40,5 @@ proc setupKey*(path: string): ?!PrivateKey =
return failure newException( return failure newException(
CodexKeyUnsafeError, "The network private key file is not safe") CodexKeyUnsafeError, "The network private key file is not safe")
return PrivateKey.init( let kb = ? path.readAllBytes().mapFailure(CodexKeyError)
? path.readAllBytes().mapFailure(CodexKeyError)) return PrivateKey.init(kb).mapFailure(CodexKeyError)
.mapFailure(CodexKeyError)

View File

@ -1,8 +1,10 @@
import std/os
include "build.nims"
import std/os
const currentDir = currentSourcePath()[0 .. ^(len("config.nims") + 1)] const currentDir = currentSourcePath()[0 .. ^(len("config.nims") + 1)]
if getEnv("NIMBUS_BUILD_SYSTEM") == "yes" and when getEnv("NIMBUS_BUILD_SYSTEM") == "yes" and
# BEWARE # BEWARE
# In Nim 1.6, config files are evaluated with a working directory # In Nim 1.6, config files are evaluated with a working directory
# matching where the Nim command was invocated. This means that we # matching where the Nim command was invocated. This means that we
@ -10,12 +12,12 @@ if getEnv("NIMBUS_BUILD_SYSTEM") == "yes" and
system.fileExists(currentDir & "nimbus-build-system.paths"): system.fileExists(currentDir & "nimbus-build-system.paths"):
include "nimbus-build-system.paths" include "nimbus-build-system.paths"
if defined(release): when defined(release):
switch("nimcache", joinPath(currentSourcePath.parentDir, "nimcache/release/$projectName")) switch("nimcache", joinPath(currentSourcePath.parentDir, "nimcache/release/$projectName"))
else: else:
switch("nimcache", joinPath(currentSourcePath.parentDir, "nimcache/debug/$projectName")) switch("nimcache", joinPath(currentSourcePath.parentDir, "nimcache/debug/$projectName"))
if defined(limitStackUsage): when defined(limitStackUsage):
# This limits stack usage of each individual function to 1MB - the option is # This limits stack usage of each individual function to 1MB - the option is
# available on some GCC versions but not all - run with `-d:limitStackUsage` # available on some GCC versions but not all - run with `-d:limitStackUsage`
# and look for .su files in "./build/", "./nimcache/" or $TMPDIR that list the # and look for .su files in "./build/", "./nimcache/" or $TMPDIR that list the
@ -23,7 +25,7 @@ if defined(limitStackUsage):
switch("passC", "-fstack-usage -Werror=stack-usage=1048576") switch("passC", "-fstack-usage -Werror=stack-usage=1048576")
switch("passL", "-fstack-usage -Werror=stack-usage=1048576") switch("passL", "-fstack-usage -Werror=stack-usage=1048576")
if defined(windows): when defined(windows):
# https://github.com/nim-lang/Nim/pull/19891 # https://github.com/nim-lang/Nim/pull/19891
switch("define", "nimRawSetjmp") switch("define", "nimRawSetjmp")
@ -47,8 +49,8 @@ if defined(windows):
# engineering a more portable binary release, this should be tweaked but still # engineering a more portable binary release, this should be tweaked but still
# use at least -msse2 or -msse3. # use at least -msse2 or -msse3.
if defined(disableMarchNative): when defined(disableMarchNative):
if defined(i386) or defined(amd64): when defined(i386) or defined(amd64):
switch("passC", "-mssse3") switch("passC", "-mssse3")
elif defined(macosx) and defined(arm64): elif defined(macosx) and defined(arm64):
# Apple's Clang can't handle "-march=native" on M1: https://github.com/status-im/nimbus-eth2/issues/2758 # Apple's Clang can't handle "-march=native" on M1: https://github.com/status-im/nimbus-eth2/issues/2758
@ -93,7 +95,7 @@ if not defined(macosx):
--define:nimStackTraceOverride --define:nimStackTraceOverride
switch("import", "libbacktrace") switch("import", "libbacktrace")
--define:nimOldCaseObjects # https://github.com/status-im/nim-confutils/issues/9 switch("define", "codex_enable_proof_failures=true")
# `switch("warning[CaseTransition]", "off")` fails with "Error: invalid command line option: '--warning[CaseTransition]'" # `switch("warning[CaseTransition]", "off")` fails with "Error: invalid command line option: '--warning[CaseTransition]'"
switch("warning", "CaseTransition:off") switch("warning", "CaseTransition:off")

View File

@ -5,7 +5,6 @@ import std/tables
import pkg/asynctest import pkg/asynctest
import pkg/chronos import pkg/chronos
import pkg/libp2p
import pkg/libp2p/errors import pkg/libp2p/errors
import pkg/codex/rng import pkg/codex/rng

View File

@ -5,7 +5,6 @@ import std/tables
import pkg/asynctest import pkg/asynctest
import pkg/chronos import pkg/chronos
import pkg/libp2p
import pkg/codex/rng import pkg/codex/rng
import pkg/codex/stores import pkg/codex/stores

View File

@ -5,9 +5,6 @@ import pkg/asynctest
import pkg/chronos import pkg/chronos
import pkg/stew/byteutils import pkg/stew/byteutils
import pkg/libp2p
import pkg/libp2p/errors
import pkg/codex/rng import pkg/codex/rng
import pkg/codex/stores import pkg/codex/stores
import pkg/codex/blockexchange import pkg/codex/blockexchange

View File

@ -5,9 +5,9 @@ import std/algorithm
import pkg/stew/byteutils import pkg/stew/byteutils
import pkg/asynctest import pkg/asynctest
import pkg/chronos import pkg/chronos
import pkg/libp2p import pkg/libp2p/errors
import pkg/libp2p/routing_record import pkg/libp2p/routing_record
import pkg/libp2pdht/discv5/protocol as discv5 import pkg/codexdht/discv5/protocol as discv5
import pkg/codex/rng import pkg/codex/rng
import pkg/codex/blockexchange import pkg/codex/blockexchange

View File

@ -1,6 +1,5 @@
import pkg/asynctest import pkg/asynctest
import pkg/chronos import pkg/chronos
import pkg/libp2p
import pkg/codex/blockexchange/protobuf/presence import pkg/codex/blockexchange/protobuf/presence
import ../../examples import ../../examples

View File

@ -3,8 +3,6 @@ import std/tables
import pkg/asynctest import pkg/asynctest
import pkg/chronos import pkg/chronos
import pkg/libp2p
import pkg/libp2p/errors
import pkg/codex/rng import pkg/codex/rng
import pkg/codex/chunker import pkg/codex/chunker

View File

@ -3,7 +3,6 @@ import std/algorithm
import pkg/chronos import pkg/chronos
import pkg/asynctest import pkg/asynctest
import pkg/libp2p
import pkg/stew/byteutils import pkg/stew/byteutils
import pkg/codex/blocktype as bt import pkg/codex/blocktype as bt

View File

@ -14,6 +14,8 @@ import ../checktest
export randomchunker, nodeutils, mockdiscovery, eventually, checktest, manifest export randomchunker, nodeutils, mockdiscovery, eventually, checktest, manifest
export libp2p except setup, eventually
# NOTE: The meaning of equality for blocks # NOTE: The meaning of equality for blocks
# is changed here, because blocks are now `ref` # is changed here, because blocks are now `ref`
# types. This is only in tests!!! # types. This is only in tests!!!

View File

@ -1,6 +1,6 @@
import pkg/chronos import pkg/chronos
template eventually*(condition: untyped, timeout = 5.seconds): bool = template eventuallyCheck*(condition: untyped, timeout = 5.seconds): bool =
proc loop: Future[bool] {.async.} = proc loop: Future[bool] {.async.} =
let start = Moment.now() let start = Moment.now()
while true: while true:

View File

@ -2,6 +2,7 @@ import std/sequtils
import pkg/chronos import pkg/chronos
import pkg/libp2p import pkg/libp2p
import pkg/libp2p/errors
import pkg/codex/discovery import pkg/codex/discovery
import pkg/codex/stores import pkg/codex/stores

View File

@ -113,7 +113,7 @@ asyncchecksuite "Sales":
check isOk await reservations.reserve(availability) check isOk await reservations.reserve(availability)
await market.requestStorage(request) await market.requestStorage(request)
let items = SlotQueueItem.init(request) let items = SlotQueueItem.init(request)
check eventually items.allIt(itemsProcessed.contains(it)) check eventuallyCheck items.allIt(itemsProcessed.contains(it))
test "removes slots from slot queue once RequestCancelled emitted": test "removes slots from slot queue once RequestCancelled emitted":
let request1 = await addRequestToSaturatedQueue() let request1 = await addRequestToSaturatedQueue()
@ -146,7 +146,7 @@ asyncchecksuite "Sales":
market.emitSlotFreed(request.id, 2.u256) market.emitSlotFreed(request.id, 2.u256)
let expected = SlotQueueItem.init(request, 2.uint16) let expected = SlotQueueItem.init(request, 2.uint16)
check eventually itemsProcessed.contains(expected) check eventuallyCheck itemsProcessed.contains(expected)
test "request slots are not added to the slot queue when no availabilities exist": test "request slots are not added to the slot queue when no availabilities exist":
var itemsProcessed: seq[SlotQueueItem] = @[] var itemsProcessed: seq[SlotQueueItem] = @[]
@ -185,7 +185,7 @@ asyncchecksuite "Sales":
# now add matching availability # now add matching availability
check isOk await reservations.reserve(availability) check isOk await reservations.reserve(availability)
check eventually itemsProcessed.len == request.ask.slots.int check eventuallyCheck itemsProcessed.len == request.ask.slots.int
test "makes storage unavailable when downloading a matched request": test "makes storage unavailable when downloading a matched request":
var used = false var used = false
@ -199,7 +199,7 @@ asyncchecksuite "Sales":
check isOk await reservations.reserve(availability) check isOk await reservations.reserve(availability)
await market.requestStorage(request) await market.requestStorage(request)
check eventually used check eventuallyCheck used
test "reduces remaining availability size after download": test "reduces remaining availability size after download":
let blk = bt.Block.example let blk = bt.Block.example
@ -212,7 +212,7 @@ asyncchecksuite "Sales":
return success() return success()
check isOk await reservations.reserve(availability) check isOk await reservations.reserve(availability)
await market.requestStorage(request) await market.requestStorage(request)
check eventually getAvailability().?size == success 1.u256 check eventuallyCheck getAvailability().?size == success 1.u256
test "ignores download when duration not long enough": test "ignores download when duration not long enough":
availability.duration = request.ask.duration - 1 availability.duration = request.ask.duration - 1
@ -265,7 +265,7 @@ asyncchecksuite "Sales":
return success() return success()
check isOk await reservations.reserve(availability) check isOk await reservations.reserve(availability)
await market.requestStorage(request) await market.requestStorage(request)
check eventually storingRequest == request check eventuallyCheck storingRequest == request
check storingSlot < request.ask.slots.u256 check storingSlot < request.ask.slots.u256
test "handles errors during state run": test "handles errors during state run":
@ -280,7 +280,7 @@ asyncchecksuite "Sales":
saleFailed = true saleFailed = true
check isOk await reservations.reserve(availability) check isOk await reservations.reserve(availability)
await market.requestStorage(request) await market.requestStorage(request)
check eventually saleFailed check eventuallyCheck saleFailed
test "makes storage available again when data retrieval fails": test "makes storage available again when data retrieval fails":
let error = newException(IOError, "data retrieval failed") let error = newException(IOError, "data retrieval failed")
@ -290,7 +290,7 @@ asyncchecksuite "Sales":
return failure(error) return failure(error)
check isOk await reservations.reserve(availability) check isOk await reservations.reserve(availability)
await market.requestStorage(request) await market.requestStorage(request)
check eventually getAvailability().?used == success false check eventuallyCheck getAvailability().?used == success false
check getAvailability().?size == success availability.size check getAvailability().?size == success availability.size
test "generates proof of storage": test "generates proof of storage":
@ -301,13 +301,13 @@ asyncchecksuite "Sales":
provingSlot = slot.slotIndex provingSlot = slot.slotIndex
check isOk await reservations.reserve(availability) check isOk await reservations.reserve(availability)
await market.requestStorage(request) await market.requestStorage(request)
check eventually provingRequest == request check eventuallyCheck provingRequest == request
check provingSlot < request.ask.slots.u256 check provingSlot < request.ask.slots.u256
test "fills a slot": test "fills a slot":
check isOk await reservations.reserve(availability) check isOk await reservations.reserve(availability)
await market.requestStorage(request) await market.requestStorage(request)
check eventually market.filled.len == 1 check eventuallyCheck market.filled.len == 1
check market.filled[0].requestId == request.id check market.filled[0].requestId == request.id
check market.filled[0].slotIndex < request.ask.slots.u256 check market.filled[0].slotIndex < request.ask.slots.u256
check market.filled[0].proof == proof check market.filled[0].proof == proof
@ -325,7 +325,7 @@ asyncchecksuite "Sales":
soldSlotIndex = slotIndex soldSlotIndex = slotIndex
check isOk await reservations.reserve(availability) check isOk await reservations.reserve(availability)
await market.requestStorage(request) await market.requestStorage(request)
check eventually soldAvailability == availability check eventuallyCheck soldAvailability == availability
check soldRequest == request check soldRequest == request
check soldSlotIndex < request.ask.slots.u256 check soldSlotIndex < request.ask.slots.u256
@ -342,7 +342,7 @@ asyncchecksuite "Sales":
clearedSlotIndex = slotIndex clearedSlotIndex = slotIndex
check isOk await reservations.reserve(availability) check isOk await reservations.reserve(availability)
await market.requestStorage(request) await market.requestStorage(request)
check eventually clearedRequest == request check eventuallyCheck clearedRequest == request
check clearedSlotIndex < request.ask.slots.u256 check clearedSlotIndex < request.ask.slots.u256
test "makes storage available again when other host fills the slot": test "makes storage available again when other host fills the slot":
@ -356,7 +356,7 @@ asyncchecksuite "Sales":
await market.requestStorage(request) await market.requestStorage(request)
for slotIndex in 0..<request.ask.slots: for slotIndex in 0..<request.ask.slots:
market.fillSlot(request.id, slotIndex.u256, proof, otherHost) market.fillSlot(request.id, slotIndex.u256, proof, otherHost)
check eventually (await reservations.allAvailabilities) == @[availability] check eventuallyCheck (await reservations.allAvailabilities) == @[availability]
test "makes storage available again when request expires": test "makes storage available again when request expires":
sales.onStore = proc(request: StorageRequest, sales.onStore = proc(request: StorageRequest,
@ -367,7 +367,7 @@ asyncchecksuite "Sales":
check isOk await reservations.reserve(availability) check isOk await reservations.reserve(availability)
await market.requestStorage(request) await market.requestStorage(request)
clock.set(request.expiry.truncate(int64)) clock.set(request.expiry.truncate(int64))
check eventually (await reservations.allAvailabilities) == @[availability] check eventuallyCheck (await reservations.allAvailabilities) == @[availability]
test "adds proving for slot when slot is filled": test "adds proving for slot when slot is filled":
var soldSlotIndex: UInt256 var soldSlotIndex: UInt256
@ -377,7 +377,7 @@ asyncchecksuite "Sales":
check proving.slots.len == 0 check proving.slots.len == 0
check isOk await reservations.reserve(availability) check isOk await reservations.reserve(availability)
await market.requestStorage(request) await market.requestStorage(request)
check eventually proving.slots.len == 1 check eventuallyCheck proving.slots.len == 1
check proving.slots.contains(Slot(request: request, slotIndex: soldSlotIndex)) check proving.slots.contains(Slot(request: request, slotIndex: soldSlotIndex))
test "loads active slots from market": test "loads active slots from market":
@ -423,5 +423,5 @@ asyncchecksuite "Sales":
return data0.requestId == data1.requestId and return data0.requestId == data1.requestId and
data0.request == data1.request data0.request == data1.request
check eventually sales.agents.len == 2 check eventuallyCheck sales.agents.len == 2
check sales.agents.all(agent => agent.data == expected) check sales.agents.all(agent => agent.data == expected)

View File

@ -10,6 +10,7 @@ import pkg/codex/sales/reservations
import pkg/codex/sales/slotqueue import pkg/codex/sales/slotqueue
import pkg/codex/stores import pkg/codex/stores
import ../helpers
import ../helpers/mockmarket import ../helpers/mockmarket
import ../helpers/eventually import ../helpers/eventually
import ../examples import ../examples

View File

@ -2,7 +2,6 @@ import std/sequtils
import pkg/asynctest import pkg/asynctest
import pkg/chronos import pkg/chronos
import pkg/libp2p
import pkg/libp2p/errors import pkg/libp2p/errors
import pkg/contractabi as ca import pkg/contractabi as ca

View File

@ -4,8 +4,9 @@ import std/options
import pkg/chronos import pkg/chronos
import pkg/asynctest import pkg/asynctest
import pkg/libp2p import pkg/libp2p/multicodec
import pkg/stew/byteutils import pkg/stew/byteutils
import pkg/questionable
import pkg/questionable/results import pkg/questionable/results
import pkg/codex/stores/cachestore import pkg/codex/stores/cachestore
import pkg/codex/chunker import pkg/codex/chunker
@ -94,7 +95,7 @@ proc commonBlockStoreTests*(name: string,
var count = 0 var count = 0
for c in cids: for c in cids:
if cid =? (await c): if cid =? await c:
check (await store.hasBlock(cid)).tryGet() check (await store.hasBlock(cid)).tryGet()
count.inc count.inc

View File

@ -2,7 +2,6 @@ import std/strutils
import pkg/chronos import pkg/chronos
import pkg/asynctest import pkg/asynctest
import pkg/libp2p
import pkg/stew/byteutils import pkg/stew/byteutils
import pkg/questionable/results import pkg/questionable/results
import pkg/codex/stores/cachestore import pkg/codex/stores/cachestore

View File

@ -10,7 +10,6 @@
import std/random import std/random
import std/sequtils import std/sequtils
import pkg/chronos import pkg/chronos
import pkg/libp2p
import pkg/asynctest import pkg/asynctest
import pkg/questionable import pkg/questionable
import pkg/questionable/results import pkg/questionable/results

View File

@ -8,7 +8,6 @@
## those terms. ## those terms.
import pkg/chronos import pkg/chronos
import pkg/libp2p
import pkg/asynctest import pkg/asynctest
import pkg/questionable/results import pkg/questionable/results
import pkg/codex/blocktype as bt import pkg/codex/blocktype as bt

View File

@ -7,7 +7,6 @@ import pkg/questionable/results
import pkg/chronos import pkg/chronos
import pkg/asynctest import pkg/asynctest
import pkg/libp2p
import pkg/stew/byteutils import pkg/stew/byteutils
import pkg/stew/endians2 import pkg/stew/endians2
import pkg/datastore import pkg/datastore

View File

@ -1,9 +1,9 @@
import pkg/asynctest import pkg/asynctest
import pkg/stew/byteutils import pkg/stew/byteutils
import pkg/codex/chunker import pkg/codex/chunker
import pkg/chronicles import pkg/chronicles
import pkg/chronos import pkg/chronos
import pkg/libp2p
import ./helpers import ./helpers

View File

@ -2,7 +2,6 @@ import std/sequtils
import pkg/asynctest import pkg/asynctest
import pkg/chronos import pkg/chronos
import pkg/libp2p
import pkg/questionable/results import pkg/questionable/results
import pkg/codex/erasure import pkg/codex/erasure

View File

@ -3,7 +3,6 @@ import std/sequtils
import pkg/chronos import pkg/chronos
import pkg/questionable/results import pkg/questionable/results
import pkg/asynctest import pkg/asynctest
import pkg/libp2p
import pkg/stew/byteutils import pkg/stew/byteutils
import pkg/codex/chunker import pkg/codex/chunker

View File

@ -4,11 +4,11 @@ import std/math
import pkg/asynctest import pkg/asynctest
import pkg/chronos import pkg/chronos
import pkg/chronicles
import pkg/stew/byteutils import pkg/stew/byteutils
import pkg/nitro import pkg/nitro
import pkg/libp2p import pkg/codexdht/discv5/protocol as discv5
import pkg/libp2pdht/discv5/protocol as discv5
import pkg/codex/stores import pkg/codex/stores
import pkg/codex/blockexchange import pkg/codex/blockexchange

View File

@ -1,6 +1,5 @@
import pkg/chronos import pkg/chronos
import pkg/asynctest import pkg/asynctest
import pkg/libp2p
import pkg/questionable/results import pkg/questionable/results
import ./helpers import ./helpers

View File

@ -1,7 +1,6 @@
import std/unittest import std/unittest
import std/os import std/os
import pkg/libp2p import pkg/questionable
import pkg/questionable/results
import codex/utils/keyutils import codex/utils/keyutils
import ../helpers import ../helpers
@ -18,17 +17,17 @@ checksuite "keyutils":
os.removeDir(path) os.removeDir(path)
test "creates a key file when it does not exist yet": test "creates a key file when it does not exist yet":
check setupKey(path / "keyfile").isSuccess check setupKey(path / "keyfile").isOk
check fileExists(path / "keyfile") check fileExists(path / "keyfile")
test "stores key in a file that's only readable by the user": test "stores key in a file that's only readable by the user":
discard !setupKey(path / "keyfile") discard setupKey(path / "keyfile").get()
when defined(posix): when defined(posix):
check getFilePermissions(path / "keyfile") == {fpUserRead, fpUserWrite} check getFilePermissions(path / "keyfile") == {fpUserRead, fpUserWrite}
when defined(windows): when defined(windows):
check checkCurrentUserOnlyACL(path / "keyfile").get() check checkCurrentUserOnlyACL(path / "keyfile").get()
test "reads key file when it does exist": test "reads key file when it does exist":
let key = !setupKey(path / "keyfile") let key = setupKey(path / "keyfile").get()
check !setupKey(path / "keyfile") == key check setupKey(path / "keyfile").get() == key

2
tests/coverage.nim Normal file
View File

@ -0,0 +1,2 @@
include ./testCodex

12
tests/coverage.nims Normal file
View File

@ -0,0 +1,12 @@
switch("debugger", "native")
switch("lineDir", "on")
switch("define", "debug")
switch("verbosity", "0")
switch("hints", "off")
switch("warnings", "off")
# switches for compiling with coverage
switch("passC", "-fprofile-arcs")
switch("passC", "-ftest-coverage")
switch("passL", "-fprofile-arcs")
switch("passL", "-ftest-coverage")

View File

@ -13,7 +13,7 @@ type
process: Process process: Process
arguments: seq[string] arguments: seq[string]
debug: bool debug: bool
Role* = enum Role* {.pure.} = enum
Client, Client,
Provider, Provider,
Validator Validator
@ -57,7 +57,7 @@ proc init*(_: type DebugNodes,
proc start(node: NodeProcess) = proc start(node: NodeProcess) =
if node.debug: if node.debug:
node.process = startProcess( node.process = osproc.startProcess(
executable, executable,
workingDir, workingDir,
node.arguments, node.arguments,
@ -65,7 +65,7 @@ proc start(node: NodeProcess) =
) )
sleep(1000) sleep(1000)
else: else:
node.process = startProcess( node.process = osproc.startProcess(
executable, executable,
workingDir, workingDir,
node.arguments node.arguments

View File

@ -1,5 +1,6 @@
import std/os import std/os
import std/httpclient import std/httpclient
from std/net import TimeoutError
import pkg/chronos import pkg/chronos
import ../ethertest import ../ethertest

View File

@ -1,9 +1,10 @@
import pkg/chronicles when not defined(nimscript):
import pkg/chronicles
proc ignoreLogging(level: LogLevel, message: LogOutputStr) = proc ignoreLogging(level: LogLevel, message: LogOutputStr) =
discard discard
defaultChroniclesStream.output.writer = ignoreLogging defaultChroniclesStream.output.writer = ignoreLogging
{.warning[UnusedImport]:off.} {.warning[UnusedImport]:off.}
{.used.} {.used.}

1381
tests/nimlldb.py Normal file

File diff suppressed because it is too large Load Diff

2
vendor/asynctest vendored

@ -1 +1 @@
Subproject commit a236a5f0f3031573ac2cb082b63dbf6e170e06e7 Subproject commit fe1a34caf572b05f8bdba3b650f1871af9fce31e

3
vendor/atlas.workspace vendored Normal file
View File

@ -0,0 +1,3 @@
deps=""
resolver="MaxVer"
overrides="urls.rules"

@ -1 +1 @@
Subproject commit fbb76f8af8a33ab818184a7d4406d9fee20993be Subproject commit 23214235d4784d24aceed99bbfe153379ea557c8

2
vendor/nim-bearssl vendored

@ -1 +1 @@
Subproject commit f4c4233de453cb7eac0ce3f3ffad6496295f83ab Subproject commit 99fcb3405c55b27cfffbf60f5368c55da7346f23

@ -1 +1 @@
Subproject commit 7631f7b2ee03398cb1512a79923264e8f9410af6 Subproject commit c9c8e58ec3f89b655a046c485f622f9021c68b61

2
vendor/nim-chronos vendored

@ -1 +1 @@
Subproject commit 6525f4ce1d1a7eba146e5f1a53f6f105077ae686 Subproject commit 0277b65be2c7a365ac13df002fba6e172be55537

2
vendor/nim-eth vendored

@ -1 +1 @@
Subproject commit 5885f638e47b8607683ef9e1e77fc21ce1aede44 Subproject commit 15a09fab737d08a2545284c727199c377bb0f4b7

@ -1 +1 @@
Subproject commit 1b561a9e71b6bdad1c1cdff753418906037e9d09 Subproject commit 720fc5e5c8e428d9d0af618e1e27c44b42350309

@ -1 +1 @@
Subproject commit e88e231dfcef4585fe3b2fbd9b664dbd28a88040 Subproject commit 3b491a40c60aad9e8d3407443f46f62511e63b18

@ -1 +1 @@
Subproject commit e5b18fb710c3d0167ec79f3b892f5a7a1bc6d1a4 Subproject commit bb53d49caf2a6c6cf1df365ba84af93cdcfa7aa3

2
vendor/nim-libp2p vendored

@ -1 +1 @@
Subproject commit 8c2eca18dcc538c57a8fbc0c53fd0b9d24d56cff Subproject commit 440461b24b9e66542b34d26a0b908c17f6549d05

@ -1 +1 @@
Subproject commit bd517f0e8da38a1b5da15f7deb2d5c652ca389f1 Subproject commit 3c940ea8901ae6118e66cc4df423b8ff53699eb4

2
vendor/nim-metrics vendored

@ -1 +1 @@
Subproject commit 743f81d4f6c6ebf0ac02389f2392ff8b4235bee5 Subproject commit 6142e433fc8ea9b73379770a788017ac528d46ff

1
vendor/nim-protobuf-serialization vendored Submodule

@ -0,0 +1 @@
Subproject commit 28214b3e40c755a9886d2ec8f261ec48fbb6bec6

1
vendor/nim-results vendored Submodule

@ -0,0 +1 @@
Subproject commit f3c666a272c69d70cb41e7245e7f6844797303ad

@ -1 +1 @@
Subproject commit 5340cf188168d6afcafc8023770d880f067c0b2f Subproject commit 2acbbdcc0e63002a013fff49f015708522875832

@ -1 +1 @@
Subproject commit 493d18b8292fc03aa4f835fd825dea1183f97466 Subproject commit 384eb2561ee755446cff512a8e057325848b86a7

@ -1 +1 @@
Subproject commit fda455cfea2df707dde052034411ce63de218453 Subproject commit 362e1bd9f689ad9f5380d9d27f0705b3d4dfc7d3

2
vendor/nim-stew vendored

@ -1 +1 @@
Subproject commit e18f5a62af2ade7a1fd1d39635d4e04d944def08 Subproject commit 7afe7e3c070758cac1f628e4330109f3ef6fc853

1
vendor/nim-testutils vendored Submodule

@ -0,0 +1 @@
Subproject commit b56a5953e37fc5117bd6ea6dfa18418c5e112815

@ -1 +1 @@
Subproject commit 02c49b8a994dd3f9eddfaab45262f9b8fa507f8e Subproject commit b178f47527074964f76c395ad0dfc81cf118f379

2
vendor/nim-websock vendored

@ -1 +1 @@
Subproject commit 7b2ed397d6e4c37ea4df08ae82aeac7ff04cd180 Subproject commit 2c3ae3137f3c9cb48134285bd4a47186fa51f0e8

2
vendor/nim-zlib vendored

@ -1 +1 @@
Subproject commit 74cdeb54b21bededb5a515d36f608bc1850555a2 Subproject commit f34ca261efd90f118dc1647beefd2f7a69b05d93

@ -1 +1 @@
Subproject commit 1cf6a1b18ca5aa0d24e7a2861dd85d79ad9cb0cd Subproject commit fe9bc3f3759ae1add6bf8c899db2e75327f03782

2
vendor/nimcrypto vendored

@ -1 +1 @@
Subproject commit a5742a9a214ac33f91615f3862c7b099aec43b00 Subproject commit 24e006df85927f64916e60511620583b11403178

1
vendor/npeg vendored Submodule

@ -0,0 +1 @@
Subproject commit b15a10e388b91b898c581dbbcb6a718d46b27d2f

2
vendor/questionable vendored

@ -1 +1 @@
Subproject commit 30e4184a99c8c1ba329925912d2c5d4b09acf8cc Subproject commit 1569ef4526d118c1bd1c31d8882eb9de6193a096

2
vendor/stint vendored

@ -1 +1 @@
Subproject commit 036c71d06a6b22f8f967ba9d54afd2189c3872ca Subproject commit 86621eced1dcfb5e25903019ebcfc76ed9128ec5

8
vendor/urls.rules vendored Normal file
View File

@ -0,0 +1,8 @@
https://github.com/status-im/nim-libp2p-dht.git -> https://github.com/codex-storage/nim-codex-dht.git
https://github.com/markspanbroek/questionable -> https://github.com/codex-storage/questionable
https://github.com/status-im/questionable -> https://github.com/codex-storage/questionable
https://github.com/status-im/asynctest -> https://github.com/codex-storage/asynctest
https://github.com/status-im/nim-datastore -> https://github.com/codex-storage/nim-datastore
https://github.com/cheatfate/nimcrypto -> https://github.com/status-im/nimcrypto
protobufserialization -> protobuf_serialization
protobufserialization -> https://github.com/status-im/nim-protobuf-serialization