import std/os
import std/options
import std/math
import std/times
import std/sequtils
import std/importutils

import pkg/chronos
import pkg/stew/byteutils
import pkg/datastore
import pkg/questionable
import pkg/questionable/results
import pkg/stint
import pkg/poseidon2
import pkg/poseidon2/io

import pkg/nitro
import pkg/codexdht/discv5/protocol as discv5

import pkg/codex/logutils
import pkg/codex/stores
import pkg/codex/clock
import pkg/codex/contracts
import pkg/codex/systemclock
import pkg/codex/blockexchange
import pkg/codex/chunker
import pkg/codex/slots
import pkg/codex/manifest
import pkg/codex/discovery
import pkg/codex/erasure
import pkg/codex/merkletree
import pkg/codex/blocktype as bt

import pkg/codex/node {.all.}

import ../../asynctest
import ../examples
import ../helpers
import ../helpers/mockmarket
import ../helpers/mockclock

import ./helpers

privateAccess(CodexNodeRef) # enable access to private fields

asyncchecksuite "Test Node - Basic":
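  # The fixtures referenced throughout this suite (localStore, chunker, node,
  # engine, switch, blockDiscovery, file, store, ...) are assumed to be set up
  # per-test by the setupAndTearDown() template imported from the test helpers.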
  setupAndTearDown()
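
  # Stores an encoded manifest block locally, then checks that fetchManifest
  # decodes it back to the original manifest.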
  test "Fetch Manifest":
    let
      manifest = await storeDataGetManifest(localStore, chunker)

      manifestBlock = bt.Block.new(
        manifest.encode().tryGet(),
        codec = ManifestCodec).tryGet()

    (await localStore.putBlock(manifestBlock)).tryGet()

    let
      fetched = (await node.fetchManifest(manifestBlock.cid)).tryGet()

    check:
      fetched == manifest
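
  # Uses a counting store wrapper to assert that a CID which is not available
  # anywhere is looked up exactly once before the block fetch times out.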
  test "Should not lookup non-existing blocks twice":
    # https://github.com/codex-storage/nim-codex/issues/699
    let
      cstore = CountingStore.new(engine, localStore)
      node = CodexNodeRef.new(switch, cstore, engine, blockDiscovery)
      missingCid = Cid.init(
        "zDvZRwzmCvtiyubW9AecnxgLnXK8GrBvpQJBDzToxmzDN6Nrc2CZ").get()

    engine.blockFetchTimeout = timer.milliseconds(100)

    discard await node.retrieve(missingCid, local = false)

    let lookupCount = cstore.lookups.getOrDefault(missingCid)
    check lookupCount == 1
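
  # fetchBatched should deliver blocks in batches no larger than the requested
  # batchSize, for a range of batch sizes.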
  test "Block Batching":
    let manifest = await storeDataGetManifest(localStore, chunker)

    for batchSize in 1..12:
      (await node.fetchBatched(
        manifest,
        batchSize = batchSize,
        proc(blocks: seq[bt.Block]): Future[?!void] {.gcsafe, async.} =
          check blocks.len > 0 and blocks.len <= batchSize
          return success()
      )).tryGet()
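
  # Pushes oddly-sized chunks into node.store through a stream, then retrieves
  # the dataset and compares length and SHA-256 digest against the original bytes.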
  test "Store and retrieve Data Stream":
    let
      stream = BufferStream.new()
      storeFut = node.store(stream)
      oddChunkSize = math.trunc(DefaultBlockSize.float / 3.14).NBytes # Let's check that node.store can correctly rechunk these odd chunks
      oddChunker = FileChunker.new(file = file, chunkSize = oddChunkSize, pad = false) # TODO: doesn't work with pad=true

    var
      original: seq[byte]

    try:
      while (
        let chunk = await oddChunker.getBytes();
        chunk.len > 0):
        original &= chunk
        await stream.pushData(chunk)
    finally:
      await stream.pushEof()
      await stream.close()

    let
      manifestCid = (await storeFut).tryGet()
      manifestBlock = (await localStore.getBlock(manifestCid)).tryGet()
      localManifest = Manifest.decode(manifestBlock).tryGet()
      data = await (await node.retrieve(manifestCid)).drain()

    check:
      data.len == localManifest.datasetSize.int
      data.len == original.len
      sha256.digest(data) == sha256.digest(original)
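
  # A single stored block should be retrievable as a stream containing exactly
  # the original bytes.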
  test "Retrieve One Block":
    let
      testString = "Block 1"
      blk = bt.Block.new(testString.toBytes).tryGet()

    (await localStore.putBlock(blk)).tryGet()
    let stream = (await node.retrieve(blk.cid)).tryGet()
    defer: await stream.close()

    var data = newSeq[byte](testString.len)
    await stream.readExactly(addr data[0], data.len)
    check string.fromBytes(data) == testString
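
  # Builds an erasure-coded (protected) and then verifiable manifest locally,
  # and checks that setupRequest produces a storage request pointing at the
  # verifiable manifest's CID and verification Merkle root.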
  test "Setup purchase request":
    let
      erasure = Erasure.new(store, leoEncoderProvider, leoDecoderProvider)
      manifest = await storeDataGetManifest(localStore, chunker)
      manifestBlock = bt.Block.new(
        manifest.encode().tryGet(),
        codec = ManifestCodec).tryGet()
      protected = (await erasure.encode(manifest, 3, 2)).tryGet()
      builder = Poseidon2Builder.new(localStore, protected).tryGet()
      verifiable = (await builder.buildManifest()).tryGet()
      verifiableBlock = bt.Block.new(
        verifiable.encode().tryGet(),
        codec = ManifestCodec).tryGet()

    (await localStore.putBlock(manifestBlock)).tryGet()

    let
      request = (await node.setupRequest(
        cid = manifestBlock.cid,
        nodes = 5,
        tolerance = 2,
        duration = 100.u256,
        reward = 2.u256,
        proofProbability = 3.u256,
        expiry = 200.u256,
        collateral = 200.u256)).tryGet

    check:
      (await verifiableBlock.cid in localStore) == true
      request.content.cid == $verifiableBlock.cid
      request.content.merkleRoot == builder.verifyRoot.get.toBytes