import std/os
import std/options
import std/math
import std/times

import pkg/asynctest
import pkg/chronos
import pkg/chronicles
import pkg/stew/byteutils
import pkg/datastore
import pkg/questionable
import pkg/questionable/results
import pkg/stint

import pkg/nitro
import pkg/codexdht/discv5/protocol as discv5

import pkg/codex/stores
import pkg/codex/clock
import pkg/codex/contracts
import pkg/codex/systemclock
import pkg/codex/blockexchange
import pkg/codex/chunker
import pkg/codex/node
import pkg/codex/manifest
import pkg/codex/discovery
import pkg/codex/blocktype as bt

import ../examples
import ./helpers
import ./helpers/mockmarket
import ./helpers/mockclock
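
# Helper used by the host-contract tests below: converts a chronos.Duration
# (e.g. DefaultBlockTtl) into a times.Duration so it can be added to getTime().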
proc toTimesDuration(d: chronos.Duration): times.Duration =
  initDuration(seconds = d.seconds)
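
# Exercises the core CodexNodeRef flows against an in-memory store: fetching
# manifests, batched block fetching, and storing/retrieving data streams and
# individual blocks.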
asyncchecksuite "Test Node":
|
2022-01-10 15:32:56 +00:00
|
|
|
let
|
|
|
|
(path, _, _) = instantiationInfo(-2, fullPaths = true) # get this file's name
|
|
|
|
|
|
|
|
var
|
|
|
|
file: File
|
|
|
|
chunker: Chunker
|
|
|
|
switch: Switch
|
|
|
|
wallet: WalletRef
|
|
|
|
network: BlockExcNetwork
|
2023-11-22 10:09:12 +00:00
|
|
|
clock: Clock
|
|
|
|
localStore: RepoStore
|
|
|
|
localStoreRepoDs: DataStore
|
|
|
|
localStoreMetaDs: DataStore
|
2022-01-10 15:32:56 +00:00
|
|
|
engine: BlockExcEngine
|
|
|
|
store: NetworkStore
|
2022-05-19 19:56:03 +00:00
|
|
|
node: CodexNodeRef
|
2022-05-19 02:29:15 +00:00
|
|
|
blockDiscovery: Discovery
|
|
|
|
peerStore: PeerCtxStore
|
|
|
|
pendingBlocks: PendingBlocksManager
|
|
|
|
discovery: DiscoveryEngine
|
2022-01-10 15:32:56 +00:00
|
|
|
|

  proc fetch(T: type Manifest, chunker: Chunker): Future[Manifest] {.async.} =
    # Collect blocks from Chunker into Manifest
    await storeDataGetManifest(localStore, chunker)

  proc retrieve(cid: Cid): Future[seq[byte]] {.async.} =
    # Retrieve an entire file's contents by its Cid
    let
      oddChunkSize = math.trunc(DefaultBlockSize.float/1.359).int # Let's check that node.retrieve can correctly rechunk data
      stream = (await node.retrieve(cid)).tryGet()
    var
      data: seq[byte]

    defer: await stream.close()
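
    # readOnce may deliver fewer bytes than requested, so trim the buffer to
    # what was actually read before appending.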
    while not stream.atEof:
      var
        buf = newSeq[byte](oddChunkSize)
        res = await stream.readOnce(addr buf[0], oddChunkSize)
      check res <= oddChunkSize
      buf.setLen(res)
      data &= buf

    return data
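
  # Builds the full local stack shared by these tests: an in-memory
  # SQLite-backed RepoStore, a libp2p switch, block-exchange engine and
  # discovery, all wrapped by a CodexNodeRef.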
  setup:
    file = open(path.splitFile().dir /../ "fixtures" / "test.jpg")
    chunker = FileChunker.new(file = file, chunkSize = DefaultBlockSize)
    switch = newStandardSwitch()
    wallet = WalletRef.new(EthPrivateKey.random())
    network = BlockExcNetwork.new(switch)

    clock = SystemClock.new()
    localStoreMetaDs = SQLiteDatastore.new(Memory).tryGet()
    localStoreRepoDs = SQLiteDatastore.new(Memory).tryGet()
    localStore = RepoStore.new(localStoreRepoDs, localStoreMetaDs, clock = clock)
    await localStore.start()

    blockDiscovery = Discovery.new(
      switch.peerInfo.privateKey,
      announceAddrs = @[MultiAddress.init("/ip4/127.0.0.1/tcp/0")
        .expect("Should return multiaddress")])
    peerStore = PeerCtxStore.new()
    pendingBlocks = PendingBlocksManager.new()
    discovery = DiscoveryEngine.new(localStore, peerStore, network, blockDiscovery, pendingBlocks)
    engine = BlockExcEngine.new(localStore, wallet, network, discovery, peerStore, pendingBlocks)
    store = NetworkStore.new(engine, localStore)
    node = CodexNodeRef.new(switch, store, engine, nil, blockDiscovery) # TODO: pass `Erasure`

    await node.start()

  teardown:
    close(file)
    await node.stop()

  test "Fetch Manifest":
    let
      manifest = await Manifest.fetch(chunker)

      manifestBlock = bt.Block.new(
        manifest.encode().tryGet(),
        codec = ManifestCodec).tryGet()

    (await localStore.putBlock(manifestBlock)).tryGet()

    let
      fetched = (await node.fetchManifest(manifestBlock.cid)).tryGet()

    check:
      fetched == manifest
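
  # fetchBatched should invoke the callback once per batch, never delivering
  # more than batchSize blocks per call.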
  test "Block Batching":
    let manifest = await Manifest.fetch(chunker)

    for batchSize in 1..12:
      (await node.fetchBatched(
        manifest,
        batchSize = batchSize,
        proc(blocks: seq[bt.Block]): Future[?!void] {.gcsafe, async.} =
          check blocks.len > 0 and blocks.len <= batchSize
          return success()
      )).tryGet()
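
  # Push odd-sized chunks of the fixture into node.store via a BufferStream,
  # then retrieve the dataset by its manifest Cid and compare it
  # byte-for-byte with the original.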
  test "Store and retrieve Data Stream":
    let
      stream = BufferStream.new()
      storeFut = node.store(stream)
      oddChunkSize = math.trunc(DefaultBlockSize.float/3.14).NBytes # Let's check that node.store can correctly rechunk these odd chunks
      oddChunker = FileChunker.new(file = file, chunkSize = oddChunkSize, pad = false) # TODO: doesn't work with pad = true
    var
      original: seq[byte]

    try:
      while (
        let chunk = await oddChunker.getBytes();
        chunk.len > 0):
        original &= chunk
        await stream.pushData(chunk)
    finally:
      await stream.pushEof()
      await stream.close()

    let
      manifestCid = (await storeFut).tryGet()
    check:
      (await localStore.hasBlock(manifestCid)).tryGet()

    let
      manifestBlock = (await localStore.getBlock(manifestCid)).tryGet()
      localManifest = Manifest.decode(manifestBlock).tryGet()

    let data = await retrieve(manifestCid)

    check:
      data.len == localManifest.datasetSize.int
      data.len == original.len
      sha256.digest(data) == sha256.digest(original)

  test "Retrieve One Block":
    let
      testString = "Block 1"
      blk = bt.Block.new(testString.toBytes).tryGet()

    (await localStore.putBlock(blk)).tryGet()
    let stream = (await node.retrieve(blk.cid)).tryGet()
    defer: await stream.close()

    var data = newSeq[byte](testString.len)
    await stream.readExactly(addr data[0], data.len)
    check string.fromBytes(data) == testString
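
# Same node wiring as above, but with host-contract interactions attached: a
# MockMarket-backed Sales module supplies the onExpiryUpdate and onStore
# callbacks exercised below.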
asyncchecksuite "Test Node - host contracts":
|
|
|
|
let
|
|
|
|
(path, _, _) = instantiationInfo(-2, fullPaths = true) # get this file's name
|
|
|
|
|
|
|
|
var
|
|
|
|
file: File
|
|
|
|
chunker: Chunker
|
|
|
|
switch: Switch
|
|
|
|
wallet: WalletRef
|
|
|
|
network: BlockExcNetwork
|
|
|
|
clock: MockClock
|
|
|
|
localStore: RepoStore
|
|
|
|
localStoreRepoDs: DataStore
|
|
|
|
localStoreMetaDs: DataStore
|
|
|
|
engine: BlockExcEngine
|
|
|
|
store: NetworkStore
|
|
|
|
sales: Sales
|
|
|
|
node: CodexNodeRef
|
|
|
|
blockDiscovery: Discovery
|
|
|
|
peerStore: PeerCtxStore
|
|
|
|
pendingBlocks: PendingBlocksManager
|
|
|
|
discovery: DiscoveryEngine
|
|
|
|
manifest: Manifest
|
|
|
|
manifestCid: string
|
|
|
|
|
|
|
|

  proc fetch(T: type Manifest, chunker: Chunker): Future[Manifest] {.async.} =
    # Collect blocks from Chunker into Manifest
    await storeDataGetManifest(localStore, chunker)

  setup:
    file = open(path.splitFile().dir /../ "fixtures" / "test.jpg")
    chunker = FileChunker.new(file = file, chunkSize = DefaultBlockSize)
    switch = newStandardSwitch()
    wallet = WalletRef.new(EthPrivateKey.random())
    network = BlockExcNetwork.new(switch)

    clock = MockClock.new()
    localStoreMetaDs = SQLiteDatastore.new(Memory).tryGet()
    localStoreRepoDs = SQLiteDatastore.new(Memory).tryGet()
    localStore = RepoStore.new(localStoreRepoDs, localStoreMetaDs, clock = clock)
    await localStore.start()

    blockDiscovery = Discovery.new(
      switch.peerInfo.privateKey,
      announceAddrs = @[MultiAddress.init("/ip4/127.0.0.1/tcp/0")
        .expect("Should return multiaddress")])
    peerStore = PeerCtxStore.new()
    pendingBlocks = PendingBlocksManager.new()
    discovery = DiscoveryEngine.new(localStore, peerStore, network, blockDiscovery, pendingBlocks)
    engine = BlockExcEngine.new(localStore, wallet, network, discovery, peerStore, pendingBlocks)
    store = NetworkStore.new(engine, localStore)
    node = CodexNodeRef.new(switch, store, engine, nil, blockDiscovery) # TODO: pass `Erasure`

    # Setup Host Contracts and dependencies
    let market = MockMarket.new()
    sales = Sales.new(market, clock, localStore)
    let hostContracts = some HostInteractions.new(clock, sales)
    node.contracts = (ClientInteractions.none, hostContracts, ValidatorInteractions.none)

    await node.start()

    # Populate manifest in local store
    manifest = await storeDataGetManifest(localStore, chunker)
    let manifestBlock = bt.Block.new(
      manifest.encode().tryGet(),
      codec = ManifestCodec
    ).tryGet()
    manifestCid = $(manifestBlock.cid)
    (await localStore.putBlock(manifestBlock)).tryGet()

  teardown:
    close(file)
    await node.stop()
test "onExpiryUpdate callback is set":
|
|
|
|
check sales.onExpiryUpdate.isSome
|
|
|
|
|
|
|
|
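
  # Calling onExpiryUpdate with the manifest Cid should push the expiry of
  # every block in the dataset out to the requested timestamp in the metadata
  # store.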
test "onExpiryUpdate callback":
|
|
|
|
let
|
|
|
|
# The blocks have set default TTL, so in order to update it we have to have larger TTL
|
|
|
|
expectedExpiry: SecondsSince1970 = clock.now + DefaultBlockTtl.seconds + 11123
|
|
|
|
expiryUpdateCallback = !sales.onExpiryUpdate
|
|
|
|
|
|
|
|
(await expiryUpdateCallback(manifestCid, expectedExpiry)).tryGet()
|
|
|
|
|
|
|
|
for index in 0..<manifest.blocksCount:
|
|
|
|
let blk = (await localStore.getBlock(manifest.treeCid, index)).tryGet
|
|
|
|
let expiryKey = (createBlockExpirationMetadataKey(blk.cid)).tryGet
|
|
|
|
let expiry = await localStoreMetaDs.get(expiryKey)
|
|
|
|
|
|
|
|
check (expiry.tryGet).toSecondsSince1970 == expectedExpiry
|
|
|
|
|
|
|
|
test "onStore callback is set":
|
|
|
|
check sales.onStore.isSome
|
|
|
|
|
|
|
|
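
  # onStore should fetch the dataset referenced by the storage request and set
  # each block's expiry to the request's expiry; fetchedBytes tallies the bytes
  # delivered to the onBatch callback.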
test "onStore callback":
|
|
|
|
let onStore = !sales.onStore
|
|
|
|
var request = StorageRequest.example
|
|
|
|
request.content.cid = manifestCid
|
2023-11-28 21:04:11 +00:00
|
|
|
request.expiry = (getTime() + DefaultBlockTtl.toTimesDuration + 1.hours).toUnix.u256
|
2023-11-22 10:09:12 +00:00
|
|
|
var fetchedBytes: uint = 0
|
|
|
|
|
2023-11-28 21:04:11 +00:00
|
|
|
let onBatch = proc(blocks: seq[bt.Block]): Future[?!void] {.async.} =
|
2023-11-22 10:09:12 +00:00
|
|
|
for blk in blocks:
|
|
|
|
fetchedBytes += blk.data.len.uint
|
2023-11-28 21:04:11 +00:00
|
|
|
return success()
|
2023-11-22 10:09:12 +00:00
|
|
|
|
|
|
|
(await onStore(request, 0.u256, onBatch)).tryGet()
|
2023-12-22 12:04:01 +00:00
|
|
|
check fetchedBytes == 2293760
|
2023-11-22 10:09:12 +00:00
|
|
|
|
2023-11-28 21:04:11 +00:00
|
|
|
for index in 0..<manifest.blocksCount:
|
|
|
|
let blk = (await localStore.getBlock(manifest.treeCid, index)).tryGet
|
|
|
|
let expiryKey = (createBlockExpirationMetadataKey(blk.cid)).tryGet
|
|
|
|
let expiry = await localStoreMetaDs.get(expiryKey)
|
|
|
|
|
|
|
|
check (expiry.tryGet).toSecondsSince1970 == request.expiry.toSecondsSince1970
|