## Nim-Codex
## Copyright (c) 2021 Status Research & Development GmbH
## Licensed under either of
## * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE))
## * MIT license ([LICENSE-MIT](LICENSE-MIT))
## at your option.
## This file may not be copied, modified, or distributed except according to
## those terms.

import std/options
import std/tables
import std/sequtils

import pkg/questionable
import pkg/questionable/results
import pkg/chronicles
import pkg/chronos
import pkg/libp2p

# TODO: remove once exported by libp2p
import pkg/libp2p/routing_record
import pkg/libp2p/signed_envelope

import ./chunker
import ./blocktype as bt
import ./manifest
import ./stores/blockstore
import ./blockexchange
import ./streams
import ./erasure
import ./discovery
import ./contracts

logScope:
  topics = "codex node"

const
  FetchBatch = 100

type
  CodexError = object of CatchableError

  CodexNodeRef* = ref object
    switch*: Switch
    networkId*: PeerID
    blockStore*: BlockStore
    engine*: BlockExcEngine
    erasure*: Erasure
    discovery*: Discovery
    contracts*: ?ContractInteractions

proc findPeer*(
  node: CodexNodeRef,
  peerId: PeerID): Future[?PeerRecord] {.async.} =
  return await node.discovery.findPeer(peerId)

proc connect*(
  node: CodexNodeRef,
  peerId: PeerID,
  addrs: seq[MultiAddress]): Future[void] =
  node.switch.connect(peerId, addrs)
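
# A minimal connectivity sketch (illustrative only, not used elsewhere in this
# module): look a peer up through discovery and dial the addresses found.
# `exampleDial` is a hypothetical helper; it assumes the libp2p `PeerRecord`
# exposes `addresses: seq[AddressInfo]`.
proc exampleDial(node: CodexNodeRef, peerId: PeerID) {.async, used.} =
  without record =? await node.findPeer(peerId):
    trace "Peer not found", peerId
    return

  await node.connect(record.peerId, record.addresses.mapIt(it.address))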

proc retrieve*(
  node: CodexNodeRef,
  cid: Cid): Future[?!LPStream] {.async.} =

  trace "Received retrieval request", cid
  without blk =? await node.blockStore.getBlock(cid):
    return failure(
      newException(CodexError, "Couldn't retrieve block for Cid!"))

  if manifest =? Manifest.decode(blk.data, blk.cid):
    if manifest.protected:
      # Spawn an erasure decoding job
      proc erasureJob(): Future[void] {.async.} =
        try:
          without res =? (await node.erasure.decode(manifest)), error:
            trace "Unable to erasure decode manifest", cid, exc = error.msg
        except CatchableError as exc:
          trace "Exception decoding manifest", cid, exc = exc.msg

      asyncSpawn erasureJob()

    proc prefetchBlocks() {.async.} =
      ## Initiates requests for all blocks in the manifest
      ##
      try:
        let
          # e.g. 1000 blocks with FetchBatch = 100 yields 10 batches
          batch = max(1, manifest.blocks.len div FetchBatch)

        trace "Prefetching in batches of", FetchBatch
        for blks in manifest.blocks.distribute(batch, true):
          discard await allFinished(
            blks.mapIt( node.blockStore.getBlock( it ) ))
      except CatchableError as exc:
        trace "Exception prefetching blocks", exc = exc.msg

    asyncSpawn prefetchBlocks()
    return LPStream(StoreStream.new(node.blockStore, manifest)).success

  # Not a manifest; stream the single block back as-is
  let
    stream = BufferStream.new()

  proc streamOneBlock(): Future[void] {.async.} =
    try:
      await stream.pushData(blk.data)
    except CatchableError as exc:
      trace "Unable to send block", cid, exc = exc.msg
    finally:
      await stream.pushEof()

  asyncSpawn streamOneBlock()
  return LPStream(stream).success()
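
# A minimal usage sketch (illustrative only): drain a retrieved stream into
# memory. `exampleFetch` is a hypothetical helper, not part of the node API.
proc exampleFetch(node: CodexNodeRef, cid: Cid): Future[?!seq[byte]] {.async, used.} =
  without stream =? (await node.retrieve(cid)), error:
    return failure(error)

  var data: seq[byte]
  try:
    while not stream.atEof():
      var buf: array[4096, byte]
      let read = await stream.readOnce(addr buf[0], buf.len)
      data.add(buf[0 ..< read])
  finally:
    await stream.close()

  return success data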

proc store*(
  node: CodexNodeRef,
  stream: LPStream): Future[?!Cid] {.async.} =
  trace "Storing data"

  without var blockManifest =? Manifest.new():
    return failure("Unable to create Block Set")

  let
    chunker = LPStreamChunker.new(stream, chunkSize = BlockSize)

  try:
    while (
      let chunk = await chunker.getBytes();
      chunk.len > 0):

      trace "Got data from stream", len = chunk.len
      without blk =? bt.Block.new(chunk):
        return failure("Unable to init block from chunk!")

      blockManifest.add(blk.cid)
      if not (await node.blockStore.putBlock(blk)):
        trace "Unable to store block", cid = blk.cid
        return failure("Unable to store block " & $blk.cid)

  except CancelledError as exc:
    raise exc
  except CatchableError as exc:
    return failure(exc.msg)
  finally:
    await stream.close()

  # Generate the dataset manifest
  without data =? blockManifest.encode():
    return failure(
      newException(CodexError, "Could not generate dataset manifest!"))

  # Store the manifest as a dag-pb block
  without manifest =? bt.Block.new(data = data, codec = DagPBCodec):
    trace "Unable to init block from manifest data!"
    return failure("Unable to init block from manifest data!")

  if not (await node.blockStore.putBlock(manifest)):
    trace "Unable to store manifest", cid = manifest.cid
    return failure("Unable to store manifest " & $manifest.cid)

  without cid =? blockManifest.cid, error:
    trace "Unable to generate manifest Cid!", exc = error.msg
    return failure(error.msg)

  trace "Stored data", manifestCid = manifest.cid,
                       contentCid = cid,
                       blocks = blockManifest.len

  return manifest.cid.success
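
# A minimal usage sketch (illustrative only): store a small in-memory payload
# by feeding it through a BufferStream. `exampleStore` and `payload` are
# hypothetical names, not part of the node API.
proc exampleStore(node: CodexNodeRef, payload: seq[byte]): Future[?!Cid] {.async, used.} =
  let stream = BufferStream.new()

  proc feed() {.async.} =
    await stream.pushData(payload)
    await stream.pushEof()

  asyncSpawn feed()
  return await node.store(LPStream(stream))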

proc requestStorage*(self: CodexNodeRef,
                     cid: Cid,
                     duration: UInt256,
                     nodes: uint,
                     tolerance: uint,
                     maxPrice: UInt256,
                     expiry = UInt256.none): Future[?!array[32, byte]] {.async.} =
  ## Initiate a storage request sequence; this may be
  ## a multistep procedure.
  ##
  ## Roughly, the flow is as follows:
  ## - Get the original cid from the store (it should have already been uploaded)
  ## - Erasure code it according to the nodes and tolerance parameters
  ## - Run the PoR setup on the erasure-coded dataset
  ## - Call into the marketplace and purchasing contracts
  ##
  trace "Received a request for storage!", cid, duration, nodes, tolerance, maxPrice

  without contracts =? self.contracts:
    trace "Purchasing not available"
    return failure "Purchasing not available"

  without blk =? (await self.blockStore.getBlock(cid)), error:
    trace "Unable to retrieve manifest block", cid
    return failure(error)

  without mc =? blk.cid.contentType():
    trace "Couldn't identify Cid!", cid
    return failure("Couldn't identify Cid! " & $cid)

  # Ensure the block is a recognized manifest type
  if $mc notin ManifestContainers:
    trace "Not a manifest type!", cid, mc = $mc
    return failure("Not a manifest type!")

  without var manifest =? Manifest.decode(blk.data), error:
    trace "Unable to decode manifest from block", cid
    return failure(error)

  # Erasure code the dataset according to the provided parameters
  without encoded =? (await self.erasure.encode(manifest, nodes.int, tolerance.int)), error:
    trace "Unable to erasure code dataset", cid
    return failure(error)

  without encodedData =? encoded.encode(), error:
    trace "Unable to encode protected manifest"
    return failure(error)

  without encodedBlk =? bt.Block.new(data = encodedData, codec = DagPBCodec), error:
    trace "Unable to create block from encoded manifest"
    return failure(error)

  if not (await self.blockStore.putBlock(encodedBlk)):
    trace "Unable to store encoded manifest block", cid = encodedBlk.cid
    return failure("Unable to store encoded manifest block")

  let request = StorageRequest(
    ask: StorageAsk(
      size: encoded.size.u256,
      duration: duration,
      maxPrice: maxPrice
    ),
    content: StorageContent(
      cid: $encodedBlk.cid,
      erasure: StorageErasure(
        totalChunks: encoded.len.uint64,
        totalNodes: 1, # TODO: store on multiple nodes
        nodeId: 0 # TODO: store on multiple nodes
      ),
      por: StoragePor(
        u: @[], # TODO: PoR setup
        publicKey: @[], # TODO: PoR setup
        name: @[] # TODO: PoR setup
      )
    ),
    expiry: expiry |? 0.u256
  )

  let purchase = contracts.purchasing.purchase(request)
  return success purchase.id
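
# A minimal usage sketch (illustrative only): the parameter values below are
# assumptions picked for illustration, not defaults. On success the returned
# array is the purchase id tracked by the purchasing contracts.
proc exampleRequest(node: CodexNodeRef, cid: Cid): Future[?!array[32, byte]] {.async, used.} =
  return await node.requestStorage(
    cid,
    duration = 3600.u256, # ~1 hour
    nodes = 3,            # store on 3 nodes...
    tolerance = 1,        # ...tolerating the loss of 1
    maxPrice = 100.u256)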

proc new*(
  T: type CodexNodeRef,
  switch: Switch,
  store: BlockStore,
  engine: BlockExcEngine,
  erasure: Erasure,
  discovery: Discovery,
  contracts: ?ContractInteractions): T =
  T(
    switch: switch,
    blockStore: store,
    engine: engine,
    erasure: erasure,
    discovery: discovery,
    contracts: contracts)

proc start*(node: CodexNodeRef) {.async.} =
  if not node.switch.isNil:
    await node.switch.start()

  if not node.engine.isNil:
    await node.engine.start()

  if not node.erasure.isNil:
    await node.erasure.start()

  if not node.discovery.isNil:
    await node.discovery.start()

  if contracts =? node.contracts:
    contracts.sales.retrieve = proc(cid: string) {.async.} =
      # Drain the stream so the underlying blocks are fetched;
      # the bytes read here are discarded
      let stream = (await node.retrieve(Cid.init(cid).tryGet())).tryGet()
      while not stream.atEof():
        var buffer: array[4096, byte]
        discard await readOnce(stream, addr buffer[0], buffer.len)

    contracts.sales.prove = proc(cid: string): Future[seq[byte]] {.async.} =
      return @[42'u8] # TODO: generate actual proof

    await contracts.start()

  node.networkId = node.switch.peerInfo.peerId
  notice "Started codex node", id = $node.networkId, addrs = node.switch.peerInfo.addrs

proc stop*(node: CodexNodeRef) {.async.} =
  trace "Stopping node"

  if not node.engine.isNil:
    await node.engine.stop()

  if not node.switch.isNil:
    await node.switch.stop()

  if not node.erasure.isNil:
    await node.erasure.stop()

  if not node.discovery.isNil:
    await node.discovery.stop()

  if contracts =? node.contracts:
    await contracts.stop()
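
# A minimal lifecycle sketch (illustrative only): wire up and run a node,
# assuming `switch`, `store`, `engine`, `erasure`, `discovery` and `contracts`
# have already been constructed by the application's setup code.
proc exampleLifecycle(
  switch: Switch,
  store: BlockStore,
  engine: BlockExcEngine,
  erasure: Erasure,
  discovery: Discovery,
  contracts: ?ContractInteractions) {.async, used.} =
  let node = CodexNodeRef.new(
    switch, store, engine, erasure, discovery, contracts)

  await node.start()
  # ... the node is now serving requests ...
  await node.stop()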