Update multicodecs (#665)

* rework merkle tree support

* rename merkletree -> codexmerkletree

* tree and proof encoding/decoding

* style

* adding codex merkle and coders tests

* use default hash codec

* proof size changed

* add from nodes test

* shorten file names

* wip poseidon tree

* shorten file names

* root returns a result

* import poseidon tests

* update multicodecs

* consolidating codex types and adding new codecs

* update codec

* remove temp codecs constants

* move codecs related stuff out

* updating codecs

* misc

* updating sizes since block size was adjusted to 64kb

* fix merge issues and cleanup a few warnings
Dmitriy Ryajov 2023-12-22 06:04:01 -06:00 committed by GitHub
parent 52c5578c46
commit b8ee2ac71e
23 changed files with 160 additions and 155 deletions
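
Taken together, the changes above replace inline `multiCodec("...")` literals with named constants from the new `codex/codextypes` module. A minimal sketch of the resulting API, not part of the commit itself (the data bytes and variable names below are illustrative):

import pkg/questionable/results
import pkg/codex/blocktype   # re-exports the new codextypes constants

# Data blocks now default to sha2-256 hashing and the dedicated `codex-block` codec.
let dataBlk = Block.new(data = @[1'u8, 2, 3]).tryGet()

# Manifest blocks are tagged with `codex-manifest` instead of the generic dag-pb codec.
let manifestBlk = Block.new(data = @[1'u8, 2, 3], codec = ManifestCodec).tryGet()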


@@ -175,7 +175,6 @@ proc decode*(_: type WantListEntry, pb: ProtoBuffer): ProtoResult[WantListEntry]
     value = WantListEntry()
     field: uint64
     ipb: ProtoBuffer
-    buf = newSeq[byte]()
   if ? pb.getField(1, ipb):
     value.address = ? BlockAddress.decode(ipb)
   if ? pb.getField(2, field):
@@ -203,7 +202,6 @@ proc decode*(_: type WantList, pb: ProtoBuffer): ProtoResult[WantList] =
 proc decode*(_: type BlockDelivery, pb: ProtoBuffer): ProtoResult[BlockDelivery] =
   var
     value = BlockDelivery()
-    field: uint64
     dataBuf = newSeq[byte]()
     cidBuf = newSeq[byte]()
     cid: Cid


@@ -9,6 +9,7 @@
 import std/tables
 import std/sugar
 export tables
 import pkg/upraises
@@ -26,21 +27,9 @@ import ./units
 import ./utils
 import ./formats
 import ./errors
+import ./codextypes

-export errors, formats, units
+export errors, formats, units, codextypes

-const
-  # Size of blocks for storage / network exchange,
-  # should be divisible by 31 for PoR and by 64 for Leopard ECC
-  DefaultBlockSize* = NBytes 31 * 64 * 33
-
-  # hashes
-  Sha256Hash* = multiCodec("sha2-256")
-
-  # CIDs
-  Raw = multiCodec("raw")
-  DagPB* = multiCodec("dag-pb")
-  DagJson* = multiCodec("dag-json")

 type
   Block* = ref object of RootObj
@@ -100,8 +89,8 @@ func new*(
   T: type Block,
   data: openArray[byte] = [],
   version = CIDv1,
-  mcodec = multiCodec("sha2-256"),
-  codec = multiCodec("raw")
+  mcodec = Sha256HashCodec,
+  codec = BlockCodec
 ): ?!Block =
   ## creates a new block for both storage and network IO
   ##
@@ -116,7 +105,7 @@ func new*(
     cid: cid,
     data: @data).success

-func new*(
+proc new*(
   T: type Block,
   cid: Cid,
   data: openArray[byte],
@@ -138,34 +127,8 @@ func new*(
     data: @data
   ).success

-proc emptyCid*(version: CidVersion, hcodec: MultiCodec, dcodec: MultiCodec): ?!Cid =
-  ## Returns cid representing empty content, given cid version, hash codec and data codec
-  ##
-  const
-    Sha256 = multiCodec("sha2-256")
-    Raw = multiCodec("raw")
-    DagPB = multiCodec("dag-pb")
-    DagJson = multiCodec("dag-json")
-
-  var index {.global, threadvar.}: Table[(CidVersion, MultiCodec, MultiCodec), Cid]
-  once:
-    index = {
-      # source https://ipld.io/specs/codecs/dag-pb/fixtures/cross-codec/#dagpb_empty
-      (CIDv0, Sha256, DagPB): ? Cid.init("QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n").mapFailure,
-      (CIDv1, Sha256, DagPB): ? Cid.init("zdj7Wkkhxcu2rsiN6GUyHCLsSLL47kdUNfjbFqBUUhMFTZKBi").mapFailure, # base36: bafybeihdwdcefgh4dqkjv67uzcmw7ojee6xedzdetojuzjevtenxquvyku
-      (CIDv1, Sha256, DagJson): ? Cid.init("z4EBG9jGUWMVxX9deANWX7iPyExLswe2akyF7xkNAaYgugvnhmP").mapFailure, # base36: baguqeera6mfu3g6n722vx7dbitpnbiyqnwah4ddy4b5c3rwzxc5pntqcupta
-      (CIDv1, Sha256, Raw): ? Cid.init("zb2rhmy65F3REf8SZp7De11gxtECBGgUKaLdiDj7MCGCHxbDW").mapFailure,
-    }.toTable
-
-  index[(version, hcodec, dcodec)].catch
-
-proc emptyDigest*(version: CidVersion, hcodec: MultiCodec, dcodec: MultiCodec): ?!MultiHash =
-  emptyCid(version, hcodec, dcodec)
-    .flatMap((cid: Cid) => cid.mhash.mapFailure)
-
 proc emptyBlock*(version: CidVersion, hcodec: MultiCodec): ?!Block =
-  emptyCid(version, hcodec, multiCodec("raw"))
+  emptyCid(version, hcodec, BlockCodec)
     .flatMap((cid: Cid) => Block.new(cid = cid, data = @[]))

 proc emptyBlock*(cid: Cid): ?!Block =

codex/codextypes.nim (new file, 107 additions)

@@ -0,0 +1,107 @@
## Nim-Codex
## Copyright (c) 2023 Status Research & Development GmbH
## Licensed under either of
## * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE))
## * MIT license ([LICENSE-MIT](LICENSE-MIT))
## at your option.
## This file may not be copied, modified, or distributed except according to
## those terms.

{.push raises: [].}

import std/tables
import std/sugar

import pkg/libp2p/multicodec
import pkg/libp2p/multihash
import pkg/libp2p/cid
import pkg/results
import pkg/questionable/results

import ./units
import ./errors

export tables

const
  # Size of blocks for storage / network exchange,
  DefaultBlockSize* = NBytes 1024*64
  DefaultCellSize* = NBytes 2048

  # hashes
  Sha256HashCodec* = multiCodec("sha2-256")
  Sha512HashCodec* = multiCodec("sha2-512")
  Pos2Bn128SpngCodec* = multiCodec("poseidon2-alt_bn_128-sponge-r2")
  Pos2Bn128MrklCodec* = multiCodec("poseidon2-alt_bn_128-merkle-2kb")

  ManifestCodec* = multiCodec("codex-manifest")
  DatasetRootCodec* = multiCodec("codex-root")
  BlockCodec* = multiCodec("codex-block")
  SlotRootCodec* = multiCodec("codex-slot-root")
  SlotProvingRootCodec* = multiCodec("codex-proving-root")

  CodexHashesCodecs* = [
    Sha256HashCodec,
    Pos2Bn128SpngCodec,
    Pos2Bn128MrklCodec
  ]

  CodexPrimitivesCodecs* = [
    ManifestCodec,
    DatasetRootCodec,
    BlockCodec,
    SlotRootCodec,
    SlotProvingRootCodec,
  ]

proc initEmptyCidTable(): ?!Table[(CidVersion, MultiCodec, MultiCodec), Cid] =
  ## Initialize padding blocks table
  ##
  ## TODO: Ideally this is done at compile time, but for now
  ## we do it at runtime because of an `importc` error that is
  ## coming from somewhere in MultiHash that I can't track down.
  ##

  let
    emptyData: seq[byte] = @[]
    PadHashes = {
      Sha256HashCodec: ? MultiHash.digest($Sha256HashCodec, emptyData).mapFailure,
      Sha512HashCodec: ? MultiHash.digest($Sha512HashCodec, emptyData).mapFailure,
      Pos2Bn128SpngCodec: ? MultiHash.digest($Pos2Bn128SpngCodec, emptyData).mapFailure,
      Pos2Bn128MrklCodec: ? MultiHash.digest($Pos2Bn128SpngCodec, emptyData).mapFailure,
    }.toTable

  var
    table = initTable[(CidVersion, MultiCodec, MultiCodec), Cid]()

  for hcodec, mhash in PadHashes.pairs:
    table[(CIDv1, hcodec, BlockCodec)] = ? Cid.init(CIDv1, BlockCodec, mhash).mapFailure

  success table

proc emptyCid*(
  version: CidVersion,
  hcodec: MultiCodec,
  dcodec: MultiCodec): ?!Cid =
  ## Returns cid representing empty content,
  ## given cid version, hash codec and data codec
  ##

  var
    table {.global, threadvar.}: Table[(CidVersion, MultiCodec, MultiCodec), Cid]

  once:
    table = ? initEmptyCidTable()

  table[(version, hcodec, dcodec)].catch

proc emptyDigest*(
  version: CidVersion,
  hcodec: MultiCodec,
  dcodec: MultiCodec): ?!MultiHash =
  ## Returns hash representing empty content,
  ## given cid version, hash codec and data codec
  ##

  emptyCid(version, hcodec, dcodec)
    .flatMap((cid: Cid) => cid.mhash.mapFailure)
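
A rough usage sketch for the helpers above (not part of the diff; assumes the module is importable as `pkg/codex/codextypes`, and the variable names are illustrative):

import pkg/libp2p/cid
import pkg/questionable/results
import pkg/codex/codextypes

# CID and digest of empty content for the default block codec; the lookup
# table is built lazily on first call via initEmptyCidTable().
let emptyBlockCid = emptyCid(CIDv1, Sha256HashCodec, BlockCodec).tryGet()
let emptyBlockDigest = emptyDigest(CIDv1, Sha256HashCodec, BlockCodec).tryGet()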


@@ -16,7 +16,7 @@ import std/sugar
 import pkg/chronos
 import pkg/chronicles
-import pkg/libp2p/[multicodec, cid, multibase, multihash]
+import pkg/libp2p/[multicodec, cid, multihash]
 import pkg/libp2p/protobuf/minprotobuf

 import ../manifest
@@ -379,7 +379,7 @@ proc decode*(
   data[].setLen(encoded.ecK) # set len to K
   parityData[].setLen(encoded.ecM) # set len to M

-  without (dataPieces, parityPieces) =?
+  without (dataPieces, _) =?
     (await self.prepareDecodingData(encoded, step, data, parityData, cids, emptyBlock)), err:
     trace "Unable to prepare data", error = err.msg
     return failure(err)


@@ -1,5 +1,4 @@
 import ./manifest/coders
 import ./manifest/manifest
-import ./manifest/types

-export types, manifest, coders
+export manifest, coders


@@ -25,9 +25,8 @@ import pkg/chronos
 import ./manifest
 import ../errors
 import ../blocktype
-import ./types

-proc encode*(_: DagPBCoder, manifest: Manifest): ?!seq[byte] =
+proc encode*(manifest: Manifest): ?!seq[byte] =
   ## Encode the manifest into a ``ManifestCodec``
   ## multicodec container (Dag-pb) for now
   ##
@@ -50,6 +49,7 @@ proc encode*(_: DagPBCoder, manifest: Manifest): ?!seq[byte] =
   # optional uint32 originalDatasetSize = 4; # size of the original dataset
   # optional VerificationInformation verification = 5; # verification information
   # }
+  #
   # Message Header {
   # optional bytes treeCid = 1; # cid (root) of the tree
   # optional uint32 blockSize = 2; # size of a single block
@@ -91,7 +91,7 @@ proc encode*(_: DagPBCoder, manifest: Manifest): ?!seq[byte] =
   return pbNode.buffer.success

-proc decode*(_: DagPBCoder, data: openArray[byte]): ?!Manifest =
+proc decode*(_: type Manifest, data: openArray[byte]): ?!Manifest =
   ## Decode a manifest from a data blob
   ##
@@ -204,25 +204,6 @@ proc decode*(_: DagPBCoder, data: openArray[byte]): ?!Manifest =
   self.success

-proc encode*(
-  self: Manifest,
-  encoder = ManifestContainers[$DagPBCodec]
-): ?!seq[byte] =
-  ## Encode a manifest using `encoder`
-  ##
-
-  encoder.encode(self)
-
-func decode*(
-  _: type Manifest,
-  data: openArray[byte],
-  decoder = ManifestContainers[$DagPBCodec]
-): ?!Manifest =
-  ## Decode a manifest using `decoder`
-  ##
-
-  decoder.decode(data)
-
 func decode*(_: type Manifest, blk: Block): ?!Manifest =
   ## Decode a manifest using `decoder`
   ##
@@ -230,6 +211,4 @@ func decode*(_: type Manifest, blk: Block): ?!Manifest =
   if not ? blk.cid.isManifest:
     return failure "Cid not a manifest codec"

-  Manifest.decode(
-    blk.data,
-    ? ManifestContainers[$(?blk.cid.contentType().mapFailure)].catch)
+  Manifest.decode(blk.data)
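
With `ManifestContainers` and the coder objects gone, the manifest round trip reduces to one pair of calls. A short sketch, where `someManifest` stands in for an existing `Manifest` instance (illustrative only):

import pkg/questionable/results
import pkg/codex/manifest

let bytes = someManifest.encode().tryGet()    # dag-pb payload, tagged with ManifestCodec
let decoded = Manifest.decode(bytes).tryGet() # no decoder/container argument anymore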


@@ -14,19 +14,14 @@ import pkg/upraises
 push: {.upraises: [].}

 import pkg/libp2p/protobuf/minprotobuf
-import pkg/libp2p
+import pkg/libp2p/[cid, multihash, multicodec]
-import pkg/questionable
 import pkg/questionable/results
-import pkg/chronicles

 import ../errors
 import ../utils
 import ../utils/json
 import ../units
 import ../blocktype
-import ./types
-
-export types

 type
   Manifest* = ref object of RootObj
@@ -108,11 +103,10 @@ proc slotRoots*(self: Manifest): seq[Cid] =
 ############################################################

 func isManifest*(cid: Cid): ?!bool =
-  let res = ?cid.contentType().mapFailure(CodexError)
-  ($(res) in ManifestContainers).success
+  success (ManifestCodec == ? cid.contentType().mapFailure(CodexError))

 func isManifest*(mc: MultiCodec): ?!bool =
-  ($mc in ManifestContainers).success
+  success mc == ManifestCodec

 ############################################################
 # Various sizes and verification
@@ -199,8 +193,8 @@ proc new*(
   blockSize: NBytes,
   datasetSize: NBytes,
   version: CidVersion = CIDv1,
-  hcodec = multiCodec("sha2-256"),
-  codec = multiCodec("raw"),
+  hcodec = Sha256HashCodec,
+  codec = BlockCodec,
   protected = false): Manifest =

   T(
@@ -240,6 +234,7 @@ proc new*(
   ## Create an unprotected dataset from an
   ## erasure protected one
   ##
+
   Manifest(
     treeCid: manifest.originalTreeCid,
     datasetSize: manifest.originalDatasetSize,
@@ -251,12 +246,11 @@ proc new*(

 proc new*(
   T: type Manifest,
-  data: openArray[byte],
-  decoder = ManifestContainers[$DagPBCodec]): ?!Manifest =
+  data: openArray[byte]): ?!Manifest =
   ## Create a manifest instance from given data
   ##

-  Manifest.decode(data, decoder)
+  Manifest.decode(data)

 proc new*(
   T: type Manifest,


@@ -1,28 +0,0 @@
-## Nim-Codex
-## Copyright (c) 2022 Status Research & Development GmbH
-## Licensed under either of
-## * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE))
-## * MIT license ([LICENSE-MIT](LICENSE-MIT))
-## at your option.
-## This file may not be copied, modified, or distributed except according to
-## those terms.
-
-# This module defines Manifest and all related types
-
-import std/tables
-import pkg/libp2p
-
-import ../units
-export units
-
-const
-  DagPBCodec* = multiCodec("dag-pb")
-
-type
-  ManifestCoderType*[codec: static MultiCodec] = object
-  DagPBCoder* = ManifestCoderType[multiCodec("dag-pb")]
-
-const
-  ManifestContainers* = {
-    $DagPBCodec: DagPBCoder()
-  }.toTable


@@ -31,10 +31,6 @@ export merkletree
 logScope:
   topics = "codex merkletree"

-const
-  DatasetRootCodec* = multiCodec("codex-root") # TODO: move to blocktype
-  BlockCodec* = multiCodec("raw") # TODO: fix multicodec to `codex-block` and move to blocktype
-
 type
   ByteTreeKey* {.pure.} = enum
     KeyNone = 0x0.byte
@@ -163,7 +159,7 @@ func compress*(
 func init*(
   _: type CodexTree,
-  mcodec: MultiCodec = multiCodec("sha2-256"),
+  mcodec: MultiCodec = Sha256HashCodec,
   leaves: openArray[ByteHash]): ?!CodexTree =

   if leaves.len == 0:
@@ -211,7 +207,7 @@ func init*(
 proc fromNodes*(
   _: type CodexTree,
-  mcodec: MultiCodec = multiCodec("sha2-256"),
+  mcodec: MultiCodec = Sha256HashCodec,
   nodes: openArray[ByteHash],
   nleaves: int): ?!CodexTree =
@@ -246,7 +242,7 @@ proc fromNodes*(
 func init*(
   _: type CodexProof,
-  mcodec: MultiCodec = multiCodec("sha2-256"),
+  mcodec: MultiCodec = Sha256HashCodec,
   index: int,
   nleaves: int,
   nodes: openArray[ByteHash]): ?!CodexProof =


@@ -8,7 +8,6 @@
 ## those terms.

 import std/options
-import std/tables
 import std/sequtils
 import std/strformat
 import std/sugar
@@ -208,8 +207,8 @@ proc store*(
   trace "Storing data"

   let
-    hcodec = multiCodec("sha2-256")
-    dataCodec = multiCodec("raw")
+    hcodec = Sha256HashCodec
+    dataCodec = BlockCodec
     chunker = LPStreamChunker.new(stream, chunkSize = blockSize)

   var cids: seq[Cid]
@@ -270,7 +269,7 @@ proc store*(
       newException(CodexError, "Error encoding manifest: " & err.msg))

   # Store as a dag-pb block
-  without manifestBlk =? bt.Block.new(data = data, codec = DagPBCodec):
+  without manifestBlk =? bt.Block.new(data = data, codec = ManifestCodec):
     trace "Unable to init block from manifest data!"
     return failure("Unable to init block from manifest data!")
@@ -344,7 +343,7 @@ proc requestStorage*(
     trace "Unable to encode protected manifest"
     return failure(error)

-  without encodedBlk =? bt.Block.new(data = encodedData, codec = DagPBCodec), error:
+  without encodedBlk =? bt.Block.new(data = encodedData, codec = ManifestCodec), error:
     trace "Unable to create block from encoded manifest"
     return failure(error)


@@ -2,7 +2,6 @@ import pkg/metrics
 import pkg/chronicles

 import ../statemachine
 import ./errorhandling
-import ./error

 declareCounter(codex_purchases_cancelled, "codex purchases cancelled")


@@ -30,8 +30,6 @@ export blockstore, blockexchange, asyncheapqueue
 logScope:
   topics = "codex networkstore"

-const BlockPrefetchAmount = 5
-
 type
   NetworkStore* = ref object of BlockStore
     engine*: BlockExcEngine # blockexc decision engine

View File

@@ -31,6 +31,7 @@ template basicMaths(T: untyped) =
   proc `+=` *(x: var T, y: T) {.borrow.}
   proc `-=` *(x: var T, y: T) {.borrow.}
   proc `hash` *(x: T): Hash {.borrow.}
+
 template divMaths(T: untyped) =
   proc `mod` *(x, y: T): T = T(`mod`(x.Natural, y.Natural))
   proc `div` *(x, y: T): Natural = `div`(x.Natural, y.Natural)

View File

@@ -8,7 +8,7 @@ when getEnv("NIMBUS_BUILD_SYSTEM") == "yes" and
   # BEWARE
   # In Nim 1.6, config files are evaluated with a working directory
   # matching where the Nim command was invocated. This means that we
-  # must do all file existance checks with full absolute paths:
+  # must do all file existence checks with full absolute paths:
   system.fileExists(currentDir & "nimbus-build-system.paths"):
   include "nimbus-build-system.paths"


@@ -56,7 +56,7 @@ asyncchecksuite "Block Advertising and Discovery":
     (manifest, tree) = makeManifestAndTree(blocks).tryGet()
     manifestBlock = bt.Block.new(
-      manifest.encode().tryGet(), codec = DagPBCodec).tryGet()
+      manifest.encode().tryGet(), codec = ManifestCodec).tryGet()

   (await localStore.putBlock(manifestBlock)).tryGet()


@@ -53,7 +53,7 @@ proc example*(_: type BlockExcPeerCtx): BlockExcPeerCtx =
 proc example*(_: type Cid): Cid =
   bt.Block.example.cid

-proc example*(_: type MultiHash, mcodec = multiCodec("sha2-256")): MultiHash =
+proc example*(_: type MultiHash, mcodec = Sha256HashCodec): MultiHash =
   let bytes = newSeqWith(256, rand(uint8))
   MultiHash.digest($mcodec, bytes).tryGet()


@@ -25,7 +25,7 @@ checksuite "merkletree - coders":
   test "encoding and decoding a tree yields the same tree":
     let
-      tree = CodexTree.init(multiCodec("sha2-256"), data).tryGet()
+      tree = CodexTree.init(Sha256HashCodec, data).tryGet()
       encodedBytes = tree.encode()
       decodedTree = CodexTree.decode(encodedBytes).tryGet()
@@ -34,7 +34,7 @@ checksuite "merkletree - coders":
   test "encoding and decoding a proof yields the same proof":
     let
-      tree = CodexTree.init(multiCodec("sha2-256"), data).tryGet()
+      tree = CodexTree.init(Sha256HashCodec, data).tryGet()
       proof = tree.getProof(4).tryGet()

     check:


@@ -7,6 +7,7 @@ import pkg/stew/byteutils
 import pkg/nimcrypto/sha2
 import pkg/libp2p

+import pkg/codex/codextypes
 import pkg/codex/merkletree

 import ./helpers
@@ -28,7 +29,7 @@
     "00000000000000000000000000000009".toBytes,
     "00000000000000000000000000000010".toBytes,
   ]
-  sha256 = multiCodec("sha2-256")
+  sha256 = Sha256HashCodec

 suite "Test CodexTree":
   test "Cannot init tree without any multihash leaves":


@@ -110,7 +110,7 @@ proc commonBlockStoreTests*(name: string,
   test "listBlocks Manifest":
     let
       blocks = @[newBlock1, newBlock2, newBlock3]
-      manifestBlock = Block.new(manifest.encode().tryGet(), codec = DagPBCodec).tryGet()
+      manifestBlock = Block.new(manifest.encode().tryGet(), codec = ManifestCodec).tryGet()
       treeBlock = Block.new(tree.encode()).tryGet()
       putHandles = await allFinished(
         (@[treeBlock, manifestBlock] & blocks).mapIt( store.putBlock( it ) ))
@@ -134,7 +134,7 @@ proc commonBlockStoreTests*(name: string,
   test "listBlocks Both":
     let
       blocks = @[newBlock1, newBlock2, newBlock3]
-      manifestBlock = Block.new(manifest.encode().tryGet(), codec = DagPBCodec).tryGet()
+      manifestBlock = Block.new(manifest.encode().tryGet(), codec = ManifestCodec).tryGet()
       treeBlock = Block.new(tree.encode()).tryGet()
       putHandles = await allFinished(
         (@[treeBlock, manifestBlock] & blocks).mapIt( store.putBlock( it ) ))


@@ -29,8 +29,8 @@ proc createManifestCid(): ?!Cid =
   let
     length = rand(4096)
     bytes = newSeqWith(length, rand(uint8))
-    mcodec = multiCodec("sha2-256")
-    codec = multiCodec("dag-pb")
+    mcodec = Sha256HashCodec
+    codec = ManifestCodec
     version = CIDv1

   let hash = ? MultiHash.digest($mcodec, bytes).mapFailure


@@ -116,8 +116,7 @@ asyncchecksuite "Test Node":
     manifestBlock = bt.Block.new(
       manifest.encode().tryGet(),
-      codec = DagPBCodec
-    ).tryGet()
+      codec = ManifestCodec).tryGet()

   (await localStore.putBlock(manifestBlock)).tryGet()
@@ -253,7 +252,7 @@ asyncchecksuite "Test Node - host contracts":
     manifest = await storeDataGetManifest(localStore, chunker)
     let manifestBlock = bt.Block.new(
       manifest.encode().tryGet(),
-      codec = DagPBCodec
+      codec = ManifestCodec
     ).tryGet()
     manifestCid = $(manifestBlock.cid)

     (await localStore.putBlock(manifestBlock)).tryGet()
@@ -296,7 +295,7 @@ asyncchecksuite "Test Node - host contracts":
       return success()

     (await onStore(request, 0.u256, onBatch)).tryGet()
-    check fetchedBytes == 2291520
+    check fetchedBytes == 2293760

     for index in 0..<manifest.blocksCount:
       let blk = (await localStore.getBlock(manifest.treeCid, index)).tryGet
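
The updated expectation lines up with the block-size change elsewhere in this commit: both values correspond to 35 blocks, since 35 × 65472 (the old DefaultBlockSize of 31 × 64 × 33 bytes) = 2291520, while 35 × 65536 (the new 64 KiB blocks) = 2293760.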


@@ -51,7 +51,7 @@ twonodessuite "Integration tests", debug1 = false, debug2 = false:
     check:
       space.totalBlocks == 2.uint
       space.quotaMaxBytes == 8589934592.uint
-      space.quotaUsedBytes == 65526.uint
+      space.quotaUsedBytes == 65592.uint
       space.quotaReservedBytes == 12.uint

   test "node allows local file downloads":