Update multicodecs (#665)

* rework merkle tree support

* rename merkletree -> codexmerkletree

* treed and proof encoding/decoding

* style

* adding codex merkle and coders tests

* use default hash codec

* proof size changed

* add from nodes test

* shorten file names

* wip poseidon tree

* shorten file names

* root returns a result

* import poseidon tests

* update multicodecs

* consolidating codex types and adding new codecs

* update codec

* remove temp codecs constants

* move codecs related stuff out

* updating codecs

* misc

* updating sizes since block size was adjusted to 64kb

* fix merge issues and cleanup a few warnings
Dmitriy Ryajov 2023-12-22 06:04:01 -06:00 committed by GitHub
parent 52c5578c46
commit b8ee2ac71e
23 changed files with 160 additions and 155 deletions
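For orientation, the codec consolidation in this commit mostly amounts to call sites switching from raw multicodec lookups to the named constants in the new codextypes module. A minimal sketch follows; the import paths and the sample payload are assumptions for illustration, not part of this diff:

import pkg/codex/blocktype as bt   # assumed module path for Block
import pkg/codex/codextypes        # new module introduced by this commit

# before: mcodec = multiCodec("sha2-256"), codec = multiCodec("raw")
# after:  the named constants defined in codextypes
let blk = bt.Block.new(
  data = @[1'u8, 2, 3],            # illustrative payload
  mcodec = Sha256HashCodec,        # hash codec
  codec = BlockCodec               # data codec ("codex-block")
).tryGet()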

View File

@ -31,12 +31,12 @@ logScope:
type
BlockExcPeerCtx* = ref object of RootObj
id*: PeerId
blocks*: Table[BlockAddress, Presence] # remote peer have list including price
peerWants*: seq[WantListEntry] # remote peers want lists
exchanged*: int # times peer has exchanged with us
lastExchange*: Moment # last time peer has exchanged with us
account*: ?Account # ethereum account of this peer
paymentChannel*: ?ChannelId # payment channel id
blocks*: Table[BlockAddress, Presence] # remote peer have list including price
peerWants*: seq[WantListEntry] # remote peers want lists
exchanged*: int # times peer has exchanged with us
lastExchange*: Moment # last time peer has exchanged with us
account*: ?Account # ethereum account of this peer
paymentChannel*: ?ChannelId # payment channel id
proc peerHave*(self: BlockExcPeerCtx): seq[BlockAddress] =
toSeq(self.blocks.keys)

View File

@ -175,7 +175,6 @@ proc decode*(_: type WantListEntry, pb: ProtoBuffer): ProtoResult[WantListEntry]
value = WantListEntry()
field: uint64
ipb: ProtoBuffer
buf = newSeq[byte]()
if ? pb.getField(1, ipb):
value.address = ? BlockAddress.decode(ipb)
if ? pb.getField(2, field):
@ -203,7 +202,6 @@ proc decode*(_: type WantList, pb: ProtoBuffer): ProtoResult[WantList] =
proc decode*(_: type BlockDelivery, pb: ProtoBuffer): ProtoResult[BlockDelivery] =
var
value = BlockDelivery()
field: uint64
dataBuf = newSeq[byte]()
cidBuf = newSeq[byte]()
cid: Cid

View File

@ -9,6 +9,7 @@
import std/tables
import std/sugar
export tables
import pkg/upraises
@ -26,21 +27,9 @@ import ./units
import ./utils
import ./formats
import ./errors
import ./codextypes
export errors, formats, units
const
# Size of blocks for storage / network exchange,
# should be divisible by 31 for PoR and by 64 for Leopard ECC
DefaultBlockSize* = NBytes 31 * 64 * 33
# hashes
Sha256Hash* = multiCodec("sha2-256")
# CIDs
Raw = multiCodec("raw")
DagPB* = multiCodec("dag-pb")
DagJson* = multiCodec("dag-json")
export errors, formats, units, codextypes
type
Block* = ref object of RootObj
@ -100,8 +89,8 @@ func new*(
T: type Block,
data: openArray[byte] = [],
version = CIDv1,
mcodec = multiCodec("sha2-256"),
codec = multiCodec("raw")
mcodec = Sha256HashCodec,
codec = BlockCodec
): ?!Block =
## creates a new block for both storage and network IO
##
@ -116,7 +105,7 @@ func new*(
cid: cid,
data: @data).success
func new*(
proc new*(
T: type Block,
cid: Cid,
data: openArray[byte],
@ -138,34 +127,8 @@ func new*(
data: @data
).success
proc emptyCid*(version: CidVersion, hcodec: MultiCodec, dcodec: MultiCodec): ?!Cid =
## Returns cid representing empty content, given cid version, hash codec and data codec
##
const
Sha256 = multiCodec("sha2-256")
Raw = multiCodec("raw")
DagPB = multiCodec("dag-pb")
DagJson = multiCodec("dag-json")
var index {.global, threadvar.}: Table[(CidVersion, MultiCodec, MultiCodec), Cid]
once:
index = {
# source https://ipld.io/specs/codecs/dag-pb/fixtures/cross-codec/#dagpb_empty
(CIDv0, Sha256, DagPB): ? Cid.init("QmdfTbBqBPQ7VNxZEYEj14VmRuZBkqFbiwReogJgS1zR1n").mapFailure,
(CIDv1, Sha256, DagPB): ? Cid.init("zdj7Wkkhxcu2rsiN6GUyHCLsSLL47kdUNfjbFqBUUhMFTZKBi").mapFailure, # base36: bafybeihdwdcefgh4dqkjv67uzcmw7ojee6xedzdetojuzjevtenxquvyku
(CIDv1, Sha256, DagJson): ? Cid.init("z4EBG9jGUWMVxX9deANWX7iPyExLswe2akyF7xkNAaYgugvnhmP").mapFailure, # base36: baguqeera6mfu3g6n722vx7dbitpnbiyqnwah4ddy4b5c3rwzxc5pntqcupta
(CIDv1, Sha256, Raw): ? Cid.init("zb2rhmy65F3REf8SZp7De11gxtECBGgUKaLdiDj7MCGCHxbDW").mapFailure,
}.toTable
index[(version, hcodec, dcodec)].catch
proc emptyDigest*(version: CidVersion, hcodec: MultiCodec, dcodec: MultiCodec): ?!MultiHash =
emptyCid(version, hcodec, dcodec)
.flatMap((cid: Cid) => cid.mhash.mapFailure)
proc emptyBlock*(version: CidVersion, hcodec: MultiCodec): ?!Block =
emptyCid(version, hcodec, multiCodec("raw"))
emptyCid(version, hcodec, BlockCodec)
.flatMap((cid: Cid) => Block.new(cid = cid, data = @[]))
proc emptyBlock*(cid: Cid): ?!Block =

codex/codextypes.nim (new file, 107 lines)
View File

@ -0,0 +1,107 @@
## Nim-Codex
## Copyright (c) 2023 Status Research & Development GmbH
## Licensed under either of
## * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE))
## * MIT license ([LICENSE-MIT](LICENSE-MIT))
## at your option.
## This file may not be copied, modified, or distributed except according to
## those terms.
{.push raises: [].}
import std/tables
import std/sugar
import pkg/libp2p/multicodec
import pkg/libp2p/multihash
import pkg/libp2p/cid
import pkg/results
import pkg/questionable/results
import ./units
import ./errors
export tables
const
# Size of blocks for storage / network exchange,
DefaultBlockSize* = NBytes 1024*64
DefaultCellSize* = NBytes 2048
# hashes
Sha256HashCodec* = multiCodec("sha2-256")
Sha512HashCodec* = multiCodec("sha2-512")
Pos2Bn128SpngCodec* = multiCodec("poseidon2-alt_bn_128-sponge-r2")
Pos2Bn128MrklCodec* = multiCodec("poseidon2-alt_bn_128-merkle-2kb")
ManifestCodec* = multiCodec("codex-manifest")
DatasetRootCodec* = multiCodec("codex-root")
BlockCodec* = multiCodec("codex-block")
SlotRootCodec* = multiCodec("codex-slot-root")
SlotProvingRootCodec* = multiCodec("codex-proving-root")
CodexHashesCodecs* = [
Sha256HashCodec,
Pos2Bn128SpngCodec,
Pos2Bn128MrklCodec
]
CodexPrimitivesCodecs* = [
ManifestCodec,
DatasetRootCodec,
BlockCodec,
SlotRootCodec,
SlotProvingRootCodec,
]
proc initEmptyCidTable(): ?!Table[(CidVersion, MultiCodec, MultiCodec), Cid] =
## Initialize padding blocks table
##
## TODO: Ideally this is done at compile time, but for now
## we do it at runtime because of an `importc` error that is
## coming from somewhere in MultiHash that I can't track down.
##
let
emptyData: seq[byte] = @[]
PadHashes = {
Sha256HashCodec: ? MultiHash.digest($Sha256HashCodec, emptyData).mapFailure,
Sha512HashCodec: ? MultiHash.digest($Sha512HashCodec, emptyData).mapFailure,
Pos2Bn128SpngCodec: ? MultiHash.digest($Pos2Bn128SpngCodec, emptyData).mapFailure,
Pos2Bn128MrklCodec: ? MultiHash.digest($Pos2Bn128MrklCodec, emptyData).mapFailure,
}.toTable
var
table = initTable[(CidVersion, MultiCodec, MultiCodec), Cid]()
for hcodec, mhash in PadHashes.pairs:
table[(CIDv1, hcodec, BlockCodec)] = ? Cid.init(CIDv1, BlockCodec, mhash).mapFailure
success table
proc emptyCid*(
version: CidVersion,
hcodec: MultiCodec,
dcodec: MultiCodec): ?!Cid =
## Returns cid representing empty content,
## given cid version, hash codec and data codec
##
var
table {.global, threadvar.}: Table[(CidVersion, MultiCodec, MultiCodec), Cid]
once:
table = ? initEmptyCidTable()
table[(version, hcodec, dcodec)].catch
proc emptyDigest*(
version: CidVersion,
hcodec: MultiCodec,
dcodec: MultiCodec): ?!MultiHash =
## Returns hash representing empty content,
## given cid version, hash codec and data codec
##
emptyCid(version, hcodec, dcodec)
.flatMap((cid: Cid) => cid.mhash.mapFailure)
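
A hedged usage sketch for the helpers above; CIDv1 comes from libp2p and the import paths are assumptions:

import pkg/libp2p/cid           # CidVersion / CIDv1
import pkg/codex/codextypes     # the module added in this file

let
  emptyBlockCid  = emptyCid(CIDv1, Sha256HashCodec, BlockCodec).tryGet()    # CID of empty content
  emptyBlockHash = emptyDigest(CIDv1, Sha256HashCodec, BlockCodec).tryGet() # matching multihash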

View File

@ -16,7 +16,7 @@ import std/sugar
import pkg/chronos
import pkg/chronicles
import pkg/libp2p/[multicodec, cid, multibase, multihash]
import pkg/libp2p/[multicodec, cid, multihash]
import pkg/libp2p/protobuf/minprotobuf
import ../manifest
@ -379,7 +379,7 @@ proc decode*(
data[].setLen(encoded.ecK) # set len to K
parityData[].setLen(encoded.ecM) # set len to M
without (dataPieces, parityPieces) =?
without (dataPieces, _) =?
(await self.prepareDecodingData(encoded, step, data, parityData, cids, emptyBlock)), err:
trace "Unable to prepare data", error = err.msg
return failure(err)

View File

@ -1,5 +1,4 @@
import ./manifest/coders
import ./manifest/manifest
import ./manifest/types
export types, manifest, coders
export manifest, coders

View File

@ -25,9 +25,8 @@ import pkg/chronos
import ./manifest
import ../errors
import ../blocktype
import ./types
proc encode*(_: DagPBCoder, manifest: Manifest): ?!seq[byte] =
proc encode*(manifest: Manifest): ?!seq[byte] =
## Encode the manifest into a ``ManifestCodec``
## multicodec container (Dag-pb) for now
##
@ -50,6 +49,7 @@ proc encode*(_: DagPBCoder, manifest: Manifest): ?!seq[byte] =
# optional uint32 originalDatasetSize = 4; # size of the original dataset
# optional VerificationInformation verification = 5; # verification information
# }
#
# Message Header {
# optional bytes treeCid = 1; # cid (root) of the tree
# optional uint32 blockSize = 2; # size of a single block
@ -91,7 +91,7 @@ proc encode*(_: DagPBCoder, manifest: Manifest): ?!seq[byte] =
return pbNode.buffer.success
proc decode*(_: DagPBCoder, data: openArray[byte]): ?!Manifest =
proc decode*(_: type Manifest, data: openArray[byte]): ?!Manifest =
## Decode a manifest from a data blob
##
@ -204,25 +204,6 @@ proc decode*(_: DagPBCoder, data: openArray[byte]): ?!Manifest =
self.success
proc encode*(
self: Manifest,
encoder = ManifestContainers[$DagPBCodec]
): ?!seq[byte] =
## Encode a manifest using `encoder`
##
encoder.encode(self)
func decode*(
_: type Manifest,
data: openArray[byte],
decoder = ManifestContainers[$DagPBCodec]
): ?!Manifest =
## Decode a manifest using `decoder`
##
decoder.decode(data)
func decode*(_: type Manifest, blk: Block): ?!Manifest =
## Decode a manifest using `decoder`
##
@ -230,6 +211,4 @@ func decode*(_: type Manifest, blk: Block): ?!Manifest =
if not ? blk.cid.isManifest:
return failure "Cid not a manifest codec"
Manifest.decode(
blk.data,
? ManifestContainers[$(?blk.cid.contentType().mapFailure)].catch)
Manifest.decode(blk.data)
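
With the container parameter gone, a manifest round-trip reduces to the sketch below (assuming `manifest` is an existing Manifest instance; the dag-pb wire format is now implied by ManifestCodec rather than passed in):

let
  bytes   = manifest.encode().tryGet()        # was: encoder.encode(manifest) via ManifestContainers
  decoded = Manifest.decode(bytes).tryGet()   # was: Manifest.decode(bytes, decoder)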

View File

@ -14,19 +14,14 @@ import pkg/upraises
push: {.upraises: [].}
import pkg/libp2p/protobuf/minprotobuf
import pkg/libp2p
import pkg/questionable
import pkg/libp2p/[cid, multihash, multicodec]
import pkg/questionable/results
import pkg/chronicles
import ../errors
import ../utils
import ../utils/json
import ../units
import ../blocktype
import ./types
export types
type
Manifest* = ref object of RootObj
@ -108,11 +103,10 @@ proc slotRoots*(self: Manifest): seq[Cid] =
############################################################
func isManifest*(cid: Cid): ?!bool =
let res = ?cid.contentType().mapFailure(CodexError)
($(res) in ManifestContainers).success
success (ManifestCodec == ? cid.contentType().mapFailure(CodexError))
func isManifest*(mc: MultiCodec): ?!bool =
($mc in ManifestContainers).success
success mc == ManifestCodec
############################################################
# Various sizes and verification
@ -199,8 +193,8 @@ proc new*(
blockSize: NBytes,
datasetSize: NBytes,
version: CidVersion = CIDv1,
hcodec = multiCodec("sha2-256"),
codec = multiCodec("raw"),
hcodec = Sha256HashCodec,
codec = BlockCodec,
protected = false): Manifest =
T(
@ -240,6 +234,7 @@ proc new*(
## Create an unprotected dataset from an
## erasure protected one
##
Manifest(
treeCid: manifest.originalTreeCid,
datasetSize: manifest.originalDatasetSize,
@ -251,12 +246,11 @@ proc new*(
proc new*(
T: type Manifest,
data: openArray[byte],
decoder = ManifestContainers[$DagPBCodec]): ?!Manifest =
data: openArray[byte]): ?!Manifest =
## Create a manifest instance from given data
##
Manifest.decode(data, decoder)
Manifest.decode(data)
proc new*(
T: type Manifest,

View File

@ -1,28 +0,0 @@
## Nim-Codex
## Copyright (c) 2022 Status Research & Development GmbH
## Licensed under either of
## * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE))
## * MIT license ([LICENSE-MIT](LICENSE-MIT))
## at your option.
## This file may not be copied, modified, or distributed except according to
## those terms.
# This module defines Manifest and all related types
import std/tables
import pkg/libp2p
import ../units
export units
const
DagPBCodec* = multiCodec("dag-pb")
type
ManifestCoderType*[codec: static MultiCodec] = object
DagPBCoder* = ManifestCoderType[multiCodec("dag-pb")]
const
ManifestContainers* = {
$DagPBCodec: DagPBCoder()
}.toTable

View File

@ -31,10 +31,6 @@ export merkletree
logScope:
topics = "codex merkletree"
const
DatasetRootCodec* = multiCodec("codex-root") # TODO: move to blocktype
BlockCodec* = multiCodec("raw") # TODO: fix multicodec to `codex-block` and move to blocktype
type
ByteTreeKey* {.pure.} = enum
KeyNone = 0x0.byte
@ -163,7 +159,7 @@ func compress*(
func init*(
_: type CodexTree,
mcodec: MultiCodec = multiCodec("sha2-256"),
mcodec: MultiCodec = Sha256HashCodec,
leaves: openArray[ByteHash]): ?!CodexTree =
if leaves.len == 0:
@ -211,7 +207,7 @@ func init*(
proc fromNodes*(
_: type CodexTree,
mcodec: MultiCodec = multiCodec("sha2-256"),
mcodec: MultiCodec = Sha256HashCodec,
nodes: openArray[ByteHash],
nleaves: int): ?!CodexTree =
@ -246,7 +242,7 @@ proc fromNodes*(
func init*(
_: type CodexProof,
mcodec: MultiCodec = multiCodec("sha2-256"),
mcodec: MultiCodec = Sha256HashCodec,
index: int,
nleaves: int,
nodes: openArray[ByteHash]): ?!CodexProof =
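
A brief sketch of the updated defaults in use, mirroring the test data further down; the leaf values are illustrative and the import paths are assumptions:

import pkg/stew/byteutils        # toBytes
import pkg/codex/codextypes
import pkg/codex/merkletree

let
  leaves = @[
    "00000000000000000000000000000001".toBytes,
    "00000000000000000000000000000002".toBytes
  ]
  tree  = CodexTree.init(Sha256HashCodec, leaves).tryGet()  # explicit codec matching the new default
  proof = tree.getProof(0).tryGet()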

View File

@ -8,7 +8,6 @@
## those terms.
import std/options
import std/tables
import std/sequtils
import std/strformat
import std/sugar
@ -208,8 +207,8 @@ proc store*(
trace "Storing data"
let
hcodec = multiCodec("sha2-256")
dataCodec = multiCodec("raw")
hcodec = Sha256HashCodec
dataCodec = BlockCodec
chunker = LPStreamChunker.new(stream, chunkSize = blockSize)
var cids: seq[Cid]
@ -270,7 +269,7 @@ proc store*(
newException(CodexError, "Error encoding manifest: " & err.msg))
# Store as a dag-pb block
without manifestBlk =? bt.Block.new(data = data, codec = DagPBCodec):
without manifestBlk =? bt.Block.new(data = data, codec = ManifestCodec):
trace "Unable to init block from manifest data!"
return failure("Unable to init block from manifest data!")
@ -344,7 +343,7 @@ proc requestStorage*(
trace "Unable to encode protected manifest"
return failure(error)
without encodedBlk =? bt.Block.new(data = encodedData, codec = DagPBCodec), error:
without encodedBlk =? bt.Block.new(data = encodedData, codec = ManifestCodec), error:
trace "Unable to create block from encoded manifest"
return failure(error)

View File

@ -2,7 +2,6 @@ import pkg/metrics
import pkg/chronicles
import ../statemachine
import ./errorhandling
import ./error
declareCounter(codex_purchases_cancelled, "codex purchases cancelled")

View File

@ -30,8 +30,6 @@ export blockstore, blockexchange, asyncheapqueue
logScope:
topics = "codex networkstore"
const BlockPrefetchAmount = 5
type
NetworkStore* = ref object of BlockStore
engine*: BlockExcEngine # blockexc decision engine

View File

@ -6,7 +6,7 @@
## at your option.
## This file may not be copied, modified, or distributed except according to
## those terms.
##
##
import std/hashes
import std/strutils
@ -31,6 +31,7 @@ template basicMaths(T: untyped) =
proc `+=` *(x: var T, y: T) {.borrow.}
proc `-=` *(x: var T, y: T) {.borrow.}
proc `hash` *(x: T): Hash {.borrow.}
template divMaths(T: untyped) =
proc `mod` *(x, y: T): T = T(`mod`(x.Natural, y.Natural))
proc `div` *(x, y: T): Natural = `div`(x.Natural, y.Natural)

View File

@ -8,7 +8,7 @@ when getEnv("NIMBUS_BUILD_SYSTEM") == "yes" and
# BEWARE
# In Nim 1.6, config files are evaluated with a working directory
# matching where the Nim command was invocated. This means that we
# must do all file existance checks with full absolute paths:
# must do all file existence checks with full absolute paths:
system.fileExists(currentDir & "nimbus-build-system.paths"):
include "nimbus-build-system.paths"

View File

@ -56,7 +56,7 @@ asyncchecksuite "Block Advertising and Discovery":
(manifest, tree) = makeManifestAndTree(blocks).tryGet()
manifestBlock = bt.Block.new(
manifest.encode().tryGet(), codec = DagPBCodec).tryGet()
manifest.encode().tryGet(), codec = ManifestCodec).tryGet()
(await localStore.putBlock(manifestBlock)).tryGet()

View File

@ -53,7 +53,7 @@ proc example*(_: type BlockExcPeerCtx): BlockExcPeerCtx =
proc example*(_: type Cid): Cid =
bt.Block.example.cid
proc example*(_: type MultiHash, mcodec = multiCodec("sha2-256")): MultiHash =
proc example*(_: type MultiHash, mcodec = Sha256HashCodec): MultiHash =
let bytes = newSeqWith(256, rand(uint8))
MultiHash.digest($mcodec, bytes).tryGet()

View File

@ -25,7 +25,7 @@ checksuite "merkletree - coders":
test "encoding and decoding a tree yields the same tree":
let
tree = CodexTree.init(multiCodec("sha2-256"), data).tryGet()
tree = CodexTree.init(Sha256HashCodec, data).tryGet()
encodedBytes = tree.encode()
decodedTree = CodexTree.decode(encodedBytes).tryGet()
@ -34,7 +34,7 @@ checksuite "merkletree - coders":
test "encoding and decoding a proof yields the same proof":
let
tree = CodexTree.init(multiCodec("sha2-256"), data).tryGet()
tree = CodexTree.init(Sha256HashCodec, data).tryGet()
proof = tree.getProof(4).tryGet()
check:

View File

@ -7,6 +7,7 @@ import pkg/stew/byteutils
import pkg/nimcrypto/sha2
import pkg/libp2p
import pkg/codex/codextypes
import pkg/codex/merkletree
import ./helpers
@ -28,7 +29,7 @@ const
"00000000000000000000000000000009".toBytes,
"00000000000000000000000000000010".toBytes,
]
sha256 = multiCodec("sha2-256")
sha256 = Sha256HashCodec
suite "Test CodexTree":
test "Cannot init tree without any multihash leaves":

View File

@ -110,7 +110,7 @@ proc commonBlockStoreTests*(name: string,
test "listBlocks Manifest":
let
blocks = @[newBlock1, newBlock2, newBlock3]
manifestBlock = Block.new(manifest.encode().tryGet(), codec = DagPBCodec).tryGet()
manifestBlock = Block.new(manifest.encode().tryGet(), codec = ManifestCodec).tryGet()
treeBlock = Block.new(tree.encode()).tryGet()
putHandles = await allFinished(
(@[treeBlock, manifestBlock] & blocks).mapIt( store.putBlock( it ) ))
@ -134,7 +134,7 @@ proc commonBlockStoreTests*(name: string,
test "listBlocks Both":
let
blocks = @[newBlock1, newBlock2, newBlock3]
manifestBlock = Block.new(manifest.encode().tryGet(), codec = DagPBCodec).tryGet()
manifestBlock = Block.new(manifest.encode().tryGet(), codec = ManifestCodec).tryGet()
treeBlock = Block.new(tree.encode()).tryGet()
putHandles = await allFinished(
(@[treeBlock, manifestBlock] & blocks).mapIt( store.putBlock( it ) ))

View File

@ -29,8 +29,8 @@ proc createManifestCid(): ?!Cid =
let
length = rand(4096)
bytes = newSeqWith(length, rand(uint8))
mcodec = multiCodec("sha2-256")
codec = multiCodec("dag-pb")
mcodec = Sha256HashCodec
codec = ManifestCodec
version = CIDv1
let hash = ? MultiHash.digest($mcodec, bytes).mapFailure

View File

@ -115,9 +115,8 @@ asyncchecksuite "Test Node":
manifest = await Manifest.fetch(chunker)
manifestBlock = bt.Block.new(
manifest.encode().tryGet(),
codec = DagPBCodec
).tryGet()
manifest.encode().tryGet(),
codec = ManifestCodec).tryGet()
(await localStore.putBlock(manifestBlock)).tryGet()
@ -253,7 +252,7 @@ asyncchecksuite "Test Node - host contracts":
manifest = await storeDataGetManifest(localStore, chunker)
let manifestBlock = bt.Block.new(
manifest.encode().tryGet(),
codec = DagPBCodec
codec = ManifestCodec
).tryGet()
manifestCid = $(manifestBlock.cid)
(await localStore.putBlock(manifestBlock)).tryGet()
@ -296,7 +295,7 @@ asyncchecksuite "Test Node - host contracts":
return success()
(await onStore(request, 0.u256, onBatch)).tryGet()
check fetchedBytes == 2291520
check fetchedBytes == 2293760
for index in 0..<manifest.blocksCount:
let blk = (await localStore.getBlock(manifest.treeCid, index)).tryGet
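
For reference, the updated figure is consistent with the 64KiB block size introduced in this commit: 2293760 B works out to a 35-block dataset at 35 × 65536 B, while the previous expectation of 2291520 B corresponds to 35 × 65472 B under the old 31 × 64 × 33 block size.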

View File

@ -51,7 +51,7 @@ twonodessuite "Integration tests", debug1 = false, debug2 = false:
check:
space.totalBlocks == 2.uint
space.quotaMaxBytes == 8589934592.uint
space.quotaUsedBytes == 65526.uint
space.quotaUsedBytes == 65592.uint
space.quotaReservedBytes == 12.uint
test "node allows local file downloads":