From 56b80d6f6d05c8c4c4dee76a310e7eda52da8fa0 Mon Sep 17 00:00:00 2001
From: Dmitriy Ryajov
Date: Mon, 23 May 2022 23:24:15 -0600
Subject: [PATCH] Por serialize (#106)

* move por into storage proofs
* use SeekableStream
* adding serialization for por
* remove leftovers
* add empty block support
* add basic por test
* rename block exchange for consistency
* add storageproofstests
* moving timing to storageproofs
* fix imports
* fix imports
* fix imports
* add top level exports
* move delete blocks helper to helpers
* more import/export fixes
* cleanup
* more import fixes
* fix unused warnings
* detect corrupt blocks tests
* add serialization tests
* move init method around
* bump asynctest
* fix CID version
* get rid of warning
* wip: fix CI
* increase CI timeout
---
 .github/workflows/ci.yml                      |   2 +-
 codex/blocktype.nim                           |   2 +-
 codex/contracts/proofs.nim                    |   2 +-
 codex/por/README.md                           |   1 -
 codex/por/benchmark.nim                       |   9 -
 codex/por/example.txt                         |   1 -
 codex/por/testpor.nim                         |  37 --
 codex/proving.nim                             |   4 +-
 codex/rest/api.nim                            |   8 +-
 codex/storageproofs.nim                       |   4 +
 codex/storageproofs/por.nim                   |   4 +
 codex/{ => storageproofs}/por/por.nim         | 363 ++++++++++++------
 codex/storageproofs/por/serialization.nim     |   3 +
 .../storageproofs/por/serialization/por.proto |  33 ++
 .../por/serialization/serialization.nim       | 170 ++++++++
 codex/storageproofs/timing.nim                |   4 +
 .../{por => storageproofs}/timing/periods.nim |   0
 .../{por => storageproofs}/timing/proofs.nim  |   0
 codex/streams/storestream.nim                 |  20 +-
 .../discovery/testdiscoveryengine.nim         |   1 -
 .../blockexchange/engine/testblockexc.nim     |   1 -
 tests/codex/helpers.nim                       |  41 +-
 tests/codex/helpers/mockproofs.nim            |   2 +-
 tests/codex/storageproofs/testpor.nim         | 164 ++++++++
 ...testblockexc.nim => testblockexchange.nim} |   0
 tests/codex/teststorageproofs.nim             |   3 +
 tests/codex/teststorestream.nim               |   1 -
 tests/contracts/testContracts.nim             |   2 +-
 tests/testCodex.nim                           |   3 +-
 vendor/asynctest                              |   2 +-
 30 files changed, 697 insertions(+), 190 deletions(-)
 delete mode 100644 codex/por/README.md
 delete mode 100644 codex/por/benchmark.nim
 delete mode 100644 codex/por/example.txt
 delete mode 100644 codex/por/testpor.nim
 create mode 100644 codex/storageproofs.nim
 create mode 100644 codex/storageproofs/por.nim
 rename codex/{ => storageproofs}/por/por.nim (67%)
 create mode 100644 codex/storageproofs/por/serialization.nim
 create mode 100644 codex/storageproofs/por/serialization/por.proto
 create mode 100644 codex/storageproofs/por/serialization/serialization.nim
 create mode 100644 codex/storageproofs/timing.nim
 rename codex/{por => storageproofs}/timing/periods.nim (100%)
 rename codex/{por => storageproofs}/timing/proofs.nim (100%)
 create mode 100644 tests/codex/storageproofs/testpor.nim
 rename tests/codex/{testblockexc.nim => testblockexchange.nim} (100%)
 create mode 100644 tests/codex/teststorageproofs.nim

diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 34f58414..bc1ca755 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -36,7 +36,7 @@ jobs:
           builder: windows-2019
     name: '${{ matrix.target.os }}-${{ matrix.target.cpu }}-${{ matrix.branch }}'
     runs-on: ${{ matrix.builder }}
-    timeout-minutes: 40
+    timeout-minutes: 60
    steps:
      - name: Checkout nim-codex
        uses: actions/checkout@v2
diff --git a/codex/blocktype.nim b/codex/blocktype.nim
index b324c2a4..f16d155f 100644
--- a/codex/blocktype.nim
+++ b/codex/blocktype.nim
@@ -59,7 +59,7 @@ template EmptyDigests*: untyped =
        .get()
      }.toTable,
      CIDv1: {
-        multiCodec("sha2-256"): EmptyCid[CIDv0]
+        multiCodec("sha2-256"):
EmptyCid[CIDv1] .catch .get()[multiCodec("sha2-256")] .catch diff --git a/codex/contracts/proofs.nim b/codex/contracts/proofs.nim index 0c835e68..4b6a7535 100644 --- a/codex/contracts/proofs.nim +++ b/codex/contracts/proofs.nim @@ -1,5 +1,5 @@ import pkg/ethers -import ../por/timing/proofs +import ../storageproofs/timing/proofs import ./storage export proofs diff --git a/codex/por/README.md b/codex/por/README.md deleted file mode 100644 index 1ee54b5e..00000000 --- a/codex/por/README.md +++ /dev/null @@ -1 +0,0 @@ -Nim implementation of Proof of Storage related schemes diff --git a/codex/por/benchmark.nim b/codex/por/benchmark.nim deleted file mode 100644 index 332b971a..00000000 --- a/codex/por/benchmark.nim +++ /dev/null @@ -1,9 +0,0 @@ -import times, strutils -export strutils.formatFloat - -template benchmark*(benchmarkName: string, code: untyped) = - let t0 = epochTime() - code - let elapsed = epochTime() - t0 - let elapsedStr = elapsed.formatFloat(format = ffDecimal, precision = 3) - echo "CPU Time [", benchmarkName, "] ", elapsedStr, "s" diff --git a/codex/por/example.txt b/codex/por/example.txt deleted file mode 100644 index 5be377b5..00000000 --- a/codex/por/example.txt +++ /dev/null @@ -1 +0,0 @@ -The quick brown fox jumps over the lazy dog! diff --git a/codex/por/testpor.nim b/codex/por/testpor.nim deleted file mode 100644 index 718ee9d7..00000000 --- a/codex/por/testpor.nim +++ /dev/null @@ -1,37 +0,0 @@ -## Nim-POS -## Copyright (c) 2021 Status Research & Development GmbH -## Licensed under either of -## * Apache License, version 2.0, ([LICENSE-APACHE](LICENSE-APACHE)) -## * MIT license ([LICENSE-MIT](LICENSE-MIT)) -## at your option. -## This file may not be copied, modified, or distributed except according to -## those terms. - -import por -import benchmark -import strutils - -const sectorsperblock = 1024.int64 -const querylen = 22 - -proc testbls() : bool = - benchmark "Key generation": - let (spk, ssk) = por.keygen() - - benchmark "Auth generation (s=" & $sectorsperblock & ")": - let (tau, authenticators) = por.setup(ssk, sectorsperblock, "example.txt") - #echo "Auth: ", authenticators - - benchmark "Generating challenge (q=" & $querylen & ")": - let q = por.generateQuery(tau, spk, querylen) - #echo "Generated!" #, " q:", q - - benchmark "Issuing proof": - let (mu, sigma) = por.generateProof(q, authenticators, spk, sectorsperblock, "example.txt") - #echo "Issued!" 
#, " mu:", mu, " sigma:", sigma - - benchmark "Verifying proof": - result = por.verifyProof(tau, q, mu, sigma, spk) - echo "Result: ", result - -let r = testbls() diff --git a/codex/proving.nim b/codex/proving.nim index 26107296..6c65b775 100644 --- a/codex/proving.nim +++ b/codex/proving.nim @@ -2,11 +2,11 @@ import std/sets import pkg/upraises import pkg/questionable import pkg/chronicles -import ./por/timing/proofs +import ./storageproofs import ./clock export sets -export proofs +export storageproofs type Proving* = ref object diff --git a/codex/rest/api.nim b/codex/rest/api.nim index 22a27c1a..4b6efac9 100644 --- a/codex/rest/api.nim +++ b/codex/rest/api.nim @@ -47,10 +47,10 @@ proc decodeString(T: type Cid, value: string): Result[Cid, cstring] = .init(value) .mapErr do(e: CidError) -> cstring: case e - of CidError.Incorrect: "Incorrect Cid" - of CidError.Unsupported: "Unsupported Cid" - of CidError.Overrun: "Overrun Cid" - else: "Error parsing Cid" + of CidError.Incorrect: "Incorrect Cid".cstring + of CidError.Unsupported: "Unsupported Cid".cstring + of CidError.Overrun: "Overrun Cid".cstring + else: "Error parsing Cid".cstring proc encodeString(peerId: PeerID): Result[string, cstring] = ok($peerId) diff --git a/codex/storageproofs.nim b/codex/storageproofs.nim new file mode 100644 index 00000000..e23ecfbf --- /dev/null +++ b/codex/storageproofs.nim @@ -0,0 +1,4 @@ +import ./storageproofs/por +import ./storageproofs/timing + +export por, timing diff --git a/codex/storageproofs/por.nim b/codex/storageproofs/por.nim new file mode 100644 index 00000000..79ada5dd --- /dev/null +++ b/codex/storageproofs/por.nim @@ -0,0 +1,4 @@ +import ./por/serialization +import ./por/por + +export por, serialization diff --git a/codex/por/por.nim b/codex/storageproofs/por/por.nim similarity index 67% rename from codex/por/por.nim rename to codex/storageproofs/por/por.nim index d0d1f7c4..03207e32 100644 --- a/codex/por/por.nim +++ b/codex/storageproofs/por/por.nim @@ -39,7 +39,7 @@ # Our implementation uses additive cyclic groups instead of the multiplicative # cyclic group in the paper, thus changing the name of the group operation as in # blscurve and blst. Thus, point multiplication becomes point addition, and scalar -# exponentiation becomes scalar multiplication. +# exponentiation becomes scalar multiplicaiton. # # Number of operations: # The following table summarizes the number of operations in different phases @@ -77,77 +77,108 @@ # q * (8 + 48) bytes # The size of the proof is instead # s * 32 + 48 bytes +import std/endians -import blscurve -import blscurve/blst/blst_abi -import ../rng -import endians +import pkg/chronos +import pkg/blscurve +import pkg/blscurve/blst/blst_abi + +import ../../rng +import ../../streams # sector size in bytes. 
Must be smaller than the subgroup order r # which is 255 bits long for BLS12-381 -const bytespersector = 31 +const + BytesPerSector* = 31 -# length in bytes of the unique (random) name -const namelen = 512 + # length in bytes of the unique (random) name + Namelen = 512 type # a single sector - ZChar = array[bytespersector, byte] + ZChar* = array[BytesPerSector, byte] # secret key combining the metadata signing key and the POR generation key - SecretKey = object - signkey: blscurve.SecretKey - key: blst_scalar + SecretKey* = object + signkey*: blscurve.SecretKey + key*: blst_scalar # public key combining the metadata signing key and the POR validation key - PublicKey = object - signkey: blscurve.PublicKey - key: blst_p2 + PublicKey* = object + signkey*: blscurve.PublicKey + key*: blst_p2 # POR metadata (called "file tag t_0" in the original paper) - TauZero = object - name: array[namelen, byte] - n: int64 - u: seq[blst_p1] + TauZero* = object + name*: array[Namelen, byte] + n*: int64 + u*: seq[blst_p1] # signed POR metadata (called "signed file tag t" in the original paper) - Tau = object - t: TauZero - signature: array[96, byte] + Tau* = object + t*: TauZero + signature*: array[96, byte] + + Proof* = object + mu*: seq[blst_scalar] + sigma*: blst_p1 # PoR query element - QElement = object - I: int64 - V: blst_scalar + QElement* = object + I*: int64 + V*: blst_scalar + + PoR* = object + ssk*: SecretKey + spk*: PublicKey + tau*: Tau + authenticators*: seq[blst_p1] proc fromBytesBE(a: array[32, byte]): blst_scalar = ## Convert data to blst native form + ## + blst_scalar_from_bendian(result, a) doAssert(blst_scalar_fr_check(result).bool) proc fromBytesBE(a: openArray[byte]): blst_scalar = ## Convert data to blst native form + ## + var b: array[32, byte] doAssert(a.len <= b.len) + let d = b.len - a.len - for i in 0 ..< a.len: + for i in 0.. 
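# A minimal stand-alone sketch of the layout and size arithmetic described
# above (the helper names here are illustrative only, not code from this
# patch): a dataset is treated as blocks of `spb` sectors of 31 bytes each,
# the sector read seeks to `(blockid * spb + sectorid) * 31`, the block count
# is a ceiling division over the stream size (see `sectorsCount` further
# down), a query costs `q * (8 + 48)` bytes and a proof `s * 32 + 48` bytes.

func sectorOffset(blockid, sectorid, spb: int64): int64 =
  ## Absolute byte position of a sector, mirroring the `stream.setPos` call below.
  (blockid * spb + sectorid) * 31

func blocksCount(size, spb: int64): int64 =
  ## Ceiling division used to compute the number of blocks in a stream.
  ((size - 1) div (spb * 31)) + 1

func querySizeBytes(q: int64): int64 =
  ## Query size per the header above: q * (8 + 48) bytes.
  q * (8 + 48)

func proofSizeBytes(s: int64): int64 =
  ## Proof size per the header above: s * 32 + 48 bytes.
  s * 32 + 48

assert sectorOffset(3, 2, 4) == 434    # (3 * 4 + 2) * 31
assert blocksCount(1000, 4) == 9       # 1000 bytes over 124-byte blocks, rounded up
assert querySizeBytes(22) == 1232
assert proofSizeBytes(31) == 1040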
postion - f.setFilePos((blockid * spb + sectorid) * sizeof(result)) - let r = f.readBytes(result, 0, sizeof(result)) + ## + + var res: ZChar + stream.setPos(((blockid * spb + sectorid) * ZChar.len).int) + discard await stream.readOnce(addr res[0], ZChar.len) + return res proc rndScalar(): blst_scalar = ## Generate random scalar within the subroup order r - var scal{.noInit.}: array[32, byte] - var scalar{.noInit.}: blst_scalar + ## + + var scal {.noInit.}: array[32, byte] + var scalar {.noInit.}: blst_scalar while true: for val in scal.mitems: val = byte Rng.instance.rand(0xFF) + scalar.blst_scalar_from_bendian(scal) if blst_scalar_fr_check(scalar).bool: break @@ -156,55 +187,77 @@ proc rndScalar(): blst_scalar = proc rndP2(): (blst_p2, blst_scalar) = ## Generate random point on G2 - var x{.noInit.}: blst_p2 + ## + + var + x {.noInit.}: blst_p2 x.blst_p2_from_affine(BLS12_381_G2) # init from generator - let scalar = rndScalar() + + let + scalar = rndScalar() x.blst_p2_mult(x, scalar, 255) + return (x, scalar) proc rndP1(): (blst_p1, blst_scalar) = ## Generate random point on G1 - var x{.noInit.}: blst_p1 + var + x {.noInit.}: blst_p1 x.blst_p1_from_affine(BLS12_381_G1) # init from generator - let scalar = rndScalar() + + let + scalar = rndScalar() x.blst_p1_mult(x, scalar, 255) + return (x, scalar) -proc posKeygen(): (blst_p2, blst_scalar) = +template posKeygen(): (blst_p2, blst_scalar) = ## Generate POS key pair + ## + rndP2() -proc keygen*(): (PublicKey, SecretKey) = +proc keyGen*(): (PublicKey, SecretKey) = ## Generate key pair for signing metadata and for POS tags - var pk: PublicKey - var sk: SecretKey - var ikm: array[32, byte] + ## + + var + pk: PublicKey + sk: SecretKey + ikm: array[32, byte] for b in ikm.mitems: b = byte Rng.instance.rand(0xFF) + doAssert ikm.keyGen(pk.signkey, sk.signkey) (pk.key, sk.key) = posKeygen() return (pk, sk) -proc split(f: File, s: int64): int64 = +proc sectorsCount(stream: SeekableStream, s: int64): int64 = ## Calculate number of blocks for a file - let size = f.getFileSize() - let n = ((size - 1) div (s * sizeof(ZChar))) + 1 - echo "File size=", size, " bytes", - ", blocks=", n, - ", sectors/block=", $s, - ", sectorsize=", $sizeof(ZChar), " bytes" + ## + + let + size = stream.size() + n = ((size - 1) div (s * sizeof(ZChar))) + 1 + # debugEcho "File size=", size, " bytes", + # ", blocks=", n, + # ", sectors/block=", $s, + # ", sectorsize=", $sizeof(ZChar), " bytes" return n proc hashToG1[T: byte|char](msg: openArray[T]): blst_p1 = - ## Hash to curve with Codex specific domain separation - const dst = "CODEX-PROOF-OF-CONCEPT" + ## Hash to curve with Dagger specific domain separation + ## + + const dst = "DAGGER-PROOF-OF-CONCEPT" result.blst_hash_to_g1(msg, dst, aug = "") -proc hashNameI(name: array[namelen, byte], i: int64): blst_p1 = - ## Calculate unique filname and block index based hash +proc hashNameI(name: array[Namelen, byte], i: int64): blst_p1 = + ## Calculate unique filename and block index based hash + ## # # naive implementation, hashing a long string representation # # such as "[255, 242, 23]1" @@ -216,20 +269,33 @@ proc hashNameI(name: array[namelen, byte], i: int64): blst_p1 = bigEndian64(addr(namei[sizeof(name)]), unsafeAddr(i)) return hashToG1(namei) -proc generateAuthenticatorNaive(i: int64, s: int64, t: TauZero, f: File, ssk: SecretKey): blst_p1 = +proc generateAuthenticatorNaive( + stream: SeekableStream, + ssk: SecretKey, + i: int64, + s: int64, + t: TauZero): Future[blst_p1] {.async.} = ## Naive implementation of authenticator as 
in the S&W paper. ## With the paper's multiplicative notation: ## \sigmai=\(H(file||i)\cdot\prod{j=0}^{s-1}{uj^{m[i][j]}})^{\alpha} + ## + var sum: blst_p1 - for j in 0 ..< s: + for j in 0.. G_T - var aa: blst_p1_affine - var bb: blst_p2_affine + ## + + var + aa: blst_p1_affine + bb: blst_p2_affine + l: blst_fp12 + blst_p1_to_affine(aa, a) blst_p2_to_affine(bb, b) - var l: blst_fp12 + blst_miller_loop(l, bb, aa) blst_final_exp(result, l) @@ -346,6 +415,8 @@ proc verifyPairingsNaive(a1: blst_p1, a2: blst_p2, b1: blst_p1, b2: blst_p2) : b proc verifyPairingsNeg(a1: blst_p1, a2: blst_p2, b1: blst_p1, b2: blst_p2) : bool = ## Faster pairing verification using 2 miller loops but ony one final exponentiation ## based on https://github.com/benjaminion/c-kzg/blob/main/src/bls12_381.c + ## + var loop0, loop1, gt_point: blst_fp12 aa1, bb1: blst_p1_affine @@ -369,29 +440,37 @@ proc verifyPairingsNeg(a1: blst_p1, a2: blst_p2, b1: blst_p1, b2: blst_p2) : boo proc verifyPairings(a1: blst_p1, a2: blst_p2, b1: blst_p1, b2: blst_p2) : bool = ## Wrapper to select verify pairings implementation + ## + verifyPairingsNaive(a1, a2, b1, b2) #verifyPairingsNeg(a1, a2, b1, b2) -proc verifyProof*(tau: Tau, q: openArray[QElement], mus: openArray[blst_scalar], sigma: blst_p1, spk: PublicKey): bool = +proc verifyProof*( + self: PoR, + q: seq[QElement], + mus: seq[blst_scalar], + sigma: blst_p1): bool = ## Verify a BLS proof given a query + ## # verify signature on Tau - var signature: Signature - if not signature.fromBytes(tau.signature): + var signature: blscurve.Signature + if not signature.fromBytes(self.tau.signature): return false - if not verify(spk.signkey, $tau.t, signature): + + if not verify(self.spk.signkey, $self.tau.t, signature): return false var first: blst_p1 - for qelem in q : + for qelem in q: var prod: blst_p1 - prod.blst_p1_mult(hashNameI(tau.t.name, qelem.I), qelem.V, 255) + prod.blst_p1_mult(hashNameI(self.tau.t.name, qelem.I), qelem.V, 255) first.blst_p1_add_or_double(first, prod) doAssert(blst_p1_on_curve(first).bool) - let us = tau.t.u + let us = self.tau.t.u var second: blst_p1 - for j in 0 ..< len(us) : + for j in 0..= self.manifest.blockSize: self.offset mod self.manifest.blockSize @@ -75,7 +77,15 @@ method readOnce*( else: min(nbytes - read, self.manifest.blockSize) - copyMem(pbytes.offset(read), unsafeAddr blk.data[blockOffset], readBytes) + trace "Reading bytes from store stream", pos, cid = blk.cid, bytes = readBytes, blockOffset = blockOffset + copyMem( + pbytes.offset(read), + if blk.isEmpty: + self.emptyBlock[blockOffset].addr + else: + blk.data[blockOffset].addr, + readBytes) + self.offset += readBytes read += readBytes diff --git a/tests/codex/blockexchange/discovery/testdiscoveryengine.nim b/tests/codex/blockexchange/discovery/testdiscoveryengine.nim index 3531206a..e6dde554 100644 --- a/tests/codex/blockexchange/discovery/testdiscoveryengine.nim +++ b/tests/codex/blockexchange/discovery/testdiscoveryengine.nim @@ -5,7 +5,6 @@ import std/tables import pkg/asynctest import pkg/chronos -import pkg/chronicles import pkg/libp2p import pkg/codex/rng diff --git a/tests/codex/blockexchange/engine/testblockexc.nim b/tests/codex/blockexchange/engine/testblockexc.nim index 6c2d6557..f898bb79 100644 --- a/tests/codex/blockexchange/engine/testblockexc.nim +++ b/tests/codex/blockexchange/engine/testblockexc.nim @@ -16,7 +16,6 @@ import pkg/codex/discovery import pkg/codex/blocktype as bt import ../../helpers -import ../../examples suite "NetworkStore engine - 2 nodes": let diff --git 
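# For reference, the acceptance test carried out by `verifyProof` corresponds,
# in the additive notation used throughout this file, to the public
# verification equation of the Shacham-Waters scheme (a sketch of the math,
# not code from this patch): after checking the signature on tau, accept a
# proof (mu, sigma) for query q iff
#
#   e(\sigma, g_2) == e(\sum_{(i, v_i) \in q}{v_i \cdot H(name||i)} + \sum_{j=0}^{s-1}{\mu_j \cdot u_j}, spk.key)
#
# where g_2 is the G2 generator, H is `hashNameI`, u_j are the random G1
# points carried in tau, and `spk.key` is the PoS component of the public key
# (a G2 point). The two sums correspond to the `first` and `second`
# accumulators built in `verifyProof` before the pairing comparison.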
a/tests/codex/helpers.nim b/tests/codex/helpers.nim index cf68c45e..c9931608 100644 --- a/tests/codex/helpers.nim +++ b/tests/codex/helpers.nim @@ -1,6 +1,10 @@ +import pkg/chronos import pkg/libp2p import pkg/libp2p/varint -import pkg/codex/blocktype +import pkg/codex/blocktype as bt +import pkg/codex/stores +import pkg/codex/manifest +import pkg/codex/rng import ./helpers/nodeutils import ./helpers/randomchunker @@ -10,8 +14,8 @@ export randomchunker, nodeutils # NOTE: The meaning of equality for blocks # is changed here, because blocks are now `ref` # types. This is only in tests!!! -func `==`*(a, b: Block): bool = - (a.cid == b.cid) and (a.data == b.data) +func `==`*(a, b: bt.Block): bool = + (a.cid == b.cid) and (a.data == b. data) proc lenPrefix*(msg: openArray[byte]): seq[byte] = ## Write `msg` with a varint-encoded length prefix @@ -23,3 +27,34 @@ proc lenPrefix*(msg: openArray[byte]): seq[byte] = buf[vbytes.len..= blks: + break + + var i = -1 + if (i = Rng.instance.rand(manifest.len - 1); pos.find(i) >= 0): + continue + + pos.add(i) + var + blk = (await store.getBlock(manifest[i])).tryGet() + bytePos: seq[int] + + while true: + if bytePos.len > bytes: + break + + var ii = -1 + if (ii = Rng.instance.rand(blk.data.len - 1); bytePos.find(ii) >= 0): + continue + + bytePos.add(ii) + blk.data[ii] = byte 0 + + return pos diff --git a/tests/codex/helpers/mockproofs.nim b/tests/codex/helpers/mockproofs.nim index 90dcc6bb..605775fb 100644 --- a/tests/codex/helpers/mockproofs.nim +++ b/tests/codex/helpers/mockproofs.nim @@ -2,7 +2,7 @@ import std/sets import std/tables import std/sequtils import pkg/upraises -import pkg/codex/por/timing/proofs +import pkg/codex/storageproofs type MockProofs* = ref object of Proofs diff --git a/tests/codex/storageproofs/testpor.nim b/tests/codex/storageproofs/testpor.nim new file mode 100644 index 00000000..81ca3d09 --- /dev/null +++ b/tests/codex/storageproofs/testpor.nim @@ -0,0 +1,164 @@ +import pkg/chronos +import pkg/asynctest + +import pkg/blscurve/blst/blst_abi + +import pkg/codex/streams +import pkg/codex/storageproofs as st +import pkg/codex/stores +import pkg/codex/manifest +import pkg/codex/chunker +import pkg/codex/rng +import pkg/codex/blocktype as bt + + +import ../helpers + +const + SectorSize = 31 + SectorsPerBlock = BlockSize div SectorSize + DataSetSize = BlockSize * 100 + +suite "BLS PoR": + var + chunker: RandomChunker + manifest: Manifest + store: BlockStore + ssk: st.SecretKey + spk: st.PublicKey + + setup: + chunker = RandomChunker.new(Rng.instance(), size = DataSetSize, chunkSize = BlockSize) + store = CacheStore.new(cacheSize = DataSetSize, chunkSize = BlockSize) + manifest = Manifest.new(blockSize = BlockSize).tryGet() + (spk, ssk) = st.keyGen() + + while ( + let chunk = await chunker.getBytes(); + chunk.len > 0): + + let + blk = bt.Block.new(chunk).tryGet() + + manifest.add(blk.cid) + if not (await store.putBlock(blk)): + raise newException(CatchableError, "Unable to store block " & $blk.cid) + + test "Test PoR without corruption": + let + por = await PoR.init( + StoreStream.new(store, manifest), + ssk, + spk, + BlockSize) + q = generateQuery(por.tau, 22) + proof = await generateProof( + StoreStream.new(store, manifest), + q, + por.authenticators, + SectorsPerBlock) + + check por.verifyProof(q, proof.mu, proof.sigma) + + test "Test PoR with corruption - query: 22, corrupted blocks: 300, bytes: 10": + let + por = await PoR.init( + StoreStream.new(store, manifest), + ssk, + spk, + BlockSize) + pos = await store.corruptBlocks(manifest, 
30, 10) + q = generateQuery(por.tau, 22) + proof = await generateProof( + StoreStream.new(store, manifest), + q, + por.authenticators, + SectorsPerBlock) + + check pos.len == 30 + check not por.verifyProof(q, proof.mu, proof.sigma) + +suite "Test Serialization": + var + chunker: RandomChunker + manifest: Manifest + store: BlockStore + ssk: st.SecretKey + spk: st.PublicKey + por: PoR + q: seq[QElement] + proof: Proof + + setupAll: + chunker = RandomChunker.new(Rng.instance(), size = DataSetSize, chunkSize = BlockSize) + store = CacheStore.new(cacheSize = DataSetSize, chunkSize = BlockSize) + manifest = Manifest.new(blockSize = BlockSize).tryGet() + + while ( + let chunk = await chunker.getBytes(); + chunk.len > 0): + + let + blk = bt.Block.new(chunk).tryGet() + + manifest.add(blk.cid) + if not (await store.putBlock(blk)): + raise newException(CatchableError, "Unable to store block " & $blk.cid) + + (spk, ssk) = st.keyGen() + por = await PoR.init( + StoreStream.new(store, manifest), + ssk, + spk, + BlockSize) + q = generateQuery(por.tau, 22) + proof = await generateProof( + StoreStream.new(store, manifest), + q, + por.authenticators, + SectorsPerBlock) + + test "Serialize Public Key": + var + spkMessage = spk.toMessage() + + check: + spk.signkey == spkMessage.fromMessage().tryGet().signkey + spk.key.blst_p2_is_equal(spkMessage.fromMessage().tryGet().key).bool + + test "Serialize TauZero": + var + tauZeroMessage = por.tau.t.toMessage() + tauZero = tauZeroMessage.fromMessage().tryGet() + + check: + por.tau.t.name == tauZero.name + por.tau.t.n == tauZero.n + + for i in 0..
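# Condensed sketch of the flow the PoR tests above exercise end to end. It is
# illustrative only: the proc name `porRoundTrip` is made up here, it assumes
# a BlockStore already populated from a Manifest (as in the test setup) and
# that the BlockSize constant comes from codex/blocktype, and error handling
# is omitted. The serialization tests round-trip `spk` and `tau` through the
# new protobuf `toMessage`/`fromMessage` helpers in the same fashion.

import pkg/chronos
import pkg/codex/streams
import pkg/codex/storageproofs
import pkg/codex/stores
import pkg/codex/manifest
import pkg/codex/blocktype

const
  SectorSize = 31                            # same as BytesPerSector in por.nim
  SectorsPerBlock = BlockSize div SectorSize

proc porRoundTrip(store: BlockStore, manifest: Manifest): Future[bool] {.async.} =
  ## Tag the dataset, issue a 22-element challenge, prove it and verify it.
  let
    (spk, ssk) = keyGen()                    # metadata signing + PoS key pairs
    por = await PoR.init(                    # computes tau and the block authenticators
      StoreStream.new(store, manifest), ssk, spk, BlockSize)
    q = generateQuery(por.tau, 22)           # random (index, scalar) challenge pairs
    proof = await generateProof(             # mu and sigma over the challenged blocks
      StoreStream.new(store, manifest), q, por.authenticators, SectorsPerBlock)
  return por.verifyProof(q, proof.mu, proof.sigma)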