Merge branch 'goldilocks'

This commit is contained in:
Balazs Komuves 2024-11-15 18:33:39 +01:00
commit ac8d366752
No known key found for this signature in database
GPG Key ID: F63B7AEF18435562
30 changed files with 1098 additions and 388 deletions

View File

@ -2,4 +2,4 @@
testmain
cli
*.json
json/
json_tmp/

View File

@ -29,6 +29,8 @@ use real data.
-K, --ncells = <ncells> : number of cells inside this slot (eg. 1024; must be a power of two)
-o, --output = <input.json> : the JSON file into which we write the proof input
-C, --circom = <main.circom> : the circom main component to create with these parameters
-F, --field = <field> : the underlying field: "bn254" or "goldilocks"
-H, --hash = <hash> : the hash function to use: "poseidon2" or "monolith"
You can generate only the `.json` or only the `.circom` file, or both at the same
time.
@ -39,4 +41,4 @@ NOTE: When using actual files for the slot data (which is untested :), you will
need _all_ slots to be able to compute the dataset root. The convention used is
that you specify the base name of the file, say `"slotdata"`; and the individual
slot file names are derived from this to be, for example, `"slotdata5.dat"` for
the slot with index 5 (note: indexing starts from 0).
the slot with index 5 (note: indexing starts from 0).

View File

@ -4,8 +4,11 @@ author = "Balazs Komuves"
description = "reference implementation for generating the proof inputs"
license = "MIT or Apache-2.0"
srcDir = "src"
bin = @["cli","testmain"]
bin = @["cli"]
#bin = @["cli","testmain"]
requires "nim >= 1.6.0"
requires "https://github.com/mratsim/constantine#ab6fa6ae1bbbd1b10071a92ec209b381b5d82511"
requires "https://github.com/codex-storage/nim-poseidon2#8a54c69032a741160bbc097d009e45a8b5e4d718"
requires "https://github.com/codex-storage/nim-goldilocks-hash#bd5b805b80b6005a3e5de412dec15783284d205d"
#requires "goldilocks_hash == 0.0.1"

View File

@ -5,18 +5,28 @@ import std/sequtils
#import poseidon2/types
import poseidon2/io
import poseidon2/sponge
import poseidon2/merkle
#import poseidon2/merkle
import types
import merkle
import ../types
import ../types/bn254
#import ../merkle
import ../merkle/bn254
#-------------------------------------------------------------------------------
func hashCellOpen( globcfg: GlobalConfig, cellData: openArray[byte] ): Hash =
func merkleTree*( hashcfg: HashConfig, what: openarray[Hash]): MerkleTree[Hash] =
assert( hashcfg.combo == BN254_Poseidon2 )
return merkleTreeBN254( what )
#-------------------------------------------------------------------------------
func hashCellOpen( hashcfg: HashConfig, globcfg: GlobalConfig, cellData: openArray[byte] ): Hash =
assert( hashcfg.field == BN254 )
assert( hashcfg.hashFun == Poseidon2 )
assert( cellData.len == globcfg.cellSize , ("cells are expected to be exactly " & $globcfg.cellSize & " bytes") )
return Sponge.digest( cellData, rate=2 )
func hashCell*( globcfg: GlobalConfig, cellData: Cell): Hash = hashCellOpen(globcfg, cellData)
func hashCell*(hashcfg: HashConfig, globcfg: GlobalConfig, cellData: Cell): Hash = hashCellOpen(hashcfg, globcfg, cellData)
#-------------------------------------------------------------------------------
@ -36,23 +46,24 @@ func splitBlockIntoCells( globcfg: GlobalConfig, blockData: openArray[byte] ): s
# returns the special hash of a network block (this is a Merkle root built on the
# top of the hashes of the 32 cells inside the block)
func hashNetworkBlockOpen( globcfg: GlobalConfig, blockData: openArray[byte] ): Hash =
func hashNetworkBlockOpen( hashcfg: HashConfig, globcfg: GlobalConfig, blockData: openArray[byte] ): Hash =
let cells = splitBlockIntoCells(globcfg, blockData)
let leaves = collect( newSeq , (for i in 0..<cellsPerBlock(globcfg): hashCell(globcfg, cells[i]) ))
return Merkle.digest(leaves) # merkleRoot(leaves)
let leaves = collect( newSeq , (for i in 0..<cellsPerBlock(globcfg): hashCell(hashcfg, globcfg, cells[i]) ))
return merkleDigestBN254(leaves) # Merkle.digest(leaves) # merkleRoot(leaves)
func hashNetworkBlock*(globcfg: GlobalConfig, blockData: Block): Hash =
hashNetworkBlockOpen(globcfg, blockData)
func hashNetworkBlock*( hashcfg: HashConfig, globcfg: GlobalConfig, blockData: Block): Hash =
hashNetworkBlockOpen(hashcfg, globcfg, blockData)
#-------------------------------------------------------------------------------
# returns the mini Merkle tree built on the 32 cells inside a network block
func networkBlockTreeOpen( globcfg: GlobalConfig, blockData: openArray[byte] ): MerkleTree =
let cells = splitBlockIntoCells( globcfg, blockData)
let leaves = collect( newSeq , (for i in 0..<cellsPerBlock(globcfg): hashCell( globcfg, cells[i]) ))
return merkleTree(leaves)
func networkBlockTreeOpen( hashcfg: HashConfig, globcfg: GlobalConfig, blockData: openArray[byte] ): MerkleTree[Hash] =
assert( hashcfg.field == BN254 )
let cells = splitBlockIntoCells( globcfg, blockData )
let leaves = collect( newSeq , (for i in 0..<cellsPerBlock(globcfg): hashCell( hashcfg, globcfg, cells[i]) ))
return merkleTree(hashcfg, leaves)
func networkBlockTree*( globcfg: GlobalConfig, blockData: Block): MerkleTree =
networkBlockTreeOpen(globcfg, blockData)
func networkBlockTree*( hashcfg: HashConfig, globcfg: GlobalConfig, blockData: Block): MerkleTree[Hash] =
networkBlockTreeOpen(hashcfg, globcfg, blockData)
#-------------------------------------------------------------------------------

View File

@ -0,0 +1,76 @@
import sugar
import std/sequtils
import goldilocks_hash/types
#import goldilocks_hash/poseidon2/sponge
#import goldilocks_hash/monolith/sponge
import ../types
import ../types/goldilocks
#import ../merkle
import ../merkle/goldilocks/poseidon2
import ../merkle/goldilocks/monolith
#-------------------------------------------------------------------------------
func merkleTree*( hashcfg: HashConfig, what: openarray[Digest] ): MerkleTree[Digest] =
  ## Builds a Merkle tree over `what` with the Goldilocks hash function
  ## selected by `hashcfg.hashFun` (Poseidon2 or Monolith).
  assert( hashcfg.field == Goldilocks )
  result =
    case hashcfg.hashFun
    of Poseidon2: poseidon2.merkleTreeGoldilocksPoseidon2(what)
    of Monolith:  monolith.merkleTreeGoldilocksMonolith(what)
#-------------------------------------------------------------------------------
func hashCellOpen( hashcfg: HashConfig, globcfg: GlobalConfig, cellData: openArray[byte] ): Digest =
  ## Hashes the raw bytes of a single cell with the configured Goldilocks
  ## hash. The input must be exactly `globcfg.cellSize` bytes long.
  assert( hashcfg.field == Goldilocks )
  assert( cellData.len == globcfg.cellSize , ("cells are expected to be exactly " & $globcfg.cellSize & " bytes") )
  result =
    case hashcfg.hashFun
    of Poseidon2: digestBytesPoseidon2(cellData)
    of Monolith:  digestBytesMonolith(cellData)

func hashCell*(hashcfg: HashConfig, globcfg: GlobalConfig, cellData: Cell): Digest =
  ## Public wrapper of `hashCellOpen` taking a `Cell`.
  hashCellOpen(hashcfg, globcfg, cellData)
#-------------------------------------------------------------------------------
func splitBlockIntoCells( globcfg: GlobalConfig, blockData: openArray[byte] ): seq[Cell] =
  ## Splits a network block (exactly `globcfg.blockSize` bytes) into
  ## `cellsPerBlock(globcfg)` consecutive cells of `globcfg.cellSize` bytes.
  # fixed: the assert message previously lacked a space after "exactly"
  # (it rendered e.g. "exactly65536 bytes")
  assert( blockData.len == globcfg.blockSize , ("network blocks are expected to be exactly " & $globcfg.blockSize & " bytes" ) )
  var cells : seq[seq[byte]] = newSeq[seq[byte]]( cellsPerBlock(globcfg) )
  let start = low(blockData)   # always 0 for an openArray; kept for clarity
  # NOTE(review): removed a `leaves: seq[Digest]` buffer that was allocated
  # here but never read or written (dead code).
  for i in 0..<cellsPerBlock(globcfg):
    let a = start + i * globcfg.cellSize
    let b = start + (i+1) * globcfg.cellSize
    cells[i] = blockData[a..<b].toSeq()
  return cells
# returns the special hash of a network block (this is a Merkle root built on the
# top of the hashes of the 32 cells inside the block)
func hashNetworkBlockOpen( hashcfg: HashConfig, globcfg: GlobalConfig, blockData: openArray[byte] ): Digest =
  ## Hashes each cell of the block, then returns the Merkle root of those
  ## cell hashes, using the hash function selected by `hashcfg`.
  let cells = splitBlockIntoCells(globcfg, blockData)
  var leaves = newSeq[Digest]( cellsPerBlock(globcfg) )
  for i in 0 ..< cellsPerBlock(globcfg):
    leaves[i] = hashCell(hashcfg, globcfg, cells[i])
  case hashcfg.hashFun
  of Poseidon2: return merkleDigestPoseidon2(leaves)
  of Monolith:  return merkleDigestMonolith(leaves)

func hashNetworkBlock*( hashcfg: HashConfig, globcfg: GlobalConfig, blockData: Block): Digest =
  ## Public wrapper of `hashNetworkBlockOpen` taking a `Block`.
  hashNetworkBlockOpen(hashcfg, globcfg, blockData)
#-------------------------------------------------------------------------------
# returns the mini Merkle tree built on the 32 cells inside a network block
func networkBlockTreeOpen( hashcfg: HashConfig, globcfg: GlobalConfig, blockData: openArray[byte] ): MerkleTree[Digest] =
  ## Builds the per-block Merkle tree whose leaves are the cell hashes.
  assert( hashcfg.field == Goldilocks )
  let cells = splitBlockIntoCells(globcfg, blockData)
  var leaves: seq[Digest] = @[]
  for cell in cells:
    leaves.add( hashCell(hashcfg, globcfg, cell) )
  return merkleTree(hashcfg, leaves)

func networkBlockTree*( hashcfg: HashConfig, globcfg: GlobalConfig, blockData: Block): MerkleTree[Digest] =
  ## Public wrapper of `networkBlockTreeOpen` taking a `Block`.
  networkBlockTreeOpen(hashcfg, globcfg, blockData)
#-------------------------------------------------------------------------------

View File

@ -7,25 +7,35 @@ import std/sequtils
import std/os
import std/parseopt
import constantine/math/arithmetic
import poseidon2/types
import poseidon2/merkle
import poseidon2/io
# import constantine/math/arithmetic
#
# import poseidon2/types
# import poseidon2/merkle
# import poseidon2/io
import types
import blocks
import slot
import dataset
import sample
import merkle
import gen_input
import json
import types/bn254
import types/goldilocks
#import blocks/bn254
#import blocks/goldilocks
#import slot
#import dataset
#import sample
#import sample/bn254
#import sample/goldilocks
#import merkle
#import merkle/bn254
#import merkle/goldilocks
import gen_input/bn254
import gen_input/goldilocks
import json/bn254
import json/goldilocks
import misc
#-------------------------------------------------------------------------------
type FullConfig = object
hashCfg: HashConfig
globCfg: GlobalConfig
dsetCfg: DataSetConfig
slotIndex: int
@ -34,11 +44,17 @@ type FullConfig = object
circomFile: string
verbose: bool
const defHashCfg =
HashConfig( field: Goldilocks # BN254
, hashFun: Poseidon2
, combo: Goldilocks_Poseidon2
)
const defGlobCfg =
GlobalConfig( maxDepth: 32
, maxLog2NSlots: 8
, cellSize: 2048
, blockSize: 65536
, blockSize: 65536
)
const defDSetCfg =
@ -49,7 +65,8 @@ const defDSetCfg =
)
const defFullCfg =
FullConfig( globCfg: defGlobCfg
FullConfig( hashCfg: defHashCfg
, globCfg: defGlobCfg
, dsetCfg: defDSetCfg
, slotIndex: 0
, outFile: ""
@ -81,6 +98,8 @@ proc printHelp() =
echo " -K, --ncells = <ncells> : number of cells inside this slot (eg. 1024; must be a power of two)"
echo " -o, --output = <input.json> : the JSON file into which we write the proof input"
echo " -C, --circom = <main.circom> : the circom main component to create with these parameters"
echo " -F, --field = <field> : the underlying field: \"bn254\" or \"goldilocks\""
echo " -H, --hash = <hash> : the hash function to use: \"poseidon2\" or \"monolith\""
echo ""
quit()
@ -91,6 +110,7 @@ proc parseCliOptions(): FullConfig =
var argCtr: int = 0
var hashCfg = defHashCfg
var globCfg = defGlobCfg
var dsetCfg = defDSetCfg
var fullCfg = defFullCfg
@ -123,6 +143,8 @@ proc parseCliOptions(): FullConfig =
of "K", "ncells" : dsetCfg.ncells = checkPowerOfTwo(parseInt(value),"nCells")
of "o", "output" : fullCfg.outFile = value
of "C", "circom" : fullCfg.circomFile = value
of "F", "field" : hashCfg.field = parseField(value)
of "H", "hash" : hashCfg.hashFun = parseHashFun(value)
else:
echo "Unknown option: ", key
echo "use --help to get a list of options"
@ -131,6 +153,9 @@ proc parseCliOptions(): FullConfig =
of cmdEnd:
discard
hashCfg.combo = toFieldHashCombo( hashCfg.field , hashCfg.hashFun )
fullCfg.hashCfg = hashCfg
fullCfg.globCfg = globCfg
fullCfg.dsetCfg = dsetCfg
@ -140,9 +165,12 @@ proc parseCliOptions(): FullConfig =
proc printConfig(fullCfg: FullConfig) =
let hashCfg = fullCfg.hashCfg
let globCfg = fullCfg.globCfg
let dsetCfg = fullCfg.dsetCfg
echo "field = " & ($hashCfg.field)
echo "hash func. = " & ($hashCfg.hashFun)
echo "maxDepth = " & ($globCfg.maxDepth)
echo "maxSlots = " & ($pow2(globCfg.maxLog2NSlots))
echo "cellSize = " & ($globCfg.cellSize)
@ -180,13 +208,13 @@ proc writeCircomMainComponent(fullCfg: FullConfig, fname: string) =
when isMainModule:
let fullCfg = parseCliOptions()
# echo fullCfg
let hashCfg = fullCfg.hashCfg
if fullCfg.verbose:
printConfig(fullCfg)
if fullCfg.circomFile == "" and fullCfg.outFile == "":
echo "nothing do!"
echo "nothing to do!"
echo "use --help for getting a list of options"
quit()
@ -196,7 +224,14 @@ when isMainModule:
if fullCfg.outFile != "":
echo "writing proof input into `" & fullCfg.outFile & "`..."
let prfInput = generateProofInput( fullCfg.globCfg, fullCfg.dsetCfg, fullCfg.slotIndex, toF(fullCfg.entropy) )
exportProofInput( fullCfg.outFile , prfInput )
case hashCfg.field
of BN254:
let entropy = intToBN254(fullCfg.entropy)
let prfInput = generateProofInputBN254( hashCfg, fullCfg.globCfg, fullCfg.dsetCfg, fullCfg.slotIndex, entropy )
exportProofInputBN254( hashCfg, fullCfg.outFile , prfInput )
of Goldilocks:
let entropy = intToDigest(fullCfg.entropy)
let prfInput = generateProofInputGoldilocks( hashCfg, fullCfg.globCfg, fullCfg.dsetCfg, fullCfg.slotIndex, entropy )
exportProofInputGoldilocks( hashCfg, fullCfg.outFile , prfInput )
echo "done"

View File

@ -1,73 +0,0 @@
#
# generate the input data for the proof
# see `json.nim` to export it in Snarkjs-compatible format
#
import sugar
import std/sequtils
import blocks
import slot
import dataset
import sample
import merkle
import types
#-------------------------------------------------------------------------------
proc buildSlotTreeFull( globcfg: GlobalConfig, slotCfg: SlotConfig ): (seq[MerkleTree], MerkleTree) =
let ncells = slotCfg.nCells
let nblocks = ncells div cellsPerBlock(globcfg)
assert( nblocks * cellsPerBlock(globcfg) == ncells )
let blocks : seq[Block] = collect( newSeq, (for i in 0..<nblocks: slotLoadBlockData(globcfg, slotCfg, i) ))
let miniTrees : seq[MerkleTree] = collect( newSeq, (for blk in blocks: networkBlockTree(globcfg, blk) ))
let blockHashes : seq[Root] = map( miniTrees , treeRoot )
let bigTree = merkleTree( blockHashes )
return (miniTrees, bigTree)
proc buildSlotTree( globcfg: GlobalConfig, slotCfg: SlotConfig ): MerkleTree =
return buildSlotTreeFull(globcfg, slotCfg)[1]
proc generateProofInput*( globCfg: GlobalConfig, dsetCfg: DataSetConfig, slotIdx: SlotIdx, entropy: Entropy ): SlotProofInput =
let nslots = dsetCfg.nSlots
let ncells = dsetCfg.nCells
let nblocks = ncells div cellsPerBlock(globCfg)
assert( nblocks * cellsPerBlock(globcfg) == ncells )
let slotCfgs = collect( newSeq , (for i in 0..<nslots: slotCfgFromDataSetCfg(dsetcfg, i) ))
let slotTrees = collect( newSeq , (for scfg in slotcfgs: buildSlotTree(globCfg, scfg) ))
let slotRoots = map( slotTrees, treeRoot )
let ourSlotCfg = slotCfgs[slotIdx]
let ourSlotRoot = slotRoots[slotIdx]
let ourSlotTree = slotTrees[slotIdx]
let dsetTree = merkleTree( slotRoots )
let dsetRoot = treeRoot( dsetTree )
let slotProof = merkleProof( dsetTree , slotIdx )
let indices = cellIndices(entropy, ourSlotRoot, ncells, dsetCfg.nSamples)
var inputs : seq[CellProofInput]
for cellIdx in indices:
let (miniTrees, bigTree) = buildSlotTreeFull( globCfg, ourSlotCfg )
let blockIdx = cellIdx div cellsPerBlock(globcfg)
let blockTree = miniTrees[ blockIdx ]
let cellData = slotLoadCellData( globCfg, ourSlotCfg, cellIdx )
let botProof = merkleProof( blockTree , cellIdx mod cellsPerBlock(globcfg) )
let topProof = merkleProof( bigTree , blockIdx )
let prf = padMerkleProof( mergeMerkleProofs( botProof, topProof ), globCfg.maxDepth )
inputs.add( CellProofInput(cellData: cellData, merkleProof: prf) )
return SlotProofInput( dataSetRoot: dsetRoot
, entropy: entropy
, nCells: ncells
, nSlots: nslots
, slotIndex: slotIdx
, slotRoot: ourSlotRoot
, slotProof: padMerkleProof( slotProof, globCfg.maxLog2NSlots )
, proofInputs: inputs
)
#-------------------------------------------------------------------------------

View File

@ -0,0 +1,81 @@
#
# generate the input data for the proof
# see `json.nim` to export it in Snarkjs-compatible format
#
import sugar
import std/sequtils
import ../blocks/bn254
import ../slot
import ../dataset
import ../sample/bn254
import ../merkle
import ../merkle/bn254
import ../types
import ../types/bn254
#-------------------------------------------------------------------------------
proc buildSlotTreeFull( hashcfg: HashConfig, globcfg: GlobalConfig, slotCfg: SlotConfig ): (seq[MerkleTree[Hash]], MerkleTree[Hash]) =
  ## Loads every block of the slot, builds the per-block "mini" Merkle trees,
  ## and combines their roots into the slot-level "big" tree.
  ## Returns the pair (miniTrees, bigTree).
  let ncells = slotCfg.nCells
  let nblocks = ncells div cellsPerBlock(globcfg)
  assert( nblocks * cellsPerBlock(globcfg) == ncells )
  var miniTrees: seq[MerkleTree[Hash]] = @[]
  for i in 0 ..< nblocks:
    let blk: Block = slotLoadBlockData(globcfg, slotCfg, i)
    miniTrees.add( networkBlockTree(hashcfg, globcfg, blk) )
  let blockHashes: seq[Root] = map(miniTrees, treeRoot)
  return (miniTrees, merkleTree(hashcfg, blockHashes))

proc buildSlotTree( hashcfg: HashConfig, globcfg: GlobalConfig, slotCfg: SlotConfig ): MerkleTree[Hash] =
  ## Convenience wrapper: returns only the slot-level tree.
  buildSlotTreeFull(hashcfg, globcfg, slotCfg)[1]
proc generateProofInput*( hashCfg: HashConfig, globCfg: GlobalConfig, dsetCfg: DataSetConfig, slotIdx: SlotIdx, entropy: Entropy ): SlotProofInput[Hash] =
  ## Generates the storage-proof witness for slot `slotIdx` over BN254:
  ## builds all slot trees, the dataset tree over the slot roots, samples
  ## `dsetCfg.nSamples` cells from `entropy`, and collects each sampled
  ## cell's data together with its padded Merkle path (cell -> dataset root).
  let nslots = dsetCfg.nSlots
  let ncells = dsetCfg.nCells
  let nblocks = ncells div cellsPerBlock(globCfg)
  assert( nblocks * cellsPerBlock(globcfg) == ncells )

  let slotCfgs = collect( newSeq , (for i in 0..<nslots: slotCfgFromDataSetCfg(dsetcfg, i) ))
  let slotTrees = collect( newSeq , (for scfg in slotcfgs: buildSlotTree(hashCfg, globCfg, scfg) ))
  let slotRoots = map( slotTrees, treeRoot )

  let ourSlotCfg = slotCfgs[slotIdx]
  let ourSlotRoot = slotRoots[slotIdx]
  # NOTE(review): removed the unused `ourSlotTree` binding

  let dsetTree : MerkleTree[Hash] = merkleTree( hashcfg, slotRoots )
  let dsetRoot : Hash = treeRoot( dsetTree )
  let slotProof : MerkleProof[Hash] = merkleProof( dsetTree , slotIdx )

  let indices = cellIndices(hashCfg, entropy, ourSlotRoot, ncells, dsetCfg.nSamples)

  # the slot tree does not depend on the sampled cell index, so build it once;
  # previously it was rebuilt inside the loop below, once per sample
  let (miniTrees, bigTree) = buildSlotTreeFull( hashCfg, globCfg, ourSlotCfg )

  var inputs : seq[CellProofInput[Hash]]
  for cellIdx in indices:
    let blockIdx = cellIdx div cellsPerBlock(globcfg)
    let blockTree = miniTrees[ blockIdx ]
    let cellData = slotLoadCellData( globCfg, ourSlotCfg, cellIdx )
    let botProof = merkleProof( blockTree , cellIdx mod cellsPerBlock(globcfg) )
    let topProof = merkleProof( bigTree , blockIdx )
    # merge the within-block and block-to-slot paths, pad to the fixed depth
    let prf = padMerkleProof( mergeMerkleProofs( compressWithKey, botProof, topProof ), globCfg.maxDepth )
    inputs.add( CellProofInput[Hash](cellData: cellData, merkleProof: prf) )

  return SlotProofInput[Hash]( dataSetRoot: dsetRoot
                             , entropy: entropy
                             , nCells: ncells
                             , nSlots: nslots
                             , slotIndex: slotIdx
                             , slotRoot: ourSlotRoot
                             , slotProof: padMerkleProof( slotProof, globCfg.maxLog2NSlots )
                             , proofInputs: inputs
                             )

#---------------------------------------

proc generateProofInputBN254*( hashCfg: HashConfig, globCfg: GlobalConfig, dsetCfg: DataSetConfig, slotIdx: SlotIdx, entropy: Entropy ): SlotProofInput[Hash] =
  ## Field-tagged alias used by the CLI dispatcher.
  generateProofInput( hashCfg, globCfg, dsetCfg, slotIdx, entropy)
#-------------------------------------------------------------------------------

View File

@ -0,0 +1,87 @@
#
# generate the input data for the proof
# see `json.nim` to export it in Snarkjs-compatible format
#
import sugar
import std/sequtils
import ../blocks/goldilocks
import ../slot
import ../dataset
import ../sample/goldilocks
import ../merkle
import ../merkle/goldilocks/poseidon2
import ../merkle/goldilocks/monolith
import ../types
import ../types/goldilocks
#-------------------------------------------------------------------------------
# Loads every block of the slot, builds the per-block "mini" Merkle trees,
# and combines their roots into the slot-level "big" tree.
# Returns the pair (miniTrees, bigTree).
proc buildSlotTreeFull( hashcfg: HashConfig, globcfg: GlobalConfig, slotCfg: SlotConfig ): (seq[MerkleTree[Digest]], MerkleTree[Digest]) =
let ncells = slotCfg.nCells
let nblocks = ncells div cellsPerBlock(globcfg)
# the slot must consist of a whole number of network blocks
assert( nblocks * cellsPerBlock(globcfg) == ncells )
let blocks : seq[Block] = collect( newSeq, (for i in 0..<nblocks: slotLoadBlockData(globcfg, slotCfg, i) ))
let miniTrees : seq[MerkleTree[Digest]] = collect( newSeq, (for blk in blocks: networkBlockTree(hashcfg, globcfg, blk) ))
let blockHashes : seq[Root] = map( miniTrees , treeRoot )
let bigTree = merkleTree( hashcfg, blockHashes )
return (miniTrees, bigTree)
# Convenience wrapper: returns only the slot-level tree.
proc buildSlotTree( hashcfg: HashConfig, globcfg: GlobalConfig, slotCfg: SlotConfig ): MerkleTree[Digest] =
return buildSlotTreeFull(hashcfg, globcfg, slotCfg)[1]
proc generateProofInput*( hashCfg: HashConfig, globCfg: GlobalConfig, dsetCfg: DataSetConfig, slotIdx: SlotIdx, entropy: Entropy ): SlotProofInput[Digest] =
  ## Generates the storage-proof witness for slot `slotIdx` over Goldilocks:
  ## builds all slot trees, the dataset tree over the slot roots, samples
  ## `dsetCfg.nSamples` cells from `entropy`, and collects each sampled
  ## cell's data together with its padded Merkle path (cell -> dataset root).
  let nslots = dsetCfg.nSlots
  let ncells = dsetCfg.nCells
  let nblocks = ncells div cellsPerBlock(globCfg)
  assert( nblocks * cellsPerBlock(globcfg) == ncells )

  let slotCfgs = collect( newSeq , (for i in 0..<nslots: slotCfgFromDataSetCfg(dsetcfg, i) ))
  let slotTrees = collect( newSeq , (for scfg in slotcfgs: buildSlotTree(hashCfg, globCfg, scfg) ))
  let slotRoots = map( slotTrees, treeRoot )

  let ourSlotCfg = slotCfgs[slotIdx]
  let ourSlotRoot = slotRoots[slotIdx]
  # NOTE(review): removed the unused `ourSlotTree` binding

  let dsetTree = merkleTree( hashcfg, slotRoots )
  let dsetRoot = treeRoot( dsetTree )
  let slotProof = merkleProof( dsetTree , slotIdx )

  let indices = cellIndices(hashCfg, entropy, ourSlotRoot, ncells, dsetCfg.nSamples)

  # the slot tree does not depend on the sampled cell index, so build it once;
  # previously it was rebuilt inside the loop below, once per sample
  let (miniTrees, bigTree) = buildSlotTreeFull( hashCfg, globCfg, ourSlotCfg )

  var inputs : seq[CellProofInput[Digest]]
  for cellIdx in indices:
    let blockIdx = cellIdx div cellsPerBlock(globcfg)
    let blockTree = miniTrees[ blockIdx ]
    let cellData = slotLoadCellData( globCfg, ourSlotCfg, cellIdx )
    let botProof = merkleProof( blockTree , cellIdx mod cellsPerBlock(globcfg) )
    let topProof = merkleProof( bigTree , blockIdx )
    # merge the within-block and block-to-slot paths with the compression
    # function matching the configured hash, then pad to the fixed depth
    var prf : MerkleProof[Digest]
    case hashCfg.hashFun:
    of Poseidon2: prf = padMerkleProof( mergeMerkleProofs( poseidon2.compressWithKey, botProof, topProof ), globCfg.maxDepth )
    of Monolith:  prf = padMerkleProof( mergeMerkleProofs( monolith.compressWithKey , botProof, topProof ), globCfg.maxDepth )
    inputs.add( CellProofInput[Digest](cellData: cellData, merkleProof: prf) )

  return SlotProofInput[Digest]( dataSetRoot: dsetRoot
                               , entropy: entropy
                               , nCells: ncells
                               , nSlots: nslots
                               , slotIndex: slotIdx
                               , slotRoot: ourSlotRoot
                               , slotProof: padMerkleProof( slotProof, globCfg.maxLog2NSlots )
                               , proofInputs: inputs
                               )

#---------------------------------------

proc generateProofInputGoldilocks*( hashCfg: HashConfig, globCfg: GlobalConfig, dsetCfg: DataSetConfig, slotIdx: SlotIdx, entropy: Entropy ): SlotProofInput[Digest] =
  ## Field-tagged alias used by the CLI dispatcher.
  generateProofInput( hashCfg, globCfg, dsetCfg, slotIdx, entropy)
#-------------------------------------------------------------------------------

View File

@ -1,112 +1,17 @@
#
# export the proof inputs as a JSON file suitable for `snarkjs`
#
import sugar
import std/strutils
import std/sequtils
import std/streams
from poseidon2/io import elements
import types
from types/bn254 import Hash
from types/goldilocks import Digest
import json/bn254
import json/goldilocks
#-------------------------------------------------------------------------------
func toQuotedDecimalF(x: F): string =
let s : string = toDecimalF(x)
return ("\"" & s & "\"")
proc exportProofInput*(hashcfg: HashConfig, fname: string, prfInput: SlotProofInput[Digest]) =
exportProofInputGoldilocks(hashcfg, fname, prfInput)
func mkIndent(foo: string): string =
return spaces(foo.len)
proc writeF(h: Stream, prefix: string, x: F) =
h.writeLine(prefix & toQuotedDecimalF(x))
#[
proc writeSeq(h: Stream, prefix: string, xs: seq[F])
let n = xs.len
let indent = mkIndent(prefix)
for i in 0..<n:
let str : string = toQuotedF( xs[i] )
if i==0:
h.writeLine(prefix & "[ " & str)
else:
h.writeLine(indent & ", " & str)
h.writeLine(indent & "] ")
]#
proc exportProofInput*(hashcfg: HashConfig, fname: string, prfInput: SlotProofInput[Hash]) =
exportProofInputBN254(hashcfg, fname, prfInput)
#-------------------------------------------------------------------------------
type
WriteFun[T] = proc (stream: Stream, prefix: string, what: T) {.closure.}
proc writeList[T](h: Stream, prefix: string, xs: seq[T], writeFun: WriteFun[T]) =
let n = xs.len
let indent = mkIndent(prefix)
for i in 0..<n:
if i==0:
writeFun(h, prefix & "[ ", xs[i])
else:
writeFun(h, indent & ", ", xs[i])
h.writeLine( indent & "]" )
proc writeFieldElems(h: Stream, prefix: string, xs: seq[F]) =
writeList[F]( h, prefix, xs, writeF )
#-------------------------------------------------------------------------------
proc writeSingleCellData(h: Stream, prefix:string , cell: Cell) =
let flds : seq[F] = cell.elements(F).toSeq()
writeFieldElems(h, prefix, flds)
proc writeAllCellData(h: Stream, cells: seq[Cell]) =
writeList(h, " ", cells, writeSingleCellData )
#-------------------------------------------------------------------------------
proc writeSingleMerklePath(h: Stream, prefix: string, path: MerkleProof) =
let flds = path.merklePath
writeFieldElems(h, prefix, flds)
proc writeAllMerklePaths(h: Stream, cells: seq[MerkleProof]) =
writeList(h, " ", cells, writeSingleMerklePath )
#-------------------------------------------------------------------------------
#[
signal input entropy; // public input
signal input dataSetRoot; // public input
signal input slotIndex; // must be public, otherwise we could prove a different slot
signal input slotRoot; // can be private input
signal input nCellsPerSlot; // can be private input (Merkle tree is safe)
signal input nSlotsPerDataSet; // can be private input (Merkle tree is safe)
signal input slotProof[maxLog2NSlots]; // path from the slot root to the dataset root (private input)
signal input cellData[nSamples][nFieldElemsPerCell]; // private input
signal input merklePaths[nSamples][maxDepth]; // private input
]#
proc exportProofInput*(fname: string, prfInput: SlotProofInput) =
let h = openFileStream(fname, fmWrite)
defer: h.close()
h.writeLine("{")
h.writeLine(" \"dataSetRoot\": " & toQuotedDecimalF(prfInput.dataSetRoot) )
h.writeLine(", \"entropy\": " & toQuotedDecimalF(prfInput.entropy ) )
h.writeLine(", \"nCellsPerSlot\": " & $(prfInput.nCells) )
h.writeLine(", \"nSlotsPerDataSet\": " & $(prfInput.nSlots) )
h.writeLine(", \"slotIndex\": " & $(prfInput.slotIndex) )
h.writeLine(", \"slotRoot\": " & toQuotedDecimalF(prfInput.slotRoot) )
h.writeLine(", \"slotProof\":")
writeSingleMerklePath(h, " ", prfInput.slotProof )
h.writeLine(", \"cellData\":")
writeAllCellData(h, collect( newSeq , (for p in prfInput.proofInputs: p.cellData) ))
h.writeLine(", \"merklePaths\":")
writeAllMerklePaths(h, collect( newSeq , (for p in prfInput.proofInputs: p.merkleProof) ))
h.writeLine("}")

View File

@ -0,0 +1,81 @@
#
# export the proof inputs as a JSON file suitable for `snarkjs`
#
import sugar
#import std/strutils
import std/sequtils
import std/streams
from poseidon2/io import elements
import ../types
import ../types/bn254
import shared
#-------------------------------------------------------------------------------
proc writeFieldElems(h: Stream, prefix: string, xs: seq[F]) =
  ## Emits `xs` as a multi-line JSON array of field elements.
  writeList[F](h, prefix, xs, writeLnF)

#-------------------------------------------------------------------------------

proc writeSingleCellData(h: Stream, prefix: string, cell: Cell) =
  ## A cell is serialized as the array of field elements packed in its bytes.
  writeFieldElems(h, prefix, cell.elements(F).toSeq())

proc writeAllCellData(h: Stream, cells: seq[Cell]) =
  ## One array entry per sampled cell.
  writeList(h, " ", cells, writeSingleCellData)

#-------------------------------------------------------------------------------

proc writeSingleMerklePath(h: Stream, prefix: string, path: MerkleProof[Hash]) =
  ## A Merkle path is serialized as the array of elements in `path.merklePath`.
  writeFieldElems(h, prefix, path.merklePath)

proc writeAllMerklePaths(h: Stream, paths: seq[MerkleProof[Hash]]) =
  ## One Merkle path per sampled cell.
  writeList(h, " ", paths, writeSingleMerklePath)
#-------------------------------------------------------------------------------
#[
signal input entropy; // public input
signal input dataSetRoot; // public input
signal input slotIndex; // must be public, otherwise we could prove a different slot
signal input slotRoot; // can be private input
signal input nCellsPerSlot; // can be private input (Merkle tree is safe)
signal input nSlotsPerDataSet; // can be private input (Merkle tree is safe)
signal input slotProof[maxLog2NSlots]; // path from the slot root to the dataset root (private input)
signal input cellData[nSamples][nFieldElemsPerCell]; // private input
signal input merklePaths[nSamples][maxDepth]; // private input
]#
# Writes the BN254 proof input as a snarkjs-compatible JSON file to `fname`.
# Field elements are emitted as quoted decimal strings; the keys match the
# circom signal names listed in the comment block above.
proc exportProofInput*(fname: string, prfInput: SlotProofInput[Hash]) =
let h = openFileStream(fname, fmWrite)
# closed on scope exit, also if an exception is raised
defer: h.close()
h.writeLine("{")
h.writeLine(" \"dataSetRoot\": " & toQuotedDecimalF(prfInput.dataSetRoot) )
h.writeLine(", \"entropy\": " & toQuotedDecimalF(prfInput.entropy ) )
h.writeLine(", \"nCellsPerSlot\": " & $(prfInput.nCells) )
h.writeLine(", \"nSlotsPerDataSet\": " & $(prfInput.nSlots) )
h.writeLine(", \"slotIndex\": " & $(prfInput.slotIndex) )
h.writeLine(", \"slotRoot\": " & toQuotedDecimalF(prfInput.slotRoot) )
h.writeLine(", \"slotProof\":")
writeSingleMerklePath(h, " ", prfInput.slotProof )
h.writeLine(", \"cellData\":")
writeAllCellData(h, collect( newSeq , (for p in prfInput.proofInputs: p.cellData) ))
h.writeLine(", \"merklePaths\":")
writeAllMerklePaths(h, collect( newSeq , (for p in prfInput.proofInputs: p.merkleProof) ))
h.writeLine("}")
# Field-tagged wrapper used by the dispatcher; only valid for BN254.
proc exportProofInputBN254*(hashcfg: HashConfig, fname: string, prfInput: SlotProofInput[Hash]) =
assert( hashcfg.field == BN254 )
exportProofInput(fname, prfInput)
#-------------------------------------------------------------------------------

View File

@ -0,0 +1,93 @@
#
# export the proof inputs as a JSON file suitable for `snarkjs`
#
import sugar
#import std/strutils
#import std/sequtils
import std/streams
import goldilocks_hash/marshal
import ../types
import ../types/goldilocks
import shared
#-------------------------------------------------------------------------------
# Decodes raw bytes into a flat sequence of Goldilocks field elements
# (via `padAndDecodeBytesToDigest62` — presumably 62-bit packing per the
# helper's name; TODO confirm against goldilocks_hash/marshal).
# NOTE(review): not called anywhere in this module (only the `Mat` variant
# below is used) — confirm before removing.
func bytesToFieldElements( bytes: openArray[byte] ): seq[F] =
let digests = padAndDecodeBytesToDigest62(bytes)
return digestSeqToFeltSeq(digests)
# Same decoding, but keeps one inner seq of felts per digest (a matrix).
func bytesToFieldElementsMat( bytes: openArray[byte] ): seq[seq[F]] =
let digests = padAndDecodeBytesToDigest62(bytes)
return digestSeqToFeltSeqSeq(digests)
#-------------------------------------------------------------------------------
# Writes a flat JSON array of field elements.
proc writeFieldElems(h: Stream, prefix: string, xs: seq[F]) =
writeList[F]( h, prefix, xs, writeLnF )
# Writes a JSON array of arrays of field elements.
proc writeFieldElemsMat(h: Stream, prefix: string, xs: seq[seq[F]]) =
writeListList[F]( h, prefix, xs, writeF )
#-------------------------------------------------------------------------------
# One cell is emitted as a matrix of felts decoded from its raw bytes.
proc writeSingleCellData(h: Stream, prefix:string , cell: Cell) =
let flds : seq[seq[F]] = bytesToFieldElementsMat(cell)
writeFieldElemsMat(h, prefix, flds)
# One array entry per sampled cell.
proc writeAllCellData(h: Stream, cells: seq[Cell]) =
writeList(h, " ", cells, writeSingleCellData )
#-------------------------------------------------------------------------------
# A Merkle path is a sequence of digests; each digest becomes one felt row.
proc writeSingleMerklePath(h: Stream, prefix: string, path: MerkleProof[Digest]) =
let flds : seq[seq[F]] = digestSeqToFeltSeqSeq( path.merklePath )
writeFieldElemsMat(h, prefix, flds)
# One Merkle path per sampled cell.
proc writeAllMerklePaths(h: Stream, paths: seq[MerkleProof[Digest]]) =
writeList(h, " ", paths, writeSingleMerklePath )
#-------------------------------------------------------------------------------
#[
signal input entropy; // public input
signal input dataSetRoot; // public input
signal input slotIndex; // must be public, otherwise we could prove a different slot
signal input slotRoot; // can be private input
signal input nCellsPerSlot; // can be private input (Merkle tree is safe)
signal input nSlotsPerDataSet; // can be private input (Merkle tree is safe)
signal input slotProof[maxLog2NSlots]; // path from the slot root to the dataset root (private input)
signal input cellData[nSamples][nFieldElemsPerCell]; // private input
signal input merklePaths[nSamples][maxDepth]; // private input
]#
proc exportProofInput*(fname: string, prfInput: SlotProofInput[Digest]) =
  ## Serializes the full slot proof input as a JSON object into `fname`,
  ## with one key per circuit input signal (see the signal list in the
  ## comment block above). Digests are rendered via `digestToJsonString`;
  ## the nested arrays come from the write* helpers.
  let h = openFileStream(fname, fmWrite)
  defer: h.close()   # closed on all exit paths
  h.writeLine("{")
  h.writeLine(" \"dataSetRoot\": " & digestToJsonString(prfInput.dataSetRoot) )
  h.writeLine(", \"entropy\": " & digestToJsonString(prfInput.entropy ) )
  h.writeLine(", \"nCellsPerSlot\": " & $(prfInput.nCells) )
  h.writeLine(", \"nSlotsPerDataSet\": " & $(prfInput.nSlots) )
  h.writeLine(", \"slotIndex\": " & $(prfInput.slotIndex) )
  h.writeLine(", \"slotRoot\": " & digestToJsonString(prfInput.slotRoot) )
  h.writeLine(", \"slotProof\":")
  writeSingleMerklePath(h, " ", prfInput.slotProof )
  h.writeLine(", \"cellData\":")
  writeAllCellData(h, collect( newSeq , (for p in prfInput.proofInputs: p.cellData) ))
  h.writeLine(", \"merklePaths\":")
  writeAllMerklePaths(h, collect( newSeq , (for p in prfInput.proofInputs: p.merkleProof) ))
  h.writeLine("}")
proc exportProofInputGoldilocks*(hashcfg: HashConfig, fname: string, prfInput: SlotProofInput[Digest]) =
  ## Goldilocks-specific entry point: currently only checks the field
  ## selection and delegates to `exportProofInput`.
  assert( hashcfg.field == Goldilocks )
  exportProofInput(fname, prfInput)
#-------------------------------------------------------------------------------

View File

@ -0,0 +1,50 @@
#import sugar
#import std/sequtils
import std/strutils
import std/streams
#-------------------------------------------------------------------------------
func mkIndent*(foo: string): string =
  ## Returns a run of spaces exactly as long as `foo`, used to align
  ## continuation lines under a previously printed prefix.
  spaces(foo.len)
#-------------------------------------------------------------------------------
type
WriteFun*[T] = proc (stream: Stream, prefix: string, what: T) {.closure.}
proc writeList*[T](h: Stream, prefix: string, xs: seq[T], writeFun: WriteFun[T]) =
  ## Writes `xs` as a multi-line bracketed list: the first element's line
  ## starts with `prefix & "[ "`, continuation lines are aligned under it
  ## with ", ", and a final line holds the closing "]".
  ## NOTE(review): an empty `xs` emits only the closing "]" line with no
  ## opening bracket — presumably lists are never empty here; confirm.
  let n = xs.len
  let indent = mkIndent(prefix)
  for i in 0..<n:
    if i==0:
      writeFun(h, prefix & "[ ", xs[i])
    else:
      writeFun(h, indent & ", ", xs[i])
  h.writeLine( indent & "]" )
#---------------------------------------
proc writeListList*[T](h: Stream, prefix: string, xs: seq[seq[T]], writeFun: WriteFun[T]) =
  ## Writes a list of lists: the outer list uses the same bracket/comma
  ## layout as `writeList`, while each inner list is emitted on a single
  ## line, elements separated by " , " via `writeFun`.
  ## NOTE(review): the inner lists are not themselves bracketed here — the
  ## surrounding layout appears to rely on the caller/consumer; confirm the
  ## produced JSON shape against the circuit's expectations.
  let n = xs.len
  let indent = mkIndent(prefix)
  for i in 0..<n:
    if i==0:
      h.write( prefix & "[ ")
    else:
      h.write( indent & ", ")
    let ys = xs[i]
    let m = ys.len
    for j in 0..<m:
      if j==0:
        writeFun(h, "" , ys[j])
      else:
        writeFun(h, " , ", ys[j])
    h.write("\n")
  h.writeLine( indent & "]" )
#-------------------------------------------------------------------------------

View File

@ -1,39 +1,32 @@
import std/bitops
import std/sequtils
import constantine/math/arithmetic
import constantine/math/io/io_fields
import poseidon2/types
import poseidon2/compress
import poseidon2/io
import types
#-------------------------------------------------------------------------------
func treeDepth*(tree: MerkleTree): int =
func treeDepth*[H](tree: MerkleTree[H]): int =
return tree.layers.len - 1
func treeNumberOfLeaves*(tree: MerkleTree): int =
func treeNumberOfLeaves*[H](tree: MerkleTree[H]): int =
return tree.layers[0].len
func treeRoot*(tree: MerkleTree): Hash =
func treeRoot*[H](tree: MerkleTree[H]): H =
let last = tree.layers[tree.layers.len-1]
assert( last.len == 1 )
return last[0]
#-------------------------------------------------------------------------------
func merkleProof*(tree: MerkleTree, index: int): MerkleProof =
func merkleProof*[H](tree: MerkleTree[H], index: int): MerkleProof[H] =
let depth = treeDepth(tree)
let nleaves = treeNumberOfLeaves(tree)
var zero : H # hackety hack, it should be initialized with zeros
assert( index >= 0 and index < nleaves )
var path : seq[Hash] = newSeq[Hash](depth)
var path : seq[H] = newSeq[H](depth)
var k = index
var m = nleaves
for i in 0..<depth:
@ -42,21 +35,23 @@ func merkleProof*(tree: MerkleTree, index: int): MerkleProof =
k = k shr 1
m = (m+1) shr 1
return MerkleProof( leafIndex: index
, leafValue: tree.layers[0][index]
, merklePath: path
, numberOfLeaves: nleaves
)
return MerkleProof[H]( leafIndex: index
, leafValue: tree.layers[0][index]
, merklePath: path
, numberOfLeaves: nleaves
)
#-------------------------------------------------------------------------------
func compressWithKey(key: int, x: F, y: F): F =
compress(x,y, key=toF(key))
type CompressWithKey[H] = proc (key: int, x, y: H): H {.closure.}
func reconstructRoot*(proof: MerkleProof): Hash =
var m : int = proof.numberOfLeaves
var j : int = proof.leafIndex
var h : Hash = proof.leafValue
# func compressWithKey(key: int, x: F, y: F): F =
# compress(x,y, key=toF(key))
func reconstructRoot*[H](compressWithKey: CompressWithKey[H], proof: MerkleProof[H]): H =
var m : int = proof.numberOfLeaves
var j : int = proof.leafIndex
var h : H = proof.leafValue
var bottomFlag : int = 1
for p in proof.merklePath:
let oddIndex : bool = (bitand(j,1) != 0)
@ -78,50 +73,8 @@ func reconstructRoot*(proof: MerkleProof): Hash =
m = (m+1) shr 1
return h
func checkMerkleProof*(root: Root, proof: MerkleProof): bool =
return bool(root == reconstructRoot(proof))
#-------------------------------------------------------------------------------
# TODO: maybe move this (and the rest?) into poseidon2-nim
const KeyNone = F.fromHex("0x0")
const KeyBottomLayer = F.fromHex("0x1")
const KeyOdd = F.fromHex("0x2")
const KeyOddAndBottomLayer = F.fromhex("0x3")
func merkleTreeWorker(xs: openArray[F], isBottomLayer: static bool) : seq[seq[F]] =
let a = low(xs)
let b = high(xs)
let m = b-a+1
when not isBottomLayer:
if m==1:
return @[ xs.toSeq() ]
let halfn : int = m div 2
let n : int = 2*halfn
let isOdd : bool = (n != m)
var ys : seq[F]
if not isOdd:
ys = newSeq[F](halfn)
else:
ys = newSeq[F](halfn+1)
for i in 0..<halfn:
const key = when isBottomLayer: KeyBottomLayer else: KeyNone
ys[i] = compress( xs[a+2*i], xs[a+2*i+1], key = key )
if isOdd:
const key = when isBottomLayer: KeyOddAndBottomLayer else: KeyOdd
ys[halfn] = compress( xs[n], zero, key = key )
var ls : seq[seq[F]]
ls = @[ xs.toSeq() ]
ls = ls & merkleTreeWorker(ys, isBottomLayer = false)
return ls
func merkleTree*(xs: openArray[F]) : MerkleTree =
return MerkleTree(layers: merkleTreeWorker(xs, isBottomLayer = true))
func checkMerkleProof*[H](compressWithKey: CompressWithKey[H], root: H, proof: MerkleProof[H]): bool =
return bool( root == reconstructRoot[H](compressWithKey, proof) )
#-------------------------------------------------------------------------------
@ -130,9 +83,9 @@ func merkleTree*(xs: openArray[F]) : MerkleTree =
# we can compose proofs (though for checking these proofs we need to remember
# this and use a specific custom convention, because we mark the bottom layers)
#
func mergeMerkleProofs*(bottomProof, topProof: MerkleProof): MerkleProof =
func mergeMerkleProofs*[H](compressWithKey: CompressWithKey[H], bottomProof, topProof: MerkleProof[H]): MerkleProof[H] =
let botRoot = reconstructRoot( bottomProof )
let botRoot = reconstructRoot[H]( compressWithKey, bottomProof )
assert( bool(botRoot == topProof.leafValue) )
let idx = topProof.leafIndex * bottomProof.numberOfLeaves + bottomProof.leafIndex
@ -140,10 +93,10 @@ func mergeMerkleProofs*(bottomProof, topProof: MerkleProof): MerkleProof =
let nlvs = bottomProof.numberOfLeaves * topProof.numberOfLeaves
let path = bottomProof.merklePath & topProof.merklePath
return MerkleProof( leafIndex: idx
, leafValue: val
, merklePath: path
, numberOfLeaves: nlvs
)
return MerkleProof[H]( leafIndex: idx
, leafValue: val
, merklePath: path
, numberOfLeaves: nlvs
)
#-------------------------------------------------------------------------------

View File

@ -0,0 +1,65 @@
import std/sequtils
import
constantine/math/arithmetic,
constantine/math/io/io_fields
import poseidon2/types
import poseidon2/compress
import poseidon2/merkle
import poseidon2/io
import ../types
import ../types/bn254
#-------------------------------------------------------------------------------
func compressWithkey*(key: int, x, y: F): F = compress(x,y, key=toF(key))
func merkleDigestBN254*(xs: openArray[F]): F = Merkle.digest(xs)
#-------------------------------------------------------------------------------
const KeyNone = F.fromHex("0x0")
const KeyBottomLayer = F.fromHex("0x1")
const KeyOdd = F.fromHex("0x2")
const KeyOddAndBottomLayer = F.fromhex("0x3")
func merkleTreeWorker(xs: openArray[F], isBottomLayer: static bool) : seq[seq[F]] =
  ## Recursively builds all layers of a keyed-compression Merkle tree over
  ## BN254 field elements, returning the layers bottom-first. Odd-length
  ## layers are completed by compressing the dangling element with `zero`,
  ## using a distinct compression key (domain separation) so padded nodes
  ## cannot be confused with regular ones.
  let a = low(xs)
  let b = high(xs)
  let m = b-a+1
  when not isBottomLayer:
    if m==1:
      return @[ xs.toSeq() ]   # single node above the bottom layer: root reached
  let halfn : int = m div 2
  let n : int = 2*halfn
  let isOdd : bool = (n != m)
  var ys : seq[F]
  if not isOdd:
    ys = newSeq[F](halfn)
  else:
    ys = newSeq[F](halfn+1)
  for i in 0..<halfn:
    const key = when isBottomLayer: KeyBottomLayer else: KeyNone
    ys[i] = compress( xs[a+2*i], xs[a+2*i+1], key = key )
  if isOdd:
    const key = when isBottomLayer: KeyOddAndBottomLayer else: KeyOdd
    # NOTE(review): indexes xs[n] rather than xs[a+n]; fine because
    # low(openArray) == 0 in Nim, so a == 0 here.
    ys[halfn] = compress( xs[n], zero, key = key )
  var ls : seq[seq[F]]
  ls = @[ xs.toSeq() ]
  ls = ls & merkleTreeWorker(ys, isBottomLayer = false)
  return ls
#-------------------------------------------------------------------------------
func merkleTreeBN254*(xs: openArray[F]) : MerkleTree[F] =
return MerkleTree[F](layers: merkleTreeWorker(xs, isBottomLayer = true))
#-------------------------------------------------------------------------------

View File

@ -0,0 +1,65 @@
import std/sequtils
import goldilocks_hash/types
import goldilocks_hash/monolith/compress
import goldilocks_hash/monolith/merkle
import goldilocks_hash/monolith/sponge
import ../../types
import ../../types/goldilocks
#-------------------------------------------------------------------------------
func compressWithkey*(key: int, x, y: Digest): Digest = compress(x,y, key=uint64(key))
func merkleDigestMonolith*(xs: openArray[Digest]): Digest = Merkle.digest(xs)
# TODO: move these somewhere else
func digestFeltsMonolith*(xs: openArray[F] ): Digest = digestFeltsC( rate=8, xs )
func digestBytesMonolith*(xs: openArray[byte]): Digest = digestBytesC( rate=8, xs )
#-------------------------------------------------------------------------------
const KeyNone : uint64 = 0x00
const KeyBottomLayer : uint64 = 0x01
const KeyOdd : uint64 = 0x02
const KeyOddAndBottomLayer : uint64 = 0x03
func merkleTreeWorker(xs: openArray[Digest], isBottomLayer: static bool) : seq[seq[Digest]] =
  ## Goldilocks/Monolith variant of the keyed-compression Merkle tree
  ## builder; layers are returned bottom-first, odd layers padded with
  ## `zeroDigest` under a distinct key (domain separation).
  ## NOTE(review): near-duplicate of the BN254 and Poseidon2 workers —
  ## a generic version parameterized over (H, compress, zero) would remove
  ## the triplication.
  let a = low(xs)
  let b = high(xs)
  let m = b-a+1
  when not isBottomLayer:
    if m==1:
      return @[ xs.toSeq() ]   # root reached
  let halfn : int = m div 2
  let n : int = 2*halfn
  let isOdd : bool = (n != m)
  var ys : seq[Digest]
  if not isOdd:
    ys = newSeq[Digest](halfn)
  else:
    ys = newSeq[Digest](halfn+1)
  for i in 0..<halfn:
    const key = when isBottomLayer: KeyBottomLayer else: KeyNone
    ys[i] = compress( xs[a+2*i], xs[a+2*i+1], key = key )
  if isOdd:
    const key = when isBottomLayer: KeyOddAndBottomLayer else: KeyOdd
    ys[halfn] = compress( xs[n], zeroDigest, key = key )
  var ls : seq[seq[Digest]]
  ls = @[ xs.toSeq() ]
  ls = ls & merkleTreeWorker(ys, isBottomLayer = false)
  return ls
#-------------------------------------------------------------------------------
func merkleTreeGoldilocksMonolith*(xs: openArray[Digest]) : MerkleTree[Digest] =
return MerkleTree[Digest](layers: merkleTreeWorker(xs, isBottomLayer = true))
#-------------------------------------------------------------------------------

View File

@ -0,0 +1,65 @@
import std/sequtils
import goldilocks_hash/types
import goldilocks_hash/poseidon2/compress
import goldilocks_hash/poseidon2/merkle
import goldilocks_hash/poseidon2/sponge
import ../../types
import ../../types/goldilocks
#-------------------------------------------------------------------------------
func compressWithkey*(key: int, x, y: Digest): Digest = compress(x,y, key=uint64(key))
func merkleDigestPoseidon2*(xs: openArray[Digest]): Digest = Merkle.digest(xs)
# TODO: move these somewhere else
func digestFeltsPoseidon2*(xs: openArray[F] ): Digest = digestFeltsC( rate=8, xs )
func digestBytesPoseidon2*(xs: openArray[byte]): Digest = digestBytesC( rate=8, xs )
#-------------------------------------------------------------------------------
const KeyNone : uint64 = 0x00
const KeyBottomLayer : uint64 = 0x01
const KeyOdd : uint64 = 0x02
const KeyOddAndBottomLayer : uint64 = 0x03
func merkleTreeWorker(xs: openArray[Digest], isBottomLayer: static bool) : seq[seq[Digest]] =
  ## Goldilocks/Poseidon2 variant of the keyed-compression Merkle tree
  ## builder; layers are returned bottom-first, odd layers padded with
  ## `zeroDigest` under a distinct key (domain separation).
  ## NOTE(review): near-duplicate of the BN254 and Monolith workers —
  ## consider a single generic implementation.
  let a = low(xs)
  let b = high(xs)
  let m = b-a+1
  when not isBottomLayer:
    if m==1:
      return @[ xs.toSeq() ]   # root reached
  let halfn : int = m div 2
  let n : int = 2*halfn
  let isOdd : bool = (n != m)
  var ys : seq[Digest]
  if not isOdd:
    ys = newSeq[Digest](halfn)
  else:
    ys = newSeq[Digest](halfn+1)
  for i in 0..<halfn:
    const key = when isBottomLayer: KeyBottomLayer else: KeyNone
    ys[i] = compress( xs[a+2*i], xs[a+2*i+1], key = key )
  if isOdd:
    const key = when isBottomLayer: KeyOddAndBottomLayer else: KeyOdd
    ys[halfn] = compress( xs[n], zeroDigest, key = key )
  var ls : seq[seq[Digest]]
  ls = @[ xs.toSeq() ]
  ls = ls & merkleTreeWorker(ys, isBottomLayer = false)
  return ls
#-------------------------------------------------------------------------------
func merkleTreeGoldilocksPoseidon2*(xs: openArray[Digest]) : MerkleTree[Digest] =
return MerkleTree[Digest](layers: merkleTreeWorker(xs, isBottomLayer = true))
#-------------------------------------------------------------------------------

View File

@ -1,43 +0,0 @@
import sugar
import std/bitops
import constantine/math/arithmetic
import poseidon2/types
import poseidon2/sponge
import poseidon2/io
import types
import misc
#-------------------------------------------------------------------------------
func extractLowBits[n: static int]( A: BigInt[n], k: int): uint64 =
assert( k>0 and k<=64 )
var r : uint64 = 0
for i in 0..<k:
let b = bit[n](A, i) # NOTE: the docunmentation seems to lie about the conventions here....
let y = uint64(b)
if (y != 0):
r = bitor( r, 1'u64 shl i )
return r
func extractLowBits(fld: F, k: int): uint64 =
let A : BigInt[254] = fld.toBig()
return extractLowBits(A, k);
#-------------------------------------------------------------------------------
func cellIndex*(entropy: Entropy, slotRoot: Root, numberOfCells: int, counter: int): int =
let log2 = ceilingLog2(numberOfCells)
assert( 1 shl log2 == numberOfCells , "for this version, `numberOfCells` is assumed to be a power of two")
let input : seq[F] = @[ entropy, slotRoot, toF(counter) ]
let H : Hash = Sponge.digest( input, rate = 2 )
return int(extractLowBits(H,log2))
func cellIndices*(entropy: Entropy, slotRoot: Root, numberOfCells: int, nSamples: int): seq[int] =
return collect( newSeq, (for i in 1..nSamples: cellIndex(entropy, slotRoot, numberOfCells, i) ))
#-------------------------------------------------------------------------------

View File

@ -0,0 +1,29 @@
import sugar
#import std/bitops
# import constantine/math/arithmetic
import poseidon2/types
import poseidon2/sponge
import poseidon2/io
import ../types
import ../types/bn254
import ../misc
#-------------------------------------------------------------------------------
func cellIndex*(hashcfg: HashConfig, entropy: Entropy, slotRoot: Root, numberOfCells: int, counter: int): int =
  ## Derives a pseudo-random cell index in [0, numberOfCells) by hashing
  ## (entropy, slotRoot, counter) with the Poseidon2 sponge at rate 2 and
  ## taking the low log2(numberOfCells) bits of the digest.
  ## `numberOfCells` must be a power of two (asserted below).
  assert( hashcfg.field == BN254 )
  let log2 = ceilingLog2(numberOfCells)
  assert( 1 shl log2 == numberOfCells , "for this version, `numberOfCells` is assumed to be a power of two")
  let input : seq[F] = @[ entropy, slotRoot, toF(counter) ]
  let H : Hash = Sponge.digest( input, rate = 2 )
  return int(extractLowBits(H,log2))
func cellIndices*(hashcfg: HashConfig, entropy: Entropy, slotRoot: Root, numberOfCells: int, nSamples: int): seq[int] =
return collect( newSeq, (for i in 1..nSamples: cellIndex(hashcfg, entropy, slotRoot, numberOfCells, i) ))
#-------------------------------------------------------------------------------

View File

@ -0,0 +1,40 @@
import sugar
#import std/bitops
#import std/sequtils
import goldilocks_hash/types
#import goldilocks_hash/poseidon2/sponge
#import goldilocks_hash/monolith/sponge
import ../types
import ../types/goldilocks
import ../merkle/goldilocks/poseidon2
import ../merkle/goldilocks/monolith
import ../misc
#-------------------------------------------------------------------------------
func cellIndex*(hashcfg: HashConfig, entropy: Entropy, slotRoot: Digest, numberOfCells: int, counter: int): int =
  ## Goldilocks version: derives a pseudo-random cell index in
  ## [0, numberOfCells) by hashing (entropy, slotRoot, counter) with the
  ## configured hash (Poseidon2 or Monolith) and taking the low
  ## log2(numberOfCells) bits of the digest's first limb.
  ## `numberOfCells` must be a power of two (asserted below).
  assert( hashcfg.field == Goldilocks )
  let log2 = ceilingLog2(numberOfCells)
  assert( 1 shl log2 == numberOfCells , "for this version, `numberOfCells` is assumed to be a power of two")
  let inputD : seq[Digest] = @[ entropy, slotRoot, intToDigest( counter ) ]
  let input : seq[F] = digestSeqToFeltSeq(inputD)
  var hash : Digest
  case hashcfg.hashFun:   # exhaustive over HashSelect
    of Poseidon2: hash = digestFeltsPoseidon2( input )
    of Monolith: hash = digestFeltsMonolith( input )
  let hash4 : F4 = fromDigest(hash)
  let hash0 : F = hash4[0]
  return int(extractLowBits(hash0,log2))
func cellIndices*(hashcfg: HashConfig, entropy: Entropy, slotRoot: Digest, numberOfCells: int, nSamples: int): seq[int] =
return collect( newSeq, (for i in 1..nSamples: cellIndex(hashcfg, entropy, slotRoot, numberOfCells, i) ))
#-------------------------------------------------------------------------------

View File

@ -32,6 +32,8 @@ proc genFakeCell(globcfg: GlobalConfig, cfg: SlotConfig, seed: Seed, idx: CellId
return cell
#[
-- original Haskell version:
--
genFakeCell :: SlotConfig -> Seed -> CellIdx -> CellData
genFakeCell cfg (Seed seed) (CellIdx idx) = (mkCellData cfg $ B.pack list) where
list = go (fromIntegral $ _cellSize cfg) 1

View File

@ -1,21 +1,21 @@
import sugar
import std/sequtils
#import std/sequtils
import constantine/math/arithmetic
#import constantine/math/arithmetic
import poseidon2/types
import poseidon2/merkle
import poseidon2/io
import types
import blocks
import slot
import dataset
import sample
import merkle
import gen_input
import json
#import blocks
#import slot
#import dataset
#import sample
#-------------------------------------------------------------------------------

View File

@ -2,25 +2,6 @@
import std/strutils
import std/sequtils
from constantine/math/io/io_fields import toDecimal
import poseidon2/types
export types
#-------------------------------------------------------------------------------
type Entropy* = F
type Hash* = F
type Root* = Hash
#-------------------------------------------------------------------------------
func toDecimalF*(a : F): string =
var s : string = toDecimal(a)
s = s.strip( leading=true, trailing=false, chars={'0'} )
if s.len == 0: s="0"
return s
#-------------------------------------------------------------------------------
type Cell* = seq[byte]
@ -30,27 +11,30 @@ type Block* = seq[byte]
type
MerkleProof* = object
MerkleProof*[H] = object
leafIndex* : int # linear index of the leaf, starting from 0
leafValue* : Hash # value of the leaf
merklePath* : seq[Hash] # order: from the bottom to the top
leafValue* : H # value of the leaf
merklePath* : seq[H] # order: from the bottom to the top
numberOfLeaves* : int # number of leaves in the tree (=size of input)
MerkleTree* = object
layers*: seq[seq[Hash]]
MerkleTree*[H] = object
layers*: seq[seq[H]]
# ^^^ note: the first layer is the bottom layer, and the last layer is the root
#-------------------------------------------------------------------------------
# the circuit expect merkle path of statically known length, so we need to pad them
func padMerkleProof*( old: MerkleProof, newlen: int ): MerkleProof =
func padMerkleProof*[H]( old: MerkleProof[H], newlen: int ): MerkleProof[H] =
let pad = newlen - old.merklePath.len
assert( pad >= 0 )
return MerkleProof( leafIndex: old.leafIndex
, leafValue: old.leafValue
, merklePath: old.merklePath & repeat(zero,pad)
, numberOfLeaves: old.numberOfLeaves
)
var zero : H # hackety hack hack, it should be initialized to zero
return MerkleProof[H]( leafIndex: old.leafIndex
, leafValue: old.leafValue
, merklePath: old.merklePath & repeat(zero,pad)
, numberOfLeaves: old.numberOfLeaves
)
#-------------------------------------------------------------------------------
@ -61,19 +45,19 @@ type
BlockIdx* = int
SlotIdx* = int
CellProofInput* = object
CellProofInput*[H] = object
cellData*: Cell
merkleProof*: MerkleProof
merkleProof*: MerkleProof[H]
SlotProofInput* = object
dataSetRoot*: Root
entropy*: Entropy
SlotProofInput*[H] = object
dataSetRoot*: H # Root
entropy*: H # Entropy
nSlots*: int
nCells*: int
slotRoot*: Root
slotRoot*: H # Root
slotIndex*: SlotIdx
slotProof*: MerkleProof
proofInputs*: seq[CellProofInput]
slotProof*: MerkleProof[H]
proofInputs*: seq[CellProofInput[H]]
#-------------------------------------------------------------------------------
@ -106,6 +90,24 @@ type
cellSize* : int # size of the cells we prove (2048)
blockSize* : int # size of the network block (65536)
HashConfig* = object
field* : FieldSelect
hashFun* : HashSelect
combo* : FieldHashCombo
FieldSelect* = enum
BN254,
Goldilocks
HashSelect* = enum
Poseidon2,
Monolith
FieldHashCombo* = enum
BN254_Poseidon2,
Goldilocks_Poseidon2,
Goldilocks_Monolith
#-------------------------------------------------------------------------------
func cellsPerBlock*(glob: GlobalConfig): int =
@ -114,3 +116,35 @@ func cellsPerBlock*(glob: GlobalConfig): int =
return k
#-------------------------------------------------------------------------------
func parseField*(str0: string): FieldSelect =
  ## Parses a field name, case-insensitively: "bn254" or "goldilocks".
  ## Raises a defect on any other input.
  case strutils.toLowerAscii(str0)
  of "bn254":
    return BN254
  of "goldilocks":
    return Goldilocks
  else:
    raiseAssert("parsefield: unrecognized field `" & str0 & "`")
func parseHashFun*(str0: string): HashSelect =
  ## Parses a hash-function name, case-insensitively: "poseidon2" or
  ## "monolith". Raises a defect on any other input.
  let str = strutils.toLowerAscii(str0)
  case str:
  of "poseidon2": return Poseidon2
  of "monolith": return Monolith
  # fix: the error message previously said "parsefield" (copy-paste from
  # parseField), misattributing the failure to the wrong parser
  else: raiseAssert("parseHashFun: unrecognized hash function `" & str0 & "`")
#-------------------------------------------------------------------------------
{. warning[UnreachableElse]:off .}
func toFieldHashCombo*( field: FieldSelect, hash: HashSelect ): FieldHashCombo =
  ## Combines the field and hash selections into a single enum value,
  ## rejecting unsupported combinations (only BN254+Poseidon2 and the two
  ## Goldilocks hashes are valid). The UnreachableElse warning is disabled
  ## above because the inner Goldilocks `case` is exhaustive.
  let msg = "invalid hash function `" & ($hash) & "` choice for field `" & ($field) & "`"
  case field:
    of BN254:
      case hash:
        of Poseidon2: return BN254_Poseidon2
        else: raiseAssert(msg)
    of Goldilocks:
      case hash:
        of Poseidon2: return Goldilocks_Poseidon2
        of Monolith: return Goldilocks_Monolith
        else: raiseAssert(msg)
#-------------------------------------------------------------------------------

View File

@ -0,0 +1,62 @@
import std/strutils
import std/bitops
import std/streams
import
constantine/math/arithmetic,
constantine/math/io/io_fields,
constantine/math/io/io_bigints,
constantine/math/config/curves
#from constantine/math/io/io_fields import toDecimal
import poseidon2/types
import poseidon2/io
export types
#-------------------------------------------------------------------------------
type BN254_T* = F
type Entropy* = F
type Hash* = F
type Root* = Hash
#-------------------------------------------------------------------------------
func intToBN254*(x: int): F = toF(x)
func toDecimalF*(a : F): string =
var s : string = toDecimal(a)
s = s.strip( leading=true, trailing=false, chars={'0'} )
if s.len == 0: s="0"
return s
func toQuotedDecimalF*(x: F): string =
let s : string = toDecimalF(x)
return ("\"" & s & "\"")
proc writeLnF*(h: Stream, prefix: string, x: F) =
h.writeLine(prefix & toQuotedDecimalF(x))
proc writeF*(h: Stream, prefix: string, x: F) =
h.write(prefix & toQuotedDecimalF(x))
#-------------------------------------------------------------------------------
func extractLowBits[n: static int]( A: BigInt[n], k: int): uint64 =
  ## Returns the `k` lowest bits of `A` packed into a uint64 (bit 0 = LSB).
  ## Requires 0 < k <= 64.
  assert( k>0 and k<=64 )
  var r : uint64 = 0
  for i in 0..<k:
    let b = bit[n](A, i) # NOTE: the documentation seems to lie about the conventions here....
    let y = uint64(b)
    if (y != 0):
      r = bitor( r, 1'u64 shl i )
  return r
func extractLowBits*(fld: F, k: int): uint64 =
  ## Returns the `k` lowest bits of the field element, going through its
  ## big-integer representation.
  let bigRepr : BigInt[254] = fld.toBig()
  extractLowBits(bigRepr, k)
#-------------------------------------------------------------------------------

View File

@ -0,0 +1,49 @@
#import std/strutils
import std/bitops
import std/streams
import goldilocks_hash/types
export types
#-------------------------------------------------------------------------------
type Goldi_T* = F
type Entropy* = Digest
type Hash* = Digest
type Root* = Hash
#-------------------------------------------------------------------------------
func toDecimalF*(a : F): string =
  ## Decimal string of the Goldilocks field element's canonical uint64 value.
  $fromF(a)
func toQuotedDecimalF*(x: F): string =
  ## Decimal string of `x` wrapped in double quotes (a JSON string literal).
  "\"" & toDecimalF(x) & "\""
proc writeLnF*(h: Stream, prefix: string, x: F) =
h.writeLine(prefix & toQuotedDecimalF(x))
proc writeF*(h: Stream, prefix: string, x: F) =
h.write(prefix & toQuotedDecimalF(x))
#-------------------------------------------------------------------------------
func extractLowBits*(fld: F, k: int): uint64 =
  ## Returns the `k` lowest bits of a Goldilocks field element (bit 0 = LSB).
  ## `k` is capped at 56 — NOTE(review): presumably to stay safely below the
  ## ~2^64 field size; confirm the intended bound.
  assert( k>0 and k<=56 )
  bitand( fromF(fld), (1'u64 shl k) - 1 )
#-------------------------------------------------------------------------------
func digestToJsonString*( d: Digest ): string =
  ## Renders a 4-limb Goldilocks digest as a JSON array of four quoted
  ## decimal strings, e.g. `[ "1", "2", "3", "4" ]`.
  let xs: F4 = fromDigest(d)
  return "[ " & toQuotedDecimalF(xs[0]) & ", " &
                toQuotedDecimalF(xs[1]) & ", " &
                toQuotedDecimalF(xs[2]) & ", " &
                toQuotedDecimalF(xs[3]) & " ]"
#-------------------------------------------------------------------------------

1
workflow/.gitignore vendored
View File

@ -1,3 +1,4 @@
build/
build_big/
build*/
tmp/

View File

@ -16,28 +16,47 @@ NOTE: the examples below assume `bash`. In particular, it won't work with `zsh`
To have an overview of what all the different steps and files are, see [PROOFS.md](PROOFS.md).
### Some measurements
### Some benchmarks
Approximate time to run this on an M2 (8+4 cores), with 10 samples:
Approximate time to run this on an M2 macbook pro (8+4 cores), with 10 samples:
- compiling the circuit: 8 seconds
- circuit-specific setup (with 1 contributor): 85 seconds
- size of the `.zkey` file (only 1 contributor): 110 megabytes
- generating the witness (WASM): 0.3 seconds
- proving with `snarkjs` (slow): 7.7 seconds
- proving with `zikkurat` (single-threaded!): 13 seconds
- proving with `arkworks`: 4.4 seconds (loading the zkey: 6 seconds)
- proving with `nim-groth16` (old version): 2 seconds
Same with 50 samples:
- compiling: 37 seconds
- circuit-specific setup: ~420 seconds
- circuit-specific setup: ~430 seconds
- `.zkey` file: 525 megabytes
- snarkjs prove: 34 seconds
- generating the witness (WASM): 1.2 seconds
- proving with `snarkjs`: 36 seconds
- proving with `zikkurat` (single-threaded!): 52 seconds
- proving with `arkworks`: 19.8 seconds (loading the zkey: 33 seconds)
- proving with `nim-groth16` (old version): 9.4 seconds
And with 100 samples:
- compiling: 76 seconds
- circuit-specific setup: ~1000 seconds
- `.zkey` file
- snarkjs prove: 76 seconds
- compiling: 76 seconds
- circuit-specific setup: ~1050 seconds
- `.zkey` file
- generating the witness (WASM): 2.3 seconds
- proving with `snarkjs`: 76 seconds
- proving with `zikkurat` (single-threaded!): 102 seconds
- proving with `arkworks`: 41 seconds (loading the zkey: 66 seconds)
- proving with `nim-groth16` (old version): 18 seconds
TODO:
- [x] add `arkworks` prover
- [ ] add `rapidsnarks` prover (doesn't run on ARM...)
- [ ] update `nim-groth16` to `constantine-0.1` (should be faster because no workarounds)
- [ ] add multithreading to `zikkurat`
### Preliminaries

View File

@ -13,7 +13,9 @@ CLI_ARGS="--depth=$MAXDEPTH \
--seed=$SEED \
--nslots=$NSLOTS \
--ncells=$NCELLS \
--index=$SLOTINDEX"
--index=$SLOTINDEX \
--field=bn254 \
--hash=poseidon2"
if [[ "$1" == "--export" ]]
then

View File

@ -16,16 +16,18 @@ ${NIMCLI_DIR}/cli $CLI_ARGS -v --output=input.json
# --- generate the witness ---
start=`date +%s`
echo ""
echo "generating the witness..."
cd ${CIRCUIT_MAIN}_js
time node generate_witness.js ${CIRCUIT_MAIN}.wasm ../input.json ../witness.wtns
cd ${ORIG}/build
end=`date +%s`
echo "Generating the witness took `expr $end - $start` seconds."
# --- create the proof ---
PROVER="snarkjs"
# PROVER="nim"
RS=`which rapidsnark`
if [[ ! -z "$RS" ]]
@ -33,9 +35,13 @@ then
PROVER="rapidsnark"
fi
# PROVER="zikkurat"
PROVER="nim"
echo ""
echo "creating the proof... using prover: \`$PROVER\`"
start=`date +%s`
case $PROVER in
snarkjs)
time snarkjs groth16 prove ${CIRCUIT_MAIN}.zkey witness.wtns proof.json public.json
@ -46,11 +52,16 @@ case $PROVER in
nim)
time nim-groth16 -tpv --zkey=${CIRCUIT_MAIN}.zkey --wtns=witness.wtns -o=proof.json -i=public.json
;;
zikkurat)
time zikkurat-groth16 -tpv --zkey=${CIRCUIT_MAIN}.zkey --wtns=witness.wtns # -o=proof.json -i=public.json
;;
*)
echo "unknown prover \`$PROVER\`"
exit 99
;;
esac
end=`date +%s`
echo "Creating the proof took `expr $end - $start` seconds."
# --- verify the proof ---

View File

@ -14,8 +14,13 @@ ${NIMCLI_DIR}/cli $CLI_ARGS -v --circom=${CIRCUIT_MAIN}.circom
# --- compile the circuit ---
echo ""
start=`date +%s`
CIRCUIT_INCLUDES="-l${CIRCUIT_LIB_DIR} -l${CIRCUIT_POS_DIR} -l${CIRCUIT_PRF_DIR}"
time circom --r1cs --wasm --O2 ${CIRCUIT_INCLUDES} ${CIRCUIT_MAIN}.circom
circom --r1cs --wasm --O2 ${CIRCUIT_INCLUDES} ${CIRCUIT_MAIN}.circom
end=`date +%s`
echo "Compiling the circuit took `expr $end - $start` seconds."
echo ""
# --- circuit specific setup ---