Storage proofs (#1)

* WIP: working on storage proofs

* make it work

* don't need to pass an array

* add failing case

* move `saltSquare` to top

* updating deps
Dmitriy Ryajov, 2023-02-09 10:40:23 -06:00, committed by GitHub
parent 94b9b0da6d
commit a0ba322cfd
16 changed files with 9291 additions and 718 deletions

circuits/sha256_test512.circom (deleted)

@@ -1,5 +0,0 @@
pragma circom 2.0.0;

include "../node_modules/circomlib/circuits/sha256/sha256.circom";

component main = Sha256(512);

circuits/storer.circom

@@ -1,78 +1,79 @@
 pragma circom 2.1.0;

 include "../node_modules/circomlib/circuits/sha256/sha256.circom";
-include "../node_modules/circomlib/circuits/poseidon.circom";
-include "tree.circom";
+// include "../node_modules/circomlib/circuits/poseidon.circom";
+include "../node_modules/circomlib/circuits/mimc.circom";
+// include "../node_modules/circomlib/circuits/mimcsponge.circom";
+include "../node_modules/circomlib/circuits/switcher.circom";
+include "../node_modules/circomlib/circuits/bitify.circom";

-template HashCheck(blockSize) {
-    signal input block[blockSize];
-    //signal input blockHash[256];
+template parallel MerkleProof(LEVELS) {
+    signal input leaf;
+    signal input pathElements[LEVELS];
+    signal input pathIndices;
+    signal output root;
+
+    component switcher[LEVELS];
+    component hasher[LEVELS];
+    component indexBits = Num2Bits(LEVELS);
+    indexBits.in <== pathIndices;
+
+    for (var i = 0; i < LEVELS; i++) {
+        switcher[i] = Switcher();
+        switcher[i].L <== i == 0 ? leaf : hasher[i - 1].out;
+        switcher[i].R <== pathElements[i];
+        switcher[i].sel <== indexBits.out[i];
+
+        // hasher[i] = Poseidon(2);
+        hasher[i] = MultiMiMC7(2, 91);
+        hasher[i].k <== 2;
+        hasher[i].in[0] <== switcher[i].outL;
+        hasher[i].in[1] <== switcher[i].outR;
+    }
+
+    root <== hasher[LEVELS - 1].out;
+}
+
+template parallel HashCheck(BLOCK_SIZE) {
+    signal input block[BLOCK_SIZE];
     signal input blockHash;

-    //component hash = Sha256(blockSize);
-    component hash = Poseidon(blockSize);
-    for (var i = 0; i < blockSize; i++) {
-        hash.inputs[i] <== block[i];
-    }
-    hash.out === blockHash; //is this checking the whole array?
-    // is this enough or do we need output?
+    component hash = MultiMiMC7(BLOCK_SIZE, 91);
+    hash.in <== block;
+    hash.k <== 2;
+
+    blockHash === hash.out; // assert that block matches hash
 }

-template CheckInclusion(nLevels) {
-    signal input index;
-    signal input chunkHash;
-    signal input treeSiblings[nLevels];
-    signal input root;
-
-    component num2Bits = Num2Bits(nLevels);
-    num2Bits.in <== index;
-
-    component inclusionProof = MerkleTreeInclusionProof(nLevels);
-    inclusionProof.leaf <== chunkHash;
-    for (var j = 0; j < nLevels; j++) {
-        inclusionProof.siblings[j] <== treeSiblings[j];
-        inclusionProof.pathIndices[j] <== num2Bits.out[j];
-    }
-    root === inclusionProof.root;
-}
-
-template StorageProver(blockSize, qLen, nLevels) {
-    // blockSize: size of block in bits (sha256), or in symbols (Poseidon)
-    // qLen: query length, i.e. number of indices to be proven
-    // nLevels: size of Merkle Tree in the manifest
-    signal input chunks[qLen][blockSize];
-    //signal input chunkHashes[qLen][256];
-    signal input chunkHashes[qLen];
-    signal input indices[qLen];
-    signal input treeSiblings[qLen][nLevels];
-    signal input root;
+template StorageProver(BLOCK_SIZE, QUERY_LEN, LEVELS) {
+    // BLOCK_SIZE: size of block in symbols
+    // QUERY_LEN: query length, i.e. number of indices to be proven
+    // LEVELS: size of Merkle Tree in the manifest
+    signal input chunks[QUERY_LEN][BLOCK_SIZE]; // chunks to be proven
+    signal input siblings[QUERY_LEN][LEVELS];   // sibling hashes of chunks to be proven
+    signal input path[QUERY_LEN];               // path of chunks to be proven
+    signal input hashes[QUERY_LEN];             // hashes of chunks to be proven
+    signal input root;                          // root of the Merkle Tree
+    signal input salt;                          // salt (block hash) to prevent preimage attacks
+
+    signal saltSquare <== salt * salt; // might not be necessary as it's part of the public inputs

     //check that chunks hash to given hashes
-    for (var i = 0; i < qLen; i++) {
-        parallel HashCheck(blockSize)(
-            chunks[i],
-            chunkHashes[i]
-        );
+    component hashers[QUERY_LEN];
+    for (var i = 0; i < QUERY_LEN; i++) {
+        hashers[i] = HashCheck(BLOCK_SIZE);
+        hashers[i].block <== chunks[i];
+        hashers[i].blockHash <== hashes[i];
     }

     //check that the tree is correct
     // - check indices against limits TODO
     // - convert indices to treePathIndices
     // - check chunkHash and treeSiblings according to treePathIndices against root
-    for (var i = 0; i < qLen; i++) {
-        parallel CheckInclusion(nLevels)(
-            indices[i],
-            chunkHashes[i],
-            treeSiblings[i],
-            root);
+    component merkelizer[QUERY_LEN];
+    for (var i = 0; i < QUERY_LEN; i++) {
+        merkelizer[i] = MerkleProof(LEVELS);
+        merkelizer[i].leaf <== hashes[i];
+        merkelizer[i].pathElements <== siblings[i];
+        merkelizer[i].pathIndices <== path[i];
+        merkelizer[i].root === root;
     }
 }

 //component main {public [blockHash]} = HashCheck(512);
 //template StorageProver(blockSize, qLen, nLevels) {
 //component main {public [indices]} = StorageProver(512, 1, 10);
-component main {public [indices, root]} = StorageProver(2, 1, 20);
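
For reference, a minimal JS sketch (not part of the commit) of how a prover computes block hashes and a root that satisfy the rewritten circuit, assuming circomlibjs's mimc7 as used in test/storer.js below; the key value 2 must match `hash.k <== 2` in the circuit:

    // Sketch: mirror the circuit's MultiMiMC7(n, 91) hashing with k = 2.
    const mimc7 = require("circomlibjs").mimc7;

    const key = BigInt(2); // must match `hasher[i].k <== 2` / `hash.k <== 2`
    const digestMulti = (arr) => mimc7.multiHash(arr, key);

    // leaf hash of a block of symbols, as constrained by HashCheck(BLOCK_SIZE)
    const leaf = (block) => digestMulti(block);
    // parent of two nodes, as computed per level in MerkleProof(LEVELS)
    const parent = (l, r) => digestMulti([l, r]);

    // two-level example matching StorageProver(..., ..., 2)
    const leaves = [[1n, 2n], [3n, 4n], [5n, 6n], [7n, 8n]].map(leaf);
    const root = parent(parent(leaves[0], leaves[1]), parent(leaves[2], leaves[3]));
    console.log(root);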

circuits/tree.circom (deleted)

@@ -1,45 +0,0 @@
pragma circom 2.0.0;

//based on Semaphore code
include "../node_modules/circomlib/circuits/poseidon.circom";
include "../node_modules/circomlib/circuits/mux1.circom";

template MerkleTreeInclusionProof(nLevels) {
    signal input leaf;
    signal input pathIndices[nLevels];
    signal input siblings[nLevels];

    signal output root;

    component hashers[nLevels];
    component mux[nLevels];

    signal hashes[nLevels + 1];
    hashes[0] <== leaf;

    for (var i = 0; i < nLevels; i++) {
        pathIndices[i] * (1 - pathIndices[i]) === 0;

        hashers[i] = Poseidon(2);
        mux[i] = MultiMux1(2);

        mux[i].c[0][0] <== hashes[i];
        mux[i].c[0][1] <== siblings[i];

        mux[i].c[1][0] <== siblings[i];
        mux[i].c[1][1] <== hashes[i];

        mux[i].s <== pathIndices[i];

        hashers[i].inputs[0] <== mux[i].out[0];
        hashers[i].inputs[1] <== mux[i].out[1];

        hashes[i + 1] <== hashers[i].out;
    }

    root <== hashes[nLevels];
}

//component main {public [leaf]} = MerkleTreeInclusionProof(10);
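
For context, this deleted template was driven from JS via @zk-kit/incremental-merkle-tree and circomlibjs's poseidon, as in the (also deleted) MerkleTree test further down; a minimal sketch (not part of the commit) of producing its inputs:

    const { IncrementalMerkleTree } = require("@zk-kit/incremental-merkle-tree");
    const { poseidon } = require("circomlibjs");

    // binary Poseidon tree with zero padding, matching MerkleTreeInclusionProof(10)
    const tree = new IncrementalMerkleTree(poseidon, 10, BigInt(0), 2);
    tree.insert(BigInt(42));

    // createProof(0) yields { root, leaf, siblings, pathIndices };
    // leaf, siblings and pathIndices map directly onto the template's inputs
    const proof = tree.createProof(0);
    console.log(proof.root);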

package-lock.json (generated, new file, 9106 lines): diff suppressed because it is too large.

package.json

@@ -1,6 +1,5 @@
 {
   "name": "codex-zk",
-  "type": "module",
   "version": "0.1.0",
   "description": "",
   "main": "index.js",
@@ -11,9 +10,11 @@
   "license": "ISC",
   "dependencies": {
     "@zk-kit/protocols": "^1.11.1",
+    "chai-as-promised": "^7.1.1",
     "circom_tester": "^0.0.19",
-    "circomlib": "^2.0.2",
+    "circomlib": "^2.0.5",
     "circomlibjs": "^0.0.8",
+    "merkletreejs": "^0.3.9",
     "mocha": "^10.1.0",
     "snarkjs": "^0.5.0"
   }

(deleted circuit file)

@@ -1,16 +0,0 @@
template Multiplier(n) {
    signal private input a;
    signal private input b;
    signal output c;

    signal int[n];

    int[0] <== a*a + b;
    for (var i = 1; i < n; i++) {
        int[i] <== int[i-1]*int[i-1] + b;
    }

    c <== int[n-1];
}

component main = Multiplier(1000);

(two binary files changed; contents not shown)

test/circuits/storer_test.circom (new)

@@ -0,0 +1,5 @@
pragma circom 2.1.0;

include "../../circuits/storer.circom";

component main = StorageProver(32, 4, 2);
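
A sketch (not part of the commit) of an input object matching these parameters, 4 chunks of 32 field elements over a 2-level tree, built the same way test/storer.js below builds its witness:

    const crypto = require("crypto");
    const mimc7 = require("circomlibjs").mimc7;

    const key = BigInt(2);
    const digestMulti = (buf) => mimc7.multiHash(buf, key);
    const randChunk = () =>
        Array.from(crypto.randomBytes(32).values()).map((v) => BigInt(v));

    // StorageProver(32, 4, 2): 4 chunks of 32 symbols, 2-level Merkle tree
    const chunks = [randChunk(), randChunk(), randChunk(), randChunk()];
    const hashes = chunks.map(digestMulti);
    const parentL = digestMulti([hashes[0], hashes[1]]);
    const parentR = digestMulti([hashes[2], hashes[3]]);

    const input = {
        chunks: chunks,          // [4][32]
        siblings: [              // [4][2]: sibling, then uncle
            [hashes[1], parentR], [hashes[0], parentR],
            [hashes[3], parentL], [hashes[2], parentL]],
        hashes: hashes,          // [4]
        path: [0, 1, 2, 3],      // leaf indices
        root: digestMulti([parentL, parentR]),
        salt: digestMulti(randChunk()),
    };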

(deleted test file)

@@ -1,172 +0,0 @@
import * as snarkjs from "snarkjs";
import { buildBn128, buildBls12381 } from "ffjavascript";
//import { getCurveFromName } from "snarkjs/curves.js";
import assert from "assert";
import path from "path";

const typeSizes = {
    "undefined": () => 0,
    "boolean": () => 4,
    "number": () => 8,
    "string": item => 2 * item.length,
    "object": item => !item ? 0 : Object
        .keys(item)
        .reduce((total, key) => sizeOf(key) + sizeOf(item[key]) + total, 0)
};

const sizeOf = value => typeSizes[typeof value](value);

describe("Full process", function () {
    this.timeout(1000000000);

    const r1csPath = path.join("test", "circuit", "circuit.r1cs");
    const wasmPath = path.join("test", "circuit", "circuit.wasm");

    let curve;
    const ptau_0 = {type: "mem"};
    const ptau_1 = {type: "mem"};
    const ptau_2 = {type: "mem"};
    const ptau_beacon = {type: "mem"};
    const ptau_final = {type: "mem"};
    const ptau_challenge2 = {type: "mem"};
    const ptau_response2 = {type: "mem"};
    const zkey_0 = {type: "mem"};
    const zkey_1 = {type: "mem"};
    const zkey_2 = {type: "mem"};
    const zkey_final = {type: "mem"};
    const zkey_plonk = {type: "mem"};
    const bellman_1 = {type: "mem"};
    const bellman_2 = {type: "mem"};
    let vKey;
    let vKeyPlonk;
    const wtns = {type: "mem"};
    let proof;
    let publicSignals;

    before( async () => {
        curve = await buildBn128();
        // curve.Fr.s = 10;
    });

    after( async () => {
        await curve.terminate();
        // console.log(process._getActiveHandles());
        // console.log(process._getActiveRequests());
    });

    it ("powersoftau new", async () => {
        await snarkjs.powersOfTau.newAccumulator(curve, 11, ptau_0);
    });

    it ("powersoftau contribute ", async () => {
        await snarkjs.powersOfTau.contribute(ptau_0, ptau_1, "C1", "Entropy1");
    });

    it ("powersoftau export challenge", async () => {
        await snarkjs.powersOfTau.exportChallenge(ptau_1, ptau_challenge2);
    });

    it ("powersoftau challenge contribute", async () => {
        await snarkjs.powersOfTau.challengeContribute(curve, ptau_challenge2, ptau_response2, "Entropy2");
    });

    it ("powersoftau import response", async () => {
        await snarkjs.powersOfTau.importResponse(ptau_1, ptau_response2, ptau_2, "C2", true);
    });

    it ("powersoftau beacon", async () => {
        await snarkjs.powersOfTau.beacon(ptau_2, ptau_beacon, "B3", "0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20", 10);
    });

    it ("powersoftau prepare phase2", async () => {
        await snarkjs.powersOfTau.preparePhase2(ptau_beacon, ptau_final);
    });

    it ("powersoftau verify", async () => {
        const res = await snarkjs.powersOfTau.verify(ptau_final);
        assert(res);
    });

    it ("groth16 setup", async () => {
        await snarkjs.zKey.newZKey(r1csPath, ptau_final, zkey_0);
        console.warn(zkey_0);
    });

    it ("zkey contribute ", async () => {
        await snarkjs.zKey.contribute(zkey_0, zkey_1, "p2_C1", "pa_Entropy1");
    });

    it ("zkey export bellman", async () => {
        await snarkjs.zKey.exportBellman(zkey_1, bellman_1);
    });

    it ("zkey bellman contribute", async () => {
        await snarkjs.zKey.bellmanContribute(curve, bellman_1, bellman_2, "pa_Entropy2");
    });

    it ("zkey import bellman", async () => {
        await snarkjs.zKey.importBellman(zkey_1, bellman_2, zkey_2, "C2");
    });

    it ("zkey beacon", async () => {
        await snarkjs.zKey.beacon(zkey_2, zkey_final, "B3", "0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20", 10);
    });

    it ("zkey verify r1cs", async () => {
        const res = await snarkjs.zKey.verifyFromR1cs(r1csPath, ptau_final, zkey_final);
        assert(res);
    });

    it ("zkey verify init", async () => {
        const res = await snarkjs.zKey.verifyFromInit(zkey_0, ptau_final, zkey_final);
        assert(res);
    });

    it ("zkey export verificationkey", async () => {
        vKey = await snarkjs.zKey.exportVerificationKey(zkey_final);
    });

    it ("witness calculate", async () => {
        await snarkjs.wtns.calculate({a: 11, b: 2}, wasmPath, wtns);
        //console.warn("witness: ", wtns);
        console.warn("witness: ", wtns.data.length, " bytes");
        console.warn("witness: ", sizeOf(wtns.data), " bytes");
    });

    it ("groth16 proof", async () => {
        const res = await snarkjs.groth16.prove(zkey_final, wtns);
        proof = res.proof;
        publicSignals = res.publicSignals;
        console.warn("proof: ", sizeOf(proof), " bytes");
        console.warn("public: ", sizeOf(publicSignals), " bytes");
    });

    it ("groth16 verify", async () => {
        const res = await snarkjs.groth16.verify(vKey, publicSignals, proof);
        assert(res == true);
    });

    it ("plonk setup", async () => {
        await snarkjs.plonk.setup(r1csPath, ptau_final, zkey_plonk);
    });

    it ("zkey export verificationkey", async () => {
        vKey = await snarkjs.zKey.exportVerificationKey(zkey_plonk);
    });

    it ("plonk proof", async () => {
        const res = await snarkjs.plonk.prove(zkey_plonk, wtns);
        proof = res.proof;
        publicSignals = res.publicSignals;
        console.warn("proof: ", proof, " bytes");
        console.warn("public: ", publicSignals, " bytes");
    });

    it ("plonk verify", async () => {
        const res = await snarkjs.plonk.verify(vKey, publicSignals, proof);
        assert(res == true);
    });
});

(deleted test file)

@@ -1,223 +0,0 @@
import { IncrementalMerkleTree } from "@zk-kit/incremental-merkle-tree"
import { poseidon } from "circomlibjs"
import assert from "assert";
import path from "path";
import * as snarkjs from "snarkjs";
import wasm from "circom_tester";
import { buildBn128, buildBls12381 } from "ffjavascript";

describe("MerkleTree", function () {
    this.timeout(1000000000);

    // these parameters should match the circuit
    const depth = 21 // depth of full tree from data: sum of block hash depth and dataset treehash depth
    const hashFn = poseidon
    const zeroValue = BigInt(0) // padding symbol in the Merkle Tree
    const arity = 2
    const queryLen = 1
    const numberOfLeaves = 2 ** 7 // example dataset size

    const circuitPath = path.join("circuits", "storer.circom");
    const r1csPath = path.join("circuits", "storer.r1cs");
    const wasmPath = path.join("circuits", "storer_js", "storer.wasm");

    let curve;
    const ptau_0 = {type: "mem"};
    const ptau_1 = {type: "mem"};
    const ptau_2 = {type: "mem"};
    const ptau_beacon = {type: "mem"};
    const ptau_final = {type: "mem"};
    const ptau_challenge2 = {type: "mem"};
    const ptau_response2 = {type: "mem"};
    const zkey_0 = {type: "mem"};
    const zkey_1 = {type: "mem"};
    const zkey_2 = {type: "mem"};
    const zkey_final = {type: "mem"};
    const zkey_plonk = {type: "mem"};
    const bellman_1 = {type: "mem"};
    const bellman_2 = {type: "mem"};
    let vKey;
    let vKeyPlonk;
    const wtns = {type: "mem"};
    let proof;
    let publicSignals;

    before( async () => {
        curve = await buildBn128();
    });

    after( async () => {
        await curve.terminate();
    });

    // create Merkle Tree for example dataset
    let tree
    it ("generate Merkle Tree from data", () => {
        tree = new IncrementalMerkleTree(hashFn, depth, zeroValue, arity)
        for (let i = 0; i < numberOfLeaves; i += 1) {
            tree.insert(BigInt(i + 1))
        }
    })

    const index = 0

    // Create an example Merkle Proof
    let merkleProof
    it ("create Merkle proof", () => {
        merkleProof = tree.createProof(index)
    })

    // Verify the above proof just to be on the safe side
    it ("verify Merkle proof", () => {
        assert(tree.verifyProof(merkleProof))
        // console.warn(merkleProof)
    })

    let cir
    it ("compile circuit", async () => {
        cir = await wasm.wasm(circuitPath)
        // console.warn(cir)
    })

    const chunks = [[1, 2]]
    let circuitInputs
    it ("witness calculate", async () => {
        // inputs defined in circuit:
        //   signal input chunks[qLen][blockSize];
        //   signal input chunkHashes[qLen];
        //   signal input indices[qLen];
        //   signal input treeSiblings[qLen][nLevels];
        //   signal input root;
        circuitInputs = {
            chunks: chunks,
            chunkHashes: [hashFn(chunks[index])],
            indices: [index],
            treeSiblings: [merkleProof.siblings.slice(1)],
            root: merkleProof.root
        }
        await snarkjs.wtns.calculate(circuitInputs, wasmPath, wtns);
        // await cir.calculateWitness(circuitInputs, true);
        // console.warn("witness: ", wtns);
        // console.warn("witness: ", wtns.data.length, " bytes");
        // console.warn("witness: ", sizeOf(wtns.data), " bytes");
    })

    // set ceremony size
    // The second parameter is the log2 of the maximum number of constraints the
    // ceremony can accept: in this case 2 ** 13 = 8192. The maximum value supported
    // here is 28, which means you can use snarkjs to securely generate zk-snark
    // parameters for circuits with up to 2 ** 28 (~268 million) constraints.
    // see https://github.com/iden3/snarkjs/blob/master/README.md#1-start-a-new-powers-of-tau-ceremony
    const power = 13
    it ("powersoftau new", async () => {
        await snarkjs.powersOfTau.newAccumulator(curve, power, ptau_0);
    });

    it ("powersoftau contribute ", async () => {
        await snarkjs.powersOfTau.contribute(ptau_0, ptau_1, "C1", "Entropy1");
    });

    it ("powersoftau export challenge", async () => {
        await snarkjs.powersOfTau.exportChallenge(ptau_1, ptau_challenge2);
    });

    it ("powersoftau challenge contribute", async () => {
        await snarkjs.powersOfTau.challengeContribute(curve, ptau_challenge2, ptau_response2, "Entropy2");
    });

    it ("powersoftau import response", async () => {
        await snarkjs.powersOfTau.importResponse(ptau_1, ptau_response2, ptau_2, "C2", true);
    });

    it ("powersoftau beacon", async () => {
        await snarkjs.powersOfTau.beacon(ptau_2, ptau_beacon, "B3", "0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20", 10);
    });

    it ("powersoftau prepare phase2", async () => {
        await snarkjs.powersOfTau.preparePhase2(ptau_beacon, ptau_final);
    });

    it ("powersoftau verify", async () => {
        const res = await snarkjs.powersOfTau.verify(ptau_final);
        assert(res);
    });

    it ("groth16 setup", async () => {
        await snarkjs.zKey.newZKey(r1csPath, ptau_final, zkey_0);
        console.warn(zkey_0);
    });

    it ("zkey contribute ", async () => {
        await snarkjs.zKey.contribute(zkey_0, zkey_1, "p2_C1", "pa_Entropy1");
    });

    it ("zkey export bellman", async () => {
        await snarkjs.zKey.exportBellman(zkey_1, bellman_1);
    });

    it ("zkey bellman contribute", async () => {
        await snarkjs.zKey.bellmanContribute(curve, bellman_1, bellman_2, "pa_Entropy2");
    });

    it ("zkey import bellman", async () => {
        await snarkjs.zKey.importBellman(zkey_1, bellman_2, zkey_2, "C2");
    });

    it ("zkey beacon", async () => {
        await snarkjs.zKey.beacon(zkey_2, zkey_final, "B3", "0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20", 10);
    });

    it ("zkey verify r1cs", async () => {
        const res = await snarkjs.zKey.verifyFromR1cs(r1csPath, ptau_final, zkey_final);
        assert(res);
    });

    it ("zkey verify init", async () => {
        const res = await snarkjs.zKey.verifyFromInit(zkey_0, ptau_final, zkey_final);
        assert(res);
    });

    it ("zkey export verificationkey", async () => {
        vKey = await snarkjs.zKey.exportVerificationKey(zkey_final);
    });

    it ("witness calculate", async () => {
        await snarkjs.wtns.calculate(circuitInputs, wasmPath, wtns);
    });

    it ("groth16 proof", async () => {
        const res = await snarkjs.groth16.prove(zkey_final, wtns);
        proof = res.proof;
        publicSignals = res.publicSignals;
    });

    it ("groth16 verify", async () => {
        const res = await snarkjs.groth16.verify(vKey, publicSignals, proof);
        assert(res == true);
    });

    it ("plonk setup", async () => {
        await snarkjs.plonk.setup(r1csPath, ptau_final, zkey_plonk);
    });

    it ("zkey export verificationkey", async () => {
        vKey = await snarkjs.zKey.exportVerificationKey(zkey_plonk);
    });

    it ("plonk proof", async () => {
        const res = await snarkjs.plonk.prove(zkey_plonk, wtns);
        proof = res.proof;
        publicSignals = res.publicSignals;
        console.warn("proof: ", proof, " bytes");
        console.warn("public: ", publicSignals, " bytes");
    });

    it ("plonk verify", async () => {
        const res = await snarkjs.plonk.verify(vKey, publicSignals, proof);
        assert(res == true);
    });
})
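
For orientation, the ceremony-plus-proof pipeline exercised above condenses to roughly the following with snarkjs ^0.5 (a sketch, not part of the commit; the paths and the single contribution are assumptions, and a real ceremony would add more contributions and a beacon):

    const snarkjs = require("snarkjs");
    const { buildBn128 } = require("ffjavascript");

    async function proveAndVerify(input) {
        const curve = await buildBn128();
        const ptau0 = {type: "mem"}, ptau1 = {type: "mem"}, ptauFinal = {type: "mem"};
        const zkey = {type: "mem"}, wtns = {type: "mem"};

        // phase 1: powers of tau (up to 2 ** 13 constraints), one contribution
        await snarkjs.powersOfTau.newAccumulator(curve, 13, ptau0);
        await snarkjs.powersOfTau.contribute(ptau0, ptau1, "C1", "Entropy1");
        await snarkjs.powersOfTau.preparePhase2(ptau1, ptauFinal);

        // phase 2: circuit-specific key, then prove and verify
        await snarkjs.zKey.newZKey("circuits/storer.r1cs", ptauFinal, zkey);
        const vKey = await snarkjs.zKey.exportVerificationKey(zkey);
        await snarkjs.wtns.calculate(input, "circuits/storer_js/storer.wasm", wtns);
        const { proof, publicSignals } = await snarkjs.groth16.prove(zkey, wtns);

        const ok = await snarkjs.groth16.verify(vKey, publicSignals, proof);
        await curve.terminate();
        return ok;
    }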

(deleted circuit file)

@@ -1,17 +0,0 @@
template TestPlonk() {
    signal input a;
    signal private input b;
    signal output c;

    signal i1;
    signal i2;
    signal i4;

    i1 <== a + b + 3;
    i2 <== i1*i1;
    i4 <== i2*i2;

    c <== i1*i4;
}

component main = TestPlonk();

(two binary files changed; contents not shown)

(deleted test file)

@@ -1,175 +0,0 @@
const chai = require("chai");
const path = require("path");
const crypto = require("crypto");

const F1Field = require("ffjavascript").F1Field;
const Scalar = require("ffjavascript").Scalar;
exports.p = Scalar.fromString("21888242871839275222246405745257275088548364400416034343698204186575808495617");
const Fr = new F1Field(exports.p);

const assert = chai.assert;

const sha256 = require("./helpers/sha256");

const wasm_tester = require("circom_tester").wasm;
const snarkjs = require("snarkjs");
const buildBn128 = require("ffjavascript").buildBn128;
// const printSignal = require("./helpers/printsignal");

function buffer2bitArray(b) {
    const res = [];
    for (let i = 0; i < b.length; i++) {
        for (let j = 0; j < 8; j++) {
            res.push((b[i] >> (7 - j)) & 1);
        }
    }
    return res;
}

function bitArray2buffer(a) {
    const len = Math.floor((a.length - 1) / 8) + 1;
    const b = Buffer.alloc(len);
    for (let i = 0; i < a.length; i++) {
        const p = Math.floor(i / 8);
        b[p] = b[p] | (Number(a[i]) << (7 - (i % 8)));
    }
    return b;
}

describe("SHA256 test", function () {
    this.timeout(1000000);

    let curve;
    const ptau_0 = {type: "mem"};
    const ptau_1 = {type: "mem"};
    const ptau_2 = {type: "mem"};
    const ptau_beacon = {type: "mem"};
    const ptau_final = {type: "mem"};
    const ptau_challenge2 = {type: "mem"};
    const ptau_response2 = {type: "mem"};
    const zkey_0 = {type: "mem"};
    const zkey_1 = {type: "mem"};
    const zkey_2 = {type: "mem"};
    const zkey_final = {type: "mem"};
    const zkey_plonk = {type: "mem"};
    const bellman_1 = {type: "mem"};
    const bellman_2 = {type: "mem"};
    let vKey;
    let vKeyPlonk;
    const wtns = {type: "mem"};
    let proof;
    let publicSignals;
    var cir;

    before( async () => {
        curve = await buildBn128();
        // curve.Fr.s = 10;
    });

    after( async () => {
        await curve.terminate();
        // console.log(process._getActiveHandles());
        // console.log(process._getActiveRequests());
    });

    it ("powersoftau new", async () => {
        await snarkjs.powersOfTau.newAccumulator(curve, 11, ptau_0);
    });

    it ("powersoftau contribute ", async () => {
        await snarkjs.powersOfTau.contribute(ptau_0, ptau_1, "C1", "Entropy1");
    });

    it ("powersoftau export challenge", async () => {
        await snarkjs.powersOfTau.exportChallenge(ptau_1, ptau_challenge2);
    });

    it ("powersoftau challenge contribute", async () => {
        await snarkjs.powersOfTau.challengeContribute(curve, ptau_challenge2, ptau_response2, "Entropy2");
    });

    it ("powersoftau import response", async () => {
        await snarkjs.powersOfTau.importResponse(ptau_1, ptau_response2, ptau_2, "C2", true);
    });

    it ("powersoftau beacon", async () => {
        await snarkjs.powersOfTau.beacon(ptau_2, ptau_beacon, "B3", "0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f20", 10);
    });

    it ("powersoftau prepare phase2", async () => {
        await snarkjs.powersOfTau.preparePhase2(ptau_beacon, ptau_final);
    });

    it ("powersoftau verify", async () => {
        const res = await snarkjs.powersOfTau.verify(ptau_final);
        assert(res);
    });

    it("Should work bits to array and array to bits", async () => {
        const b = Buffer.alloc(64);
        for (let i = 0; i < 64; i++) {
            b[i] = i + 1;
        }
        const a = buffer2bitArray(b);
        const b2 = bitArray2buffer(a);
        assert.equal(b.toString("hex"), b2.toString("hex"), true);
    });

    it ("compile circuit 64 bytes (512 bits)", async () => {
        cir = await wasm_tester(path.join(__dirname, "../circuits", "sha256_test512.circom"));
    });

    it("Should calculate a hash of 2 compressor", async () => {
        const b = Buffer.alloc(64);
        for (let i = 0; i < 64; i++) {
            b[i] = i + 1;
        }
        const hash = crypto.createHash("sha256")
            .update(b)
            .digest("hex");

        const arrIn = buffer2bitArray(b);
        const witness = await cir.calculateWitness({ "in": arrIn }, true);
        console.warn("witness: ", witness.length, " bits");

        const arrOut = witness.slice(1, 257);
        const hash2 = bitArray2buffer(arrOut).toString("hex");

        assert.equal(hash, hash2);
    }).timeout(1000000);

    it ("compile circuit 640 bytes", async () => {
        cir = await wasm_tester(path.join(__dirname, "circuits", "sha256_test5120.circom"));
    });

    it("Should calculate a hash of 20 compressor", async () => {
        const b = Buffer.alloc(640);
        for (let i = 0; i < 640; i++) {
            b[i] = i + 1;
        }
        const hash = crypto.createHash("sha256")
            .update(b)
            .digest("hex");

        const arrIn = buffer2bitArray(b);
        const witness = await cir.calculateWitness({ "in": arrIn }, true);
        console.warn("witness: ", witness.length, " bits");

        const arrOut = witness.slice(1, 257);
        const hash2 = bitArray2buffer(arrOut).toString("hex");

        assert.equal(hash, hash2);
    }).timeout(1000000);
});

test/storer.js (new file, 113 lines)

@@ -0,0 +1,113 @@
const chai = require("chai");
const path = require("path");
const crypto = require("crypto");

const F1Field = require("ffjavascript").F1Field;
const Scalar = require("ffjavascript").Scalar;
const mimc7 = require("circomlibjs").mimc7;
const mimcsponge = require("circomlibjs").mimcsponge;
const { MerkleTree } = require("merkletreejs");
const {c} = require("circom_tester");
const chaiAsPromised = require('chai-as-promised');

chai.use(chaiAsPromised);

exports.p = Scalar.fromString("21888242871839275222246405745257275088548364400416034343698204186575808495617");
const Fr = new F1Field(exports.p);

const assert = chai.assert;
const expect = chai.expect;

const wasm_tester = require("circom_tester").wasm;

const key = BigInt(2); // must match the circuit's MiMC key (hash.k <== 2)
const digest = (buf) => mimc7.hash(buf, key);
const digestMulti = (buf) => mimc7.multiHash(buf, key);

function merkelize(leafs) {
    // simple merkle root (treehash) generator
    // unbalanced trees have their last leaf duplicated
    var merkle = leafs;

    while (merkle.length > 1) {
        // duplicate the last leaf if this level is unbalanced
        if (merkle.length % 2 == 1) {
            merkle = merkle.concat([merkle[merkle.length - 1]]);
        }

        var newMerkle = [];

        var i = 0;
        while (i < merkle.length) {
            newMerkle.push(digestMulti([merkle[i], merkle[i + 1]]));
            i += 2;
        }

        merkle = newMerkle;
    }

    return merkle[0];
}

describe("Storer test", function () {
    this.timeout(100000);

    const a = Array.from(crypto.randomBytes(32).values()).map((v) => BigInt(v));
    const aHash = digestMulti(a);
    const b = Array.from(crypto.randomBytes(32).values()).map((v) => BigInt(v));
    const bHash = digestMulti(b);
    const c = Array.from(crypto.randomBytes(32).values()).map((v) => BigInt(v));
    const cHash = digestMulti(c);
    const d = Array.from(crypto.randomBytes(32).values()).map((v) => BigInt(v));
    const dHash = digestMulti(d);
    const salt = Array.from(crypto.randomBytes(32).values()).map((v) => BigInt(v));
    const saltHash = digestMulti(salt);

    it("Should merkelize", async () => {
        let root = merkelize([aHash, bHash]);
        let hash = digestMulti([aHash, bHash]);

        assert.equal(hash, root);
    });

    it("Should verify chunk is correct and part of dataset", async () => {
        const cir = await wasm_tester(path.join(__dirname, "./circuits", "storer_test.circom"));

        const root = merkelize([aHash, bHash, cHash, dHash]);

        const parentHashL = digestMulti([aHash, bHash]);
        const parentHashR = digestMulti([cHash, dHash]);

        await cir.calculateWitness({
            "chunks": [[a], [b], [c], [d]],
            "siblings": [
                [bHash, parentHashR], [aHash, parentHashR],
                [dHash, parentHashL], [cHash, parentHashL]],
            "hashes": [aHash, bHash, cHash, dHash],
            "path": [0, 1, 2, 3],
            "root": root,
            "salt": saltHash,
        }, true);
    }).timeout(100000);

    it("Should fail to verify a chunk that is not part of the dataset", async () => {
        const cir = await wasm_tester(path.join(__dirname, "./circuits", "storer_test.circom"));

        const root = merkelize([aHash, bHash, cHash, dHash]);

        const parentHashL = digestMulti([aHash, bHash]);
        const parentHashR = digestMulti([cHash, dHash]);

        const fn = async () => {
            return await cir.calculateWitness({
                "chunks": [[salt], [b], [c], [d]], // first chunk replaced with the salt
                "siblings": [
                    [bHash, parentHashR], [aHash, parentHashR],
                    [dHash, parentHashL], [cHash, parentHashL]],
                "hashes": [saltHash, bHash, cHash, dHash],
                "path": [0, 1, 2, 3],
                "root": root, // root still commits to the original chunks
                "salt": saltHash,
            }, true);
        }

        return assert.isRejected(
            fn(), Error,
            /Error: Error: Assert Failed.\nError in template StorageProver_7 line: 75/);
    }).timeout(100000);
});
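
A note on the failing case: chunks[0] and hashes[0] are mutually consistent (both derived from salt), so HashCheck passes; it is the root constraint `merkelizer[i].root === root` in StorageProver that fails, which is what the asserted error message points at. A standalone sketch (not part of the commit) of the root/path relation the circuit enforces:

    const mimc7 = require("circomlibjs").mimc7;
    const assert = require("assert");

    const digestMulti = (buf) => mimc7.multiHash(buf, BigInt(2));

    // four leaf hashes and the two-level root, as in the tests above
    const [aHash, bHash, cHash, dHash] = [1n, 2n, 3n, 4n].map((v) => digestMulti([v]));
    const parentL = digestMulti([aHash, bHash]);
    const parentR = digestMulti([cHash, dHash]);
    const root = digestMulti([parentL, parentR]);

    // walking chunk 0's path (siblings [bHash, parentR], path index 0)
    // reproduces the root, which is what MerkleProof checks in-circuit
    assert.equal(digestMulti([digestMulti([aHash, bHash]), parentR]), root);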