rework with poseidon (#3)
* rework with poseidon
* adding main template
* adding todo
* remove mimc
parent 4483440c27
commit e7b296ebbb
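Five files change. First, what is evidently the repository's .gitignore (the file name is inferred from its contents): cargo's generated entries for the Rust build directory and lockfile are appended after the existing node_modules/ line. The `# Added by cargo` banner appears twice, which suggests the tool was run against the file twice, commenting out the earlier /target entry on the second pass: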
@@ -1 +1,13 @@
 node_modules/
+
+# Added by cargo
+
+/target
+
+
+# Added by cargo
+#
+# already existing elements were commented out
+
+#/target
+/Cargo.lock
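Next, the new main template announced in the commit message; judging by the relative include, it sits next to storer.circom in the circuits directory. It instantiates StorageProver with BLOCK_SIZE = 32, QUERY_LEN = 4 and LEVELS = 2. In circom 2 the `{ public [root, salt] }` clause marks exactly those input signals as public, so every other witness input stays private: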
@@ -0,0 +1,5 @@
+pragma circom 2.1.0;
+
+include "./storer.circom";
+
+component main { public [root, salt] } = StorageProver(32, 4, 2);
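In storer.circom itself, the commented-out Poseidon include becomes live and both MiMC includes are dropped: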
@@ -1,8 +1,6 @@
 pragma circom 2.1.0;
 
-// include "../node_modules/circomlib/circuits/poseidon.circom";
-include "../node_modules/circomlib/circuits/mimc.circom";
-// include "../node_modules/circomlib/circuits/mimcsponge.circom";
+include "../node_modules/circomlib/circuits/poseidon.circom";
 include "../node_modules/circomlib/circuits/switcher.circom";
 include "../node_modules/circomlib/circuits/bitify.circom";
 
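The rework proper. In MerkleProof, MultiMiMC7(2, 91) gives way to Poseidon(2) (note the port rename: MiMC's in/k become Poseidon's inputs). HashCheck no longer hashes the whole block in a single MultiMiMC7 call; it now splits the block into chunks of 16 (circomlib's Poseidon template accepts at most 16 inputs, which is presumably what fixes the chunk size), hashes each chunk with Poseidon(CHUNK_SIZE), and folds the chunk digests with a second Poseidon(NUM_CHUNKS). The new min() helper bounds the final chunk, though with BLOCK_SIZE divisible by CHUNK_SIZE every chunk is full: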
@@ -26,25 +24,52 @@ template parallel MerkleProof(LEVELS) {
         switcher[i].R <== pathElements[i];
         switcher[i].sel <== indexBits.out[i];
 
-        // hasher[i] = Poseidon(2);
-        hasher[i] = MultiMiMC7(2, 91);
-        hasher[i].k <== 2;
-        hasher[i].in[0] <== switcher[i].outL;
-        hasher[i].in[1] <== switcher[i].outR;
+        hasher[i] = Poseidon(2);
+        hasher[i].inputs[0] <== switcher[i].outL;
+        hasher[i].inputs[1] <== switcher[i].outR;
     }
 
     root <== hasher[LEVELS - 1].out;
 }
 
+function min(arg1, arg2) {
+    return arg1 < arg2 ? arg1 : arg2;
+}
+
 template parallel HashCheck(BLOCK_SIZE) {
     signal input block[BLOCK_SIZE];
     signal input blockHash;
 
-    component hash = MultiMiMC7(BLOCK_SIZE, 91);
-    hash.in <== block;
-    hash.k <== 2;
+    // TODO: make CHUNK_SIZE a parameter
+    // Split array into chunks of size 16
+    var CHUNK_SIZE = 16;
+    var NUM_CHUNKS = BLOCK_SIZE / CHUNK_SIZE;
 
-    blockHash === hash.out; // assert that block matches hash
+    // Initialize an array to store hashes of each block
+    component hashes[NUM_CHUNKS];
+
+    // Loop over chunks and hash them using Poseidon()
+    for (var i = 0; i < NUM_CHUNKS; i++) {
+        var start = i * CHUNK_SIZE;
+        var end = min(start + CHUNK_SIZE, BLOCK_SIZE);
+        hashes[i] = Poseidon(CHUNK_SIZE);
+        for (var j = start; j < end; j++) {
+            hashes[i].inputs[j - start] <== block[j];
+        }
+    }
+
+    // Concatenate hashes into a single block
+    var concat[NUM_CHUNKS];
+    for (var i = 0; i < NUM_CHUNKS; i++) {
+        concat[i] = hashes[i].out;
+    }
+
+    // Hash concatenated array using Poseidon() again
+    component h = Poseidon(NUM_CHUNKS);
+    h.inputs <== concat;
+
+    // Assign output to hash signal
+    h.out === blockHash;
 }
 
 template StorageProver(BLOCK_SIZE, QUERY_LEN, LEVELS) {
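With BLOCK_SIZE = 32 the template therefore builds two Poseidon(16) components and one Poseidon(2). A minimal JavaScript sketch of the same two-level computation, reusing the circomlibjs poseidon import the tests below rely on (the helper name hashCheckReference is illustrative, not part of the commit):

const poseidon = require("circomlibjs/src/poseidon");

// Reference computation of the value HashCheck(32) constrains:
// hash 16-element chunks, then hash the chunk digests together.
function hashCheckReference(block) { // block: array of 32 BigInts
    const CHUNK_SIZE = 16;
    const chunkHashes = [];
    for (let i = 0; i < block.length; i += CHUNK_SIZE) {
        chunkHashes.push(poseidon(block.slice(i, i + CHUNK_SIZE)));
    }
    return poseidon(chunkHashes);
}

One caveat: for a single-chunk block the circuit still applies the outer Poseidon(1) to the lone chunk digest, while the test helper digest() below returns that digest directly, so the two appear to diverge there; the tests only use 32-element blocks, which take the two-chunk path on both sides.

The test harness circuit gains the same public-signal list as the new main template: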
@@ -2,4 +2,4 @@ pragma circom 2.1.0;
 
 include "../../circuits/storer.circom";
 
-component main = StorageProver(32, 4, 2);
+component main { public [root, salt] } = StorageProver(32, 4, 2);
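On the JavaScript side, the mimc7 and mimcsponge imports go away (along with an apparently unused merkletreejs import), circomlibjs' poseidon comes in, the wasm_tester require moves up with the other imports, and the field modulus is no longer exported. The key-based digest/digestMulti arrow functions are replaced by a single digest() that mirrors the circuit's chunking: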
@@ -3,25 +3,33 @@ const path = require("path");
 const crypto = require("crypto");
 const F1Field = require("ffjavascript").F1Field;
 const Scalar = require("ffjavascript").Scalar;
-const mimc7 = require("circomlibjs").mimc7;
-const mimcsponge = require("circomlibjs").mimcsponge;
-const { MerkleTree } = require("merkletreejs");
 const {c} = require("circom_tester");
 const chaiAsPromised = require('chai-as-promised');
+const poseidon = require("circomlibjs/src/poseidon");
+const wasm_tester = require("circom_tester").wasm;
 
 chai.use(chaiAsPromised);
 
-exports.p = Scalar.fromString("21888242871839275222246405745257275088548364400416034343698204186575808495617");
-const Fr = new F1Field(exports.p);
+const p = Scalar.fromString("21888242871839275222246405745257275088548364400416034343698204186575808495617");
+const Fr = new F1Field(p);
 
 const assert = chai.assert;
 const expect = chai.expect;
 
-const wasm_tester = require("circom_tester").wasm;
-const key = BigInt(2);
-
-const digest = (buf) => mimc7.hash(buf, key);
-const digestMulti = (buf) => mimc7.multiHash(buf, key);
+function digest(input, chunkSize = 16) {
+    let chunks = Math.ceil(input.length / chunkSize);
+    let concat = [];
+
+    for (let i = 0; i < chunks; i++) {
+        concat.push(poseidon(input.slice(i * chunkSize, Math.min((i + 1) * chunkSize, input.length))));
+    }
+
+    if (concat.length > 1) {
+        return poseidon(concat);
+    }
+
+    return concat[0]
+}
 
 function merkelize(leafs) {
     // simple merkle root (treehash) generator
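Illustration only (not in the commit): for the 32-element blocks the tests use, digest() reduces to hashing the two 16-element chunk digests together.

const crypto = require("crypto");
const poseidon = require("circomlibjs/src/poseidon");

const block = Array.from(crypto.randomBytes(32).values()).map((v) => BigInt(v));
const left = poseidon(block.slice(0, 16));   // first 16-element chunk
const right = poseidon(block.slice(16, 32)); // second 16-element chunk
// digest(block) === poseidon([left, right])

merkelize() now reduces each pair of nodes with the new digest(). Only the hash calls change; the odd-length branch keeps its pre-existing quirks (newMerkle.add is not an Array method, and it pairs merkle[merkle.length - 2] with itself):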
@@ -33,12 +41,12 @@ function merkelize(leafs) {
 
     var i = 0;
     while (i < merkle.length) {
-        newMerkle.push(digestMulti([merkle[i], merkle[i + 1]]));
+        newMerkle.push(digest([merkle[i], merkle[i + 1]]));
         i += 2;
     }
 
     if (merkle.length % 2 == 1) {
-        newMerkle.add(digestMulti([merkle[merkle.length - 2], merkle[merkle.length - 2]]));
+        newMerkle.add(digest([merkle[merkle.length - 2], merkle[merkle.length - 2]]));
     }
 
     merkle = newMerkle;
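The fixtures follow suit: a, b, c, d and salt are random 32-byte blocks whose hashes now come from the chunked Poseidon digest(). The first test checks that a two-leaf Merkle root is simply the digest of the pair: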
@@ -51,19 +59,19 @@ describe("Storer test", function () {
    this.timeout(100000);
 
    const a = Array.from(crypto.randomBytes(32).values()).map((v) => BigInt(v));
-    const aHash = digestMulti(a);
+    const aHash = digest(a);
    const b = Array.from(crypto.randomBytes(32).values()).map((v) => BigInt(v));
-    const bHash = digestMulti(b);
+    const bHash = digest(b);
    const c = Array.from(crypto.randomBytes(32).values()).map((v) => BigInt(v));
-    const cHash = digestMulti(c);
+    const cHash = digest(c);
    const d = Array.from(crypto.randomBytes(32).values()).map((v) => BigInt(v));
-    const dHash = digestMulti(d);
+    const dHash = digest(d);
    const salt = Array.from(crypto.randomBytes(32).values()).map((v) => BigInt(v));
-    const saltHash = digestMulti(salt);
+    const saltHash = digest(salt);
 
    it("Should merkelize", async () => {
        let root = merkelize([aHash, bHash]);
-        let hash = digestMulti([aHash, bHash]);
+        let hash = digest([aHash, bHash]);
 
        assert.equal(hash, root);
    });
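The positive witness test is unchanged in substance: root = digest([digest([aHash, bHash]), digest([cHash, dHash])]), and each leaf's siblings are its sibling leaf hash plus the opposite parent. Besides the digest() rename, the commit only reflows the siblings literal: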
@@ -73,12 +81,16 @@ describe("Storer test", function () {
 
        const root = merkelize([aHash, bHash, cHash, dHash]);
 
-        const parentHashL = digestMulti([aHash, bHash]);
-        const parentHashR = digestMulti([cHash, dHash]);
+        const parentHashL = digest([aHash, bHash]);
+        const parentHashR = digest([cHash, dHash]);
 
        await cir.calculateWitness({
            "chunks": [[a], [b], [c], [d]],
-            "siblings": [[bHash, parentHashR], [aHash, parentHashR], [dHash, parentHashL], [cHash, parentHashL]],
+            "siblings": [
+                [bHash, parentHashR],
+                [aHash, parentHashR],
+                [dHash, parentHashL],
+                [cHash, parentHashL]],
            "hashes": [aHash, bHash, cHash, dHash],
            "path": [0, 1, 2, 3],
            "root": root,
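Finally the negative test, which already fed salt in place of the first chunk (with saltHash supplied to match) while keeping the honest root, so witness generation should fail. Here too the substance is the digest() rename; the chunks and siblings literals are reflowed and the bad chunk gets a comment: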
@@ -91,13 +103,21 @@ describe("Storer test", function () {
 
        const root = merkelize([aHash, bHash, cHash, dHash]);
 
-        const parentHashL = digestMulti([aHash, bHash]);
-        const parentHashR = digestMulti([cHash, dHash]);
+        const parentHashL = digest([aHash, bHash]);
+        const parentHashR = digest([cHash, dHash]);
 
        const fn = async () => {
            return await cir.calculateWitness({
-                "chunks": [[salt], [b], [c], [d]],
-                "siblings": [[bHash, parentHashR], [aHash, parentHashR], [dHash, parentHashL], [cHash, parentHashL]],
+                "chunks": [
+                    [salt], // wrong chunk
+                    [b],
+                    [c],
+                    [d]],
+                "siblings": [
+                    [bHash, parentHashR],
+                    [aHash, parentHashR],
+                    [dHash, parentHashL],
+                    [cHash, parentHashL]],
                "hashes": [saltHash, bHash, cHash, dHash],
                "path": [0, 1, 2, 3],
                "root": root,