Mirror of https://github.com/codex-storage/codex-storage-proofs.git (synced 2025-01-25 01:59:23 +00:00)
pragma circom 2.1.0;

include "../node_modules/circomlib/circuits/poseidon.circom";
include "../node_modules/circomlib/circuits/switcher.circom";
include "../node_modules/circomlib/circuits/bitify.circom";

include "./poseidon-digest.circom";
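
// MerkleProof recomputes a Merkle root from a leaf and its authentication path.
// Building blocks from circomlib: Num2Bits(n) decomposes the path index into bits,
// Switcher() orders the (current, sibling) pair according to each bit, and
// Poseidon(2) hashes the ordered pair at every level.
// (Descriptive comment added for clarity; not part of the original file.)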
template parallel MerkleProof(LEVELS) {
    signal input leaf;
    signal input pathElements[LEVELS];
    signal input pathIndices;

    signal output root;

    component switcher[LEVELS];
    component hasher[LEVELS];

    // decompose the path index into one left/right bit per tree level
    component indexBits = Num2Bits(LEVELS);
    indexBits.in <== pathIndices;

    for (var i = 0; i < LEVELS; i++) {
        switcher[i] = Switcher();

        // current node: the leaf at level 0, otherwise the previous level's hash
        switcher[i].L <== i == 0 ? leaf : hasher[i - 1].out;
        switcher[i].R <== pathElements[i];
        switcher[i].sel <== indexBits.out[i];

        hasher[i] = Poseidon(2);
        hasher[i].inputs[0] <== switcher[i].outL;
        hasher[i].inputs[1] <== switcher[i].outR;
    }

    root <== hasher[LEVELS - 1].out;
}
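
// A minimal usage sketch (added as an illustration, not part of the original
// file); all names and the tree depth of 8 are assumed values:
//
//   component mp = MerkleProof(8);
//   mp.leaf <== leafHash;
//   mp.pathElements <== siblings;  // 8 sibling hashes, leaf level first
//   mp.pathIndices <== index;      // leaf position; bit i picks left/right at level i
//   mp.root === expectedRoot;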

template StorageProver(BLOCK_SIZE, QUERY_LEN, LEVELS, DIGEST_CHUNK) {
    // BLOCK_SIZE: size of a block in symbols
    // QUERY_LEN: query length, i.e. number of indices to be proven
    // LEVELS: depth of the Merkle tree in the manifest
    // DIGEST_CHUNK: number of symbols to hash in one go
    signal input chunks[QUERY_LEN][BLOCK_SIZE]; // chunks to be proven
    signal input siblings[QUERY_LEN][LEVELS];   // sibling hashes of the chunks to be proven
    signal input path[QUERY_LEN];               // Merkle path indices of the chunks to be proven
    signal input hashes[QUERY_LEN];             // hashes of the chunks to be proven
    signal input root;                          // root of the Merkle tree
    signal input salt;                          // salt (block hash) to prevent preimage attacks

    signal saltSquare <== salt * salt; // might not be necessary, as the salt is already part of the public inputs

    // check that each queried chunk hashes to the claimed hash
    component hashers[QUERY_LEN];
    for (var i = 0; i < QUERY_LEN; i++) {
        hashers[i] = PoseidonDigest(BLOCK_SIZE, DIGEST_CHUNK);
        hashers[i].block <== chunks[i];
        hashers[i].hash === hashes[i];
    }

    // check that each chunk hash is a leaf of the Merkle tree with the given root
    component merkelizer[QUERY_LEN];
    for (var i = 0; i < QUERY_LEN; i++) {
        merkelizer[i] = MerkleProof(LEVELS);
        merkelizer[i].leaf <== hashes[i];
        merkelizer[i].pathElements <== siblings[i];
        merkelizer[i].pathIndices <== path[i];

        merkelizer[i].root === root;
    }
}
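
// A hypothetical top-level instantiation (illustration only, not part of the
// original file); the parameters and the choice of public signals are assumed
// here, the real values are picked by the project's build setup:
//
//   component main {public [root, salt]} = StorageProver(256, 10, 32, 16);
//
// i.e. 256 symbols per block, 10 queried chunks, a 32-level tree, and a
// Poseidon digest chunk of 16 symbols.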