codex-storage-proofs/circuits/poseidon-digest.circom
Dmitriy Ryajov ebef300064
Ark circom and rust ffi (#5)
* wip rust ffi

* proper test component instantiation

* adding quick&dirty poseidon implementation

* update gitignore

* gitignore

* adding rust circuit tests

* gitignore

* rename

* add storer tests

* move utils under circuit_tests

* fix storage proofs

* wip: ffi

* instantiate storer

* enable ark-serialize

* delete js tests

* update CI to run cargo tests

* keep the artifacts dir

* update .gitignore

* build circuits

* remove package json

* place built circuits in correct dirs

* update gitignore

* remove node

* fix ci

* updating readme

* storageproofs.rs to storage_proofs.rs

* flatten tests chunks by default

* add ffi

* fix digest

* minor fixes for ffi

* fix storer test

* use random data for chunks

* debug optimizations to speed witness generation

* clippy & other lint stuff

* add back missing unsafe blocks

* release mode disables constraint checks

* fix ffi

* fix hashes serialization

* make naming more consistent

* add missing pragma

* use correct circuits

* add todo

* add clarification to readme

* silence unused warning

* include constants file into exec

* remove unused imports
2023-04-12 16:17:00 -06:00

56 lines
1.5 KiB
Plaintext

pragma circom 2.1.0;
include "../node_modules/circomlib/circuits/poseidon.circom";
// Ceiling integer division: the smallest integer q with q * n >= x.
// Used to count how many DIGEST_CHUNK-sized groups cover a block.
function roundUpDiv(x, n) {
    var quotient = x \ n;  // integer (floor) division
    var remainder = x % n;
    // Bump the quotient by one when the division is not exact.
    return remainder > 0 ? quotient + 1 : quotient;
}
// Poseidon digest of a fixed-size block of field elements.
//
// The block is split into NUM_CHUNKS groups of DIGEST_CHUNK elements each
// (the final group is zero-padded when BLOCK_SIZE is not a multiple of
// DIGEST_CHUNK). Every group is hashed with Poseidon, and the resulting
// chunk hashes are hashed once more with Poseidon to yield the output.
//
// BLOCK_SIZE   - number of elements in the input block array
// DIGEST_CHUNK - number of elements hashed at once (usually 2)
template parallel PoseidonDigest(BLOCK_SIZE, DIGEST_CHUNK) {
    signal input block[BLOCK_SIZE]; // flat array of input field elements
    signal output hash;             // single field-element digest

    // How many DIGEST_CHUNK-sized groups are needed to cover the block.
    var NUM_CHUNKS = roundUpDiv(BLOCK_SIZE, DIGEST_CHUNK);

    // One Poseidon instance per chunk.
    component chunkHashers[NUM_CHUNKS];
    for (var c = 0; c < NUM_CHUNKS; c++) {
        chunkHashers[c] = Poseidon(DIGEST_CHUNK);
        var base = c * DIGEST_CHUNK;
        for (var k = 0; k < DIGEST_CHUNK; k++) {
            // Zero-pad past the end of the block so the last (possibly
            // partial) chunk still supplies DIGEST_CHUNK inputs.
            if (base + k < BLOCK_SIZE) {
                chunkHashers[c].inputs[k] <== block[base + k];
            } else {
                chunkHashers[c].inputs[k] <== 0;
            }
        }
    }

    // Collect the per-chunk hashes and fold them with one final Poseidon.
    var chunkHashes[NUM_CHUNKS];
    for (var c = 0; c < NUM_CHUNKS; c++) {
        chunkHashes[c] = chunkHashers[c].out;
    }
    component outer = Poseidon(NUM_CHUNKS);
    outer.inputs <== chunkHashes;
    hash <== outer.out;
}