Completed benchmarking

Basic benchmarking script

Added benchmarks for groth16

base of benches.rs

Update README.md (#2)

* Update README.md

* fix tests running example

---------

Co-authored-by: Dmitriy Ryajov <dryajov@gmail.com>

adding CI build (#4)

adding initial CI build for circuits

rework with poseidon (#3)

* rework with poseidon

* adding main template

* adding todo

* remove mimc

Ark circom and rust ffi (#5)

* wip rust ffi

* proper test component instantiation

* adding quick&dirty poseidon implementation

* update gitignore

* gitignore

* adding rust circuit tests

* gitignore

* rename

* add storer tests

* move utils under circuit_tests

* fix storage proofs

* wip: ffi

* instantiate storer

* enable ark-serialize

* delete js tests

* update CI to run cargo tests

* keep the artifacts dir

* update .gitignore

* build circuits

* remove package json

* place built circuits in correct dirs

* update gitignore

* remove node

* fix ci

* updating readme

* storageproofs.rs to storage_proofs.rs

* flatten tests chunks by default

* add ffi

* fix digest

* minor fixes for ffi

* fix storer test

* use random data for chunks

* debug optimizations to speed witness generation

* clippy & other lint stuff

* add back missing unsafe blocks

* release mode disables constraint checks

* fix ffi

* fix hashes serialization

* make naming more consistent

* add missing pragma

* use correct circuits

* add todo

* add clarification to readme

* silence unused warning

* include constants file into exec

* remove unused imports

extract poseidon to its own package (#8)

* extract poseidon to its own package

* move license to the bottom
Mikerah, 2023-06-13 15:24:39 -04:00
parent a0ba322cfd, commit afcaf4a600
26 changed files with 1222 additions and 102 deletions

.github/workflows/ci.yml (new file)

@@ -0,0 +1,34 @@
# This workflow runs the tests for the circuits.
name: Test

on:
  push:
    branches: [ "master" ]
  pull_request:
    branches: [ "master" ]

jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-node@v3
        with:
          node-version: 18
          cache: 'npm'
      - name: Cache circom
        id: cache-circom
        uses: actions/cache@v3
        with:
          path: ~/.cargo/bin/circom
          # Since the version of circom is specified in `scripts/install-circom.sh`,
          # as long as the file doesn't change we can reuse the circom binary.
          key: ${{ runner.os }}-circom-${{ hashFiles('./scripts/install-circom.sh') }}
      - name: Install circom if not cached
        run: sh ./scripts/install-circom.sh
      - run: npm ci
      - name: Build circuits
        run: sh ./scripts/circuit-prep.sh
      - name: Run the tests
        run: RUST_BACKTRACE=full cargo test

.gitignore

@@ -1 +1,18 @@
node_modules/
node_modules/
# Added by cargo
/target
# Added by cargo
#
# already existing elements were commented out
#/target
/Cargo.lock
.vscode
test/circuits/artifacts
out.log
src/circuit_tests/artifacts/*
!src/circuit_tests/artifacts/.keep

Cargo.toml (new file)

@@ -0,0 +1,49 @@
[package]
name = "codex-storage-proofs"
version = "0.1.0"
edition = "2021"

[profile.dev]
opt-level = 3

[lib]
crate-type = [
    "staticlib", # Ensure it gets compiled as a (static) C library
    # "cdylib",  # If you want a shared/dynamic C library (advanced)
    "lib",       # For downstream Rust dependents: `examples/`, `tests/` etc.
]

[dependencies]
ark-bn254 = { version = "0.3.0" }
ark-ec = { version = "0.3.0", default-features = false, features = [
    "parallel",
] }
ark-groth16 = { git = "https://github.com/arkworks-rs/groth16", rev = "765817f", features = [
    "parallel",
] }
ark-std = { version = "0.3.0", default-features = false, features = [
    "parallel",
] }
ark-serialize = { version = "0.3.0", default-features = false }
num-bigint = { version = "0.4", default-features = false, features = ["rand"] }
ark-circom = { git = "https://github.com/gakonst/ark-circom.git", rev = "35ce5a9", features = [
    "circom-2",
] }
ark-ff = { version = "0.3.0", features = ["std"] }
ruint = { version = "1.7.0", features = ["serde", "num-bigint", "ark-ff"] }
once_cell = "1.17.1"
serde = "1.0.156"
serde_json = "1.0.94"
num-traits = "0.2.15"
ark-relations = { version = "0.4.0", features = ["std", "tracing-subscriber"] }
rs-poseidon = { git = "https://github.com/status-im/rs-poseidon" }

[dev-dependencies]
criterion = "0.3"
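
One gap worth noting for the Criterion benchmarks added in this commit: Cargo runs bench targets under the default libtest harness unless told otherwise, which conflicts with `criterion_main!`. A hedged sketch of the missing target section — the target name `benches` is an assumption matching benches/benches.rs, and this block is not part of the commit:

```toml
# Hypothetical addition: lets `cargo bench` hand control to Criterion
# instead of the default libtest harness.
[[bench]]
name = "benches"
harness = false
```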

README.md

@@ -5,6 +5,29 @@
WIP Zero Knowledge tooling for the Codex project
## Usage
First, clone the repo and install the circom components:
```sh
git clone git@github.com:status-im/codex-storage-proofs.git
cd codex-storage-proofs
npm i
cd circuits
```
Next, compile the circuits:
```sh
../scripts/circuit-prep.sh
```
Running the tests:
```sh
cargo test # don't run in release mode as it disables circuit asserts
```
## License
Licensed and distributed under either of
@@ -16,15 +39,3 @@ or
* Apache License, Version 2.0, ([LICENSE-APACHEv2](LICENSE-APACHEv2) or http://www.apache.org/licenses/LICENSE-2.0)
at your option. These files may not be copied, modified, or distributed except according to those terms.
## Usage
Preparing test key material (only suitable for testing)
```
../scripts/circuit_prep.sh storer 13
```
Running part of the code
```
npm test test/merkletree.js
```

bench/benches.rs (new file)

@@ -0,0 +1,75 @@
use std::fs::File;

use ark_bn254::{Bn254, Fr};
use ark_circom::{read_zkey, CircomBuilder, CircomConfig};
use ark_groth16::{
    create_random_proof as prove, generate_random_parameters, prepare_verifying_key, verify_proof,
    Proof, ProvingKey,
};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Read};
use ark_std::rand::rngs::ThreadRng;
use criterion::{black_box, criterion_group, criterion_main, Criterion};
use ruint::aliases::U256;

#[derive(Debug, Clone)]
pub struct StorageProofs {
    builder: CircomBuilder<Bn254>,
    params: ProvingKey<Bn254>,
    rng: ThreadRng,
}

// ...StorageProofs implementation here...

// Functions for benchmarking
fn bench_prove(c: &mut Criterion) {
    // You need to fill in these variables properly
    let wtns = "...";
    let r1cs = "...";
    let zkey = Some("...");
    let mut sp = StorageProofs::new(wtns, r1cs, zkey);

    let chunks: &[U256] = &[];
    let siblings: &[U256] = &[];
    let hashes: &[U256] = &[];
    let path: &[i32] = &[];
    let root = U256::default();
    let salt = U256::default();
    let mut proof_bytes = Vec::new();
    let mut public_inputs_bytes = Vec::new();

    c.bench_function("StorageProofs prove", |b| {
        b.iter(|| {
            black_box(
                sp.prove(
                    chunks,
                    siblings,
                    hashes,
                    path,
                    root,
                    salt,
                    &mut proof_bytes,
                    &mut public_inputs_bytes,
                )
                .unwrap(),
            )
        })
    });
}

fn bench_verify(c: &mut Criterion) {
    // You need to fill in these variables properly
    let wtns = "...";
    let r1cs = "...";
    let zkey = Some("...");
    let mut sp = StorageProofs::new(wtns, r1cs, zkey);

    let proof_bytes: &[u8] = &[];
    let public_inputs: &[u8] = &[];

    c.bench_function("StorageProofs verify", |b| {
        b.iter(|| {
            black_box(sp.verify(proof_bytes, public_inputs).unwrap());
        })
    });
}

criterion_group!(benches, bench_prove, bench_verify);
criterion_main!(benches);

bench/snarkjs_bench.sh (new executable file)

@@ -0,0 +1,65 @@
#!/bin/bash
# Set up the benchmarking parameters
ITERATIONS=10
CIRCUIT=../test/circuits/storer_test.circom
WITNESS=./input.json
# Define the SnarkJS commands for each system
GROTH16_CMD="snarkjs groth16 prove circuit_final.zkey witness.wtns proof.json public.json"
PLONK_CMD="snarkjs plonk prove circuit_final.zkey witness.wtns proof.json public.json"
# Set up the powers of tau ceremony
echo "Set up powers of tau ceremony"
snarkjs powersoftau new bn128 17 ../scripts/pot17_bn128_0000.ptau -v
# Generate circuit files
circom ${CIRCUIT} --r1cs --wasm --sym
snarkjs r1cs export json ./storer_test.r1cs ./storer_test.r1cs.json
# Generate the proving and verifying keys for Groth16
echo "Preparing phase 1"
snarkjs powersoftau contribute ../scripts/pot17_bn128_0000.ptau ../scripts/pot17_bn128_0001.ptau >/dev/null 2>&1 </dev/urandom
snarkjs powersoftau contribute ../scripts/pot17_bn128_0001.ptau ../scripts/pot17_bn128_0002.ptau >/dev/null 2>&1 </dev/urandom
snarkjs powersoftau verify ../scripts/pot17_bn128_0002.ptau
snarkjs powersoftau beacon ../scripts/pot17_bn128_0002.ptau ../scripts/pot17_bn128_beacon.ptau 0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f 10 -n="Final Beacon"
echo "Preparing phase 2"
snarkjs powersoftau prepare phase2 ../scripts/pot17_bn128_beacon.ptau ../scripts/pot17_bn128_final.ptau
snarkjs powersoftau verify ../scripts/pot17_bn128_final.ptau
echo "Calculating witness"
node ./storer_test_js/generate_witness.js ./storer_test_js/storer_test.wasm ${WITNESS} ./witness.wtns
snarkjs wtns check ./storer_test.r1cs ./witness.wtns
# Benchmark Groth16
echo "Benchmarking Groth16..."
snarkjs groth16 setup ./storer_test.r1cs ../scripts/pot17_bn128_final.ptau circuit_0000.zkey
snarkjs zkey contribute circuit_0000.zkey circuit_0001.zkey --name="1st contributor" >/dev/null 2>&1
snarkjs zkey contribute circuit_0001.zkey circuit_0002.zkey --name="2nd contributor" >/dev/null 2>&1
snarkjs zkey verify ./storer_test.r1cs ../scripts/pot17_bn128_final.ptau circuit_0002.zkey
snarkjs zkey beacon circuit_0002.zkey circuit_final.zkey 0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f
snarkjs zkey verify ./storer_test.r1cs ../scripts/pot17_bn128_final.ptau circuit_final.zkey
snarkjs zkey export verificationkey circuit_final.zkey verification_key.json
for i in $(seq 1 $ITERATIONS); do
  echo "Proving..."
  /usr/bin/time -f "%e seconds" $GROTH16_CMD >/dev/null 2>&1
  echo "Verifying..."
  /usr/bin/time -f "%e seconds" snarkjs groth16 verify verification_key.json public.json proof.json
done
# Generate the proving and verifying keys for PLONK
echo "Generating PLONK keys..."
snarkjs powersoftau contribute ./contributions_2 pot12_0000_final_challenge >/dev/null 2>&1
snarkjs powersoftau verify ./contributions_2 >/dev/null 2>&1
snarkjs powersoftau prepare phase2 ./contributions_2 pot12_0000_final_challenge --srs_monomial_form ./srs.monomial >/dev/null 2>&1
snarkjs plonk setup --srs_monomial_form ./srs.monomial >/dev/null 2>&1
# Benchmark PLONK
echo "Benchmarking PLONK..."
for i in $(seq 1 $ITERATIONS); do
  /usr/bin/time -f "%e seconds" $PLONK_CMD >/dev/null 2>&1
done

benches/benches.rs (new file)

@@ -0,0 +1,66 @@
use std::fs::File;

use ark_bn254::{Bn254, Fr};
use ark_circom::{read_zkey, CircomBuilder, CircomConfig};
use ark_groth16::{
    create_random_proof as prove, generate_random_parameters, prepare_verifying_key, verify_proof,
    Proof, ProvingKey,
};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Read};
use ark_std::rand::rngs::ThreadRng;
use criterion::{black_box, criterion_group, criterion_main, Criterion};
use ruint::aliases::U256;

use codex_storage_proofs::storage_proofs::StorageProofs;

// Functions for benchmarking
fn bench_prove(c: &mut Criterion) {
    let wtns = "./witness.wtns";
    let r1cs = "./storer_test.r1cs";
    let zkey = Some("./circuit_0001.zkey".to_string());
    let mut sp = StorageProofs::new(wtns.to_string(), r1cs.to_string(), zkey);

    let chunks: &[U256] = &[];
    let siblings: &[U256] = &[];
    let hashes: &[U256] = &[];
    let path: &[i32] = &[];
    let root = U256::default();
    let salt = U256::default();
    let mut proof_bytes = Vec::new();
    let mut public_inputs_bytes = Vec::new();

    c.bench_function("StorageProofs prove", |b| {
        b.iter(|| {
            black_box(
                sp.prove(
                    chunks,
                    siblings,
                    hashes,
                    path,
                    root,
                    salt,
                    &mut proof_bytes,
                    &mut public_inputs_bytes,
                )
                .unwrap(),
            )
        })
    });
}

fn bench_verify(c: &mut Criterion) {
    let wtns = "./witness.wtns";
    let r1cs = "./storer_test.r1cs";
    let zkey = Some("./circuit_0001.zkey".to_string());
    let mut sp = StorageProofs::new(wtns.to_string(), r1cs.to_string(), zkey);

    let proof_bytes: &[u8] = &[];
    let public_inputs: &[u8] = &[];

    c.bench_function("StorageProofs verify", |b| {
        b.iter(|| {
            black_box(sp.verify(proof_bytes, public_inputs).unwrap());
        })
    });
}

criterion_group!(benches, bench_prove, bench_verify);
criterion_main!(benches);

benches/snarkjs_bench.sh (new executable file)

@@ -0,0 +1,65 @@
#!/bin/bash
# Set up the benchmarking parameters
ITERATIONS=10
CIRCUIT=../test/circuits/storer_test.circom
WITNESS=./input.json
# Define the SnarkJS commands for each system
GROTH16_CMD="snarkjs groth16 prove circuit_final.zkey witness.wtns proof.json public.json"
PLONK_CMD="snarkjs plonk prove circuit_final.zkey witness.wtns proof.json public.json"
# Set up the powers of tau ceremony
echo "Set up powers of tau ceremony"
snarkjs powersoftau new bn128 17 ../scripts/pot17_bn128_0000.ptau -v
# Generate circuit files
circom ${CIRCUIT} --r1cs --wasm --sym
snarkjs r1cs export json ./storer_test.r1cs ./storer_test.r1cs.json
# Generate the proving and verifying keys for Groth16
echo "Preparing phase 1"
snarkjs powersoftau contribute ../scripts/pot17_bn128_0000.ptau ../scripts/pot17_bn128_0001.ptau >/dev/null 2>&1 </dev/urandom
snarkjs powersoftau contribute ../scripts/pot17_bn128_0001.ptau ../scripts/pot17_bn128_0002.ptau >/dev/null 2>&1 </dev/urandom
snarkjs powersoftau verify ../scripts/pot17_bn128_0002.ptau
snarkjs powersoftau beacon ../scripts/pot17_bn128_0002.ptau ../scripts/pot17_bn128_beacon.ptau 0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f 10 -n="Final Beacon"
echo "Preparing phase 2"
snarkjs powersoftau prepare phase2 ../scripts/pot17_bn128_beacon.ptau ../scripts/pot17_bn128_final.ptau
snarkjs powersoftau verify ../scripts/pot17_bn128_final.ptau
echo "Calculating witness"
node ./storer_test_js/generate_witness.js ./storer_test_js/storer_test.wasm ${WITNESS} ./witness.wtns
snarkjs wtns check ./storer_test.r1cs ./witness.wtns
# Benchmark Groth16
echo "Benchmarking Groth16..."
snarkjs groth16 setup ./storer_test.r1cs ../scripts/pot17_bn128_final.ptau circuit_0000.zkey
snarkjs zkey contribute circuit_0000.zkey circuit_0001.zkey --name="1st contributor" >/dev/null 2>&1
snarkjs zkey contribute circuit_0001.zkey circuit_0002.zkey --name="2nd contributor" >/dev/null 2>&1
snarkjs zkey verify ./storer_test.r1cs ../scripts/pot17_bn128_final.ptau circuit_0002.zkey
snarkjs zkey beacon circuit_0002.zkey circuit_final.zkey 0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f
snarkjs zkey verify ./storer_test.r1cs ../scripts/pot17_bn128_final.ptau circuit_final.zkey
snarkjs zkey export verificationkey circuit_final.zkey verification_key.json
for i in $(seq 1 $ITERATIONS); do
  echo "Proving..."
  /usr/bin/time -f "%e seconds" $GROTH16_CMD >/dev/null 2>&1
  echo "Verifying..."
  /usr/bin/time -f "%e seconds" snarkjs groth16 verify verification_key.json public.json proof.json
done
# Generate the proving and verifying keys for PLONK
echo "Generating PLONK keys..."
snarkjs powersoftau contribute ./contributions_2 pot12_0000_final_challenge >/dev/null 2>&1
snarkjs powersoftau verify ./contributions_2 >/dev/null 2>&1
snarkjs powersoftau prepare phase2 ./contributions_2 pot12_0000_final_challenge --srs_monomial_form ./srs.monomial >/dev/null 2>&1
snarkjs plonk setup --srs_monomial_form ./srs.monomial >/dev/null 2>&1
# Benchmark PLONK
echo "Benchmarking PLONK..."
for i in $(seq 1 $ITERATIONS); do
  /usr/bin/time -f "%e seconds" $PLONK_CMD >/dev/null 2>&1
done

circuits/poseidon-digest.circom (new file)

@@ -0,0 +1,55 @@
pragma circom 2.1.0;

include "../node_modules/circomlib/circuits/poseidon.circom";

function roundUpDiv(x, n) {
    var last = x % n; // get the remainder
    var div = x \ n;  // get the integer quotient
    if (last > 0) {
        return div + 1;
    }
    return div;
}

template parallel PoseidonDigest(BLOCK_SIZE, DIGEST_CHUNK) {
    // BLOCK_SIZE - size of the input block array
    // DIGEST_CHUNK - number of elements to hash at once
    signal input block[BLOCK_SIZE]; // Input block array
    signal output hash;             // Output hash

    // Split the array into chunks of size DIGEST_CHUNK, usually 2
    var NUM_CHUNKS = roundUpDiv(BLOCK_SIZE, DIGEST_CHUNK);

    // Initialize an array to store the hash of each chunk
    component hashes[NUM_CHUNKS];

    // Loop over the chunks and hash them using Poseidon()
    for (var i = 0; i < NUM_CHUNKS; i++) {
        hashes[i] = Poseidon(DIGEST_CHUNK);

        var start = i * DIGEST_CHUNK;
        var end = start + DIGEST_CHUNK;
        for (var j = start; j < end; j++) {
            if (j >= BLOCK_SIZE) {
                // zero-pad the last chunk if BLOCK_SIZE doesn't divide evenly
                hashes[i].inputs[j - start] <== 0;
            } else {
                hashes[i].inputs[j - start] <== block[j];
            }
        }
    }

    // Concatenate the chunk hashes into a single array
    var concat[NUM_CHUNKS];
    for (var i = 0; i < NUM_CHUNKS; i++) {
        concat[i] = hashes[i].out;
    }

    // Hash the concatenated array using Poseidon() again
    component h = Poseidon(NUM_CHUNKS);
    h.inputs <== concat;

    // Assign output to hash signal
    hash <== h.out;
}

circuits/storer.circom

@@ -1,11 +1,11 @@
pragma circom 2.1.0;
// include "../node_modules/circomlib/circuits/poseidon.circom";
include "../node_modules/circomlib/circuits/mimc.circom";
// include "../node_modules/circomlib/circuits/mimcsponge.circom";
include "../node_modules/circomlib/circuits/poseidon.circom";
include "../node_modules/circomlib/circuits/switcher.circom";
include "../node_modules/circomlib/circuits/bitify.circom";
include "./poseidon-digest.circom";
template parallel MerkleProof(LEVELS) {
signal input leaf;
signal input pathElements[LEVELS];
@@ -26,31 +26,19 @@ template parallel MerkleProof(LEVELS) {
switcher[i].R <== pathElements[i];
switcher[i].sel <== indexBits.out[i];
// hasher[i] = Poseidon(2);
hasher[i] = MultiMiMC7(2, 91);
hasher[i].k <== 2;
hasher[i].in[0] <== switcher[i].outL;
hasher[i].in[1] <== switcher[i].outR;
hasher[i] = Poseidon(2);
hasher[i].inputs[0] <== switcher[i].outL;
hasher[i].inputs[1] <== switcher[i].outR;
}
root <== hasher[LEVELS - 1].out;
}
template parallel HashCheck(BLOCK_SIZE) {
signal input block[BLOCK_SIZE];
signal input blockHash;
component hash = MultiMiMC7(BLOCK_SIZE, 91);
hash.in <== block;
hash.k <== 2;
blockHash === hash.out; // assert that block matches hash
}
template StorageProver(BLOCK_SIZE, QUERY_LEN, LEVELS) {
template StorageProver(BLOCK_SIZE, QUERY_LEN, LEVELS, DIGEST_CHUNK) {
// BLOCK_SIZE: size of block in symbols
// QUERY_LEN: query length, i.e. number of indices to be proven
// LEVELS: size of Merkle Tree in the manifest
// DIGEST_CHUNK: number of symbols to hash in one go
signal input chunks[QUERY_LEN][BLOCK_SIZE]; // chunks to be proven
signal input siblings[QUERY_LEN][LEVELS]; // siblings hashes of chunks to be proven
signal input path[QUERY_LEN]; // path of chunks to be proven
@@ -62,9 +50,9 @@ template StorageProver(BLOCK_SIZE, QUERY_LEN, LEVELS) {
component hashers[QUERY_LEN];
for (var i = 0; i < QUERY_LEN; i++) {
hashers[i] = HashCheck(BLOCK_SIZE);
hashers[i] = PoseidonDigest(BLOCK_SIZE, DIGEST_CHUNK);
hashers[i].block <== chunks[i];
hashers[i].blockHash <== hashes[i];
hashers[i].hash === hashes[i];
}
component merkelizer[QUERY_LEN];

circuits/main.circom (new file)

@@ -0,0 +1,5 @@
pragma circom 2.1.0;
include "./storer.circom";
component main { public [root, salt] } = StorageProver(256, 80, 32, 16);

scripts/circuit-prep.sh (new executable file)

@@ -0,0 +1,5 @@
#!/bin/bash
circom src/circuit_tests/poseidon-digest-test.circom --r1cs --wasm -o src/circuit_tests/artifacts
circom src/circuit_tests/poseidon-hash-test.circom --r1cs --wasm -o src/circuit_tests/artifacts
circom src/circuit_tests/storer-test.circom --r1cs --wasm -o src/circuit_tests/artifacts

scripts/circuit_prep.sh (deleted)

@@ -1,28 +0,0 @@
#!/bin/bash
set -e
set -x
CIRCUIT=`basename $1`
POWER="${2:-12}"
CURVE="${3:-bn128}"
POTPREFIX=pot${POWER}_${CURVE}
if [ ! -f ${POTPREFIX}_final.ptau ]
then
  snarkjs powersoftau new $CURVE $POWER ${POTPREFIX}_0000.ptau -v
  snarkjs powersoftau contribute ${POTPREFIX}_0000.ptau ${POTPREFIX}_0001.ptau --name="First contribution" -v -e="random text"
  snarkjs powersoftau verify ${POTPREFIX}_0001.ptau
  snarkjs powersoftau beacon ${POTPREFIX}_0001.ptau ${POTPREFIX}_beacon.ptau 0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f 10 -n="Final Beacon"
  snarkjs powersoftau prepare phase2 ${POTPREFIX}_beacon.ptau ${POTPREFIX}_final.ptau -v
  snarkjs powersoftau verify ${POTPREFIX}_final.ptau
fi
# phase 2
circom $1.circom --r1cs --wasm
snarkjs groth16 setup ${CIRCUIT}.r1cs ${POTPREFIX}_final.ptau ${CIRCUIT}_0000.zkey
snarkjs zkey contribute ${CIRCUIT}_0000.zkey ${CIRCUIT}_0001.zkey --name="1st Contributor Name" -v -e="another random text"
snarkjs zkey verify ${CIRCUIT}.r1cs ${POTPREFIX}_final.ptau ${CIRCUIT}_0001.zkey
snarkjs zkey beacon ${CIRCUIT}_0001.zkey ${CIRCUIT}_final.zkey 0102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f 10 -n="Final Beacon phase2"

scripts/install-circom.sh (new executable file)

@@ -0,0 +1,11 @@
#!/bin/bash

circom_version=v2.1.4

if ! [ -x "$(command -v circom)" ]; then
  git clone https://github.com/iden3/circom.git
  cd circom
  git checkout $circom_version
  cargo build --release
  cargo install --path circom
fi

src/circuit_tests/artifacts/.keep (new empty file)

src/circuit_tests/mod.rs (new file)

@@ -0,0 +1,153 @@
pub mod utils;

#[cfg(test)]
mod test {
    use ark_bn254::Bn254;
    use ark_circom::{CircomBuilder, CircomConfig};
    use ark_groth16::{
        create_random_proof as prove, generate_random_parameters, prepare_inputs,
        prepare_verifying_key, verify_proof_with_prepared_inputs, ProvingKey,
    };
    use ark_std::rand::{distributions::Alphanumeric, rngs::ThreadRng, Rng};
    use rs_poseidon::poseidon::hash;
    use ruint::aliases::U256;

    use crate::{
        circuit_tests::utils::{digest, treehash},
        storage_proofs::StorageProofs,
    };

    pub struct CircuitsTests {
        builder: CircomBuilder<Bn254>,
        params: ProvingKey<Bn254>,
        rng: ThreadRng,
    }

    impl CircuitsTests {
        pub fn new(wtns: String, r1cs: String) -> CircuitsTests {
            let mut rng = ThreadRng::default();
            let builder = CircomBuilder::new(CircomConfig::<Bn254>::new(wtns, r1cs).unwrap());
            let params =
                generate_random_parameters::<Bn254, _, _>(builder.setup(), &mut rng).unwrap();

            CircuitsTests {
                builder,
                params,
                rng,
            }
        }

        pub fn poseidon_hash(&mut self, elements: &[U256], hash: U256) -> bool {
            let mut builder = self.builder.clone();

            elements.iter().for_each(|c| builder.push_input("in", *c));
            builder.push_input("hash", hash);

            let circuit = builder.build().unwrap();
            let inputs = circuit.get_public_inputs().unwrap();
            let proof = prove(circuit, &self.params, &mut self.rng).unwrap();
            let vk = prepare_verifying_key(&self.params.vk);
            let public_inputs = prepare_inputs(&vk, &inputs).unwrap();

            verify_proof_with_prepared_inputs(&vk, &proof, &public_inputs).is_ok()
        }

        pub fn poseidon_digest(&mut self, elements: &[U256], hash: U256) -> bool {
            let mut builder = self.builder.clone();

            elements
                .iter()
                .for_each(|c| builder.push_input("block", *c));
            builder.push_input("hash", hash);

            let circuit = builder.build().unwrap();
            let inputs = circuit.get_public_inputs().unwrap();
            let proof = prove(circuit, &self.params, &mut self.rng).unwrap();
            let vk = prepare_verifying_key(&self.params.vk);
            let public_inputs = prepare_inputs(&vk, &inputs).unwrap();

            verify_proof_with_prepared_inputs(&vk, &proof, &public_inputs).is_ok()
        }
    }

    #[test]
    fn test_poseidon_hash() {
        let r1cs = "./src/circuit_tests/artifacts/poseidon-hash-test.r1cs";
        let wasm = "./src/circuit_tests/artifacts/poseidon-hash-test_js/poseidon-hash-test.wasm";
        let mut hasher = CircuitsTests::new(wasm.to_string(), r1cs.to_string());

        assert!(hasher.poseidon_hash(&[U256::from(1)], hash(&[U256::from(1)])));
    }

    #[test]
    fn test_poseidon_digest() {
        let r1cs = "./src/circuit_tests/artifacts/poseidon-digest-test.r1cs";
        let wasm =
            "./src/circuit_tests/artifacts/poseidon-digest-test_js/poseidon-digest-test.wasm";
        let mut hasher = CircuitsTests::new(wasm.to_string(), r1cs.to_string());
        let input: Vec<U256> = (0..256).map(|c| U256::from(c)).collect();

        assert!(hasher.poseidon_digest(&input, digest(&input, Some(16))));
    }

    #[test]
    fn test_storer() {
        let r1cs = "./src/circuit_tests/artifacts/storer-test.r1cs";
        let wasm = "./src/circuit_tests/artifacts/storer-test_js/storer-test.wasm";
        let mut prover = StorageProofs::new(wasm.to_string(), r1cs.to_string(), None);

        // generate a tuple of (preimages, hash), where preimages is a vector of 256 U256s
        // and hash is the hash of each vector generated using the digest function
        let data = (0..4)
            .map(|_| {
                let rng = ThreadRng::default();
                let preimages: Vec<U256> = rng
                    .sample_iter(Alphanumeric)
                    .take(256)
                    .map(|c| U256::from(c))
                    .collect();
                let hash = digest(&preimages, Some(16));
                (preimages, hash)
            })
            .collect::<Vec<(Vec<U256>, U256)>>();

        let chunks: Vec<U256> = data.iter().flat_map(|c| c.0.to_vec()).collect();
        let hashes: Vec<U256> = data.iter().map(|c| c.1).collect();
        let path = [0, 1, 2, 3].to_vec();

        let parent_hash_l = hash(&[hashes[0], hashes[1]]);
        let parent_hash_r = hash(&[hashes[2], hashes[3]]);

        let siblings = &[
            hashes[1],
            parent_hash_r,
            hashes[0],
            parent_hash_r,
            hashes[3],
            parent_hash_l,
            hashes[2],
            parent_hash_l,
        ];

        let root = treehash(hashes.as_slice());
        let proof_bytes = &mut Vec::new();
        let public_inputs_bytes = &mut Vec::new();

        prover
            .prove(
                chunks.as_slice(),
                siblings,
                hashes.as_slice(),
                path.as_slice(),
                root,
                root, // random salt - block hash
                proof_bytes,
                public_inputs_bytes,
            )
            .unwrap();

        assert!(prover
            .verify(proof_bytes.as_slice(), public_inputs_bytes.as_slice())
            .is_ok());
    }
}

src/circuit_tests/poseidon-digest-test.circom (new file)

@@ -0,0 +1,20 @@
pragma circom 2.1.0;

include "../../circuits/poseidon-digest.circom";

template PoseidonDigestTest(BLOCK_SIZE, CHUNK_SIZE) {
    signal input block[BLOCK_SIZE];
    signal input hash;
    signal output hash2;

    component digest = PoseidonDigest(BLOCK_SIZE, CHUNK_SIZE);
    for (var i = 0; i < BLOCK_SIZE; i++) {
        digest.block[i] <== block[i];
    }

    digest.hash === hash; // verify that the hash is correct
    hash2 <== digest.hash;
}

component main { public [hash] } = PoseidonDigestTest(256, 16);

src/circuit_tests/poseidon-hash-test.circom (new file)

@@ -0,0 +1,17 @@
pragma circom 2.1.0;

include "../../node_modules/circomlib/circuits/poseidon.circom";

template PoseidonHash(SIZE) {
    signal input in[SIZE];
    signal input hash;

    component hasher = Poseidon(SIZE);
    for (var i = 0; i < SIZE; i++) {
        hasher.inputs[i] <== in[i];
    }

    hasher.out === hash;
}

component main { public [hash] } = PoseidonHash(1);

src/circuit_tests/storer-test.circom (new file)

@@ -0,0 +1,5 @@
pragma circom 2.1.0;
include "../../circuits/storer.circom";
component main { public [root, salt] } = StorageProver(256, 4, 2, 16);

src/circuit_tests/utils.rs (new file)

@@ -0,0 +1,49 @@
#![allow(dead_code)]
use rs_poseidon::poseidon::hash;
use ruint::{aliases::U256, uint};

pub fn digest(input: &[U256], chunk_size: Option<usize>) -> U256 {
    let chunk_size = chunk_size.unwrap_or(4);
    let chunks = ((input.len() as f32) / (chunk_size as f32)).ceil() as usize;
    let mut concat: Vec<U256> = vec![];

    for i in 0..chunks {
        let range = (i * chunk_size)..std::cmp::min((i + 1) * chunk_size, input.len());
        let mut chunk = input[range].to_vec();
        if chunk.len() < chunk_size {
            // zero-pad the last chunk so every Poseidon call gets chunk_size inputs
            chunk.resize(chunk_size, uint!(0_U256));
        }

        concat.push(hash(chunk.as_slice()));
    }

    if concat.len() > 1 {
        return hash(concat.as_slice());
    }

    concat[0]
}

pub fn treehash(leafs: &[U256]) -> U256 {
    // simple merkle root (treehash) generator
    // unbalanced trees will have the last leaf duplicated
    let mut merkle: Vec<U256> = leafs.to_vec();
    while merkle.len() > 1 {
        let mut new_merkle = Vec::new();
        let mut i = 0;
        // pair up full siblings; stop before a trailing odd leaf
        while i + 1 < merkle.len() {
            new_merkle.push(hash(&[merkle[i], merkle[i + 1]]));
            i += 2;
        }

        if merkle.len() % 2 == 1 {
            // odd level: duplicate the last leaf
            let last = merkle[merkle.len() - 1];
            new_merkle.push(hash(&[last, last]));
        }

        merkle = new_merkle;
    }

    merkle[0]
}
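
As a sanity check on these two helpers: for a balanced set of four leaves, `digest` with a chunk size of 2 and `treehash` both reduce to `hash(hash(l0, l1), hash(l2, l3))`, so they must agree. A minimal sketch (not part of this commit) of how that equivalence could be checked, assuming `digest` and `treehash` above are in scope:

```rust
#[cfg(test)]
mod sanity {
    use super::{digest, treehash};
    use ruint::aliases::U256;

    // Both paths compute hash(hash(l0, l1), hash(l2, l3)) for four leaves.
    #[test]
    fn digest_matches_treehash_on_four_leaves() {
        let leafs: Vec<U256> = (1u64..=4).map(U256::from).collect();
        assert_eq!(digest(&leafs, Some(2)), treehash(&leafs));
    }
}
```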

src/ffi.rs (new file)

@@ -0,0 +1,302 @@
use ruint::aliases::U256;

use crate::storage_proofs::StorageProofs;
use std::str;

#[derive(Debug, Clone)]
#[repr(C)]
pub struct Buffer {
    pub data: *const u8,
    pub len: usize,
}

#[derive(Debug, Clone)]
#[repr(C)]
pub struct ProofCtx {
    pub proof: Buffer,
    pub public_inputs: Buffer,
}

impl ProofCtx {
    pub fn new(proof: &[u8], public_inputs: &[u8]) -> Self {
        Self {
            proof: Buffer {
                data: proof.as_ptr(),
                len: proof.len(),
            },
            public_inputs: Buffer {
                data: public_inputs.as_ptr(),
                len: public_inputs.len(),
            },
        }
    }
}

/// # Safety
///
/// Construct a StorageProofs object
#[no_mangle]
pub unsafe extern "C" fn init(
    r1cs: *const &Buffer,
    wasm: *const &Buffer,
    zkey: *const &Buffer,
) -> *mut StorageProofs {
    let r1cs = {
        if r1cs.is_null() {
            return std::ptr::null_mut();
        }

        let slice = std::slice::from_raw_parts((*r1cs).data, (*r1cs).len);
        str::from_utf8(slice).unwrap().to_string()
    };

    let wasm = {
        if wasm.is_null() {
            return std::ptr::null_mut();
        }

        let slice = std::slice::from_raw_parts((*wasm).data, (*wasm).len);
        str::from_utf8(slice).unwrap().to_string()
    };

    let zkey = {
        if !zkey.is_null() {
            let slice = std::slice::from_raw_parts((*zkey).data, (*zkey).len);
            Some(str::from_utf8(slice).unwrap().to_string())
        } else {
            None
        }
    };

    Box::into_raw(Box::new(StorageProofs::new(wasm, r1cs, zkey)))
}

/// # Safety
///
/// Use after constructing a StorageProofs object with init
#[no_mangle]
pub unsafe extern "C" fn prove(
    prover_ptr: *mut StorageProofs,
    chunks: *const Buffer,
    siblings: *const Buffer,
    hashes: *const Buffer,
    path: *const i32,
    path_len: usize,
    pubkey: *const Buffer,
    root: *const Buffer,
    salt: *const Buffer,
) -> *mut ProofCtx {
    let chunks = {
        let slice = std::slice::from_raw_parts((*chunks).data, (*chunks).len);
        slice
            .chunks(U256::BYTES)
            .map(|c| U256::try_from_le_slice(c).unwrap())
            .collect::<Vec<U256>>()
    };

    let siblings = {
        let slice = std::slice::from_raw_parts((*siblings).data, (*siblings).len);
        slice
            .chunks(U256::BYTES)
            .map(|c| U256::try_from_le_slice(c).unwrap())
            .collect::<Vec<U256>>()
    };

    let hashes = {
        let slice = std::slice::from_raw_parts((*hashes).data, (*hashes).len);
        slice
            .chunks(U256::BYTES)
            .map(|c| U256::try_from_le_slice(c).unwrap())
            .collect::<Vec<U256>>()
    };

    let path = {
        let slice = std::slice::from_raw_parts(path, path_len);
        slice.to_vec()
    };

    // NOTE: pubkey is deserialized but not yet wired into the circuit inputs
    let pubkey =
        U256::try_from_le_slice(std::slice::from_raw_parts((*pubkey).data, (*pubkey).len)).unwrap();
    let root =
        U256::try_from_le_slice(std::slice::from_raw_parts((*root).data, (*root).len)).unwrap();
    let salt =
        U256::try_from_le_slice(std::slice::from_raw_parts((*salt).data, (*salt).len)).unwrap();

    let proof_bytes = &mut Vec::new();
    let public_inputs_bytes = &mut Vec::new();
    let prover = &mut *prover_ptr;

    prover
        .prove(
            chunks.as_slice(),
            siblings.as_slice(),
            hashes.as_slice(),
            path.as_slice(),
            root,
            salt,
            proof_bytes,
            public_inputs_bytes,
        )
        .unwrap();

    Box::into_raw(Box::new(ProofCtx::new(proof_bytes, public_inputs_bytes)))
}

#[no_mangle]
/// # Safety
///
/// Should be called on a valid proof and public inputs previously generated by prove
pub unsafe extern "C" fn verify(
    prover_ptr: *mut StorageProofs,
    proof: *const Buffer,
    public_inputs: *const Buffer,
) -> bool {
    let proof = std::slice::from_raw_parts((*proof).data, (*proof).len);
    let public_inputs = std::slice::from_raw_parts((*public_inputs).data, (*public_inputs).len);
    let prover = &mut *prover_ptr;

    prover.verify(proof, public_inputs).is_ok()
}

/// # Safety
///
/// Use on a valid pointer to StorageProofs or panics
#[no_mangle]
pub unsafe extern "C" fn free_prover(prover: *mut StorageProofs) {
    if prover.is_null() {
        return;
    }

    drop(Box::from_raw(prover))
}

/// # Safety
///
/// Use on a valid pointer to ProofCtx or panics
#[no_mangle]
pub unsafe extern "C" fn free_proof_ctx(ctx: *mut ProofCtx) {
    if ctx.is_null() {
        return;
    }

    drop(Box::from_raw(ctx))
}

#[cfg(test)]
mod tests {
    use ark_std::rand::{distributions::Alphanumeric, rngs::ThreadRng, Rng};
    use rs_poseidon::poseidon::hash;
    use ruint::aliases::U256;

    use crate::circuit_tests::utils::{digest, treehash};

    use super::{init, prove, Buffer};

    #[test]
    fn test_storer_ffi() {
        // generate a tuple of (preimages, hash), where preimages is a vector of 256 U256s
        // and hash is the hash of each vector generated using the digest function
        let data = (0..4)
            .map(|_| {
                let rng = ThreadRng::default();
                let preimages: Vec<U256> = rng
                    .sample_iter(Alphanumeric)
                    .take(256)
                    .map(|c| U256::from(c))
                    .collect();
                let hash = digest(&preimages, Some(16));
                (preimages, hash)
            })
            .collect::<Vec<(Vec<U256>, U256)>>();

        let chunks: Vec<u8> = data
            .iter()
            .map(|c| {
                c.0.iter()
                    .map(|c| c.to_le_bytes_vec())
                    .flatten()
                    .collect::<Vec<u8>>()
            })
            .flatten()
            .collect();

        let hashes: Vec<U256> = data.iter().map(|c| c.1).collect();
        let hashes_slice: Vec<u8> = hashes.iter().map(|c| c.to_le_bytes_vec()).flatten().collect();
        let path = [0, 1, 2, 3];

        let parent_hash_l = hash(&[hashes[0], hashes[1]]);
        let parent_hash_r = hash(&[hashes[2], hashes[3]]);

        let sibling_hashes = &[
            hashes[1],
            parent_hash_r,
            hashes[0],
            parent_hash_r,
            hashes[3],
            parent_hash_l,
            hashes[2],
            parent_hash_l,
        ];

        let siblings: Vec<u8> = sibling_hashes
            .iter()
            .map(|c| c.to_le_bytes_vec())
            .flatten()
            .collect();

        let root = treehash(hashes.as_slice());
        let chunks_buff = Buffer {
            data: chunks.as_ptr() as *const u8,
            len: chunks.len(),
        };
        let siblings_buff = Buffer {
            data: siblings.as_ptr() as *const u8,
            len: siblings.len(),
        };
        let hashes_buff = Buffer {
            data: hashes_slice.as_ptr() as *const u8,
            len: hashes_slice.len(),
        };
        let root_bytes: [u8; U256::BYTES] = root.to_le_bytes();
        let root_buff = Buffer {
            data: root_bytes.as_ptr() as *const u8,
            len: root_bytes.len(),
        };

        let r1cs_path = "src/circuit_tests/artifacts/storer-test.r1cs";
        let wasm_path = "src/circuit_tests/artifacts/storer-test_js/storer-test.wasm";

        let r1cs = &Buffer {
            data: r1cs_path.as_ptr(),
            len: r1cs_path.len(),
        };

        let wasm = &Buffer {
            data: wasm_path.as_ptr(),
            len: wasm_path.len(),
        };

        let prover_ptr = unsafe { init(&r1cs, &wasm, std::ptr::null()) };
        let prove_ctx = unsafe {
            prove(
                prover_ptr,
                &chunks_buff as *const Buffer,
                &siblings_buff as *const Buffer,
                &hashes_buff as *const Buffer,
                path.as_ptr(),
                path.len(),
                &root_buff as *const Buffer, // pubkey
                &root_buff as *const Buffer, // root
                &root_buff as *const Buffer, // salt/block hash
            )
        };

        assert!(!prove_ctx.is_null());
    }
}

src/lib.rs (new file)

@@ -0,0 +1,3 @@
pub mod ffi;
pub mod storage_proofs;
mod circuit_tests;

src/storage_proofs.rs (new file)

@@ -0,0 +1,98 @@
use std::fs::File;

use ark_bn254::{Bn254, Fr};
use ark_circom::{read_zkey, CircomBuilder, CircomConfig};
use ark_groth16::{
    create_random_proof as prove, generate_random_parameters, prepare_verifying_key, verify_proof,
    Proof, ProvingKey,
};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Read};
use ark_std::rand::rngs::ThreadRng;
use ruint::aliases::U256;

#[derive(Debug, Clone)]
pub struct StorageProofs {
    builder: CircomBuilder<Bn254>,
    params: ProvingKey<Bn254>,
    rng: ThreadRng,
}

impl StorageProofs {
    // TODO: add rng
    pub fn new(
        wtns: String,
        r1cs: String,
        zkey: Option<String>, /* , rng: Option<ThreadRng> */
    ) -> Self {
        let mut rng = ThreadRng::default();
        let builder = CircomBuilder::new(CircomConfig::<Bn254>::new(wtns, r1cs).unwrap());
        let params: ProvingKey<Bn254> = match zkey {
            Some(zkey) => {
                let mut file = File::open(zkey).unwrap();
                read_zkey(&mut file).unwrap().0
            }
            None => generate_random_parameters::<Bn254, _, _>(builder.setup(), &mut rng).unwrap(),
        };

        Self {
            builder,
            params,
            rng,
        }
    }

    pub fn prove(
        &mut self,
        chunks: &[U256],
        siblings: &[U256],
        hashes: &[U256],
        path: &[i32],
        root: U256,
        salt: U256,
        proof_bytes: &mut Vec<u8>,
        public_inputs_bytes: &mut Vec<u8>,
    ) -> Result<(), String> {
        let mut builder = self.builder.clone();

        // vec of vecs is flattened, since wasm expects a contiguous array in memory
        chunks.iter().for_each(|c| builder.push_input("chunks", *c));
        siblings
            .iter()
            .for_each(|c| builder.push_input("siblings", *c));
        hashes.iter().for_each(|c| builder.push_input("hashes", *c));
        path.iter().for_each(|c| builder.push_input("path", *c));
        builder.push_input("root", root);
        builder.push_input("salt", salt);

        let circuit = builder.build().map_err(|e| e.to_string())?;
        let inputs = circuit
            .get_public_inputs()
            .ok_or("Unable to get public inputs!")?;
        let proof = prove(circuit, &self.params, &mut self.rng).map_err(|e| e.to_string())?;

        proof.serialize(proof_bytes).map_err(|e| e.to_string())?;
        inputs
            .serialize(public_inputs_bytes)
            .map_err(|e| e.to_string())?;

        Ok(())
    }

    pub fn verify<RR: Read>(
        &mut self,
        proof_bytes: RR,
        mut public_inputs: RR,
    ) -> Result<(), String> {
        let inputs: Vec<Fr> =
            CanonicalDeserialize::deserialize(&mut public_inputs).map_err(|e| e.to_string())?;
        let proof = Proof::<Bn254>::deserialize(proof_bytes).map_err(|e| e.to_string())?;
        let vk = prepare_verifying_key(&self.params.vk);

        verify_proof(&vk, &proof, inputs.as_slice()).map_err(|e| e.to_string())?;

        Ok(())
    }
}
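
For orientation, a minimal end-to-end sketch of the prover API above, not part of this commit. The artifact paths are assumptions matching what scripts/circuit-prep.sh produces, and the empty input slices are placeholders in the style of the bench stubs; real input shapes are built in the storer test in src/circuit_tests/mod.rs:

```rust
use codex_storage_proofs::storage_proofs::StorageProofs;
use ruint::aliases::U256;

fn roundtrip() -> Result<(), String> {
    // Paths assumed from scripts/circuit-prep.sh output.
    let wasm = "src/circuit_tests/artifacts/storer-test_js/storer-test.wasm".to_string();
    let r1cs = "src/circuit_tests/artifacts/storer-test.r1cs".to_string();
    // None: Groth16 parameters are generated on the fly (test-only);
    // pass Some(zkey_path) to load a ceremony key instead.
    let mut prover = StorageProofs::new(wasm, r1cs, None);

    // Placeholder inputs; see test_storer for realistically shaped ones.
    let (chunks, siblings, hashes, path): (&[U256], &[U256], &[U256], &[i32]) =
        (&[], &[], &[], &[]);
    let (mut proof, mut inputs) = (Vec::new(), Vec::new());
    prover.prove(chunks, siblings, hashes, path, U256::ZERO, U256::ZERO, &mut proof, &mut inputs)?;
    prover.verify(proof.as_slice(), inputs.as_slice())
}
```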

@@ -0,0 +1,5 @@
pragma circom 2.1.0;
include "../../circuits/storer.circom";
component main { public [root, salt] } = StorageProver(32, 4, 2, 5);

@@ -1,5 +0,0 @@
pragma circom 2.1.0;
include "../../circuits/storer.circom";
component main = StorageProver(32, 4, 2);

@@ -3,25 +3,39 @@ const path = require("path");
const crypto = require("crypto");
const F1Field = require("ffjavascript").F1Field;
const Scalar = require("ffjavascript").Scalar;
const mimc7 = require("circomlibjs").mimc7;
const mimcsponge = require("circomlibjs").mimcsponge;
const { MerkleTree } = require("merkletreejs");
const {c} = require("circom_tester");
const chaiAsPromised = require('chai-as-promised');
const poseidon = require("circomlibjs/src/poseidon");
const wasm_tester = require("circom_tester").wasm;
// const snarkjs = require("snarkjs");
// const fs = require("fs");
chai.use(chaiAsPromised);
exports.p = Scalar.fromString("21888242871839275222246405745257275088548364400416034343698204186575808495617");
const Fr = new F1Field(exports.p);
const p = Scalar.fromString("21888242871839275222246405745257275088548364400416034343698204186575808495617");
const Fr = new F1Field(p);
const assert = chai.assert;
const expect = chai.expect;
const wasm_tester = require("circom_tester").wasm;
const key = BigInt(2);
function digest(input, chunkSize = 5) {
let chunks = Math.ceil(input.length / chunkSize);
let concat = [];
const digest = (buf) => mimc7.hash(buf, key);
const digestMulti = (buf) => mimc7.multiHash(buf, key);
for (let i = 0; i < chunks; i++) {
let chunk = input.slice(i * chunkSize, (i + 1) * chunkSize);
if (chunk.length < chunkSize) {
chunk = chunk.concat(Array(chunkSize - chunk.length).fill(0));
}
concat.push(poseidon(chunk));
}
if (concat.length > 1) {
return poseidon(concat);
}
return concat[0]
}
function merkelize(leafs) {
// simple merkle root (treehash) generator
@@ -33,12 +47,12 @@ function merkelize(leafs) {
var i = 0;
while (i < merkle.length) {
newMerkle.push(digestMulti([merkle[i], merkle[i + 1]]));
newMerkle.push(digest([merkle[i], merkle[i + 1]], 2));
i += 2;
}
if (merkle.length % 2 == 1) {
newMerkle.add(digestMulti([merkle[merkle.length - 2], merkle[merkle.length - 2]]));
newMerkle.add(digest([merkle[merkle.length - 2], merkle[merkle.length - 2]], 2));
}
merkle = newMerkle;
@@ -47,57 +61,71 @@ function merkelize(leafs) {
return merkle[0];
}
// TODO: should be removed at some point, as the rust test should be sufficient, but left here for now to aid debugging
describe("Storer test", function () {
this.timeout(100000);
const a = Array.from(crypto.randomBytes(32).values()).map((v) => BigInt(v));
const aHash = digestMulti(a);
const b = Array.from(crypto.randomBytes(32).values()).map((v) => BigInt(v));
const bHash = digestMulti(b);
const c = Array.from(crypto.randomBytes(32).values()).map((v) => BigInt(v));
const cHash = digestMulti(c);
const d = Array.from(crypto.randomBytes(32).values()).map((v) => BigInt(v));
const dHash = digestMulti(d);
const salt = Array.from(crypto.randomBytes(32).values()).map((v) => BigInt(v));
const saltHash = digestMulti(salt);
const a = Array.from(crypto.randomBytes(256).values()).map((v) => BigInt(v));
const aHash = digest(a, 16);
const b = Array.from(crypto.randomBytes(256).values()).map((v) => BigInt(v));
const bHash = digest(b, 16);
const c = Array.from(crypto.randomBytes(256).values()).map((v) => BigInt(v));
const cHash = digest(c, 16);
const d = Array.from(crypto.randomBytes(256).values()).map((v) => BigInt(v));
const dHash = digest(d, 16);
const salt = Array.from(crypto.randomBytes(256).values()).map((v) => BigInt(v));
const saltHash = digest(salt, 16);
it("Should merkelize", async () => {
let root = merkelize([aHash, bHash]);
let hash = digestMulti([aHash, bHash]);
let hash = digest([aHash, bHash], 2);
assert.equal(hash, root);
});
it("Should verify chunk is correct and part of dataset", async () => {
const cir = await wasm_tester(path.join(__dirname, "./circuits", "storer_test.circom"));
const cir = await wasm_tester("src/circuit_tests/storer-test.circom");
const root = merkelize([aHash, bHash, cHash, dHash]);
const parentHashL = digestMulti([aHash, bHash]);
const parentHashR = digestMulti([cHash, dHash]);
const parentHashL = digest([aHash, bHash], 2);
const parentHashR = digest([cHash, dHash], 2);
await cir.calculateWitness({
"chunks": [[a], [b], [c], [d]],
"siblings": [[bHash, parentHashR], [aHash, parentHashR], [dHash, parentHashL], [cHash, parentHashL]],
"siblings": [
[bHash, parentHashR],
[aHash, parentHashR],
[dHash, parentHashL],
[cHash, parentHashL]],
"hashes": [aHash, bHash, cHash, dHash],
"path": [0, 1, 2, 3],
"root": root,
"salt": saltHash,
}, true);
}).timeout(100000);
});
it("Should verify chunk is correct and part of dataset", async () => {
const cir = await wasm_tester(path.join(__dirname, "./circuits", "storer_test.circom"));
it("Should verify chunk is not correct and part of dataset", async () => {
const cir = await wasm_tester("src/circuit_tests/storer-test.circom");
const root = merkelize([aHash, bHash, cHash, dHash]);
const parentHashL = digestMulti([aHash, bHash]);
const parentHashR = digestMulti([cHash, dHash]);
const parentHashL = digest([aHash, bHash], 2);
const parentHashR = digest([cHash, dHash], 2);
const fn = async () => {
return await cir.calculateWitness({
"chunks": [[salt], [b], [c], [d]],
"siblings": [[bHash, parentHashR], [aHash, parentHashR], [dHash, parentHashL], [cHash, parentHashL]],
"chunks": [
[salt], // wrong chunk
[b],
[c],
[d]],
"siblings": [
[bHash, parentHashR],
[aHash, parentHashR],
[dHash, parentHashL],
[cHash, parentHashL]],
"hashes": [saltHash, bHash, cHash, dHash],
"path": [0, 1, 2, 3],
"root": root,
@@ -108,6 +136,33 @@ describe("Storer test", function () {
assert.isRejected(
fn(), Error,
/Error: Error: Assert Failed.\nError in template StorageProver_7 line: 75/);
});
}).timeout(100000);
function range(start, end) {
return Array(end - start + 1).fill().map((_, idx) => start + idx)
}
it("Should test poseidon digest", async () => {
const cir = await wasm_tester("src/circuit_tests/poseidon-digest-test.circom");
let input = range(0, 255).map((c) => BigInt(c));
await cir.calculateWitness({
"block": input,
"hash": digest(input, 16),
});
});
// it("Should prove digest with zkey file", async () => {
// let input = range(0, 255).map((c) => BigInt(c));
// const {proof, publicSignals} = await snarkjs.groth16.fullProve(
// {
// "block": input,
// "hash": digest(input, 16),
// },
// "src/circuit_tests/artifacts/poseidon-digest-test_js/poseidon-digest-test.wasm",
// "circuit_0000.zkey");
// const vKey = JSON.parse(fs.readFileSync("verification_key.json"));
// const res = await snarkjs.groth16.verify(vKey, publicSignals, proof);
// assert(res);
// });
});