diff --git a/src/lib.rs b/src/lib.rs
index 4a7177b..898fea0 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,6 +1,4 @@
 pub mod ffi;
-pub mod hash;
 pub mod poseidon;
-// pub mod storageproofs;
+pub mod storageproofs;
 mod circuit_tests;
-mod utils;
diff --git a/src/storageproofs.rs b/src/storageproofs.rs
index 96e812e..2682ca5 100644
--- a/src/storageproofs.rs
+++ b/src/storageproofs.rs
@@ -1,210 +1,84 @@
-// use ark_bn254::Bn254;
-// use ark_std::rand::rngs::ThreadRng;
-// use ark_circom::{CircomBuilder, CircomConfig};
-// use ark_groth16::{ProvingKey, generate_random_parameters, prepare_verifying_key, create_random_proof as prove};
-// use ruint::aliases::U256;
-// use crate::poseidon::hash1;
+use ark_bn254::{Bn254, Fr};
+use ark_circom::{CircomBuilder, CircomConfig};
+use ark_groth16::{
+    create_random_proof as prove, generate_random_parameters, prepare_verifying_key, verify_proof,
+    Proof, ProvingKey,
+};
+use ark_serialize::{CanonicalDeserialize, CanonicalSerialize, Read};
+use ark_std::rand::rngs::ThreadRng;
+use ruint::aliases::U256;
 
-// #[derive(Debug, Clone)]
-// #[repr(C)]
-// pub struct StorageProofs {
-//     builder: CircomBuilder,
-//     pvk: ProvingKey,
-//     rng: ThreadRng,
-// }
+#[derive(Debug, Clone)]
+#[repr(C)]
+pub struct StorageProofs {
+    builder: CircomBuilder<Bn254>,
+    pvk: ProvingKey<Bn254>,
+    rng: ThreadRng,
+}
 
-// impl StorageProofs {
-//     pub fn new(wtns: String, r1cs: String) -> Self {
-//         let mut rng = ThreadRng::default();
-//         let builder = CircomBuilder::new(CircomConfig::::new(wtns, r1cs).unwrap());
-//         let pvk = generate_random_parameters::(builder.setup(), &mut rng).unwrap();
+impl StorageProofs {
+    pub fn new(wtns: String, r1cs: String) -> Self {
+        let mut rng = ThreadRng::default();
+        let builder = CircomBuilder::new(CircomConfig::<Bn254>::new(wtns, r1cs).unwrap());
+        let pvk = generate_random_parameters::<Bn254, _, _>(builder.setup(), &mut rng).unwrap();
 
-//         Self { builder, pvk, rng }
-//     }
+        Self { builder, pvk, rng }
+    }
 
-//     pub fn prove(
-//         &mut self,
-//         chunks: Vec>,
-//         siblings: Vec>,
-//         hashes: Vec,
-//         path: Vec,
-//         root: Fq,
-//         salt: Fq,
-//         proof_bytes: Vec,
-//         public_inputs_bytes: Vec,
-//     ) -> Result<(), String> {
-//         let mut builder = self.builder.clone();
+    pub fn prove(
+        &mut self,
+        chunks: Vec<Vec<U256>>,
+        siblings: Vec<Vec<U256>>,
+        hashes: Vec<U256>,
+        path: Vec<u32>,
+        root: U256,
+        salt: U256,
+        proof_bytes: &mut Vec<u8>,
+        public_inputs_bytes: &mut Vec<u8>,
+    ) -> Result<(), String> {
+        let mut builder = self.builder.clone();
 
-//         chunks.iter().flat_map(|c| c.into_iter()).for_each(|c| {
-//             builder.push_input(
-//                 "chunks",
-//                 BigInt::from_biguint(Sign::Plus, c.into_repr().into()),
-//             )
-//         });
+        // vec of vecs is flattened, since wasm expects a contiguous array in memory
+        chunks
+            .iter()
+            .flat_map(|c| c.into_iter())
+            .for_each(|c| builder.push_input("chunks", *c));
 
-//         siblings.iter().flat_map(|c| c.into_iter()).for_each(|c| {
-//             builder.push_input(
-//                 "siblings",
-//                 BigInt::from_biguint(Sign::Plus, c.into_repr().into()),
-//             )
-//         });
+        siblings
+            .iter()
+            .flat_map(|c| c.into_iter())
+            .for_each(|c| builder.push_input("siblings", *c));
 
-//         hashes.iter().for_each(|c| {
-//             builder.push_input(
-//                 "hashes",
-//                 BigInt::from_biguint(Sign::Plus, c.into_repr().into()),
-//             )
-//         });
+        hashes.iter().for_each(|c| builder.push_input("hashes", *c));
 
-//         path.iter()
-//             .for_each(|c| builder.push_input("path", BigInt::new(Sign::Plus, vec![*c])));
+        path.iter().for_each(|c| builder.push_input("path", *c));
 
-//         builder.push_input(
-//             "root",
-//             BigInt::from_biguint(Sign::Plus, root.into_repr().into()),
-//         );
+        builder.push_input("root", root);
 
-//         builder.push_input(
-//             "salt",
-//             BigInt::from_biguint(Sign::Plus, salt.into_repr().into()),
-//         );
+        builder.push_input("salt", salt);
 
-//         let circuit = builder.build().unwrap();
-//         let inputs = circuit.get_public_inputs().unwrap();
-//         let proof = prove(circuit, &self.pvk, &mut self.rng).unwrap();
-//         let vk = prepare_verifying_key(&self.pvk.vk);
+        let circuit = builder.build().map_err(|e| e.to_string())?;
+        let inputs = circuit
+            .get_public_inputs()
+            .ok_or("Unable to get public inputs!")?;
+        let proof = prove(circuit, &self.pvk, &mut self.rng).map_err(|e| e.to_string())?;
 
-//         // proof.serialize(proof_bytes).unwrap();
-//         // inputs.serialize(public_inputs_bytes).unwrap();
+        proof.serialize(proof_bytes).map_err(|e| e.to_string())?;
+        inputs
+            .serialize(public_inputs_bytes)
+            .map_err(|e| e.to_string())?;
 
-//         Ok(())
-//     }
+        Ok(())
+    }
 
-//     // fn verify(self, hashes: Vec, root: i32, salt: i32,vk_bytes: R, proof_bytes: R) -> Result<(), String> {
-//     //     let vk = ProvingKey::::deserialize(vk_bytes).unwrap();
-//     //     let proof = Proof::::deserialize(proof_bytes).unwrap();
+    pub fn verify<R: Read>(self, proof_bytes: R, mut public_inputs: R) -> Result<(), String> {
+        let inputs: Vec<Fr> =
+            CanonicalDeserialize::deserialize(&mut public_inputs).map_err(|e| e.to_string())?;
+        let proof = Proof::<Bn254>::deserialize(proof_bytes).map_err(|e| e.to_string())?;
+        let vk = prepare_verifying_key(&self.pvk.vk);
 
-//     //     let vk = prepare_verifying_key(&self.pvk.vk);
-//     //     verify_proof(&vk, &proof, &public_inputs).unwrap();
+        verify_proof(&vk, &proof, &inputs.as_slice()).map_err(|e| e.to_string())?;
 
-//     //     Ok(())
-//     // }
-// }
-
-// #[cfg(test)]
-// mod test {
-//     use super::StorageProofs;
-//     use ark_bn254::Fq;
-//     use ark_ff::{UniformRand, Zero};
-//     use ark_std::rand::{rngs::ThreadRng, Rng};
-//     use arkworks_native_gadgets::{
-//         poseidon::{sbox::PoseidonSbox, *},
-//         prelude::ark_ff::PrimeField,
-//     };
-
-//     use arkworks_utils::{
-//         bytes_matrix_to_f, bytes_vec_to_f, poseidon_params::setup_poseidon_params, Curve,
-//     };
-
-//     fn digest(input: Vec, chunk_size: Option) -> Result {
-//         let chunk_size = chunk_size.unwrap_or(4);
-//         let chunks = ((input.len() as f32) / (chunk_size as f32)).ceil() as usize;
-//         let mut concat = vec![];
-//         let hasher = hash1(Curve::Bn254, 5, (chunk_size + 1) as u8);
-
-//         let mut i: usize = 0;
-//         while i < chunks {
-//             let range = (i * chunk_size)..std::cmp::min((i + 1) * chunk_size, input.len());
-
-//             let mut chunk: Vec = input[range].to_vec();
-
-//             if chunk.len() < chunk_size {
-//                 chunk.resize(chunk_size as usize, Fq::zero());
-//             }
-
-//             concat.push(hasher(chunk)?);
-//             i += chunk_size;
-//         }
-
-//         if concat.len() > 1 {
-//             return hasher(concat);
-//         }
-
-//         return Ok(concat[0]);
-//     }
-
-//     fn merkelize(leafs: Vec) -> Fq {
-//         // simple merkle root (treehash) generator
-//         // unbalanced trees will have the last leaf duplicated
-//         let mut merkle: Vec = leafs;
-//         let hasher = hasher(Curve::Bn254, 5, 3);
-
-//         while merkle.len() > 1 {
-//             let mut new_merkle = Vec::new();
-//             let mut i = 0;
-//             while i < merkle.len() {
-//                 new_merkle.push(hasher(vec![merkle[i], merkle[i + 1]]).unwrap());
-//                 i += 2;
-//             }
-
-//             if merkle.len() % 2 == 1 {
-//                 new_merkle.push(
-//                     hasher(vec![merkle[merkle.len() - 2], merkle[merkle.len() - 2]]).unwrap(),
-//                 );
-//             }
-
-//             merkle = new_merkle;
-//         }
-
-//         return merkle[0];
-//     }
-
-//     #[test]
-//     fn should_proove() {
-//         let mut rng = ThreadRng::default();
-//         let data: Vec<(Vec, Fq)> = (0..4)
-//             .map(|_| {
-//                 let preimages = vec![Fq::rand(&mut rng); 32];
-//                 let hash = digest(preimages.clone(), None).unwrap();
-//                 return (preimages, hash);
-//             })
-//             .collect();
-
-//         let chunks: Vec> = data.iter().map(|c| c.0.to_vec()).collect();
-//         let hashes: Vec = data.iter().map(|c| c.1).collect();
-//         let path = [0, 1, 2, 3].to_vec();
-
-//         let hash2 = hasher(Curve::Bn254, 5, 3);
-//         let parent_hash_l = hash2(vec![hashes[0], hashes[1]]).unwrap();
-//         let parent_hash_r = hash2(vec![hashes[2], hashes[3]]).unwrap();
-
-//         let siblings = [
-//             [hashes[1], parent_hash_r].to_vec(),
-//             [hashes[1], parent_hash_r].to_vec(),
-//             [hashes[3], parent_hash_l].to_vec(),
-//             [hashes[2], parent_hash_l].to_vec(),
-//         ]
-//         .to_vec();
-
-//         let root = merkelize(hashes.clone());
-//         let mut proof_bytes: Vec = Vec::new();
-//         let mut public_inputs_bytes: Vec = Vec::new();
-
-//         let r1cs = "/Users/dryajov/personal/projects/status/codex-zk/test/circuits/artifacts/storer_test.r1cs";
-//         let wasm = "/Users/dryajov/personal/projects/status/codex-zk/test/circuits/artifacts/storer_test_js/storer_test.wasm";
-
-//         let mut prover = StorageProofs::new(wasm.to_string(), r1cs.to_string());
-//         prover
-//             .prove(
-//                 chunks,
-//                 siblings,
-//                 hashes,
-//                 path,
-//                 root,
-//                 root, // random salt
-//                 proof_bytes,
-//                 public_inputs_bytes,
-//             )
-//             .unwrap();
-//     }
-// }
+        Ok(())
+    }
+}
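Usage note (not part of the diff): below is a minimal round-trip sketch showing how the new StorageProofs API is meant to be driven. The crate name, the module path, and the exact input types (Vec<Vec<U256>> chunks and siblings, Vec<U256> hashes, Vec<u32> path) are assumptions inferred from the added code above, not something this diff pins down, and the caller is assumed to already hold the circuit inputs.

// Hypothetical example only: `codex_storage_proofs` is an assumed crate name; the
// input shapes mirror the prove() signature as reconstructed above.
use codex_storage_proofs::storageproofs::StorageProofs;
use ruint::aliases::U256;

fn prove_and_verify(
    wasm: &str,               // path to the circuit's witness-generator wasm
    r1cs: &str,               // path to the circuit's r1cs file
    chunks: Vec<Vec<U256>>,   // data chunks, one vec per opened leaf
    siblings: Vec<Vec<U256>>, // Merkle siblings for each opened leaf
    hashes: Vec<U256>,        // leaf hashes
    path: Vec<u32>,           // Merkle path indices
    root: U256,
    salt: U256,
) -> Result<(), String> {
    // new() runs a fresh (test-only) Groth16 parameter generation for the circuit.
    let mut prover = StorageProofs::new(wasm.to_string(), r1cs.to_string());

    let mut proof_bytes: Vec<u8> = Vec::new();
    let mut public_inputs_bytes: Vec<u8> = Vec::new();

    // prove() builds the witness, creates the proof, and serializes both the proof
    // and the public inputs into the caller-provided buffers.
    prover.prove(
        chunks,
        siblings,
        hashes,
        path,
        root,
        salt,
        &mut proof_bytes,
        &mut public_inputs_bytes,
    )?;

    // verify() deserializes both buffers and checks the proof against the verifying
    // key derived from the same proving key.
    prover.verify(proof_bytes.as_slice(), public_inputs_bytes.as_slice())
}

Since ark_serialize::Read is a re-export of std::io::Read, plain byte slices satisfy the R: Read bound on verify, so the serialized buffers can be passed back in directly.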