RLN: Update Rust and C API (#20)

* fix(rln): ignore dirty rln submodule

* feat(rln): add RLN witness & proof values byte serialization/deserialization

* feat(rln): add Merkle-tree public APIs

* fix(rln): use references to avoid unnecessary clone()

* fix(rln): fix rust public API, code refactor

* fix(rln): change vector length serialization from usize to u64; add random witness generation

* feat(rln): add C APIs

* chore(rln): cargo format/clippy

* feat(rln): add C APIs tests

* chore(rln): minor code restyling

* refactor(rln): change &Vec<u8> to &[u8] (avoids new object creation)
Authored by G on 2022-06-22 16:36:54 +02:00, committed by GitHub
parent 89593dc033
commit 95352a4a7c
9 changed files with 1020 additions and 308 deletions
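For orientation: the helpers added below (field_to_bytes_le, vec_field_to_bytes_le, vec_u8_to_bytes_le) encode each BN254 field element as 32 little-endian bytes and prefix vectors with their length as a little-endian u64. The following is a minimal, self-contained sketch of that layout only, with plain u64 values standing in for field elements and an illustrative function name; it is not part of the diff.

fn serialize_vec_le(input: &[u64]) -> Vec<u8> {
    let mut bytes: Vec<u8> = Vec::new();
    // The length prefix is a u64 (8 bytes, little-endian), independent of the platform's usize width.
    bytes.extend((input.len() as u64).to_le_bytes());
    for el in input {
        // Each element is written as a fixed-size little-endian chunk
        // (8 bytes here; 32 bytes per BN254 field element in the real helpers).
        bytes.extend(el.to_le_bytes());
    }
    bytes
}

fn main() {
    let encoded = serialize_vec_le(&[1, 2, 3]);
    assert_eq!(encoded.len(), 8 + 3 * 8); // length prefix + three elements
    assert_eq!(encoded[0..8], 3u64.to_le_bytes()); // little-endian u64 length
}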

.gitmodules

@ -1,5 +1,6 @@
[submodule "rln/vendor/rln"]
path = rln/vendor/rln
ignore = dirty
url = https://github.com/privacy-scaling-explorations/rln.git
[submodule "semaphore/vendor/semaphore"]
path = semaphore/vendor/semaphore


@ -45,7 +45,7 @@ tracing-subscriber = "0.2"
# json
serde = { version = "1.0.103", default-features = false, features = ["derive"] }
serde_json = "1.0.48"
bincode = "1.3.3"
once_cell = "1.8"
poseidon-rs = "0.0.8"
@ -68,6 +68,7 @@ bellman = { package = "bellman_ce", version = "0.3.4", default-features = false
#semaphore = { git = "https://github.com/oskarth/semaphore-rs" }
semaphore = { git = "https://github.com/worldcoin/semaphore-rs", rev = "d462a43"}
rand = "0.8"
tempfile = "3.3.0"


@ -8,7 +8,7 @@ use num_bigint::BigUint;
use serde_json::Value;
use std::convert::TryFrom;
use std::fs::File;
use std::io::{Cursor, Write};
use std::io::{Cursor, Error, ErrorKind, Result, Write};
use std::path::Path;
use std::str::FromStr;
@ -17,23 +17,30 @@ const VK_PATH: &str = "./resources/verifying_key.json";
const R1CS_PATH: &str = "./resources/rln.r1cs";
const WASM_PATH: &str = "./resources/rln.wasm";
pub fn ZKEY() -> ProvingKey<Bn254> /*, ConstraintMatrices<Fr>)*/ {
pub fn ZKEY() -> Result<ProvingKey<Bn254>> /*, ConstraintMatrices<Fr>)*/ {
if Path::new(ZKEY_PATH).exists() {
let mut file = File::open(ZKEY_PATH).unwrap();
let (proving_key, _matrices) = read_zkey(&mut file).unwrap();
proving_key
Ok(proving_key)
} else {
Err(Error::new(ErrorKind::NotFound, "No proving key found!"))
}
}
pub fn VK() -> VerifyingKey<Bn254> {
pub fn VK() -> Result<VerifyingKey<Bn254>> {
let verifying_key: VerifyingKey<Bn254>;
if Path::new(VK_PATH).exists() {
verifying_key = vk_from_json(VK_PATH);
verifying_key
Ok(verifying_key)
} else if Path::new(ZKEY_PATH).exists() {
verifying_key = ZKEY().vk;
verifying_key
verifying_key = ZKEY().unwrap().vk;
Ok(verifying_key)
} else {
panic!("No proving/verification key present!");
Err(Error::new(
ErrorKind::NotFound,
"No proving/verification key found!",
))
}
}
@ -133,5 +140,8 @@ fn vk_from_json(vk_path: &str) -> VerifyingKey<Bn254> {
// Checks verification key to be correct with respect to proving key
pub fn check_vk_from_zkey(verifying_key: VerifyingKey<Bn254>) {
assert_eq!(ZKEY().vk, verifying_key);
let zkey = ZKEY();
if zkey.is_ok() {
assert_eq!(zkey.unwrap().vk, verifying_key);
}
}


@ -6,11 +6,6 @@ use std::slice;
///
/// Also heavily inspired by https://github.com/kilic/rln/blob/master/src/ffi.rs
// TODO Update mul to rln references
// TODO Make sure get_root etc is on right LE form
// TODO Add other FFI interfaces (update_next_member etc) while making sure it
// is according to new RLN spec
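// Buffer passes a borrowed byte slice across the C FFI boundary as a raw pointer plus a length
// (fields not shown in this hunk); conversions to and from &[u8] are provided below.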
#[repr(C)]
#[derive(Clone, Debug, PartialEq)]
pub struct Buffer {
@ -33,45 +28,99 @@ impl<'a> From<&Buffer> for &'a [u8] {
}
}
// TODO: check if there are security implications for this clippy. It seems we should have pub unsafe extern "C" fn ...
// #[allow(clippy::not_unsafe_ptr_arg_deref)]
////////////////////////////////////////////////////////
// RLN APIs
////////////////////////////////////////////////////////
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn new(tree_height: usize, ctx: *mut *mut RLN) -> bool {
let rln = RLN::new(tree_height);
unsafe { *ctx = Box::into_raw(Box::new(rln)) };
true
}
////////////////////////////////////////////////////////
// Merkle tree APIs
////////////////////////////////////////////////////////
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn set_tree(ctx: *mut RLN, tree_height: usize) -> bool {
let rln = unsafe { &mut *ctx };
rln.set_tree(tree_height).is_ok()
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn set_leaf(ctx: *mut RLN, index: usize, input_buffer: *const Buffer) -> bool {
let rln = unsafe { &mut *ctx };
let input_data = <&[u8]>::from(unsafe { &*input_buffer });
rln.set_leaf(index, input_data).is_ok()
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn set_leaves(ctx: *mut RLN, input_buffer: *const Buffer) -> bool {
let rln = unsafe { &mut *ctx };
let input_data = <&[u8]>::from(unsafe { &*input_buffer });
rln.set_leaves(input_data).is_ok()
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn get_root(ctx: *const RLN, output_buffer: *mut Buffer) -> bool {
let rln = unsafe { &*ctx };
let mut output_data: Vec<u8> = Vec::new();
match rln.get_root(&mut output_data) {
Ok(_) => true,
Err(_) => false,
};
if rln.get_root(&mut output_data).is_ok() {
unsafe { *output_buffer = Buffer::from(&output_data[..]) };
std::mem::forget(output_data);
true
} else {
std::mem::forget(output_data);
false
}
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn new_circuit(ctx: *mut *mut RLN) -> bool {
println!("rln ffi: new");
let mul = RLN::new();
unsafe { *ctx = Box::into_raw(Box::new(mul)) };
pub extern "C" fn get_proof(ctx: *const RLN, index: usize, output_buffer: *mut Buffer) -> bool {
let rln = unsafe { &*ctx };
let mut output_data: Vec<u8> = Vec::new();
if rln.get_proof(index, &mut output_data).is_ok() {
unsafe { *output_buffer = Buffer::from(&output_data[..]) };
std::mem::forget(output_data);
true
} else {
std::mem::forget(output_data);
false
}
}
////////////////////////////////////////////////////////
// zkSNARKs APIs
////////////////////////////////////////////////////////
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn prove(ctx: *const RLN, output_buffer: *mut Buffer) -> bool {
println!("RLN ffi: prove");
let mul = unsafe { &*ctx };
pub extern "C" fn prove(
ctx: *const RLN,
input_buffer: *const Buffer,
output_buffer: *mut Buffer,
) -> bool {
let rln = unsafe { &*ctx };
let input_data = <&[u8]>::from(unsafe { &*input_buffer });
let mut output_data: Vec<u8> = Vec::new();
match mul.prove(&mut output_data) {
Ok(proof_data) => proof_data,
Err(_) => return false,
};
if rln.prove(input_data, &mut output_data).is_ok() {
unsafe { *output_buffer = Buffer::from(&output_data[..]) };
std::mem::forget(output_data);
true
} else {
std::mem::forget(output_data);
false
}
}
#[allow(clippy::not_unsafe_ptr_arg_deref)]
@ -79,18 +128,223 @@ pub extern "C" fn prove(ctx: *const RLN, output_buffer: *mut Buffer) -> bool {
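// The boolean return value reports whether the verification call itself succeeded;
// the proof's validity is written separately to proof_is_valid_ptr.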
pub extern "C" fn verify(
ctx: *const RLN,
proof_buffer: *const Buffer,
result_ptr: *mut u32,
proof_is_valid_ptr: *mut bool,
) -> bool {
println!("RLN ffi: verify");
let mul = unsafe { &*ctx };
let rln = unsafe { &*ctx };
let proof_data = <&[u8]>::from(unsafe { &*proof_buffer });
if match mul.verify(proof_data) {
if match rln.verify(proof_data) {
Ok(verified) => verified,
Err(_) => return false,
} {
unsafe { *result_ptr = 0 };
unsafe { *proof_is_valid_ptr = true };
} else {
unsafe { *result_ptr = 1 };
unsafe { *proof_is_valid_ptr = false };
};
true
}
#[cfg(test)]
mod test {
use super::*;
use crate::circuit::*;
use crate::protocol::*;
use crate::utils::*;
use ark_groth16::Proof as ArkProof;
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use ark_std::rand::thread_rng;
use ark_std::str::FromStr;
use rand::Rng;
use semaphore::{identity::Identity, poseidon_hash, Field};
use serde::{Deserialize, Serialize};
use std::io::Cursor;
use std::mem::MaybeUninit;
#[test]
// We test batch Merkle tree additions
fn test_merkle_batch_additions_ffi() {
let tree_height = 16;
// We generate a vector of random leaves
let mut leaves: Vec<Field> = Vec::new();
let mut rng = thread_rng();
for _ in 0..256 {
leaves.push(hash_to_field(&rng.gen::<[u8; 32]>()));
}
// We create a RLN instance
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
let success = new(tree_height, rln_pointer.as_mut_ptr());
assert!(success, "RLN object creation failed");
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
// We first add leaves one by one
for (i, leaf) in leaves.iter().enumerate() {
// We serialize the leaf and set it at the provided index
let leaf_ser = field_to_bytes_le(&leaf);
let input_buffer = &Buffer::from(leaf_ser.as_ref());
let success = set_leaf(rln_pointer, i, input_buffer);
assert!(success, "set leaf call failed");
}
// We get the root of the tree obtained by adding one leaf at a time
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
let success = get_root(rln_pointer, output_buffer.as_mut_ptr());
assert!(success, "get root call failed");
let output_buffer = unsafe { output_buffer.assume_init() };
let result_data = <&[u8]>::from(&output_buffer).to_vec();
let (root_single, _) = bytes_le_to_field(&result_data);
// We reset the tree to default
let success = set_tree(rln_pointer, tree_height);
assert!(success, "set tree call failed");
// We add leaves in a batch into the tree
let leaves_ser = vec_field_to_bytes_le(&leaves);
let input_buffer = &Buffer::from(leaves_ser.as_ref());
let success = set_leaves(rln_pointer, input_buffer);
assert!(success, "set leaf call failed");
// We get the root of the tree obtained by adding leaves in a batch
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
let success = get_root(rln_pointer, output_buffer.as_mut_ptr());
assert!(success, "get root call failed");
let output_buffer = unsafe { output_buffer.assume_init() };
let result_data = <&[u8]>::from(&output_buffer).to_vec();
let (root_batch, _) = bytes_le_to_field(&result_data);
assert_eq!(root_single, root_batch);
}
#[test]
// This test is similar to the one in lib, but uses only public C API
fn test_merkle_proof_ffi() {
let tree_height = 16;
let leaf_index = 3;
// We create a RLN instance
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
let success = new(tree_height, rln_pointer.as_mut_ptr());
assert!(success, "RLN object creation failed");
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
// generate identity
// We follow zk-kit approach for identity generation
let id = Identity::from_seed(b"test-merkle-proof");
let identity_secret = poseidon_hash(&vec![id.trapdoor, id.nullifier]);
let id_commitment = poseidon_hash(&vec![identity_secret]);
// We prepare id_commitment and we set the leaf at provided index
let leaf_ser = field_to_bytes_le(&id_commitment);
let input_buffer = &Buffer::from(leaf_ser.as_ref());
let success = set_leaf(rln_pointer, leaf_index, input_buffer);
assert!(success, "set leaf call failed");
// We obtain the Merkle tree root
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
let success = get_root(rln_pointer, output_buffer.as_mut_ptr());
assert!(success, "get root call failed");
let output_buffer = unsafe { output_buffer.assume_init() };
let result_data = <&[u8]>::from(&output_buffer).to_vec();
let (root, _) = bytes_le_to_field(&result_data);
assert_eq!(
root,
Field::from_str("0x27401a4559ce263630907ce3b77c570649e28ede22d2a7f5296839627a16e870")
.unwrap()
);
// We obtain the Merkle proof for the leaf at leaf_index
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
let success = get_proof(rln_pointer, leaf_index, output_buffer.as_mut_ptr());
assert!(success, "get merkle proof call failed");
let output_buffer = unsafe { output_buffer.assume_init() };
let result_data = <&[u8]>::from(&output_buffer).to_vec();
let (path_elements, read) = bytes_le_to_vec_field(&result_data);
let (identity_path_index, _) = bytes_le_to_vec_u8(&result_data[read..].to_vec());
// We check correct computation of the path and indexes
let expected_path_elements = vec![
Field::from_str("0x0000000000000000000000000000000000000000000000000000000000000000")
.unwrap(),
Field::from_str("0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864")
.unwrap(),
Field::from_str("0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1")
.unwrap(),
Field::from_str("0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238")
.unwrap(),
Field::from_str("0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a")
.unwrap(),
Field::from_str("0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55")
.unwrap(),
Field::from_str("0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78")
.unwrap(),
Field::from_str("0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d")
.unwrap(),
Field::from_str("0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61")
.unwrap(),
Field::from_str("0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747")
.unwrap(),
Field::from_str("0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2")
.unwrap(),
Field::from_str("0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636")
.unwrap(),
Field::from_str("0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a")
.unwrap(),
Field::from_str("0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0")
.unwrap(),
Field::from_str("0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c")
.unwrap(),
];
let expected_identity_path_index: Vec<u8> =
vec![1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
assert_eq!(path_elements, expected_path_elements);
assert_eq!(identity_path_index, expected_identity_path_index);
// We double check that the proof computed from public API is correct
let root_from_proof =
get_tree_root(&id_commitment, &path_elements, &identity_path_index, false);
assert_eq!(root, root_from_proof);
}
#[test]
fn test_groth16_proof_ffi() {
let tree_height: usize = 16;
// We create a RLN instance
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
let success = new(tree_height, rln_pointer.as_mut_ptr());
assert!(success, "RLN object creation failed");
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };
// We generate random witness instances and relative proof values
let rln_witness = random_rln_witness(tree_height);
let proof_values = proof_values_from_witness(&rln_witness);
// We serialize the witness and call the prove API
let rln_witness_ser = serialize_witness(&rln_witness);
let input_buffer = &Buffer::from(rln_witness_ser.as_ref());
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
let success = prove(rln_pointer, input_buffer, output_buffer.as_mut_ptr());
assert!(success, "prove call failed");
let output_buffer = unsafe { output_buffer.assume_init() };
// We read the returned proof and we append proof values for verify
let serialized_proof = <&[u8]>::from(&output_buffer).to_vec();
let serialized_proof_values = serialize_proof_values(&proof_values);
let mut verify_data = Vec::<u8>::new();
verify_data.extend(&serialized_proof);
verify_data.extend(&serialized_proof_values);
// We prepare input proof values and we call verify
let input_buffer = &Buffer::from(verify_data.as_ref());
let mut proof_is_valid: bool = false;
let proof_is_valid_ptr = &mut proof_is_valid as *mut bool;
let success = verify(rln_pointer, input_buffer, proof_is_valid_ptr);
assert!(success, "verify call failed");
assert_eq!(proof_is_valid, true);
}
}


@ -23,6 +23,50 @@ mod test {
Field,
};
// Input generated with https://github.com/oskarth/zk-kit/commit/b6a872f7160c7c14e10a0ea40acab99cbb23c9a8
const WITNESS_JSON: &str = r#"
{
"identity_secret": "12825549237505733615964533204745049909430608936689388901883576945030025938736",
"path_elements": [
"18622655742232062119094611065896226799484910997537830749762961454045300666333",
"20590447254980891299813706518821659736846425329007960381537122689749540452732",
"7423237065226347324353380772367382631490014989348495481811164164159255474657",
"11286972368698509976183087595462810875513684078608517520839298933882497716792",
"3607627140608796879659380071776844901612302623152076817094415224584923813162",
"19712377064642672829441595136074946683621277828620209496774504837737984048981",
"20775607673010627194014556968476266066927294572720319469184847051418138353016",
"3396914609616007258851405644437304192397291162432396347162513310381425243293",
"21551820661461729022865262380882070649935529853313286572328683688269863701601",
"6573136701248752079028194407151022595060682063033565181951145966236778420039",
"12413880268183407374852357075976609371175688755676981206018884971008854919922",
"14271763308400718165336499097156975241954733520325982997864342600795471836726",
"20066985985293572387227381049700832219069292839614107140851619262827735677018",
"9394776414966240069580838672673694685292165040808226440647796406499139370960",
"11331146992410411304059858900317123658895005918277453009197229807340014528524"
],
"identity_path_index": [
1,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
],
"x": "8143228284048792769012135629627737459844825626241842423967352803501040982",
"epoch": "0x0000005b612540fc986b42322f8cb91c2273afad58ed006fdba0c97b4b16b12f",
"rln_identifier": "11412926387081627876309792396682864042420635853496105400039841573530884328439"
}
"#;
#[test]
// We test Merkle Tree generation, proofs and verification
fn test_merkle_proof() {
@ -99,66 +143,21 @@ mod test {
#[test]
// We test a RLN proof generation and verification
fn test_witness_from_json() {
// From rln JSON witness
// Input generated with https://github.com/oskarth/zk-kit/commit/b6a872f7160c7c14e10a0ea40acab99cbb23c9a8
let input_json_str = r#"
{
"identity_secret": "12825549237505733615964533204745049909430608936689388901883576945030025938736",
"path_elements": [
"18622655742232062119094611065896226799484910997537830749762961454045300666333",
"20590447254980891299813706518821659736846425329007960381537122689749540452732",
"7423237065226347324353380772367382631490014989348495481811164164159255474657",
"11286972368698509976183087595462810875513684078608517520839298933882497716792",
"3607627140608796879659380071776844901612302623152076817094415224584923813162",
"19712377064642672829441595136074946683621277828620209496774504837737984048981",
"20775607673010627194014556968476266066927294572720319469184847051418138353016",
"3396914609616007258851405644437304192397291162432396347162513310381425243293",
"21551820661461729022865262380882070649935529853313286572328683688269863701601",
"6573136701248752079028194407151022595060682063033565181951145966236778420039",
"12413880268183407374852357075976609371175688755676981206018884971008854919922",
"14271763308400718165336499097156975241954733520325982997864342600795471836726",
"20066985985293572387227381049700832219069292839614107140851619262827735677018",
"9394776414966240069580838672673694685292165040808226440647796406499139370960",
"11331146992410411304059858900317123658895005918277453009197229807340014528524"
],
"identity_path_index": [
1,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
],
"x": "8143228284048792769012135629627737459844825626241842423967352803501040982",
"epoch": "0x0000005b612540fc986b42322f8cb91c2273afad58ed006fdba0c97b4b16b12f",
"rln_identifier": "11412926387081627876309792396682864042420635853496105400039841573530884328439"
}
"#;
// We generate all relevant keys
let proving_key = &ZKEY();
let verification_key = &VK();
let proving_key = ZKEY().unwrap();
let verification_key = VK().unwrap();
let builder = CIRCOM();
// We compute witness from the json input example
let rln_witness = rln_witness_from_json(input_json_str);
let rln_witness = rln_witness_from_json(WITNESS_JSON);
// Let's generate a zkSNARK proof
let proof = generate_proof(builder, proving_key, &rln_witness).unwrap();
let proof = generate_proof(builder, &proving_key, &rln_witness).unwrap();
let proof_values = proof_values_from_witness(&rln_witness);
// Let's verify the proof
let verified = verify_proof(verification_key, proof, &proof_values);
let verified = verify_proof(&verification_key, &proof, &proof_values);
assert!(verified.unwrap());
}
@ -193,18 +192,33 @@ mod test {
rln_witness_from_values(identity_secret, &merkle_proof, x, epoch, rln_identifier);
// We generate all relevant keys
let proving_key = &ZKEY();
let verification_key = &VK();
let proving_key = ZKEY().unwrap();
let verification_key = VK().unwrap();
let builder = CIRCOM();
// Let's generate a zkSNARK proof
let proof = generate_proof(builder, proving_key, &rln_witness).unwrap();
let proof = generate_proof(builder, &proving_key, &rln_witness).unwrap();
let proof_values = proof_values_from_witness(&rln_witness);
// Let's verify the proof
let success = verify_proof(verification_key, proof, &proof_values).unwrap();
let success = verify_proof(&verification_key, &proof, &proof_values).unwrap();
assert!(success);
}
#[test]
fn test_serialization() {
// We test witness serialization
let rln_witness = rln_witness_from_json(WITNESS_JSON);
let ser = serialize_witness(&rln_witness);
let deser = deserialize_witness(&ser);
assert_eq!(rln_witness, deser);
// We test Proof values serialization
let proof_values = proof_values_from_witness(&rln_witness);
let ser = serialize_proof_values(&proof_values);
let deser = deserialize_proof_values(&ser);
assert_eq!(proof_values, deser);
}
}


@ -1,98 +0,0 @@
use color_eyre::Result;
// Tracing
use ark_relations::r1cs::{ConstraintLayer, ConstraintTrace, TracingMode};
use tracing_subscriber::layer::SubscriberExt;
// JSON
use rln::circuit::{CIRCOM, VK, ZKEY};
use rln::protocol::{
generate_proof, proof_values_from_witness, rln_witness_from_json, verify_proof,
};
// RLN
fn groth16_proof_example() -> Result<()> {
// Tracing to help with debugging
let mut layer = ConstraintLayer::default();
layer.mode = TracingMode::OnlyConstraints;
let subscriber = tracing_subscriber::Registry::default().with(layer);
let _guard = tracing::subscriber::set_default(subscriber);
let trace = ConstraintTrace::capture();
println!("Trace is: {:?}", trace);
// From rln JSON witness
// Input generated with https://github.com/oskarth/zk-kit/commit/b6a872f7160c7c14e10a0ea40acab99cbb23c9a8
let input_json_str = r#"
{
"identity_secret": "12825549237505733615964533204745049909430608936689388901883576945030025938736",
"path_elements": [
"18622655742232062119094611065896226799484910997537830749762961454045300666333",
"20590447254980891299813706518821659736846425329007960381537122689749540452732",
"7423237065226347324353380772367382631490014989348495481811164164159255474657",
"11286972368698509976183087595462810875513684078608517520839298933882497716792",
"3607627140608796879659380071776844901612302623152076817094415224584923813162",
"19712377064642672829441595136074946683621277828620209496774504837737984048981",
"20775607673010627194014556968476266066927294572720319469184847051418138353016",
"3396914609616007258851405644437304192397291162432396347162513310381425243293",
"21551820661461729022865262380882070649935529853313286572328683688269863701601",
"6573136701248752079028194407151022595060682063033565181951145966236778420039",
"12413880268183407374852357075976609371175688755676981206018884971008854919922",
"14271763308400718165336499097156975241954733520325982997864342600795471836726",
"20066985985293572387227381049700832219069292839614107140851619262827735677018",
"9394776414966240069580838672673694685292165040808226440647796406499139370960",
"11331146992410411304059858900317123658895005918277453009197229807340014528524"
],
"identity_path_index": [
1,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
],
"x": "8143228284048792769012135629627737459844825626241842423967352803501040982",
"epoch": "0x0000005b612540fc986b42322f8cb91c2273afad58ed006fdba0c97b4b16b12f",
"rln_identifier": "11412926387081627876309792396682864042420635853496105400039841573530884328439"
}
"#;
// We generate all relevant keys
let proving_key = &ZKEY();
let verification_key = &VK();
let builder = CIRCOM();
// We compute witness from the json input example
let rln_witness = rln_witness_from_json(input_json_str);
// Let's generate a zkSNARK proof
let proof = generate_proof(builder, proving_key, &rln_witness).unwrap();
let proof_values = proof_values_from_witness(&rln_witness);
// Let's verify the proof
let verified = verify_proof(verification_key, proof, &proof_values);
assert!(verified.unwrap());
Ok(())
}
fn main() {
println!("rln example proof");
match groth16_proof_example() {
Ok(_) => println!("Success"),
Err(_) => println!("Error"),
}
}


@ -2,18 +2,20 @@ use crate::circuit::{VK, ZKEY};
use ark_bn254::{Bn254, Fr, Parameters};
use ark_circom::{read_zkey, CircomBuilder, CircomConfig, CircomReduction};
use ark_ec::bn::Bn;
use ark_ff::{Fp256, PrimeField};
use ark_ff::{bytes::ToBytes, Fp256, PrimeField};
use ark_groth16::{
create_proof_with_reduction_and_matrices, create_random_proof_with_reduction,
prepare_verifying_key, verify_proof as ark_verify_proof, Proof as ArkProof, ProvingKey,
VerifyingKey,
};
use ark_relations::r1cs::SynthesisError;
use ark_serialize::*;
use ark_std::{rand::thread_rng, str::FromStr, UniformRand};
use color_eyre::Result;
use ethers_core::utils::keccak256;
use num_bigint::{BigInt, BigUint, ToBigInt};
use primitive_types::U256;
use rand::Rng;
use semaphore::{
identity::Identity,
merkle_tree::{self, Branch},
@ -22,16 +24,17 @@ use semaphore::{
Field,
};
use serde::{Deserialize, Serialize};
use std::io::Write;
use std::time::Instant;
use thiserror::Error;
pub use crate::utils::{add, bytes_to_field, mul, str_to_field, vec_to_field, vec_to_fr};
pub use crate::utils::*;
///////////////////////////////////////////////////////
// RLN Witness data structure and utility functions
///////////////////////////////////////////////////////
#[derive(Debug, Deserialize, Clone)]
#[derive(Debug, PartialEq)]
pub struct RLNWitnessInput {
identity_secret: Field,
path_elements: Vec<Field>,
@ -41,7 +44,7 @@ pub struct RLNWitnessInput {
rln_identifier: Field,
}
#[derive(Debug, Deserialize, Clone)]
#[derive(Debug, PartialEq)]
pub struct RLNProofValues {
// Public outputs:
y: Field,
@ -53,6 +56,97 @@ pub struct RLNProofValues {
rln_identifier: Field,
}
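// Serialized witness layout (little-endian): identity_secret (32 bytes) ||
// path_elements (length-prefixed vector of 32-byte elements) ||
// identity_path_index (length-prefixed vector of bytes) || x || epoch || rln_identifier (32 bytes each).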
pub fn serialize_witness(rln_witness: &RLNWitnessInput) -> Vec<u8> {
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut field_to_bytes_le(&rln_witness.identity_secret));
serialized.append(&mut vec_field_to_bytes_le(&rln_witness.path_elements));
serialized.append(&mut vec_u8_to_bytes_le(&rln_witness.identity_path_index));
serialized.append(&mut field_to_bytes_le(&rln_witness.x));
serialized.append(&mut field_to_bytes_le(&rln_witness.epoch));
serialized.append(&mut field_to_bytes_le(&rln_witness.rln_identifier));
serialized
}
pub fn deserialize_witness(serialized: &[u8]) -> RLNWitnessInput {
let mut all_read: usize = 0;
let (identity_secret, read) = bytes_le_to_field(&serialized[all_read..].to_vec());
all_read += read;
let (path_elements, read) = bytes_le_to_vec_field(&serialized[all_read..].to_vec());
all_read += read;
let (identity_path_index, read) = bytes_le_to_vec_u8(&serialized[all_read..].to_vec());
all_read += read;
let (x, read) = bytes_le_to_field(&serialized[all_read..].to_vec());
all_read += read;
let (epoch, read) = bytes_le_to_field(&serialized[all_read..].to_vec());
all_read += read;
let (rln_identifier, read) = bytes_le_to_field(&serialized[all_read..].to_vec());
all_read += read;
assert_eq!(serialized.len(), all_read);
RLNWitnessInput {
identity_secret,
path_elements,
identity_path_index,
x,
epoch,
rln_identifier,
}
}
pub fn serialize_proof_values(rln_proof_values: &RLNProofValues) -> Vec<u8> {
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut field_to_bytes_le(&rln_proof_values.y));
serialized.append(&mut field_to_bytes_le(&rln_proof_values.nullifier));
serialized.append(&mut field_to_bytes_le(&rln_proof_values.root));
serialized.append(&mut field_to_bytes_le(&rln_proof_values.x));
serialized.append(&mut field_to_bytes_le(&rln_proof_values.epoch));
serialized.append(&mut field_to_bytes_le(&rln_proof_values.rln_identifier));
serialized
}
pub fn deserialize_proof_values(serialized: &[u8]) -> RLNProofValues {
let mut all_read: usize = 0;
let (y, read) = bytes_le_to_field(&serialized[all_read..].to_vec());
all_read += read;
let (nullifier, read) = bytes_le_to_field(&serialized[all_read..].to_vec());
all_read += read;
let (root, read) = bytes_le_to_field(&serialized[all_read..].to_vec());
all_read += read;
let (x, read) = bytes_le_to_field(&serialized[all_read..].to_vec());
all_read += read;
let (epoch, read) = bytes_le_to_field(&serialized[all_read..].to_vec());
all_read += read;
let (rln_identifier, read) = bytes_le_to_field(&serialized[all_read..].to_vec());
all_read += read;
assert_eq!(serialized.len(), all_read);
RLNProofValues {
y,
nullifier,
root,
x,
epoch,
rln_identifier,
}
}
pub fn rln_witness_from_json(input_json_str: &str) -> RLNWitnessInput {
let input_json: serde_json::Value =
serde_json::from_str(input_json_str).expect("JSON was not well-formatted");
@ -105,12 +199,40 @@ pub fn rln_witness_from_values(
}
}
pub fn random_rln_witness(tree_height: usize) -> RLNWitnessInput {
let mut rng = thread_rng();
let identity_secret = hash_to_field(&rng.gen::<[u8; 32]>());
let x = hash_to_field(&rng.gen::<[u8; 32]>());
let epoch = hash_to_field(&rng.gen::<[u8; 32]>());
let rln_identifier = hash_to_field(&rng.gen::<[u8; 32]>());
let mut path_elements: Vec<Field> = Vec::new();
let mut identity_path_index: Vec<u8> = Vec::new();
// In semaphore's poseidon_tree, the leaves level is counted in tree_height.
// This means that a merkle proof consists of tree_height-1 field elements
for _ in 0..tree_height - 1 {
path_elements.push(hash_to_field(&rng.gen::<[u8; 32]>()));
identity_path_index.push(rng.gen_range(0..2) as u8);
}
RLNWitnessInput {
identity_secret,
path_elements,
identity_path_index,
x,
epoch,
rln_identifier,
}
}
pub fn proof_values_from_witness(rln_witness: &RLNWitnessInput) -> RLNProofValues {
// y share
let a_0 = rln_witness.identity_secret;
let a_1 = poseidon_hash(&[a_0, rln_witness.epoch]);
let y = mul(rln_witness.x, a_1);
let y = add(y, a_0);
let y = mul(&rln_witness.x, &a_1);
let y = add(&y, &a_0);
// Nullifier
let nullifier = poseidon_hash(&[a_1, rln_witness.rln_identifier]);
@ -126,7 +248,7 @@ pub fn proof_values_from_witness(rln_witness: &RLNWitnessInput) -> RLNProofValue
}
let root = get_tree_root(
rln_witness.identity_secret,
&rln_witness.identity_secret,
&rln_witness.path_elements,
&rln_witness.identity_path_index,
true,
@ -170,12 +292,12 @@ pub fn get_identity_path_index(proof: &merkle_tree::Proof<PoseidonHash>) -> Vec<
}
pub fn get_tree_root(
leaf: Field,
leaf: &Field,
path_elements: &[Field],
identity_path_index: &[u8],
hash_leaf: bool,
) -> Field {
let mut root = leaf;
let mut root = *leaf;
if hash_leaf {
root = poseidon_hash(&[root]);
}
@ -195,9 +317,10 @@ pub fn get_tree_root(
// Signal/nullifier utility functions
///////////////////////////////////////////////////////
fn hash_signal(signal: &[u8]) -> Field {
pub fn hash_to_field(signal: &[u8]) -> Field {
let hash = keccak256(signal);
bytes_to_field(&hash)
let (el, _) = bytes_le_to_field(hash.as_ref());
el
}
/// Generates the nullifier hash
@ -327,7 +450,7 @@ pub fn generate_proof(
/// necessarily mean the proof is incorrect.
pub fn verify_proof(
verifying_key: &VerifyingKey<Bn254>,
proof: Proof,
proof: &Proof,
proof_values: &RLNProofValues,
) -> Result<bool, ProofError> {
// We re-arrange proof-values according to the circuit specification
@ -342,8 +465,8 @@ pub fn verify_proof(
// Check that the proof is valid
let pvk = prepare_verifying_key(verifying_key);
let pr: ArkProof<Bn254> = proof.into();
let verified = ark_verify_proof(&pvk, &pr, &vec_to_fr(inputs))?;
let pr: ArkProof<Bn254> = (*proof).into();
let verified = ark_verify_proof(&pvk, &pr, &vec_to_fr(&inputs))?;
Ok(verified)
}


@ -1,120 +1,161 @@
/// This is the main public API for RLN. It is used by the FFI, and should be
/// used by tests etc as well
///
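// A rough usage sketch of this API (buffer names are illustrative; see the tests at the end of
// this file for complete examples):
//     let mut rln = RLN::new(tree_height);
//     rln.set_leaf(index, &mut Cursor::new(field_to_bytes_le(&id_commitment)))?;
//     rln.get_root(&mut root_buffer)?;                      // 32-byte little-endian root
//     rln.prove(&mut witness_buffer, &mut proof_buffer)?;   // compressed ark-groth16 proof
//     rln.verify(&mut Cursor::new(proof_and_values))?;      // proof || serialized proof values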
use ark_bn254::Bn254;
use ark_bn254::{Bn254, Fr};
use ark_circom::{CircomBuilder, CircomCircuit, CircomConfig};
use ark_groth16::{
create_random_proof as prove, generate_random_parameters, prepare_verifying_key, verify_proof,
Proof, ProvingKey,
};
use ark_groth16::Proof as ArkProof;
use ark_groth16::{ProvingKey, VerifyingKey};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use ark_std::rand::thread_rng;
use num_bigint::BigInt;
use semaphore::{
hash_to_field, identity::Identity, poseidon_tree::PoseidonTree, protocol::*, Field,
};
use serde::Deserialize;
use semaphore::{hash_to_field, identity::Identity, poseidon_tree::PoseidonTree, Field};
use serde::{Deserialize, Serialize};
use serde_json;
use std::io::{self, Read, Write};
use std::io::{self, Error, ErrorKind, Result}; //default read/write
use crate::circuit::{CIRCOM, ZKEY};
use crate::protocol;
// For the ToBytes implementation of groth16::Proof
use ark_ec::bn::Bn;
use ark_ff::bytes::ToBytes;
use ark_serialize::{Read, Write};
use crate::circuit::{CIRCOM, VK, ZKEY};
use crate::protocol::*;
use crate::utils::*;
// TODO Add Engine here? i.e. <E: Engine> not <Bn254>
// TODO Assuming we want to use IncrementalMerkleTree, figure out type/trait conversions
// TODO Adopt to new protocol structure
pub struct RLN {
circom: CircomCircuit<Bn254>,
params: ProvingKey<Bn254>,
tree: PoseidonTree,
pub circom: CircomBuilder<Bn254>,
pub proving_key: Result<ProvingKey<Bn254>>,
pub verification_key: Result<VerifyingKey<Bn254>>,
pub tree: PoseidonTree,
}
// TODO Expand API to have better coverage of things needed
use crate::utils::{to_field, to_fr};
use std::io::Cursor;
impl RLN {
// TODO Update this to use new protocol
pub fn new() -> RLN {
let cfg =
CircomConfig::<Bn254>::new("./resources/rln.wasm", "./resources/rln.r1cs").unwrap();
pub fn new(tree_height: usize) -> RLN {
let circom = CIRCOM();
let builder = CircomBuilder::new(cfg);
let params = ZKEY();
let circom = builder.build().unwrap();
let inputs = circom.get_public_inputs().unwrap();
println!("Public inputs {:#?} ", inputs);
let proving_key = ZKEY();
let verification_key = VK();
// We compute a default empty tree
// Probably better to pass it as parameter
let TREE_HEIGHT = 21;
let leaf = Field::from(0);
let tree = PoseidonTree::new(TREE_HEIGHT, leaf);
let tree = PoseidonTree::new(tree_height, leaf);
RLN {
circom,
params,
proving_key,
verification_key,
tree,
}
}
pub fn set_tree<R: Read>(&self, _input_data: R) -> io::Result<()> {
//Implement leaf and deserialization
//let leaf = Leaf::deserialize(input_data).unwrap();
////////////////////////////////////////////////////////
// Merkle-tree APIs
////////////////////////////////////////////////////////
pub fn set_tree(&mut self, tree_height: usize) -> io::Result<()> {
// We compute a default empty tree of desired height
let leaf = Field::from(0);
self.tree = PoseidonTree::new(tree_height, leaf);
Ok(())
}
pub fn set_leaf<R: Read>(&mut self, index: usize, mut input_data: R) -> io::Result<()> {
// We read input
let mut leaf_byte: Vec<u8> = Vec::new();
input_data.read_to_end(&mut leaf_byte)?;
// We set the leaf at input index
let (leaf, _) = bytes_le_to_field(&leaf_byte);
self.tree.set(index, leaf);
Ok(())
}
//TODO: change to set_leaves_from(index, input_data)
pub fn set_leaves<R: Read>(&mut self, mut input_data: R) -> io::Result<()> {
// We read input
let mut leaves_byte: Vec<u8> = Vec::new();
input_data.read_to_end(&mut leaves_byte)?;
let (leaves, _) = bytes_le_to_vec_field(&leaves_byte);
// We set the leaves
for (i, leaf) in leaves.iter().enumerate() {
self.tree.set(i, *leaf);
}
//returns H::Hash, which is a 256 bit hash value
//let root = self.tree.root();
// TODO Return root as LE here
//root.write_le(&mut result_data)?;
//println!("NYI: root le write buffer {:#?}", root);
Ok(())
}
/// returns current membership root
/// * `root` is a scalar field element in 32 bytes
pub fn get_root<W: Write>(&self, _result_data: W) -> io::Result<()> {
//let root = self.tree.get_root();
// Converts PrimeFieldRepr into LE
//root.into_repr().write_le(&mut result_data)?;
//returns H::Hash, which is a 256 bit hash value
pub fn get_root<W: Write>(&self, mut output_data: W) -> io::Result<()> {
let root = self.tree.root();
// TODO Return root as LE here
//root.write_le(&mut result_data)?;
println!("NYI: root le write buffer {:#?}", root);
Ok(())
}
// TODO Input Read
pub fn prove<W: Write>(&self, result_data: W) -> io::Result<()> {
let mut rng = thread_rng();
// XXX: There's probably a better way to do this
let circom = self.circom.clone();
let params = self.params.clone();
//let proof = create_random_proof(circom, &params, &mut rng)?;
let proof = prove(circom, &params, &mut rng).unwrap();
println!("Proof: {:?}", proof);
// XXX: Unclear if this is different from other serialization(s)
let _ = proof.serialize(result_data).unwrap();
output_data.write_all(&field_to_bytes_le(&root))?;
Ok(())
}
pub fn verify<R: Read>(&self, input_data: R) -> io::Result<bool> {
let proof = Proof::deserialize(input_data).unwrap();
/// returns the Merkle proof of the leaf at position index
/// * the output is the serialized path elements followed by the identity path indexes
pub fn get_proof<W: Write>(&self, index: usize, mut output_data: W) -> io::Result<()> {
let merkle_proof = self.tree.proof(index).expect("proof should exist");
let path_elements = get_path_elements(&merkle_proof);
let identity_path_index = get_identity_path_index(&merkle_proof);
let pvk = prepare_verifying_key(&self.params.vk);
output_data.write_all(&vec_field_to_bytes_le(&path_elements))?;
output_data.write_all(&vec_u8_to_bytes_le(&identity_path_index))?;
// XXX Part of input data?
let inputs = self.circom.get_public_inputs().unwrap();
Ok(())
}
let verified = verify_proof(&pvk, &proof, &inputs).unwrap();
////////////////////////////////////////////////////////
// zkSNARK APIs
////////////////////////////////////////////////////////
pub fn prove<R: Read, W: Write>(
&self,
mut input_data: R,
mut output_data: W,
) -> io::Result<()> {
// We read input RLN witness and we deserialize it
let mut witness_byte: Vec<u8> = Vec::new();
input_data.read_to_end(&mut witness_byte)?;
let rln_witness = deserialize_witness(&witness_byte);
let proof = generate_proof(
self.circom.clone(),
self.proving_key.as_ref().unwrap(),
&rln_witness,
)
.unwrap();
// Note: we export a serialization of ark-groth16::Proof not semaphore::Proof
ArkProof::from(proof).serialize(&mut output_data).unwrap();
Ok(())
}
pub fn verify<R: Read>(&self, mut input_data: R) -> io::Result<bool> {
// Input data is serialized for Bn254 as:
// serialized_proof (compressed, 4*32 bytes) || serialized_proof_values (6*32 bytes)
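// (i.e. the compressed ark-serialize encoding of an ark-groth16 proof over Bn254:
//  a = 32-byte G1 point, b = 64-byte G2 point, c = 32-byte G1 point)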
let mut input_byte: Vec<u8> = Vec::new();
input_data.read_to_end(&mut input_byte)?;
let proof: Proof = ArkProof::deserialize(&mut Cursor::new(&input_byte[..128].to_vec()))
.unwrap()
.into();
let proof_values = deserialize_proof_values(&input_byte[128..].to_vec());
let verified = verify_proof(
self.verification_key.as_ref().unwrap(),
&proof,
&proof_values,
)
.unwrap();
Ok(verified)
}
@ -122,6 +163,181 @@ impl RLN {
impl Default for RLN {
fn default() -> Self {
Self::new()
let tree_height = 21;
Self::new(tree_height)
}
}
#[cfg(test)]
mod test {
use super::*;
use ark_std::str::FromStr;
use rand::Rng;
use semaphore::poseidon_hash;
#[test]
// We test batch Merkle tree additions
fn test_merkle_batch_additions() {
let tree_height = 16;
// We generate a vector of random leaves
let mut leaves: Vec<Field> = Vec::new();
let mut rng = thread_rng();
for _ in 0..256 {
leaves.push(hash_to_field(&rng.gen::<[u8; 32]>()));
}
// We create a new tree
let mut rln = RLN::new(tree_height);
// We first add leaves one by one
for (i, leaf) in leaves.iter().enumerate() {
let mut buffer = Cursor::new(field_to_bytes_le(&leaf));
rln.set_leaf(i, &mut buffer).unwrap();
}
// We get the root of the tree obtained by adding one leaf at a time
let mut buffer = Cursor::new(Vec::<u8>::new());
rln.get_root(&mut buffer).unwrap();
let (root_single, _) = bytes_le_to_field(&buffer.into_inner());
// We reset the tree to default
rln.set_tree(tree_height).unwrap();
// We add leaves in a batch into the tree
let mut buffer = Cursor::new(vec_field_to_bytes_le(&leaves));
rln.set_leaves(&mut buffer).unwrap();
// We get the root of the tree obtained by adding leaves in a batch
let mut buffer = Cursor::new(Vec::<u8>::new());
rln.get_root(&mut buffer).unwrap();
let (root_batch, _) = bytes_le_to_field(&buffer.into_inner());
assert_eq!(root_single, root_batch);
}
#[test]
// This test is similar to the one in lib, but uses only public API
fn test_merkle_proof() {
let tree_height = 16;
let leaf_index = 3;
let mut rln = RLN::new(tree_height);
// generate identity
// We follow zk-kit approach for identity generation
let id = Identity::from_seed(b"test-merkle-proof");
let identity_secret = poseidon_hash(&vec![id.trapdoor, id.nullifier]);
let id_commitment = poseidon_hash(&vec![identity_secret]);
// We pass id_commitment as Read buffer to RLN's set_leaf
let mut buffer = Cursor::new(field_to_bytes_le(&id_commitment));
rln.set_leaf(leaf_index, &mut buffer).unwrap();
// We check correct computation of the root
let mut buffer = Cursor::new(Vec::<u8>::new());
rln.get_root(&mut buffer).unwrap();
let (root, _) = bytes_le_to_field(&buffer.into_inner());
assert_eq!(
root,
Field::from_str("0x27401a4559ce263630907ce3b77c570649e28ede22d2a7f5296839627a16e870")
.unwrap()
);
// We check correct computation of merkle proof
let mut buffer = Cursor::new(Vec::<u8>::new());
rln.get_proof(leaf_index, &mut buffer).unwrap();
let buffer_inner = buffer.into_inner();
let (path_elements, read) = bytes_le_to_vec_field(&buffer_inner);
let (identity_path_index, _) = bytes_le_to_vec_u8(&buffer_inner[read..].to_vec());
// We check correct computation of the path and indexes
let expected_path_elements = vec![
Field::from_str("0x0000000000000000000000000000000000000000000000000000000000000000")
.unwrap(),
Field::from_str("0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864")
.unwrap(),
Field::from_str("0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1")
.unwrap(),
Field::from_str("0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238")
.unwrap(),
Field::from_str("0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a")
.unwrap(),
Field::from_str("0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55")
.unwrap(),
Field::from_str("0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78")
.unwrap(),
Field::from_str("0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d")
.unwrap(),
Field::from_str("0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61")
.unwrap(),
Field::from_str("0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747")
.unwrap(),
Field::from_str("0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2")
.unwrap(),
Field::from_str("0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636")
.unwrap(),
Field::from_str("0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a")
.unwrap(),
Field::from_str("0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0")
.unwrap(),
Field::from_str("0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c")
.unwrap(),
];
let expected_identity_path_index: Vec<u8> =
vec![1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
assert_eq!(path_elements, expected_path_elements);
assert_eq!(identity_path_index, expected_identity_path_index);
// We double check that the proof computed from public API is correct
let root_from_proof =
get_tree_root(&id_commitment, &path_elements, &identity_path_index, false);
assert_eq!(root, root_from_proof);
}
#[test]
// This test is similar to the one in lib, but uses only public API
fn test_groth16_proof() {
let tree_height = 16;
let rln = RLN::new(tree_height);
// Note: we only test Groth16 proof generation, so we ignore setting the tree in the RLN object
let rln_witness = random_rln_witness(tree_height);
let proof_values = proof_values_from_witness(&rln_witness);
// We compute a Groth16 proof
let mut input_buffer = Cursor::new(serialize_witness(&rln_witness));
let mut output_buffer = Cursor::new(Vec::<u8>::new());
rln.prove(&mut input_buffer, &mut output_buffer).unwrap();
let serialized_proof = output_buffer.into_inner();
// Before checking public verify API, we check that the (deserialized) proof generated by prove is actually valid
let proof: Proof = ArkProof::deserialize(&mut Cursor::new(&serialized_proof))
.unwrap()
.into();
let verified = verify_proof(
&rln.verification_key.as_ref().unwrap(),
&proof,
&proof_values,
);
assert!(verified.unwrap());
// We prepare the input to prove API, consisting of serialized_proof (compressed, 4*32 bytes) || serialized_proof_values (6*32 bytes)
let serialized_proof_values = serialize_proof_values(&proof_values);
let mut verify_data = Vec::<u8>::new();
verify_data.extend(&serialized_proof);
verify_data.extend(&serialized_proof_values);
let mut input_buffer = Cursor::new(verify_data);
// We verify the Groth16 proof against the provided proof values
let verified = rln.verify(&mut input_buffer).unwrap();
assert!(verified);
}
}


@ -1,19 +1,32 @@
use ark_bn254::{Bn254, Fr, Parameters};
use ark_ff::{Fp256, PrimeField};
use ark_ff::{BigInteger, Field as ArkField, FpParameters, PrimeField};
use ark_std::str::FromStr;
use ethers_core::utils::keccak256;
use num_bigint::{BigInt, BigUint, ToBigInt};
use semaphore::{identity::Identity, Field};
use std::iter::Extend;
pub fn to_fr(el: Field) -> Fr {
Fr::try_from(el).unwrap()
pub fn modulus_bit_size() -> usize {
<Fr as PrimeField>::Params::MODULUS
.num_bits()
.try_into()
.unwrap()
}
pub fn to_field(el: Fr) -> Field {
el.try_into().unwrap()
pub fn fr_byte_size() -> usize {
let mbs = modulus_bit_size();
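// Round the modulus bit size up to a whole number of 64-bit limbs, then convert bits to bytes:
// for BN254's Fr (254-bit modulus) this is (254 + 64 - 62) / 8 = 32 bytes.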
(mbs + 64 - (mbs % 64)) / 8
}
pub fn vec_to_fr(v: Vec<Field>) -> Vec<Fr> {
pub fn to_fr(el: &Field) -> Fr {
Fr::try_from(*el).unwrap()
}
pub fn to_field(el: &Fr) -> Field {
(*el).try_into().unwrap()
}
pub fn vec_to_fr(v: &Vec<Field>) -> Vec<Fr> {
let mut result: Vec<Fr> = vec![];
for el in v {
result.push(to_fr(el));
@ -21,7 +34,7 @@ pub fn vec_to_fr(v: Vec<Field>) -> Vec<Fr> {
result
}
pub fn vec_to_field(v: Vec<Fr>) -> Vec<Field> {
pub fn vec_to_field(v: &Vec<Fr>) -> Vec<Field> {
let mut result: Vec<Field> = vec![];
for el in v {
result.push(to_field(el));
@ -29,6 +42,22 @@ pub fn vec_to_field(v: Vec<Fr>) -> Vec<Field> {
result
}
pub fn vec_fr_to_field(input: &Vec<Fr>) -> Vec<Field> {
let mut res: Vec<Field> = Vec::new();
for el in input {
res.push(to_field(el));
}
res
}
pub fn vec_field_to_fr(input: &Vec<Field>) -> Vec<Fr> {
let mut res: Vec<Fr> = Vec::new();
for el in input {
res.push(to_fr(el));
}
res
}
pub fn str_to_field(input: String, radix: i32) -> Field {
assert!((radix == 10) || (radix == 16));
@ -49,28 +78,190 @@ pub fn str_to_field(input: String, radix: i32) -> Field {
}
}
pub fn bytes_to_fr(input: &[u8]) -> Fr {
Fr::from(BigUint::from_bytes_le(input))
pub fn bytes_le_to_fr(input: &[u8]) -> (Fr, usize) {
let el_size = fr_byte_size();
(
Fr::from(BigUint::from_bytes_le(&input[0..el_size])),
el_size,
)
}
pub fn bytes_to_field(input: &[u8]) -> Field {
to_field(bytes_to_fr(input))
pub fn bytes_be_to_fr(input: &[u8]) -> (Fr, usize) {
let el_size = fr_byte_size();
(
Fr::from(BigUint::from_bytes_be(&input[0..el_size])),
el_size,
)
}
pub fn bytes_le_to_field(input: &[u8]) -> (Field, usize) {
let (fr_el, read) = bytes_le_to_fr(input);
(to_field(&fr_el), read)
}
pub fn bytes_be_to_field(input: &[u8]) -> (Field, usize) {
let (fr_el, read) = bytes_be_to_fr(input);
(to_field(&fr_el), read)
}
pub fn fr_to_bytes_le(input: &Fr) -> Vec<u8> {
let input_biguint: BigUint = (*input).into();
let mut res = input_biguint.to_bytes_le();
// BigUint conversion drops the most significant zero bytes. We restore them, otherwise serialization will fail (length % 8 != 0)
while res.len() != fr_byte_size() {
res.push(0);
}
res
}
pub fn fr_to_bytes_be(input: &Fr) -> Vec<u8> {
let input_biguint: BigUint = (*input).into();
let mut res = input_biguint.to_bytes_be();
// BigUint conversion drops the most significant zero bytes. We restore them, otherwise serialization might fail
// Fr elements are stored using 64-bit limbs
while res.len() != fr_byte_size() {
res.insert(0, 0);
}
res
}
pub fn field_to_bytes_le(input: &Field) -> Vec<u8> {
fr_to_bytes_le(&to_fr(input))
}
pub fn field_to_bytes_be(input: &Field) -> Vec<u8> {
fr_to_bytes_be(&to_fr(input))
}
pub fn vec_fr_to_bytes_le(input: &Vec<Fr>) -> Vec<u8> {
let mut bytes: Vec<u8> = Vec::new();
//We store the vector length
bytes.extend(input.len().to_le_bytes().to_vec());
// We store each element
for el in input {
bytes.extend(fr_to_bytes_le(el));
}
bytes
}
pub fn vec_fr_to_bytes_be(input: &Vec<Fr>) -> Vec<u8> {
let mut bytes: Vec<u8> = Vec::new();
//We store the vector length
bytes.extend(input.len().to_be_bytes().to_vec());
// We store each element
for el in input {
bytes.extend(fr_to_bytes_be(el));
}
bytes
}
pub fn vec_field_to_bytes_le(input: &Vec<Field>) -> Vec<u8> {
vec_fr_to_bytes_le(&vec_field_to_fr(input))
}
pub fn vec_field_to_bytes_be(input: &Vec<Field>) -> Vec<u8> {
vec_fr_to_bytes_be(&vec_field_to_fr(input))
}
pub fn vec_u8_to_bytes_le(input: &Vec<u8>) -> Vec<u8> {
let mut bytes: Vec<u8> = Vec::new();
//We store the vector length
bytes.extend(u64::try_from(input.len()).unwrap().to_le_bytes().to_vec());
bytes.extend(input);
bytes
}
pub fn vec_u8_to_bytes_be(input: Vec<u8>) -> Vec<u8> {
let mut bytes: Vec<u8> = Vec::new();
//We store the vector length
bytes.extend(u64::try_from(input.len()).unwrap().to_be_bytes().to_vec());
bytes.extend(input);
bytes
}
pub fn bytes_le_to_vec_u8(input: &[u8]) -> (Vec<u8>, usize) {
let mut read: usize = 0;
let len = usize::try_from(u64::from_le_bytes(input[0..8].try_into().unwrap())).unwrap();
read += 8;
let res = input[8..8 + len].to_vec();
read += res.len();
(res, read)
}
pub fn bytes_be_to_vec_u8(input: &[u8]) -> (Vec<u8>, usize) {
let mut read: usize = 0;
let len = usize::try_from(u64::from_be_bytes(input[0..8].try_into().unwrap())).unwrap();
read += 8;
let res = input[8..8 + len].to_vec();
read += res.len();
(res, read)
}
pub fn bytes_le_to_vec_fr(input: &[u8]) -> (Vec<Fr>, usize) {
let mut read: usize = 0;
let mut res: Vec<Fr> = Vec::new();
let len = usize::try_from(u64::from_le_bytes(input[0..8].try_into().unwrap())).unwrap();
read += 8;
let el_size = fr_byte_size();
for i in 0..len {
let (curr_el, _) = bytes_le_to_fr(&input[8 + el_size * i..8 + el_size * (i + 1)].to_vec());
res.push(curr_el);
read += el_size;
}
(res, read)
}
pub fn bytes_be_to_vec_fr(input: &[u8]) -> (Vec<Fr>, usize) {
let mut read: usize = 0;
let mut res: Vec<Fr> = Vec::new();
let len = usize::try_from(u64::from_be_bytes(input[0..8].try_into().unwrap())).unwrap();
read += 8;
let el_size = fr_byte_size();
for i in 0..len {
let (curr_el, _) = bytes_be_to_fr(&input[8 + el_size * i..8 + el_size * (i + 1)].to_vec());
res.push(curr_el);
read += el_size;
}
(res, read)
}
pub fn bytes_le_to_vec_field(input: &[u8]) -> (Vec<Field>, usize) {
let (vec_fr, read) = bytes_le_to_vec_fr(input);
(vec_fr_to_field(&vec_fr), read)
}
pub fn bytes_be_to_vec_field(input: &[u8]) -> (Vec<Field>, usize) {
let (vec_fr, read) = bytes_be_to_vec_fr(input);
(vec_fr_to_field(&vec_fr), read)
}
// Arithmetic over Field elements (wrapped over arkworks algebra crate)
pub fn add(a: Field, b: Field) -> Field {
to_field(to_fr(a) + to_fr(b))
pub fn add(a: &Field, b: &Field) -> Field {
to_field(&(to_fr(a) + to_fr(b)))
}
pub fn mul(a: Field, b: Field) -> Field {
to_field(to_fr(a) * to_fr(b))
pub fn mul(a: &Field, b: &Field) -> Field {
to_field(&(to_fr(a) * to_fr(b)))
}
pub fn div(a: Field, b: Field) -> Field {
to_field(to_fr(a) / to_fr(b))
pub fn div(a: &Field, b: &Field) -> Field {
to_field(&(to_fr(a) / to_fr(b)))
}
pub fn inv(a: Field) -> Field {
to_field(Fr::from(1) / to_fr(a))
pub fn inv(a: &Field) -> Field {
to_field(&(Fr::from(1) / to_fr(a)))
}