mirror of https://github.com/vacp2p/zerokit.git

RLN: Update API (#22)

* refactor(rln): integrate semaphore's merkle tree implementation
* feat(rln): expand API and refactor
* refactor(rln): support external resource path for circuits/keys, add circuit for tree_height = 20
* feat(rln): add cargo library directives

parent 95352a4a7c
commit 9cc333811b
@@ -5,6 +5,9 @@ edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[lib]
crate-type = ["cdylib", "rlib"]

[dependencies]

# WASM operations
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,119 @@
{
 "protocol": "groth16",
 "curve": "bn128",
 "nPublic": 6,
 "vk_alpha_1": [
  "1805378556360488226980822394597799963030511477964155500103132920745199284516",
  "11990395240534218699464972016456017378439762088320057798320175886595281336136",
  "1"
 ],
 "vk_beta_2": [
  [
   "11031529986141021025408838211017932346992429731488270384177563837022796743627",
   "16042159910707312759082561183373181639420894978640710177581040523252926273854"
  ],
  [
   "20112698439519222240302944148895052359035104222313380895334495118294612255131",
   "19441583024670359810872018179190533814486480928824742448673677460151702019379"
  ],
  [
   "1",
   "0"
  ]
 ],
 "vk_gamma_2": [
  [
   "10857046999023057135944570762232829481370756359578518086990519993285655852781",
   "11559732032986387107991004021392285783925812861821192530917403151452391805634"
  ],
  [
   "8495653923123431417604973247489272438418190587263600148770280649306958101930",
   "4082367875863433681332203403145435568316851327593401208105741076214120093531"
  ],
  [
   "1",
   "0"
  ]
 ],
 "vk_delta_2": [
  [
   "1342791402398183550129987853701397066695422166542200371137242980909975744720",
   "19885954793721639146517398722913034453263197732511169431324269951156805454588"
  ],
  [
   "16612518449808520746616592899100682320852224744311197908486719118388461103870",
   "13039435290897389787786546960964558630619663289413586834851804020863949546009"
  ],
  [
   "1",
   "0"
  ]
 ],
 "vk_alphabeta_12": [
  [
   [
    "5151991366823434428398919091000210787450832786814248297320989361921939794156",
    "15735191313289001022885148627913534790382722933676436876510746491415970766821"
   ],
   [
    "3387907257437913904447588318761906430938415556102110876587455322225272831272",
    "1998779853452712881084781956683721603875246565720647583735935725110674288056"
   ],
   [
    "14280074182991498185075387990446437410077692353432005297922275464876153151820",
    "17092408446352310039633488224969232803092763095456307462247653153107223117633"
   ]
  ],
  [
   [
    "4359046709531668109201634396816565829237358165496082832279660960675584351266",
    "4511888308846208349307186938266411423935335853916317436093178288331845821336"
   ],
   [
    "11429499807090785857812316277335883295048773373068683863667725283965356423273",
    "16232274853200678548795010078253506586114563833318973594428907292096178657392"
   ],
   [
    "18068999605870933925311275504102553573815570223888590384919752303726860800970",
    "17309569111965782732372130116757295842160193489132771344011460471298173784984"
   ]
  ]
 ],
 "IC": [
  [
   "15907620619058468322652190166474219459106695372760190199814463422116003944385",
   "15752765921940703867480319151728055971288798043197983667046402260506178676501",
   "1"
  ],
  [
   "12004081423498474638814710157503496372594892372197913146719480190853290407272",
   "17759993271504587923309435837545182941635937261719294500288793819648071033469",
   "1"
  ],
  [
   "878120019311612655450010384994897394984265086410869146105626241891073100410",
   "17631186298933191134732246976686754514124819009836710500647157641262968661294",
   "1"
  ],
  [
   "14710016919630225372037989028011020715054625029990218653012745498368446893907",
   "2581293501049347486538806758240731445964309309490885835380825245889909387041",
   "1"
  ],
  [
   "766327921864693063481261933507417084013182964450768912480746815296334678928",
   "18104222034822903557262264275808261481286672296559910954337205847153944954509",
   "1"
  ],
  [
   "8877686447180479408315100041907552504213694351585462004774320248566787828012",
   "15836202093850379814510995758762098170932781831518064786308541653541698178373",
   "1"
  ],
  [
   "19567388833538990982537236781224917793757180861915757860561618079730704818311",
   "3535132838196675082818592669173684593624477421910576112671761297886253127546",
   "1"
  ]
 ]
}
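The file added above is a snarkjs-style Groth16 verification key. As a rough illustration only (the path below is hypothetical and depends on which resources folder the key is stored in), it can be inspected from Rust with serde_json, which the crate already uses elsewhere:

    use serde_json::Value;
    use std::fs;

    fn inspect_vk() -> serde_json::Result<()> {
        // Hypothetical location; substitute the resources folder actually in use.
        let raw = fs::read_to_string("./resources/tree_height_16/verifying_key.json").unwrap();
        let vk: Value = serde_json::from_str(&raw)?;
        // Field names as they appear in the JSON above.
        assert_eq!(vk["protocol"], "groth16");
        assert_eq!(vk["curve"], "bn128");
        println!("public inputs: {}", vk["nPublic"]); // 6 for this circuit
        Ok(())
    }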
@@ -9,17 +9,31 @@ use serde_json::Value;
use std::convert::TryFrom;
use std::fs::File;
use std::io::{Cursor, Error, ErrorKind, Result, Write};
use std::option::Option;
use std::path::Path;
use std::str::FromStr;

const ZKEY_PATH: &str = "./resources/rln_final.zkey";
const VK_PATH: &str = "./resources/verifying_key.json";
const R1CS_PATH: &str = "./resources/rln.r1cs";
const WASM_PATH: &str = "./resources/rln.wasm";
const ZKEY_FILENAME: &str = "rln_final.zkey";
const VK_FILENAME: &str = "verifying_key.json";
const R1CS_FILENAME: &str = "rln.r1cs";
const WASM_FILENAME: &str = "rln.wasm";

pub fn ZKEY() -> Result<ProvingKey<Bn254>> /*, ConstraintMatrices<Fr>)*/ {
if Path::new(ZKEY_PATH).exists() {
let mut file = File::open(ZKEY_PATH).unwrap();
// These parameters are used for tests
// Note that the circuit and keys in TEST_RESOURCES_FOLDER are compiled for Merkle trees of height 16 and 20 (including leaves level)
// Changing these parameters to other value than these pairs of defaults will cause zkSNARK proof verification to fail
// All tests should pass for TEST_TREE_HEIGHT = 16
// The following tests fails for TEST_TREE_HEIGHT = 20 : ffi::test::test_merkle_proof_ffi, public::test::test_merkle_proof, test::test_merkle_proof, test::test_witness_from_json
// TODO: tests containing hardcoded values for TEST_TREE_HEIGHT = 16 need to be extended for the case TEST_TREE_HEIGHT = 20 in order to pass
pub const TEST_TREE_HEIGHT: usize = 16;
pub const TEST_RESOURCES_FOLDER: &str = "./resources/tree_height_16/";
//pub const TEST_TREE_HEIGHT: usize = 20;
//pub const TEST_RESOURCES_FOLDER: &str = "./resources/tree_height_20/";

#[allow(non_snake_case)]
pub fn ZKEY(resources_folder: &str) -> Result<ProvingKey<Bn254>> {
let zkey_path = format!("{resources_folder}{ZKEY_FILENAME}");
if Path::new(&zkey_path).exists() {
let mut file = File::open(&zkey_path).unwrap();
let (proving_key, _matrices) = read_zkey(&mut file).unwrap();
Ok(proving_key)
} else {

@@ -27,14 +41,18 @@ pub fn ZKEY() -> Result<ProvingKey<Bn254>> /*, ConstraintMatrices<Fr>)*/ {
}
}

pub fn VK() -> Result<VerifyingKey<Bn254>> {
#[allow(non_snake_case)]
pub fn VK(resources_folder: &str) -> Result<VerifyingKey<Bn254>> {
let vk_path = format!("{resources_folder}{VK_FILENAME}");
let zkey_path = format!("{resources_folder}{ZKEY_FILENAME}");

let verifying_key: VerifyingKey<Bn254>;

if Path::new(VK_PATH).exists() {
verifying_key = vk_from_json(VK_PATH);
if Path::new(&vk_path).exists() {
verifying_key = vk_from_json(&vk_path);
Ok(verifying_key)
} else if Path::new(ZKEY_PATH).exists() {
verifying_key = ZKEY().unwrap().vk;
} else if Path::new(&zkey_path).exists() {
verifying_key = ZKEY(resources_folder).unwrap().vk;
Ok(verifying_key)
} else {
Err(Error::new(

@@ -44,16 +62,23 @@ pub fn VK() -> Result<VerifyingKey<Bn254>> {
}
}

pub fn CIRCOM() -> CircomBuilder<Bn254> {
#[allow(non_snake_case)]
pub fn CIRCOM(resources_folder: &str) -> Option<CircomBuilder<Bn254>> {
let wasm_path = format!("{resources_folder}{WASM_FILENAME}");
let r1cs_path = format!("{resources_folder}{R1CS_FILENAME}");

// Load the WASM and R1CS for witness and proof generation
let cfg = CircomConfig::<Bn254>::new(WASM_PATH, R1CS_PATH).unwrap(); // should be )?; but need to address "the trait `From<ErrReport>` is not implemented for `protocol::ProofError`"
// We build and return the circuit
CircomBuilder::new(cfg)
let cfg = CircomConfig::<Bn254>::new(&wasm_path, &r1cs_path).unwrap();

// We build and return the circuit
Some(CircomBuilder::new(cfg))
}

// TODO: all the following implementations are taken from a public github project: find reference for them

// Utilities to convert a json verification key in a groth16::VerificationKey
fn fq_from_str(s: &str) -> Fq {
Fq::try_from(BigUint::from_str(s).unwrap()).unwrap() //was BigInteger256:: and .into()
Fq::try_from(BigUint::from_str(s).unwrap()).unwrap()
}

// Extracts the element in G1 corresponding to its JSON serialization

@@ -139,8 +164,8 @@ fn vk_from_json(vk_path: &str) -> VerifyingKey<Bn254> {
}

// Checks verification key to be correct with respect to proving key
pub fn check_vk_from_zkey(verifying_key: VerifyingKey<Bn254>) {
let zkey = ZKEY();
pub fn check_vk_from_zkey(resources_folder: &str, verifying_key: VerifyingKey<Bn254>) {
let zkey = ZKEY(resources_folder);
if zkey.is_ok() {
assert_eq!(zkey.unwrap().vk, verifying_key);
}
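The hunks above (which appear to come from the crate's circuit module) replace the fixed ./resources/* paths with loaders parameterized by a resources folder. A minimal usage sketch under that assumption, reusing the TEST_RESOURCES_FOLDER constant introduced above; note that the folder string is concatenated directly with the fixed filenames, so it must end with a trailing slash:

    // Sketch only: assumes these items are in scope as crate::circuit::{ZKEY, VK, CIRCOM, TEST_RESOURCES_FOLDER}.
    fn load_circuit_artifacts() {
        // Expects e.g. "./resources/tree_height_16/rln_final.zkey" to exist on disk.
        let proving_key = ZKEY(TEST_RESOURCES_FOLDER).unwrap();
        // VK falls back to extracting the key from the zkey when verifying_key.json is missing.
        let verifying_key = VK(TEST_RESOURCES_FOLDER).unwrap();
        // CIRCOM now returns an Option wrapping the witness-calculator builder.
        let builder = CIRCOM(TEST_RESOURCES_FOLDER).unwrap();
        let _ = (proving_key, verifying_key, builder);
    }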
311 rln/src/ffi.rs

@@ -37,8 +37,9 @@ impl<'a> From<&Buffer> for &'a [u8] {
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn new(tree_height: usize, ctx: *mut *mut RLN) -> bool {
let rln = RLN::new(tree_height);
pub extern "C" fn new(tree_height: usize, input_buffer: *const Buffer, ctx: *mut *mut RLN) -> bool {
let input_data = <&[u8]>::from(unsafe { &*input_buffer });
let rln = RLN::new(tree_height, input_data);
unsafe { *ctx = Box::into_raw(Box::new(rln)) };
true
}

@@ -53,6 +54,13 @@ pub extern "C" fn set_tree(ctx: *mut RLN, tree_height: usize) -> bool {
rln.set_tree(tree_height).is_ok()
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn delete_leaf(ctx: *mut RLN, index: usize) -> bool {
let rln = unsafe { &mut *ctx };
rln.delete_leaf(index).is_ok()
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn set_leaf(ctx: *mut RLN, index: usize, input_buffer: *const Buffer) -> bool {

@@ -61,6 +69,14 @@ pub extern "C" fn set_leaf(ctx: *mut RLN, index: usize, input_buffer: *const Buf
rln.set_leaf(index, input_data).is_ok()
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn set_next_leaf(ctx: *mut RLN, input_buffer: *const Buffer) -> bool {
let rln = unsafe { &mut *ctx };
let input_data = <&[u8]>::from(unsafe { &*input_buffer });
rln.set_next_leaf(input_data).is_ok()
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn set_leaves(ctx: *mut RLN, input_buffer: *const Buffer) -> bool {

@@ -105,11 +121,11 @@ pub extern "C" fn get_proof(ctx: *const RLN, index: usize, output_buffer: *mut B
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn prove(
ctx: *const RLN,
ctx: *mut RLN,
input_buffer: *const Buffer,
output_buffer: *mut Buffer,
) -> bool {
let rln = unsafe { &*ctx };
let rln = unsafe { &mut *ctx };
let input_data = <&[u8]>::from(unsafe { &*input_buffer });
let mut output_data: Vec<u8> = Vec::new();

@@ -143,16 +159,97 @@ pub extern "C" fn verify(
true
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn generate_rln_proof(
ctx: *mut RLN,
input_buffer: *const Buffer,
output_buffer: *mut Buffer,
) -> bool {
let rln = unsafe { &mut *ctx };
let input_data = <&[u8]>::from(unsafe { &*input_buffer });
let mut output_data: Vec<u8> = Vec::new();

if rln.generate_rln_proof(input_data, &mut output_data).is_ok() {
unsafe { *output_buffer = Buffer::from(&output_data[..]) };
std::mem::forget(output_data);
true
} else {
std::mem::forget(output_data);
false
}
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn verify_rln_proof(
ctx: *const RLN,
proof_buffer: *const Buffer,
proof_is_valid_ptr: *mut bool,
) -> bool {
let rln = unsafe { &*ctx };
let proof_data = <&[u8]>::from(unsafe { &*proof_buffer });
if match rln.verify_rln_proof(proof_data) {
Ok(verified) => verified,
Err(_) => return false,
} {
unsafe { *proof_is_valid_ptr = true };
} else {
unsafe { *proof_is_valid_ptr = false };
};
true
}

////////////////////////////////////////////////////////
// Utils
////////////////////////////////////////////////////////
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn key_gen(ctx: *const RLN, output_buffer: *mut Buffer) -> bool {
let rln = unsafe { &*ctx };
let mut output_data: Vec<u8> = Vec::new();
if rln.key_gen(&mut output_data).is_ok() {
unsafe { *output_buffer = Buffer::from(&output_data[..]) };
std::mem::forget(output_data);
true
} else {
std::mem::forget(output_data);
false
}
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
pub extern "C" fn hash(
ctx: *mut RLN,
input_buffer: *const Buffer,
output_buffer: *mut Buffer,
) -> bool {
let rln = unsafe { &mut *ctx };
let input_data = <&[u8]>::from(unsafe { &*input_buffer });
let mut output_data: Vec<u8> = Vec::new();

if rln.hash(input_data, &mut output_data).is_ok() {
unsafe { *output_buffer = Buffer::from(&output_data[..]) };
std::mem::forget(output_data);
true
} else {
std::mem::forget(output_data);
false
}
}

#[cfg(test)]
mod test {
use super::*;
use crate::circuit::*;
use crate::protocol::*;
use crate::utils::*;
use ark_bn254::{Bn254, Fr};
use ark_groth16::Proof as ArkProof;
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use ark_std::rand::thread_rng;
use ark_std::str::FromStr;
use ark_std::{rand::thread_rng, UniformRand};
use rand::Rng;
use semaphore::{identity::Identity, poseidon_hash, Field};
use serde::{Deserialize, Serialize};

@@ -161,23 +258,25 @@ mod test {
#[test]
// We test merkle batch Merkle tree additions
fn test_merkle_batch_additions_ffi() {
let tree_height = 16;
fn test_merkle_operations_ffi() {
let tree_height = TEST_TREE_HEIGHT;
let no_of_leaves = 256;

// We generate a vector of random leaves
let mut leaves: Vec<Field> = Vec::new();
let mut rng = thread_rng();
for _ in 0..256 {
leaves.push(hash_to_field(&rng.gen::<[u8; 32]>()));
for _ in 0..no_of_leaves {
leaves.push(to_field(&Fr::rand(&mut rng)));
}

// We create a RLN instance
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
let success = new(tree_height, rln_pointer.as_mut_ptr());
let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
assert!(success, "RLN object creation failed");
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };

// We first add leaves one by one
// We first add leaves one by one specifying the index
for (i, leaf) in leaves.iter().enumerate() {
// We prepare id_commitment and we set the leaf at provided index
let leaf_ser = field_to_bytes_le(&leaf);

@@ -198,11 +297,34 @@ mod test {
let success = set_tree(rln_pointer, tree_height);
assert!(success, "set tree call failed");

// We add leaves one by one using the internal index (new leaves goes in next available position)
for leaf in &leaves {
let leaf_ser = field_to_bytes_le(&leaf);
let input_buffer = &Buffer::from(leaf_ser.as_ref());
let success = set_next_leaf(rln_pointer, input_buffer);
assert!(success, "set next leaf call failed");
}

// We get the root of the tree obtained adding leaves using the internal index
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
let success = get_root(rln_pointer, output_buffer.as_mut_ptr());
assert!(success, "get root call failed");
let output_buffer = unsafe { output_buffer.assume_init() };
let result_data = <&[u8]>::from(&output_buffer).to_vec();
let (root_next, _) = bytes_le_to_field(&result_data);

// We check if roots are the same
assert_eq!(root_single, root_next);

// We reset the tree to default
let success = set_tree(rln_pointer, tree_height);
assert!(success, "set tree call failed");

// We add leaves in a batch into the tree
let leaves_ser = vec_field_to_bytes_le(&leaves);
let input_buffer = &Buffer::from(leaves_ser.as_ref());
let success = set_leaves(rln_pointer, input_buffer);
assert!(success, "set leaf call failed");
assert!(success, "set leaves call failed");

// We get the root of the tree obtained adding leaves in batch
let mut output_buffer = MaybeUninit::<Buffer>::uninit();

@@ -212,18 +334,53 @@ mod test {
let result_data = <&[u8]>::from(&output_buffer).to_vec();
let (root_batch, _) = bytes_le_to_field(&result_data);

// We check if roots are the same
assert_eq!(root_single, root_batch);

// We now delete all leaves set and check if the root corresponds to the empty tree root
// delete calls over indexes higher than no_of_leaves are ignored and will not increase self.tree.next_index
let delete_range = 2 * no_of_leaves;
for i in 0..delete_range {
let success = delete_leaf(rln_pointer, i);
assert!(success, "delete leaf call failed");
}

// We get the root of the tree obtained deleting all leaves
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
let success = get_root(rln_pointer, output_buffer.as_mut_ptr());
assert!(success, "get root call failed");
let output_buffer = unsafe { output_buffer.assume_init() };
let result_data = <&[u8]>::from(&output_buffer).to_vec();
let (root_delete, _) = bytes_le_to_field(&result_data);

// We reset the tree to default
let success = set_tree(rln_pointer, tree_height);
assert!(success, "set tree call failed");

// We get the root of the empty tree
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
let success = get_root(rln_pointer, output_buffer.as_mut_ptr());
assert!(success, "get root call failed");
let output_buffer = unsafe { output_buffer.assume_init() };
let result_data = <&[u8]>::from(&output_buffer).to_vec();
let (root_empty, _) = bytes_le_to_field(&result_data);

// We check if roots are the same
assert_eq!(root_delete, root_empty);
}

#[test]
// This test is similar to the one in lib, but uses only public C API
// This test contains hardcoded values!
// TODO: expand this test to work with tree_height = 20
fn test_merkle_proof_ffi() {
let tree_height = 16;
let tree_height = TEST_TREE_HEIGHT;
let leaf_index = 3;

// We create a RLN instance
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
let success = new(tree_height, rln_pointer.as_mut_ptr());
let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
assert!(success, "RLN object creation failed");
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };

@@ -247,12 +404,6 @@ mod test {
let result_data = <&[u8]>::from(&output_buffer).to_vec();
let (root, _) = bytes_le_to_field(&result_data);

assert_eq!(
root,
Field::from_str("0x27401a4559ce263630907ce3b77c570649e28ede22d2a7f5296839627a16e870")
.unwrap()
);

// We obtain the Merkle tree root
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
let success = get_proof(rln_pointer, leaf_index, output_buffer.as_mut_ptr());

@@ -305,18 +456,19 @@ mod test {
// We double check that the proof computed from public API is correct
let root_from_proof =
get_tree_root(&id_commitment, &path_elements, &identity_path_index, false);
compute_tree_root(&id_commitment, &path_elements, &identity_path_index, false);

assert_eq!(root, root_from_proof);
}

#[test]
fn test_groth16_proof_ffi() {
let tree_height: usize = 16;
let tree_height = TEST_TREE_HEIGHT;

// We create a RLN instance
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
let success = new(tree_height, rln_pointer.as_mut_ptr());
let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
assert!(success, "RLN object creation failed");
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };

@@ -347,4 +499,117 @@ mod test {
assert!(success, "verify call failed");
assert_eq!(proof_is_valid, true);
}

#[test]
fn test_rln_proof_ffi() {
let tree_height = TEST_TREE_HEIGHT;
let no_of_leaves = 256;

// We generate a vector of random leaves
let mut leaves: Vec<Field> = Vec::new();
let mut rng = thread_rng();
for _ in 0..no_of_leaves {
leaves.push(to_field(&Fr::rand(&mut rng)));
}

// We create a RLN instance
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
assert!(success, "RLN object creation failed");
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };

// We add leaves in a batch into the tree
let leaves_ser = vec_field_to_bytes_le(&leaves);
let input_buffer = &Buffer::from(leaves_ser.as_ref());
let success = set_leaves(rln_pointer, input_buffer);
assert!(success, "set leaves call failed");

// We generate a new identity pair
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
let success = key_gen(rln_pointer, output_buffer.as_mut_ptr());
assert!(success, "key gen call failed");
let output_buffer = unsafe { output_buffer.assume_init() };
let result_data = <&[u8]>::from(&output_buffer).to_vec();
let (identity_secret, read) = bytes_le_to_field(&result_data);
let (id_commitment, _) = bytes_le_to_field(&result_data[read..].to_vec());

// We set as leaf id_commitment, its index would be equal to no_of_leaves
let leaf_ser = field_to_bytes_le(&id_commitment);
let input_buffer = &Buffer::from(leaf_ser.as_ref());
let success = set_next_leaf(rln_pointer, input_buffer);
assert!(success, "set next leaf call failed");

let identity_index: u64 = no_of_leaves;

// We generate a random signal
let mut rng = rand::thread_rng();
let signal: [u8; 32] = rng.gen();
let signal_len = u64::try_from(signal.len()).unwrap();

// We generate a random epoch
let epoch = hash_to_field(b"test-epoch");

// We prepare input for generate_rln_proof API
// input_data is [ id_key<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]
let mut serialized: Vec<u8> = Vec::new();
serialized.append(&mut field_to_bytes_le(&identity_secret));
serialized.append(&mut identity_index.to_le_bytes().to_vec());
serialized.append(&mut field_to_bytes_le(&epoch));
serialized.append(&mut signal_len.to_le_bytes().to_vec());
serialized.append(&mut signal.to_vec());

// We call generate_rln_proof
let input_buffer = &Buffer::from(serialized.as_ref());
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
let success = generate_rln_proof(rln_pointer, input_buffer, output_buffer.as_mut_ptr());
assert!(success, "set leaves call failed");
let output_buffer = unsafe { output_buffer.assume_init() };
// result_data is [ proof<128> | share_y<32> | nullifier<32> | root<32> | epoch<32> | share_x<32> | rln_identifier<32> ]
let mut proof_data = <&[u8]>::from(&output_buffer).to_vec();

// We prepare input for verify_rln_proof API
// input_data is [ proof<128> | share_y<32> | nullifier<32> | root<32> | epoch<32> | share_x<32> | rln_identifier<32> | signal_len<8> | signal<var> ]
// that is [ proof_data | signal_len<8> | signal<var> ]
proof_data.append(&mut signal_len.to_le_bytes().to_vec());
proof_data.append(&mut signal.to_vec());

// We call generate_rln_proof
let input_buffer = &Buffer::from(proof_data.as_ref());
let mut proof_is_valid: bool = false;
let proof_is_valid_ptr = &mut proof_is_valid as *mut bool;
let success = verify_rln_proof(rln_pointer, input_buffer, proof_is_valid_ptr);
assert!(success, "verify call failed");
assert_eq!(proof_is_valid, true);
}

#[test]
fn test_hash_to_field_ffi() {
let tree_height = TEST_TREE_HEIGHT;

// We create a RLN instance
let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
let input_buffer = &Buffer::from(TEST_RESOURCES_FOLDER.as_bytes());
let success = new(tree_height, input_buffer, rln_pointer.as_mut_ptr());
assert!(success, "RLN object creation failed");
let rln_pointer = unsafe { &mut *rln_pointer.assume_init() };

let mut rng = rand::thread_rng();
let signal: [u8; 32] = rng.gen();

// We prepare id_commitment and we set the leaf at provided index
let input_buffer = &Buffer::from(signal.as_ref());
let mut output_buffer = MaybeUninit::<Buffer>::uninit();
let success = hash(rln_pointer, input_buffer, output_buffer.as_mut_ptr());
assert!(success, "hash call failed");
let output_buffer = unsafe { output_buffer.assume_init() };

// We read the returned proof and we append proof values for verify
let serialized_hash = <&[u8]>::from(&output_buffer).to_vec();
let (hash1, _) = bytes_le_to_field(&serialized_hash);

let hash2 = hash_to_field(&signal);

assert_eq!(hash1, hash2);
}
}
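The comments in the tests above spell out the byte layouts crossing the FFI boundary. A small illustrative helper (not part of the API; field_to_bytes_le comes from the crate's utils module, as used in the tests) that assembles the generate_rln_proof input [ id_key<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]:

    // Sketch: mirrors the serialization performed inline in test_rln_proof_ffi above.
    use semaphore::Field;

    fn prepare_generate_rln_proof_input(
        identity_secret: &Field,
        identity_index: u64,
        epoch: &Field,
        signal: &[u8],
    ) -> Vec<u8> {
        let mut serialized: Vec<u8> = Vec::new();
        serialized.append(&mut field_to_bytes_le(identity_secret)); // id_key<32>
        serialized.extend_from_slice(&identity_index.to_le_bytes()); // id_index<8>
        serialized.append(&mut field_to_bytes_le(epoch)); // epoch<32>
        serialized.extend_from_slice(&(signal.len() as u64).to_le_bytes()); // signal_len<8>
        serialized.extend_from_slice(signal); // signal<var>
        serialized
    }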
@@ -8,6 +8,8 @@ use ark_std::str::FromStr;
pub mod circuit;
pub mod ffi;
pub mod merkle_tree;
pub mod poseidon_tree;
pub mod protocol;
pub mod public;
pub mod utils;

@@ -15,13 +17,12 @@ pub mod utils;
#[cfg(test)]
mod test {
use super::*;
use crate::circuit::{TEST_RESOURCES_FOLDER, TEST_TREE_HEIGHT};
use crate::poseidon_tree::PoseidonTree;
use crate::protocol::*;
use hex_literal::hex;
use num_bigint::BigInt;
use semaphore::{
hash::Hash, hash_to_field, identity::Identity, poseidon_hash, poseidon_tree::PoseidonTree,
Field,
};
use semaphore::{hash::Hash, identity::Identity, poseidon_hash, Field};

// Input generated with https://github.com/oskarth/zk-kit/commit/b6a872f7160c7c14e10a0ea40acab99cbb23c9a8
const WITNESS_JSON: &str = r#"

@@ -69,8 +70,10 @@ mod test {
#[test]
// We test Merkle Tree generation, proofs and verification
// This test contains hardcoded values!
// TODO: expand this test to work with tree_height = 20
fn test_merkle_proof() {
let tree_height = 16;
let tree_height = TEST_TREE_HEIGHT;
let leaf_index = 3;

// generate identity

@@ -93,8 +96,8 @@ mod test {
);

let merkle_proof = tree.proof(leaf_index).expect("proof should exist");
let path_elements = get_path_elements(&merkle_proof);
let identity_path_index = get_identity_path_index(&merkle_proof);
let path_elements = merkle_proof.get_path_elements();
let identity_path_index = merkle_proof.get_path_index();

// We check correct computation of the path and indexes
let expected_path_elements = vec![

@@ -142,11 +145,13 @@ mod test {
#[test]
// We test a RLN proof generation and verification
// This test contains hardcoded values!
// TODO: expand this test to work with tree_height = 20
fn test_witness_from_json() {
// We generate all relevant keys
let proving_key = ZKEY().unwrap();
let verification_key = VK().unwrap();
let builder = CIRCOM();
let proving_key = ZKEY(TEST_RESOURCES_FOLDER).unwrap();
let verification_key = VK(TEST_RESOURCES_FOLDER).unwrap();
let builder = CIRCOM(TEST_RESOURCES_FOLDER).unwrap();

// We compute witness from the json input example
let rln_witness = rln_witness_from_json(WITNESS_JSON);

@@ -165,14 +170,11 @@ mod test {
#[test]
// We test a RLN proof generation and verification
fn test_end_to_end() {
let tree_height = 16;
let tree_height = TEST_TREE_HEIGHT;
let leaf_index = 3;

// Generate identity
// We follow zk-kit approach for identity generation
let id = Identity::from_seed(b"hello");
let identity_secret = poseidon_hash(&vec![id.trapdoor, id.nullifier]);
let id_commitment = poseidon_hash(&vec![identity_secret]);
// Generate identity pair
let (identity_secret, id_commitment) = keygen();

//// generate merkle tree
let default_leaf = Field::from(0);

@@ -186,15 +188,19 @@ mod test {
// We set the remaining values to random ones
let epoch = hash_to_field(b"test-epoch");
let rln_identifier = hash_to_field(b"test-rln-identifier");
//let rln_identifier = hash_to_field(b"test-rln-identifier");

let rln_witness: RLNWitnessInput =
rln_witness_from_values(identity_secret, &merkle_proof, x, epoch, rln_identifier);
let rln_witness: RLNWitnessInput = rln_witness_from_values(
identity_secret,
&merkle_proof,
x,
epoch, /*, rln_identifier*/
);

// We generate all relevant keys
let proving_key = ZKEY().unwrap();
let verification_key = VK().unwrap();
let builder = CIRCOM();
let proving_key = ZKEY(TEST_RESOURCES_FOLDER).unwrap();
let verification_key = VK(TEST_RESOURCES_FOLDER).unwrap();
let builder = CIRCOM(TEST_RESOURCES_FOLDER).unwrap();

// Let's generate a zkSNARK proof
let proof = generate_proof(builder, &proving_key, &rln_witness).unwrap();

@@ -212,13 +218,13 @@ mod test {
// We test witness serialization
let rln_witness = rln_witness_from_json(WITNESS_JSON);
let ser = serialize_witness(&rln_witness);
let deser = deserialize_witness(&ser);
let (deser, _) = deserialize_witness(&ser);
assert_eq!(rln_witness, deser);

// We test Proof values serialization
let proof_values = proof_values_from_witness(&rln_witness);
let ser = serialize_proof_values(&proof_values);
let deser = deserialize_proof_values(&ser);
let (deser, _) = deserialize_proof_values(&ser);
assert_eq!(proof_values, deser);
}
}
@@ -0,0 +1,379 @@
// Implementation adapted from https://github.com/worldcoin/semaphore-rs/blob/d462a4372f1fd9c27610f2acfe4841fab1d396aa/src/merkle_tree.rs
// In our customization, we expand MerkleTree to have a next_index counter, so that we can add a public API to add leaves to the next available counter with no need to specify the index

//! Implements basic binary Merkle trees
//!
//! # To do
//!
//! * Disk based storage backend (using mmaped files should be easy)

use semaphore::Field;
use serde::{Deserialize, Serialize};
use std::{
cmp::max,
fmt::Debug,
iter::{once, repeat, successors},
};

/// Hash types, values and algorithms for a Merkle tree
pub trait Hasher {
/// Type of the leaf and node hashes
type Hash: Copy + Clone + Eq + Serialize;

/// Compute the hash of an intermediate node
fn hash_node(left: &Self::Hash, right: &Self::Hash) -> Self::Hash;
}

/// Merkle tree with all leaf and intermediate hashes stored
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct MerkleTree<H: Hasher> {
/// Depth of the tree, # of layers including leaf layer
depth: usize,

/// Hash value of empty subtrees of given depth, starting at leaf level
empty: Vec<H::Hash>,

/// Hash values of tree nodes and leaves, breadth first order
nodes: Vec<H::Hash>,

// The next available (i.e., never used) tree index. Equivalently, the number of leaves added to the tree
// (deletions leave next_index unchanged)
pub next_index: usize,
}

/// Element of a Merkle proof
#[derive(Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum Branch<H: Hasher> {
/// Left branch taken, value is the right sibling hash.
Left(H::Hash),

/// Right branch taken, value is the left sibling hash.
Right(H::Hash),
}

/// Merkle proof path, bottom to top.
#[derive(Clone, PartialEq, Eq, Serialize)]
pub struct Proof<H: Hasher>(pub Vec<Branch<H>>);

/// For a given node index, return the parent node index
/// Returns None if there is no parent (root node)
const fn parent(index: usize) -> Option<usize> {
if index == 0 {
None
} else {
Some(((index + 1) >> 1) - 1)
}
}

/// For a given node index, return index of the first (left) child.
const fn first_child(index: usize) -> usize {
(index << 1) + 1
}

const fn depth(index: usize) -> usize {
// `n.next_power_of_two()` will return `n` iff `n` is a power of two.
// The extra offset corrects this.
(index + 2).next_power_of_two().trailing_zeros() as usize - 1
}

impl<H: Hasher> MerkleTree<H> {
/// Creates a new `MerkleTree`
/// * `depth` - The depth of the tree, including the root. This is 1 greater
/// than the `treeLevels` argument to the Semaphore contract.
pub fn new(depth: usize, initial_leaf: H::Hash) -> Self {
// Compute empty node values, leaf to root
let empty = successors(Some(initial_leaf), |prev| Some(H::hash_node(prev, prev)))
.take(depth)
.collect::<Vec<_>>();

// Compute node values
let nodes = empty
.iter()
.rev()
.enumerate()
.flat_map(|(depth, hash)| repeat(hash).take(1 << depth))
.cloned()
.collect::<Vec<_>>();
debug_assert!(nodes.len() == (1 << depth) - 1);

let next_index = 0;

Self {
depth,
empty,
nodes,
next_index,
}
}

#[must_use]
pub fn num_leaves(&self) -> usize {
self.depth
.checked_sub(1)
.map(|n| 1 << n)
.unwrap_or_default()
}

#[must_use]
pub fn root(&self) -> H::Hash {
self.nodes[0]
}

pub fn set(&mut self, leaf: usize, hash: H::Hash) {
self.set_range(leaf, once(hash));
self.next_index = max(self.next_index, leaf + 1);
}

pub fn set_range<I: IntoIterator<Item = H::Hash>>(&mut self, start: usize, hashes: I) {
let index = self.num_leaves() + start - 1;
let mut count = 0;
// TODO: Error/panic when hashes is longer than available leafs
for (leaf, hash) in self.nodes[index..].iter_mut().zip(hashes) {
*leaf = hash;
count += 1;
}
if count != 0 {
self.update_nodes(index, index + (count - 1));
self.next_index = max(self.next_index, start + count);
}
}

fn update_nodes(&mut self, start: usize, end: usize) {
debug_assert_eq!(depth(start), depth(end));
if let (Some(start), Some(end)) = (parent(start), parent(end)) {
for parent in start..=end {
let child = first_child(parent);
self.nodes[parent] = H::hash_node(&self.nodes[child], &self.nodes[child + 1]);
}
self.update_nodes(start, end);
}
}

#[must_use]
pub fn proof(&self, leaf: usize) -> Option<Proof<H>> {
if leaf >= self.num_leaves() {
return None;
}
let mut index = self.num_leaves() + leaf - 1;
let mut path = Vec::with_capacity(self.depth);
while let Some(parent) = parent(index) {
// Add proof for node at index to parent
path.push(match index & 1 {
1 => Branch::Left(self.nodes[index + 1]),
0 => Branch::Right(self.nodes[index - 1]),
_ => unreachable!(),
});
index = parent;
}
Some(Proof(path))
}

#[must_use]
pub fn verify(&self, hash: H::Hash, proof: &Proof<H>) -> bool {
proof.root(hash) == self.root()
}

#[must_use]
pub fn leaves(&self) -> &[H::Hash] {
&self.nodes[(self.num_leaves() - 1)..]
}
}

impl<H: Hasher> Proof<H> {
/// Compute the leaf index for this proof
#[must_use]
pub fn leaf_index(&self) -> usize {
self.0.iter().rev().fold(0, |index, branch| match branch {
Branch::Left(_) => index << 1,
Branch::Right(_) => (index << 1) + 1,
})
}

#[must_use]
pub fn get_path_elements(&self) -> Vec<H::Hash> {
self.0
.iter()
.map(|x| match x {
Branch::Left(value) | Branch::Right(value) => *value,
})
.collect()
}

/// Compute path index (TODO: do we want to keep this here?)
#[must_use]
pub fn get_path_index(&self) -> Vec<u8> {
self.0
.iter()
.map(|branch| match branch {
Branch::Left(_) => 0,
Branch::Right(_) => 1,
})
.collect()
}

/// Compute the Merkle root given a leaf hash
#[must_use]
pub fn root(&self, hash: H::Hash) -> H::Hash {
self.0.iter().fold(hash, |hash, branch| match branch {
Branch::Left(sibling) => H::hash_node(&hash, sibling),
Branch::Right(sibling) => H::hash_node(sibling, &hash),
})
}
}

impl<H> Debug for Branch<H>
where
H: Hasher,
H::Hash: Debug,
{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::Left(arg0) => f.debug_tuple("Left").field(arg0).finish(),
Self::Right(arg0) => f.debug_tuple("Right").field(arg0).finish(),
}
}
}

impl<H> Debug for Proof<H>
where
H: Hasher,
H::Hash: Debug,
{
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.debug_tuple("Proof").field(&self.0).finish()
}
}

#[cfg(test)]
pub mod test {
use super::*;
use hex_literal::hex;
use tiny_keccak::{Hasher as _, Keccak};

struct Keccak256;

impl Hasher for Keccak256 {
type Hash = [u8; 32];

fn hash_node(left: &Self::Hash, right: &Self::Hash) -> Self::Hash {
let mut output = [0; 32];
let mut hasher = Keccak::v256();
hasher.update(left);
hasher.update(right);
hasher.finalize(&mut output);
output
}
}

#[test]
fn test_index_calculus() {
assert_eq!(parent(0), None);
assert_eq!(parent(1), Some(0));
assert_eq!(parent(2), Some(0));
assert_eq!(parent(3), Some(1));
assert_eq!(parent(4), Some(1));
assert_eq!(parent(5), Some(2));
assert_eq!(parent(6), Some(2));
assert_eq!(first_child(0), 1);
assert_eq!(first_child(2), 5);
assert_eq!(depth(0), 0);
assert_eq!(depth(1), 1);
assert_eq!(depth(2), 1);
assert_eq!(depth(3), 2);
assert_eq!(depth(6), 2);
}

#[test]
fn test_root() {
let mut tree = MerkleTree::<Keccak256>::new(3, [0; 32]);
assert_eq!(
tree.root(),
hex!("b4c11951957c6f8f642c4af61cd6b24640fec6dc7fc607ee8206a99e92410d30")
);
tree.set(
0,
hex!("0000000000000000000000000000000000000000000000000000000000000001"),
);
assert_eq!(
tree.root(),
hex!("c1ba1812ff680ce84c1d5b4f1087eeb08147a4d510f3496b2849df3a73f5af95")
);
tree.set(
1,
hex!("0000000000000000000000000000000000000000000000000000000000000002"),
);
assert_eq!(
tree.root(),
hex!("893760ec5b5bee236f29e85aef64f17139c3c1b7ff24ce64eb6315fca0f2485b")
);
tree.set(
2,
hex!("0000000000000000000000000000000000000000000000000000000000000003"),
);
assert_eq!(
tree.root(),
hex!("222ff5e0b5877792c2bc1670e2ccd0c2c97cd7bb1672a57d598db05092d3d72c")
);
tree.set(
3,
hex!("0000000000000000000000000000000000000000000000000000000000000004"),
);
assert_eq!(
tree.root(),
hex!("a9bb8c3f1f12e9aa903a50c47f314b57610a3ab32f2d463293f58836def38d36")
);
}

#[test]
fn test_proof() {
let mut tree = MerkleTree::<Keccak256>::new(3, [0; 32]);
tree.set(
0,
hex!("0000000000000000000000000000000000000000000000000000000000000001"),
);
tree.set(
1,
hex!("0000000000000000000000000000000000000000000000000000000000000002"),
);
tree.set(
2,
hex!("0000000000000000000000000000000000000000000000000000000000000003"),
);
tree.set(
3,
hex!("0000000000000000000000000000000000000000000000000000000000000004"),
);

let proof = tree.proof(2).expect("proof should exist");
assert_eq!(proof.leaf_index(), 2);
assert!(tree.verify(
hex!("0000000000000000000000000000000000000000000000000000000000000003"),
&proof
));
assert!(!tree.verify(
hex!("0000000000000000000000000000000000000000000000000000000000000001"),
&proof
));
}

#[test]
fn test_position() {
let mut tree = MerkleTree::<Keccak256>::new(3, [0; 32]);
tree.set(
0,
hex!("0000000000000000000000000000000000000000000000000000000000000001"),
);
tree.set(
1,
hex!("0000000000000000000000000000000000000000000000000000000000000002"),
);
tree.set(
2,
hex!("0000000000000000000000000000000000000000000000000000000000000003"),
);
tree.set(
3,
hex!("0000000000000000000000000000000000000000000000000000000000000004"),
);
}
}
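As the header comment notes, the customization over the upstream semaphore-rs tree is the next_index counter, which tracks the first never-used leaf slot. A short sketch of that behaviour using the Keccak256 test hasher defined above:

    // Sketch built on the MerkleTree and Keccak256 types from this file.
    fn next_index_sketch() {
        let mut tree = MerkleTree::<Keccak256>::new(3, [0; 32]); // depth 3 => 4 leaves
        assert_eq!(tree.next_index, 0);

        // Appending at the next free slot; RLN::set_next_leaf builds on this pattern.
        tree.set(tree.next_index, [1u8; 32]);
        assert_eq!(tree.next_index, 1);

        // Setting a later index explicitly advances the counter past it.
        tree.set(3, [2u8; 32]);
        assert_eq!(tree.next_index, 4);
    }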
@@ -0,0 +1,25 @@
// Implementation taken from https://github.com/worldcoin/semaphore-rs/blob/d462a4372f1fd9c27610f2acfe4841fab1d396aa/src/poseidon_tree.rs (no differences)
// Implements Merkle trees with Poseidon hash for the customized semaphore-rs merkle_tree implementation

use crate::merkle_tree::{self, Hasher, MerkleTree};
use semaphore::{poseidon_hash, Field};

use serde::{Deserialize, Serialize};

#[allow(dead_code)]
pub type PoseidonTree = MerkleTree<PoseidonHash>;
#[allow(dead_code)]
pub type Branch = merkle_tree::Branch<PoseidonHash>;
#[allow(dead_code)]
pub type Proof = merkle_tree::Proof<PoseidonHash>;

#[derive(Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub struct PoseidonHash;

impl Hasher for PoseidonHash {
type Hash = Field;

fn hash_node(left: &Self::Hash, right: &Self::Hash) -> Self::Hash {
poseidon_hash(&[*left, *right])
}
}
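A brief sketch of how this Poseidon-hashed tree is used elsewhere in this commit (for instance in the lib.rs tests): build the tree, set a leaf, then extract the path representation consumed by the RLN witness. The leaf value below is illustrative only:

    // Sketch: PoseidonTree is the alias defined in this file; Field comes from semaphore.
    use semaphore::Field;

    fn poseidon_tree_sketch() {
        let default_leaf = Field::from(0);
        let mut tree = PoseidonTree::new(16, default_leaf); // tree height used by the default test resources

        let leaf_index = 3;
        tree.set(leaf_index, Field::from(42)); // illustrative id_commitment

        let merkle_proof = tree.proof(leaf_index).expect("proof should exist");
        let _path_elements = merkle_proof.get_path_elements();
        let _identity_path_index = merkle_proof.get_path_index();
    }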
@@ -1,4 +1,6 @@
use crate::circuit::{VK, ZKEY};
use crate::merkle_tree::{self, Branch};
use crate::poseidon_tree::PoseidonHash;
use ark_bn254::{Bn254, Fr, Parameters};
use ark_circom::{read_zkey, CircomBuilder, CircomConfig, CircomReduction};
use ark_ec::bn::Bn;

@@ -16,18 +18,14 @@ use ethers_core::utils::keccak256;
use num_bigint::{BigInt, BigUint, ToBigInt};
use primitive_types::U256;
use rand::Rng;
use semaphore::{
identity::Identity,
merkle_tree::{self, Branch},
poseidon_hash,
poseidon_tree::PoseidonHash,
Field,
};
use semaphore::{identity::Identity, poseidon_hash, Field};
use serde::{Deserialize, Serialize};
use std::io::Write;
use std::time::Instant;
use thiserror::Error;

use crate::poseidon_tree::*;
use crate::public::{RLN, RLN_IDENTIFIER};
pub use crate::utils::*;

///////////////////////////////////////////////////////

@@ -47,13 +45,13 @@ pub struct RLNWitnessInput {
#[derive(Debug, PartialEq)]
pub struct RLNProofValues {
// Public outputs:
y: Field,
nullifier: Field,
root: Field,
pub y: Field,
pub nullifier: Field,
pub root: Field,
// Public Inputs:
x: Field,
epoch: Field,
rln_identifier: Field,
pub x: Field,
pub epoch: Field,
pub rln_identifier: Field,
}

pub fn serialize_witness(rln_witness: &RLNWitnessInput) -> Vec<u8> {

@@ -69,7 +67,7 @@ pub fn serialize_witness(rln_witness: &RLNWitnessInput) -> Vec<u8> {
serialized
}

pub fn deserialize_witness(serialized: &[u8]) -> RLNWitnessInput {
pub fn deserialize_witness(serialized: &[u8]) -> (RLNWitnessInput, usize) {
let mut all_read: usize = 0;

let (identity_secret, read) = bytes_le_to_field(&serialized[all_read..].to_vec());

@@ -90,61 +88,115 @@ pub fn deserialize_witness(serialized: &[u8]) -> RLNWitnessInput {
let (rln_identifier, read) = bytes_le_to_field(&serialized[all_read..].to_vec());
all_read += read;

// TODO: check rln_identifier against public::RLN_IDENTIFIER
assert_eq!(serialized.len(), all_read);
RLNWitnessInput {
identity_secret,
path_elements,
identity_path_index,
x,
epoch,
rln_identifier,
}

(
RLNWitnessInput {
identity_secret,
path_elements,
identity_path_index,
x,
epoch,
rln_identifier,
},
all_read,
)
}

// This function deserializes input for kilic's rln generate_proof public API
// https://github.com/kilic/rln/blob/7ac74183f8b69b399e3bc96c1ae8ab61c026dc43/src/public.rs#L148
// input_data is [ id_key<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]
// return value is a rln witness populated according to this information
pub fn proof_inputs_to_rln_witness(
tree: &mut PoseidonTree,
serialized: &[u8],
) -> (RLNWitnessInput, usize) {
let mut all_read: usize = 0;

let (identity_secret, read) = bytes_le_to_field(&serialized[all_read..].to_vec());
all_read += read;

let id_index = u64::from_le_bytes(serialized[all_read..all_read + 8].try_into().unwrap());
all_read += 8;

let (epoch, read) = bytes_le_to_field(&serialized[all_read..].to_vec());
all_read += read;

let signal_len = u64::from_le_bytes(serialized[all_read..all_read + 8].try_into().unwrap());
all_read += 8;

let signal: Vec<u8> =
serialized[all_read..all_read + usize::try_from(signal_len).unwrap()].to_vec();

let merkle_proof = tree
.proof(usize::try_from(id_index).unwrap())
.expect("proof should exist");
let path_elements = merkle_proof.get_path_elements();
let identity_path_index = merkle_proof.get_path_index();

let x = hash_to_field(&signal);

let rln_identifier = hash_to_field(RLN_IDENTIFIER);

(
RLNWitnessInput {
identity_secret,
path_elements,
identity_path_index,
x,
epoch,
rln_identifier,
},
all_read,
)
}

pub fn serialize_proof_values(rln_proof_values: &RLNProofValues) -> Vec<u8> {
let mut serialized: Vec<u8> = Vec::new();

serialized.append(&mut field_to_bytes_le(&rln_proof_values.root));
serialized.append(&mut field_to_bytes_le(&rln_proof_values.epoch));
serialized.append(&mut field_to_bytes_le(&rln_proof_values.x));
serialized.append(&mut field_to_bytes_le(&rln_proof_values.y));
serialized.append(&mut field_to_bytes_le(&rln_proof_values.nullifier));
serialized.append(&mut field_to_bytes_le(&rln_proof_values.root));
serialized.append(&mut field_to_bytes_le(&rln_proof_values.x));
serialized.append(&mut field_to_bytes_le(&rln_proof_values.epoch));
serialized.append(&mut field_to_bytes_le(&rln_proof_values.rln_identifier));

serialized
}

pub fn deserialize_proof_values(serialized: &[u8]) -> RLNProofValues {
pub fn deserialize_proof_values(serialized: &[u8]) -> (RLNProofValues, usize) {
let mut all_read: usize = 0;

let (root, read) = bytes_le_to_field(&serialized[all_read..].to_vec());
all_read += read;

let (epoch, read) = bytes_le_to_field(&serialized[all_read..].to_vec());
all_read += read;

let (x, read) = bytes_le_to_field(&serialized[all_read..].to_vec());
all_read += read;

let (y, read) = bytes_le_to_field(&serialized[all_read..].to_vec());
all_read += read;

let (nullifier, read) = bytes_le_to_field(&serialized[all_read..].to_vec());
all_read += read;

let (root, read) = bytes_le_to_field(&serialized[all_read..].to_vec());
all_read += read;

let (x, read) = bytes_le_to_field(&serialized[all_read..].to_vec());
all_read += read;

let (epoch, read) = bytes_le_to_field(&serialized[all_read..].to_vec());
all_read += read;

let (rln_identifier, read) = bytes_le_to_field(&serialized[all_read..].to_vec());
all_read += read;

assert_eq!(serialized.len(), all_read);

RLNProofValues {
y,
nullifier,
root,
x,
epoch,
rln_identifier,
}
(
RLNProofValues {
y,
nullifier,
root,
x,
epoch,
rln_identifier,
},
all_read,
)
}

pub fn rln_witness_from_json(input_json_str: &str) -> RLNWitnessInput {

@@ -169,6 +221,8 @@ pub fn rln_witness_from_json(input_json_str: &str) -> RLNWitnessInput {
let rln_identifier = str_to_field(input_json["rln_identifier"].to_string(), 10);

// TODO: check rln_identifier against public::RLN_IDENTIFIER

RLNWitnessInput {
identity_secret,
path_elements,

@@ -184,10 +238,11 @@ pub fn rln_witness_from_values(
merkle_proof: &merkle_tree::Proof<PoseidonHash>,
x: Field,
epoch: Field,
rln_identifier: Field,
//rln_identifier: Field,
) -> RLNWitnessInput {
let path_elements = get_path_elements(merkle_proof);
let identity_path_index = get_identity_path_index(merkle_proof);
let path_elements = merkle_proof.get_path_elements();
let identity_path_index = merkle_proof.get_path_index();
let rln_identifier = hash_to_field(RLN_IDENTIFIER);

RLNWitnessInput {
identity_secret,

@@ -205,7 +260,7 @@ pub fn random_rln_witness(tree_height: usize) -> RLNWitnessInput {
let identity_secret = hash_to_field(&rng.gen::<[u8; 32]>());
let x = hash_to_field(&rng.gen::<[u8; 32]>());
let epoch = hash_to_field(&rng.gen::<[u8; 32]>());
let rln_identifier = hash_to_field(&rng.gen::<[u8; 32]>());
let rln_identifier = hash_to_field(RLN_IDENTIFIER); //hash_to_field(&rng.gen::<[u8; 32]>());

let mut path_elements: Vec<Field> = Vec::new();
let mut identity_path_index: Vec<u8> = Vec::new();

@@ -247,7 +302,7 @@ pub fn proof_values_from_witness(rln_witness: &RLNWitnessInput) -> RLNProofValue
}
}

let root = get_tree_root(
let root = compute_tree_root(
&rln_witness.identity_secret,
&rln_witness.path_elements,
&rln_witness.identity_path_index,

@@ -268,30 +323,7 @@ pub fn proof_values_from_witness(rln_witness: &RLNWitnessInput) -> RLNProofValue
// Merkle tree utility functions
///////////////////////////////////////////////////////

/// Helper to merkle proof into a bigint vector
/// TODO: we should create a From trait for this
pub fn get_path_elements(proof: &merkle_tree::Proof<PoseidonHash>) -> Vec<Field> {
proof
.0
.iter()
.map(|x| match x {
Branch::Left(value) | Branch::Right(value) => *value,
})
.collect()
}

pub fn get_identity_path_index(proof: &merkle_tree::Proof<PoseidonHash>) -> Vec<u8> {
proof
.0
.iter()
.map(|branch| match branch {
Branch::Left(_) => 0,
Branch::Right(_) => 1,
})
.collect()
}

pub fn get_tree_root(
pub fn compute_tree_root(
leaf: &Field,
path_elements: &[Field],
identity_path_index: &[u8],

@@ -316,6 +348,14 @@ pub fn get_tree_root(
///////////////////////////////////////////////////////
// Signal/nullifier utility functions
///////////////////////////////////////////////////////
// Generates a tupe (identity_secret, id_commitment) where
// identity_secret is random and id_commitment = PoseidonHash(identity_secret)
pub fn keygen() -> (Field, Field) {
let mut rng = thread_rng();
let identity_secret = to_field(&Fr::rand(&mut rng));
let id_commitment = poseidon_hash(&[identity_secret]);
(identity_secret, id_commitment)
}

pub fn hash_to_field(signal: &[u8]) -> Field {
let hash = keccak256(signal);
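A condensed sketch of the witness flow these hunks define: generate an identity with keygen, insert the id_commitment into a PoseidonTree, take its Merkle proof, then build the witness and public values (rln_identifier is now derived internally from RLN_IDENTIFIER rather than passed in). Everything here reuses functions shown above and is only a sketch, assuming they are in scope via crate::protocol::* and crate::poseidon_tree::*:

    use semaphore::Field;

    fn witness_flow_sketch(tree: &mut PoseidonTree, signal: &[u8]) {
        // keygen(): random identity_secret, id_commitment = PoseidonHash(identity_secret)
        let (identity_secret, id_commitment) = keygen();
        let index = tree.next_index;
        tree.set(index, id_commitment);

        let merkle_proof = tree.proof(index).expect("proof should exist");

        let x = hash_to_field(signal);
        let epoch = hash_to_field(b"test-epoch");

        let rln_witness = rln_witness_from_values(identity_secret, &merkle_proof, x, epoch);
        let proof_values = proof_values_from_witness(&rln_witness);
        let _ = (proof_values.y, proof_values.nullifier, proof_values.root);
    }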
|
|
@ -1,3 +1,4 @@
use crate::poseidon_tree::PoseidonTree;
/// This is the main public API for RLN. It is used by the FFI, and should be
/// used by tests etc as well
///
@ -6,40 +7,50 @@ use ark_circom::{CircomBuilder, CircomCircuit, CircomConfig};
use ark_groth16::Proof as ArkProof;
use ark_groth16::{ProvingKey, VerifyingKey};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
use ark_std::rand::thread_rng;
use ark_std::{rand::thread_rng, str::FromStr, UniformRand};
use num_bigint::BigInt;
use semaphore::{hash_to_field, identity::Identity, poseidon_tree::PoseidonTree, Field};
use semaphore::{identity::Identity, Field};
use serde::{Deserialize, Serialize};
use serde_json;
use std::default::Default;
use std::io::Cursor;
use std::io::{self, Error, ErrorKind, Result}; //default read/write
use std::option::Option;

// For the ToBytes implementation of groth16::Proof
use ark_ec::bn::Bn;
use ark_ff::bytes::ToBytes;
use ark_serialize::{Read, Write};

use crate::circuit::{CIRCOM, VK, ZKEY};
use crate::protocol::*;
use crate::circuit::{CIRCOM, TEST_RESOURCES_FOLDER, TEST_TREE_HEIGHT, VK, ZKEY};
use crate::protocol::{self, *};
use crate::utils::*;

// Application specific RLN identifier
pub const RLN_IDENTIFIER: &[u8] = b"zerokit/rln/010203040506070809";

// TODO Add Engine here? i.e. <E: Engine> not <Bn254>
// TODO Assuming we want to use IncrementalMerkleTree, figure out type/trait conversions
pub struct RLN {
    pub circom: CircomBuilder<Bn254>,
    pub circom: Option<CircomBuilder<Bn254>>,
    pub proving_key: Result<ProvingKey<Bn254>>,
    pub verification_key: Result<VerifyingKey<Bn254>>,
    pub tree: PoseidonTree,
    pub resources_folder: String,
}

use crate::utils::{to_field, to_fr};
use std::io::Cursor;

impl RLN {
    pub fn new(tree_height: usize) -> RLN {
        let circom = CIRCOM();
    pub fn new<R: Read>(tree_height: usize, mut input_data: R) -> RLN {
        // We read input
        let mut input: Vec<u8> = Vec::new();
        input_data.read_to_end(&mut input).unwrap();

        let proving_key = ZKEY();
        let verification_key = VK();
        let resources_folder = String::from_utf8(input).expect("Found invalid UTF-8");

        let circom = None::<CircomBuilder<Bn254>>; //CIRCOM();

        let proving_key = ZKEY(&resources_folder);
        let verification_key = VK(&resources_folder);

        // We compute a default empty tree
        let leaf = Field::from(0);
@ -50,6 +61,7 @@ impl RLN {
            proving_key,
            verification_key,
            tree,
            resources_folder,
        }
    }
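
// A minimal sketch of how the new constructor is meant to be called: the resources
// folder path is passed as a byte stream, so any Read implementor works (here a Cursor
// over the TEST_RESOURCES_FOLDER constant from crate::circuit). A real caller would
// substitute its own tree height and folder path; the helper name is ours.
fn sketch_rln_new() -> RLN {
    let resources = Cursor::new(TEST_RESOURCES_FOLDER);
    // Proving and verification keys are loaded eagerly from the folder, while the
    // Circom builder is loaded lazily on the first proof request.
    RLN::new(TEST_TREE_HEIGHT, resources)
}
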
@ -92,6 +104,30 @@ impl RLN {
        Ok(())
    }

    // Set the input leaf at the next available index
    pub fn set_next_leaf<R: Read>(&mut self, mut input_data: R) -> io::Result<()> {
        // We read input
        let mut leaf_byte: Vec<u8> = Vec::new();
        input_data.read_to_end(&mut leaf_byte)?;

        // We set the leaf at the next available index
        let (leaf, _) = bytes_le_to_field(&leaf_byte);
        self.tree.set(self.tree.next_index, leaf);

        Ok(())
    }

    // Deleting a leaf corresponds to setting its value to the default 0 leaf
    pub fn delete_leaf(&mut self, index: usize) -> io::Result<()> {
        // We reset the leaf only if we previously set a leaf at that index
        if index < self.tree.next_index {
            let leaf = Field::from(0);
            self.tree.set(index, leaf);
        }

        Ok(())
    }

    /// Returns the current membership root
    /// * `root` is a scalar field element in 32 bytes
    pub fn get_root<W: Write>(&self, mut output_data: W) -> io::Result<()> {
@ -105,8 +141,8 @@ impl RLN {
    /// * `root` is a scalar field element in 32 bytes
    pub fn get_proof<W: Write>(&self, index: usize, mut output_data: W) -> io::Result<()> {
        let merkle_proof = self.tree.proof(index).expect("proof should exist");
        let path_elements = get_path_elements(&merkle_proof);
        let identity_path_index = get_identity_path_index(&merkle_proof);
        let path_elements = merkle_proof.get_path_elements();
        let identity_path_index = merkle_proof.get_path_index();

        output_data.write_all(&vec_field_to_bytes_le(&path_elements))?;
        output_data.write_all(&vec_u8_to_bytes_le(&identity_path_index))?;
@ -118,17 +154,21 @@ impl RLN {
    // zkSNARK APIs
    ////////////////////////////////////////////////////////
    pub fn prove<R: Read, W: Write>(
        &self,
        &mut self,
        mut input_data: R,
        mut output_data: W,
    ) -> io::Result<()> {
        // We read the input RLN witness and deserialize it
        let mut witness_byte: Vec<u8> = Vec::new();
        input_data.read_to_end(&mut witness_byte)?;
        let rln_witness = deserialize_witness(&witness_byte);
        let mut serialized: Vec<u8> = Vec::new();
        input_data.read_to_end(&mut serialized)?;
        let (rln_witness, _) = deserialize_witness(&serialized);

        if self.circom.is_none() {
            self.circom = CIRCOM(&self.resources_folder);
        }

        let proof = generate_proof(
            self.circom.clone(),
            self.circom.as_ref().unwrap().clone(),
            self.proving_key.as_ref().unwrap(),
            &rln_witness,
        )
@ -145,10 +185,11 @@ impl RLN {
        // serialized_proof (compressed, 4*32 bytes) || serialized_proof_values (6*32 bytes)
        let mut input_byte: Vec<u8> = Vec::new();
        input_data.read_to_end(&mut input_byte)?;
        let proof: Proof = ArkProof::deserialize(&mut Cursor::new(&input_byte[..128].to_vec()))
            .unwrap()
            .into();
        let proof_values = deserialize_proof_values(&input_byte[128..].to_vec());
        let proof: protocol::Proof =
            ArkProof::deserialize(&mut Cursor::new(&input_byte[..128].to_vec()))
                .unwrap()
                .into();
        let (proof_values, _) = deserialize_proof_values(&input_byte[128..].to_vec());

        let verified = verify_proof(
            self.verification_key.as_ref().unwrap(),
@ -159,12 +200,105 @@ impl RLN {

        Ok(verified)
    }

    // This API keeps partial compatibility with kilic's rln public API https://github.com/kilic/rln/blob/7ac74183f8b69b399e3bc96c1ae8ab61c026dc43/src/public.rs#L148
    // input_data is [ id_key<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]
    // output_data is [ proof<128> | share_y<32> | nullifier<32> | root<32> | epoch<32> | share_x<32> | rln_identifier<32> ]
    pub fn generate_rln_proof<R: Read, W: Write>(
        &mut self,
        mut input_data: R,
        mut output_data: W,
    ) -> io::Result<()> {
        // We read the input RLN witness and deserialize it
        let mut witness_byte: Vec<u8> = Vec::new();
        input_data.read_to_end(&mut witness_byte)?;
        let (rln_witness, _) = proof_inputs_to_rln_witness(&mut self.tree, &witness_byte);
        let proof_values = proof_values_from_witness(&rln_witness);

        if self.circom.is_none() {
            self.circom = CIRCOM(&self.resources_folder);
        }

        let proof = generate_proof(
            self.circom.as_ref().unwrap().clone(),
            self.proving_key.as_ref().unwrap(),
            &rln_witness,
        )
        .unwrap();

        // Note: we export a serialization of ark-groth16::Proof not semaphore::Proof
        // This proof is compressed, i.e. 128 bytes long
        ArkProof::from(proof).serialize(&mut output_data).unwrap();
        output_data.write_all(&serialize_proof_values(&proof_values))?;

        Ok(())
    }
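
// A minimal sketch of how a caller assembles the input_data buffer documented above,
// mirroring what the tests below do: 32-byte little-endian field elements for id_key
// and epoch, and 8-byte little-endian integers for the index and the signal length.
// The helper name is ours and is only illustrative.
fn sketch_generate_rln_proof_input(
    identity_secret: Field,
    identity_index: u64,
    epoch: Field,
    signal: &[u8],
) -> Vec<u8> {
    let mut serialized: Vec<u8> = Vec::new();
    serialized.append(&mut field_to_bytes_le(&identity_secret));
    serialized.append(&mut identity_index.to_le_bytes().to_vec());
    serialized.append(&mut field_to_bytes_le(&epoch));
    serialized.append(&mut u64::try_from(signal.len()).unwrap().to_le_bytes().to_vec());
    serialized.append(&mut signal.to_vec());
    serialized
}
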
    // Input data is serialized for Bn254 as:
    // [ proof<128> | share_y<32> | nullifier<32> | root<32> | epoch<32> | share_x<32> | rln_identifier<32> | signal_len<8> | signal<var> ]
    pub fn verify_rln_proof<R: Read>(&self, mut input_data: R) -> io::Result<bool> {
        let mut serialized: Vec<u8> = Vec::new();
        input_data.read_to_end(&mut serialized)?;
        let mut all_read = 0;
        let proof: protocol::Proof =
            ArkProof::deserialize(&mut Cursor::new(&serialized[..128].to_vec()))
                .unwrap()
                .into();
        all_read += 128;
        let (proof_values, read) = deserialize_proof_values(&serialized[all_read..].to_vec());
        all_read += read;

        let signal_len = usize::try_from(u64::from_le_bytes(
            serialized[all_read..all_read + 8].try_into().unwrap(),
        ))
        .unwrap();
        all_read += 8;

        let signal: Vec<u8> = serialized[all_read..all_read + signal_len].to_vec();

        let verified = verify_proof(
            self.verification_key.as_ref().unwrap(),
            &proof,
            &proof_values,
        )
        .unwrap();

        // Consistency checks to counter proof tampering
        let x = hash_to_field(&signal);
        Ok(verified
            && (self.tree.root() == proof_values.root)
            && (x == proof_values.x)
            && (proof_values.rln_identifier == hash_to_field(RLN_IDENTIFIER)))
    }
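
// A short sketch of how the verification input documented above is assembled from the
// output of generate_rln_proof: the proof_data buffer is extended with the 8-byte
// little-endian signal length and the raw signal, then fed to verify_rln_proof. It
// mirrors the test_rln_proof test below; the helper name is ours.
fn sketch_verify_rln_proof_input(rln: &RLN, mut proof_data: Vec<u8>, signal: &[u8]) -> bool {
    proof_data.append(&mut u64::try_from(signal.len()).unwrap().to_le_bytes().to_vec());
    proof_data.append(&mut signal.to_vec());

    let mut input_buffer = Cursor::new(proof_data);
    rln.verify_rln_proof(&mut input_buffer).unwrap()
}
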
    ////////////////////////////////////////////////////////
    // Utils
    ////////////////////////////////////////////////////////

    pub fn key_gen<W: Write>(&self, mut output_data: W) -> io::Result<()> {
        let (id_key, id_commitment_key) = keygen();
        output_data.write_all(&field_to_bytes_le(&id_key))?;
        output_data.write_all(&field_to_bytes_le(&id_commitment_key))?;

        Ok(())
    }

    pub fn hash<R: Read, W: Write>(&self, mut input_data: R, mut output_data: W) -> io::Result<()> {
        let mut serialized: Vec<u8> = Vec::new();
        input_data.read_to_end(&mut serialized)?;

        let hash = hash_to_field(&serialized);
        output_data.write_all(&field_to_bytes_le(&hash))?;

        Ok(())
    }
}

impl Default for RLN {
    fn default() -> Self {
        let tree_height = 21;
        Self::new(tree_height)
        let tree_height = TEST_TREE_HEIGHT;
        let buffer = Cursor::new(TEST_RESOURCES_FOLDER);
        Self::new(tree_height, buffer)
    }
}
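
// A small sketch of how the byte-oriented key_gen output above can be read back:
// key_gen writes id_key (32 bytes) followed by id_commitment (32 bytes), so each half
// can be parsed independently with bytes_le_to_field. The helper name is ours.
fn sketch_read_keypair(rln: &RLN) -> (Field, Field) {
    let mut output_buffer = Cursor::new(Vec::<u8>::new());
    rln.key_gen(&mut output_buffer).unwrap();

    let serialized = output_buffer.into_inner();
    let (id_key, _) = bytes_le_to_field(&serialized[..32].to_vec());
    let (id_commitment, _) = bytes_le_to_field(&serialized[32..].to_vec());
    (id_key, id_commitment)
}
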
@ -177,21 +311,26 @@ mod test {

    #[test]
    // We test batch Merkle tree additions
    fn test_merkle_batch_additions() {
        let tree_height = 16;
    fn test_merkle_operations() {
        let tree_height = TEST_TREE_HEIGHT;
        let no_of_leaves = 256;

        // We generate a vector of random leaves
        let mut leaves: Vec<Field> = Vec::new();
        let mut rng = thread_rng();
        for _ in 0..256 {
            leaves.push(hash_to_field(&rng.gen::<[u8; 32]>()));
        for _ in 0..no_of_leaves {
            leaves.push(to_field(&Fr::rand(&mut rng)));
        }

        // We create a new tree
        let mut rln = RLN::new(tree_height);
        let input_buffer = Cursor::new(TEST_RESOURCES_FOLDER);
        let mut rln = RLN::new(tree_height, input_buffer);

        // We first add leaves one by one
        // We first add leaves one by one specifying the index
        for (i, leaf) in leaves.iter().enumerate() {
            // We check if internal index is properly set
            assert_eq!(rln.tree.next_index, i);

            let mut buffer = Cursor::new(field_to_bytes_le(&leaf));
            rln.set_leaf(i, &mut buffer).unwrap();
        }
@ -204,25 +343,72 @@ mod test {
        // We reset the tree to default
        rln.set_tree(tree_height).unwrap();

        // We add leaves one by one using the internal index (new leaves go in the next available position)
        for leaf in &leaves {
            let mut buffer = Cursor::new(field_to_bytes_le(&leaf));
            rln.set_next_leaf(&mut buffer).unwrap();
        }

        // We check if internal index is properly set
        assert_eq!(rln.tree.next_index, no_of_leaves);

        // We get the root of the tree obtained adding leaves using the internal index
        let mut buffer = Cursor::new(Vec::<u8>::new());
        rln.get_root(&mut buffer).unwrap();
        let (root_next, _) = bytes_le_to_field(&buffer.into_inner());

        assert_eq!(root_single, root_next);

        // We reset the tree to default
        rln.set_tree(tree_height).unwrap();

        // We add leaves in a batch into the tree
        let mut buffer = Cursor::new(vec_field_to_bytes_le(&leaves));
        rln.set_leaves(&mut buffer).unwrap();

        // We check if internal index is properly set
        assert_eq!(rln.tree.next_index, no_of_leaves);

        // We get the root of the tree obtained adding leaves in batch
        let mut buffer = Cursor::new(Vec::<u8>::new());
        rln.get_root(&mut buffer).unwrap();
        let (root_batch, _) = bytes_le_to_field(&buffer.into_inner());

        assert_eq!(root_single, root_batch);

        // We now delete all leaves set and check if the root corresponds to the empty tree root
        // delete calls over indexes higher than no_of_leaves are ignored and will not increase self.tree.next_index
        for i in 0..2 * no_of_leaves {
            rln.delete_leaf(i).unwrap();
        }

        // We check if internal index is properly set
        assert_eq!(rln.tree.next_index, no_of_leaves);

        let mut buffer = Cursor::new(Vec::<u8>::new());
        rln.get_root(&mut buffer).unwrap();
        let (root_delete, _) = bytes_le_to_field(&buffer.into_inner());

        // We reset the tree to default
        rln.set_tree(tree_height).unwrap();

        let mut buffer = Cursor::new(Vec::<u8>::new());
        rln.get_root(&mut buffer).unwrap();
        let (root_empty, _) = bytes_le_to_field(&buffer.into_inner());

        assert_eq!(root_delete, root_empty);
    }

    #[test]
    // This test is similar to the one in lib, but uses only public API
    // This test contains hardcoded values!
    // TODO: expand this test to work with tree_height = 20
    fn test_merkle_proof() {
        let tree_height = 16;
        let tree_height = TEST_TREE_HEIGHT;
        let leaf_index = 3;

        let mut rln = RLN::new(tree_height);
        let input_buffer = Cursor::new(TEST_RESOURCES_FOLDER);
        let mut rln = RLN::new(tree_height, input_buffer);

        // generate identity
        // We follow zk-kit approach for identity generation
@ -295,7 +481,7 @@ mod test {

        // We double check that the proof computed from public API is correct
        let root_from_proof =
            get_tree_root(&id_commitment, &path_elements, &identity_path_index, false);
            compute_tree_root(&id_commitment, &path_elements, &identity_path_index, false);

        assert_eq!(root, root_from_proof);
    }
@ -303,9 +489,10 @@ mod test {
    #[test]
    // This test is similar to the one in lib, but uses only public API
    fn test_groth16_proof() {
        let tree_height = 16;
        let tree_height = TEST_TREE_HEIGHT;

        let rln = RLN::new(tree_height);
        let input_buffer = Cursor::new(TEST_RESOURCES_FOLDER);
        let mut rln = RLN::new(tree_height, input_buffer);

        // Note: we only test Groth16 proof generation, so we ignore setting the tree in the RLN object
        let rln_witness = random_rln_witness(tree_height);
@ -318,7 +505,7 @@ mod test {
        let serialized_proof = output_buffer.into_inner();

        // Before checking public verify API, we check that the (deserialized) proof generated by prove is actually valid
        let proof: Proof = ArkProof::deserialize(&mut Cursor::new(&serialized_proof))
        let proof: protocol::Proof = ArkProof::deserialize(&mut Cursor::new(&serialized_proof))
            .unwrap()
            .into();
        let verified = verify_proof(
@ -340,4 +527,88 @@ mod test {

        assert!(verified);
    }

    #[test]
    fn test_rln_proof() {
        let tree_height = TEST_TREE_HEIGHT;
        let no_of_leaves = 256;

        // We generate a vector of random leaves
        let mut leaves: Vec<Field> = Vec::new();
        let mut rng = thread_rng();
        for _ in 0..no_of_leaves {
            leaves.push(to_field(&Fr::rand(&mut rng)));
        }

        // We create a new RLN instance
        let input_buffer = Cursor::new(TEST_RESOURCES_FOLDER);
        let mut rln = RLN::new(tree_height, input_buffer);

        // We add leaves in a batch into the tree
        let mut buffer = Cursor::new(vec_field_to_bytes_le(&leaves));
        rln.set_leaves(&mut buffer).unwrap();

        // Generate identity pair
        let (identity_secret, id_commitment) = keygen();

        // We set id_commitment as a leaf after storing its index
        let identity_index = u64::try_from(rln.tree.next_index).unwrap();
        let mut buffer = Cursor::new(field_to_bytes_le(&id_commitment));
        rln.set_next_leaf(&mut buffer).unwrap();

        // We generate a random signal
        let mut rng = rand::thread_rng();
        let signal: [u8; 32] = rng.gen();
        let signal_len = u64::try_from(signal.len()).unwrap();

        // We generate an epoch from a fixed test string
        let epoch = hash_to_field(b"test-epoch");

        // We prepare input for generate_rln_proof API
        // input_data is [ id_key<32> | id_index<8> | epoch<32> | signal_len<8> | signal<var> ]
        let mut serialized: Vec<u8> = Vec::new();
        serialized.append(&mut field_to_bytes_le(&identity_secret));
        serialized.append(&mut identity_index.to_le_bytes().to_vec());
        serialized.append(&mut field_to_bytes_le(&epoch));
        serialized.append(&mut signal_len.to_le_bytes().to_vec());
        serialized.append(&mut signal.to_vec());

        let mut input_buffer = Cursor::new(serialized);
        let mut output_buffer = Cursor::new(Vec::<u8>::new());
        rln.generate_rln_proof(&mut input_buffer, &mut output_buffer)
            .unwrap();

        // output_data is [ proof<128> | share_y<32> | nullifier<32> | root<32> | epoch<32> | share_x<32> | rln_identifier<32> ]
        let mut proof_data = output_buffer.into_inner();

        // We prepare input for verify_rln_proof API
        // input_data is [ proof<128> | share_y<32> | nullifier<32> | root<32> | epoch<32> | share_x<32> | rln_identifier<32> | signal_len<8> | signal<var> ]
        // that is [ proof_data || signal_len<8> | signal<var> ]
        proof_data.append(&mut signal_len.to_le_bytes().to_vec());
        proof_data.append(&mut signal.to_vec());

        let mut input_buffer = Cursor::new(proof_data);
        let verified = rln.verify_rln_proof(&mut input_buffer).unwrap();

        assert!(verified);
    }

    #[test]
    fn test_hash_to_field() {
        let rln = RLN::default();

        let mut rng = rand::thread_rng();
        let signal: [u8; 32] = rng.gen();

        let mut input_buffer = Cursor::new(&signal);
        let mut output_buffer = Cursor::new(Vec::<u8>::new());

        rln.hash(&mut input_buffer, &mut output_buffer).unwrap();
        let serialized_hash = output_buffer.into_inner();
        let (hash1, _) = bytes_le_to_field(&serialized_hash);

        let hash2 = hash_to_field(&signal);

        assert_eq!(hash1, hash2);
    }
}
@ -62,7 +62,8 @@ pub fn str_to_field(input: String, radix: i32) -> Field {
    assert!((radix == 10) || (radix == 16));

    // We remove any quotes present and we trim
    let input_clean = input.replace("\"", "");
    let single_quote: char = '\"';
    let input_clean = input.replace(single_quote, "");
    let input_clean = input_clean.trim();

    if radix == 10 {
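
// A tiny usage sketch for the quote handling above, assuming the usual case of a
// decimal value coming straight out of a JSON document (hence the surrounding quotes):
// the quotes are stripped, the string trimmed, and the result parsed as base-10.
// The helper name is ours and is only illustrative.
fn sketch_str_to_field_usage() -> Field {
    str_to_field(String::from("\"42\""), 10)
}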