chore(rln): remove unnecessary test, restyle

s1fr0 2022-06-02 19:43:59 +02:00
parent 8f738ef327
commit cf6168bf4e
5 changed files with 125 additions and 321 deletions

View File

@@ -1,20 +1,16 @@
-use ark_bn254::{Bn254, Fq, Fq2, Fr, G1Affine, G2Affine};
-use ark_bn254::{G1Projective, G2Projective};
+use ark_bn254::{Bn254, Fq, Fq2, Fr, G1Affine, G1Projective, G2Affine, G2Projective};
 use ark_circom::{read_zkey, CircomBuilder, CircomConfig, WitnessCalculator};
 use ark_ff::BigInteger256;
-/// Adapted from semaphore-rs
 use ark_groth16::{ProvingKey, VerifyingKey};
 use ark_relations::r1cs::ConstraintMatrices;
 use core::include_bytes;
 use num_bigint::BigUint;
-use once_cell::sync::Lazy;
 use serde_json::Value;
 use std::convert::TryFrom;
 use std::fs::File;
 use std::io::{Cursor, Write};
 use std::path::Path;
 use std::str::FromStr;
-use tempfile::NamedTempFile;
 
 const ZKEY_PATH: &str = "./resources/rln_final.zkey";
 const VK_PATH: &str = "./resources/verifying_key.json";
@@ -23,19 +19,19 @@ const WASM_PATH: &str = "./resources/rln.wasm";
 pub fn ZKEY() -> ProvingKey<Bn254> /*, ConstraintMatrices<Fr>)*/ {
     let mut file = File::open(ZKEY_PATH).unwrap();
-    let (provingKey, _matrices) = read_zkey(&mut file).unwrap();
-    provingKey
+    let (proving_key, _matrices) = read_zkey(&mut file).unwrap();
+    proving_key
 }
 
 pub fn VK() -> VerifyingKey<Bn254> {
-    let verifyingKey: VerifyingKey<Bn254>;
+    let verifying_key: VerifyingKey<Bn254>;
 
     if Path::new(VK_PATH).exists() {
-        let verifyingKey = vk_from_json(VK_PATH);
-        verifyingKey
+        verifying_key = vk_from_json(VK_PATH);
+        verifying_key
     } else if Path::new(ZKEY_PATH).exists() {
-        verifyingKey = ZKEY().vk;
-        verifyingKey
+        verifying_key = ZKEY().vk;
+        verifying_key
     } else {
         panic!("No proving/verification key present!");
     }
@@ -44,11 +40,8 @@ pub fn VK() -> VerifyingKey<Bn254> {
 pub fn CIRCOM() -> CircomBuilder<Bn254> {
     // Load the WASM and R1CS for witness and proof generation
     let cfg = CircomConfig::<Bn254>::new(WASM_PATH, R1CS_PATH).unwrap(); // should be )?; but need to address "the trait `From<ErrReport>` is not implemented for `protocol::ProofError`"
-    // We build the circuit
-    let builder = CircomBuilder::new(cfg);
-    builder
+    // We build and return the circuit
+    CircomBuilder::new(cfg)
 }
 
 // Utilities to convert a json verification key in a groth16::VerificationKey
@@ -127,30 +120,15 @@ fn vk_from_json(vk_path: &str) -> VerifyingKey<Bn254> {
     let json = std::fs::read_to_string(vk_path).unwrap();
     let json: Value = serde_json::from_str(&json).unwrap();
 
-    let vk = VerifyingKey {
+    VerifyingKey {
         alpha_g1: json_to_g1(&json, "vk_alpha_1"),
         beta_g2: json_to_g2(&json, "vk_beta_2"),
         gamma_g2: json_to_g2(&json, "vk_gamma_2"),
         delta_g2: json_to_g2(&json, "vk_delta_2"),
         gamma_abc_g1: json_to_g1_vec(&json, "IC"),
-    };
-    return vk;
+    }
 }
 
-pub fn check_vk_from_zkey(verifyingKey: VerifyingKey<Bn254>) {
-    assert_eq!(ZKEY().vk, verifyingKey);
+pub fn check_vk_from_zkey(verifying_key: VerifyingKey<Bn254>) {
+    assert_eq!(ZKEY().vk, verifying_key);
 }
-
-// Not sure this is still useful...
-const WASM: &[u8] = include_bytes!("../resources/rln.wasm");
-
-pub static WITNESS_CALCULATOR: Lazy<WitnessCalculator> = Lazy::new(|| {
-    // HACK: ark-circom requires a file, so we make one!
-    let mut tmpfile = NamedTempFile::new().expect("Failed to create temp file");
-    let written = tmpfile.write(WASM).expect("Failed to write to temp file");
-    assert_eq!(written, WASM.len());
-    let path = tmpfile.into_temp_path();
-    let result = WitnessCalculator::new(&path).expect("Failed to create witness calculator");
-    path.close().expect("Could not remove tempfile");
-    result
-});
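After this change, callers obtain the circuit artifacts only through the three loaders above; the lazily initialized WITNESS_CALCULATOR is gone. A minimal sketch of how they might be used together, mirroring the test in lib.rs (the rln::circuit path and the presence of the resources/ files are assumptions):

use rln::circuit::{check_vk_from_zkey, CIRCOM, VK, ZKEY};

fn load_circuit_artifacts() {
    let proving_key = ZKEY(); // reads ./resources/rln_final.zkey
    let verification_key = VK(); // reads verifying_key.json, or falls back to the zkey
    let _builder = CIRCOM(); // loads rln.wasm + rln.r1cs for witness generation

    // Whichever branch VK() took, it should agree with the key embedded in the zkey
    check_vk_from_zkey(verification_key);
    let _ = proving_key;
}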

View File

@@ -1,20 +1,17 @@
 #![allow(dead_code)]
 #![allow(unused_imports)]
 
-pub mod ffi;
-pub mod public;
-
+use crate::circuit::{CIRCOM, VK, ZKEY};
 use ark_bn254::{Fr, Parameters};
 use ark_ec::bn::Bn;
+use ark_std::str::FromStr;
 
 pub mod circuit;
+pub mod ffi;
 pub mod protocol;
+pub mod public;
 
 pub type Field = Fr;
-pub type Groth16Proof = ark_groth16::Proof<Bn<Parameters>>;
-pub type EthereumGroth16Proof = ark_circom::ethereum::Proof;
-
-use crate::circuit::{CIRCOM, VK, ZKEY};
 
 #[cfg(test)]
 mod test {
@@ -28,68 +25,68 @@ mod test {
     };
 
     #[test]
+    // We test Merkle Tree generation, proofs and verification
     fn test_merkle_proof() {
-        let leaf = Field::from(0);
+        let tree_height = 16;
+        let leaf_index = 3;
 
         // generate identity
-        let id = Identity::from_seed(b"hello");
+        // We follow zk-kit approach for identity generation
+        let id = Identity::from_seed(b"test-merkle-proof");
+        let identity_secret = poseidon_hash(&vec![id.trapdoor, id.nullifier]);
+        let id_commitment = poseidon_hash(&vec![identity_secret]);
 
         // generate merkle tree
-        let mut tree = PoseidonTree::new(21, leaf);
-        tree.set(0, id.commitment());
+        let default_leaf = Field::from(0);
+        let mut tree = PoseidonTree::new(tree_height, default_leaf);
+        tree.set(leaf_index, id_commitment.into());
 
-        let merkle_proof = tree.proof(0).expect("proof should exist");
-        let root: Field = tree.root().into();
+        // We check correct computation of the root
+        let root = tree.root();
 
-        println!("Root: {:#}", root);
-        println!("Merkle proof: {:#?}", merkle_proof);
-    }
-
-    #[test]
-    fn test_semaphore() {
-        let leaf = Field::from(0);
-
-        // generate identity
-        let id = Identity::from_seed(b"hello");
-
-        // generate merkle tree
-        let mut tree = PoseidonTree::new(21, leaf);
-        tree.set(0, id.commitment());
-
-        let merkle_proof = tree.proof(0).expect("proof should exist");
-        let root = tree.root().into();
-
-        // change signal_hash and external_nullifier here
-        let signal_hash = hash_to_field(b"xxx");
-        let external_nullifier_hash = hash_to_field(b"appId");
-
-        let nullifier_hash =
-            semaphore::protocol::generate_nullifier_hash(&id, external_nullifier_hash);
-
-        let proof = semaphore::protocol::generate_proof(
-            &id,
-            &merkle_proof,
-            external_nullifier_hash,
-            signal_hash,
-        )
-        .unwrap();
-
-        let success = semaphore::protocol::verify_proof(
-            root,
-            nullifier_hash,
-            signal_hash,
-            external_nullifier_hash,
-            &proof,
-        )
-        .unwrap();
-
-        assert!(success);
+        assert_eq!(
+            root,
+            Field::from_str("0x27401a4559ce263630907ce3b77c570649e28ede22d2a7f5296839627a16e870")
+                .unwrap()
+        );
+
+        let merkle_proof = tree.proof(leaf_index).expect("proof should exist");
+        let path_elements = get_path_elements(&merkle_proof);
+        let identity_path_index = get_identity_path_index(&merkle_proof);
+
+        // We check correct computation of the path and indexes
+        let expected_path_elements = vec![
+            "0",
+            "14744269619966411208579211824598458697587494354926760081771325075741142829156",
+            "7423237065226347324353380772367382631490014989348495481811164164159255474657",
+            "11286972368698509976183087595462810875513684078608517520839298933882497716792",
+            "3607627140608796879659380071776844901612302623152076817094415224584923813162",
+            "19712377064642672829441595136074946683621277828620209496774504837737984048981",
+            "20775607673010627194014556968476266066927294572720319469184847051418138353016",
+            "3396914609616007258851405644437304192397291162432396347162513310381425243293",
+            "21551820661461729022865262380882070649935529853313286572328683688269863701601",
+            "6573136701248752079028194407151022595060682063033565181951145966236778420039",
+            "12413880268183407374852357075976609371175688755676981206018884971008854919922",
+            "14271763308400718165336499097156975241954733520325982997864342600795471836726",
+            "20066985985293572387227381049700832219069292839614107140851619262827735677018",
+            "9394776414966240069580838672673694685292165040808226440647796406499139370960",
+            "11331146992410411304059858900317123658895005918277453009197229807340014528524"
+        ];
+        let expected_identity_path_index: Vec<u8> =
+            vec![1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
+
+        assert_eq!(path_elements, expected_path_elements);
+        assert_eq!(identity_path_index, expected_identity_path_index);
+
+        // We check correct verification of the proof
+        assert!(tree.verify(id_commitment.into(), &merkle_proof));
     }
 
     #[test]
+    // We test a RLN proof generation and verification
     fn test_end_to_end() {
-        let TREE_HEIGHT = 16;
-        let leafIndex = 3;
+        let tree_height = 16;
+        let leaf_index = 3;
 
         // Generate identity
         // We follow zk-kit approach for identity generation
@@ -98,11 +95,11 @@ mod test {
         let id_commitment = poseidon_hash(&vec![identity_secret]);
 
         //// generate merkle tree
-        let leaf = Field::from(0);
-        let mut tree = PoseidonTree::new(TREE_HEIGHT, leaf);
-        tree.set(leafIndex, id_commitment.into());
+        let default_leaf = Field::from(0);
+        let mut tree = PoseidonTree::new(tree_height, default_leaf);
+        tree.set(leaf_index, id_commitment.into());
 
-        let merkle_proof = tree.proof(leafIndex).expect("proof should exist");
+        let merkle_proof = tree.proof(leaf_index).expect("proof should exist");
 
         let signal = b"hey hey";
         let x = hash_to_field(signal);
@@ -111,39 +108,20 @@ mod test {
         let epoch = hash_to_field(b"test-epoch");
         let rln_identifier = hash_to_field(b"test-rln-identifier");
 
-        let rlnWitness: RLNWitnessInput =
-            initRLNWitnessFromValues(identity_secret, &merkle_proof, x, epoch, rln_identifier);
-
-        println!("rlnWitness: {:#?}", rlnWitness);
+        let rln_witness: RLNWitnessInput =
+            rln_witness_from_values(identity_secret, &merkle_proof, x, epoch, rln_identifier);
 
         // We generate all relevant keys
-        let provingKey = &ZKEY();
-        let verificationKey = &VK();
+        let proving_key = &ZKEY();
+        let verification_key = &VK();
         let builder = CIRCOM();
 
         // Let's generate a zkSNARK proof
-        let (proof, inputs) = generate_proof(builder, provingKey, rlnWitness).unwrap();
+        let (proof, inputs) = generate_proof(builder, proving_key, rln_witness).unwrap();
 
         // Let's verify the proof
-        let success = verify_proof(verificationKey, proof, inputs).unwrap();
+        let success = verify_proof(verification_key, proof, inputs).unwrap();
 
         assert!(success);
     }
-
-    //to_str_radix(10);
-    //
-    //// change signal_hash and external_nullifier_hash here
-    //let signal_hash = hash_to_field(b"xxx");
-    //let external_nullifier_hash = hash_to_field(b"appId");
-    //
-    //let nullifier_hash = generate_nullifier_hash(&id, external_nullifier_hash);
-    //
-    //
-    //// We generate all relevant keys
-    //let provingKey = &ZKEY();
-    //let verificationKey = &VK();
-    //let mut builder = CIRCOM();
-    //println!("Proof: {:#?}", proof);
 }

View File

@@ -1,19 +1,13 @@
 use color_eyre::Result;
 
 // Tracing
 use ark_relations::r1cs::{ConstraintLayer, ConstraintTrace, TracingMode};
 use tracing_subscriber::layer::SubscriberExt;
 
 // JSON
 use rln::circuit::{CIRCOM, VK, ZKEY};
-use rln::protocol::{generate_proof, initRLNWitnessFromJSON, verify_proof};
+use rln::protocol::{generate_proof, rln_witness_from_json, verify_proof};
 
 // RLN
 
 fn groth16_proof_example() -> Result<()> {
@@ -72,18 +66,18 @@ fn groth16_proof_example() -> Result<()> {
     "#;
 
     // We generate all relevant keys
-    let provingKey = &ZKEY();
-    let verificationKey = &VK();
+    let proving_key = &ZKEY();
+    let verification_key = &VK();
     let builder = CIRCOM();
 
     // We compute witness from the json input example
-    let rlnWitness = initRLNWitnessFromJSON(input_json_str);
+    let rln_witness = rln_witness_from_json(input_json_str);
 
     // Let's generate a zkSNARK proof
-    let (proof, inputs) = generate_proof(builder, provingKey, rlnWitness).unwrap();
+    let (proof, inputs) = generate_proof(builder, proving_key, rln_witness).unwrap();
 
     // Let's verify the proof
-    let verified = verify_proof(verificationKey, proof, inputs);
+    let verified = verify_proof(verification_key, proof, inputs);
 
     assert!(verified.unwrap());
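For reference, rln_witness_from_json deserializes a JSON object with the same fields as RLNWitnessInput (identity_secret, path_elements, identity_path_index, x, epoch, rln_identifier). A minimal sketch of the expected shape with placeholder values; the actual input_json_str used by this example is elided above:

// Placeholder values: shows the JSON shape only, not a usable witness.
let input_json_str = r#"
{
    "identity_secret": "1234",
    "path_elements": ["0", "0"],
    "identity_path_index": [0, 1],
    "x": "5678",
    "epoch": "0x0000000000000000000000000000000000000000000000000000000000000001",
    "rln_identifier": "1"
}
"#;
let rln_witness = rln_witness_from_json(input_json_str);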

View File

@@ -1,6 +1,6 @@
-/// Adapted from semaphore-rs
-use crate::circuit::{VK, WITNESS_CALCULATOR, ZKEY};
+use crate::circuit::{VK, ZKEY};
 use ark_bn254::{Bn254, Fr, Parameters};
+use ark_circom::{read_zkey, CircomBuilder, CircomConfig, CircomReduction};
 use ark_ec::bn::Bn;
 use ark_ff::{Fp256, PrimeField};
 use ark_groth16::{
@@ -25,7 +25,9 @@ use serde::{Deserialize, Serialize};
 use std::time::Instant;
 use thiserror::Error;
 
-use ark_circom::{read_zkey, CircomBuilder, CircomConfig, CircomReduction};
+///////////////////////////////////////////////////////
+// RLN Witness data structure and utility functions
+///////////////////////////////////////////////////////
 
 #[derive(Debug, Deserialize)]
 pub struct RLNWitnessInput {
@@ -37,36 +39,37 @@ pub struct RLNWitnessInput {
     rln_identifier: String,
 }
 
-pub fn initRLNWitnessFromJSON(input_json_str: &str) -> RLNWitnessInput {
-    let rlnWitness: RLNWitnessInput =
-        serde_json::from_str(&input_json_str).expect("JSON was not well-formatted");
-    return rlnWitness;
+pub fn rln_witness_from_json(input_json_str: &str) -> RLNWitnessInput {
+    let rln_witness: RLNWitnessInput =
+        serde_json::from_str(input_json_str).expect("JSON was not well-formatted");
+    rln_witness
 }
 
-pub fn initRLNWitnessFromValues(
+pub fn rln_witness_from_values(
     identity_secret: Field,
     merkle_proof: &merkle_tree::Proof<PoseidonHash>,
     x: Field,
     epoch: Field,
     rln_identifier: Field,
 ) -> RLNWitnessInput {
-    //println!("Merkle proof: {:#?}", merkle_proof);
-    let path_elements = getPathElements(merkle_proof);
-    let identity_path_index = getIdentityPathIndex(merkle_proof);
+    let path_elements = get_path_elements(merkle_proof);
+    let identity_path_index = get_identity_path_index(merkle_proof);
 
-    let rlnWitness = RLNWitnessInput {
+    let rln_witness = RLNWitnessInput {
         identity_secret: BigInt::from(identity_secret).to_str_radix(10),
-        path_elements: path_elements,
-        identity_path_index: identity_path_index,
+        path_elements,
+        identity_path_index,
         x: BigInt::from(x).to_str_radix(10),
         epoch: format!("{:#066x}", BigInt::from(epoch)), //We format it as a padded 32 bytes hex with leading 0x for compatibility with zk-kit
         rln_identifier: BigInt::from(rln_identifier).to_str_radix(10),
     };
 
-    return rlnWitness;
+    rln_witness
 }
 
-// TODO Fields need to be updated to RLN based ones
+///////////////////////////////////////////////////////
+// Proof data structure and utility functions
+///////////////////////////////////////////////////////
 
 // Matches the private G1Tup type in ark-circom.
 pub type G1 = (U256, U256);
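A note on the epoch encoding kept above: {:#066x} prints the value with a 0x prefix, zero-padded to 64 hex digits (32 bytes), which is the padded form used for compatibility with zk-kit. A quick sketch of what that yields:

// 66 characters total = "0x" + 64 hex digits (32 bytes), zero-padded on the left.
let epoch = num_bigint::BigInt::from(1u8);
assert_eq!(
    format!("{:#066x}", epoch),
    "0x0000000000000000000000000000000000000000000000000000000000000001"
);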
@@ -108,9 +111,13 @@ impl From<Proof> for ArkProof<Bn<Parameters>> {
     }
 }
 
+///////////////////////////////////////////////////////
+// Merkle tree utility functions
+///////////////////////////////////////////////////////
+
 /// Helper to merkle proof into a bigint vector
 /// TODO: we should create a From trait for this
-pub fn getPathElements(proof: &merkle_tree::Proof<PoseidonHash>) -> Vec<String> {
+pub fn get_path_elements(proof: &merkle_tree::Proof<PoseidonHash>) -> Vec<String> {
     proof
         .0
         .iter()
@@ -120,7 +127,7 @@ pub fn getPathElements(proof: &merkle_tree::Proof<PoseidonHash>) -> Vec<String>
         .collect()
 }
 
-pub fn getIdentityPathIndex(proof: &merkle_tree::Proof<PoseidonHash>) -> Vec<u8> {
+pub fn get_identity_path_index(proof: &merkle_tree::Proof<PoseidonHash>) -> Vec<u8> {
     proof
         .0
         .iter()
@@ -131,6 +138,10 @@ pub fn getIdentityPathIndex(proof: &merkle_tree::Proof<PoseidonHash>) -> Vec<u8>
         .collect()
 }
 
+///////////////////////////////////////////////////////
+// Signal/nullifier utility functions
+///////////////////////////////////////////////////////
+
 /// Internal helper to hash the signal to make sure it's in the field
 fn hash_signal(signal: &[u8]) -> Field {
     let hash = keccak256(signal);
@@ -146,6 +157,10 @@ pub fn generate_nullifier_hash(identity: &Identity, external_nullifier: Field) -
     poseidon_hash(&[external_nullifier, identity.nullifier])
 }
 
+///////////////////////////////////////////////////////
+// zkSNARK utility functions
+///////////////////////////////////////////////////////
+
 #[derive(Error, Debug)]
 pub enum ProofError {
     #[error("Error reading circuit key: {0}")]
@@ -232,12 +247,12 @@ pub fn generate_proof(
 /// Returns a [`ProofError`] if verifying fails. Verification failure does not
 /// necessarily mean the proof is incorrect.
 pub fn verify_proof(
-    verifyingKey: &VerifyingKey<Bn254>,
+    verifying_key: &VerifyingKey<Bn254>,
     proof: Proof,
     inputs: Vec<Fr>,
 ) -> Result<bool, ProofError> {
     // Check that the proof is valid
-    let pvk = prepare_verifying_key(verifyingKey);
+    let pvk = prepare_verifying_key(verifying_key);
     let pr: ArkProof<Bn254> = proof.into();
     let verified = ark_verify_proof(&pvk, &pr, &inputs)?;
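Taken together, the renamed snake_case helpers are exercised as in test_end_to_end in lib.rs. A condensed sketch; the rln and semaphore paths, and the inputs being prepared as in that test, are assumptions:

use rln::circuit::{CIRCOM, VK, ZKEY};
use rln::protocol::{generate_proof, rln_witness_from_values, verify_proof};
use rln::Field;
use semaphore::{merkle_tree, poseidon_tree::PoseidonHash};

// identity_secret, merkle_proof, x, epoch and rln_identifier are assumed to be
// computed as in test_end_to_end (Poseidon identity commitment inserted in the tree).
fn prove_and_verify(
    identity_secret: Field,
    merkle_proof: &merkle_tree::Proof<PoseidonHash>,
    x: Field,
    epoch: Field,
    rln_identifier: Field,
) -> bool {
    let rln_witness =
        rln_witness_from_values(identity_secret, merkle_proof, x, epoch, rln_identifier);

    let proving_key = &ZKEY();
    let verification_key = &VK();
    let builder = CIRCOM();

    let (proof, inputs) = generate_proof(builder, proving_key, rln_witness).unwrap();
    verify_proof(verification_key, proof, inputs).unwrap()
}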

View File

@@ -1,32 +1,21 @@
 /// This is the main public API for RLN. It is used by the FFI, and should be
 /// used by tests etc as well
 ///
-use semaphore::{
-    hash_to_field, identity::Identity, poseidon_tree::PoseidonTree, protocol::*, Field,
-};
-
-use ark_circom::{CircomBuilder, CircomCircuit, CircomConfig};
-use ark_std::rand::thread_rng;
-
 use ark_bn254::Bn254;
+use ark_circom::{CircomBuilder, CircomCircuit, CircomConfig};
 use ark_groth16::{
     create_random_proof as prove, generate_random_parameters, prepare_verifying_key, verify_proof,
     Proof, ProvingKey,
 };
 use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
-// , SerializationError};
-use std::io::{self, Read, Write};
-
+use ark_std::rand::thread_rng;
 use num_bigint::BigInt;
-
-// JSON
+use semaphore::{
+    hash_to_field, identity::Identity, poseidon_tree::PoseidonTree, protocol::*, Field,
+};
 use serde::Deserialize;
 use serde_json;
-
-// For RLN Rust version
-//use bellman::pairing::ff::{Field, PrimeField, PrimeFieldRepr, ScalarEngine};
-//use sapling_crypto::bellman::pairing::bn256::Bn256;
+use std::io::{self, Read, Write};
 
 // TODO Add Engine here? i.e. <E: Engine> not <Bn254>
 // TODO Assuming we want to use IncrementalMerkleTree, figure out type/trait conversions
@@ -34,8 +23,6 @@ use serde_json;
 pub struct RLN {
     circom: CircomCircuit<Bn254>,
     params: ProvingKey<Bn254>,
-    // RLN Rust version
-    //tree: IncrementalMerkleTree<Bn256>,
     tree: PoseidonTree,
 }
@@ -81,131 +68,6 @@ impl RLN {
         }
     }
 
-    // XXX This is a tempory hack to get end to end proving/verification working
-    // Not supposed to be part of public API
-    pub fn new_json_spike() -> RLN {
-        let cfg =
-            CircomConfig::<Bn254>::new("./resources/rln.wasm", "./resources/rln.r1cs").unwrap();
-
-        // TODO Refactor
-        // From rln JSON witness
-        // Input generated with https://github.com/oskarth/zk-kit/commit/b6a872f7160c7c14e10a0ea40acab99cbb23c9a8
-        let input_json_str = r#"
-            {
-                "identity_secret": "12825549237505733615964533204745049909430608936689388901883576945030025938736",
-                "path_elements": [
-                    "18622655742232062119094611065896226799484910997537830749762961454045300666333",
-                    "20590447254980891299813706518821659736846425329007960381537122689749540452732",
-                    "7423237065226347324353380772367382631490014989348495481811164164159255474657",
-                    "11286972368698509976183087595462810875513684078608517520839298933882497716792",
-                    "3607627140608796879659380071776844901612302623152076817094415224584923813162",
-                    "19712377064642672829441595136074946683621277828620209496774504837737984048981",
-                    "20775607673010627194014556968476266066927294572720319469184847051418138353016",
-                    "3396914609616007258851405644437304192397291162432396347162513310381425243293",
-                    "21551820661461729022865262380882070649935529853313286572328683688269863701601",
-                    "6573136701248752079028194407151022595060682063033565181951145966236778420039",
-                    "12413880268183407374852357075976609371175688755676981206018884971008854919922",
-                    "14271763308400718165336499097156975241954733520325982997864342600795471836726",
-                    "20066985985293572387227381049700832219069292839614107140851619262827735677018",
-                    "9394776414966240069580838672673694685292165040808226440647796406499139370960",
-                    "11331146992410411304059858900317123658895005918277453009197229807340014528524"
-                ],
-                "identity_path_index": [
-                    1,
-                    1,
-                    0,
-                    0,
-                    0,
-                    0,
-                    0,
-                    0,
-                    0,
-                    0,
-                    0,
-                    0,
-                    0,
-                    0,
-                    0
-                ],
-                "x": "8143228284048792769012135629627737459844825626241842423967352803501040982",
-                "epoch": "0x0000005b612540fc986b42322f8cb91c2273afad58ed006fdba0c97b4b16b12f",
-                "rln_identifier": "11412926387081627876309792396682864042420635853496105400039841573530884328439"
-            }
-            "#;
-
-        let witness_input: WitnessInput =
-            serde_json::from_str(input_json_str).expect("JSON was not well-formatted");
-
-        println!("Witness input JSON: {:?}", witness_input);
-
-        let mut builder = CircomBuilder::new(cfg);
-
-        builder.push_input(
-            "identity_secret",
-            BigInt::parse_bytes(witness_input.identity_secret.as_bytes(), 10).unwrap(),
-        );
-
-        for v in witness_input.path_elements.iter() {
-            builder.push_input(
-                "path_elements",
-                BigInt::parse_bytes(v.as_bytes(), 10).unwrap(),
-            );
-        }
-
-        for v in witness_input.identity_path_index.iter() {
-            builder.push_input("identity_path_index", BigInt::from(*v));
-        }
-
-        builder.push_input(
-            "x",
-            BigInt::parse_bytes(witness_input.x.as_bytes(), 10).unwrap(),
-        );
-
-        builder.push_input(
-            "epoch",
-            BigInt::parse_bytes(
-                witness_input.epoch.strip_prefix("0x").unwrap().as_bytes(),
-                16,
-            )
-            .unwrap(),
-        );
-
-        builder.push_input(
-            "rln_identifier",
-            BigInt::parse_bytes(witness_input.rln_identifier.as_bytes(), 10).unwrap(),
-        );
-
-        println!("Builder input:\n {:#?}", builder.inputs);
-
-        // create an empty instance for setting it up
-        let circom = builder.setup();
-
-        let mut rng = thread_rng();
-        let params = generate_random_parameters::<Bn254, _, _>(circom, &mut rng).unwrap();
-
-        let circom = builder.build().unwrap();
-
-        let inputs = circom.get_public_inputs().unwrap();
-        println!("Public inputs {:#?} ", inputs);
-
-        // Sapling based tree
-        // // TODO Add as parameter(s)
-        // let merkle_depth: usize = 3;
-        // let poseidon_params = PoseidonParams::<Bn256>::new(8, 55, 3, None, None, None);
-        // let hasher = PoseidonHasher::new(poseidon_params.clone());
-        // let tree = IncrementalMerkleTree::empty(hasher, merkle_depth);
-
-        let leaf = Field::from(0);
-        let tree = PoseidonTree::new(21, leaf);
-
-        RLN {
-            circom,
-            params,
-            tree,
-        }
-    }
-
     /// returns current membership root
     /// * `root` is a scalar field element in 32 bytes
     pub fn get_root<W: Write>(&self, _result_data: W) -> io::Result<()> {
@@ -260,26 +122,3 @@ impl Default for RLN {
         Self::new()
     }
 }
-
-// NOTE: Expensive test, ignoring by default
-#[ignore]
-#[test]
-fn rln_proof() {
-    let rln = RLN::new();
-    let rln_spike = RLN::new_json_spike();
-
-    //let inputs = mul.circom.get_public_inputs().unwrap();
-    let mut output_data: Vec<u8> = Vec::new();
-    let _ = rln_spike.prove(&mut output_data);
-
-    let proof_data = &output_data[..];
-
-    // XXX Pass as arg?
-    //let pvk = prepare_verifying_key(&mul.params.vk);
-
-    // XXX: Something is wrong here I think, because it doesn't verify with the
-    // full proof fields like yShare - just witness? Might be a bug
-    let verified = rln.verify(proof_data).unwrap();
-
-    assert!(verified);
-}