Merge pull request #18 from vacp2p/fix-e2e

RLN: fix proof generation/verification and E2E tests, add RLN utilities
Commit 98ebd24261 by G, 2022-06-09 14:27:21 +02:00 (committed via GitHub)
14 changed files with 711 additions and 1025 deletions


@@ -17,6 +17,7 @@ num-bigint = { version = "0.4", default-features = false, features = ["rand"] }
# ZKP Generation
ark-ec = { version = "0.3.0", default-features = false, features = ["parallel"] }
ark-ff = { version = "0.3.0", default-features = false, features = ["parallel", "asm"] }
+#ark-ff = { git = "https://github.com/arkworks-rs/algebra/"}
ark-std = { version = "0.3.0", default-features = false, features = ["parallel"] }
ark-bn254 = { version = "0.3.0" }
ark-groth16 = { git = "https://github.com/arkworks-rs/groth16", rev = "765817f", features = ["parallel"] }
@@ -64,7 +65,9 @@ blake2 = "0.8.1"
sapling-crypto = { package = "sapling-crypto_ce", version = "0.1.3", default-features = false }
bellman = { package = "bellman_ce", version = "0.3.4", default-features = false }
-semaphore = { git = "https://github.com/oskarth/semaphore-rs" }
+#semaphore = { git = "https://github.com/oskarth/semaphore-rs" }
+semaphore = { git = "https://github.com/worldcoin/semaphore-rs", rev = "d462a43"}
tempfile = "3.3.0"

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.


@@ -1,28 +1,137 @@
-/// Adapted from semaphore-rs
-use ark_bn254::{Bn254, Fr};
-use ark_circom::{read_zkey, WitnessCalculator};
-use ark_groth16::ProvingKey;
+use ark_bn254::{Bn254, Fq, Fq2, Fr, G1Affine, G1Projective, G2Affine, G2Projective};
+use ark_circom::{read_zkey, CircomBuilder, CircomConfig, WitnessCalculator};
+use ark_ff::BigInteger256;
+use ark_groth16::{ProvingKey, VerifyingKey};
use ark_relations::r1cs::ConstraintMatrices;
use core::include_bytes;
-use once_cell::sync::Lazy;
+use num_bigint::BigUint;
+use serde_json::Value;
+use std::convert::TryFrom;
+use std::fs::File;
use std::io::{Cursor, Write};
-use tempfile::NamedTempFile;
+use std::path::Path;
+use std::str::FromStr;

-const ZKEY_BYTES: &[u8] = include_bytes!("../resources/rln_final.zkey");
-const WASM: &[u8] = include_bytes!("../resources/rln.wasm");
+const ZKEY_PATH: &str = "./resources/rln_final.zkey";
+const VK_PATH: &str = "./resources/verifying_key.json";
+const R1CS_PATH: &str = "./resources/rln.r1cs";
+const WASM_PATH: &str = "./resources/rln.wasm";

-pub static ZKEY: Lazy<(ProvingKey<Bn254>, ConstraintMatrices<Fr>)> = Lazy::new(|| {
-let mut reader = Cursor::new(ZKEY_BYTES);
-read_zkey(&mut reader).expect("zkey should be valid")
-});
+pub fn ZKEY() -> ProvingKey<Bn254> /*, ConstraintMatrices<Fr>)*/ {
+let mut file = File::open(ZKEY_PATH).unwrap();
+let (proving_key, _matrices) = read_zkey(&mut file).unwrap();
+proving_key
+}

-pub static WITNESS_CALCULATOR: Lazy<WitnessCalculator> = Lazy::new(|| {
-// HACK: ark-circom requires a file, so we make one!
-let mut tmpfile = NamedTempFile::new().expect("Failed to create temp file");
-let written = tmpfile.write(WASM).expect("Failed to write to temp file");
-assert_eq!(written, WASM.len());
-let path = tmpfile.into_temp_path();
-let result = WitnessCalculator::new(&path).expect("Failed to create witness calculator");
-path.close().expect("Could not remove tempfile");
-result
-});
+pub fn VK() -> VerifyingKey<Bn254> {
+let verifying_key: VerifyingKey<Bn254>;
+
+if Path::new(VK_PATH).exists() {
+verifying_key = vk_from_json(VK_PATH);
+verifying_key
+} else if Path::new(ZKEY_PATH).exists() {
+verifying_key = ZKEY().vk;
+verifying_key
+} else {
+panic!("No proving/verification key present!");
+}
+}
pub fn CIRCOM() -> CircomBuilder<Bn254> {
// Load the WASM and R1CS for witness and proof generation
let cfg = CircomConfig::<Bn254>::new(WASM_PATH, R1CS_PATH).unwrap(); // should be )?; but need to address "the trait `From<ErrReport>` is not implemented for `protocol::ProofError`"
// We build and return the circuit
CircomBuilder::new(cfg)
}
// Utilities to convert a json verification key in a groth16::VerificationKey
fn fq_from_str(s: &str) -> Fq {
Fq::try_from(BigUint::from_str(s).unwrap()).unwrap() //was BigInteger256:: and .into()
}
// Extracts the element in G1 corresponding to its JSON serialization
fn json_to_g1(json: &Value, key: &str) -> G1Affine {
let els: Vec<String> = json
.get(key)
.unwrap()
.as_array()
.unwrap()
.iter()
.map(|i| i.as_str().unwrap().to_string())
.collect();
G1Affine::from(G1Projective::new(
fq_from_str(&els[0]),
fq_from_str(&els[1]),
fq_from_str(&els[2]),
))
}
// Extracts the vector of G1 elements corresponding to its JSON serialization
fn json_to_g1_vec(json: &Value, key: &str) -> Vec<G1Affine> {
let els: Vec<Vec<String>> = json
.get(key)
.unwrap()
.as_array()
.unwrap()
.iter()
.map(|i| {
i.as_array()
.unwrap()
.iter()
.map(|x| x.as_str().unwrap().to_string())
.collect::<Vec<String>>()
})
.collect();
els.iter()
.map(|coords| {
G1Affine::from(G1Projective::new(
fq_from_str(&coords[0]),
fq_from_str(&coords[1]),
fq_from_str(&coords[2]),
))
})
.collect()
}
// Extracts the element in G2 corresponding to its JSON serialization
fn json_to_g2(json: &Value, key: &str) -> G2Affine {
let els: Vec<Vec<String>> = json
.get(key)
.unwrap()
.as_array()
.unwrap()
.iter()
.map(|i| {
i.as_array()
.unwrap()
.iter()
.map(|x| x.as_str().unwrap().to_string())
.collect::<Vec<String>>()
})
.collect();
let x = Fq2::new(fq_from_str(&els[0][0]), fq_from_str(&els[0][1]));
let y = Fq2::new(fq_from_str(&els[1][0]), fq_from_str(&els[1][1]));
let z = Fq2::new(fq_from_str(&els[2][0]), fq_from_str(&els[2][1]));
G2Affine::from(G2Projective::new(x, y, z))
}
// Computes the verification key from its JSON serialization
fn vk_from_json(vk_path: &str) -> VerifyingKey<Bn254> {
let json = std::fs::read_to_string(vk_path).unwrap();
let json: Value = serde_json::from_str(&json).unwrap();
VerifyingKey {
alpha_g1: json_to_g1(&json, "vk_alpha_1"),
beta_g2: json_to_g2(&json, "vk_beta_2"),
gamma_g2: json_to_g2(&json, "vk_gamma_2"),
delta_g2: json_to_g2(&json, "vk_delta_2"),
gamma_abc_g1: json_to_g1_vec(&json, "IC"),
}
}
// Checks verification key to be correct with respect to proving key
pub fn check_vk_from_zkey(verifying_key: VerifyingKey<Bn254>) {
assert_eq!(ZKEY().vk, verifying_key);
}
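Note: the helpers above are meant to be used together when preparing proving and verification; a minimal usage sketch (illustrative only, not part of this diff, and assuming the ./resources artifacts are present):

use rln::circuit::{check_vk_from_zkey, CIRCOM, VK, ZKEY};

fn load_keys() {
    // Groth16 proving key, read from rln_final.zkey
    let _proving_key = ZKEY();
    // Verifying key: parsed from verifying_key.json if present, otherwise taken from the zkey
    let verifying_key = VK();
    // Consistency check between the JSON verifying key and the one embedded in the zkey
    check_vk_from_zkey(verifying_key);
    // Circom builder backed by rln.wasm + rln.r1cs, used later for witness generation
    let _builder = CIRCOM();
}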


@@ -1,22 +1,16 @@
#![allow(dead_code)]
#![allow(unused_imports)]

-pub mod ffi;
-pub mod public;
+use crate::circuit::{CIRCOM, VK, ZKEY};
use ark_bn254::{Fr, Parameters};
use ark_ec::bn::Bn;
+use ark_std::str::FromStr;

pub mod circuit;
+pub mod ffi;
pub mod protocol;
+pub mod public;
+pub mod utils;

-pub type Field = Fr;
-pub type Groth16Proof = ark_groth16::Proof<Bn<Parameters>>;
-pub type EthereumGroth16Proof = ark_circom::ethereum::Proof;
-
-// RLN lib
-pub mod merkle;
-pub mod poseidon;

#[cfg(test)]
mod test {
@@ -25,116 +19,192 @@ mod test {
use hex_literal::hex;
use num_bigint::BigInt;
use semaphore::{
-hash::Hash, hash_to_field, identity::Identity, poseidon_tree::PoseidonTree, Field,
+hash::Hash, hash_to_field, identity::Identity, poseidon_hash, poseidon_tree::PoseidonTree,
+Field,
};

#[test]
+// We test Merkle Tree generation, proofs and verification
fn test_merkle_proof() {
-let leaf = Field::from(0);
+let tree_height = 16;
+let leaf_index = 3;

// generate identity
-let id = Identity::from_seed(b"hello");
+// We follow zk-kit approach for identity generation
+let id = Identity::from_seed(b"test-merkle-proof");
+let identity_secret = poseidon_hash(&vec![id.trapdoor, id.nullifier]);
+let id_commitment = poseidon_hash(&vec![identity_secret]);

// generate merkle tree
-let mut tree = PoseidonTree::new(21, leaf);
-tree.set(0, id.commitment());
+let default_leaf = Field::from(0);
+let mut tree = PoseidonTree::new(tree_height, default_leaf);
+tree.set(leaf_index, id_commitment.into());

-let merkle_proof = tree.proof(0).expect("proof should exist");
-let root: Field = tree.root().into();
-
-println!("Root: {:#}", root);
-println!("Merkle proof: {:#?}", merkle_proof);
+// We check correct computation of the root
+let root = tree.root();
+assert_eq!(
+root,
+Field::from_str("0x27401a4559ce263630907ce3b77c570649e28ede22d2a7f5296839627a16e870")
+.unwrap()
+);

+let merkle_proof = tree.proof(leaf_index).expect("proof should exist");
+let path_elements = get_path_elements(&merkle_proof);
+let identity_path_index = get_identity_path_index(&merkle_proof);
// We check correct computation of the path and indexes
let expected_path_elements = vec![
Field::from_str("0x0000000000000000000000000000000000000000000000000000000000000000")
.unwrap(),
Field::from_str("0x2098f5fb9e239eab3ceac3f27b81e481dc3124d55ffed523a839ee8446b64864")
.unwrap(),
Field::from_str("0x1069673dcdb12263df301a6ff584a7ec261a44cb9dc68df067a4774460b1f1e1")
.unwrap(),
Field::from_str("0x18f43331537ee2af2e3d758d50f72106467c6eea50371dd528d57eb2b856d238")
.unwrap(),
Field::from_str("0x07f9d837cb17b0d36320ffe93ba52345f1b728571a568265caac97559dbc952a")
.unwrap(),
Field::from_str("0x2b94cf5e8746b3f5c9631f4c5df32907a699c58c94b2ad4d7b5cec1639183f55")
.unwrap(),
Field::from_str("0x2dee93c5a666459646ea7d22cca9e1bcfed71e6951b953611d11dda32ea09d78")
.unwrap(),
Field::from_str("0x078295e5a22b84e982cf601eb639597b8b0515a88cb5ac7fa8a4aabe3c87349d")
.unwrap(),
Field::from_str("0x2fa5e5f18f6027a6501bec864564472a616b2e274a41211a444cbe3a99f3cc61")
.unwrap(),
Field::from_str("0x0e884376d0d8fd21ecb780389e941f66e45e7acce3e228ab3e2156a614fcd747")
.unwrap(),
Field::from_str("0x1b7201da72494f1e28717ad1a52eb469f95892f957713533de6175e5da190af2")
.unwrap(),
Field::from_str("0x1f8d8822725e36385200c0b201249819a6e6e1e4650808b5bebc6bface7d7636")
.unwrap(),
Field::from_str("0x2c5d82f66c914bafb9701589ba8cfcfb6162b0a12acf88a8d0879a0471b5f85a")
.unwrap(),
Field::from_str("0x14c54148a0940bb820957f5adf3fa1134ef5c4aaa113f4646458f270e0bfbfd0")
.unwrap(),
Field::from_str("0x190d33b12f986f961e10c0ee44d8b9af11be25588cad89d416118e4bf4ebe80c")
.unwrap(),
];
let expected_identity_path_index: Vec<u8> =
vec![1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
assert_eq!(path_elements, expected_path_elements);
assert_eq!(identity_path_index, expected_identity_path_index);
// We check correct verification of the proof
assert!(tree.verify(id_commitment.into(), &merkle_proof));
}

#[test]
-fn test_semaphore() {
-let leaf = Field::from(0);
+// We test a RLN proof generation and verification
+fn test_witness_from_json() {
// From rln JSON witness
// Input generated with https://github.com/oskarth/zk-kit/commit/b6a872f7160c7c14e10a0ea40acab99cbb23c9a8
let input_json_str = r#"
{
"identity_secret": "12825549237505733615964533204745049909430608936689388901883576945030025938736",
"path_elements": [
"18622655742232062119094611065896226799484910997537830749762961454045300666333",
"20590447254980891299813706518821659736846425329007960381537122689749540452732",
"7423237065226347324353380772367382631490014989348495481811164164159255474657",
"11286972368698509976183087595462810875513684078608517520839298933882497716792",
"3607627140608796879659380071776844901612302623152076817094415224584923813162",
"19712377064642672829441595136074946683621277828620209496774504837737984048981",
"20775607673010627194014556968476266066927294572720319469184847051418138353016",
"3396914609616007258851405644437304192397291162432396347162513310381425243293",
"21551820661461729022865262380882070649935529853313286572328683688269863701601",
"6573136701248752079028194407151022595060682063033565181951145966236778420039",
"12413880268183407374852357075976609371175688755676981206018884971008854919922",
"14271763308400718165336499097156975241954733520325982997864342600795471836726",
"20066985985293572387227381049700832219069292839614107140851619262827735677018",
"9394776414966240069580838672673694685292165040808226440647796406499139370960",
"11331146992410411304059858900317123658895005918277453009197229807340014528524"
],
"identity_path_index": [
1,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
],
"x": "8143228284048792769012135629627737459844825626241842423967352803501040982",
"epoch": "0x0000005b612540fc986b42322f8cb91c2273afad58ed006fdba0c97b4b16b12f",
"rln_identifier": "11412926387081627876309792396682864042420635853496105400039841573530884328439"
}
"#;
-// generate identity
+// We generate all relevant keys
+let proving_key = &ZKEY();
+let verification_key = &VK();
+let builder = CIRCOM();
+
+// We compute witness from the json input example
+let rln_witness = rln_witness_from_json(input_json_str);
+
+// Let's generate a zkSNARK proof
+let proof = generate_proof(builder, proving_key, &rln_witness).unwrap();
+
+let proof_values = proof_values_from_witness(&rln_witness);
+
+// Let's verify the proof
+let verified = verify_proof(verification_key, proof, &proof_values);
+
+assert!(verified.unwrap());
+}
+
+#[test]
+// We test a RLN proof generation and verification
+fn test_end_to_end() {
+let tree_height = 16;
+let leaf_index = 3;
+
+// Generate identity
+// We follow zk-kit approach for identity generation
let id = Identity::from_seed(b"hello");
+let identity_secret = poseidon_hash(&vec![id.trapdoor, id.nullifier]);
+let id_commitment = poseidon_hash(&vec![identity_secret]);

-// generate merkle tree
-let mut tree = PoseidonTree::new(21, leaf);
-tree.set(0, id.commitment());
+//// generate merkle tree
+let default_leaf = Field::from(0);
+let mut tree = PoseidonTree::new(tree_height, default_leaf);
+tree.set(leaf_index, id_commitment.into());

-let merkle_proof = tree.proof(0).expect("proof should exist");
-let root = tree.root().into();
+let merkle_proof = tree.proof(leaf_index).expect("proof should exist");

-// change signal_hash and external_nullifier here
-let signal_hash = hash_to_field(b"xxx");
-let external_nullifier_hash = hash_to_field(b"appId");
-
-let nullifier_hash =
-semaphore::protocol::generate_nullifier_hash(&id, external_nullifier_hash);
+let signal = b"hey hey";
+let x = hash_to_field(signal);
+
+// We set the remaining values to random ones
+let epoch = hash_to_field(b"test-epoch");
+let rln_identifier = hash_to_field(b"test-rln-identifier");

-let proof = semaphore::protocol::generate_proof(
-&id,
-&merkle_proof,
-external_nullifier_hash,
-signal_hash,
-)
-.unwrap();
+let rln_witness: RLNWitnessInput =
+rln_witness_from_values(identity_secret, &merkle_proof, x, epoch, rln_identifier);

-let success = semaphore::protocol::verify_proof(
-root,
-nullifier_hash,
-signal_hash,
-external_nullifier_hash,
-&proof,
-)
-.unwrap();
+// We generate all relevant keys
+let proving_key = &ZKEY();
+let verification_key = &VK();
+let builder = CIRCOM();
+
+// Let's generate a zkSNARK proof
+let proof = generate_proof(builder, proving_key, &rln_witness).unwrap();
+
+let proof_values = proof_values_from_witness(&rln_witness);
+
+// Let's verify the proof
+let success = verify_proof(verification_key, proof, &proof_values).unwrap();

assert!(success);
}
#[ignore]
#[test]
fn test_end_to_end() {
let leaf = Field::from(0);
// generate identity
let id = Identity::from_seed(b"hello");
// generate merkle tree
let mut tree = PoseidonTree::new(21, leaf);
tree.set(0, id.commitment().into());
let merkle_proof = tree.proof(0).expect("proof should exist");
let root = tree.root().into();
println!("Root: {:#}", root);
println!("Merkle proof: {:#?}", merkle_proof);
// change signal_hash and external_nullifier_hash here
let signal_hash = hash_to_field(b"xxx");
let external_nullifier_hash = hash_to_field(b"appId");
let nullifier_hash = generate_nullifier_hash(&id, external_nullifier_hash);
let proof =
generate_proof(&id, &merkle_proof, external_nullifier_hash, signal_hash).unwrap();
println!("Proof: {:#?}", proof);
// TODO Make this test pass
//
// Currently fails at:
// thread 'test::test_end_to_end' panicked at 'called `Result::unwrap()`
// on an `Err` value: SynthesisError(MalformedVerifyingKey)',
// rln/src/lib.rs:62:84
//
// Not sure why this is MalformedVerifyingKey, though the proof is
// likely incorrect with wrong fields in protocol.rs
//
// Indeed:
// if (public_inputs.len() + 1) != pvk.vk.gamma_abc_g1.len() {
let success = verify_proof(
root,
nullifier_hash,
signal_hash,
external_nullifier_hash,
&proof,
)
.unwrap();
}
}
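Note: the y value used throughout these tests comes from the RLN sharing scheme A(x) = a_0 + a_1*x, where a_0 is the identity secret and a_1 = poseidon(a_0, epoch); publishing two shares for the same epoch therefore reveals a_0. A minimal sketch of that recovery over the BN254 scalar field (illustrative only, not part of this commit):

use ark_bn254::Fr;

// A(x) = a0 + a1 * x, with a0 the identity secret and a1 = poseidon(a0, epoch)
fn share(a0: Fr, a1: Fr, x: Fr) -> Fr {
    a0 + a1 * x
}

// Two distinct shares for the same epoch are enough to recover a0 (the slashing condition)
fn recover_secret(x1: Fr, y1: Fr, x2: Fr, y2: Fr) -> Fr {
    let a1 = (y1 - y2) / (x1 - x2);
    y1 - a1 * x1
}

fn main() {
    let (a0, a1) = (Fr::from(1234u64), Fr::from(5678u64));
    let (x1, x2) = (Fr::from(10u64), Fr::from(20u64));
    let (y1, y2) = (share(a0, a1, x1), share(a0, a1, x2));
    assert_eq!(recover_secret(x1, y1, x2, y2), a0);
}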


@@ -1,38 +1,15 @@
-/// This is basic entry point for `cargo run` to play around with proof,
-/// outputs, etc.
-///
-use ark_circom::{CircomBuilder, CircomConfig};
-use ark_std::rand::thread_rng;
use color_eyre::Result;
-use ark_bn254::Bn254;
-use ark_groth16::{
-create_random_proof, generate_random_parameters, prepare_verifying_key, verify_proof,
-};
-use num_bigint::BigInt;

// Tracing
use ark_relations::r1cs::{ConstraintLayer, ConstraintTrace, TracingMode};
-//use tracing::{event, span, Level};
use tracing_subscriber::layer::SubscriberExt;

// JSON
-use serde::Deserialize;
-//use serde_json;

-#[derive(Debug, Deserialize)]
-//#[serde(rename_all = "camelCase")]
-struct WitnessInput {
-identity_secret: String,
-path_elements: Vec<String>,
-identity_path_index: Vec<i32>,
-x: String,
-epoch: String,
-rln_identifier: String,
-}
+use rln::circuit::{CIRCOM, VK, ZKEY};
+use rln::protocol::{
+generate_proof, proof_values_from_witness, rln_witness_from_json, verify_proof,
+};

-// TODO: This should use public.rs as much as possible
// RLN
fn groth16_proof_example() -> Result<()> {
@@ -45,122 +22,68 @@ fn groth16_proof_example() -> Result<()> {
let trace = ConstraintTrace::capture();
println!("Trace is: {:?}", trace);

-let cfg = CircomConfig::<Bn254>::new("./resources/rln.wasm", "./resources/rln.r1cs")?;
-
-// Test
-let trace = ConstraintTrace::capture();
-println!("Trace is: {:?}", trace);
// From rln JSON witness // From rln JSON witness
// Input generated with https://github.com/oskarth/zk-kit/commit/b6a872f7160c7c14e10a0ea40acab99cbb23c9a8 // Input generated with https://github.com/oskarth/zk-kit/commit/b6a872f7160c7c14e10a0ea40acab99cbb23c9a8
let input_json_str = r#" let input_json_str = r#"
{ {
"identity_secret": "12825549237505733615964533204745049909430608936689388901883576945030025938736", "identity_secret": "12825549237505733615964533204745049909430608936689388901883576945030025938736",
"path_elements": [ "path_elements": [
"18622655742232062119094611065896226799484910997537830749762961454045300666333", "18622655742232062119094611065896226799484910997537830749762961454045300666333",
"20590447254980891299813706518821659736846425329007960381537122689749540452732", "20590447254980891299813706518821659736846425329007960381537122689749540452732",
"7423237065226347324353380772367382631490014989348495481811164164159255474657", "7423237065226347324353380772367382631490014989348495481811164164159255474657",
"11286972368698509976183087595462810875513684078608517520839298933882497716792", "11286972368698509976183087595462810875513684078608517520839298933882497716792",
"3607627140608796879659380071776844901612302623152076817094415224584923813162", "3607627140608796879659380071776844901612302623152076817094415224584923813162",
"19712377064642672829441595136074946683621277828620209496774504837737984048981", "19712377064642672829441595136074946683621277828620209496774504837737984048981",
"20775607673010627194014556968476266066927294572720319469184847051418138353016", "20775607673010627194014556968476266066927294572720319469184847051418138353016",
"3396914609616007258851405644437304192397291162432396347162513310381425243293", "3396914609616007258851405644437304192397291162432396347162513310381425243293",
"21551820661461729022865262380882070649935529853313286572328683688269863701601", "21551820661461729022865262380882070649935529853313286572328683688269863701601",
"6573136701248752079028194407151022595060682063033565181951145966236778420039", "6573136701248752079028194407151022595060682063033565181951145966236778420039",
"12413880268183407374852357075976609371175688755676981206018884971008854919922", "12413880268183407374852357075976609371175688755676981206018884971008854919922",
"14271763308400718165336499097156975241954733520325982997864342600795471836726", "14271763308400718165336499097156975241954733520325982997864342600795471836726",
"20066985985293572387227381049700832219069292839614107140851619262827735677018", "20066985985293572387227381049700832219069292839614107140851619262827735677018",
"9394776414966240069580838672673694685292165040808226440647796406499139370960", "9394776414966240069580838672673694685292165040808226440647796406499139370960",
"11331146992410411304059858900317123658895005918277453009197229807340014528524" "11331146992410411304059858900317123658895005918277453009197229807340014528524"
], ],
"identity_path_index": [ "identity_path_index": [
1, 1,
1, 1,
0, 0,
0, 0,
0, 0,
0, 0,
0, 0,
0, 0,
0, 0,
0, 0,
0, 0,
0, 0,
0, 0,
0, 0,
0 0
], ],
"x": "8143228284048792769012135629627737459844825626241842423967352803501040982", "x": "8143228284048792769012135629627737459844825626241842423967352803501040982",
"epoch": "0x0000005b612540fc986b42322f8cb91c2273afad58ed006fdba0c97b4b16b12f", "epoch": "0x0000005b612540fc986b42322f8cb91c2273afad58ed006fdba0c97b4b16b12f",
"rln_identifier": "11412926387081627876309792396682864042420635853496105400039841573530884328439" "rln_identifier": "11412926387081627876309792396682864042420635853496105400039841573530884328439"
} }
"#; "#;
-let witness_input: WitnessInput =
-serde_json::from_str(input_json_str).expect("JSON was not well-formatted");
-
-println!("Witness input JSON: {:?}", witness_input);
-
-let mut builder = CircomBuilder::new(cfg);
-
-builder.push_input(
-"identity_secret",
-BigInt::parse_bytes(witness_input.identity_secret.as_bytes(), 10).unwrap(),
-);
-
-for v in witness_input.path_elements.iter() {
-builder.push_input(
-"path_elements",
-BigInt::parse_bytes(v.as_bytes(), 10).unwrap(),
-);
-}
-
-for v in witness_input.identity_path_index.iter() {
-builder.push_input("identity_path_index", BigInt::from(*v));
-}
-
-builder.push_input(
-"x",
-BigInt::parse_bytes(witness_input.x.as_bytes(), 10).unwrap(),
-);
-builder.push_input(
-"epoch",
-BigInt::parse_bytes(
-witness_input.epoch.strip_prefix("0x").unwrap().as_bytes(),
-16,
-)
-.unwrap(),
-);
-builder.push_input(
-"rln_identifier",
-BigInt::parse_bytes(witness_input.rln_identifier.as_bytes(), 10).unwrap(),
-);
-
-println!("Builder input:\n {:#?}", builder.inputs);
-
-// create an empty instance for setting it up
-let circom = builder.setup();
-let mut rng = thread_rng();
-let params = generate_random_parameters::<Bn254, _, _>(circom, &mut rng)?;
-
-let circom = builder.build()?;
-let inputs = circom.get_public_inputs().unwrap();
-println!("Public inputs {:#?} ", inputs);
-
-let proof = create_random_proof(circom, &params, &mut rng)?;
-println!("Proof: {:?}", proof);
-
-let pvk = prepare_verifying_key(&params.vk);
-let verified = verify_proof(&pvk, &proof, &inputs)?;
-assert!(verified);
+// We generate all relevant keys
+let proving_key = &ZKEY();
+let verification_key = &VK();
+let builder = CIRCOM();
+
+// We compute witness from the json input example
+let rln_witness = rln_witness_from_json(input_json_str);
+
+// Let's generate a zkSNARK proof
+let proof = generate_proof(builder, proving_key, &rln_witness).unwrap();
+
+let proof_values = proof_values_from_witness(&rln_witness);
+
+// Let's verify the proof
+let verified = verify_proof(verification_key, proof, &proof_values);
+
+assert!(verified.unwrap());

Ok(())
}

@@ -168,7 +91,6 @@ fn groth16_proof_example() -> Result<()> {
fn main() {
println!("rln example proof");
-// Tornado-core
match groth16_proof_example() {
Ok(_) => println!("Success"),
Err(_) => println!("Error"),


@@ -1,227 +0,0 @@
// Adapted from https://github.com/kilic/rln/blob/master/src/merkle.rs
//
use crate::poseidon::{Poseidon as Hasher, PoseidonParams};
// TODO Replace these with arkworks
use sapling_crypto::bellman::pairing::ff::{Field, PrimeField, PrimeFieldRepr};
use sapling_crypto::bellman::pairing::Engine;
use std::io::{self, Error, ErrorKind};
use std::{collections::HashMap, hash::Hash};
pub struct IncrementalMerkleTree<E>
where
E: Engine,
{
pub current_index: usize,
merkle_tree: MerkleTree<E>,
}
impl<E> IncrementalMerkleTree<E>
where
E: Engine,
{
pub fn empty(hasher: Hasher<E>, depth: usize) -> Self {
let mut zero: Vec<E::Fr> = Vec::with_capacity(depth + 1);
zero.push(E::Fr::from_str("0").unwrap());
for i in 0..depth {
zero.push(hasher.hash([zero[i]; 2].to_vec()));
}
zero.reverse();
let merkle_tree = MerkleTree {
hasher: hasher,
zero: zero.clone(),
depth: depth,
nodes: HashMap::new(),
};
let current_index: usize = 0;
IncrementalMerkleTree {
current_index,
merkle_tree,
}
}
pub fn update_next(&mut self, leaf: E::Fr) -> io::Result<()> {
self.merkle_tree.update(self.current_index, leaf)?;
self.current_index += 1;
Ok(())
}
pub fn delete(&mut self, index: usize) -> io::Result<()> {
let zero = E::Fr::from_str("0").unwrap();
self.merkle_tree.update(index, zero)?;
Ok(())
}
pub fn get_witness(&self, index: usize) -> io::Result<Vec<(E::Fr, bool)>> {
if index >= self.current_index {
return Err(io::Error::new(
io::ErrorKind::InvalidInput,
"index exceeds incremental index",
));
}
self.merkle_tree.get_witness(index)
}
pub fn hash(&self, inputs: Vec<E::Fr>) -> E::Fr {
self.merkle_tree.hasher.hash(inputs)
}
pub fn check_inclusion(
&self,
witness: Vec<(E::Fr, bool)>,
leaf_index: usize,
) -> io::Result<bool> {
if leaf_index >= self.current_index {
return Err(io::Error::new(
io::ErrorKind::InvalidInput,
"index exceeds incremental index",
));
}
self.merkle_tree.check_inclusion(witness, leaf_index)
}
pub fn get_root(&self) -> E::Fr {
return self.merkle_tree.get_root();
}
}
pub struct MerkleTree<E>
where
E: Engine,
{
pub hasher: Hasher<E>,
pub depth: usize,
zero: Vec<E::Fr>,
nodes: HashMap<(usize, usize), E::Fr>,
}
impl<E> MerkleTree<E>
where
E: Engine,
{
pub fn empty(hasher: Hasher<E>, depth: usize) -> Self {
let mut zero: Vec<E::Fr> = Vec::with_capacity(depth + 1);
zero.push(E::Fr::from_str("0").unwrap());
for i in 0..depth {
zero.push(hasher.hash([zero[i]; 2].to_vec()));
}
zero.reverse();
MerkleTree {
hasher: hasher,
zero: zero.clone(),
depth: depth,
nodes: HashMap::new(),
}
}
pub fn set_size(&self) -> usize {
1 << self.depth
}
pub fn update(&mut self, index: usize, leaf: E::Fr) -> io::Result<()> {
if index >= self.set_size() {
return Err(io::Error::new(
io::ErrorKind::InvalidInput,
"index exceeds set size",
));
}
self.nodes.insert((self.depth, index), leaf);
self.recalculate_from(index);
Ok(())
}
pub fn check_inclusion(&self, witness: Vec<(E::Fr, bool)>, index: usize) -> io::Result<bool> {
if index >= self.set_size() {
return Err(io::Error::new(
io::ErrorKind::InvalidInput,
"index exceeds set size",
));
}
let mut acc = self.get_node(self.depth, index);
for w in witness.into_iter() {
if w.1 {
acc = self.hasher.hash(vec![acc, w.0]);
} else {
acc = self.hasher.hash(vec![w.0, acc]);
}
}
Ok(acc.eq(&self.get_root()))
}
pub fn get_root(&self) -> E::Fr {
return self.get_node(0, 0);
}
pub fn get_witness(&self, index: usize) -> io::Result<Vec<(E::Fr, bool)>> {
if index >= self.set_size() {
return Err(io::Error::new(
io::ErrorKind::InvalidInput,
"index exceeds set size",
));
}
let mut witness = Vec::<(E::Fr, bool)>::with_capacity(self.depth);
let mut i = index;
let mut depth = self.depth;
loop {
i ^= 1;
witness.push((self.get_node(depth, i), (i & 1 == 1)));
i >>= 1;
depth -= 1;
if depth == 0 {
break;
}
}
assert_eq!(i, 0);
Ok(witness)
}
fn get_node(&self, depth: usize, index: usize) -> E::Fr {
let node = *self
.nodes
.get(&(depth, index))
.unwrap_or_else(|| &self.zero[depth]);
node
}
fn get_leaf(&self, index: usize) -> E::Fr {
self.get_node(self.depth, index)
}
fn hash_couple(&mut self, depth: usize, index: usize) -> E::Fr {
let b = index & !1;
self.hasher
.hash([self.get_node(depth, b), self.get_node(depth, b + 1)].to_vec())
}
fn recalculate_from(&mut self, index: usize) {
let mut i = index;
let mut depth = self.depth;
loop {
let h = self.hash_couple(depth, i);
i >>= 1;
depth -= 1;
self.nodes.insert((depth, i), h);
if depth == 0 {
break;
}
}
assert_eq!(depth, 0);
assert_eq!(i, 0);
}
}
#[test]
fn test_merkle_set() {
let data: Vec<Fr> = (0..8)
.map(|s| Fr::from_str(&format!("{}", s)).unwrap())
.collect();
use sapling_crypto::bellman::pairing::bn256::{Bn256, Fr, FrRepr};
let params = PoseidonParams::<Bn256>::new(8, 55, 3, None, None, None);
let hasher = Hasher::new(params);
let mut set = MerkleTree::empty(hasher.clone(), 3);
let leaf_index = 6;
let leaf = hasher.hash(vec![data[0]]);
set.update(leaf_index, leaf).unwrap();
let witness = set.get_witness(leaf_index).unwrap();
assert!(set.check_inclusion(witness, leaf_index).unwrap());
}


@@ -1,233 +0,0 @@
// Adapted from https://github.com/kilic/rln/blob/master/src/poseidon.rs
//
use blake2::{Blake2s, Digest};
use sapling_crypto::bellman::pairing::ff::{Field, PrimeField, PrimeFieldRepr};
use sapling_crypto::bellman::pairing::Engine;
// TODO: Using arkworks libs here instead
//use ff::{Field, PrimeField, PrimeFieldRepr};
//use ark_ec::{PairingEngine as Engine};
#[derive(Clone)]
pub struct PoseidonParams<E: Engine> {
rf: usize,
rp: usize,
t: usize,
round_constants: Vec<E::Fr>,
mds_matrix: Vec<E::Fr>,
}
#[derive(Clone)]
pub struct Poseidon<E: Engine> {
params: PoseidonParams<E>,
}
impl<E: Engine> PoseidonParams<E> {
pub fn new(
rf: usize,
rp: usize,
t: usize,
round_constants: Option<Vec<E::Fr>>,
mds_matrix: Option<Vec<E::Fr>>,
seed: Option<Vec<u8>>,
) -> PoseidonParams<E> {
let seed = match seed {
Some(seed) => seed,
None => b"".to_vec(),
};
let _round_constants = match round_constants {
Some(round_constants) => round_constants,
None => PoseidonParams::<E>::generate_constants(b"drlnhdsc", seed.clone(), rf + rp),
};
assert_eq!(rf + rp, _round_constants.len());
let _mds_matrix = match mds_matrix {
Some(mds_matrix) => mds_matrix,
None => PoseidonParams::<E>::generate_mds_matrix(b"drlnhdsm", seed.clone(), t),
};
PoseidonParams {
rf,
rp,
t,
round_constants: _round_constants,
mds_matrix: _mds_matrix,
}
}
pub fn width(&self) -> usize {
return self.t;
}
pub fn partial_round_len(&self) -> usize {
return self.rp;
}
pub fn full_round_half_len(&self) -> usize {
return self.rf / 2;
}
pub fn total_rounds(&self) -> usize {
return self.rf + self.rp;
}
pub fn round_constant(&self, round: usize) -> E::Fr {
return self.round_constants[round];
}
pub fn mds_matrix_row(&self, i: usize) -> Vec<E::Fr> {
let w = self.width();
self.mds_matrix[i * w..(i + 1) * w].to_vec()
}
pub fn mds_matrix(&self) -> Vec<E::Fr> {
self.mds_matrix.clone()
}
pub fn generate_mds_matrix(persona: &[u8; 8], seed: Vec<u8>, t: usize) -> Vec<E::Fr> {
let v: Vec<E::Fr> = PoseidonParams::<E>::generate_constants(persona, seed, t * 2);
let mut matrix: Vec<E::Fr> = Vec::with_capacity(t * t);
for i in 0..t {
for j in 0..t {
let mut tmp = v[i];
tmp.add_assign(&v[t + j]);
let entry = tmp.inverse().unwrap();
matrix.insert((i * t) + j, entry);
}
}
matrix
}
pub fn generate_constants(persona: &[u8; 8], seed: Vec<u8>, len: usize) -> Vec<E::Fr> {
let mut constants: Vec<E::Fr> = Vec::new();
let mut source = seed.clone();
loop {
let mut hasher = Blake2s::new();
hasher.input(persona);
hasher.input(source);
source = hasher.result().to_vec();
let mut candidate_repr = <E::Fr as PrimeField>::Repr::default();
candidate_repr.read_le(&source[..]).unwrap();
if let Ok(candidate) = E::Fr::from_repr(candidate_repr) {
constants.push(candidate);
if constants.len() == len {
break;
}
}
}
constants
}
}
impl<E: Engine> Poseidon<E> {
pub fn new(params: PoseidonParams<E>) -> Poseidon<E> {
Poseidon { params }
}
pub fn hash(&self, inputs: Vec<E::Fr>) -> E::Fr {
let mut state = inputs.clone();
state.resize(self.t(), E::Fr::zero());
let mut round_counter: usize = 0;
loop {
self.round(&mut state, round_counter);
round_counter += 1;
if round_counter == self.params.total_rounds() {
break;
}
}
state[0]
}
fn t(&self) -> usize {
self.params.t
}
fn round(&self, state: &mut Vec<E::Fr>, round: usize) {
let a1 = self.params.full_round_half_len();
let a2 = a1 + self.params.partial_round_len();
let a3 = self.params.total_rounds();
if round < a1 {
self.full_round(state, round);
} else if round >= a1 && round < a2 {
self.partial_round(state, round);
} else if round >= a2 && round < a3 {
if round == a3 - 1 {
self.full_round_last(state);
} else {
self.full_round(state, round);
}
} else {
panic!("should not be here")
}
}
fn full_round(&self, state: &mut Vec<E::Fr>, round: usize) {
self.add_round_constants(state, round);
self.apply_quintic_sbox(state, true);
self.mul_mds_matrix(state);
}
fn full_round_last(&self, state: &mut Vec<E::Fr>) {
let last_round = self.params.total_rounds() - 1;
self.add_round_constants(state, last_round);
self.apply_quintic_sbox(state, true);
}
fn partial_round(&self, state: &mut Vec<E::Fr>, round: usize) {
self.add_round_constants(state, round);
self.apply_quintic_sbox(state, false);
self.mul_mds_matrix(state);
}
fn add_round_constants(&self, state: &mut Vec<E::Fr>, round: usize) {
for (_, b) in state.iter_mut().enumerate() {
let c = self.params.round_constants[round];
b.add_assign(&c);
}
}
fn apply_quintic_sbox(&self, state: &mut Vec<E::Fr>, full: bool) {
for s in state.iter_mut() {
let mut b = s.clone();
b.square();
b.square();
s.mul_assign(&b);
if !full {
break;
}
}
}
fn mul_mds_matrix(&self, state: &mut Vec<E::Fr>) {
let w = self.params.t;
let mut new_state = vec![E::Fr::zero(); w];
for (i, ns) in new_state.iter_mut().enumerate() {
for (j, s) in state.iter().enumerate() {
let mut tmp = s.clone();
tmp.mul_assign(&self.params.mds_matrix[i * w + j]);
ns.add_assign(&tmp);
}
}
for (i, ns) in new_state.iter_mut().enumerate() {
state[i].clone_from(ns);
}
}
}
#[test]
fn test_poseidon_hash() {
use sapling_crypto::bellman::pairing::bn256;
use sapling_crypto::bellman::pairing::bn256::{Bn256, Fr};
let params = PoseidonParams::<Bn256>::new(8, 55, 3, None, None, None);
let hasher = Poseidon::<Bn256>::new(params);
let input1: Vec<Fr> = ["0"].iter().map(|e| Fr::from_str(e).unwrap()).collect();
let r1: Fr = hasher.hash(input1);
let input2: Vec<Fr> = ["0", "0"]
.iter()
.map(|e| Fr::from_str(e).unwrap())
.collect();
let r2: Fr = hasher.hash(input2.to_vec());
// println!("{:?}", r1);
assert_eq!(r1, r2, "just to see if internal state resets");
}
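Note: with merkle.rs and poseidon.rs deleted, the tree and hash primitives now come from the semaphore crate (PoseidonTree and poseidon_hash), as used in the tests above. A minimal sketch of the replacement primitives (illustrative only):

use semaphore::{poseidon_hash, poseidon_tree::PoseidonTree, Field};

fn poseidon_tree_example() {
    // Commitment is the Poseidon hash of the identity secret, following the zk-kit derivation
    let identity_secret = poseidon_hash(&[Field::from(1), Field::from(2)]);
    let id_commitment = poseidon_hash(&[identity_secret]);

    // Fixed-height Poseidon Merkle tree with a default leaf, as in the new tests
    let mut tree = PoseidonTree::new(16, Field::from(0));
    tree.set(0, id_commitment.into());
    let merkle_proof = tree.proof(0).expect("proof should exist");
    assert!(tree.verify(id_commitment.into(), &merkle_proof));
}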


@@ -1,14 +1,15 @@
-/// Adapted from semaphore-rs
-use crate::circuit::{WITNESS_CALCULATOR, ZKEY};
-use ark_bn254::{Bn254, Parameters};
-use ark_circom::CircomReduction;
+use crate::circuit::{VK, ZKEY};
+use ark_bn254::{Bn254, Fr, Parameters};
+use ark_circom::{read_zkey, CircomBuilder, CircomConfig, CircomReduction};
use ark_ec::bn::Bn;
use ark_ff::{Fp256, PrimeField};
use ark_groth16::{
-create_proof_with_reduction_and_matrices, prepare_verifying_key, Proof as ArkProof,
+create_proof_with_reduction_and_matrices, create_random_proof_with_reduction,
+prepare_verifying_key, verify_proof as ark_verify_proof, Proof as ArkProof, ProvingKey,
+VerifyingKey,
};
use ark_relations::r1cs::SynthesisError;
-use ark_std::{rand::thread_rng, UniformRand};
+use ark_std::{rand::thread_rng, str::FromStr, UniformRand};
use color_eyre::Result;
use ethers_core::utils::keccak256;
use num_bigint::{BigInt, BigUint, ToBigInt};
@@ -24,7 +25,190 @@ use serde::{Deserialize, Serialize};
use std::time::Instant;
use thiserror::Error;

-// TODO Fields need to be updated to RLN based ones
+pub use crate::utils::{add, bytes_to_field, mul, str_to_field, vec_to_field, vec_to_fr};
///////////////////////////////////////////////////////
// RLN Witness data structure and utility functions
///////////////////////////////////////////////////////
#[derive(Debug, Deserialize, Clone)]
pub struct RLNWitnessInput {
identity_secret: Field,
path_elements: Vec<Field>,
identity_path_index: Vec<u8>,
x: Field,
epoch: Field,
rln_identifier: Field,
}
#[derive(Debug, Deserialize, Clone)]
pub struct RLNProofValues {
// Public outputs:
y: Field,
nullifier: Field,
root: Field,
// Public Inputs:
x: Field,
epoch: Field,
rln_identifier: Field,
}
pub fn rln_witness_from_json(input_json_str: &str) -> RLNWitnessInput {
let input_json: serde_json::Value =
serde_json::from_str(input_json_str).expect("JSON was not well-formatted");
let identity_secret = str_to_field(input_json["identity_secret"].to_string(), 10);
let mut path_elements: Vec<Field> = vec![];
for v in input_json["path_elements"].as_array().unwrap().iter() {
path_elements.push(str_to_field(v.to_string(), 10));
}
let mut identity_path_index: Vec<u8> = vec![];
for v in input_json["identity_path_index"].as_array().unwrap().iter() {
identity_path_index.push(v.as_u64().unwrap() as u8);
}
let x = str_to_field(input_json["x"].to_string(), 10);
let epoch = str_to_field(input_json["epoch"].to_string(), 16);
let rln_identifier = str_to_field(input_json["rln_identifier"].to_string(), 10);
RLNWitnessInput {
identity_secret,
path_elements,
identity_path_index,
x,
epoch,
rln_identifier,
}
}
pub fn rln_witness_from_values(
identity_secret: Field,
merkle_proof: &merkle_tree::Proof<PoseidonHash>,
x: Field,
epoch: Field,
rln_identifier: Field,
) -> RLNWitnessInput {
let path_elements = get_path_elements(merkle_proof);
let identity_path_index = get_identity_path_index(merkle_proof);
RLNWitnessInput {
identity_secret,
path_elements,
identity_path_index,
x,
epoch,
rln_identifier,
}
}
pub fn proof_values_from_witness(rln_witness: &RLNWitnessInput) -> RLNProofValues {
// y share
let a_0 = rln_witness.identity_secret;
let a_1 = poseidon_hash(&[a_0, rln_witness.epoch]);
let y = mul(rln_witness.x, a_1);
let y = add(y, a_0);
// Nullifier
let nullifier = poseidon_hash(&[a_1, rln_witness.rln_identifier]);
// Merkle tree root computations
let mut root = poseidon_hash(&[rln_witness.identity_secret]);
for i in 0..rln_witness.identity_path_index.len() {
if rln_witness.identity_path_index[i] == 0 {
root = poseidon_hash(&[root, rln_witness.path_elements[i]]);
} else {
root = poseidon_hash(&[rln_witness.path_elements[i], root]);
}
}
let root = get_tree_root(
rln_witness.identity_secret,
&rln_witness.path_elements,
&rln_witness.identity_path_index,
true,
);
RLNProofValues {
y,
nullifier,
root,
x: rln_witness.x,
epoch: rln_witness.epoch,
rln_identifier: rln_witness.rln_identifier,
}
}
///////////////////////////////////////////////////////
// Merkle tree utility functions
///////////////////////////////////////////////////////
/// Helper to merkle proof into a bigint vector
/// TODO: we should create a From trait for this
pub fn get_path_elements(proof: &merkle_tree::Proof<PoseidonHash>) -> Vec<Field> {
proof
.0
.iter()
.map(|x| match x {
Branch::Left(value) | Branch::Right(value) => *value,
})
.collect()
}
pub fn get_identity_path_index(proof: &merkle_tree::Proof<PoseidonHash>) -> Vec<u8> {
proof
.0
.iter()
.map(|branch| match branch {
Branch::Left(_) => 0,
Branch::Right(_) => 1,
})
.collect()
}
pub fn get_tree_root(
leaf: Field,
path_elements: &[Field],
identity_path_index: &[u8],
hash_leaf: bool,
) -> Field {
let mut root = leaf;
if hash_leaf {
root = poseidon_hash(&[root]);
}
for i in 0..identity_path_index.len() {
if identity_path_index[i] == 0 {
root = poseidon_hash(&[root, path_elements[i]]);
} else {
root = poseidon_hash(&[path_elements[i], root]);
}
}
root
}
///////////////////////////////////////////////////////
// Signal/nullifier utility functions
///////////////////////////////////////////////////////
fn hash_signal(signal: &[u8]) -> Field {
let hash = keccak256(signal);
bytes_to_field(&hash)
}
/// Generates the nullifier hash
#[must_use]
pub fn generate_nullifier_hash(identity: &Identity, external_nullifier: Field) -> Field {
poseidon_hash(&[external_nullifier, identity.nullifier])
}
///////////////////////////////////////////////////////
// Proof data structure and utility functions
///////////////////////////////////////////////////////
// Matches the private G1Tup type in ark-circom.
pub type G1 = (U256, U256);
@@ -66,32 +250,9 @@ impl From<Proof> for ArkProof<Bn<Parameters>> {
}
}

-/// Helper to merkle proof into a bigint vector
-/// TODO: we should create a From trait for this
-fn merkle_proof_to_vec(proof: &merkle_tree::Proof<PoseidonHash>) -> Vec<Field> {
-proof
-.0
-.iter()
-.map(|x| match x {
-Branch::Left(value) | Branch::Right(value) => *value,
-})
-.collect()
-}
+///////////////////////////////////////////////////////
+// zkSNARK utility functions
+///////////////////////////////////////////////////////
/// Internal helper to hash the signal to make sure it's in the field
fn hash_signal(signal: &[u8]) -> Field {
let hash = keccak256(signal);
// Shift right one byte to make it fit in the field
let mut bytes = [0_u8; 32];
bytes[1..].copy_from_slice(&hash[..31]);
Field::from_be_bytes_mod_order(&bytes)
}
/// Generates the nullifier hash
#[must_use]
pub fn generate_nullifier_hash(identity: &Identity, external_nullifier: Field) -> Field {
poseidon_hash(&[external_nullifier, identity.nullifier])
}
#[derive(Error, Debug)]
pub enum ProofError {
@@ -103,121 +264,86 @@ pub enum ProofError {
SynthesisError(#[from] SynthesisError),
}
-// XXX This is different from zk-kit API:
-// const witness = RLN.genWitness(secretHash, merkleProof, epoch, signal, rlnIdentifier)
-// const fullProof = await RLN.genProof(witness, wasmFilePath, finalZkeyPath)
-//
-// TODO Change API here
-/// Generates a semaphore proof
+/// Generates a RLN proof
///
/// # Errors
///
/// Returns a [`ProofError`] if proving fails.
pub fn generate_proof(
-identity: &Identity,
-merkle_proof: &merkle_tree::Proof<PoseidonHash>,
-external_nullifier_hash: Field,
-signal_hash: Field,
+mut builder: CircomBuilder<Bn254>,
+proving_key: &ProvingKey<Bn254>,
+rln_witness: &RLNWitnessInput,
) -> Result<Proof, ProofError> {
-// TODO Fix inputs
-// Semaphore genWitness corresponds to these
-// RLN different, should be:
-// identity_secret
-// path_elements (merkleProof.siblings))
-// identity_path_index (merkleProof.pathIndices)
-// x (RLN.genSignalHash(signal), assuming shouldHash is true)
-// epoch
-// rln_identifier
-let inputs = [
-// FIXME should be identity_secret, not just nullifier!
-("identity_secret", vec![identity.nullifier]),
-//("identityTrapdoor", vec![identity.trapdoor]),
-("path_elements", merkle_proof_to_vec(merkle_proof)),
-("identity_path_index", merkle_proof.path_index()),
-("externalNullifier", vec![external_nullifier_hash]),
-// XXX: Assuming signal is hashed
-("x", vec![signal_hash]),
-// FIXME epoch just hardcoded to random value
-("epoch", vec![signal_hash]),
-// FIXME rln_identifier just hardcoded to random value
-("rln_identifier", vec![signal_hash]),
-];
-let inputs = inputs.into_iter().map(|(name, values)| {
-(
-name.to_string(),
-values.iter().copied().map(Into::into).collect::<Vec<_>>(),
-)
-});

let now = Instant::now();

-let full_assignment = WITNESS_CALCULATOR
-.clone()
-.calculate_witness_element::<Bn254, _>(inputs, false)
-.map_err(ProofError::WitnessError)?;
+builder.push_input("identity_secret", BigInt::from(rln_witness.identity_secret));
+for v in rln_witness.path_elements.iter() {
+builder.push_input("path_elements", BigInt::from(*v));
+}
+for v in rln_witness.identity_path_index.iter() {
+builder.push_input("identity_path_index", BigInt::from(*v));
+}
+builder.push_input("x", BigInt::from(rln_witness.x));
+builder.push_input("epoch", BigInt::from(rln_witness.epoch));
+builder.push_input("rln_identifier", BigInt::from(rln_witness.rln_identifier));
+let circom = builder.build().unwrap();
+// This can be checked against proof_values_from_witness
+// Get the populated instance of the circuit with the witness
+//let inputs = vec_to_field(circom.get_public_inputs().unwrap());

println!("witness generation took: {:.2?}", now.elapsed());

-let mut rng = thread_rng();
-let rng = &mut rng;
-let r = ark_bn254::Fr::rand(rng);
-let s = ark_bn254::Fr::rand(rng);

let now = Instant::now();

-let ark_proof = create_proof_with_reduction_and_matrices::<_, CircomReduction>(
-&ZKEY.0,
-r,
-s,
-&ZKEY.1,
-ZKEY.1.num_instance_variables,
-ZKEY.1.num_constraints,
-full_assignment.as_slice(),
-)?;
+// Generate a random proof
+let mut rng = thread_rng();
+let ark_proof = create_random_proof_with_reduction::<_, _, _, CircomReduction>(
+circom,
+proving_key,
+&mut rng,
+)
+.unwrap();

let proof = ark_proof.into();
println!("proof generation took: {:.2?}", now.elapsed());

Ok(proof)
}
-// TODO Update API here
-/// Verifies a given semaphore proof
+/// Verifies a given RLN proof
///
/// # Errors
///
/// Returns a [`ProofError`] if verifying fails. Verification failure does not
/// necessarily mean the proof is incorrect.
pub fn verify_proof(
-root: Field,
-nullifier_hash: Field,
-signal_hash: Field,
-external_nullifier_hash: Field,
-proof: &Proof,
+verifying_key: &VerifyingKey<Bn254>,
+proof: Proof,
+proof_values: &RLNProofValues,
) -> Result<bool, ProofError> {
-// XXX: Why is verification key in zkey but that's not what is used in
-// verifyProof with verification_key.json? Is there a difference?
-let pvk = prepare_verifying_key(&ZKEY.0.vk);
-
-// TODO Update this, should be:
-// XXX This is returned from the proof! Why is it called yShare here?
-// Isn't this publicOutput?
-// publicSignals 0..5 in specific order:
-// yShare
-// merkleRoot
-// internalNullifier
-// signalHash
-// epoch
-// rlnIdentifier
-let public_inputs = vec![
-root.into(),
-nullifier_hash.into(),
-signal_hash.into(),
-external_nullifier_hash.into(),
-];
-let ark_proof = (*proof).into();
-let result = ark_groth16::verify_proof(&pvk, &ark_proof, &public_inputs[..])?;
-Ok(result)
+// We re-arrange proof-values according to the circuit specification
+let inputs = vec![
+proof_values.y,
+proof_values.root,
+proof_values.nullifier,
+proof_values.x,
+proof_values.epoch,
+proof_values.rln_identifier,
+];
+
+// Check that the proof is valid
+let pvk = prepare_verifying_key(verifying_key);
+let pr: ArkProof<Bn254> = proof.into();
+let verified = ark_verify_proof(&pvk, &pr, &vec_to_fr(inputs))?;
+Ok(verified)
}
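Note on the input ordering above: the circuit exposes six public signals (y, root, nullifier, x, epoch, rln_identifier), and Groth16 verification requires public_inputs.len() + 1 == vk.gamma_abc_g1.len(); this is exactly the MalformedVerifyingKey condition quoted in the old, now-removed end-to-end test. A quick consistency check one could run (illustrative sketch, not part of this diff):

use rln::circuit::VK;

fn check_public_input_count() {
    // y, root, nullifier, x, epoch, rln_identifier -> 6 public signals, plus the constant 1
    let vk = VK();
    assert_eq!(vk.gamma_abc_g1.len(), 6 + 1);
}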


@@ -1,34 +1,24 @@
/// This is the main public API for RLN. It is used by the FFI, and should be
/// used by tests etc as well
///
-use crate::merkle::IncrementalMerkleTree;
-use crate::poseidon::{Poseidon as PoseidonHasher, PoseidonParams};
-use semaphore::{
-hash_to_field, identity::Identity, poseidon_tree::PoseidonTree, protocol::*, Field,
-};
-use ark_circom::{CircomBuilder, CircomCircuit, CircomConfig};
-use ark_std::rand::thread_rng;
use ark_bn254::Bn254;
+use ark_circom::{CircomBuilder, CircomCircuit, CircomConfig};
use ark_groth16::{
create_random_proof as prove, generate_random_parameters, prepare_verifying_key, verify_proof,
Proof, ProvingKey,
};
use ark_serialize::{CanonicalDeserialize, CanonicalSerialize};
-// , SerializationError};
-use std::io::{self, Read, Write};
+use ark_std::rand::thread_rng;
use num_bigint::BigInt;
-
-// JSON
+use semaphore::{
+hash_to_field, identity::Identity, poseidon_tree::PoseidonTree, protocol::*, Field,
+};
use serde::Deserialize;
use serde_json;
+use std::io::{self, Read, Write};

-// For RLN Rust version
-//use bellman::pairing::ff::{Field, PrimeField, PrimeFieldRepr, ScalarEngine};
-//use sapling_crypto::bellman::pairing::bn256::Bn256;
+use crate::circuit::{CIRCOM, ZKEY};
+use crate::protocol;

// TODO Add Engine here? i.e. <E: Engine> not <Bn254>
// TODO Assuming we want to use IncrementalMerkleTree, figure out type/trait conversions
@@ -36,22 +26,9 @@ use serde_json;
pub struct RLN {
circom: CircomCircuit<Bn254>,
params: ProvingKey<Bn254>,
-// RLN Rust version
-//tree: IncrementalMerkleTree<Bn256>,
tree: PoseidonTree,
}
#[derive(Debug, Deserialize)]
//#[serde(rename_all = "camelCase")]
struct WitnessInput {
identity_secret: String,
path_elements: Vec<String>,
identity_path_index: Vec<i32>,
x: String,
epoch: String,
rln_identifier: String,
}
// TODO Expand API to have better coverage of things needed
impl RLN {
@@ -60,21 +37,20 @@ impl RLN {
let cfg =
CircomConfig::<Bn254>::new("./resources/rln.wasm", "./resources/rln.r1cs").unwrap();

-let mut builder = CircomBuilder::new(cfg);
-// create an empty instance for setting it up
-let circom = builder.setup();
-let mut rng = thread_rng();
-let params = generate_random_parameters::<Bn254, _, _>(circom, &mut rng).unwrap();
+let builder = CircomBuilder::new(cfg);
+let params = ZKEY();

let circom = builder.build().unwrap();
let inputs = circom.get_public_inputs().unwrap();
println!("Public inputs {:#?} ", inputs);

+// We compute a default empty tree
+// Probably better to pass it as parameter
+let TREE_HEIGHT = 21;
let leaf = Field::from(0);
-let mut tree = PoseidonTree::new(21, leaf);
+let tree = PoseidonTree::new(TREE_HEIGHT, leaf);

RLN {
circom,
@@ -83,134 +59,21 @@ impl RLN {
}
}
// XXX This is a tempory hack to get end to end proving/verification working pub fn set_tree<R: Read>(&self, _input_data: R) -> io::Result<()> {
// Not supposed to be part of public API //Implement leaf and deserialization
pub fn new_json_spike() -> RLN { //let leaf = Leaf::deserialize(input_data).unwrap();
let cfg =
CircomConfig::<Bn254>::new("./resources/rln.wasm", "./resources/rln.r1cs").unwrap();
// TODO Refactor //returns H::Hash, which is a 256 bit hash value
// From rln JSON witness //let root = self.tree.root();
// Input generated with https://github.com/oskarth/zk-kit/commit/b6a872f7160c7c14e10a0ea40acab99cbb23c9a8 // TODO Return root as LE here
let input_json_str = r#" //root.write_le(&mut result_data)?;
{ //println!("NYI: root le write buffer {:#?}", root);
"identity_secret": "12825549237505733615964533204745049909430608936689388901883576945030025938736", Ok(())
"path_elements": [
"18622655742232062119094611065896226799484910997537830749762961454045300666333",
"20590447254980891299813706518821659736846425329007960381537122689749540452732",
"7423237065226347324353380772367382631490014989348495481811164164159255474657",
"11286972368698509976183087595462810875513684078608517520839298933882497716792",
"3607627140608796879659380071776844901612302623152076817094415224584923813162",
"19712377064642672829441595136074946683621277828620209496774504837737984048981",
"20775607673010627194014556968476266066927294572720319469184847051418138353016",
"3396914609616007258851405644437304192397291162432396347162513310381425243293",
"21551820661461729022865262380882070649935529853313286572328683688269863701601",
"6573136701248752079028194407151022595060682063033565181951145966236778420039",
"12413880268183407374852357075976609371175688755676981206018884971008854919922",
"14271763308400718165336499097156975241954733520325982997864342600795471836726",
"20066985985293572387227381049700832219069292839614107140851619262827735677018",
"9394776414966240069580838672673694685292165040808226440647796406499139370960",
"11331146992410411304059858900317123658895005918277453009197229807340014528524"
],
"identity_path_index": [
1,
1,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
],
"x": "8143228284048792769012135629627737459844825626241842423967352803501040982",
"epoch": "0x0000005b612540fc986b42322f8cb91c2273afad58ed006fdba0c97b4b16b12f",
"rln_identifier": "11412926387081627876309792396682864042420635853496105400039841573530884328439"
}
"#;
let witness_input: WitnessInput =
serde_json::from_str(input_json_str).expect("JSON was not well-formatted");
println!("Witness input JSON: {:?}", witness_input);
let mut builder = CircomBuilder::new(cfg);
builder.push_input(
"identity_secret",
BigInt::parse_bytes(witness_input.identity_secret.as_bytes(), 10).unwrap(),
);
for v in witness_input.path_elements.iter() {
builder.push_input(
"path_elements",
BigInt::parse_bytes(v.as_bytes(), 10).unwrap(),
);
}
for v in witness_input.identity_path_index.iter() {
builder.push_input("identity_path_index", BigInt::from(*v));
}
builder.push_input(
"x",
BigInt::parse_bytes(witness_input.x.as_bytes(), 10).unwrap(),
);
builder.push_input(
"epoch",
BigInt::parse_bytes(
witness_input.epoch.strip_prefix("0x").unwrap().as_bytes(),
16,
)
.unwrap(),
);
builder.push_input(
"rln_identifier",
BigInt::parse_bytes(witness_input.rln_identifier.as_bytes(), 10).unwrap(),
);
println!("Builder input:\n {:#?}", builder.inputs);
// create an empty instance for setting it up
let circom = builder.setup();
let mut rng = thread_rng();
let params = generate_random_parameters::<Bn254, _, _>(circom, &mut rng).unwrap();
let circom = builder.build().unwrap();
let inputs = circom.get_public_inputs().unwrap();
println!("Public inputs {:#?} ", inputs);
// Sapling based tree
// // TODO Add as parameter(s)
// let merkle_depth: usize = 3;
// let poseidon_params = PoseidonParams::<Bn256>::new(8, 55, 3, None, None, None);
// let hasher = PoseidonHasher::new(poseidon_params.clone());
// let tree = IncrementalMerkleTree::empty(hasher, merkle_depth);
let leaf = Field::from(0);
let mut tree = PoseidonTree::new(21, leaf);
RLN {
circom,
params,
tree,
}
} }
/// returns current membership root
/// * `root` is a scalar field element in 32 bytes
-pub fn get_root<W: Write>(&self, mut result_data: W) -> io::Result<()> {
+pub fn get_root<W: Write>(&self, _result_data: W) -> io::Result<()> {
//let root = self.tree.get_root();
// Converts PrimeFieldRepr into LE
//root.into_repr().write_le(&mut result_data)?;
@@ -262,26 +125,3 @@ impl Default for RLN {
Self::new()
}
}
// NOTE: Expensive test, ignoring by default
#[ignore]
#[test]
fn rln_proof() {
let rln = RLN::new();
let rln_spike = RLN::new_json_spike();
//let inputs = mul.circom.get_public_inputs().unwrap();
let mut output_data: Vec<u8> = Vec::new();
let _ = rln_spike.prove(&mut output_data);
let proof_data = &output_data[..];
// XXX Pass as arg?
//let pvk = prepare_verifying_key(&mul.params.vk);
// XXX: Something is wrong here I think, because it doesn't verify with the
// full proof fields like yShare - just witness? Might be a bug
let verified = rln.verify(proof_data).unwrap();
assert!(verified);
}
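Note: the byte-buffer oriented methods kept here are what the FFI layer builds on. A rough usage sketch based on the methods visible in this file; the exact prove/verify signatures are assumed from the removed rln_proof test, so treat this as illustrative only:

use rln::public::RLN;

fn prove_and_verify() -> std::io::Result<()> {
    let rln = RLN::new();

    // Serialize a proof into a byte buffer... (signature assumed)
    let mut proof_data: Vec<u8> = Vec::new();
    rln.prove(&mut proof_data)?;

    // ...and verify it back from the same bytes (signature assumed)
    let verified = rln.verify(&proof_data[..]).unwrap();
    assert!(verified);
    Ok(())
}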

rln/src/utils.rs (new file, 76 lines)

@@ -0,0 +1,76 @@
use ark_bn254::{Bn254, Fr, Parameters};
use ark_ff::{Fp256, PrimeField};
use ark_std::str::FromStr;
use ethers_core::utils::keccak256;
use num_bigint::{BigInt, BigUint, ToBigInt};
use semaphore::{identity::Identity, Field};
pub fn to_fr(el: Field) -> Fr {
Fr::try_from(el).unwrap()
}
pub fn to_field(el: Fr) -> Field {
el.try_into().unwrap()
}
pub fn vec_to_fr(v: Vec<Field>) -> Vec<Fr> {
let mut result: Vec<Fr> = vec![];
for el in v {
result.push(to_fr(el));
}
result
}
pub fn vec_to_field(v: Vec<Fr>) -> Vec<Field> {
let mut result: Vec<Field> = vec![];
for el in v {
result.push(to_field(el));
}
result
}
pub fn str_to_field(input: String, radix: i32) -> Field {
assert!((radix == 10) || (radix == 16));
// We remove any quote present and we trim
let input_clean = input.replace("\"", "");
let input_clean = input_clean.trim();
if radix == 10 {
Field::from_str(&format!(
"{:01$x}",
BigUint::from_str(input_clean).unwrap(),
64
))
.unwrap()
} else {
let input_clean = input_clean.replace("0x", "");
Field::from_str(&format!("{:0>64}", &input_clean)).unwrap()
}
}
pub fn bytes_to_fr(input: &[u8]) -> Fr {
Fr::from(BigUint::from_bytes_le(input))
}
pub fn bytes_to_field(input: &[u8]) -> Field {
to_field(bytes_to_fr(input))
}
// Arithmetic over Field elements (wrapped over arkworks algebra crate)
pub fn add(a: Field, b: Field) -> Field {
to_field(to_fr(a) + to_fr(b))
}
pub fn mul(a: Field, b: Field) -> Field {
to_field(to_fr(a) * to_fr(b))
}
pub fn div(a: Field, b: Field) -> Field {
to_field(to_fr(a) / to_fr(b))
}
pub fn inv(a: Field) -> Field {
to_field(Fr::from(1) / to_fr(a))
}
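Note: a short usage sketch for these conversion helpers (illustrative only; values chosen arbitrarily):

use rln::utils::{add, mul, str_to_field};

fn conversion_example() {
    // Decimal and hex strings parse to the same field element
    let a = str_to_field("255".to_string(), 10);
    let b = str_to_field("0xff".to_string(), 16);
    assert_eq!(a, b);

    // Wrapped arithmetic over semaphore's Field type
    let sum = add(a, b);
    let prod = mul(a, b);
    let _ = (sum, prod);
}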


@@ -21,7 +21,7 @@ num-bigint = { version = "0.4", default-features = false, features = ["rand"] }
once_cell = "1.8"
primitive-types = "0.11.1"
rand = "0.8.4"
-semaphore = { git = "https://github.com/worldcoin/semaphore-rs" }
+semaphore = { git = "https://github.com/worldcoin/semaphore-rs", rev = "d462a43"}
serde = "1.0"
thiserror = "1.0.0"
wasmer = { version = "2.0" }