Mirror of https://github.com/vacp2p/semaphore-rs.git (synced 2025-02-24 09:38:34 +00:00)

Commit f1f2dff348

Cargo.toml
@@ -53,6 +53,8 @@ ethers-core = { git = "https://github.com/gakonst/ethers-rs", default-features =
 [dev-dependencies]
 pretty_assertions = "1.0"
+rand_chacha = "0.3.1"
+rand_core = "0.6.3"
 serde_json = "1.0.79"
 tempfile = "3.0"
 tiny-keccak = "2.0.2"
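
Note: the two new dev-dependencies are used by the tests added further down in this commit, which draw random field elements from a seeded ChaCha RNG so runs are reproducible. A minimal sketch of that pattern (assuming ark-bn254 0.3, ark-ff 0.3, rand_chacha 0.3.1 and rand_core 0.6.3):

```rust
use ark_bn254::Fr;
use ark_ff::UniformRand;
use rand_chacha::ChaChaRng;
use rand_core::SeedableRng;

fn main() {
    // The same seed always yields the same sequence of field elements.
    let mut rng_a = ChaChaRng::seed_from_u64(123);
    let mut rng_b = ChaChaRng::seed_from_u64(123);
    assert_eq!(Fr::rand(&mut rng_a), Fr::rand(&mut rng_b));
}
```
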

src/identity.rs
@@ -1,16 +1,11 @@
-use num_bigint::{BigInt, Sign};
-use once_cell::sync::Lazy;
-use poseidon_rs::Poseidon;
+use crate::{posseidon_hash, Field};
+use ark_ff::PrimeField;
 use sha2::{Digest, Sha256};
 
-use crate::util::{bigint_to_fr, fr_to_bigint};
-
-static POSEIDON: Lazy<Poseidon> = Lazy::new(Poseidon::new);
-
 #[derive(Clone, PartialEq, Eq, Debug)]
 pub struct Identity {
-    pub trapdoor: BigInt,
-    pub nullifier: BigInt,
+    pub trapdoor: Field,
+    pub nullifier: Field,
 }
 
 // todo: improve
@@ -28,14 +23,16 @@ impl Identity {
         let seed_hash = &sha(seed);
 
         // https://github.com/appliedzkp/zk-kit/blob/1ea410456fc2b95877efa7c671bc390ffbfb5d36/packages/identity/src/identity.ts#L58
-        let trapdoor = BigInt::from_bytes_be(
-            Sign::Plus,
-            &sha(format!("{}identity_trapdoor", hex::encode(seed_hash)).as_bytes()),
-        );
-        let nullifier = BigInt::from_bytes_be(
-            Sign::Plus,
-            &sha(format!("{}identity_nullifier", hex::encode(seed_hash)).as_bytes()),
-        );
+        let trapdoor = Field::from_be_bytes_mod_order(&sha(format!(
+            "{}identity_trapdoor",
+            hex::encode(seed_hash)
+        )
+        .as_bytes()));
+        let nullifier = Field::from_be_bytes_mod_order(&sha(format!(
+            "{}identity_nullifier",
+            hex::encode(seed_hash)
+        )
+        .as_bytes()));
 
         Self {
             trapdoor,
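
Note: after this change both identity secrets are arkworks field elements, derived by hashing the seed with SHA-256 and reducing a labelled second hash modulo the BN254 scalar field order. A minimal sketch of the derivation pattern (assuming ark-bn254 0.3, ark-ff 0.3, sha2 and hex; the `derive` helper below is illustrative, not part of the crate):

```rust
use ark_bn254::Fr as Field;
use ark_ff::PrimeField;
use sha2::{Digest, Sha256};

fn sha(msg: &[u8]) -> Vec<u8> {
    Sha256::digest(msg).to_vec()
}

// Illustrative helper: hex-encode sha(seed), append a label such as
// "identity_trapdoor" or "identity_nullifier", hash again, reduce into the field.
fn derive(seed: &[u8], label: &str) -> Field {
    let seed_hash = sha(seed);
    let input = format!("{}{}", hex::encode(&seed_hash), label);
    Field::from_be_bytes_mod_order(&sha(input.as_bytes()))
}

fn main() {
    let trapdoor = derive(b"hello", "identity_trapdoor");
    let nullifier = derive(b"hello", "identity_nullifier");
    println!("trapdoor = {}\nnullifier = {}", trapdoor, nullifier);
}
```
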
@@ -44,21 +41,12 @@
     }
 
     #[must_use]
-    pub fn secret_hash(&self) -> BigInt {
-        let res = POSEIDON
-            .hash(vec![
-                bigint_to_fr(&self.nullifier),
-                bigint_to_fr(&self.trapdoor),
-            ])
-            .expect("input length is constant and valid for the hash");
-        fr_to_bigint(res)
+    pub fn secret_hash(&self) -> Field {
+        posseidon_hash(&[self.nullifier, self.trapdoor])
     }
 
     #[must_use]
-    pub fn commitment(&self) -> BigInt {
-        let res = POSEIDON
-            .hash(vec![bigint_to_fr(&self.secret_hash())])
-            .expect("input length is constant and valid for the hash");
-        fr_to_bigint(res)
+    pub fn commitment(&self) -> Field {
+        posseidon_hash(&[self.secret_hash()])
     }
 }
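
Note: `secret_hash` and `commitment` now return `Field` values computed through the shared `posseidon_hash` helper instead of converting through `BigInt`. A rough usage sketch, assuming the package is imported under the name `semaphore` (the crate name is not shown in this diff):

```rust
use semaphore::{identity::Identity, Field};

fn main() {
    // Identity derivation is deterministic in the seed.
    let id = Identity::new(b"hello");
    let secret: Field = id.secret_hash();
    let commitment: Field = id.commitment();
    println!("secret     = {}", secret);
    println!("commitment = {}", commitment);
}
```
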

src/lib.rs (32 changed lines)
@@ -7,6 +7,7 @@ pub mod hash;
 pub mod identity;
 pub mod merkle_tree;
 pub mod poseidon_tree;
+mod posseidon_hash;
 pub mod protocol;
 pub mod util;
 
@@ -15,9 +16,12 @@ pub mod mimc_hash;
 #[cfg(feature = "mimc")]
 pub mod mimc_tree;
 
-use ark_bn254::Parameters;
+use ark_bn254::{Fr, FrParameters, Parameters};
 use ark_ec::bn::Bn;
 
+pub use crate::posseidon_hash::posseidon_hash;
+
+pub type Field = Fr;
 pub type Groth16Proof = ark_groth16::Proof<Bn<Parameters>>;
 pub type EthereumGroth16Proof = ark_circom::ethereum::Proof;
 
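
Note: `Field` becomes a crate-wide alias for BN254's scalar field `Fr`, so the usual arkworks traits apply to it directly. A small sketch (assuming ark-bn254 0.3 and ark-ff 0.3):

```rust
use ark_bn254::Fr;
use ark_ff::{One, Zero};

// The crate-level alias introduced in this hunk.
type Field = Fr;

fn main() {
    let one = Field::one();
    let zero = Field::zero();
    assert_eq!(one + zero, one);
    assert!(zero.is_zero());
}
```
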
@@ -45,39 +49,32 @@ mod test {
 
         // generate merkle tree
         let mut tree = PoseidonTree::new(21, LEAF);
-        let (_, leaf) = id.commitment().to_bytes_be();
-        tree.set(0, leaf.into());
+        tree.set(0, id.commitment().into());
 
         let merkle_proof = tree.proof(0).expect("proof should exist");
-        let root = tree.root();
+        let root = tree.root().into();
 
         // change signal and external_nullifier here
         let signal = b"xxx";
         let external_nullifier = b"appId";
 
         let external_nullifier_hash = hash_external_nullifier(external_nullifier);
-        let nullifier_hash = generate_nullifier_hash(&id, &external_nullifier_hash);
+        let nullifier_hash = generate_nullifier_hash(&id, external_nullifier_hash);
 
         let config = SnarkFileConfig {
             zkey: "./semaphore/build/snark/semaphore_final.zkey".to_string(),
             wasm: "./semaphore/build/snark/semaphore.wasm".to_string(),
         };
 
-        let proof = generate_proof(
-            &config,
-            &id,
-            &merkle_proof,
-            &external_nullifier_hash,
-            signal,
-        )
-        .unwrap();
+        let proof =
+            generate_proof(&config, &id, &merkle_proof, external_nullifier, signal).unwrap();
 
         let success = verify_proof(
             &config,
-            &root.into(),
-            &nullifier_hash,
+            root,
+            nullifier_hash,
             signal,
-            &external_nullifier_hash,
+            external_nullifier,
             &proof,
         )
         .unwrap();
@@ -113,8 +110,7 @@ pub mod bench {
         // Create tree
         let id = Identity::new(b"hello");
         let mut tree = PoseidonTree::new(21, LEAF);
-        let (_, leaf) = id.commitment().to_bytes_be();
-        tree.set(0, leaf.into());
+        tree.set(0, id.commitment().into());
         let merkle_proof = tree.proof(0).expect("proof should exist");
 
         // change signal and external_nullifier here

src/merkle_tree.rs
@@ -4,7 +4,7 @@
 //!
 //! * Disk based storage backend (using mmaped files should be easy)
 
-use num_bigint::BigInt;
+use crate::Field;
 use serde::{Deserialize, Serialize};
 use std::{
     fmt::Debug,
@@ -178,12 +178,12 @@ impl<H: Hasher> Proof<H> {
 
     /// Compute path index (TODO: do we want to keep this here?)
     #[must_use]
-    pub fn path_index(&self) -> Vec<BigInt> {
+    pub fn path_index(&self) -> Vec<Field> {
         self.0
             .iter()
             .map(|branch| match branch {
-                Branch::Left(_) => BigInt::from(0),
-                Branch::Right(_) => BigInt::from(1),
+                Branch::Left(_) => Field::from(0),
+                Branch::Right(_) => Field::from(1),
             })
             .collect()
     }
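
Note: `path_index` now yields one field element per tree level, 0 for a left branch and 1 for a right branch, which is the form fed to the circuit's `treePathIndices` input in `generate_proof`. A standalone sketch of the same mapping (assuming ark-bn254 0.3; `Branch` below is a stand-in enum, not the crate's type):

```rust
use ark_bn254::Fr as Field;

// Stand-in for the crate's Branch type, for illustration only.
enum Branch {
    Left(u64),
    Right(u64),
}

fn path_index(path: &[Branch]) -> Vec<Field> {
    path.iter()
        .map(|branch| match branch {
            Branch::Left(_) => Field::from(0_u64),
            Branch::Right(_) => Field::from(1_u64),
        })
        .collect()
}

fn main() {
    let bits = path_index(&[Branch::Left(7), Branch::Right(7)]);
    assert_eq!(bits, vec![Field::from(0_u64), Field::from(1_u64)]);
}
```
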

src/poseidon_tree.rs
@@ -1,14 +1,11 @@
 use crate::{
     hash::Hash,
     merkle_tree::{self, Hasher, MerkleTree},
+    posseidon_hash, Field,
 };
-use ff::{PrimeField, PrimeFieldRepr};
-use once_cell::sync::Lazy;
-use poseidon_rs::{Fr, FrRepr, Poseidon};
+use ark_ff::{PrimeField, ToBytes};
 use serde::{Deserialize, Serialize};
 
-static POSEIDON: Lazy<Poseidon> = Lazy::new(Poseidon::new);
-
 #[allow(dead_code)]
 pub type PoseidonTree = MerkleTree<PoseidonHash>;
 #[allow(dead_code)]
@@ -20,20 +17,28 @@ pub type Proof = merkle_tree::Proof<PoseidonHash>;
 pub struct PoseidonHash;
 
 #[allow(clippy::fallible_impl_from)] // TODO
-impl From<&Hash> for Fr {
+impl From<&Hash> for Field {
     fn from(hash: &Hash) -> Self {
-        let mut repr = FrRepr::default();
-        repr.read_be(&hash.as_bytes_be()[..]).unwrap();
-        Self::from_repr(repr).unwrap()
+        Field::from_be_bytes_mod_order(&hash.0)
     }
 }
 
 #[allow(clippy::fallible_impl_from)] // TODO
-impl From<Fr> for Hash {
-    fn from(fr: Fr) -> Self {
+impl From<Hash> for Field {
+    fn from(hash: Hash) -> Self {
+        Field::from_be_bytes_mod_order(&hash.0)
+    }
+}
+
+#[allow(clippy::fallible_impl_from)] // TODO
+impl From<Field> for Hash {
+    fn from(n: Field) -> Self {
         let mut bytes = [0_u8; 32];
-        fr.into_repr().write_be(&mut bytes[..]).unwrap();
-        Self::from_bytes_be(bytes)
+        n.into_repr()
+            .write(&mut bytes[..])
+            .expect("write should succeed");
+        bytes.reverse(); // Convert to big endian
+        Self(bytes)
     }
 }
 
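
Note: the `Hash` and `Field` conversions above treat the 32-byte hash as the big-endian encoding of the element's canonical integer. A minimal round-trip sketch (assuming ark-bn254 0.3 and ark-ff 0.3; `Hash` below is a stand-in 32-byte wrapper, not the crate's type):

```rust
use ark_bn254::Fr as Field;
use ark_ff::{PrimeField, ToBytes};

// Stand-in 32-byte wrapper, for illustration only.
struct Hash([u8; 32]);

fn field_to_hash(n: Field) -> Hash {
    let mut bytes = [0_u8; 32];
    // into_repr() is the canonical integer; ToBytes writes it little-endian.
    n.into_repr()
        .write(&mut bytes[..])
        .expect("32 bytes always fit");
    bytes.reverse(); // store big-endian, as the new From<Field> impl does
    Hash(bytes)
}

fn hash_to_field(hash: &Hash) -> Field {
    Field::from_be_bytes_mod_order(&hash.0)
}

fn main() {
    let n = Field::from(42_u64);
    assert_eq!(hash_to_field(&field_to_hash(n)), n);
}
```
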
@@ -41,17 +46,29 @@ impl Hasher for PoseidonHash {
     type Hash = Hash;
 
     fn hash_node(left: &Self::Hash, right: &Self::Hash) -> Self::Hash {
-        POSEIDON
-            .hash(vec![left.into(), right.into()])
-            .unwrap() // TODO
-            .into()
+        posseidon_hash(&[left.into(), right.into()]).into()
     }
 }
 
 #[cfg(test)]
 pub mod test {
     use super::*;
+    use ark_ff::UniformRand;
     use hex_literal::hex;
+    use rand_chacha::ChaChaRng;
+    use rand_core::SeedableRng;
+
+    #[test]
+    fn test_ark_hash_ark_roundtrip() {
+        use ark_ff::One;
+        let mut rng = ChaChaRng::seed_from_u64(123);
+        for i in 0..1000 {
+            let n = Field::rand(&mut rng);
+            let n = Field::one();
+            let m = Hash::from(n).into();
+            assert_eq!(n, m);
+        }
+    }
 
     #[test]
     fn test_tree_4() {

src/posseidon_hash.rs (new file, 59 lines)
@@ -0,0 +1,59 @@
+use crate::{
+    hash::Hash,
+    merkle_tree::{self, Hasher, MerkleTree},
+    Field,
+};
+use ark_ff::{BigInteger256, PrimeField as _};
+use ff::{PrimeField as _, PrimeFieldRepr as _};
+use once_cell::sync::Lazy;
+use poseidon_rs::{Fr, FrRepr, Poseidon};
+use serde::{Deserialize, Serialize};
+
+static POSEIDON: Lazy<Poseidon> = Lazy::new(Poseidon::new);
+
+fn ark_to_posseidon(n: Field) -> Fr {
+    Fr::from_repr(FrRepr(n.into_repr().0)).expect("n is a valid field element")
+}
+
+fn posseidon_to_ark(n: Fr) -> Field {
+    Field::from_repr(BigInteger256(n.into_repr().0)).expect("n is a valid field element")
+}
+
+pub fn posseidon_hash(input: &[Field]) -> Field {
+    let input = input
+        .iter()
+        .copied()
+        .map(ark_to_posseidon)
+        .collect::<Vec<_>>();
+
+    POSEIDON
+        .hash(input)
+        .map(posseidon_to_ark)
+        .expect("hash with fixed input size can't fail")
+}
+
+#[cfg(test)]
+mod test {
+    use super::{ark_to_posseidon, posseidon_to_ark};
+    use crate::Field;
+    use ark_ff::{Field as _, UniformRand};
+    use ff::{Field as _, PrimeField, PrimeFieldRepr};
+    use poseidon_rs::Fr;
+    use rand_chacha::ChaChaRng;
+    use rand_core::SeedableRng;
+
+    #[test]
+    fn test_modulus_identical() {
+        assert_eq!(Fr::char().0, Field::characteristic());
+    }
+
+    #[test]
+    fn test_ark_pos_ark_roundtrip() {
+        let mut rng = ChaChaRng::seed_from_u64(123);
+        for i in 0..1000 {
+            let n = Field::rand(&mut rng);
+            let m = posseidon_to_ark(ark_to_posseidon(n));
+            assert_eq!(n, m)
+        }
+    }
+}
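
Note: the limb copy in `ark_to_posseidon` / `posseidon_to_ark` works because both libraries expose the canonical integer as four little-endian u64 limbs and the two `Fr` types share the same modulus (which is what `test_modulus_identical` checks). An equivalent, more explicit formulation goes through canonical big-endian bytes; a sketch only, assuming the same ark-ff 0.3, poseidon-rs and ff crates imported above (`ark_to_posseidon_bytes` is illustrative, not part of the crate):

```rust
use ark_bn254::Fr as Field;
use ark_ff::{PrimeField as _, ToBytes as _};
use ff::{PrimeField as _, PrimeFieldRepr as _};
use poseidon_rs::{Fr, FrRepr};

fn ark_to_posseidon_bytes(n: Field) -> Fr {
    // Write the canonical integer (little-endian), then flip to big-endian.
    let mut bytes = [0_u8; 32];
    n.into_repr()
        .write(&mut bytes[..])
        .expect("32 bytes always fit");
    bytes.reverse();
    // Read it back as a poseidon-rs field element over the same modulus.
    let mut repr = FrRepr::default();
    repr.read_be(&bytes[..]).expect("exactly 32 bytes");
    Fr::from_repr(repr).expect("value is below the shared modulus")
}

fn main() {
    let _converted = ark_to_posseidon_bytes(Field::from(5_u64));
}
```
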

src/protocol.rs (123 changed lines)
@@ -1,27 +1,22 @@
+use crate::{
+    identity::Identity,
+    merkle_tree::{self, Branch},
+    poseidon_tree::PoseidonHash,
+    posseidon_hash, Field,
+};
 use ark_bn254::{Bn254, Parameters};
 use ark_circom::{read_zkey, CircomReduction, WitnessCalculator};
 use ark_ec::bn::Bn;
-use ark_ff::Fp256;
+use ark_ff::{Fp256, PrimeField};
 use ark_groth16::{create_proof_with_reduction_and_matrices, prepare_verifying_key, Proof};
 use ark_relations::r1cs::SynthesisError;
 use ark_std::{rand::thread_rng, UniformRand};
 use color_eyre::Result;
 use ethers_core::utils::keccak256;
-use num_bigint::{BigInt, Sign};
-use once_cell::sync::Lazy;
-use poseidon_rs::Poseidon;
+use num_bigint::{BigInt, BigUint, ToBigInt};
 use std::{collections::HashMap, fs::File, ops::Shr, time::Instant};
 use thiserror::Error;
 
-use crate::{
-    identity::Identity,
-    merkle_tree::{self, Branch},
-    poseidon_tree::PoseidonHash,
-    util::{bigint_to_fr, fr_to_bigint},
-};
-
-static POSEIDON: Lazy<Poseidon> = Lazy::new(Poseidon::new);
-
 pub struct SnarkFileConfig {
     pub zkey: String,
     pub wasm: String,
@@ -29,42 +24,42 @@ pub struct SnarkFileConfig {
 
 /// Helper to merkle proof into a bigint vector
 /// TODO: we should create a From trait for this
-fn merkle_proof_to_vec(proof: &merkle_tree::Proof<PoseidonHash>) -> Vec<BigInt> {
+fn merkle_proof_to_vec(proof: &merkle_tree::Proof<PoseidonHash>) -> Vec<Field> {
     proof
         .0
         .iter()
         .map(|x| match x {
             Branch::Left(value) | Branch::Right(value) => value.into(),
         })
-        .collect::<Vec<BigInt>>()
+        .collect()
 }
 
 /// Internal helper to hash the signal to make sure it's in the field
-fn hash_signal(signal: &[u8]) -> BigInt {
-    BigInt::from_bytes_be(Sign::Plus, &keccak256(signal)).shr(8)
+fn hash_signal(signal: &[u8]) -> Field {
+    let hash = keccak256(signal);
+    // Shift right one byte to make it fit in the field
+    let mut bytes = [0_u8; 32];
+    bytes[1..].copy_from_slice(&hash[..31]);
+    Field::from_be_bytes_mod_order(&bytes)
 }
 
 /// Internal helper to hash the external nullifier
 #[must_use]
-pub fn hash_external_nullifier(nullifier: &[u8]) -> [u8; 32] {
+pub fn hash_external_nullifier(nullifier: &[u8]) -> Field {
+    // Hash input to 256 bits.
     let mut hash = keccak256(nullifier);
-    hash[0] = 0;
-    hash[1] = 0;
-    hash[2] = 0;
-    hash[3] = 0;
-    hash
+    // Clear first four bytes to make sure the hash is in the field.
+    for byte in &mut hash[0..4] {
+        *byte = 0;
+    }
+    // Convert to field element.
+    Fp256::from_be_bytes_mod_order(&hash)
 }
 
 /// Generates the nullifier hash
 #[must_use]
-pub fn generate_nullifier_hash(identity: &Identity, external_nullifier: &[u8]) -> BigInt {
-    let res = POSEIDON
-        .hash(vec![
-            bigint_to_fr(&BigInt::from_bytes_be(Sign::Plus, external_nullifier)),
-            bigint_to_fr(&identity.nullifier),
-        ])
-        .expect("hash with fixed input size can't fail");
-    fr_to_bigint(res)
+pub fn generate_nullifier_hash(identity: &Identity, external_nullifier: Field) -> Field {
+    posseidon_hash(&[external_nullifier, identity.nullifier])
 }
 
 #[derive(Error, Debug)]
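
Note: `hash_signal` keeps its old semantics: keccak256 the signal, then drop the least significant byte so the remaining 31-byte value is guaranteed to lie below the BN254 scalar modulus. Only the representation changed, from `BigInt >> 8` to a byte copy. A sketch checking that the two formulations agree (assuming ethers-core's keccak256, ark-bn254 0.3, ark-ff 0.3 and num-bigint 0.4):

```rust
use ark_bn254::Fr as Field;
use ark_ff::PrimeField;
use ethers_core::utils::keccak256;
use num_bigint::{BigInt, Sign};
use std::ops::Shr;

fn main() {
    let hash = keccak256(b"xxx");

    // New formulation: move the 31 most significant bytes down by one byte.
    let mut bytes = [0_u8; 32];
    bytes[1..].copy_from_slice(&hash[..31]);
    let new = Field::from_be_bytes_mod_order(&bytes);

    // Old formulation: interpret as an integer and shift right by 8 bits.
    let shifted = BigInt::from_bytes_be(Sign::Plus, &hash).shr(8_u8);
    let old = Field::from_be_bytes_mod_order(&shifted.to_bytes_be().1);

    assert_eq!(new, old);
}
```
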
@@ -77,6 +72,11 @@ pub enum ProofError {
     SynthesisError(#[from] SynthesisError),
 }
 
+fn ark_to_bigint(n: Field) -> BigInt {
+    let n: BigUint = n.into();
+    n.to_bigint().expect("conversion always succeeds for uint")
+}
+
 /// Generates a semaphore proof
 ///
 /// # Errors
@@ -94,27 +94,26 @@ pub fn generate_proof(
     let num_inputs = matrices.num_instance_variables;
     let num_constraints = matrices.num_constraints;
 
-    let inputs = {
-        let mut inputs: HashMap<String, Vec<BigInt>> = HashMap::new();
-
-        inputs.insert("identityNullifier".to_string(), vec![identity
-            .nullifier
-            .clone()]);
-        inputs.insert("identityTrapdoor".to_string(), vec![identity
-            .trapdoor
-            .clone()]);
-        inputs.insert("treePathIndices".to_string(), merkle_proof.path_index());
-        inputs.insert(
-            "treeSiblings".to_string(),
-            merkle_proof_to_vec(merkle_proof),
-        );
-        inputs.insert("externalNullifier".to_string(), vec![
-            BigInt::from_bytes_be(Sign::Plus, external_nullifier),
-        ]);
-        inputs.insert("signalHash".to_string(), vec![hash_signal(signal)]);
-
-        inputs
-    };
+    let external_nullifier = hash_external_nullifier(external_nullifier);
+    let signal = hash_signal(signal);
+    let inputs = [
+        ("identityNullifier", vec![identity.nullifier]),
+        ("identityTrapdoor", vec![identity.trapdoor]),
+        ("treePathIndices", merkle_proof.path_index()),
+        ("treeSiblings", merkle_proof_to_vec(merkle_proof)),
+        ("externalNullifier", vec![external_nullifier]),
+        ("signalHash", vec![signal]),
+    ];
+    let inputs = inputs.iter().map(|(name, values)| {
+        (
+            name.to_string(),
+            values
+                .iter()
+                .copied()
+                .map(ark_to_bigint)
+                .collect::<Vec<_>>(),
+        )
+    });
 
     let now = Instant::now();
 
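
Note: the circom witness inputs are still passed as `BigInt`s, so the now field-typed values are converted at the boundary with the new `ark_to_bigint` helper. A small sketch of that bridge (assuming ark-bn254 0.3 and num-bigint 0.4):

```rust
use ark_bn254::Fr as Field;
use num_bigint::{BigInt, BigUint, ToBigInt};

// Same shape as the ark_to_bigint helper added in this file.
fn ark_to_bigint(n: Field) -> BigInt {
    // arkworks provides Fr -> BigUint; a non-negative BigUint always
    // converts to BigInt.
    let n: BigUint = n.into();
    n.to_bigint().expect("conversion always succeeds for uint")
}

fn main() {
    assert_eq!(ark_to_bigint(Field::from(7_u64)), BigInt::from(7));
}
```
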
@@ -157,8 +156,8 @@
 /// necessarily mean the proof is incorrect.
 pub fn verify_proof(
     config: &SnarkFileConfig,
-    root: &BigInt,
-    nullifier_hash: &BigInt,
+    root: Field,
+    nullifier_hash: Field,
     signal: &[u8],
     external_nullifier: &[u8],
     proof: &Proof<Bn<Parameters>>,
@@ -169,18 +168,10 @@
     let pvk = prepare_verifying_key(&params.vk);
 
     let public_inputs = vec![
-        Fp256::from(root.to_biguint().expect("can not be negative")),
-        Fp256::from(nullifier_hash.to_biguint().expect("can not be negative")),
-        Fp256::from(
-            hash_signal(signal)
-                .to_biguint()
-                .expect("can not be negative"),
-        ),
-        Fp256::from(
-            BigInt::from_bytes_be(Sign::Plus, external_nullifier)
-                .to_biguint()
-                .expect("can not be negative"),
-        ),
+        root,
+        nullifier_hash,
+        hash_signal(signal),
+        hash_external_nullifier(external_nullifier),
     ];
     let result = ark_groth16::verify_proof(&pvk, proof, &public_inputs)?;
     Ok(result)
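
Note: `verify_proof` now recomputes the public inputs directly as field elements, in the order [root, nullifier_hash, hash_signal(signal), hash_external_nullifier(external_nullifier)]. Clearing the top four bytes in `hash_external_nullifier` is what keeps that last input inside the field; a sketch of why (assuming ark-bn254 0.3, ark-ff 0.3 and num-bigint 0.4):

```rust
use ark_bn254::Fr;
use ark_ff::PrimeField;
use num_bigint::BigUint;

fn main() {
    // With the top four bytes cleared the value is below 2^224, while the
    // BN254 scalar modulus has 254 bits, so no modular reduction can occur.
    assert_eq!(Fr::size_in_bits(), 254);

    let mut bytes = [0xff_u8; 32]; // worst case: every remaining bit set
    for byte in &mut bytes[0..4] {
        *byte = 0;
    }
    let as_integer = BigUint::from_bytes_be(&bytes);
    let via_field: BigUint = Fr::from_be_bytes_mod_order(&bytes).into();
    assert_eq!(as_integer, via_field); // the exact value survives
}
```
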