mirror of https://github.com/vacp2p/zerokit.git

Stateless Feature (#265)

* add stateless feature and tests
* update docs and new function

parent c4579e1917
commit 0d5642492a
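At a glance, the commit splits RLN construction along the new `stateless` feature: the default build keeps the in-object Merkle tree and the `RLN::new(tree_height, input)` signature, while a stateless build drops the tree and exposes a no-argument `RLN::new()`. A minimal sketch of the two call sites, assuming the public API as shown in this diff (the empty JSON tree config is only an illustrative value):

```rust
use std::io::Cursor;
use rln::public::RLN;

// Default (stateful) build: the RLN object owns a Poseidon Merkle tree.
#[cfg(not(feature = "stateless"))]
fn make_rln() -> RLN {
    let tree_height = 20;
    let tree_config = Cursor::new("{}".to_string()); // illustrative empty config
    RLN::new(tree_height, tree_config).unwrap()
}

// Stateless build: no tree inside RLN, so no arguments are needed.
#[cfg(feature = "stateless")]
fn make_rln() -> RLN {
    RLN::new().unwrap()
}
```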
@@ -8,7 +8,7 @@ jobs:
  linux:
    strategy:
      matrix:
-       feature: ["default", "arkzkey"]
+       feature: ["default", "arkzkey", "stateless"]
        target:
          - x86_64-unknown-linux-gnu
          - aarch64-unknown-linux-gnu

@@ -47,7 +47,7 @@ jobs:
    runs-on: macos-latest
    strategy:
      matrix:
-       feature: ["default", "arkzkey"]
+       feature: ["default", "arkzkey", "stateless"]
        target:
          - x86_64-apple-darwin
          - aarch64-apple-darwin
@@ -993,6 +993,15 @@ dependencies = [
 "subtle",
]

[[package]]
name = "document-features"
version = "0.2.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cb6969eaabd2421f8a2775cfd2471a2b634372b4a25d41e3bd647b79912850a0"
dependencies = [
 "litrs",
]

[[package]]
name = "ecdsa"
version = "0.16.7"

@@ -1611,6 +1620,12 @@ version = "0.4.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "da2479e8c062e40bf0066ffa0bc823de0a9368974af99c9f6df941d2c231e03f"

[[package]]
name = "litrs"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b4ce301924b7887e9d637144fdade93f9dfff9b60981d4ac161db09720d39aa5"

[[package]]
name = "lock_api"
version = "0.4.9"

@@ -2281,6 +2296,7 @@ dependencies = [
 "cfg-if",
 "color-eyre",
 "criterion 0.4.0",
 "document-features",
 "lazy_static 1.4.0",
 "num-bigint",
 "num-traits",
@@ -59,6 +59,8 @@ utils = { package = "zerokit_utils", version = "=0.5.1", path = "../utils/", def
serde_json = "=1.0.96"
serde = { version = "=1.0.163", features = ["derive"] }

document-features = { version = "=0.2.10", optional = true }

[dev-dependencies]
sled = "=0.34.7"
criterion = { version = "=0.4.0", features = ["html_reports"] }

@@ -75,6 +77,7 @@ parallel = [
wasm = ["wasmer/js", "wasmer/std"]
fullmerkletree = ["default"]
arkzkey = ["ark-zkey"]
stateless = []

# Note: pmtree feature is still experimental
pmtree-ft = ["utils/pmtree-ft"]

@@ -94,3 +97,6 @@ harness = false

[[bench]]
name = "poseidon_tree_benchmark"
harness = false

[package.metadata.docs.rs]
all-features = true
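The `stateless` feature is an empty flag: it pulls in no new dependencies and only selects which code paths get compiled. A minimal sketch of the gating pattern this commit applies throughout the crate (the function names below are placeholders, not items from the crate):

```rust
// Present only in the default (stateful) build, e.g. Merkle tree maintenance.
#[cfg(not(feature = "stateless"))]
pub fn tree_backed_operation() {
    // set_leaf / get_root / flush style APIs live behind this gate
}

// Present only when built with `--features stateless`.
#[cfg(feature = "stateless")]
pub fn witness_based_operation() {
    // callers supply Merkle proofs themselves and use *_with_witness APIs
}
```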
@@ -192,6 +192,7 @@ impl<'a> From<&Buffer> for &'a [u8] {
////////////////////////////////////////////////////////

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[cfg(not(feature = "stateless"))]
#[no_mangle]
pub extern "C" fn new(tree_height: usize, input_buffer: *const Buffer, ctx: *mut *mut RLN) -> bool {
    match RLN::new(tree_height, input_buffer.process()) {

@@ -207,6 +208,23 @@ pub extern "C" fn new(tree_height: usize, input_buffer: *const Buffer, ctx: *mut
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[cfg(feature = "stateless")]
#[no_mangle]
pub extern "C" fn new(ctx: *mut *mut RLN) -> bool {
    match RLN::new() {
        Ok(rln) => {
            unsafe { *ctx = Box::into_raw(Box::new(rln)) };
            true
        }
        Err(err) => {
            eprintln!("could not instantiate rln: {err}");
            false
        }
    }
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[cfg(not(feature = "stateless"))]
#[no_mangle]
pub extern "C" fn new_with_params(
    tree_height: usize,

@@ -234,47 +252,79 @@ pub extern "C" fn new_with_params(
    }
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[cfg(feature = "stateless")]
#[no_mangle]
pub extern "C" fn new_with_params(
    circom_buffer: *const Buffer,
    zkey_buffer: *const Buffer,
    vk_buffer: *const Buffer,
    ctx: *mut *mut RLN,
) -> bool {
    match RLN::new_with_params(
        circom_buffer.process().to_vec(),
        zkey_buffer.process().to_vec(),
        vk_buffer.process().to_vec(),
    ) {
        Ok(rln) => {
            unsafe { *ctx = Box::into_raw(Box::new(rln)) };
            true
        }
        Err(err) => {
            eprintln!("could not instantiate rln: {err}");
            false
        }
    }
}

////////////////////////////////////////////////////////
// Merkle tree APIs
////////////////////////////////////////////////////////
#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn set_tree(ctx: *mut RLN, tree_height: usize) -> bool {
    call!(ctx, set_tree, tree_height)
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn delete_leaf(ctx: *mut RLN, index: usize) -> bool {
    call!(ctx, delete_leaf, index)
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn set_leaf(ctx: *mut RLN, index: usize, input_buffer: *const Buffer) -> bool {
    call!(ctx, set_leaf, index, input_buffer)
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn get_leaf(ctx: *mut RLN, index: usize, output_buffer: *mut Buffer) -> bool {
    call_with_output_arg!(ctx, get_leaf, output_buffer, index)
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn leaves_set(ctx: *mut RLN) -> usize {
    ctx.process().leaves_set()
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn set_next_leaf(ctx: *mut RLN, input_buffer: *const Buffer) -> bool {
    call!(ctx, set_next_leaf, input_buffer)
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn set_leaves_from(
    ctx: *mut RLN,
    index: usize,

@@ -285,12 +335,14 @@ pub extern "C" fn set_leaves_from(

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn init_tree_with_leaves(ctx: *mut RLN, input_buffer: *const Buffer) -> bool {
    call!(ctx, init_tree_with_leaves, input_buffer)
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn atomic_operation(
    ctx: *mut RLN,
    index: usize,

@@ -302,6 +354,7 @@ pub extern "C" fn atomic_operation(

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn seq_atomic_operation(
    ctx: *mut RLN,
    leaves_buffer: *const Buffer,

@@ -318,12 +371,14 @@ pub extern "C" fn seq_atomic_operation(

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn get_root(ctx: *const RLN, output_buffer: *mut Buffer) -> bool {
    call_with_output_arg!(ctx, get_root, output_buffer)
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn get_proof(ctx: *const RLN, index: usize, output_buffer: *mut Buffer) -> bool {
    call_with_output_arg!(ctx, get_proof, output_buffer, index)
}

@@ -353,6 +408,7 @@ pub extern "C" fn verify(

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn generate_rln_proof(
    ctx: *mut RLN,
    input_buffer: *const Buffer,

@@ -378,6 +434,7 @@ pub extern "C" fn generate_rln_proof_with_witness(

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn verify_rln_proof(
    ctx: *const RLN,
    proof_buffer: *const Buffer,

@@ -461,18 +518,21 @@ pub extern "C" fn recover_id_secret(

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn set_metadata(ctx: *mut RLN, input_buffer: *const Buffer) -> bool {
    call!(ctx, set_metadata, input_buffer)
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn get_metadata(ctx: *const RLN, output_buffer: *mut Buffer) -> bool {
    call_with_output_arg!(ctx, get_metadata, output_buffer)
}

#[allow(clippy::not_unsafe_ptr_arg_deref)]
#[no_mangle]
#[cfg(not(feature = "stateless"))]
pub extern "C" fn flush(ctx: *mut RLN) -> bool {
    call!(ctx, flush)
}
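For callers of the C ABI, the stateless `new` only needs an out-pointer for the context. A minimal Rust-side sketch of driving it, modeled on the pattern the FFI tests in this commit use (`MaybeUninit` plus the exported `new`); the panic-style error handling is just the test's convention:

```rust
use std::mem::MaybeUninit;
use rln::ffi::new;
use rln::public::RLN;

// Build a stateless RLN context through the FFI entry point.
#[cfg(feature = "stateless")]
fn create_rln_instance() -> &'static mut RLN {
    let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
    let success = new(rln_pointer.as_mut_ptr());
    assert!(success, "RLN object creation failed");
    unsafe { &mut *rln_pointer.assume_init() }
}
```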
@@ -43,6 +43,7 @@ pub const RLN_IDENTIFIER: &[u8] = b"zerokit/rln/010203040506070809";
pub struct RLN {
    proving_key: (ProvingKey<Curve>, ConstraintMatrices<Fr>),
    pub(crate) verification_key: VerifyingKey<Curve>,
    #[cfg(not(feature = "stateless"))]
    pub(crate) tree: PoseidonTree,

    // The witness calculator can't be loaded in zerokit. Since this struct

@@ -66,12 +67,12 @@ impl RLN {
    /// use std::io::Cursor;
    ///
    /// let tree_height = 20;
-   /// let input = Cursor::new(json!({}).to_string());;
+   /// let input = Cursor::new(json!({}).to_string());
    ///
    /// // We create a new RLN instance
    /// let mut rln = RLN::new(tree_height, input);
    /// ```
-   #[cfg(not(target_arch = "wasm32"))]
+   #[cfg(all(not(target_arch = "wasm32"), not(feature = "stateless")))]
    pub fn new<R: Read>(tree_height: usize, mut input_data: R) -> Result<RLN> {
        // We read input
        let mut input: Vec<u8> = Vec::new();

@@ -102,12 +103,35 @@ impl RLN {
            witness_calculator: witness_calculator.to_owned(),
            proving_key: proving_key.to_owned(),
            verification_key: verification_key.to_owned(),
            #[cfg(not(feature = "stateless"))]
            tree,
            #[cfg(target_arch = "wasm32")]
            _marker: PhantomData,
        })
    }

    /// Creates a new stateless RLN object by loading circuit resources from a folder.
    /// ```
    /// // We create a new RLN instance
    /// let mut rln = RLN::new();
    /// ```
    #[cfg_attr(docsrs, doc(cfg(feature = "stateless")))]
    #[cfg(all(not(target_arch = "wasm32"), feature = "stateless"))]
    pub fn new() -> Result<RLN> {
        #[cfg(not(target_arch = "wasm32"))]
        let witness_calculator = circom_from_folder();
        let proving_key = zkey_from_folder();
        let verification_key = vk_from_folder();

        Ok(RLN {
            witness_calculator: witness_calculator.to_owned(),
            proving_key: proving_key.to_owned(),
            verification_key: verification_key.to_owned(),
            #[cfg(target_arch = "wasm32")]
            _marker: PhantomData,
        })
    }

    /// Creates a new RLN object by passing circuit resources as byte vectors.
    ///
    /// Input parameters are

@@ -145,7 +169,7 @@ impl RLN {
    ///     tree_config_input,
    /// );
    /// ```
-   #[cfg(not(target_arch = "wasm32"))]
+   #[cfg(all(not(target_arch = "wasm32"), not(feature = "stateless")))]
    pub fn new_with_params<R: Read>(
        tree_height: usize,
        circom_vec: Vec<u8>,

@@ -180,14 +204,16 @@ impl RLN {
            witness_calculator,
            proving_key,
            verification_key,
            #[cfg(not(feature = "stateless"))]
            tree,
            #[cfg(target_arch = "wasm32")]
            _marker: PhantomData,
        })
    }

-   #[cfg(target_arch = "wasm32")]
+   #[cfg(all(target_arch = "wasm32", not(feature = "stateless")))]
    pub fn new_with_params(tree_height: usize, zkey_vec: Vec<u8>, vk_vec: Vec<u8>) -> Result<RLN> {
        // TODO: check these lines while updating rln-wasm
        #[cfg(not(target_arch = "wasm32"))]
        let witness_calculator = circom_from_raw(circom_vec)?;
@@ -205,6 +231,93 @@ impl RLN {
        })
    }

    /// Creates a new stateless RLN object by passing circuit resources as byte vectors.
    ///
    /// Input parameters are
    /// - `circom_vec`: a byte vector containing the ZK circuit (`rln.wasm`) as a binary file
    /// - `zkey_vec`: a byte vector containing the proving key (`rln_final.zkey` or `rln_final.arkzkey`) as a binary file
    /// - `vk_vec`: a byte vector containing the verification key (`verification_key.arkvkey`) as a binary file
    ///
    /// Example:
    /// ```
    /// use std::fs::File;
    /// use std::io::Read;
    ///
    /// let resources_folder = "./resources/tree_height_20/";
    ///
    /// let mut resources: Vec<Vec<u8>> = Vec::new();
    /// for filename in ["rln.wasm", "rln_final.zkey", "verification_key.arkvkey"] {
    ///     let fullpath = format!("{resources_folder}{filename}");
    ///     let mut file = File::open(&fullpath).expect("no file found");
    ///     let metadata = std::fs::metadata(&fullpath).expect("unable to read metadata");
    ///     let mut buffer = vec![0; metadata.len() as usize];
    ///     file.read_exact(&mut buffer).expect("buffer overflow");
    ///     resources.push(buffer);
    /// }
    ///
    /// let mut rln = RLN::new_with_params(
    ///     resources[0].clone(),
    ///     resources[1].clone(),
    ///     resources[2].clone(),
    /// );
    /// ```
    #[cfg(all(not(target_arch = "wasm32"), feature = "stateless"))]
    pub fn new_with_params(circom_vec: Vec<u8>, zkey_vec: Vec<u8>, vk_vec: Vec<u8>) -> Result<RLN> {
        let witness_calculator = circom_from_raw(&circom_vec)?;

        let proving_key = zkey_from_raw(&zkey_vec)?;
        let verification_key = vk_from_raw(&vk_vec, &zkey_vec)?;

        Ok(RLN {
            witness_calculator,
            proving_key,
            verification_key,
        })
    }

    /// Creates a new stateless RLN object by passing circuit resources as byte vectors.
    ///
    /// Input parameters are
    /// - `zkey_vec`: a byte vector containing the proving key (`rln_final.zkey` or `rln_final.arkzkey`) as a binary file
    /// - `vk_vec`: a byte vector containing the verification key (`verification_key.arkvkey`) as a binary file
    ///
    /// Example:
    /// ```
    /// use std::fs::File;
    /// use std::io::Read;
    ///
    /// let resources_folder = "./resources/tree_height_20/";
    ///
    /// let mut resources: Vec<Vec<u8>> = Vec::new();
    /// for filename in ["rln_final.zkey", "verification_key.arkvkey"] {
    ///     let fullpath = format!("{resources_folder}{filename}");
    ///     let mut file = File::open(&fullpath).expect("no file found");
    ///     let metadata = std::fs::metadata(&fullpath).expect("unable to read metadata");
    ///     let mut buffer = vec![0; metadata.len() as usize];
    ///     file.read_exact(&mut buffer).expect("buffer overflow");
    ///     resources.push(buffer);
    /// }
    ///
    /// let mut rln = RLN::new_with_params(
    ///     resources[0].clone(),
    ///     resources[1].clone(),
    /// );
    /// ```
    #[cfg(all(target_arch = "wasm32", feature = "stateless"))]
    pub fn new_with_params(zkey_vec: Vec<u8>, vk_vec: Vec<u8>) -> Result<RLN> {
        #[cfg(not(target_arch = "wasm32"))]
        let witness_calculator = circom_from_raw(circom_vec)?;

        let proving_key = zkey_from_raw(&zkey_vec)?;
        let verification_key = vk_from_raw(&vk_vec, &zkey_vec)?;

        Ok(RLN {
            proving_key,
            verification_key,
            _marker: PhantomData,
        })
    }

    ////////////////////////////////////////////////////////
    // Merkle-tree APIs
    ////////////////////////////////////////////////////////
@ -214,6 +327,7 @@ impl RLN {
|
|||
///
|
||||
/// Input values are:
|
||||
/// - `tree_height`: the height of the Merkle tree.
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub fn set_tree(&mut self, tree_height: usize) -> Result<()> {
|
||||
// We compute a default empty tree of desired height
|
||||
self.tree = PoseidonTree::default(tree_height)?;
|
||||
|
@ -244,6 +358,7 @@ impl RLN {
|
|||
/// let mut buffer = Cursor::new(serialize_field_element(rate_commitment));
|
||||
/// rln.set_leaf(id_index, &mut buffer).unwrap();
|
||||
/// ```
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub fn set_leaf<R: Read>(&mut self, index: usize, mut input_data: R) -> Result<()> {
|
||||
// We read input
|
||||
let mut leaf_byte: Vec<u8> = Vec::new();
|
||||
|
@ -273,6 +388,7 @@ impl RLN {
|
|||
/// let mut buffer = Cursor::new(Vec::<u8>::new());
|
||||
/// rln.get_leaf(id_index, &mut buffer).unwrap();
|
||||
/// let rate_commitment = deserialize_field_element(&buffer.into_inner()).unwrap();
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub fn get_leaf<W: Write>(&self, index: usize, mut output_data: W) -> Result<()> {
|
||||
// We get the leaf at input index
|
||||
let leaf = self.tree.get(index)?;
|
||||
|
@ -315,6 +431,7 @@ impl RLN {
|
|||
/// let mut buffer = Cursor::new(vec_fr_to_bytes_le(&leaves));
|
||||
/// rln.set_leaves_from(index, &mut buffer).unwrap();
|
||||
/// ```
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub fn set_leaves_from<R: Read>(&mut self, index: usize, mut input_data: R) -> Result<()> {
|
||||
// We read input
|
||||
let mut leaves_byte: Vec<u8> = Vec::new();
|
||||
|
@ -335,6 +452,7 @@ impl RLN {
|
|||
///
|
||||
/// Input values are:
|
||||
/// - `input_data`: a reader for the serialization of multiple leaf values (serialization done with [`rln::utils::vec_fr_to_bytes_le`](crate::utils::vec_fr_to_bytes_le))
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub fn init_tree_with_leaves<R: Read>(&mut self, input_data: R) -> Result<()> {
|
||||
// reset the tree
|
||||
// NOTE: this requires the tree to be initialized with the correct height initially
|
||||
|
@ -385,6 +503,7 @@ impl RLN {
|
|||
/// let mut indices_buffer = Cursor::new(vec_u8_to_bytes_le(&indices));
|
||||
/// rln.atomic_operation(index, &mut leaves_buffer, indices_buffer).unwrap();
|
||||
/// ```
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub fn atomic_operation<R: Read>(
|
||||
&mut self,
|
||||
index: usize,
|
||||
|
@ -410,6 +529,7 @@ impl RLN {
|
|||
Ok(())
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub fn leaves_set(&mut self) -> usize {
|
||||
self.tree.leaves_set()
|
||||
}
|
||||
|
@ -457,6 +577,7 @@ impl RLN {
|
|||
/// let mut buffer = Cursor::new(fr_to_bytes_le(&rate_commitment));
|
||||
/// rln.set_next_leaf(&mut buffer).unwrap();
|
||||
/// ```
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub fn set_next_leaf<R: Read>(&mut self, mut input_data: R) -> Result<()> {
|
||||
// We read input
|
||||
let mut leaf_byte: Vec<u8> = Vec::new();
|
||||
|
@ -482,6 +603,7 @@ impl RLN {
|
|||
/// let index = 10;
|
||||
/// rln.delete_leaf(index).unwrap();
|
||||
/// ```
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub fn delete_leaf(&mut self, index: usize) -> Result<()> {
|
||||
self.tree.delete(index)?;
|
||||
Ok(())
|
||||
|
@ -499,6 +621,7 @@ impl RLN {
|
|||
/// let metadata = b"some metadata";
|
||||
/// rln.set_metadata(metadata).unwrap();
|
||||
/// ```
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub fn set_metadata(&mut self, metadata: &[u8]) -> Result<()> {
|
||||
self.tree.set_metadata(metadata)?;
|
||||
Ok(())
|
||||
|
@ -518,6 +641,7 @@ impl RLN {
|
|||
/// rln.get_metadata(&mut buffer).unwrap();
|
||||
/// let metadata = buffer.into_inner();
|
||||
/// ```
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub fn get_metadata<W: Write>(&self, mut output_data: W) -> Result<()> {
|
||||
let metadata = self.tree.metadata()?;
|
||||
output_data.write_all(&metadata)?;
|
||||
|
@ -537,6 +661,7 @@ impl RLN {
|
|||
/// rln.get_root(&mut buffer).unwrap();
|
||||
/// let (root, _) = bytes_le_to_fr(&buffer.into_inner());
|
||||
/// ```
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub fn get_root<W: Write>(&self, mut output_data: W) -> Result<()> {
|
||||
let root = self.tree.root();
|
||||
output_data.write_all(&fr_to_bytes_le(&root))?;
|
||||
|
@ -559,6 +684,7 @@ impl RLN {
|
|||
/// rln.get_subtree_root(level, index, &mut buffer).unwrap();
|
||||
/// let (subroot, _) = bytes_le_to_fr(&buffer.into_inner());
|
||||
/// ```
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub fn get_subtree_root<W: Write>(
|
||||
&self,
|
||||
level: usize,
|
||||
|
@ -592,6 +718,7 @@ impl RLN {
|
|||
/// let (path_elements, read) = bytes_le_to_vec_fr(&buffer_inner);
|
||||
/// let (identity_path_index, _) = bytes_le_to_vec_u8(&buffer_inner[read..].to_vec());
|
||||
/// ```
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub fn get_proof<W: Write>(&self, index: usize, mut output_data: W) -> Result<()> {
|
||||
let merkle_proof = self.tree.proof(index).expect("proof should exist");
|
||||
let path_elements = merkle_proof.get_path_elements();
|
||||
|
@ -635,6 +762,7 @@ impl RLN {
|
|||
/// let idxs = bytes_le_to_vec_usize(&buffer.into_inner()).unwrap();
|
||||
/// assert_eq!(idxs, [0, 1, 2, 3, 4]);
|
||||
/// ```
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub fn get_empty_leaves_indices<W: Write>(&self, mut output_data: W) -> Result<()> {
|
||||
let idxs = self.tree.get_empty_leaves_indices();
|
||||
idxs.serialize_compressed(&mut output_data)?;
|
||||
|
@ -793,7 +921,7 @@ impl RLN {
|
|||
/// // proof_data is [ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32>]
|
||||
/// let mut proof_data = output_buffer.into_inner();
|
||||
/// ```
|
||||
#[cfg(not(target_arch = "wasm32"))]
|
||||
#[cfg(all(not(target_arch = "wasm32"), not(feature = "stateless")))]
|
||||
pub fn generate_rln_proof<R: Read, W: Write>(
|
||||
&mut self,
|
||||
mut input_data: R,
|
||||
|
@ -890,6 +1018,7 @@ impl RLN {
|
|||
///
|
||||
/// assert!(verified);
|
||||
/// ```
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub fn verify_rln_proof<R: Read>(&self, mut input_data: R) -> Result<bool> {
|
||||
let mut serialized: Vec<u8> = Vec::new();
|
||||
input_data.read_to_end(&mut serialized)?;
|
||||
|
@ -1254,6 +1383,7 @@ impl RLN {
|
|||
/// - `input_data`: a reader for the serialization of `[ identity_secret<32> | id_index<8> | user_message_limit<32> | message_id<32> | external_nullifier<32> | signal_len<8> | signal<var> ]`
|
||||
///
|
||||
/// The function returns the corresponding [`RLNWitnessInput`](crate::protocol::RLNWitnessInput) object serialized using [`rln::protocol::serialize_witness`](crate::protocol::serialize_witness).
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub fn get_serialized_rln_witness<R: Read>(&mut self, mut input_data: R) -> Result<Vec<u8>> {
|
||||
// We read input RLN witness and we serialize_compressed it
|
||||
let mut witness_byte: Vec<u8> = Vec::new();
|
||||
|
@ -1293,6 +1423,7 @@ impl RLN {
|
|||
/// This function should be called before the RLN object is dropped.
|
||||
/// If not called, the connection will be closed when the RLN object is dropped.
|
||||
/// This improves robustness of the tree.
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
pub fn flush(&mut self) -> Result<()> {
|
||||
self.tree.close_db_connection()
|
||||
}
|
||||
|
@@ -1301,10 +1432,15 @@ impl RLN {
#[cfg(not(target_arch = "wasm32"))]
impl Default for RLN {
    fn default() -> Self {
        #[cfg(not(feature = "stateless"))]
        {
            let tree_height = TEST_TREE_HEIGHT;
            let buffer = Cursor::new(json!({}).to_string());
            Self::new(tree_height, buffer).unwrap()
        }
        #[cfg(feature = "stateless")]
        Self::new().unwrap()
    }
}

/// Hashes an input signal to an element in the working prime field.
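In a stateless build the caller keeps the Merkle tree outside the RLN object and hands a full witness to the prover. A condensed sketch of that flow, closely following the `test_stateless_rln_proof` test added later in this commit (identifiers are the crate's; the message limit and message id values are just the test's choices):

```rust
use std::io::Cursor;
use rln::circuit::{Fr, TEST_TREE_HEIGHT};
use rln::hashers::{hash_to_field, poseidon_hash};
use rln::poseidon_tree::PoseidonTree;
use rln::protocol::*;
use rln::public::RLN;
use utils::ZerokitMerkleTree;

type ConfigOf<T> = <T as ZerokitMerkleTree>::Config;

#[cfg(feature = "stateless")]
fn stateless_proof_flow() {
    // Stateless RLN: only circuit material, no tree inside.
    let mut rln = RLN::new().unwrap();

    // The Merkle tree is maintained by the caller.
    let mut tree = PoseidonTree::new(
        TEST_TREE_HEIGHT,
        Fr::from(0),
        ConfigOf::<PoseidonTree>::default(),
    )
    .unwrap();

    // Register a membership: rate commitment = Poseidon(id_commitment, limit).
    let (identity_secret_hash, id_commitment) = keygen();
    let user_message_limit = Fr::from(100);
    let identity_index = tree.leaves_set();
    tree.update_next(poseidon_hash(&[id_commitment, user_message_limit]))
        .unwrap();

    // Build the witness from the externally held Merkle proof and prove.
    let x = hash_to_field(b"signal");
    let external_nullifier =
        poseidon_hash(&[hash_to_field(b"epoch"), hash_to_field(b"rln-identifier")]);
    let merkle_proof = tree.proof(identity_index).expect("proof should exist");
    let witness = rln_witness_from_values(
        identity_secret_hash,
        &merkle_proof,
        x,
        external_nullifier,
        user_message_limit,
        Fr::from(1),
    )
    .unwrap();

    let mut input = Cursor::new(serialize_witness(&witness).unwrap());
    let mut output = Cursor::new(Vec::<u8>::new());
    rln.generate_rln_proof_with_witness(&mut input, &mut output)
        .unwrap();
}
```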
@ -1,21 +1,186 @@
|
|||
use crate::circuit::{Fr, TEST_TREE_HEIGHT};
|
||||
use crate::hashers::{hash_to_field, poseidon_hash as utils_poseidon_hash};
|
||||
use crate::circuit::TEST_TREE_HEIGHT;
|
||||
use crate::protocol::*;
|
||||
use crate::public::RLN;
|
||||
use crate::utils::*;
|
||||
use ark_groth16::Proof as ArkProof;
|
||||
use ark_serialize::{CanonicalDeserialize, Read};
|
||||
use ark_serialize::CanonicalDeserialize;
|
||||
use std::io::Cursor;
|
||||
use std::str::FromStr;
|
||||
use utils::ZerokitMerkleTree;
|
||||
|
||||
use ark_std::{rand::thread_rng, UniformRand};
|
||||
use rand::Rng;
|
||||
use serde_json::{json, Value};
|
||||
|
||||
fn fq_from_str(s: String) -> ark_bn254::Fq {
|
||||
ark_bn254::Fq::from_str(&s).unwrap()
|
||||
}
|
||||
|
||||
fn g1_from_str(g1: &[String]) -> ark_bn254::G1Affine {
|
||||
let x = fq_from_str(g1[0].clone());
|
||||
let y = fq_from_str(g1[1].clone());
|
||||
let z = fq_from_str(g1[2].clone());
|
||||
ark_bn254::G1Affine::from(ark_bn254::G1Projective::new(x, y, z))
|
||||
}
|
||||
|
||||
fn g2_from_str(g2: &[Vec<String>]) -> ark_bn254::G2Affine {
|
||||
let c0 = fq_from_str(g2[0][0].clone());
|
||||
let c1 = fq_from_str(g2[0][1].clone());
|
||||
let x = ark_bn254::Fq2::new(c0, c1);
|
||||
|
||||
let c0 = fq_from_str(g2[1][0].clone());
|
||||
let c1 = fq_from_str(g2[1][1].clone());
|
||||
let y = ark_bn254::Fq2::new(c0, c1);
|
||||
|
||||
let c0 = fq_from_str(g2[2][0].clone());
|
||||
let c1 = fq_from_str(g2[2][1].clone());
|
||||
let z = ark_bn254::Fq2::new(c0, c1);
|
||||
|
||||
ark_bn254::G2Affine::from(ark_bn254::G2Projective::new(x, y, z))
|
||||
}
|
||||
|
||||
fn value_to_string_vec(value: &Value) -> Vec<String> {
|
||||
value
|
||||
.as_array()
|
||||
.unwrap()
|
||||
.into_iter()
|
||||
.map(|val| val.as_str().unwrap().to_string())
|
||||
.collect()
|
||||
}
|
||||
|
||||
#[test]
|
||||
// We test merkle batch Merkle tree additions
|
||||
fn test_merkle_operations() {
|
||||
fn test_groth16_proof_hardcoded() {
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
let rln = RLN::new(TEST_TREE_HEIGHT, generate_input_buffer()).unwrap();
|
||||
#[cfg(feature = "stateless")]
|
||||
let rln = RLN::new().unwrap();
|
||||
|
||||
let valid_snarkjs_proof = json!({
|
||||
"pi_a": [
|
||||
"606446415626469993821291758185575230335423926365686267140465300918089871829",
|
||||
"14881534001609371078663128199084130129622943308489025453376548677995646280161",
|
||||
"1"
|
||||
],
|
||||
"pi_b": [
|
||||
[
|
||||
"18053812507994813734583839134426913715767914942522332114506614735770984570178",
|
||||
"11219916332635123001710279198522635266707985651975761715977705052386984005181"
|
||||
],
|
||||
[
|
||||
"17371289494006920912949790045699521359436706797224428511776122168520286372970",
|
||||
"14038575727257298083893642903204723310279435927688342924358714639926373603890"
|
||||
],
|
||||
[
|
||||
"1",
|
||||
"0"
|
||||
]
|
||||
],
|
||||
"pi_c": [
|
||||
"17701377127561410274754535747274973758826089226897242202671882899370780845888",
|
||||
"12608543716397255084418384146504333522628400182843246910626782513289789807030",
|
||||
"1"
|
||||
],
|
||||
"protocol": "groth16",
|
||||
"curve": "bn128"
|
||||
});
|
||||
let valid_ark_proof = ArkProof {
|
||||
a: g1_from_str(&value_to_string_vec(&valid_snarkjs_proof["pi_a"])),
|
||||
b: g2_from_str(
|
||||
&valid_snarkjs_proof["pi_b"]
|
||||
.as_array()
|
||||
.unwrap()
|
||||
.iter()
|
||||
.map(|item| value_to_string_vec(item))
|
||||
.collect::<Vec<Vec<String>>>(),
|
||||
),
|
||||
c: g1_from_str(&value_to_string_vec(&valid_snarkjs_proof["pi_c"])),
|
||||
};
|
||||
|
||||
let valid_proof_values = RLNProofValues {
|
||||
x: str_to_fr(
|
||||
"20645213238265527935869146898028115621427162613172918400241870500502509785943",
|
||||
10,
|
||||
)
|
||||
.unwrap(),
|
||||
external_nullifier: str_to_fr(
|
||||
"21074405743803627666274838159589343934394162804826017440941339048886754734203",
|
||||
10,
|
||||
)
|
||||
.unwrap(),
|
||||
y: str_to_fr(
|
||||
"16401008481486069296141645075505218976370369489687327284155463920202585288271",
|
||||
10,
|
||||
)
|
||||
.unwrap(),
|
||||
root: str_to_fr(
|
||||
"8502402278351299594663821509741133196466235670407051417832304486953898514733",
|
||||
10,
|
||||
)
|
||||
.unwrap(),
|
||||
nullifier: str_to_fr(
|
||||
"9102791780887227194595604713537772536258726662792598131262022534710887343694",
|
||||
10,
|
||||
)
|
||||
.unwrap(),
|
||||
};
|
||||
|
||||
let verified = verify_proof(&rln.verification_key, &valid_ark_proof, &valid_proof_values);
|
||||
assert!(verified.unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
// This test is similar to the one in lib, but uses only public API
|
||||
fn test_groth16_proof() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
let mut rln = RLN::new(tree_height, generate_input_buffer()).unwrap();
|
||||
#[cfg(feature = "stateless")]
|
||||
let mut rln = RLN::new().unwrap();
|
||||
|
||||
// Note: we only test Groth16 proof generation, so we ignore setting the tree in the RLN object
|
||||
let rln_witness = random_rln_witness(tree_height);
|
||||
let proof_values = proof_values_from_witness(&rln_witness).unwrap();
|
||||
|
||||
// We compute a Groth16 proof
|
||||
let mut input_buffer = Cursor::new(serialize_witness(&rln_witness).unwrap());
|
||||
let mut output_buffer = Cursor::new(Vec::<u8>::new());
|
||||
rln.prove(&mut input_buffer, &mut output_buffer).unwrap();
|
||||
let serialized_proof = output_buffer.into_inner();
|
||||
|
||||
// Before checking public verify API, we check that the (deserialized) proof generated by prove is actually valid
|
||||
let proof = ArkProof::deserialize_compressed(&mut Cursor::new(&serialized_proof)).unwrap();
|
||||
let verified = verify_proof(&rln.verification_key, &proof, &proof_values);
|
||||
// dbg!(verified.unwrap());
|
||||
assert!(verified.unwrap());
|
||||
|
||||
// We prepare the input to prove API, consisting of serialized_proof (compressed, 4*32 bytes) || serialized_proof_values (6*32 bytes)
|
||||
let serialized_proof_values = serialize_proof_values(&proof_values);
|
||||
let mut verify_data = Vec::<u8>::new();
|
||||
verify_data.extend(&serialized_proof);
|
||||
verify_data.extend(&serialized_proof_values);
|
||||
let mut input_buffer = Cursor::new(verify_data);
|
||||
|
||||
// We verify the Groth16 proof against the provided proof values
|
||||
let verified = rln.verify(&mut input_buffer).unwrap();
|
||||
|
||||
assert!(verified);
|
||||
}
|
||||
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
mod tree_test {
|
||||
use crate::circuit::{Fr, TEST_TREE_HEIGHT};
|
||||
use crate::hashers::{hash_to_field, poseidon_hash as utils_poseidon_hash};
|
||||
use crate::protocol::*;
|
||||
use crate::public::RLN;
|
||||
use crate::utils::*;
|
||||
use ark_serialize::Read;
|
||||
use std::io::Cursor;
|
||||
use utils::ZerokitMerkleTree;
|
||||
|
||||
use ark_std::{rand::thread_rng, UniformRand};
|
||||
use rand::Rng;
|
||||
|
||||
#[test]
|
||||
// We test merkle batch Merkle tree additions
|
||||
fn test_merkle_operations() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
let no_of_leaves = 256;
|
||||
|
||||
|
@ -100,12 +265,12 @@ fn test_merkle_operations() {
|
|||
let (root_empty, _) = bytes_le_to_fr(&buffer.into_inner());
|
||||
|
||||
assert_eq!(root_delete, root_empty);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
// We test leaf setting with a custom index, to enable batch updates to the root
|
||||
// Uses `set_leaves_from` to set leaves in a batch, from index `start_index`
|
||||
fn test_leaf_setting_with_index() {
|
||||
#[test]
|
||||
// We test leaf setting with a custom index, to enable batch updates to the root
|
||||
// Uses `set_leaves_from` to set leaves in a batch, from index `start_index`
|
||||
fn test_leaf_setting_with_index() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
let no_of_leaves = 256;
|
||||
|
||||
|
@ -175,11 +340,11 @@ fn test_leaf_setting_with_index() {
|
|||
assert_eq!(root_batch_with_init, root_single_additions);
|
||||
|
||||
rln.flush().unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
// Tests the atomic_operation fn, which set_leaves_from uses internally
|
||||
fn test_atomic_operation() {
|
||||
#[test]
|
||||
// Tests the atomic_operation fn, which set_leaves_from uses internally
|
||||
fn test_atomic_operation() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
let no_of_leaves = 256;
|
||||
|
||||
|
@ -224,10 +389,10 @@ fn test_atomic_operation() {
|
|||
let (root_after_noop, _) = bytes_le_to_fr(&buffer.into_inner());
|
||||
|
||||
assert_eq!(root_after_insertion, root_after_noop);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_atomic_operation_zero_indexed() {
|
||||
#[test]
|
||||
fn test_atomic_operation_zero_indexed() {
|
||||
// Test duplicated from https://github.com/waku-org/go-zerokit-rln/pull/12/files
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
let no_of_leaves = 256;
|
||||
|
@ -268,10 +433,10 @@ fn test_atomic_operation_zero_indexed() {
|
|||
let (root_after_deletion, _) = bytes_le_to_fr(&buffer.into_inner());
|
||||
|
||||
assert_ne!(root_after_insertion, root_after_deletion);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_atomic_operation_consistency() {
|
||||
#[test]
|
||||
fn test_atomic_operation_consistency() {
|
||||
// Test duplicated from https://github.com/waku-org/go-zerokit-rln/pull/12/files
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
let no_of_leaves = 256;
|
||||
|
@ -319,12 +484,12 @@ fn test_atomic_operation_consistency() {
|
|||
let (received_leaf, _) = bytes_le_to_fr(output_buffer.into_inner().as_ref());
|
||||
|
||||
assert_eq!(received_leaf, Fr::from(0));
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(unused_must_use)]
|
||||
#[test]
|
||||
// This test checks if `set_leaves_from` throws an error when the index is out of bounds
|
||||
fn test_set_leaves_bad_index() {
|
||||
#[allow(unused_must_use)]
|
||||
#[test]
|
||||
// This test checks if `set_leaves_from` throws an error when the index is out of bounds
|
||||
fn test_set_leaves_bad_index() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
let no_of_leaves = 256;
|
||||
|
||||
|
@ -358,161 +523,65 @@ fn test_set_leaves_bad_index() {
|
|||
let (root_after_bad_set, _) = bytes_le_to_fr(&buffer.into_inner());
|
||||
|
||||
assert_eq!(root_empty, root_after_bad_set);
|
||||
}
|
||||
}
|
||||
|
||||
fn fq_from_str(s: String) -> ark_bn254::Fq {
|
||||
ark_bn254::Fq::from_str(&s).unwrap()
|
||||
}
|
||||
#[test]
|
||||
fn test_get_leaf() {
|
||||
// We generate a random tree
|
||||
let tree_height = 10;
|
||||
let mut rng = thread_rng();
|
||||
let mut rln = RLN::new(tree_height, generate_input_buffer()).unwrap();
|
||||
|
||||
fn g1_from_str(g1: &[String]) -> ark_bn254::G1Affine {
|
||||
let x = fq_from_str(g1[0].clone());
|
||||
let y = fq_from_str(g1[1].clone());
|
||||
let z = fq_from_str(g1[2].clone());
|
||||
ark_bn254::G1Affine::from(ark_bn254::G1Projective::new(x, y, z))
|
||||
}
|
||||
// We generate a random leaf
|
||||
let leaf = Fr::rand(&mut rng);
|
||||
|
||||
fn g2_from_str(g2: &[Vec<String>]) -> ark_bn254::G2Affine {
|
||||
let c0 = fq_from_str(g2[0][0].clone());
|
||||
let c1 = fq_from_str(g2[0][1].clone());
|
||||
let x = ark_bn254::Fq2::new(c0, c1);
|
||||
// We generate a random index
|
||||
let index = rng.gen_range(0..rln.tree.capacity());
|
||||
|
||||
let c0 = fq_from_str(g2[1][0].clone());
|
||||
let c1 = fq_from_str(g2[1][1].clone());
|
||||
let y = ark_bn254::Fq2::new(c0, c1);
|
||||
// We add the leaf to the tree
|
||||
let mut buffer = Cursor::new(fr_to_bytes_le(&leaf));
|
||||
rln.set_leaf(index, &mut buffer).unwrap();
|
||||
|
||||
let c0 = fq_from_str(g2[2][0].clone());
|
||||
let c1 = fq_from_str(g2[2][1].clone());
|
||||
let z = ark_bn254::Fq2::new(c0, c1);
|
||||
// We get the leaf
|
||||
let mut output_buffer = Cursor::new(Vec::<u8>::new());
|
||||
rln.get_leaf(index, &mut output_buffer).unwrap();
|
||||
|
||||
ark_bn254::G2Affine::from(ark_bn254::G2Projective::new(x, y, z))
|
||||
}
|
||||
// We ensure that the leaf is the same as the one we added
|
||||
let (received_leaf, _) = bytes_le_to_fr(output_buffer.into_inner().as_ref());
|
||||
assert_eq!(received_leaf, leaf);
|
||||
}
|
||||
|
||||
fn value_to_string_vec(value: &Value) -> Vec<String> {
|
||||
value
|
||||
.as_array()
|
||||
.unwrap()
|
||||
.into_iter()
|
||||
.map(|val| val.as_str().unwrap().to_string())
|
||||
.collect()
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_groth16_proof_hardcoded() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
|
||||
let rln = RLN::new(tree_height, generate_input_buffer()).unwrap();
|
||||
|
||||
let valid_snarkjs_proof = json!({
|
||||
"pi_a": [
|
||||
"606446415626469993821291758185575230335423926365686267140465300918089871829",
|
||||
"14881534001609371078663128199084130129622943308489025453376548677995646280161",
|
||||
"1"
|
||||
],
|
||||
"pi_b": [
|
||||
[
|
||||
"18053812507994813734583839134426913715767914942522332114506614735770984570178",
|
||||
"11219916332635123001710279198522635266707985651975761715977705052386984005181"
|
||||
],
|
||||
[
|
||||
"17371289494006920912949790045699521359436706797224428511776122168520286372970",
|
||||
"14038575727257298083893642903204723310279435927688342924358714639926373603890"
|
||||
],
|
||||
[
|
||||
"1",
|
||||
"0"
|
||||
]
|
||||
],
|
||||
"pi_c": [
|
||||
"17701377127561410274754535747274973758826089226897242202671882899370780845888",
|
||||
"12608543716397255084418384146504333522628400182843246910626782513289789807030",
|
||||
"1"
|
||||
],
|
||||
"protocol": "groth16",
|
||||
"curve": "bn128"
|
||||
});
|
||||
let valid_ark_proof = ArkProof {
|
||||
a: g1_from_str(&value_to_string_vec(&valid_snarkjs_proof["pi_a"])),
|
||||
b: g2_from_str(
|
||||
&valid_snarkjs_proof["pi_b"]
|
||||
.as_array()
|
||||
.unwrap()
|
||||
.iter()
|
||||
.map(|item| value_to_string_vec(item))
|
||||
.collect::<Vec<Vec<String>>>(),
|
||||
),
|
||||
c: g1_from_str(&value_to_string_vec(&valid_snarkjs_proof["pi_c"])),
|
||||
};
|
||||
|
||||
let valid_proof_values = RLNProofValues {
|
||||
x: str_to_fr(
|
||||
"20645213238265527935869146898028115621427162613172918400241870500502509785943",
|
||||
10,
|
||||
)
|
||||
.unwrap(),
|
||||
external_nullifier: str_to_fr(
|
||||
"21074405743803627666274838159589343934394162804826017440941339048886754734203",
|
||||
10,
|
||||
)
|
||||
.unwrap(),
|
||||
y: str_to_fr(
|
||||
"16401008481486069296141645075505218976370369489687327284155463920202585288271",
|
||||
10,
|
||||
)
|
||||
.unwrap(),
|
||||
root: str_to_fr(
|
||||
"8502402278351299594663821509741133196466235670407051417832304486953898514733",
|
||||
10,
|
||||
)
|
||||
.unwrap(),
|
||||
nullifier: str_to_fr(
|
||||
"9102791780887227194595604713537772536258726662792598131262022534710887343694",
|
||||
10,
|
||||
)
|
||||
.unwrap(),
|
||||
};
|
||||
|
||||
let verified = verify_proof(&rln.verification_key, &valid_ark_proof, &valid_proof_values);
|
||||
assert!(verified.unwrap());
|
||||
}
|
||||
|
||||
#[test]
|
||||
// This test is similar to the one in lib, but uses only public API
|
||||
fn test_groth16_proof() {
|
||||
#[test]
|
||||
fn test_valid_metadata() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
|
||||
let mut rln = RLN::new(tree_height, generate_input_buffer()).unwrap();
|
||||
|
||||
// Note: we only test Groth16 proof generation, so we ignore setting the tree in the RLN object
|
||||
let rln_witness = random_rln_witness(tree_height);
|
||||
let proof_values = proof_values_from_witness(&rln_witness).unwrap();
|
||||
let arbitrary_metadata: &[u8] = b"block_number:200000";
|
||||
rln.set_metadata(arbitrary_metadata).unwrap();
|
||||
|
||||
// We compute a Groth16 proof
|
||||
let mut input_buffer = Cursor::new(serialize_witness(&rln_witness).unwrap());
|
||||
let mut output_buffer = Cursor::new(Vec::<u8>::new());
|
||||
rln.prove(&mut input_buffer, &mut output_buffer).unwrap();
|
||||
let serialized_proof = output_buffer.into_inner();
|
||||
let mut buffer = Cursor::new(Vec::<u8>::new());
|
||||
rln.get_metadata(&mut buffer).unwrap();
|
||||
let received_metadata = buffer.into_inner();
|
||||
|
||||
// Before checking public verify API, we check that the (deserialized) proof generated by prove is actually valid
|
||||
let proof = ArkProof::deserialize_compressed(&mut Cursor::new(&serialized_proof)).unwrap();
|
||||
let verified = verify_proof(&rln.verification_key, &proof, &proof_values);
|
||||
// dbg!(verified.unwrap());
|
||||
assert!(verified.unwrap());
|
||||
assert_eq!(arbitrary_metadata, received_metadata);
|
||||
}
|
||||
|
||||
// We prepare the input to prove API, consisting of serialized_proof (compressed, 4*32 bytes) || serialized_proof_values (6*32 bytes)
|
||||
let serialized_proof_values = serialize_proof_values(&proof_values);
|
||||
let mut verify_data = Vec::<u8>::new();
|
||||
verify_data.extend(&serialized_proof);
|
||||
verify_data.extend(&serialized_proof_values);
|
||||
let mut input_buffer = Cursor::new(verify_data);
|
||||
#[test]
|
||||
fn test_empty_metadata() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
|
||||
// We verify the Groth16 proof against the provided proof values
|
||||
let verified = rln.verify(&mut input_buffer).unwrap();
|
||||
let rln = RLN::new(tree_height, generate_input_buffer()).unwrap();
|
||||
|
||||
assert!(verified);
|
||||
}
|
||||
let mut buffer = Cursor::new(Vec::<u8>::new());
|
||||
rln.get_metadata(&mut buffer).unwrap();
|
||||
let received_metadata = buffer.into_inner();
|
||||
|
||||
#[test]
|
||||
fn test_rln_proof() {
|
||||
assert_eq!(received_metadata.len(), 0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_rln_proof() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
let no_of_leaves = 256;
|
||||
|
||||
|
@ -582,10 +651,10 @@ fn test_rln_proof() {
|
|||
let verified = rln.verify_rln_proof(&mut input_buffer).unwrap();
|
||||
|
||||
assert!(verified);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_rln_with_witness() {
|
||||
#[test]
|
||||
fn test_rln_with_witness() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
let no_of_leaves = 256;
|
||||
|
||||
|
@ -664,10 +733,10 @@ fn test_rln_with_witness() {
|
|||
let verified = rln.verify_rln_proof(&mut input_buffer).unwrap();
|
||||
|
||||
assert!(verified);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn proof_verification_with_roots() {
|
||||
#[test]
|
||||
fn proof_verification_with_roots() {
|
||||
// The first part is similar to test_rln_with_witness
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
let no_of_leaves = 256;
|
||||
|
@ -765,10 +834,10 @@ fn proof_verification_with_roots() {
|
|||
.unwrap();
|
||||
|
||||
assert!(verified);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_recover_id_secret() {
|
||||
#[test]
|
||||
fn test_recover_id_secret() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
|
||||
// We create a new RLN instance
|
||||
|
@ -814,7 +883,7 @@ fn test_recover_id_secret() {
|
|||
serialized1.append(&mut normalize_usize(signal1.len()));
|
||||
serialized1.append(&mut signal1.to_vec());
|
||||
|
||||
// We attach the second signal to the first proof input
|
||||
// We attach the second signal to the second proof input
|
||||
serialized2.append(&mut normalize_usize(signal2.len()));
|
||||
serialized2.append(&mut signal2.to_vec());
|
||||
|
||||
|
@ -895,64 +964,263 @@ fn test_recover_id_secret() {
|
|||
.unwrap();
|
||||
|
||||
let serialized_identity_secret_hash = output_buffer.into_inner();
|
||||
let (recovered_identity_secret_hash_new, _) = bytes_le_to_fr(&serialized_identity_secret_hash);
|
||||
let (recovered_identity_secret_hash_new, _) =
|
||||
bytes_le_to_fr(&serialized_identity_secret_hash);
|
||||
|
||||
// ensure that the recovered secret does not match with either of the
|
||||
// used secrets in proof generation
|
||||
assert_ne!(recovered_identity_secret_hash_new, identity_secret_hash_new);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_leaf() {
|
||||
// We generate a random tree
|
||||
let tree_height = 10;
|
||||
#[cfg(feature = "stateless")]
|
||||
mod stateless_test {
|
||||
use crate::circuit::{Fr, TEST_TREE_HEIGHT};
|
||||
use crate::hashers::{hash_to_field, poseidon_hash as utils_poseidon_hash};
|
||||
use crate::poseidon_tree::PoseidonTree;
|
||||
use crate::protocol::*;
|
||||
use crate::public::RLN;
|
||||
use crate::utils::*;
|
||||
use std::io::Cursor;
|
||||
use utils::ZerokitMerkleTree;
|
||||
|
||||
use ark_std::{rand::thread_rng, UniformRand};
|
||||
use rand::Rng;
|
||||
|
||||
type ConfigOf<T> = <T as ZerokitMerkleTree>::Config;
|
||||
|
||||
#[test]
|
||||
fn test_stateless_rln_proof() {
|
||||
// We create a new RLN instance
|
||||
let mut rln = RLN::new().unwrap();
|
||||
|
||||
let default_leaf = Fr::from(0);
|
||||
let mut tree = PoseidonTree::new(
|
||||
TEST_TREE_HEIGHT,
|
||||
default_leaf,
|
||||
ConfigOf::<PoseidonTree>::default(),
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
// Generate identity pair
|
||||
let (identity_secret_hash, id_commitment) = keygen();
|
||||
|
||||
// We set as leaf id_commitment after storing its index
|
||||
let identity_index = tree.leaves_set();
|
||||
let user_message_limit = Fr::from(100);
|
||||
let rate_commitment = utils_poseidon_hash(&[id_commitment, user_message_limit]);
|
||||
tree.update_next(rate_commitment).unwrap();
|
||||
|
||||
// We generate a random signal
|
||||
let mut rng = thread_rng();
|
||||
let mut rln = RLN::new(tree_height, generate_input_buffer()).unwrap();
|
||||
let signal: [u8; 32] = rng.gen();
|
||||
|
||||
// We generate a random leaf
|
||||
let leaf = Fr::rand(&mut rng);
|
||||
// We generate a random epoch
|
||||
let epoch = hash_to_field(b"test-epoch");
|
||||
// We generate a random rln_identifier
|
||||
let rln_identifier = hash_to_field(b"test-rln-identifier");
|
||||
let external_nullifier = utils_poseidon_hash(&[epoch, rln_identifier]);
|
||||
|
||||
// We generate a random index
|
||||
let index = rng.gen_range(0..rln.tree.capacity());
|
||||
// We prepare input for generate_rln_proof API
|
||||
// input_data is [ identity_secret<32> | id_index<8> | external_nullifier<32> | user_message_limit<32> | message_id<32> | signal_len<8> | signal<var> ]
|
||||
|
||||
// We add the leaf to the tree
|
||||
let mut buffer = Cursor::new(fr_to_bytes_le(&leaf));
|
||||
rln.set_leaf(index, &mut buffer).unwrap();
|
||||
let x = hash_to_field(&signal);
|
||||
let merkle_proof = tree.proof(identity_index).expect("proof should exist");
|
||||
|
||||
// We get the leaf
|
||||
let rln_witness = rln_witness_from_values(
|
||||
identity_secret_hash,
|
||||
&merkle_proof,
|
||||
x,
|
||||
external_nullifier,
|
||||
user_message_limit,
|
||||
Fr::from(1),
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let serialized = serialize_witness(&rln_witness).unwrap();
|
||||
let mut input_buffer = Cursor::new(serialized);
|
||||
let mut output_buffer = Cursor::new(Vec::<u8>::new());
|
||||
rln.get_leaf(index, &mut output_buffer).unwrap();
|
||||
rln.generate_rln_proof_with_witness(&mut input_buffer, &mut output_buffer)
|
||||
.unwrap();
|
||||
|
||||
// We ensure that the leaf is the same as the one we added
|
||||
let (received_leaf, _) = bytes_le_to_fr(output_buffer.into_inner().as_ref());
|
||||
assert_eq!(received_leaf, leaf);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_valid_metadata() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
|
||||
let mut rln = RLN::new(tree_height, generate_input_buffer()).unwrap();
|
||||
|
||||
let arbitrary_metadata: &[u8] = b"block_number:200000";
|
||||
rln.set_metadata(arbitrary_metadata).unwrap();
|
||||
|
||||
let mut buffer = Cursor::new(Vec::<u8>::new());
|
||||
rln.get_metadata(&mut buffer).unwrap();
|
||||
let received_metadata = buffer.into_inner();
|
||||
|
||||
assert_eq!(arbitrary_metadata, received_metadata);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_empty_metadata() {
|
||||
let tree_height = TEST_TREE_HEIGHT;
|
||||
|
||||
let rln = RLN::new(tree_height, generate_input_buffer()).unwrap();
|
||||
|
||||
let mut buffer = Cursor::new(Vec::<u8>::new());
|
||||
rln.get_metadata(&mut buffer).unwrap();
|
||||
let received_metadata = buffer.into_inner();
|
||||
|
||||
assert_eq!(received_metadata.len(), 0);
|
||||
// output_data is [ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> ]
|
||||
let mut proof_data = output_buffer.into_inner();
|
||||
|
||||
// We prepare input for verify_rln_proof API
|
||||
// input_data is [ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> | signal_len<8> | signal<var> ]
|
||||
// that is [ proof_data || signal_len<8> | signal<var> ]
|
||||
proof_data.append(&mut normalize_usize(signal.len()));
|
||||
proof_data.append(&mut signal.to_vec());
|
||||
let input_buffer = Cursor::new(proof_data);
|
||||
|
||||
// If no roots is provided, proof validation is skipped and if the remaining proof values are valid, the proof will be correctly verified
|
||||
let mut roots_serialized: Vec<u8> = Vec::new();
|
||||
let mut roots_buffer = Cursor::new(roots_serialized.clone());
|
||||
let verified = rln
|
||||
.verify_with_roots(&mut input_buffer.clone(), &mut roots_buffer)
|
||||
.unwrap();
|
||||
|
||||
assert!(verified);
|
||||
|
||||
// We serialize in the roots buffer some random values and we check that the proof is not verified since doesn't contain the correct root the proof refers to
|
||||
for _ in 0..5 {
|
||||
roots_serialized.append(&mut fr_to_bytes_le(&Fr::rand(&mut rng)));
|
||||
}
|
||||
roots_buffer = Cursor::new(roots_serialized.clone());
|
||||
let verified = rln
|
||||
.verify_with_roots(&mut input_buffer.clone(), &mut roots_buffer)
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(verified, false);
|
||||
|
||||
// We get the root of the tree obtained adding one leaf per time
|
||||
let root = tree.root();
|
||||
|
||||
// We add the real root and we check if now the proof is verified
|
||||
roots_serialized.append(&mut fr_to_bytes_le(&root));
|
||||
roots_buffer = Cursor::new(roots_serialized.clone());
|
||||
let verified = rln
|
||||
.verify_with_roots(&mut input_buffer.clone(), &mut roots_buffer)
|
||||
.unwrap();
|
||||
|
||||
assert!(verified);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_stateless_recover_id_secret() {
|
||||
// We create a new RLN instance
|
||||
let mut rln = RLN::new().unwrap();
|
||||
|
||||
let default_leaf = Fr::from(0);
|
||||
let mut tree = PoseidonTree::new(
|
||||
TEST_TREE_HEIGHT,
|
||||
default_leaf,
|
||||
ConfigOf::<PoseidonTree>::default(),
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
// Generate identity pair
|
||||
let (identity_secret_hash, id_commitment) = keygen();
|
||||
let user_message_limit = Fr::from(100);
|
||||
let rate_commitment = utils_poseidon_hash(&[id_commitment, user_message_limit]);
|
||||
tree.update_next(rate_commitment).unwrap();
|
||||
|
||||
// We generate a random epoch
|
||||
let epoch = hash_to_field(b"test-epoch");
|
||||
// We generate a random rln_identifier
|
||||
let rln_identifier = hash_to_field(b"test-rln-identifier");
|
||||
let external_nullifier = utils_poseidon_hash(&[epoch, rln_identifier]);
|
||||
|
||||
// We generate a random signal
|
||||
let mut rng = thread_rng();
|
||||
let signal1: [u8; 32] = rng.gen();
|
||||
let x1 = hash_to_field(&signal1);
|
||||
|
||||
let signal2: [u8; 32] = rng.gen();
|
||||
let x2 = hash_to_field(&signal2);
|
||||
|
||||
let identity_index = tree.leaves_set();
|
||||
let merkle_proof = tree.proof(identity_index).expect("proof should exist");
|
||||
|
||||
let rln_witness1 = rln_witness_from_values(
|
||||
identity_secret_hash,
|
||||
&merkle_proof,
|
||||
x1,
|
||||
external_nullifier,
|
||||
user_message_limit,
|
||||
Fr::from(1),
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let rln_witness2 = rln_witness_from_values(
|
||||
identity_secret_hash,
|
||||
&merkle_proof,
|
||||
x2,
|
||||
external_nullifier,
|
||||
user_message_limit,
|
||||
Fr::from(1),
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let serialized = serialize_witness(&rln_witness1).unwrap();
|
||||
let mut input_buffer = Cursor::new(serialized);
|
||||
let mut output_buffer = Cursor::new(Vec::<u8>::new());
|
||||
rln.generate_rln_proof_with_witness(&mut input_buffer, &mut output_buffer)
|
||||
.unwrap();
|
||||
let proof_data_1 = output_buffer.into_inner();
|
||||
|
||||
let serialized = serialize_witness(&rln_witness2).unwrap();
|
||||
let mut input_buffer = Cursor::new(serialized);
|
||||
let mut output_buffer = Cursor::new(Vec::<u8>::new());
|
||||
rln.generate_rln_proof_with_witness(&mut input_buffer, &mut output_buffer)
|
||||
.unwrap();
|
||||
let proof_data_2 = output_buffer.into_inner();
|
||||
|
||||
let mut input_proof_data_1 = Cursor::new(proof_data_1.clone());
|
||||
let mut input_proof_data_2 = Cursor::new(proof_data_2);
|
||||
let mut output_buffer = Cursor::new(Vec::<u8>::new());
|
||||
rln.recover_id_secret(
|
||||
&mut input_proof_data_1,
|
||||
&mut input_proof_data_2,
|
||||
&mut output_buffer,
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let serialized_identity_secret_hash = output_buffer.into_inner();
|
||||
|
||||
// We ensure that a non-empty value is written to output_buffer
|
||||
assert!(!serialized_identity_secret_hash.is_empty());
|
||||
|
||||
// We check if the recovered identity secret hash corresponds to the original one
|
||||
let (recovered_identity_secret_hash, _) = bytes_le_to_fr(&serialized_identity_secret_hash);
|
||||
assert_eq!(recovered_identity_secret_hash, identity_secret_hash);
|
||||
|
||||
// We now test that computing identity_secret_hash is unsuccessful if shares computed from two different identity secret hashes but within same epoch are passed
|
||||
|
||||
// We generate a new identity pair
|
||||
let (identity_secret_hash_new, id_commitment_new) = keygen();
|
||||
let rate_commitment_new = utils_poseidon_hash(&[id_commitment_new, user_message_limit]);
|
||||
tree.update_next(rate_commitment_new).unwrap();
|
||||
|
||||
let signal3: [u8; 32] = rng.gen();
|
||||
let x3 = hash_to_field(&signal3);
|
||||
|
||||
let identity_index_new = tree.leaves_set();
|
||||
let merkle_proof_new = tree.proof(identity_index_new).expect("proof should exist");
|
||||
|
||||
let rln_witness3 = rln_witness_from_values(
|
||||
identity_secret_hash_new,
|
||||
&merkle_proof_new,
|
||||
x3,
|
||||
external_nullifier,
|
||||
user_message_limit,
|
||||
Fr::from(1),
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let serialized = serialize_witness(&rln_witness3).unwrap();
|
||||
let mut input_buffer = Cursor::new(serialized);
|
||||
let mut output_buffer = Cursor::new(Vec::<u8>::new());
|
||||
rln.generate_rln_proof_with_witness(&mut input_buffer, &mut output_buffer)
|
||||
.unwrap();
|
||||
let proof_data_3 = output_buffer.into_inner();
|
||||
|
||||
let mut input_proof_data_1 = Cursor::new(proof_data_1.clone());
|
||||
let mut input_proof_data_3 = Cursor::new(proof_data_3);
|
||||
let mut output_buffer = Cursor::new(Vec::<u8>::new());
|
||||
rln.recover_id_secret(
|
||||
&mut input_proof_data_1,
|
||||
&mut input_proof_data_3,
|
||||
&mut output_buffer,
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let serialized_identity_secret_hash = output_buffer.into_inner();
|
||||
let (recovered_identity_secret_hash_new, _) =
|
||||
bytes_le_to_fr(&serialized_identity_secret_hash);
|
||||
|
||||
// ensure that the recovered secret does not match with either of the
|
||||
// used secrets in proof generation
|
||||
assert_ne!(recovered_identity_secret_hash_new, identity_secret_hash_new);
|
||||
}
|
||||
}
|
||||
|
|
477 rln/tests/ffi.rs
@ -1,4 +1,5 @@
|
|||
#[cfg(test)]
|
||||
#[cfg(not(feature = "stateless"))]
|
||||
mod test {
|
||||
use ark_std::{rand::thread_rng, UniformRand};
|
||||
use rand::Rng;
|
||||
|
@ -529,7 +530,7 @@ mod test {
|
|||
|
||||
#[test]
|
||||
// Computes and verifies an RLN ZK proof by checking proof's root against an input roots buffer
|
||||
fn test_verify_with_roots() {
|
||||
fn test_verify_with_roots_ffi() {
|
||||
// First part similar to test_rln_proof_ffi
|
||||
let user_message_limit = Fr::from(100);
|
||||
|
||||
|
@ -905,7 +906,7 @@ mod test {
|
|||
}
|
||||
|
||||
#[test]
|
||||
fn test_get_leaf() {
|
||||
fn test_get_leaf_ffi() {
|
||||
// We create a RLN instance
|
||||
let no_of_leaves = 1 << TEST_TREE_HEIGHT;
|
||||
|
||||
|
@@ -945,7 +946,7 @@ mod test {
    }

    #[test]
    fn test_valid_metadata() {
    fn test_valid_metadata_ffi() {
        // We create a RLN instance
        let rln_pointer = create_rln_instance();

@@ -966,7 +967,7 @@ mod test {
    }

    #[test]
    fn test_empty_metadata() {
    fn test_empty_metadata_ffi() {
        // We create a RLN instance
        let rln_pointer = create_rln_instance();

@@ -978,3 +979,471 @@ mod test {
        assert_eq!(output_buffer.len, 0);
    }
}

#[cfg(test)]
#[cfg(feature = "stateless")]
mod stateless_test {
    use ark_std::{rand::thread_rng, UniformRand};
    use rand::Rng;
    use rln::circuit::*;
    use rln::ffi::generate_rln_proof_with_witness;
    use rln::ffi::{hash as ffi_hash, poseidon_hash as ffi_poseidon_hash, *};
    use rln::hashers::{hash_to_field, poseidon_hash as utils_poseidon_hash, ROUND_PARAMS};
    use rln::poseidon_tree::PoseidonTree;
    use rln::protocol::*;
    use rln::public::RLN;
    use rln::utils::*;
    use std::mem::MaybeUninit;
    use std::time::{Duration, Instant};
    use utils::ZerokitMerkleTree;

    type ConfigOf<T> = <T as ZerokitMerkleTree>::Config;

    fn create_rln_instance() -> &'static mut RLN {
        let mut rln_pointer = MaybeUninit::<*mut RLN>::uninit();
        let success = new(rln_pointer.as_mut_ptr());
        assert!(success, "RLN object creation failed");
        unsafe { &mut *rln_pointer.assume_init() }
    }

    fn identity_pair_gen(rln_pointer: &mut RLN) -> (Fr, Fr) {
        let mut output_buffer = MaybeUninit::<Buffer>::uninit();
        let success = key_gen(rln_pointer, output_buffer.as_mut_ptr());
        assert!(success, "key gen call failed");
        let output_buffer = unsafe { output_buffer.assume_init() };
        let result_data = <&[u8]>::from(&output_buffer).to_vec();
        let (identity_secret_hash, read) = bytes_le_to_fr(&result_data);
        let (id_commitment, _) = bytes_le_to_fr(&result_data[read..].to_vec());
        (identity_secret_hash, id_commitment)
    }

    fn rln_proof_gen_with_witness(rln_pointer: &mut RLN, serialized: &[u8]) -> Vec<u8> {
        let input_buffer = &Buffer::from(serialized);
        let mut output_buffer = MaybeUninit::<Buffer>::uninit();
        let success =
            generate_rln_proof_with_witness(rln_pointer, input_buffer, output_buffer.as_mut_ptr());
        assert!(success, "generate rln proof call failed");
        let output_buffer = unsafe { output_buffer.assume_init() };
        <&[u8]>::from(&output_buffer).to_vec()
    }

    #[test]
    fn test_recover_id_secret_stateless_ffi() {
        let default_leaf = Fr::from(0);
        let mut tree = PoseidonTree::new(
            TEST_TREE_HEIGHT,
            default_leaf,
            ConfigOf::<PoseidonTree>::default(),
        )
        .unwrap();

        let rln_pointer = create_rln_instance();

        // We generate a new identity pair
        let (identity_secret_hash, id_commitment) = identity_pair_gen(rln_pointer);

        let user_message_limit = Fr::from(100);
        let rate_commitment = utils_poseidon_hash(&[id_commitment, user_message_limit]);
        tree.update_next(rate_commitment).unwrap();

        // We generate a random epoch
        let epoch = hash_to_field(b"test-epoch");
        let rln_identifier = hash_to_field(b"test-rln-identifier");
        let external_nullifier = utils_poseidon_hash(&[epoch, rln_identifier]);

        // We generate two proofs using same epoch but different signals.
        // We generate a random signal
        let mut rng = thread_rng();
        let signal1: [u8; 32] = rng.gen();
        let x1 = hash_to_field(&signal1);

        let signal2: [u8; 32] = rng.gen();
        let x2 = hash_to_field(&signal2);

        let identity_index = tree.leaves_set();
        let merkle_proof = tree.proof(identity_index).expect("proof should exist");

        // We prepare input for the generate_rln_proof_with_witness API
        let rln_witness1 = rln_witness_from_values(
            identity_secret_hash,
            &merkle_proof,
            x1,
            external_nullifier,
            user_message_limit,
            Fr::from(1),
        )
        .unwrap();
        let serialized1 = serialize_witness(&rln_witness1).unwrap();

        let rln_witness2 = rln_witness_from_values(
            identity_secret_hash,
            &merkle_proof,
            x2,
            external_nullifier,
            user_message_limit,
            Fr::from(1),
        )
        .unwrap();
        let serialized2 = serialize_witness(&rln_witness2).unwrap();

        // We call generate_rln_proof_with_witness for the first proof values
        // result_data is [ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> ]
        let proof_data_1 = rln_proof_gen_with_witness(rln_pointer, serialized1.as_ref());

        // We call generate_rln_proof_with_witness for the second proof values
        // result_data is [ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> ]
        let proof_data_2 = rln_proof_gen_with_witness(rln_pointer, serialized2.as_ref());

        let input_proof_buffer_1 = &Buffer::from(proof_data_1.as_ref());
        let input_proof_buffer_2 = &Buffer::from(proof_data_2.as_ref());
        let mut output_buffer = MaybeUninit::<Buffer>::uninit();
        let success = recover_id_secret(
            rln_pointer,
            input_proof_buffer_1,
            input_proof_buffer_2,
            output_buffer.as_mut_ptr(),
        );
        assert!(success, "recover id secret call failed");
        let output_buffer = unsafe { output_buffer.assume_init() };
        let serialized_identity_secret_hash = <&[u8]>::from(&output_buffer).to_vec();

        // We passed two shares for the same secret, so recovery should be successful
        // To check it, we ensure that the recovered identity secret hash is not empty
        assert!(!serialized_identity_secret_hash.is_empty());

        // We check if the recovered identity secret hash corresponds to the original one
        let (recovered_identity_secret_hash, _) = bytes_le_to_fr(&serialized_identity_secret_hash);
        assert_eq!(recovered_identity_secret_hash, identity_secret_hash);

        // We now test that computing identity_secret_hash is unsuccessful if shares computed from two different identity secret hashes but within same epoch are passed

        // We generate a new identity pair
        let (identity_secret_hash_new, id_commitment_new) = identity_pair_gen(rln_pointer);
        let rate_commitment_new = utils_poseidon_hash(&[id_commitment_new, user_message_limit]);
        tree.update_next(rate_commitment_new).unwrap();

        // We generate a random signal
        let signal3: [u8; 32] = rng.gen();
        let x3 = hash_to_field(&signal3);

        let identity_index_new = tree.leaves_set();
        let merkle_proof_new = tree.proof(identity_index_new).expect("proof should exist");

        let rln_witness3 = rln_witness_from_values(
            identity_secret_hash_new,
            &merkle_proof_new,
            x3,
            external_nullifier,
            user_message_limit,
            Fr::from(1),
        )
        .unwrap();
        let serialized3 = serialize_witness(&rln_witness3).unwrap();

        // We call generate_rln_proof_with_witness
        // result_data is [ proof<128> | root<32> | external_nullifier<32> | x<32> | y<32> | nullifier<32> ]
        let proof_data_3 = rln_proof_gen_with_witness(rln_pointer, serialized3.as_ref());

        // We attempt to recover the secret using share1 (coming from identity_secret_hash) and share3 (coming from identity_secret_hash_new)

        let input_proof_buffer_1 = &Buffer::from(proof_data_1.as_ref());
        let input_proof_buffer_3 = &Buffer::from(proof_data_3.as_ref());
        let mut output_buffer = MaybeUninit::<Buffer>::uninit();
        let success = recover_id_secret(
            rln_pointer,
            input_proof_buffer_1,
            input_proof_buffer_3,
            output_buffer.as_mut_ptr(),
        );
        assert!(success, "recover id secret call failed");
        let output_buffer = unsafe { output_buffer.assume_init() };
        let serialized_identity_secret_hash = <&[u8]>::from(&output_buffer).to_vec();
        let (recovered_identity_secret_hash_new, _) =
            bytes_le_to_fr(&serialized_identity_secret_hash);

        // We ensure that the recovered secret does not match either of the
        // secrets used in proof generation
        assert_ne!(recovered_identity_secret_hash_new, identity_secret_hash_new);
    }

    #[test]
    fn test_verify_with_roots_stateless_ffi() {
        let default_leaf = Fr::from(0);
        let mut tree = PoseidonTree::new(
            TEST_TREE_HEIGHT,
            default_leaf,
            ConfigOf::<PoseidonTree>::default(),
        )
        .unwrap();

        let rln_pointer = create_rln_instance();

        // We generate a new identity pair
        let (identity_secret_hash, id_commitment) = identity_pair_gen(rln_pointer);

        let identity_index = tree.leaves_set();
        let user_message_limit = Fr::from(100);
        let rate_commitment = utils_poseidon_hash(&[id_commitment, user_message_limit]);
        tree.update_next(rate_commitment).unwrap();

        // We generate a random epoch
        let epoch = hash_to_field(b"test-epoch");
        let rln_identifier = hash_to_field(b"test-rln-identifier");
        let external_nullifier = utils_poseidon_hash(&[epoch, rln_identifier]);

        // We generate a random signal
        let mut rng = thread_rng();
        let signal: [u8; 32] = rng.gen();
        let x = hash_to_field(&signal);

        let merkle_proof = tree.proof(identity_index).expect("proof should exist");

        // We prepare input for the generate_rln_proof_with_witness API
        let rln_witness = rln_witness_from_values(
            identity_secret_hash,
            &merkle_proof,
            x,
            external_nullifier,
            user_message_limit,
            Fr::from(1),
        )
        .unwrap();

        let serialized = serialize_witness(&rln_witness).unwrap();
        let mut proof_data = rln_proof_gen_with_witness(rln_pointer, serialized.as_ref());

        proof_data.append(&mut normalize_usize(signal.len()));
        proof_data.append(&mut signal.to_vec());

        // If no roots are provided, root validation is skipped and, if the remaining proof values are valid, the proof is correctly verified
        let mut roots_data: Vec<u8> = Vec::new();

        let input_buffer = &Buffer::from(proof_data.as_ref());
        let roots_buffer = &Buffer::from(roots_data.as_ref());
        let mut proof_is_valid: bool = false;
        let proof_is_valid_ptr = &mut proof_is_valid as *mut bool;
        let success =
            verify_with_roots(rln_pointer, input_buffer, roots_buffer, proof_is_valid_ptr);
        assert!(success, "verify call failed");
        // Proof should be valid
        assert_eq!(proof_is_valid, true);

        // We serialize some random values into the roots buffer and check that the proof is not verified, since the buffer does not contain the root the proof refers to
        for _ in 0..5 {
            roots_data.append(&mut fr_to_bytes_le(&Fr::rand(&mut rng)));
        }
        let input_buffer = &Buffer::from(proof_data.as_ref());
        let roots_buffer = &Buffer::from(roots_data.as_ref());
        let mut proof_is_valid: bool = false;
        let proof_is_valid_ptr = &mut proof_is_valid as *mut bool;
        let success =
            verify_with_roots(rln_pointer, input_buffer, roots_buffer, proof_is_valid_ptr);
        assert!(success, "verify call failed");
        // Proof should be invalid.
        assert_eq!(proof_is_valid, false);

        // We get the root of the tree obtained by adding one leaf at a time
        let root = tree.root();

        // We add the real root and check that the proof is now verified
        roots_data.append(&mut fr_to_bytes_le(&root));
        let input_buffer = &Buffer::from(proof_data.as_ref());
        let roots_buffer = &Buffer::from(roots_data.as_ref());
        let mut proof_is_valid: bool = false;
        let proof_is_valid_ptr = &mut proof_is_valid as *mut bool;
        let success =
            verify_with_roots(rln_pointer, input_buffer, roots_buffer, proof_is_valid_ptr);
        assert!(success, "verify call failed");
        // Proof should be valid.
        assert_eq!(proof_is_valid, true);
    }

    #[test]
    fn test_groth16_proofs_performance_stateless_ffi() {
        // We create a RLN instance
        let rln_pointer = create_rln_instance();

        // We compute some benchmarks regarding proof and verify API calls
        // Note that circuit loading requires some initial overhead.
        // Once the circuit is loaded (i.e., when the RLN object is created), proof generation and verification times should be similar at each call.
        let sample_size = 100;
        let mut prove_time: u128 = 0;
        let mut verify_time: u128 = 0;

        for _ in 0..sample_size {
            // We generate random witness instances and relative proof values
            let rln_witness = random_rln_witness(TEST_TREE_HEIGHT);
            let proof_values = proof_values_from_witness(&rln_witness).unwrap();

            // We serialize the witness, prepare the input buffer and call prove
            let rln_witness_ser = serialize_witness(&rln_witness).unwrap();
            let input_buffer = &Buffer::from(rln_witness_ser.as_ref());
            let mut output_buffer = MaybeUninit::<Buffer>::uninit();
            let now = Instant::now();
            let success = prove(rln_pointer, input_buffer, output_buffer.as_mut_ptr());
            prove_time += now.elapsed().as_nanos();
            assert!(success, "prove call failed");
            let output_buffer = unsafe { output_buffer.assume_init() };

            // We read the returned proof and we append proof values for verify
            let serialized_proof = <&[u8]>::from(&output_buffer).to_vec();
            let serialized_proof_values = serialize_proof_values(&proof_values);
            let mut verify_data = Vec::<u8>::new();
            verify_data.extend(&serialized_proof);
            verify_data.extend(&serialized_proof_values);

            // We prepare input proof values and we call verify
            let input_buffer = &Buffer::from(verify_data.as_ref());
            let mut proof_is_valid: bool = false;
            let proof_is_valid_ptr = &mut proof_is_valid as *mut bool;
            let now = Instant::now();
            let success = verify(rln_pointer, input_buffer, proof_is_valid_ptr);
            verify_time += now.elapsed().as_nanos();
            assert!(success, "verify call failed");
            assert_eq!(proof_is_valid, true);
        }

        println!(
            "Average prove API call time: {:?}",
            Duration::from_nanos((prove_time / sample_size).try_into().unwrap())
        );
        println!(
            "Average verify API call time: {:?}",
            Duration::from_nanos((verify_time / sample_size).try_into().unwrap())
        );
    }

    #[test]
    // Tests seeded keygen using FFI APIs
    fn test_seeded_keygen_stateless_ffi() {
        // We create a RLN instance
        let rln_pointer = create_rln_instance();

        // We generate a new identity pair from an input seed
        let seed_bytes: &[u8] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
        let input_buffer = &Buffer::from(seed_bytes);
        let mut output_buffer = MaybeUninit::<Buffer>::uninit();
        let success = seeded_key_gen(rln_pointer, input_buffer, output_buffer.as_mut_ptr());
        assert!(success, "seeded key gen call failed");
        let output_buffer = unsafe { output_buffer.assume_init() };
        let result_data = <&[u8]>::from(&output_buffer).to_vec();
        let (identity_secret_hash, read) = bytes_le_to_fr(&result_data);
        let (id_commitment, _) = bytes_le_to_fr(&result_data[read..].to_vec());

        // We check against expected values
        let expected_identity_secret_hash_seed_bytes = str_to_fr(
            "0x766ce6c7e7a01bdf5b3f257616f603918c30946fa23480f2859c597817e6716",
            16,
        );
        let expected_id_commitment_seed_bytes = str_to_fr(
            "0xbf16d2b5c0d6f9d9d561e05bfca16a81b4b873bb063508fae360d8c74cef51f",
            16,
        );

        assert_eq!(
            identity_secret_hash,
            expected_identity_secret_hash_seed_bytes.unwrap()
        );
        assert_eq!(id_commitment, expected_id_commitment_seed_bytes.unwrap());
    }

    #[test]
    // Tests seeded extended keygen using FFI APIs
    fn test_seeded_extended_keygen_stateless_ffi() {
        // We create a RLN instance
        let rln_pointer = create_rln_instance();

        // We generate a new identity tuple from an input seed
        let seed_bytes: &[u8] = &[0, 1, 2, 3, 4, 5, 6, 7, 8, 9];
        let input_buffer = &Buffer::from(seed_bytes);
        let mut output_buffer = MaybeUninit::<Buffer>::uninit();
        let success =
            seeded_extended_key_gen(rln_pointer, input_buffer, output_buffer.as_mut_ptr());
        assert!(success, "seeded key gen call failed");
        let output_buffer = unsafe { output_buffer.assume_init() };
        let result_data = <&[u8]>::from(&output_buffer).to_vec();
        let (identity_trapdoor, identity_nullifier, identity_secret_hash, id_commitment) =
            deserialize_identity_tuple(result_data);

        // We check against expected values
        let expected_identity_trapdoor_seed_bytes = str_to_fr(
            "0x766ce6c7e7a01bdf5b3f257616f603918c30946fa23480f2859c597817e6716",
            16,
        );
        let expected_identity_nullifier_seed_bytes = str_to_fr(
            "0x1f18714c7bc83b5bca9e89d404cf6f2f585bc4c0f7ed8b53742b7e2b298f50b4",
            16,
        );
        let expected_identity_secret_hash_seed_bytes = str_to_fr(
            "0x2aca62aaa7abaf3686fff2caf00f55ab9462dc12db5b5d4bcf3994e671f8e521",
            16,
        );
        let expected_id_commitment_seed_bytes = str_to_fr(
            "0x68b66aa0a8320d2e56842581553285393188714c48f9b17acd198b4f1734c5c",
            16,
        );

        assert_eq!(
            identity_trapdoor,
            expected_identity_trapdoor_seed_bytes.unwrap()
        );
        assert_eq!(
            identity_nullifier,
            expected_identity_nullifier_seed_bytes.unwrap()
        );
        assert_eq!(
            identity_secret_hash,
            expected_identity_secret_hash_seed_bytes.unwrap()
        );
        assert_eq!(id_commitment, expected_id_commitment_seed_bytes.unwrap());
    }

    #[test]
    // Tests hash to field using FFI APIs
    fn test_hash_to_field_stateless_ffi() {
        let mut rng = rand::thread_rng();
        let signal: [u8; 32] = rng.gen();

        // We prepare the signal as input buffer for the hash call
        let input_buffer = &Buffer::from(signal.as_ref());
        let mut output_buffer = MaybeUninit::<Buffer>::uninit();
        let success = ffi_hash(input_buffer, output_buffer.as_mut_ptr());
        assert!(success, "hash call failed");
        let output_buffer = unsafe { output_buffer.assume_init() };

        // We read the returned hash and compare it against a direct hash_to_field call
        let serialized_hash = <&[u8]>::from(&output_buffer).to_vec();
        let (hash1, _) = bytes_le_to_fr(&serialized_hash);

        let hash2 = hash_to_field(&signal);

        assert_eq!(hash1, hash2);
    }

    #[test]
    // Test Poseidon hash FFI
    fn test_poseidon_hash_stateless_ffi() {
        // We generate a random number of inputs in 1..ROUND_PARAMS.len()
        let mut rng = thread_rng();
        let number_of_inputs = rng.gen_range(1..ROUND_PARAMS.len());
        let mut inputs = Vec::with_capacity(number_of_inputs);
        for _ in 0..number_of_inputs {
            inputs.push(Fr::rand(&mut rng));
        }
        let inputs_ser = vec_fr_to_bytes_le(&inputs).unwrap();
        let input_buffer = &Buffer::from(inputs_ser.as_ref());

        let expected_hash = utils_poseidon_hash(inputs.as_ref());

        let mut output_buffer = MaybeUninit::<Buffer>::uninit();
        let success = ffi_poseidon_hash(input_buffer, output_buffer.as_mut_ptr());
        assert!(success, "poseidon hash call failed");

        let output_buffer = unsafe { output_buffer.assume_init() };
        let result_data = <&[u8]>::from(&output_buffer).to_vec();
        let (received_hash, _) = bytes_le_to_fr(&result_data);

        assert_eq!(received_hash, expected_hash);
    }
}
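Every test in the stateless module above repeats the same output pattern: reserve an uninitialized Buffer, let the FFI call fill it, then copy the bytes out. A minimal sketch of factoring that pattern into a helper, assuming only the Buffer type and the &[u8] conversion already used throughout this file (the helper name call_with_output and its closure parameter are illustrative, not part of this commit):

use std::mem::MaybeUninit;

use rln::ffi::Buffer;

// Illustrative helper: run an FFI call that writes into an output Buffer and
// return the written bytes as an owned Vec<u8>.
fn call_with_output<F>(call: F) -> Vec<u8>
where
    F: FnOnce(*mut Buffer) -> bool,
{
    let mut output_buffer = MaybeUninit::<Buffer>::uninit();
    assert!(call(output_buffer.as_mut_ptr()), "FFI call failed");
    let output_buffer = unsafe { output_buffer.assume_init() };
    <&[u8]>::from(&output_buffer).to_vec()
}

With such a helper, identity_pair_gen could reduce to call_with_output(|out| key_gen(rln_pointer, out)), leaving the byte-level parsing with bytes_le_to_fr unchanged.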
@@ -12,6 +12,7 @@ mod test {

    #[test]
    // This test is similar to the one in lib, but uses only public API
    #[cfg(not(feature = "stateless"))]
    fn test_merkle_proof() {
        let leaf_index = 3;
        let user_message_limit = 1;

@@ -13,9 +13,11 @@ bench = false

[dependencies]
ark-ff = { version = "=0.4.1", default-features = false, features = ["asm"] }
num-bigint = { version = "=0.4.3", default-features = false, features = ["rand"] }
num-bigint = { version = "=0.4.3", default-features = false, features = [
    "rand",
] }
color-eyre = "=0.6.2"
pmtree = { package = "vacp2p_pmtree", version = "=2.0.2", optional = true}
pmtree = { package = "vacp2p_pmtree", version = "=2.0.2", optional = true }
sled = "=0.34.7"
serde = "=1.0.163"
lazy_static = "1.4.0"