mirror of https://github.com/vacp2p/zerokit.git
chore(rln): onchain tree poc
This commit is contained in:
parent b5760697bc
commit b5e5be47db
@@ -56,7 +56,7 @@ sled = "=0.34.7"
 criterion = { version = "=0.4.0", features = ["html_reports"] }
 
 [features]
-default = ["parallel", "wasmer/sys-default", "pmtree-ft"]
+default = ["parallel", "wasmer/sys-default"]
 parallel = ["ark-ec/parallel", "ark-ff/parallel", "ark-std/parallel", "ark-groth16/parallel", "utils/parallel"]
 wasm = ["wasmer/js", "wasmer/std"]
 fullmerkletree = ["default"]
@@ -107,6 +107,8 @@ mod test {
 
         let merkle_proof = tree.proof(leaf_index).expect("proof should exist");
         let path_elements = merkle_proof.get_path_elements();
+        dbg!(&path_elements);
+        dbg!(poseidon_hash(&[rate_commitment, Fr::from(0)]));
         let identity_path_index = merkle_proof.get_path_index();
 
         // We check correct computation of the path and indexes
@@ -257,6 +259,118 @@ mod test {
         assert!(verified.unwrap());
     }
 
+    #[test]
+    fn test_compute_root_single_insertion() {
+        let tree_height = TEST_TREE_HEIGHT;
+        let leaf_index = 0;
+
+        let rate_commitment = str_to_fr(
+            "0x2A09A9FD93C590C26B91EFFBB2499F07E8F7AA12E2B4940A3AED2411CB65E11C",
+            16,
+        )
+        .unwrap();
+
+        let default_leaf = Fr::from(0);
+        let mut tree = PoseidonTree::new(
+            tree_height,
+            default_leaf,
+            ConfigOf::<PoseidonTree>::default(),
+        )
+        .unwrap();
+        tree.set(leaf_index, rate_commitment.into()).unwrap();
+
+        assert_eq!(
+            tree.root().to_string(),
+            "7919895337495550471953660523154055129542864206434083474237224229170626792564"
+        );
+    }
+
+    #[test]
+    fn test_compute_root_multiple_insertions() {
+        let tree_height = TEST_TREE_HEIGHT;
+        let start_index = 0;
+
+        let rate_commitments: Vec<Fr> = [
+            BigInt([
+                6344960399222479404,
+                8873735028955887118,
+                7344916015079734877,
+                3049769223023031486,
+            ]),
+            BigInt([
+                5712098114927176582,
+                8940737845291386850,
+                13760702216785496874,
+                2705829225787818087,
+            ]),
+            BigInt([
+                11095489423361569757,
+                3600059334404558726,
+                2596276295120316067,
+                1747990648971046346,
+            ]),
+            BigInt([
+                6042348329893423557,
+                18258910608249868782,
+                15808282831752017379,
+                431669247253051424,
+            ]),
+            BigInt([
+                10095207707778447201,
+                5682738389371124904,
+                13211310082780638286,
+                1315201582035914269,
+            ]),
+            BigInt([
+                17025532269492512967,
+                1150892318682047614,
+                9382150527271933425,
+                3232654496558305327,
+            ]),
+            BigInt([
+                12575250814731208081,
+                3588033008530583836,
+                14988210865591309718,
+                1882695786084137797,
+            ]),
+            BigInt([
+                2907978739955320703,
+                8716018548752635030,
+                10462674785957232325,
+                2943370953425792650,
+            ]),
+            BigInt([
+                5234706529109067247,
+                11231622334196983240,
+                1886386083208828393,
+                1690607978854820878,
+            ]),
+            BigInt([
+                12359804935986041669,
+                10294673542421867784,
+                11783956311711833641,
+                2759017549080634703,
+            ]),
+        ]
+        .into_iter()
+        .map(|x| Fr::from(x))
+        .collect();
+
+        let default_leaf = Fr::from(0);
+        let mut tree = PoseidonTree::new(
+            tree_height,
+            default_leaf,
+            ConfigOf::<PoseidonTree>::default(),
+        )
+        .unwrap();
+        tree.set_range(start_index, rate_commitments).unwrap();
+
+        assert_eq!(
+            tree.root().to_string(),
+            "5210724218081541877101688952118136930297124697603087561558225712176057209122"
+        );
+    }
+
     #[test]
     // We test a RLN proof generation and verification
     fn test_end_to_end() {
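Note: both new tests use the same PoseidonTree construction, differing only in whether leaves go in one at a time (`set`) or as a batch (`set_range`). A minimal sketch of the equivalence one would expect between the two paths, reusing the names from the tests above (the four-leaf input is hypothetical, chosen only for illustration):

    let default_leaf = Fr::from(0);
    let mut a = PoseidonTree::new(
        TEST_TREE_HEIGHT,
        default_leaf,
        ConfigOf::<PoseidonTree>::default(),
    )
    .unwrap();
    let mut b = PoseidonTree::new(
        TEST_TREE_HEIGHT,
        default_leaf,
        ConfigOf::<PoseidonTree>::default(),
    )
    .unwrap();

    // Hypothetical leaves; any field elements would do.
    let leaves: Vec<Fr> = (0u64..4).map(Fr::from).collect();

    // Insert one by one vs. as a range: the roots should agree.
    for (i, leaf) in leaves.iter().enumerate() {
        a.set(i, *leaf).unwrap();
    }
    b.set_range(0, leaves).unwrap();
    assert_eq!(a.root(), b.root());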
@@ -332,6 +446,24 @@ mod test {
         assert_eq!(proof_values, deser);
     }
 
+    #[test]
+    fn test_poseidon_hash() {
+        let inputs = &[
+            str_to_fr(
+                "0x2A09A9FD93C590C26B91EFFBB2499F07E8F7AA12E2B4940A3AED2411CB65E11C",
+                16,
+            )
+            .unwrap(),
+            Fr::from(0),
+        ];
+        let hash = poseidon_hash(inputs);
+
+        assert_eq!(
+            hash.to_string(),
+            "13164376930590487041313497514223288845711140604177161029957349518915056324115"
+        );
+    }
+
     #[test]
     // Tests seeded keygen
     // Note that hardcoded values are only valid for Bn254
@@ -13,6 +13,7 @@
 //! * Disk based storage backend (using mmaped files should be easy)
 //! * Implement serialization for tree and Merkle proof
 
+use std::fmt::Debug;
 use std::str::FromStr;
 
 use color_eyre::Result;
@@ -21,7 +22,7 @@ use color_eyre::Result;
 /// and the hash function used to initialize a Merkle Tree implementation
 pub trait Hasher {
     /// Type of the leaf and tree node
-    type Fr: Clone + Copy + Eq;
+    type Fr: Clone + Copy + Eq + Debug + ToString;
 
     /// Returns the default tree leaf
     fn default_leaf() -> Self::Fr;
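The tightened `Debug + ToString` bounds on `Hasher::Fr` are what make the `dbg!`/`format!` instrumentation in the hunks below compile. A minimal sketch of a node type satisfying the new bound (the `U64Fr` type is hypothetical, purely for illustration, not part of the crate):

    use std::fmt;

    // Hypothetical node type for a Hasher implementation.
    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    struct U64Fr(u64);

    // Implementing Display provides ToString via the blanket impl,
    // so U64Fr satisfies `Clone + Copy + Eq + Debug + ToString`.
    impl fmt::Display for U64Fr {
        fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
            write!(f, "{}", self.0)
        }
    }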
@@ -113,6 +113,8 @@ where
         if index >= self.capacity() {
             return Err(Report::msg("index exceeds set size"));
         }
+        let log = format!("inserting {} at {}[{index}]", leaf.to_string(), self.depth);
+        dbg!(log);
         self.nodes.insert((self.depth, index), leaf);
         self.recalculate_from(index)?;
         self.next_index = max(self.next_index, index + 1);
@@ -192,6 +194,7 @@ where
         if index >= self.capacity() {
             return Err(Report::msg("index exceeds set size"));
         }
+        dbg!("yomama");
         let mut witness = Vec::<(H::Fr, u8)>::with_capacity(self.depth);
         let mut i = index;
         let mut depth = self.depth;
@@ -246,6 +249,13 @@ where
             .nodes
             .get(&(depth, index))
             .unwrap_or_else(|| &self.cached_nodes[depth]);
+        let log = format!(
+            "depth: {}, index: {}, node at depth[index]: {}",
+            depth,
+            index,
+            &node.to_string()
+        );
+        dbg!(log);
         node
     }
 
@@ -255,10 +265,14 @@ where
 
     fn hash_couple(&mut self, depth: usize, index: usize) -> H::Fr {
         let b = index & !1;
-        H::hash(&[self.get_node(depth, b), self.get_node(depth, b + 1)])
+        let l = self.get_node(depth, b);
+        let r = self.get_node(depth, b + 1);
+        dbg!(l.to_string(), r.to_string());
+        H::hash(&[l, r])
     }
 
     fn recalculate_from(&mut self, index: usize) -> Result<()> {
+        dbg!("running recalc from");
         let mut i = index;
         let mut depth = self.depth;
         loop {
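For reference, `index & !1` clears the lowest bit of the index, so `hash_couple` always hashes the even/odd sibling pair `(b, b + 1)` regardless of which side `index` falls on. A small worked check:

    // Clearing the lowest bit yields the left (even) member of the pair.
    assert_eq!(5usize & !1, 4); // node 5 is hashed together with node 4
    assert_eq!(4usize & !1, 4); // node 4 is hashed together with node 5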
@@ -266,6 +280,8 @@ where
             i >>= 1;
             depth -= 1;
             self.nodes.insert((depth, i), h);
+            let log = format!("inserting {} at {depth}[{i}]", h.to_string());
+            dbg!(log);
             if depth == 0 {
                 break;
             }