Mirror of https://github.com/logos-blockchain/lssa.git (synced 2026-01-05 23:03:06 +00:00)

Merge branch 'main' into schouhy/update-utxo-crate-3-add-randomness

Commit 014cbc3c4d
Cargo.lock (generated, 1267): file diff suppressed because it is too large.
@@ -36,7 +36,6 @@ lru = "0.7.8"
 thiserror = "1.0"
 rs_merkle = "1.4"
 sha2 = "0.10.8"
-monotree = "0.1.5"
 hex = "0.4.3"
 aes-gcm = "0.10.3"
 toml = "0.7.4"
@@ -1,4 +1,5 @@
-use aes_gcm::{aead::Aead, AeadCore, Aes256Gcm, Key, KeyInit};
+use aes_gcm::{aead::Aead, AeadCore, Aes256Gcm, KeyInit};
+use elliptic_curve::point::AffineCoordinates;
 use elliptic_curve::PrimeField;
 use k256::{AffinePoint, FieldBytes, Scalar};
 use log::info;
@@ -39,14 +40,8 @@ impl EphemeralKeyHolder {
         viewing_public_key_receiver: AffinePoint,
         data: &[u8],
     ) -> (CipherText, Nonce) {
-        let key_point = self.calculate_shared_secret_sender(viewing_public_key_receiver);
-        let binding = serde_json::to_vec(&key_point).unwrap();
-        let key_raw = &binding.as_slice()[..32];
-        let key_raw_adjust: [u8; 32] = key_raw.try_into().unwrap();
-
-        let key: Key<Aes256Gcm> = key_raw_adjust.into();
-
-        let cipher = Aes256Gcm::new(&key);
+        let shared_secret = self.calculate_shared_secret_sender(viewing_public_key_receiver);
+        let cipher = Aes256Gcm::new(&shared_secret.x());
         let nonce = Aes256Gcm::generate_nonce(&mut OsRng);

         (cipher.encrypt(&nonce, data).unwrap(), nonce)
@@ -1,6 +1,7 @@
-use aes_gcm::{aead::Aead, Aes256Gcm, Key, KeyInit};
+use aes_gcm::{aead::Aead, Aes256Gcm, KeyInit};
 use common::merkle_tree_public::TreeHashType;
 use constants_types::{CipherText, Nonce};
+use elliptic_curve::point::AffineCoordinates;
 use ephemeral_key_holder::EphemeralKeyHolder;
 use k256::AffinePoint;
 use log::info;
@@ -63,14 +64,8 @@ impl AddressKeyHolder {
         ciphertext: CipherText,
         nonce: Nonce,
    ) -> Result<Vec<u8>, aes_gcm::Error> {
-        let key_point = self.calculate_shared_secret_receiver(ephemeral_public_key_sender);
-        let binding = serde_json::to_vec(&key_point).unwrap();
-        let key_raw = &binding.as_slice()[..32];
-        let key_raw_adjust: [u8; 32] = key_raw.try_into().unwrap();
-
-        let key: Key<Aes256Gcm> = key_raw_adjust.into();
-
-        let cipher = Aes256Gcm::new(&key);
+        let shared_secret = self.calculate_shared_secret_receiver(ephemeral_public_key_sender);
+        let cipher = Aes256Gcm::new(&shared_secret.x());

         cipher.decrypt(&nonce, ciphertext.as_slice())
     }
@@ -115,6 +110,7 @@ mod tests {
     use constants_types::{NULLIFIER_SECRET_CONST, VIEWING_SECRET_CONST};
     use elliptic_curve::ff::Field;
     use elliptic_curve::group::prime::PrimeCurveAffine;
+    use elliptic_curve::point::AffineCoordinates;
     use k256::{AffinePoint, ProjectivePoint, Scalar};

     use super::*;
@@ -154,22 +150,14 @@ mod tests {
         let address_key_holder = AddressKeyHolder::new_os_random();

         // Generate an ephemeral key and shared secret
-        let scalar = Scalar::random(OsRng);
         let ephemeral_public_key_sender = address_key_holder
             .produce_ephemeral_key_holder()
             .generate_ephemeral_public_key();
         let shared_secret =
             address_key_holder.calculate_shared_secret_receiver(ephemeral_public_key_sender);

-        // Prepare the encryption key from shared secret
-        let key_raw = serde_json::to_vec(&shared_secret).unwrap();
-        let key_raw_adjust_pre = &key_raw.as_slice()[..32];
-        let key_raw_adjust: [u8; 32] = key_raw_adjust_pre.try_into().unwrap();
-        let key: Key<Aes256Gcm> = key_raw_adjust.into();
-
-        let cipher = Aes256Gcm::new(&key);
-
         // Encrypt sample data
+        let cipher = Aes256Gcm::new(&shared_secret.x());
         let nonce = Nonce::from_slice(b"unique nonce");
         let plaintext = b"Sensitive data";
         let ciphertext = cipher
@@ -225,19 +213,12 @@ mod tests {

         // Generate ephemeral public key and shared secret
         let scalar = Scalar::random(OsRng);
-        let ephemeral_public_key_sender = (ProjectivePoint::generator() * scalar).to_affine();
+        let ephemeral_public_key_sender = (ProjectivePoint::GENERATOR * scalar).to_affine();
         let shared_secret =
             address_key_holder.calculate_shared_secret_receiver(ephemeral_public_key_sender);

-        // Prepare the encryption key from shared secret
-        let key_raw = serde_json::to_vec(&shared_secret).unwrap();
-        let key_raw_adjust_pre = &key_raw.as_slice()[..32];
-        let key_raw_adjust: [u8; 32] = key_raw_adjust_pre.try_into().unwrap();
-        let key: Key<Aes256Gcm> = key_raw_adjust.into();
-
-        let cipher = Aes256Gcm::new(&key);
-
         // Encrypt sample data with a specific nonce
+        let cipher = Aes256Gcm::new(&shared_secret.x());
         let nonce = Nonce::from_slice(b"unique nonce");
         let plaintext = b"Sensitive data";
         let ciphertext = cipher
@@ -265,19 +246,12 @@ mod tests {

         // Generate ephemeral public key and shared secret
         let scalar = Scalar::random(OsRng);
-        let ephemeral_public_key_sender = (ProjectivePoint::generator() * scalar).to_affine();
+        let ephemeral_public_key_sender = (ProjectivePoint::GENERATOR * scalar).to_affine();
         let shared_secret =
             address_key_holder.calculate_shared_secret_receiver(ephemeral_public_key_sender);

-        // Prepare the encryption key from shared secret
-        let key_raw = serde_json::to_vec(&shared_secret).unwrap();
-        let key_raw_adjust_pre = &key_raw.as_slice()[..32];
-        let key_raw_adjust: [u8; 32] = key_raw_adjust_pre.try_into().unwrap();
-        let key: Key<Aes256Gcm> = key_raw_adjust.into();
-
-        let cipher = Aes256Gcm::new(&key);
-
         // Encrypt sample data
+        let cipher = Aes256Gcm::new(&shared_secret.x());
         let nonce = Nonce::from_slice(b"unique nonce");
         let plaintext = b"Sensitive data";
         let ciphertext = cipher
@@ -307,7 +281,7 @@ mod tests {

         // Generate ephemeral key and shared secret
         let scalar = Scalar::random(OsRng);
-        let ephemeral_public_key_sender = (ProjectivePoint::generator() * scalar).to_affine();
+        let ephemeral_public_key_sender = (ProjectivePoint::GENERATOR * scalar).to_affine();

         // Encrypt sample data
         let plaintext = b"Round-trip test data";
@@ -315,12 +289,7 @@ mod tests {

         let shared_secret =
             address_key_holder.calculate_shared_secret_receiver(ephemeral_public_key_sender);
-        // Prepare the encryption key from shared secret
-        let key_raw = serde_json::to_vec(&shared_secret).unwrap();
-        let key_raw_adjust_pre = &key_raw.as_slice()[..32];
-        let key_raw_adjust: [u8; 32] = key_raw_adjust_pre.try_into().unwrap();
-        let key: Key<Aes256Gcm> = key_raw_adjust.into();
-        let cipher = Aes256Gcm::new(&key);
+        let cipher = Aes256Gcm::new(&shared_secret.x());

         let ciphertext = cipher
             .encrypt(nonce, plaintext.as_ref())
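Taken together, the encryption and decryption hunks above drop the old serde-serialize-the-shared-point key derivation and key AES-256-GCM directly from the x-coordinate of the ECDH shared point. A minimal round-trip sketch of that scheme, assuming aes-gcm's OsRng re-export (getrandom feature) and a standalone main in place of the crate's key-holder types:

use aes_gcm::{
    aead::{Aead, OsRng},
    AeadCore, Aes256Gcm, KeyInit,
};
use elliptic_curve::{ff::Field, point::AffineCoordinates};
use k256::{AffinePoint, ProjectivePoint, Scalar};

fn main() {
    // Receiver's long-lived viewing key pair.
    let viewing_secret = Scalar::random(&mut OsRng);
    let viewing_public: AffinePoint = (ProjectivePoint::GENERATOR * viewing_secret).to_affine();

    // Sender's ephemeral key pair.
    let ephemeral_secret = Scalar::random(&mut OsRng);
    let ephemeral_public: AffinePoint = (ProjectivePoint::GENERATOR * ephemeral_secret).to_affine();

    // Both sides compute the same Diffie-Hellman point; its 32-byte x-coordinate
    // is used directly as the AES-256-GCM key, as in the new code above.
    let shared_sender = (ProjectivePoint::from(viewing_public) * ephemeral_secret).to_affine();
    let shared_receiver = (ProjectivePoint::from(ephemeral_public) * viewing_secret).to_affine();
    assert_eq!(shared_sender.x(), shared_receiver.x());

    // Sender encrypts.
    let cipher = Aes256Gcm::new(&shared_sender.x());
    let nonce = Aes256Gcm::generate_nonce(&mut OsRng);
    let ciphertext = cipher.encrypt(&nonce, b"Sensitive data".as_ref()).unwrap();

    // Receiver decrypts with the key derived on its own side.
    let decrypted = Aes256Gcm::new(&shared_receiver.x())
        .decrypt(&nonce, ciphertext.as_slice())
        .unwrap();
    assert_eq!(decrypted, b"Sensitive data".to_vec());
}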
@@ -9,7 +9,6 @@ thiserror.workspace = true
 serde_json.workspace = true
 serde.workspace = true
 reqwest.workspace = true
-monotree.workspace = true
 risc0-zkvm = { git = "https://github.com/risc0/risc0.git", branch = "release-2.0" }

 rs_merkle.workspace = true
@ -1,283 +0,0 @@
|
|||||||
use monotree::database::MemoryDB;
|
|
||||||
use monotree::hasher::Blake3;
|
|
||||||
use monotree::{Hasher, Monotree, Proof};
|
|
||||||
|
|
||||||
use crate::commitment::Commitment;
|
|
||||||
use crate::merkle_tree_public::CommitmentHashType;
|
|
||||||
|
|
||||||
pub struct CommitmentsSparseMerkleTree {
|
|
||||||
pub curr_root: Option<CommitmentHashType>,
|
|
||||||
pub tree: Monotree<MemoryDB, Blake3>,
|
|
||||||
pub hasher: Blake3,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl CommitmentsSparseMerkleTree {
|
|
||||||
pub fn new() -> Self {
|
|
||||||
CommitmentsSparseMerkleTree {
|
|
||||||
curr_root: None,
|
|
||||||
tree: Monotree::default(),
|
|
||||||
hasher: Blake3::new(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn insert_item(&mut self, commitment: Commitment) -> Result<(), monotree::Errors> {
|
|
||||||
let root = self
|
|
||||||
.curr_root
|
|
||||||
.as_ref()
|
|
||||||
.map(|val| val[0..32].try_into().unwrap());
|
|
||||||
|
|
||||||
let new_root = self.tree.insert(
|
|
||||||
root,
|
|
||||||
&commitment.commitment_hash[0..32].try_into().unwrap(),
|
|
||||||
&commitment.commitment_hash[0..32].try_into().unwrap(),
|
|
||||||
)?;
|
|
||||||
|
|
||||||
self.curr_root = new_root.map(|val| val.to_vec());
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn insert_items(&mut self, commitments: Vec<Commitment>) -> Result<(), monotree::Errors> {
|
|
||||||
let root = self
|
|
||||||
.curr_root
|
|
||||||
.as_ref()
|
|
||||||
.map(|val| val[0..32].try_into().unwrap());
|
|
||||||
|
|
||||||
let hashes: Vec<_> = commitments
|
|
||||||
.iter()
|
|
||||||
.map(|val| val.commitment_hash[0..32].try_into().unwrap())
|
|
||||||
.collect::<Vec<_>>();
|
|
||||||
|
|
||||||
let new_root = self.tree.inserts(root, &hashes, &hashes)?;
|
|
||||||
|
|
||||||
self.curr_root = new_root.map(|val| val[0..32].try_into().unwrap());
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn search_item_inclusion(
|
|
||||||
&mut self,
|
|
||||||
commitment_hash: CommitmentHashType,
|
|
||||||
) -> Result<bool, monotree::Errors> {
|
|
||||||
self.tree
|
|
||||||
.get(
|
|
||||||
self.curr_root
|
|
||||||
.as_ref()
|
|
||||||
.map(|val| val[0..32].try_into().unwrap()),
|
|
||||||
&commitment_hash[0..32].try_into().unwrap(),
|
|
||||||
)
|
|
||||||
.map(|data| data.is_some())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn search_item_inclusions(
|
|
||||||
&mut self,
|
|
||||||
commitment_hashes: &[CommitmentHashType],
|
|
||||||
) -> Result<Vec<bool>, monotree::Errors> {
|
|
||||||
let mut inclusions = vec![];
|
|
||||||
|
|
||||||
for nullifier_hash in commitment_hashes {
|
|
||||||
let is_included = self
|
|
||||||
.tree
|
|
||||||
.get(
|
|
||||||
self.curr_root
|
|
||||||
.as_ref()
|
|
||||||
.map(|val| val[0..32].try_into().unwrap()),
|
|
||||||
nullifier_hash[0..32].try_into().unwrap(),
|
|
||||||
)
|
|
||||||
.map(|data| data.is_some())?;
|
|
||||||
|
|
||||||
inclusions.push(is_included);
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(inclusions)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn get_non_membership_proof(
|
|
||||||
&mut self,
|
|
||||||
commitment_hash: CommitmentHashType,
|
|
||||||
) -> Result<(Option<Proof>, Option<CommitmentHashType>), monotree::Errors> {
|
|
||||||
let is_member = self.search_item_inclusion(commitment_hash.clone())?;
|
|
||||||
|
|
||||||
if is_member {
|
|
||||||
Err(monotree::Errors::new("Is a member"))
|
|
||||||
} else {
|
|
||||||
Ok((
|
|
||||||
self.tree.get_merkle_proof(
|
|
||||||
self.curr_root
|
|
||||||
.as_ref()
|
|
||||||
.map(|val| val[0..32].try_into().unwrap()),
|
|
||||||
&commitment_hash,
|
|
||||||
)?,
|
|
||||||
self.curr_root.clone(),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(clippy::type_complexity)]
|
|
||||||
pub fn get_non_membership_proofs(
|
|
||||||
&mut self,
|
|
||||||
commitment_hashes: &[CommitmentHashType],
|
|
||||||
) -> Result<Vec<(Option<Proof>, Option<CommitmentHashType>)>, monotree::Errors> {
|
|
||||||
let mut non_membership_proofs = vec![];
|
|
||||||
|
|
||||||
for commitment_hash in commitment_hashes {
|
|
||||||
let is_member = self.search_item_inclusion(commitment_hash.clone())?;
|
|
||||||
|
|
||||||
if is_member {
|
|
||||||
return Err(monotree::Errors::new(
|
|
||||||
format!("{commitment_hash:?} Is a member").as_str(),
|
|
||||||
));
|
|
||||||
} else {
|
|
||||||
non_membership_proofs.push((
|
|
||||||
self.tree.get_merkle_proof(
|
|
||||||
self.curr_root
|
|
||||||
.as_ref()
|
|
||||||
.map(|val| val[0..32].try_into().unwrap()),
|
|
||||||
commitment_hash,
|
|
||||||
)?,
|
|
||||||
self.curr_root.clone(),
|
|
||||||
))
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(non_membership_proofs)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Default for CommitmentsSparseMerkleTree {
|
|
||||||
fn default() -> Self {
|
|
||||||
Self::new()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
use super::*;
|
|
||||||
use crate::nullifier::UTXONullifier;
|
|
||||||
use monotree::database::MemoryDB;
|
|
||||||
use monotree::hasher::Blake3;
|
|
||||||
use monotree::Monotree;
|
|
||||||
|
|
||||||
fn create_nullifier(hash: CommitmentHashType) -> Commitment {
|
|
||||||
Commitment {
|
|
||||||
commitment_hash: hash,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_new_tree_initialization() {
|
|
||||||
let tree = CommitmentsSparseMerkleTree::new();
|
|
||||||
assert!(tree.curr_root.is_none());
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_insert_single_item() {
|
|
||||||
let mut tree = CommitmentsSparseMerkleTree::new();
|
|
||||||
let nullifier = create_nullifier([1u8; 32].to_vec()); // Sample 32-byte hash
|
|
||||||
|
|
||||||
let result = tree.insert_item(nullifier);
|
|
||||||
assert!(result.is_ok());
|
|
||||||
assert!(tree.curr_root.is_some());
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_insert_multiple_items() {
|
|
||||||
let mut tree = CommitmentsSparseMerkleTree::new();
|
|
||||||
let nullifiers = vec![
|
|
||||||
create_nullifier([1u8; 32].to_vec()),
|
|
||||||
create_nullifier([2u8; 32].to_vec()),
|
|
||||||
create_nullifier([3u8; 32].to_vec()),
|
|
||||||
];
|
|
||||||
|
|
||||||
let result = tree.insert_items(nullifiers);
|
|
||||||
assert!(result.is_ok());
|
|
||||||
assert!(tree.curr_root.is_some());
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_search_item_inclusion() {
|
|
||||||
let mut tree = CommitmentsSparseMerkleTree::new();
|
|
||||||
let nullifier = create_nullifier([1u8; 32].to_vec());
|
|
||||||
|
|
||||||
tree.insert_item(nullifier.clone()).unwrap();
|
|
||||||
|
|
||||||
let result = tree.search_item_inclusion([1u8; 32].to_vec());
|
|
||||||
assert!(result.is_ok());
|
|
||||||
assert_eq!(result.unwrap(), true);
|
|
||||||
|
|
||||||
let non_existing = tree.search_item_inclusion([99u8; 32].to_vec());
|
|
||||||
assert!(non_existing.is_ok());
|
|
||||||
assert_eq!(non_existing.unwrap(), false);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_search_multiple_item_inclusions() {
|
|
||||||
let mut tree = CommitmentsSparseMerkleTree::new();
|
|
||||||
let nullifiers = vec![
|
|
||||||
create_nullifier([1u8; 32].to_vec()),
|
|
||||||
create_nullifier([2u8; 32].to_vec()),
|
|
||||||
create_nullifier([3u8; 32].to_vec()),
|
|
||||||
];
|
|
||||||
|
|
||||||
tree.insert_items(nullifiers).unwrap();
|
|
||||||
|
|
||||||
let search_hashes = vec![[1u8; 32].to_vec(), [2u8; 32].to_vec(), [99u8; 32].to_vec()];
|
|
||||||
let result = tree.search_item_inclusions(&search_hashes);
|
|
||||||
assert!(result.is_ok());
|
|
||||||
|
|
||||||
let expected_results = vec![true, true, false];
|
|
||||||
assert_eq!(result.unwrap(), expected_results);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_non_membership_proof() {
|
|
||||||
let mut tree = CommitmentsSparseMerkleTree::new();
|
|
||||||
let non_member_hash = [5u8; 32].to_vec();
|
|
||||||
|
|
||||||
let result = tree.get_non_membership_proof(non_member_hash);
|
|
||||||
assert!(result.is_ok());
|
|
||||||
|
|
||||||
let (proof, root) = result.unwrap();
|
|
||||||
assert!(root.is_none());
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_non_membership_proofs_multiple() {
|
|
||||||
let mut tree = CommitmentsSparseMerkleTree::new();
|
|
||||||
let non_member_hashes = vec![[5u8; 32].to_vec(), [6u8; 32].to_vec(), [7u8; 32].to_vec()];
|
|
||||||
|
|
||||||
let result = tree.get_non_membership_proofs(&non_member_hashes);
|
|
||||||
assert!(result.is_ok());
|
|
||||||
|
|
||||||
let proofs = result.unwrap();
|
|
||||||
for (proof, root) in proofs {
|
|
||||||
assert!(root.is_none());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_insert_and_get_proof_of_existing_item() {
|
|
||||||
let mut tree = CommitmentsSparseMerkleTree::new();
|
|
||||||
let nullifier = create_nullifier([1u8; 32].to_vec());
|
|
||||||
|
|
||||||
tree.insert_item(nullifier.clone()).unwrap();
|
|
||||||
|
|
||||||
let proof_result = tree.get_non_membership_proof([1u8; 32].to_vec());
|
|
||||||
assert!(proof_result.is_err());
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_insert_and_get_proofs_of_existing_items() {
|
|
||||||
let mut tree = CommitmentsSparseMerkleTree::new();
|
|
||||||
let nullifiers = vec![
|
|
||||||
create_nullifier([1u8; 32].to_vec()),
|
|
||||||
create_nullifier([2u8; 32].to_vec()),
|
|
||||||
];
|
|
||||||
|
|
||||||
tree.insert_items(nullifiers).unwrap();
|
|
||||||
|
|
||||||
let proof_result =
|
|
||||||
tree.get_non_membership_proofs(&[[1u8; 32].to_vec(), [2u8; 32].to_vec()]);
|
|
||||||
assert!(proof_result.is_err());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -3,10 +3,8 @@ use serde::Deserialize;

 pub mod block;
 pub mod commitment;
-pub mod commitments_sparse_merkle_tree;
 pub mod merkle_tree_public;
 pub mod nullifier;
-pub mod nullifier_sparse_merkle_tree;
 pub mod rpc_primitives;
 pub mod transaction;
 pub mod utxo_commitment;
@@ -67,8 +65,6 @@ pub enum ExecutionFailureKind {
     AmountMismatchError,
     #[error("Sequencer client error: {0:?}")]
     SequencerClientError(#[from] SequencerClientError),
-    #[error("Datebase returned error : {0:?}")]
-    MonoTreeError(#[from] monotree::Errors),
     #[error("Insufficient gas for operation")]
     InsufficientGasError,
     #[error("Can not pay for operation")]
@@ -1,6 +1,11 @@
-use std::collections::HashMap;
+use std::{collections::HashMap, fmt, marker::PhantomData};

 use rs_merkle::{MerkleProof, MerkleTree};
+use serde::{
+    de::{SeqAccess, Visitor},
+    ser::SerializeSeq,
+    Deserialize, Deserializer, Serialize,
+};

 use crate::{transaction::Transaction, utxo_commitment::UTXOCommitment};

@@ -12,6 +17,70 @@ pub struct HashStorageMerkleTree<Leav: TreeLeavItem + Clone> {
     tree: MerkleTree<OwnHasher>,
 }

+impl<Leav: TreeLeavItem + Clone + Serialize> Serialize for HashStorageMerkleTree<Leav> {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: serde::Serializer,
+    {
+        let mut vector = self.leaves.iter().collect::<Vec<_>>();
+        vector.sort_by(|a, b| a.0.cmp(b.0));
+
+        let mut seq = serializer.serialize_seq(Some(self.leaves.len()))?;
+        for element in vector.iter() {
+            seq.serialize_element(element.1)?;
+        }
+        seq.end()
+    }
+}
+
+struct HashStorageMerkleTreeDeserializer<Leav: TreeLeavItem + Clone> {
+    marker: PhantomData<fn() -> HashStorageMerkleTree<Leav>>,
+}
+
+impl<Leaf: TreeLeavItem + Clone> HashStorageMerkleTreeDeserializer<Leaf> {
+    fn new() -> Self {
+        HashStorageMerkleTreeDeserializer {
+            marker: PhantomData,
+        }
+    }
+}
+
+impl<'de, Leav: TreeLeavItem + Clone + Deserialize<'de>> Visitor<'de>
+    for HashStorageMerkleTreeDeserializer<Leav>
+{
+    type Value = HashStorageMerkleTree<Leav>;
+
+    fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+        formatter.write_str("HashStorageMerkleTree key value sequence.")
+    }
+
+    fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
+    where
+        A: SeqAccess<'de>,
+    {
+        let mut vector = vec![];
+
+        loop {
+            let opt_key = seq.next_element::<Leav>()?;
+            if let Some(value) = opt_key {
+                vector.push(value);
+            } else {
+                break;
+            }
+        }
+
+        Ok(HashStorageMerkleTree::new(vector))
+    }
+}
+
+impl<'de, Leav: TreeLeavItem + Clone + Deserialize<'de>> serde::Deserialize<'de>
+    for HashStorageMerkleTree<Leav>
+{
+    fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
+        deserializer.deserialize_seq(HashStorageMerkleTreeDeserializer::new())
+    }
+}
+
 pub type PublicTransactionMerkleTree = HashStorageMerkleTree<Transaction>;

 pub type UTXOCommitmentsMerkleTree = HashStorageMerkleTree<UTXOCommitment>;
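The impls added above serialize the tree as a plain sequence of leaves (sorted by hash) and rebuild the tree from that sequence through a serde Visitor on deserialization, so the rs_merkle internals never have to be serialized. A stripped-down sketch of the same pattern with a Vec-backed store; Store and len_cache are illustrative names, not taken from the repository:

use std::{fmt, marker::PhantomData};

use serde::{
    de::{SeqAccess, Visitor},
    ser::SerializeSeq,
    Deserialize, Deserializer, Serialize, Serializer,
};

struct Store<T> {
    leaves: Vec<T>,
    // Derived state that is rebuilt on deserialization instead of being stored.
    len_cache: usize,
}

impl<T> Store<T> {
    fn new(leaves: Vec<T>) -> Self {
        let len_cache = leaves.len();
        Store { leaves, len_cache }
    }
}

impl<T: Serialize> Serialize for Store<T> {
    fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        // Only the leaves go over the wire.
        let mut seq = serializer.serialize_seq(Some(self.leaves.len()))?;
        for leaf in &self.leaves {
            seq.serialize_element(leaf)?;
        }
        seq.end()
    }
}

struct StoreVisitor<T>(PhantomData<fn() -> Store<T>>);

impl<'de, T: Deserialize<'de>> Visitor<'de> for StoreVisitor<T> {
    type Value = Store<T>;

    fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.write_str("a sequence of leaves")
    }

    fn visit_seq<A: SeqAccess<'de>>(self, mut seq: A) -> Result<Self::Value, A::Error> {
        let mut leaves = vec![];
        while let Some(leaf) = seq.next_element::<T>()? {
            leaves.push(leaf);
        }
        // Rebuild the derived state exactly as the constructor would.
        Ok(Store::new(leaves))
    }
}

impl<'de, T: Deserialize<'de>> Deserialize<'de> for Store<T> {
    fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
        deserializer.deserialize_seq(StoreVisitor(PhantomData))
    }
}

fn main() {
    let store = Store::new(vec![1u32, 2, 3]);
    let json = serde_json::to_string(&store).unwrap();
    let back: Store<u32> = serde_json::from_str(&json).unwrap();
    assert_eq!(back.leaves, vec![1, 2, 3]);
    assert_eq!(back.len_cache, 3);
}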
@@ -101,7 +170,7 @@ mod tests {
     use super::*;

     // Mock implementation of TreeLeavItem trait for testing
-    #[derive(Debug, Clone, PartialEq, Eq, Hash)]
+    #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
     struct MockTransaction {
         pub hash: TreeHashType,
     }
@@ -136,6 +205,26 @@ mod tests {
         assert!(tree.get_root().is_some());
     }

+    #[test]
+    fn test_new_merkle_tree_serialize() {
+        let tx1 = MockTransaction {
+            hash: get_first_32_bytes("tx1"),
+        };
+        let tx2 = MockTransaction {
+            hash: get_first_32_bytes("tx2"),
+        };
+
+        let tree = HashStorageMerkleTree::new(vec![tx1.clone(), tx2.clone()]);
+
+        let binding = serde_json::to_vec(&tree).unwrap();
+
+        let obj: HashStorageMerkleTree<MockTransaction> = serde_json::from_slice(&binding).unwrap();
+
+        assert_eq!(tree.leaves, obj.leaves);
+        assert_eq!(tree.hash_to_id_map, obj.hash_to_id_map);
+        assert_eq!(tree.tree.root(), obj.tree.root());
+    }
+
     #[test]
     fn test_get_tx() {
         let tx1 = MockTransaction {
@@ -3,7 +3,7 @@ use serde::{Deserialize, Serialize};
 use crate::merkle_tree_public::TreeHashType;

 //ToDo: Update Nullifier model, when it is clear
-#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq)]
+#[derive(Debug, Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash)]
 ///General nullifier object
 pub struct UTXONullifier {
     pub utxo_hash: TreeHashType,
@ -1,245 +0,0 @@
|
|||||||
use monotree::database::MemoryDB;
|
|
||||||
use monotree::hasher::Blake3;
|
|
||||||
use monotree::{Hasher, Monotree, Proof};
|
|
||||||
|
|
||||||
use crate::merkle_tree_public::TreeHashType;
|
|
||||||
use crate::nullifier::UTXONullifier;
|
|
||||||
|
|
||||||
pub struct NullifierSparseMerkleTree {
|
|
||||||
pub curr_root: Option<TreeHashType>,
|
|
||||||
pub tree: Monotree<MemoryDB, Blake3>,
|
|
||||||
pub hasher: Blake3,
|
|
||||||
}
|
|
||||||
|
|
||||||
impl NullifierSparseMerkleTree {
|
|
||||||
pub fn new() -> Self {
|
|
||||||
NullifierSparseMerkleTree {
|
|
||||||
curr_root: None,
|
|
||||||
tree: Monotree::default(),
|
|
||||||
hasher: Blake3::new(),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn insert_item(&mut self, nullifier: UTXONullifier) -> Result<(), monotree::Errors> {
|
|
||||||
let root = self.curr_root.as_ref();
|
|
||||||
|
|
||||||
let new_root = self
|
|
||||||
.tree
|
|
||||||
.insert(root, &nullifier.utxo_hash, &nullifier.utxo_hash)?;
|
|
||||||
|
|
||||||
self.curr_root = new_root;
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn insert_items(&mut self, nullifiers: Vec<UTXONullifier>) -> Result<(), monotree::Errors> {
|
|
||||||
let root = self.curr_root.as_ref();
|
|
||||||
|
|
||||||
let hashes: Vec<TreeHashType> = nullifiers.iter().map(|nu| nu.utxo_hash).collect();
|
|
||||||
|
|
||||||
let new_root = self.tree.inserts(root, &hashes, &hashes)?;
|
|
||||||
|
|
||||||
self.curr_root = new_root;
|
|
||||||
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn search_item_inclusion(
|
|
||||||
&mut self,
|
|
||||||
nullifier_hash: TreeHashType,
|
|
||||||
) -> Result<bool, monotree::Errors> {
|
|
||||||
self.tree
|
|
||||||
.get(self.curr_root.as_ref(), &nullifier_hash)
|
|
||||||
.map(|data| data.is_some())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn search_item_inclusions(
|
|
||||||
&mut self,
|
|
||||||
nullifier_hashes: &[TreeHashType],
|
|
||||||
) -> Result<Vec<bool>, monotree::Errors> {
|
|
||||||
let mut inclusions = vec![];
|
|
||||||
|
|
||||||
for nullifier_hash in nullifier_hashes {
|
|
||||||
let is_included = self
|
|
||||||
.tree
|
|
||||||
.get(self.curr_root.as_ref(), nullifier_hash)
|
|
||||||
.map(|data| data.is_some())?;
|
|
||||||
|
|
||||||
inclusions.push(is_included);
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(inclusions)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn get_non_membership_proof(
|
|
||||||
&mut self,
|
|
||||||
nullifier_hash: TreeHashType,
|
|
||||||
) -> Result<(Option<Proof>, Option<TreeHashType>), monotree::Errors> {
|
|
||||||
let is_member = self.search_item_inclusion(nullifier_hash)?;
|
|
||||||
|
|
||||||
if is_member {
|
|
||||||
Err(monotree::Errors::new("Is a member"))
|
|
||||||
} else {
|
|
||||||
Ok((
|
|
||||||
self.tree
|
|
||||||
.get_merkle_proof(self.curr_root.as_ref(), &nullifier_hash)?,
|
|
||||||
self.curr_root,
|
|
||||||
))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(clippy::type_complexity)]
|
|
||||||
pub fn get_non_membership_proofs(
|
|
||||||
&mut self,
|
|
||||||
nullifier_hashes: &[TreeHashType],
|
|
||||||
) -> Result<Vec<(Option<Proof>, Option<TreeHashType>)>, monotree::Errors> {
|
|
||||||
let mut non_membership_proofs = vec![];
|
|
||||||
|
|
||||||
for nullifier_hash in nullifier_hashes {
|
|
||||||
let is_member = self.search_item_inclusion(*nullifier_hash)?;
|
|
||||||
|
|
||||||
if is_member {
|
|
||||||
return Err(monotree::Errors::new(
|
|
||||||
format!("{nullifier_hash:?} Is a member").as_str(),
|
|
||||||
));
|
|
||||||
} else {
|
|
||||||
non_membership_proofs.push((
|
|
||||||
self.tree
|
|
||||||
.get_merkle_proof(self.curr_root.as_ref(), nullifier_hash)?,
|
|
||||||
self.curr_root,
|
|
||||||
))
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
Ok(non_membership_proofs)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
impl Default for NullifierSparseMerkleTree {
|
|
||||||
fn default() -> Self {
|
|
||||||
Self::new()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
|
||||||
mod tests {
|
|
||||||
use super::*;
|
|
||||||
use crate::nullifier::UTXONullifier;
|
|
||||||
|
|
||||||
fn create_nullifier(hash: TreeHashType) -> UTXONullifier {
|
|
||||||
UTXONullifier { utxo_hash: hash }
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_new_tree_initialization() {
|
|
||||||
let tree = NullifierSparseMerkleTree::new();
|
|
||||||
assert!(tree.curr_root.is_none());
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_insert_single_item() {
|
|
||||||
let mut tree = NullifierSparseMerkleTree::new();
|
|
||||||
let nullifier = create_nullifier([1u8; 32]); // Sample 32-byte hash
|
|
||||||
|
|
||||||
let result = tree.insert_item(nullifier);
|
|
||||||
assert!(result.is_ok());
|
|
||||||
assert!(tree.curr_root.is_some());
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_insert_multiple_items() {
|
|
||||||
let mut tree = NullifierSparseMerkleTree::new();
|
|
||||||
let nullifiers = vec![
|
|
||||||
create_nullifier([1u8; 32]),
|
|
||||||
create_nullifier([2u8; 32]),
|
|
||||||
create_nullifier([3u8; 32]),
|
|
||||||
];
|
|
||||||
|
|
||||||
let result = tree.insert_items(nullifiers);
|
|
||||||
assert!(result.is_ok());
|
|
||||||
assert!(tree.curr_root.is_some());
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_search_item_inclusion() {
|
|
||||||
let mut tree = NullifierSparseMerkleTree::new();
|
|
||||||
let nullifier = create_nullifier([1u8; 32]);
|
|
||||||
|
|
||||||
tree.insert_item(nullifier.clone()).unwrap();
|
|
||||||
|
|
||||||
let result = tree.search_item_inclusion([1u8; 32]);
|
|
||||||
assert!(result.is_ok());
|
|
||||||
assert_eq!(result.unwrap(), true);
|
|
||||||
|
|
||||||
let non_existing = tree.search_item_inclusion([99u8; 32]);
|
|
||||||
assert!(non_existing.is_ok());
|
|
||||||
assert_eq!(non_existing.unwrap(), false);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_search_multiple_item_inclusions() {
|
|
||||||
let mut tree = NullifierSparseMerkleTree::new();
|
|
||||||
let nullifiers = vec![
|
|
||||||
create_nullifier([1u8; 32]),
|
|
||||||
create_nullifier([2u8; 32]),
|
|
||||||
create_nullifier([3u8; 32]),
|
|
||||||
];
|
|
||||||
|
|
||||||
tree.insert_items(nullifiers).unwrap();
|
|
||||||
|
|
||||||
let search_hashes = vec![[1u8; 32], [2u8; 32], [99u8; 32]];
|
|
||||||
let result = tree.search_item_inclusions(&search_hashes);
|
|
||||||
assert!(result.is_ok());
|
|
||||||
|
|
||||||
let expected_results = vec![true, true, false];
|
|
||||||
assert_eq!(result.unwrap(), expected_results);
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_non_membership_proof() {
|
|
||||||
let mut tree = NullifierSparseMerkleTree::new();
|
|
||||||
let non_member_hash = [5u8; 32];
|
|
||||||
|
|
||||||
let result = tree.get_non_membership_proof(non_member_hash);
|
|
||||||
assert!(result.is_ok());
|
|
||||||
|
|
||||||
let (proof, root) = result.unwrap();
|
|
||||||
assert!(root.is_none());
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_non_membership_proofs_multiple() {
|
|
||||||
let mut tree = NullifierSparseMerkleTree::new();
|
|
||||||
let non_member_hashes = vec![[5u8; 32], [6u8; 32], [7u8; 32]];
|
|
||||||
|
|
||||||
let result = tree.get_non_membership_proofs(&non_member_hashes);
|
|
||||||
assert!(result.is_ok());
|
|
||||||
|
|
||||||
let proofs = result.unwrap();
|
|
||||||
for (proof, root) in proofs {
|
|
||||||
assert!(root.is_none());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_insert_and_get_proof_of_existing_item() {
|
|
||||||
let mut tree = NullifierSparseMerkleTree::new();
|
|
||||||
let nullifier = create_nullifier([1u8; 32]);
|
|
||||||
|
|
||||||
tree.insert_item(nullifier.clone()).unwrap();
|
|
||||||
|
|
||||||
let proof_result = tree.get_non_membership_proof([1u8; 32]);
|
|
||||||
assert!(proof_result.is_err());
|
|
||||||
}
|
|
||||||
|
|
||||||
#[test]
|
|
||||||
fn test_insert_and_get_proofs_of_existing_items() {
|
|
||||||
let mut tree = NullifierSparseMerkleTree::new();
|
|
||||||
let nullifiers = vec![create_nullifier([1u8; 32]), create_nullifier([2u8; 32])];
|
|
||||||
|
|
||||||
tree.insert_items(nullifiers).unwrap();
|
|
||||||
|
|
||||||
let proof_result = tree.get_non_membership_proofs(&[[1u8; 32], [2u8; 32]]);
|
|
||||||
assert!(proof_result.is_err());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -21,8 +21,8 @@ pub struct RegisterAccountRequest {
 #[derive(Serialize, Deserialize, Debug)]
 pub struct SendTxRequest {
     pub transaction: Transaction,
-    ///Nullifier Root, UTXO Commitment Root, Pub Tx Root
-    pub tx_roots: [[u8; 32]; 3],
+    ///UTXO Commitment Root, Pub Tx Root
+    pub tx_roots: [[u8; 32]; 2],
 }

 #[derive(Serialize, Deserialize, Debug)]
@@ -12,7 +12,6 @@ serde.workspace = true
 rand.workspace = true
 k256.workspace = true
 sha2.workspace = true
-monotree.workspace = true
 bincode.workspace = true
 elliptic-curve.workspace = true
 reqwest.workspace = true
@@ -1,5 +1,5 @@
 use std::{
-    collections::{BTreeMap, HashMap},
+    collections::{BTreeMap, HashMap, HashSet},
     path::Path,
 };

@@ -10,7 +10,6 @@ use common::{
     block::Block,
     merkle_tree_public::merkle_tree::{PublicTransactionMerkleTree, UTXOCommitmentsMerkleTree},
     nullifier::UTXONullifier,
-    nullifier_sparse_merkle_tree::NullifierSparseMerkleTree,
     utxo_commitment::UTXOCommitment,
 };
 use k256::AffinePoint;
@@ -26,7 +25,7 @@ pub mod public_context;
 pub struct NodeChainStore {
     pub acc_map: HashMap<AccountAddress, Account>,
     pub block_store: NodeBlockStore,
-    pub nullifier_store: NullifierSparseMerkleTree,
+    pub nullifier_store: HashSet<UTXONullifier>,
     pub utxo_commitments_store: UTXOCommitmentsMerkleTree,
     pub pub_tx_store: PublicTransactionMerkleTree,
 }
@@ -34,7 +33,7 @@ pub struct NodeChainStore {
 impl NodeChainStore {
     pub fn new_with_genesis(home_dir: &Path, genesis_block: Block) -> Self {
         let acc_map = HashMap::new();
-        let nullifier_store = NullifierSparseMerkleTree::default();
+        let nullifier_store = HashSet::new();
         let utxo_commitments_store = UTXOCommitmentsMerkleTree::new(vec![]);
         let pub_tx_store = PublicTransactionMerkleTree::new(vec![]);

@@ -97,13 +96,11 @@ impl NodeChainStore {
                 .collect(),
         );

-        self.nullifier_store.insert_items(
-            tx.nullifier_created_hashes
-                .clone()
-                .into_iter()
-                .map(|hash| UTXONullifier { utxo_hash: hash })
-                .collect(),
-        )?;
+        for nullifier in tx.nullifier_created_hashes.iter() {
+            self.nullifier_store.insert(UTXONullifier {
+                utxo_hash: *nullifier,
+            });
+        }

         if !tx.encoded_data.is_empty() {
             let ephemeral_public_key_sender =
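This hunk, together with the Hash derive added to UTXONullifier earlier in the diff, swaps the sparse-Merkle-tree nullifier store for a std HashSet. A small sketch of the resulting membership check; the std-only setup and the [u8; 32] hash type are assumptions standing in for the crate's TreeHashType:

use std::collections::HashSet;

// Mirrors the diff's UTXONullifier, with the serde derives omitted to stay std-only.
#[derive(Debug, Clone, Default, PartialEq, Eq, Hash)]
pub struct UTXONullifier {
    pub utxo_hash: [u8; 32],
}

fn main() {
    let mut nullifier_store: HashSet<UTXONullifier> = HashSet::new();

    // Record the nullifiers created by processed transactions.
    for hash in [[1u8; 32], [2u8; 32]] {
        nullifier_store.insert(UTXONullifier { utxo_hash: hash });
    }

    // Double-spend style check: a nullifier already in the set must be rejected.
    assert!(nullifier_store.contains(&UTXONullifier { utxo_hash: [1u8; 32] }));
    assert!(!nullifier_store.contains(&UTXONullifier { utxo_hash: [3u8; 32] }));
}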
@@ -153,7 +150,6 @@ impl NodeChainStore {
             caller_address: caller,
             caller_balance: self.acc_map.get(&caller).unwrap().balance,
             account_masks,
-            nullifier_store_root: self.nullifier_store.curr_root.unwrap_or([0; 32]),
             comitment_store_root: self.utxo_commitments_store.get_root().unwrap_or([0; 32]),
             pub_tx_store_root: self.pub_tx_store.get_root().unwrap_or([0; 32]),
         }
@@ -9,7 +9,6 @@ pub const CALLER_ADDRESS: &str = "caller_address";
 pub const CALLER_BALANCE: &str = "caller_balance";
 pub const ACCOUNT_MASKS_KEYS_SORTED: &str = "account_masks_keys_sorted";
 pub const ACCOUNT_MASKS_VALUES_SORTED: &str = "account_masks_values_sorted";
-pub const NULLIFIER_STORE_ROOT: &str = "nullifier_store_root";
 pub const COMMITMENT_STORE_ROOT: &str = "commitment_store_root";
 pub const PUT_TX_STORE_ROOT: &str = "put_tx_store_root";

@@ -18,7 +17,6 @@ pub struct PublicSCContext {
     pub caller_address: AccountAddress,
     pub caller_balance: u64,
     pub account_masks: BTreeMap<AccountAddress, AccountPublicMask>,
-    pub nullifier_store_root: TreeHashType,
     pub comitment_store_root: TreeHashType,
     pub pub_tx_store_root: TreeHashType,
 }
@@ -41,7 +39,6 @@ impl Serialize for PublicSCContext {
         s.serialize_field(CALLER_BALANCE, &self.caller_balance)?;
         s.serialize_field(ACCOUNT_MASKS_KEYS_SORTED, &account_masks_keys)?;
         s.serialize_field(ACCOUNT_MASKS_VALUES_SORTED, &account_mask_values)?;
-        s.serialize_field(NULLIFIER_STORE_ROOT, &self.nullifier_store_root)?;
         s.serialize_field(COMMITMENT_STORE_ROOT, &self.comitment_store_root)?;
         s.serialize_field(PUT_TX_STORE_ROOT, &self.pub_tx_store_root)?;

@@ -100,7 +97,6 @@ mod tests {

     fn create_test_context() -> PublicSCContext {
         let caller_address = [1; 32];
-        let nullifier_store_root = [2; 32];
         let comitment_store_root = [3; 32];
         let pub_tx_store_root = [4; 32];

@@ -118,7 +114,6 @@ mod tests {
             caller_address,
             caller_balance: 100,
             account_masks,
-            nullifier_store_root,
             comitment_store_root,
             pub_tx_store_root,
         }
@ -1,232 +0,0 @@
|
|||||||
use bincode;
|
|
||||||
use common::{
|
|
||||||
commitment::Commitment, commitments_sparse_merkle_tree::CommitmentsSparseMerkleTree,
|
|
||||||
nullifier::UTXONullifier, nullifier_sparse_merkle_tree::NullifierSparseMerkleTree,
|
|
||||||
};
|
|
||||||
use k256::Scalar;
|
|
||||||
use monotree::hasher::Blake3;
|
|
||||||
use monotree::{Hasher, Monotree};
|
|
||||||
use rand::thread_rng;
|
|
||||||
use secp256k1_zkp::{CommitmentSecrets, Generator, PedersenCommitment, Tag, Tweak, SECP256K1};
|
|
||||||
use sha2::{Digest, Sha256};
|
|
||||||
use utxo::utxo_core::UTXO;
|
|
||||||
|
|
||||||
#[allow(unused)]
|
|
||||||
fn commitment_secrets_random(value: u64) -> CommitmentSecrets {
|
|
||||||
CommitmentSecrets {
|
|
||||||
value,
|
|
||||||
value_blinding_factor: Tweak::new(&mut thread_rng()),
|
|
||||||
generator_blinding_factor: Tweak::new(&mut thread_rng()),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn tag_random() -> Tag {
|
|
||||||
use rand::thread_rng;
|
|
||||||
use rand::RngCore;
|
|
||||||
|
|
||||||
let mut bytes = [0u8; 32];
|
|
||||||
thread_rng().fill_bytes(&mut bytes);
|
|
||||||
|
|
||||||
Tag::from(bytes)
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn commit(comm: &CommitmentSecrets, tag: Tag) -> PedersenCommitment {
|
|
||||||
let generator = Generator::new_blinded(SECP256K1, tag, comm.generator_blinding_factor);
|
|
||||||
|
|
||||||
PedersenCommitment::new(SECP256K1, comm.value, comm.value_blinding_factor, generator)
|
|
||||||
}
|
|
||||||
|
|
||||||
fn hash(input: &[u8]) -> Vec<u8> {
|
|
||||||
Sha256::digest(input).to_vec()
|
|
||||||
}
|
|
||||||
|
|
||||||
// Generate nullifiers
|
|
||||||
|
|
||||||
// takes the input_utxo and nsk
|
|
||||||
// returns the nullifiers[i], where the nullifier[i] = hash(in_commitments[i] || nsk) where the hash function
|
|
||||||
pub fn generate_nullifiers(input_utxo: &UTXO, nsk: &[u8]) -> Vec<u8> {
|
|
||||||
let mut input = bincode::serialize(input_utxo).unwrap().to_vec();
|
|
||||||
input.extend_from_slice(nsk);
|
|
||||||
hash(&input)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Generate commitments for output UTXOs
|
|
||||||
|
|
||||||
// uses the list of input_utxos[]
|
|
||||||
// returns in_commitments[] where each in_commitments[i] = Commitment(in_utxos[i]) where the commitment
|
|
||||||
pub fn generate_commitments(input_utxos: &[UTXO]) -> Vec<Vec<u8>> {
|
|
||||||
input_utxos
|
|
||||||
.iter()
|
|
||||||
.map(|utxo| {
|
|
||||||
let serialized = bincode::serialize(utxo).unwrap(); // Serialize UTXO.
|
|
||||||
hash(&serialized)
|
|
||||||
})
|
|
||||||
.collect()
|
|
||||||
}
|
|
||||||
|
|
||||||
// Validate inclusion proof for in_commitments
|
|
||||||
|
|
||||||
// takes the in_commitments[i] as a leaf, the root hash root_commitment and the path in_commitments_proofs[i][],
|
|
||||||
// returns True if the in_commitments[i] is in the tree with root hash root_commitment otherwise returns False, as membership proof.
|
|
||||||
pub fn validate_in_commitments_proof(
|
|
||||||
in_commitment: &Vec<u8>,
|
|
||||||
root_commitment: Vec<u8>,
|
|
||||||
in_commitments_proof: &[Vec<u8>],
|
|
||||||
) -> bool {
|
|
||||||
// Placeholder implementation.
|
|
||||||
// Replace with Merkle proof verification logic.
|
|
||||||
// hash(&[pedersen_commitment.serialize().to_vec(), in_commitments_proof.concat()].concat()) == root_commitment
|
|
||||||
|
|
||||||
let mut nsmt = CommitmentsSparseMerkleTree {
|
|
||||||
curr_root: Option::Some(root_commitment),
|
|
||||||
tree: Monotree::default(),
|
|
||||||
hasher: Blake3::new(),
|
|
||||||
};
|
|
||||||
|
|
||||||
let commitments: Vec<_> = in_commitments_proof
|
|
||||||
.into_iter()
|
|
||||||
.map(|n_p| Commitment {
|
|
||||||
commitment_hash: n_p.clone(),
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
nsmt.insert_items(commitments).unwrap();
|
|
||||||
|
|
||||||
nsmt.get_non_membership_proof(in_commitment.clone())
|
|
||||||
.unwrap()
|
|
||||||
.1
|
|
||||||
.is_some()
|
|
||||||
}
|
|
||||||
|
|
||||||
// Validate non-membership proof for nullifiers
|
|
||||||
|
|
||||||
// takes the nullifiers[i], path nullifiers_proof[i][] and the root hash root_nullifier,
|
|
||||||
// returns True if the nullifiers[i] is not in the tree with root hash root_nullifier otherwise returns False, as non-membership proof.
|
|
||||||
pub fn validate_nullifiers_proof(
|
|
||||||
nullifier: [u8; 32],
|
|
||||||
root_nullifier: [u8; 32],
|
|
||||||
nullifiers_proof: &[[u8; 32]],
|
|
||||||
) -> bool {
|
|
||||||
let mut nsmt = NullifierSparseMerkleTree {
|
|
||||||
curr_root: Option::Some(root_nullifier),
|
|
||||||
tree: Monotree::default(),
|
|
||||||
hasher: Blake3::new(),
|
|
||||||
};
|
|
||||||
|
|
||||||
let nullifiers: Vec<_> = nullifiers_proof
|
|
||||||
.into_iter()
|
|
||||||
.map(|n_p| UTXONullifier { utxo_hash: *n_p })
|
|
||||||
.collect();
|
|
||||||
nsmt.insert_items(nullifiers).unwrap();
|
|
||||||
|
|
||||||
nsmt.get_non_membership_proof(nullifier)
|
|
||||||
.unwrap()
|
|
||||||
.1
|
|
||||||
.is_none()
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check balances
|
|
||||||
|
|
||||||
// takes the public_info and output_utxos[],
|
|
||||||
// returns the True if the token amount in public_info matches the sum of all output_utxos[], otherwise return False.
|
|
||||||
pub fn check_balances(public_info: u128, output_utxos: &[UTXO]) -> bool {
|
|
||||||
let total_output: u128 = output_utxos.iter().map(|utxo| utxo.amount).sum();
|
|
||||||
public_info == total_output
|
|
||||||
}
|
|
||||||
|
|
||||||
// Verify Pedersen commitment
|
|
||||||
|
|
||||||
// takes the public_info, secret_r and pedersen_commitment and
|
|
||||||
// checks that commitment(public_info,secret_r) is equal pedersen_commitment where the commitment is pedersen commitment.
|
|
||||||
pub fn verify_commitment(
|
|
||||||
public_info: u64,
|
|
||||||
secret_r: &[u8],
|
|
||||||
pedersen_commitment: &PedersenCommitment,
|
|
||||||
) -> bool {
|
|
||||||
let commitment_secrets = CommitmentSecrets {
|
|
||||||
value: public_info,
|
|
||||||
value_blinding_factor: Tweak::from_slice(secret_r).unwrap(),
|
|
||||||
generator_blinding_factor: Tweak::new(&mut thread_rng()),
|
|
||||||
};
|
|
||||||
|
|
||||||
let tag = tag_random();
|
|
||||||
let commitment = commit(&commitment_secrets, tag);
|
|
||||||
|
|
||||||
commitment == *pedersen_commitment
|
|
||||||
}
|
|
||||||
|
|
||||||
// new_commitment
|
|
||||||
pub fn new_commitment(public_info: u64, secret_r: &[u8]) -> (Tweak, &[u8], PedersenCommitment) {
|
|
||||||
let generator_blinding_factor = Tweak::new(&mut thread_rng());
|
|
||||||
let commitment_secrets = CommitmentSecrets {
|
|
||||||
value: public_info,
|
|
||||||
value_blinding_factor: Tweak::from_slice(secret_r).unwrap(),
|
|
||||||
generator_blinding_factor,
|
|
||||||
};
|
|
||||||
|
|
||||||
let tag = tag_random();
|
|
||||||
let commitment = commit(&commitment_secrets, tag);
|
|
||||||
|
|
||||||
(generator_blinding_factor, secret_r, commitment)
|
|
||||||
}
|
|
||||||
|
|
||||||
// new_commitment for a Vec of values
|
|
||||||
pub fn new_commitment_vec(
|
|
||||||
public_info_vec: Vec<u64>,
|
|
||||||
secret_r: &[u8],
|
|
||||||
) -> (Tweak, &[u8], Vec<PedersenCommitment>) {
|
|
||||||
let generator_blinding_factor = Tweak::new(&mut thread_rng());
|
|
||||||
let tag = tag_random();
|
|
||||||
|
|
||||||
let vec_commitments = public_info_vec
|
|
||||||
.into_iter()
|
|
||||||
.map(|public_info| {
|
|
||||||
let commitment_secrets = CommitmentSecrets {
|
|
||||||
value: public_info,
|
|
||||||
value_blinding_factor: Tweak::from_slice(secret_r).unwrap(),
|
|
||||||
generator_blinding_factor,
|
|
||||||
};
|
|
||||||
|
|
||||||
commit(&commitment_secrets, tag)
|
|
||||||
})
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
(generator_blinding_factor, secret_r, vec_commitments)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[allow(unused)]
|
|
||||||
fn de_kernel(
|
|
||||||
root_commitment: &[u8],
|
|
||||||
root_nullifier: [u8; 32],
|
|
||||||
public_info: u64,
|
|
||||||
input_utxos: &[UTXO],
|
|
||||||
in_commitments_proof: &[Vec<u8>],
|
|
||||||
nullifiers_proof: &[[u8; 32]],
|
|
||||||
nullifier_secret_key: Scalar,
|
|
||||||
) -> (Vec<u8>, Vec<Vec<u8>>) {
|
|
||||||
check_balances(public_info as u128, input_utxos);
|
|
||||||
|
|
||||||
let nullifiers: Vec<_> = input_utxos
|
|
||||||
.into_iter()
|
|
||||||
.map(|utxo| generate_nullifiers(&utxo, &nullifier_secret_key.to_bytes()))
|
|
||||||
.collect();
|
|
||||||
|
|
||||||
let in_commitments = generate_commitments(&input_utxos);
|
|
||||||
|
|
||||||
for in_commitment in in_commitments {
|
|
||||||
validate_in_commitments_proof(
|
|
||||||
&in_commitment,
|
|
||||||
root_commitment.to_vec(),
|
|
||||||
in_commitments_proof,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
for nullifier in nullifiers.iter() {
|
|
||||||
validate_nullifiers_proof(
|
|
||||||
nullifier[0..32].try_into().unwrap(),
|
|
||||||
root_nullifier,
|
|
||||||
nullifiers_proof,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
(vec![], nullifiers)
|
|
||||||
}
|
|
||||||
@@ -1,3 +0,0 @@
-pub mod de;
-pub mod private_exec;
-pub mod se;
@ -1,133 +0,0 @@
|
|||||||
use bincode;
|
|
||||||
use common::{
|
|
||||||
commitment::Commitment, commitments_sparse_merkle_tree::CommitmentsSparseMerkleTree,
|
|
||||||
nullifier::UTXONullifier, nullifier_sparse_merkle_tree::NullifierSparseMerkleTree,
|
|
||||||
};
|
|
||||||
use k256::Scalar;
|
|
||||||
use monotree::hasher::Blake3;
|
|
||||||
use monotree::{Hasher, Monotree};
|
|
||||||
use sha2::{Digest, Sha256};
|
|
||||||
use utxo::utxo_core::UTXO;
|
|
||||||
|
|
||||||
fn hash(input: &[u8]) -> Vec<u8> {
|
|
||||||
Sha256::digest(input).to_vec()
|
|
||||||
}
|
|
||||||
|
|
||||||
// Generate nullifiers
|
|
||||||
|
|
||||||
// takes the input_utxo and nsk
|
|
||||||
// returns the nullifiers[i], where the nullifier[i] = hash(in_commitments[i] || nsk) where the hash function
|
|
||||||
pub fn generate_nullifiers(input_utxo: &UTXO, nsk: &[u8]) -> Vec<u8> {
|
|
||||||
    let mut input = bincode::serialize(input_utxo).unwrap().to_vec();
    input.extend_from_slice(nsk);
    hash(&input)
}

// Generate commitments for output UTXOs

// uses the list of input_utxos[]
// returns in_commitments[] where each in_commitments[i] = Commitment(in_utxos[i]) where the commitment
pub fn generate_commitments(input_utxos: &[UTXO]) -> Vec<Vec<u8>> {
    input_utxos
        .iter()
        .map(|utxo| {
            let serialized = bincode::serialize(utxo).unwrap(); // Serialize UTXO.
            hash(&serialized)
        })
        .collect()
}

// Validate inclusion proof for in_commitments

// takes the in_commitments[i] as a leaf, the root hash root_commitment and the path in_commitments_proofs[i][],
// returns True if the in_commitments[i] is in the tree with root hash root_commitment otherwise returns False, as membership proof.
pub fn validate_in_commitments_proof(
    in_commitment: &Vec<u8>,
    root_commitment: Vec<u8>,
    in_commitments_proof: &[Vec<u8>],
) -> bool {
    // Placeholder implementation.
    // Replace with Merkle proof verification logic.
    // hash(&[pedersen_commitment.serialize().to_vec(), in_commitments_proof.concat()].concat()) == root_commitment

    let mut nsmt = CommitmentsSparseMerkleTree {
        curr_root: Option::Some(root_commitment),
        tree: Monotree::default(),
        hasher: Blake3::new(),
    };

    let commitments: Vec<_> = in_commitments_proof
        .into_iter()
        .map(|n_p| Commitment {
            commitment_hash: n_p.clone(),
        })
        .collect();
    nsmt.insert_items(commitments).unwrap();

    nsmt.get_non_membership_proof(in_commitment.clone())
        .unwrap()
        .1
        .is_some()
}

// Validate non-membership proof for nullifiers

// takes the nullifiers[i], path nullifiers_proof[i][] and the root hash root_nullifier,
// returns True if the nullifiers[i] is not in the tree with root hash root_nullifier otherwise returns False, as non-membership proof.
pub fn validate_nullifiers_proof(
    nullifier: [u8; 32],
    root_nullifier: [u8; 32],
    nullifiers_proof: &[[u8; 32]],
) -> bool {
    let mut nsmt = NullifierSparseMerkleTree {
        curr_root: Option::Some(root_nullifier),
        tree: Monotree::default(),
        hasher: Blake3::new(),
    };

    let nullifiers: Vec<_> = nullifiers_proof
        .into_iter()
        .map(|n_p| UTXONullifier { utxo_hash: *n_p })
        .collect();
    nsmt.insert_items(nullifiers).unwrap();

    nsmt.get_non_membership_proof(nullifier)
        .unwrap()
        .1
        .is_none()
}

#[allow(unused)]
fn private_kernel(
    root_commitment: &[u8],
    root_nullifier: [u8; 32],
    input_utxos: &[UTXO],
    in_commitments_proof: &[Vec<u8>],
    nullifiers_proof: &[[u8; 32]],
    nullifier_secret_key: Scalar,
) -> (Vec<u8>, Vec<Vec<u8>>) {
    let nullifiers: Vec<_> = input_utxos
        .into_iter()
        .map(|utxo| generate_nullifiers(&utxo, &nullifier_secret_key.to_bytes()))
        .collect();

    let in_commitments = generate_commitments(&input_utxos);

    for in_commitment in in_commitments {
        validate_in_commitments_proof(
            &in_commitment,
            root_commitment.to_vec(),
            in_commitments_proof,
        );
    }

    for nullifier in nullifiers.iter() {
        validate_nullifiers_proof(
            nullifier[0..32].try_into().unwrap(),
            root_nullifier,
            nullifiers_proof,
        );
    }

    (vec![], nullifiers)
}
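For reference, the nullifier derivation used by private_kernel above boils down to hashing the serialized input UTXO together with the nullifier secret key. A minimal standalone sketch using sha2 follows; the byte values are placeholders, not real UTXO data.

use sha2::{Digest, Sha256};

// Sketch: nullifier = SHA-256(serialized_utxo || nsk), mirroring generate_nullifiers above.
fn nullifier_sketch(serialized_utxo: &[u8], nsk: &[u8; 32]) -> Vec<u8> {
    let mut input = serialized_utxo.to_vec();
    input.extend_from_slice(nsk);
    Sha256::digest(&input).to_vec()
}

fn main() {
    let utxo_bytes = vec![1u8, 2, 3]; // placeholder for bincode::serialize(&utxo)
    let nsk = [7u8; 32]; // placeholder nullifier secret key
    assert_eq!(nullifier_sketch(&utxo_bytes, &nsk).len(), 32);
}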
@ -1,186 +0,0 @@
use bincode;
use common::{
    commitment::Commitment, commitments_sparse_merkle_tree::CommitmentsSparseMerkleTree,
    nullifier::UTXONullifier, nullifier_sparse_merkle_tree::NullifierSparseMerkleTree,
};
use k256::Scalar;
use monotree::hasher::Blake3;
use monotree::{Hasher, Monotree};
use rand::thread_rng;
use secp256k1_zkp::{CommitmentSecrets, Generator, PedersenCommitment, Tag, Tweak, SECP256K1};
use sha2::{Digest, Sha256};
use utxo::utxo_core::UTXO;

#[allow(unused)]
fn commitment_secrets_random(value: u64) -> CommitmentSecrets {
    CommitmentSecrets {
        value,
        value_blinding_factor: Tweak::new(&mut thread_rng()),
        generator_blinding_factor: Tweak::new(&mut thread_rng()),
    }
}

pub fn tag_random() -> Tag {
    use rand::thread_rng;
    use rand::RngCore;

    let mut bytes = [0u8; 32];
    thread_rng().fill_bytes(&mut bytes);

    Tag::from(bytes)
}

pub fn commit(comm: &CommitmentSecrets, tag: Tag) -> PedersenCommitment {
    let generator = Generator::new_blinded(SECP256K1, tag, comm.generator_blinding_factor);

    PedersenCommitment::new(SECP256K1, comm.value, comm.value_blinding_factor, generator)
}

fn hash(input: &[u8]) -> Vec<u8> {
    Sha256::digest(input).to_vec()
}

// Generate nullifiers

// takes the pedersen_commitment and nsk then
// returns a list of nullifiers, where the nullifier = hash(pedersen_commitment || nsk) where the hash function will be determined

pub fn generate_nullifiers(pedersen_commitment: &PedersenCommitment, nsk: &[u8]) -> Vec<u8> {
    let mut input = pedersen_commitment.serialize().to_vec();
    input.extend_from_slice(nsk);
    hash(&input)
}

// Generate commitments for output UTXOs

// uses the list of output_utxos[] and
// returns out_commitments[] where each out_commitments[i] = Commitment(output_utxos[i])
// where the commitment will be determined
pub fn generate_commitments(output_utxos: &[UTXO]) -> Vec<Vec<u8>> {
    output_utxos
        .iter()
        .map(|utxo| {
            let serialized = bincode::serialize(utxo).unwrap(); // Serialize UTXO.
            hash(&serialized)
        })
        .collect()
}

// Validate inclusion proof for in_commitments

// takes the pedersen_commitment as a leaf, the root hash root_commitment and the path in_commitments_proof[],
// returns True if the pedersen_commitment is in the tree with root hash root_commitment
// otherwise
// returns False, as membership proof.
pub fn validate_in_commitments_proof(
    pedersen_commitment: &PedersenCommitment,
    root_commitment: Vec<u8>,
    in_commitments_proof: &[Vec<u8>],
) -> bool {
    let mut nsmt = CommitmentsSparseMerkleTree {
        curr_root: Option::Some(root_commitment),
        tree: Monotree::default(),
        hasher: Blake3::new(),
    };

    let commitments: Vec<_> = in_commitments_proof
        .into_iter()
        .map(|n_p| Commitment {
            commitment_hash: n_p.clone(),
        })
        .collect();
    nsmt.insert_items(commitments).unwrap();

    nsmt.get_non_membership_proof(pedersen_commitment.serialize().to_vec())
        .unwrap()
        .1
        .is_some()
}

// Validate non-membership proof for nullifiers

// takes the nullifier, path nullifiers_proof[] and the root hash root_nullifier,
// returns True if the nullifier is not in the tree with root hash root_nullifier
// otherwise
// returns False, as non-membership proof.
pub fn validate_nullifiers_proof(
    nullifier: [u8; 32],
    root_nullifier: [u8; 32],
    nullifiers_proof: &[[u8; 32]],
) -> bool {
    let mut nsmt = NullifierSparseMerkleTree {
        curr_root: Option::Some(root_nullifier),
        tree: Monotree::default(),
        hasher: Blake3::new(),
    };

    let nullifiers: Vec<_> = nullifiers_proof
        .into_iter()
        .map(|n_p| UTXONullifier { utxo_hash: *n_p })
        .collect();
    nsmt.insert_items(nullifiers).unwrap();

    nsmt.get_non_membership_proof(nullifier)
        .unwrap()
        .1
        .is_none()
}

// Check balances

// takes the public_info and output_utxos[],
// returns the True if the token amount in public_info matches the sum of all output_utxos[], otherwise return False.
pub fn check_balances(public_info: u128, output_utxos: &[UTXO]) -> bool {
    let total_output: u128 = output_utxos.iter().map(|utxo| utxo.amount).sum();
    public_info == total_output
}

// Verify Pedersen commitment

// takes the public_info, secret_r and pedersen_commitment and
// checks that commitment(public_info,secret_r) is equal pedersen_commitment where the commitment is pedersen commitment.
pub fn verify_commitment(
    public_info: u64,
    secret_r: &[u8],
    pedersen_commitment: &PedersenCommitment,
) -> bool {
    let commitment_secrets = CommitmentSecrets {
        value: public_info,
        value_blinding_factor: Tweak::from_slice(secret_r).unwrap(),
        generator_blinding_factor: Tweak::new(&mut thread_rng()),
    };

    let tag = tag_random();
    let commitment = commit(&commitment_secrets, tag);

    commitment == *pedersen_commitment
}

#[allow(unused)]
fn se_kernel(
    root_commitment: &[u8],
    root_nullifier: [u8; 32],
    public_info: u64,
    pedersen_commitment: PedersenCommitment,
    secret_r: &[u8],
    output_utxos: &[UTXO],
    in_commitments_proof: &[Vec<u8>],
    nullifiers_proof: &[[u8; 32]],
    nullifier_secret_key: Scalar,
) -> (Vec<u8>, Vec<Vec<u8>>, Vec<u8>) {
    check_balances(public_info as u128, output_utxos);

    let out_commitments = generate_commitments(output_utxos);

    let nullifier = generate_nullifiers(&pedersen_commitment, &nullifier_secret_key.to_bytes());

    validate_in_commitments_proof(
        &pedersen_commitment,
        root_commitment.to_vec(),
        in_commitments_proof,
    );

    verify_commitment(public_info, secret_r, &pedersen_commitment);

    (vec![], out_commitments, nullifier)
}
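The balance rule enforced by check_balances in the removed file above is plain arithmetic: the public amount must equal the sum of the output UTXO amounts. A tiny illustration with placeholder numbers:

fn main() {
    let output_amounts: [u128; 3] = [40, 35, 25]; // placeholder UTXO amounts
    let public_info: u128 = 100;
    // Balanced only when public_info == sum of outputs (100 == 40 + 35 + 25).
    assert!(public_info == output_amounts.iter().sum::<u128>());
}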
@ -10,9 +10,10 @@ use anyhow::Result;
use chain_storage::NodeChainStore;
use common::transaction::{Transaction, TransactionPayload, TxKind};
use config::NodeConfig;
use executions::private_exec::{generate_commitments, generate_nullifiers};
use log::info;
use sc_core::proofs_circuits::pedersen_commitment_vec;
use sc_core::proofs_circuits::{
    generate_commitments, generate_nullifiers, generate_nullifiers_se, pedersen_commitment_vec,
};
use sequencer_client::{json::SendTxResponse, SequencerClient};
use serde::{Deserialize, Serialize};
use storage::sc_db_utils::DataBlobChangeVariant;
@ -28,7 +29,6 @@ pub const BLOCK_GEN_DELAY_SECS: u64 = 20;

pub mod chain_storage;
pub mod config;
pub mod executions;
///Module, which includes pre start setup helperfunctions
pub mod pre_start;
pub mod sequencer_client;
@ -164,10 +164,9 @@ impl NodeCore {
        })
    }

    pub async fn get_roots(&self) -> [[u8; 32]; 3] {
    pub async fn get_roots(&self) -> [[u8; 32]; 2] {
        let storage = self.storage.read().await;
        [
            storage.nullifier_store.curr_root.unwrap_or([0; 32]),
            storage.utxo_commitments_store.get_root().unwrap_or([0; 32]),
            storage.pub_tx_store.get_root().unwrap_or([0; 32]),
        ]
@ -651,7 +650,7 @@ impl NodeCore {
            )
            .unwrap();

        let nullifier = executions::se::generate_nullifiers(
        let nullifier = generate_nullifiers_se(
            &commitment,
            &account
                .key_holder
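After this change get_roots returns only two roots, in the order [utxo_commitments_root, pub_tx_root]; the nullifier root is no longer part of the array. A sketch of how a caller might unpack it, using placeholder values instead of a live NodeCore:

fn main() {
    // Placeholder for `node.get_roots().await` on the updated API.
    let roots: [[u8; 32]; 2] = [[0u8; 32], [0u8; 32]];
    let [utxo_commitments_root, pub_tx_root] = roots;
    assert_eq!(utxo_commitments_root.len(), 32);
    assert_eq!(pub_tx_root.len(), 32);
}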
@ -6,8 +6,8 @@ use serde::{Deserialize, Serialize};
#[derive(Serialize, Deserialize, Debug)]
pub struct SendTxRequest {
    pub transaction: Transaction,
    ///Nullifier Root, UTXO Commitment Root, Pub Tx Root
    ///UTXO Commitment Root, Pub Tx Root
    pub tx_roots: [[u8; 32]; 3],
    pub tx_roots: [[u8; 32]; 2],
}

//Responses
@ -73,7 +73,7 @@ impl SequencerClient {
    pub async fn send_tx(
        &self,
        transaction: Transaction,
        tx_roots: [[u8; 32]; 3],
        tx_roots: [[u8; 32]; 2],
    ) -> Result<SendTxResponse, SequencerClientError> {
        let tx_req = SendTxRequest {
            transaction,
@ -81,6 +81,5 @@ pub fn cast_common_execution_error_into_rpc_error(comm_exec_err: ExecutionFailur
        ExecutionFailureKind::SequencerClientError(seq_cli_err) => {
            cast_seq_client_error_into_rpc_error(seq_cli_err)
        }
        ExecutionFailureKind::MonoTreeError(_) => RpcError::new_internal_error(None, &error_string),
    }
}
@ -12,7 +12,6 @@ serde.workspace = true
rand.workspace = true
k256.workspace = true
sha2.workspace = true
monotree.workspace = true
bincode.workspace = true
elliptic-curve.workspace = true
hex.workspace = true
@ -1,11 +1,5 @@
use bincode;
use common::{
    commitment::Commitment, commitments_sparse_merkle_tree::CommitmentsSparseMerkleTree,
    nullifier::UTXONullifier, nullifier_sparse_merkle_tree::NullifierSparseMerkleTree,
};
use k256::Scalar;
use monotree::hasher::Blake3;
use monotree::{Hasher, Monotree};
use rand::{thread_rng, RngCore};
use secp256k1_zkp::{CommitmentSecrets, Generator, PedersenCommitment, Tag, Tweak, SECP256K1};
use sha2::{Digest, Sha256};
@ -44,59 +38,21 @@ pub fn generate_commitments(input_utxos: &[UTXO]) -> Vec<Vec<u8>> {
// takes the in_commitments[i] as a leaf, the root hash root_commitment and the path in_commitments_proofs[i][],
// returns True if the in_commitments[i] is in the tree with root hash root_commitment otherwise returns False, as membership proof.
pub fn validate_in_commitments_proof(
    in_commitment: &Vec<u8>,
    _in_commitment: &Vec<u8>,
    root_commitment: Vec<u8>,
    _root_commitment: Vec<u8>,
    in_commitments_proof: &[Vec<u8>],
    _in_commitments_proof: &[Vec<u8>],
) -> bool {
    // Placeholder implementation.
    // ToDo: Implement correct check
    // Replace with Merkle proof verification logic.
    // hash(&[pedersen_commitment.serialize().to_vec(), in_commitments_proof.concat()].concat()) == root_commitment

    let mut nsmt = CommitmentsSparseMerkleTree {
    todo!()
        curr_root: Option::Some(root_commitment),
        tree: Monotree::default(),
        hasher: Blake3::new(),
    };

    let commitments: Vec<_> = in_commitments_proof
        .into_iter()
        .map(|n_p| Commitment {
            commitment_hash: n_p.clone(),
        })
        .collect();
    nsmt.insert_items(commitments).unwrap();

    nsmt.get_non_membership_proof(in_commitment.clone())
        .unwrap()
        .1
        .is_some()
}

// Validate non-membership proof for nullifiers
// Validate that `nullifier` has not been present in set items before
pub fn validate_nullifier_not_present_in_set_items(
// takes the nullifiers[i], path nullifiers_proof[i][] and the root hash root_nullifier,
// returns True if the nullifiers[i] is not in the tree with root hash root_nullifier otherwise returns False, as non-membership proof.
pub fn validate_nullifiers_proof(
    nullifier: [u8; 32],
    root_nullifier: [u8; 32],
    nullifiers_items: &[[u8; 32]],
    nullifiers_proof: &[[u8; 32]],
) -> bool {
    let mut nsmt = NullifierSparseMerkleTree {
    !nullifiers_items.contains(&nullifier)
        curr_root: Option::Some(root_nullifier),
        tree: Monotree::default(),
        hasher: Blake3::new(),
    };

    let nullifiers: Vec<_> = nullifiers_proof
        .into_iter()
        .map(|n_p| UTXONullifier { utxo_hash: *n_p })
        .collect();
    nsmt.insert_items(nullifiers).unwrap();

    nsmt.get_non_membership_proof(nullifier)
        .unwrap()
        .1
        .is_none()
}

#[allow(unused)]
@ -124,9 +80,8 @@ fn private_kernel(
    }

    for nullifier in nullifiers.iter() {
        validate_nullifiers_proof(
        validate_nullifier_not_present_in_set_items(
            nullifier[0..32].try_into().unwrap(),
            root_nullifier,
            nullifiers_proof,
        );
    }
@ -243,9 +198,8 @@ fn de_kernel(
    }

    for nullifier in nullifiers.iter() {
        validate_nullifiers_proof(
        validate_nullifier_not_present_in_set_items(
            nullifier[0..32].try_into().unwrap(),
            root_nullifier,
            nullifiers_proof,
        );
    }
@ -260,28 +214,13 @@ fn de_kernel(
// otherwise
// returns False, as membership proof.
pub fn validate_in_commitments_proof_se(
    pedersen_commitment: &PedersenCommitment,
    _pedersen_commitment: &PedersenCommitment,
    root_commitment: Vec<u8>,
    _root_commitment: Vec<u8>,
    in_commitments_proof: &[Vec<u8>],
    _in_commitments_proof: &[Vec<u8>],
) -> bool {
    let mut nsmt = CommitmentsSparseMerkleTree {
    // ToDo: Implement correct check
        curr_root: Option::Some(root_commitment),
        tree: Monotree::default(),
        hasher: Blake3::new(),
    };

    let commitments: Vec<_> = in_commitments_proof
    todo!()
        .into_iter()
        .map(|n_p| Commitment {
            commitment_hash: n_p.clone(),
        })
        .collect();
    nsmt.insert_items(commitments).unwrap();

    nsmt.get_non_membership_proof(pedersen_commitment.serialize().to_vec())
        .unwrap()
        .1
        .is_some()
}

// Generate nullifiers SE
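The replacement check, validate_nullifier_not_present_in_set_items, reduces to a plain slice lookup: it returns true only when the nullifier is absent from the supplied items. A minimal illustration with placeholder hashes:

fn main() {
    let seen: [[u8; 32]; 2] = [[1u8; 32], [2u8; 32]]; // placeholder set items
    let fresh = [3u8; 32];
    let spent = [1u8; 32];
    assert!(!seen.contains(&fresh)); // an unseen nullifier passes the check
    assert!(seen.contains(&spent)); // an already-present nullifier fails it
}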
@ -59,9 +59,8 @@ impl SequencerCore {
        }
    }

    pub fn get_tree_roots(&self) -> [[u8; 32]; 3] {
    pub fn get_tree_roots(&self) -> [[u8; 32]; 2] {
        [
            self.store.nullifier_store.curr_root.unwrap_or([0; 32]),
            self.store
                .utxo_commitments_store
                .get_root()
@ -73,7 +72,7 @@ impl SequencerCore {
    pub fn transaction_pre_check(
        &mut self,
        tx: &Transaction,
        tx_roots: [[u8; 32]; 3],
        tx_roots: [[u8; 32]; 2],
    ) -> Result<(), TransactionMalformationErrorKind> {
        let Transaction {
            hash,
@ -135,10 +134,9 @@ impl SequencerCore {
        let nullifier_tree_check = nullifier_created_hashes
            .iter()
            .map(|nullifier_hash| {
                self.store
                self.store.nullifier_store.contains(&UTXONullifier {
                    .nullifier_store
                    utxo_hash: *nullifier_hash,
                    .search_item_inclusion(*nullifier_hash)
                })
                    .unwrap_or(false)
            })
            .any(|check| check);
        let utxo_commitments_check = utxo_commitments_created_hashes
@ -173,7 +171,7 @@ impl SequencerCore {
    pub fn push_tx_into_mempool_pre_check(
        &mut self,
        item: TransactionMempool,
        tx_roots: [[u8; 32]; 3],
        tx_roots: [[u8; 32]; 2],
    ) -> Result<(), TransactionMalformationErrorKind> {
        self.transaction_pre_check(&item.tx, tx_roots)?;

@ -187,7 +185,8 @@ impl SequencerCore {
        tx: TransactionMempool,
    ) -> Result<(), TransactionMalformationErrorKind> {
        let Transaction {
            hash,
            // ToDo: remove hashing of transactions on node side [Issue #66]
            hash: _,
            ref utxo_commitments_created_hashes,
            ref nullifier_created_hashes,
            ..
@ -199,16 +198,10 @@ impl SequencerCore {
                .add_tx(UTXOCommitment { hash: *utxo_comm });
        }

        for nullifier in nullifier_created_hashes {
        for nullifier in nullifier_created_hashes.iter() {
            self.store
            self.store.nullifier_store.insert(UTXONullifier {
                .nullifier_store
                utxo_hash: *nullifier,
                .insert_item(UTXONullifier {
            });
                    utxo_hash: *nullifier,
                })
                .map_err(|err| TransactionMalformationErrorKind::FailedToInsert {
                    tx: hash,
                    details: format!("{err:?}"),
                })?;
        }

        self.store.pub_tx_store.add_tx(tx.tx);
@ -225,12 +218,14 @@ impl SequencerCore {

    ///Produces new block from transactions in mempool
    pub fn produce_new_block_with_mempool_transactions(&mut self) -> Result<u64> {
        let new_block_height = self.chain_height + 1;

        let transactions = self
            .mempool
            .pop_size(self.sequencer_config.max_num_tx_in_block);

        for tx in transactions.clone() {
        for tx in &transactions {
            self.execute_check_transaction_on_state(tx)?;
            self.execute_check_transaction_on_state(tx.clone())?;
        }

        let prev_block_hash = self
@ -240,7 +235,7 @@ impl SequencerCore {
            .hash;

        let hashable_data = HashableBlockData {
            block_id: self.chain_height + 1,
            block_id: new_block_height,
            prev_block_id: self.chain_height,
            transactions: transactions.into_iter().map(|tx_mem| tx_mem.tx).collect(),
            data: vec![],
@ -337,7 +332,7 @@ mod tests {
        common_setup(&mut sequencer);

        let roots = sequencer.get_tree_roots();
        assert_eq!(roots.len(), 3); // Should return three roots
        assert_eq!(roots.len(), 2); // Should return two roots
    }

    #[test]
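With the sequencer's nullifier store now HashSet-backed, the double-spend guard becomes a contains check in transaction_pre_check followed by an insert when the transaction is applied. A simplified sketch over raw 32-byte hashes (the diff wraps them in UTXONullifier):

use std::collections::HashSet;

fn main() {
    let mut nullifier_store: HashSet<[u8; 32]> = HashSet::new();
    let nullifier = [9u8; 32]; // placeholder nullifier hash

    // Pre-check: reject the transaction if the nullifier was already recorded.
    assert!(!nullifier_store.contains(&nullifier));

    // Apply: record the nullifier so a second spend is rejected later.
    nullifier_store.insert(nullifier);
    assert!(nullifier_store.contains(&nullifier));
}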
@ -1,11 +1,11 @@
use std::path::Path;
use std::{collections::HashSet, path::Path};

use accounts_store::SequencerAccountsStore;
use block_store::SequecerBlockStore;
use common::{
    block::{Block, HashableBlockData},
    merkle_tree_public::merkle_tree::{PublicTransactionMerkleTree, UTXOCommitmentsMerkleTree},
    nullifier_sparse_merkle_tree::NullifierSparseMerkleTree,
    nullifier::UTXONullifier,
};
use rand::{rngs::OsRng, RngCore};

@ -15,7 +15,7 @@ pub mod block_store;
pub struct SequecerChainStore {
    pub acc_store: SequencerAccountsStore,
    pub block_store: SequecerBlockStore,
    pub nullifier_store: NullifierSparseMerkleTree,
    pub nullifier_store: HashSet<UTXONullifier>,
    pub utxo_commitments_store: UTXOCommitmentsMerkleTree,
    pub pub_tx_store: PublicTransactionMerkleTree,
}
@ -23,7 +23,7 @@ pub struct SequecerChainStore {
impl SequecerChainStore {
    pub fn new_with_genesis(home_dir: &Path, genesis_id: u64, is_genesis_random: bool) -> Self {
        let acc_store = SequencerAccountsStore::default();
        let nullifier_store = NullifierSparseMerkleTree::default();
        let nullifier_store = HashSet::new();
        let utxo_commitments_store = UTXOCommitmentsMerkleTree::new(vec![]);
        let pub_tx_store = PublicTransactionMerkleTree::new(vec![]);

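Storing UTXONullifier values in a HashSet, as the new SequecerChainStore field does, requires the type to implement Eq and Hash. A hypothetical newtype shows the shape; the real type lives in the common crate and may differ:

use std::collections::HashSet;

// Assumption: a 32-byte-hash newtype compared and hashed by value.
#[derive(PartialEq, Eq, Hash)]
struct UTXONullifier {
    utxo_hash: [u8; 32],
}

fn main() {
    let mut store: HashSet<UTXONullifier> = HashSet::new();
    store.insert(UTXONullifier { utxo_hash: [0u8; 32] });
    assert!(store.contains(&UTXONullifier { utxo_hash: [0u8; 32] }));
}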
@ -35,12 +35,25 @@ pub const DB_META_FIRST_BLOCK_SET_KEY: &str = "first_block_set";
///Key to list of all known smart contract addresses
pub const DB_META_SC_LIST: &str = "sc_list";

///Key base for storing snapshot which describe block id
pub const DB_SNAPSHOT_BLOCK_ID_KEY: &str = "block_id";
///Key base for storing snapshot which describe commitment
pub const DB_SNAPSHOT_COMMITMENT_KEY: &str = "commitment";
///Key base for storing snapshot which describe transaction
pub const DB_SNAPSHOT_TRANSACTION_KEY: &str = "transaction";
///Key base for storing snapshot which describe nullifier
pub const DB_SNAPSHOT_NULLIFIER_KEY: &str = "nullifier";
///Key base for storing snapshot which describe account
pub const DB_SNAPSHOT_ACCOUNT_KEY: &str = "account";

///Name of block column family
pub const CF_BLOCK_NAME: &str = "cf_block";
///Name of meta column family
pub const CF_META_NAME: &str = "cf_meta";
///Name of smart contract column family
pub const CF_SC_NAME: &str = "cf_sc";
///Name of snapshot column family
pub const CF_SNAPSHOT_NAME: &str = "cf_snapshot";

///Suffix, used to mark field, which contain length of smart contract
pub const SC_LEN_SUFFIX: &str = "sc_len";
@ -59,6 +72,7 @@ impl RocksDBIO {
        let cfb = ColumnFamilyDescriptor::new(CF_BLOCK_NAME, cf_opts.clone());
        let cfmeta = ColumnFamilyDescriptor::new(CF_META_NAME, cf_opts.clone());
        let cfsc = ColumnFamilyDescriptor::new(CF_SC_NAME, cf_opts.clone());
        let cfsnapshot = ColumnFamilyDescriptor::new(CF_SNAPSHOT_NAME, cf_opts.clone());

        let mut db_opts = Options::default();
        db_opts.create_missing_column_families(true);
@ -66,7 +80,7 @@ impl RocksDBIO {
        let db = DBWithThreadMode::<MultiThreaded>::open_cf_descriptors(
            &db_opts,
            path,
            vec![cfb, cfmeta, cfsc],
            vec![cfb, cfmeta, cfsc, cfsnapshot],
        );

        let dbio = Self {
@ -101,6 +115,7 @@ impl RocksDBIO {
        //ToDo: Add more column families for different data
        let _cfb = ColumnFamilyDescriptor::new(CF_BLOCK_NAME, cf_opts.clone());
        let _cfmeta = ColumnFamilyDescriptor::new(CF_META_NAME, cf_opts.clone());
        let _cfsnapshot = ColumnFamilyDescriptor::new(CF_SNAPSHOT_NAME, cf_opts.clone());

        let mut db_opts = Options::default();
        db_opts.create_missing_column_families(true);
@ -121,6 +136,10 @@ impl RocksDBIO {
        self.db.cf_handle(CF_SC_NAME).unwrap()
    }

    pub fn snapshot_column(&self) -> Arc<BoundColumnFamily> {
        self.db.cf_handle(CF_SNAPSHOT_NAME).unwrap()
    }

    pub fn get_meta_first_block_in_db(&self) -> DbResult<u64> {
        let cf_meta = self.meta_column();
        let res = self
@ -384,6 +403,142 @@ impl RocksDBIO {

        Ok(data_blob_list)
    }

    pub fn get_snapshot_block_id(&self) -> DbResult<u64> {
        let cf_snapshot = self.snapshot_column();
        let res = self
            .db
            .get_cf(&cf_snapshot, DB_SNAPSHOT_BLOCK_ID_KEY)
            .map_err(|rerr| DbError::rocksdb_cast_message(rerr, None))?;

        if let Some(data) = res {
            Ok(u64::from_be_bytes(data.try_into().unwrap()))
        } else {
            Err(DbError::db_interaction_error(
                "Snapshot block ID not found".to_string(),
            ))
        }
    }

    pub fn get_snapshot_commitment(&self) -> DbResult<Vec<u8>> {
        let cf_snapshot = self.snapshot_column();
        let res = self
            .db
            .get_cf(&cf_snapshot, DB_SNAPSHOT_COMMITMENT_KEY)
            .map_err(|rerr| DbError::rocksdb_cast_message(rerr, None))?;

        if let Some(data) = res {
            Ok(data)
        } else {
            Err(DbError::db_interaction_error(
                "Snapshot commitment not found".to_string(),
            ))
        }
    }

    pub fn get_snapshot_transaction(&self) -> DbResult<Vec<u8>> {
        let cf_snapshot = self.snapshot_column();
        let res = self
            .db
            .get_cf(&cf_snapshot, DB_SNAPSHOT_TRANSACTION_KEY)
            .map_err(|rerr| DbError::rocksdb_cast_message(rerr, None))?;

        if let Some(data) = res {
            Ok(data)
        } else {
            Err(DbError::db_interaction_error(
                "Snapshot transaction not found".to_string(),
            ))
        }
    }

    pub fn get_snapshot_nullifier(&self) -> DbResult<Vec<u8>> {
        let cf_snapshot = self.snapshot_column();
        let res = self
            .db
            .get_cf(&cf_snapshot, DB_SNAPSHOT_NULLIFIER_KEY)
            .map_err(|rerr| DbError::rocksdb_cast_message(rerr, None))?;

        if let Some(data) = res {
            Ok(data)
        } else {
            Err(DbError::db_interaction_error(
                "Snapshot nullifier not found".to_string(),
            ))
        }
    }

    pub fn get_snapshot_account(&self) -> DbResult<Vec<u8>> {
        let cf_snapshot = self.snapshot_column();
        let res = self
            .db
            .get_cf(&cf_snapshot, DB_SNAPSHOT_ACCOUNT_KEY)
            .map_err(|rerr| DbError::rocksdb_cast_message(rerr, None))?;

        if let Some(data) = res {
            Ok(data)
        } else {
            Err(DbError::db_interaction_error(
                "Snapshot account not found".to_string(),
            ))
        }
    }

    pub fn put_snapshot_block_id_db(&self, block_id: u64) -> DbResult<()> {
        let cf_snapshot = self.snapshot_column();
        self.db
            .put_cf(
                &cf_snapshot,
                DB_SNAPSHOT_BLOCK_ID_KEY.as_bytes(),
                block_id.to_be_bytes(),
            )
            .map_err(|rerr| DbError::rocksdb_cast_message(rerr, None))?;
        Ok(())
    }

    pub fn put_snapshot_commitement_db(&self, commitment: Vec<u8>) -> DbResult<()> {
        let cf_snapshot = self.snapshot_column();
        self.db
            .put_cf(
                &cf_snapshot,
                DB_SNAPSHOT_COMMITMENT_KEY.as_bytes(),
                commitment,
            )
            .map_err(|rerr| DbError::rocksdb_cast_message(rerr, None))?;
        Ok(())
    }

    pub fn put_snapshot_transaction_db(&self, transaction: Vec<u8>) -> DbResult<()> {
        let cf_snapshot = self.snapshot_column();
        self.db
            .put_cf(
                &cf_snapshot,
                DB_SNAPSHOT_TRANSACTION_KEY.as_bytes(),
                transaction,
            )
            .map_err(|rerr| DbError::rocksdb_cast_message(rerr, None))?;
        Ok(())
    }

    pub fn put_snapshot_nullifier_db(&self, nullifier: Vec<u8>) -> DbResult<()> {
        let cf_snapshot = self.snapshot_column();
        self.db
            .put_cf(
                &cf_snapshot,
                DB_SNAPSHOT_NULLIFIER_KEY.as_bytes(),
                nullifier,
            )
            .map_err(|rerr| DbError::rocksdb_cast_message(rerr, None))?;
        Ok(())
    }

    pub fn put_snapshot_account_db(&self, account: Vec<u8>) -> DbResult<()> {
        let cf_snapshot = self.snapshot_column();
        self.db
            .put_cf(&cf_snapshot, DB_SNAPSHOT_ACCOUNT_KEY.as_bytes(), account)
            .map_err(|rerr| DbError::rocksdb_cast_message(rerr, None))?;
        Ok(())
    }
}

///Creates address for sc data blob at corresponding id
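The new snapshot helpers are thin wrappers around RocksDB column-family reads and writes. A standalone sketch of the same round trip using the rocksdb crate directly; the path and key below are illustrative, not taken from the repository:

use rocksdb::{ColumnFamilyDescriptor, Options, DB};

fn main() -> Result<(), rocksdb::Error> {
    let mut opts = Options::default();
    opts.create_if_missing(true);
    opts.create_missing_column_families(true);
    let cfs = vec![ColumnFamilyDescriptor::new("cf_snapshot", Options::default())];
    let db = DB::open_cf_descriptors(&opts, "/tmp/snapshot-demo-db", cfs)?;

    let cf = db.cf_handle("cf_snapshot").expect("column family opened above");
    // Write the snapshot block id as big-endian bytes, then read it back.
    db.put_cf(cf, b"block_id", 42u64.to_be_bytes())?;
    let raw = db.get_cf(cf, b"block_id")?.expect("value was just written");
    assert_eq!(u64::from_be_bytes(raw.as_slice().try_into().unwrap()), 42);
    Ok(())
}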
@ -9,7 +9,6 @@ serde_json.workspace = true
env_logger.workspace = true
log.workspace = true
serde.workspace = true
monotree.workspace = true
sha2.workspace = true
hex.workspace = true
rand.workspace = true