fix: utxo nullifier calculation fix and circuits updates

Oleksandr Pravdyvyi 2025-06-26 07:21:31 +03:00
parent 5172a95f53
commit 5d5dde33b7
8 changed files with 581 additions and 259 deletions


@@ -93,6 +93,24 @@ pub struct AccountPublicMask {
     pub balance: u64,
 }
 
+impl AccountPublicMask {
+    pub fn encrypt_data(
+        ephemeral_key_holder: &EphemeralKeyHolder,
+        viewing_public_key_receiver: AffinePoint,
+        data: &[u8],
+    ) -> (CipherText, Nonce) {
+        ephemeral_key_holder.encrypt_data(viewing_public_key_receiver, data)
+    }
+
+    pub fn make_tag(&self) -> Tag {
+        self.address[0]
+    }
+
+    pub fn produce_ephemeral_key_holder(&self) -> EphemeralKeyHolder {
+        EphemeralKeyHolder::new_os_random()
+    }
+}
+
 impl Account {
     pub fn new() -> Self {
         let key_holder = AddressKeyHolder::new_os_random();
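A minimal sketch of how these new helpers might be driven together. Everything beyond the hunk itself is an assumption, in particular a `viewing_public_key` field on `AccountPublicMask`:

// Hypothetical usage sketch; `viewing_public_key` as a field on
// AccountPublicMask is an assumption, not confirmed API.
fn encrypt_for(mask: &AccountPublicMask, payload: &[u8]) -> (CipherText, Nonce) {
    // Fresh ephemeral key per message, via the new helper.
    let eph = mask.produce_ephemeral_key_holder();
    // `make_tag` exposes the first address byte for cheap recipient scanning.
    let _tag = mask.make_tag();
    // Encrypt against the receiver's viewing key (assumed field).
    AccountPublicMask::encrypt_data(&eph, mask.viewing_public_key, payload)
}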


@@ -11,6 +11,7 @@ use crate::{transaction::Transaction, utxo_commitment::UTXOCommitment};
 use super::{hasher::OwnHasher, tree_leav_item::TreeLeavItem, TreeHashType};
 
+#[derive(Clone)]
 pub struct HashStorageMerkleTree<Leav: TreeLeavItem + Clone> {
     leaves: HashMap<usize, Leav>,
     hash_to_id_map: HashMap<TreeHashType, usize>,

sc_core/src/blob_utils.rs (new file, 136 lines)

@@ -0,0 +1,136 @@
use serde::Serialize;
use storage::{
    sc_db_utils::{produce_blob_from_fit_vec, DataBlob, DataBlobChangeVariant},
    SC_DATA_BLOB_SIZE,
};

/// Creates a blob list from a generic serializable state.
///
/// `ToDo`: Find a way to align data so that read and write operations in the db are minimized.
pub fn produce_blob_list_from_sc_public_state<S: Serialize>(
    state: &S,
) -> Result<Vec<DataBlob>, serde_json::Error> {
    let mut blob_list = vec![];
    let ser_data = serde_json::to_vec(state)?;
    // `ToDo`: Replace with `next_chunk` usage, when the feature stabilizes in Rust.
    for i in 0..=(ser_data.len() / SC_DATA_BLOB_SIZE) {
        let next_chunk: Vec<u8> = if (i + 1) * SC_DATA_BLOB_SIZE < ser_data.len() {
            ser_data[(i * SC_DATA_BLOB_SIZE)..((i + 1) * SC_DATA_BLOB_SIZE)].to_vec()
        } else {
            ser_data[(i * SC_DATA_BLOB_SIZE)..].to_vec()
        };
        blob_list.push(produce_blob_from_fit_vec(next_chunk));
    }
    Ok(blob_list)
}

/// Compares two consecutive-in-time blob lists to produce the list of changed ids.
pub fn compare_blob_lists(
    blob_list_old: &[DataBlob],
    blob_list_new: &[DataBlob],
) -> Vec<DataBlobChangeVariant> {
    let mut changed_ids = vec![];
    let mut id_end = 0;
    let old_len = blob_list_old.len();
    let new_len = blob_list_new.len();
    // Tail blobs present in only one of the lists are deletions or creations.
    if old_len > new_len {
        for id in new_len..old_len {
            changed_ids.push(DataBlobChangeVariant::Deleted { id });
        }
    } else if new_len > old_len {
        for id in old_len..new_len {
            changed_ids.push(DataBlobChangeVariant::Created {
                id,
                blob: blob_list_new[id],
            });
        }
    }
    // Walk the shared prefix of both lists and record modified blobs.
    while let (Some(old), Some(new)) = (blob_list_old.get(id_end), blob_list_new.get(id_end)) {
        if old != new {
            changed_ids.push(DataBlobChangeVariant::Modified {
                id: id_end,
                blob_old: *old,
                blob_new: *new,
            });
        }
        id_end += 1;
    }
    changed_ids
}

#[cfg(test)]
mod tests {
    use super::*;
    use serde::Serialize;

    const TEST_BLOB_SIZE: usize = 256; // Define a test blob size for simplicity.
    // Shadows the storage constant; `const` (not `static`) so it can be used as an array length.
    const SC_DATA_BLOB_SIZE: usize = TEST_BLOB_SIZE;

    #[derive(Serialize)]
    struct TestState {
        a: u32,
        b: u32,
    }

    #[test]
    fn test_produce_blob_list_from_sc_public_state() {
        let state = TestState { a: 42, b: 99 };
        let result = produce_blob_list_from_sc_public_state(&state).unwrap();
        assert!(!result.is_empty());
    }

    #[test]
    fn test_compare_blob_lists_created() {
        let old_list: Vec<DataBlob> = vec![];
        let new_list: Vec<DataBlob> = vec![[1; SC_DATA_BLOB_SIZE].into()];
        let changes = compare_blob_lists(&old_list, &new_list);
        assert_eq!(changes.len(), 1);
        assert!(matches!(changes[0], DataBlobChangeVariant::Created { .. }));
    }

    #[test]
    fn test_compare_blob_lists_deleted() {
        let old_list: Vec<DataBlob> = vec![[1; SC_DATA_BLOB_SIZE].into()];
        let new_list: Vec<DataBlob> = vec![];
        let changes = compare_blob_lists(&old_list, &new_list);
        assert_eq!(changes.len(), 1);
        assert!(matches!(changes[0], DataBlobChangeVariant::Deleted { .. }));
    }

    #[test]
    fn test_compare_blob_lists_modified() {
        let old_list: Vec<DataBlob> = vec![[1; SC_DATA_BLOB_SIZE].into()];
        let new_list: Vec<DataBlob> = vec![[2; SC_DATA_BLOB_SIZE].into()];
        let changes = compare_blob_lists(&old_list, &new_list);
        assert_eq!(changes.len(), 1);
        assert!(matches!(changes[0], DataBlobChangeVariant::Modified { .. }));
    }
}
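As a rough illustration of the intended round trip through this new module, written as if inside it, with a hypothetical `Counter` state:

// Sketch: serialize two versions of a state into blobs, then diff them.
#[derive(serde::Serialize)]
struct Counter {
    value: u64,
}

fn state_delta() -> Vec<DataBlobChangeVariant> {
    let before = produce_blob_list_from_sc_public_state(&Counter { value: 1 }).unwrap();
    let after = produce_blob_list_from_sc_public_state(&Counter { value: 2 }).unwrap();
    // Equal-length blob lists, so only `Modified` entries can appear here.
    compare_blob_lists(&before, &after)
}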


@@ -1,8 +1,7 @@
 use ark_bn254::Fr;
 use light_poseidon::{Poseidon, PoseidonBytesHasher};
 
-#[allow(unused)]
-fn poseidon_hash(inputs: &[&[u8]]) -> anyhow::Result<[u8; 32]> {
+pub fn poseidon_hash(inputs: &[&[u8]]) -> anyhow::Result<[u8; 32]> {
     let mut poseidon = Poseidon::<Fr>::new_circom(2).unwrap();
     let hash = poseidon.hash_bytes_be(inputs)?;
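Making the hasher `pub` lets callers outside the module hash pairs directly; a minimal sketch (the `new_circom(2)` arity means exactly two inputs per call):

// Sketch: hashing two 32-byte big-endian field elements with the
// now-public helper from sc_core::cryptography.
use sc_core::cryptography::poseidon_hash;

fn main() -> anyhow::Result<()> {
    let left = [1u8; 32];
    let right = [2u8; 32];
    let digest = poseidon_hash(&[&left, &right])?;
    assert_eq!(digest.len(), 32);
    Ok(())
}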


@@ -1,3 +1,5 @@
+pub mod blob_utils;
 pub mod cryptography;
 pub mod proofs_circuits;
+pub mod public_context;
 pub mod transaction_payloads_tools;


@@ -1,28 +1,42 @@
 use bincode;
-use k256::Scalar;
+use common::merkle_tree_public::merkle_tree::UTXOCommitmentsMerkleTree;
 use rand::{thread_rng, RngCore};
 use secp256k1_zkp::{CommitmentSecrets, Generator, PedersenCommitment, Tag, Tweak, SECP256K1};
 use sha2::{Digest, Sha256};
+use std::collections::HashSet;
 use utxo::utxo_core::UTXO;
+
+use crate::{cryptography::poseidon_hash, public_context::PublicSCContext};
 
 fn hash(input: &[u8]) -> Vec<u8> {
     Sha256::digest(input).to_vec()
 }
 
-// Generate nullifiers
-// takes the input_utxo and nsk
-// returns the nullifiers[i], where the nullifier[i] = hash(in_commitments[i] || nsk) where the hash function
-pub fn generate_nullifiers(input_utxo: &UTXO, nsk: &[u8]) -> Vec<u8> {
-    let mut input = bincode::serialize(input_utxo).unwrap().to_vec();
-    input.extend_from_slice(nsk);
-    hash(&input)
+/// Generate nullifiers
+///
+/// takes the input_utxo and npk
+///
+/// returns the nullifiers[i], where the nullifiers[i] = poseidon_hash(in_commitments[i] || npk)
+pub fn generate_nullifiers(input_utxo: &UTXO, npk: &[u8]) -> Vec<u8> {
+    let commitment = generate_commitment(input_utxo);
+    poseidon_hash(&[commitment.as_ref(), npk]).unwrap().to_vec()
 }
 
-// Generate commitments for output UTXOs
-// uses the list of input_utxos[]
-// returns in_commitments[] where each in_commitments[i] = Commitment(in_utxos[i]) where the commitment
+/// Generate commitment for a UTXO
+///
+/// uses the input_utxo
+///
+/// returns the commitment, where the commitment is hash(bincode(input_utxo))
+pub fn generate_commitment(input_utxo: &UTXO) -> Vec<u8> {
+    let serialized = bincode::serialize(input_utxo).unwrap(); // Serialize the UTXO.
+    hash(&serialized)
+}
+
+/// Generate commitments for UTXOs
+///
+/// uses the input_utxos
+///
+/// returns the commitments
 pub fn generate_commitments(input_utxos: &[UTXO]) -> Vec<Vec<u8>> {
     input_utxos
         .iter()
@@ -33,60 +47,122 @@ pub fn generate_commitments(input_utxos: &[UTXO]) -> Vec<Vec<u8>> {
         .collect()
 }
 
-// Validate inclusion proof for in_commitments
-// takes the in_commitments[i] as a leaf, the root hash root_commitment and the path in_commitments_proofs[i][],
-// returns True if the in_commitments[i] is in the tree with root hash root_commitment otherwise returns False, as membership proof.
+/// Validate inclusion proof for in_commitments
+///
+/// ToDo: Solve it in a more scalable way
 pub fn validate_in_commitments_proof(
-    _in_commitment: &Vec<u8>,
-    _root_commitment: Vec<u8>,
-    _in_commitments_proof: &[Vec<u8>],
+    in_commitment: &Vec<u8>,
+    commitment_tree: &UTXOCommitmentsMerkleTree,
 ) -> bool {
-    // ToDo: Implement correct check
-    todo!()
+    let aligned_hash: [u8; 32] = in_commitment.clone().try_into().unwrap();
+    commitment_tree.get_proof(aligned_hash).is_some()
 }
 
-// Validate that `nullifier` has not been present in set items before
+/// Validate that `nullifier` has not been present in set items before
 pub fn validate_nullifier_not_present_in_set_items(
     nullifier: [u8; 32],
-    nullifiers_items: &[[u8; 32]],
+    nullifiers_set: &HashSet<[u8; 32]>,
 ) -> bool {
-    !nullifiers_items.contains(&nullifier)
+    !nullifiers_set.contains(&nullifier)
 }
 
-#[allow(unused)]
-fn private_kernel(
-    root_commitment: &[u8],
-    root_nullifier: [u8; 32],
-    input_utxos: &[UTXO],
-    in_commitments_proof: &[Vec<u8>],
-    nullifiers_proof: &[[u8; 32]],
-    nullifier_secret_key: Scalar,
-) -> (Vec<u8>, Vec<Vec<u8>>) {
-    let nullifiers: Vec<_> = input_utxos
-        .into_iter()
-        .map(|utxo| generate_nullifiers(&utxo, &nullifier_secret_key.to_bytes()))
-        .collect();
-    let in_commitments = generate_commitments(&input_utxos);
-    for in_commitment in in_commitments {
-        validate_in_commitments_proof(
-            &in_commitment,
-            root_commitment.to_vec(),
-            in_commitments_proof,
-        );
-    }
-    for nullifier in nullifiers.iter() {
-        validate_nullifier_not_present_in_set_items(
-            nullifier[0..32].try_into().unwrap(),
-            nullifiers_proof,
-        );
-    }
-    (vec![], nullifiers)
+/// Check that the input UTXO balances are equal to the output UTXO balances
+pub fn check_balances_private(in_utxos: &[UTXO], out_utxos: &[UTXO]) -> bool {
+    let in_sum = in_utxos.iter().fold(0, |prev, utxo| prev + utxo.amount);
+    let out_sum = out_utxos.iter().fold(0, |prev, utxo| prev + utxo.amount);
+    in_sum == out_sum
+}
+
+pub fn private_circuit(
+    input_utxos: &[UTXO],
+    output_utxos: &[UTXO],
+    public_context: &PublicSCContext,
+) -> (Vec<Vec<u8>>, Vec<Vec<u8>>) {
+    assert!(check_balances_private(input_utxos, output_utxos));
+    let in_commitments = generate_commitments(&input_utxos);
+    let mut in_nullifiers = vec![];
+    for in_utxo in input_utxos {
+        let nullifier_public_key = public_context
+            .account_masks
+            .get(&in_utxo.owner)
+            .unwrap()
+            .nullifier_public_key;
+        let key_ser = serde_json::to_vec(&nullifier_public_key).unwrap();
+        in_nullifiers.push(generate_nullifiers(in_utxo, &key_ser));
+    }
+    for in_commitment in in_commitments {
+        assert!(validate_in_commitments_proof(
+            &in_commitment,
+            &public_context.commitments_tree,
+        ));
+    }
+    for nullifier in in_nullifiers.iter() {
+        assert!(validate_nullifier_not_present_in_set_items(
+            nullifier.clone().try_into().unwrap(),
+            &public_context.nullifiers_set,
+        ));
+    }
+    (in_nullifiers, generate_commitments(&output_utxos))
+}
+
+/// Check balances DE
+///
+/// takes the input_utxos[] and output_balance,
+///
+/// returns True if the token amount in output_balance matches the sum of all input_utxos[], otherwise returns False.
+pub fn check_balances_de(input_utxos: &[UTXO], output_balance: u128) -> bool {
+    let total_input: u128 = input_utxos.iter().map(|utxo| utxo.amount).sum();
+    total_input == output_balance
+}
+
+pub fn deshielded_circuit(
+    input_utxos: &[UTXO],
+    output_balance: u128,
+    public_context: &PublicSCContext,
+) -> Vec<Vec<u8>> {
+    assert!(check_balances_de(input_utxos, output_balance));
+    let in_commitments = generate_commitments(&input_utxos);
+    let mut in_nullifiers = vec![];
+    for in_utxo in input_utxos {
+        let nullifier_public_key = public_context
+            .account_masks
+            .get(&in_utxo.owner)
+            .unwrap()
+            .nullifier_public_key;
+        let key_ser = serde_json::to_vec(&nullifier_public_key).unwrap();
+        in_nullifiers.push(generate_nullifiers(in_utxo, &key_ser));
+    }
+    for in_commitment in in_commitments {
+        assert!(validate_in_commitments_proof(
+            &in_commitment,
+            &public_context.commitments_tree,
+        ));
+    }
+    for nullifier in in_nullifiers.iter() {
+        assert!(validate_nullifier_not_present_in_set_items(
+            nullifier.clone().try_into().unwrap(),
+            &public_context.nullifiers_set,
+        ));
+    }
+    in_nullifiers
 }
 
 #[allow(unused)]
@@ -114,16 +190,7 @@ pub fn commit(comm: &CommitmentSecrets, tag: Tag) -> PedersenCommitment {
     PedersenCommitment::new(SECP256K1, comm.value, comm.value_blinding_factor, generator)
 }
 
-// Check balances
-// takes the public_info and output_utxos[],
-// returns the True if the token amount in public_info matches the sum of all output_utxos[], otherwise return False.
-pub fn check_balances(public_info: u128, output_utxos: &[UTXO]) -> bool {
-    let total_output: u128 = output_utxos.iter().map(|utxo| utxo.amount).sum();
-    public_info == total_output
-}
-
-// new_commitment for a Vec of values
+/// new_commitment for a Vec of values
 pub fn pedersen_commitment_vec(
     public_info_vec: Vec<u64>,
 ) -> (Tweak, [u8; 32], Vec<PedersenCommitment>) {
@@ -149,10 +216,11 @@ pub fn pedersen_commitment_vec(
     (generator_blinding_factor, random_val, vec_commitments)
 }
 
-// Verify Pedersen commitment
-// takes the public_info, secret_r and pedersen_commitment and
-// checks that commitment(public_info,secret_r) is equal pedersen_commitment where the commitment is pedersen commitment.
+/// Verify Pedersen commitment
+///
+/// takes the public_info, secret_r and pedersen_commitment and
+///
+/// checks that commitment(public_info, secret_r) is equal to pedersen_commitment, where the commitment is a Pedersen commitment.
 pub fn verify_commitment(
     public_info: u64,
     secret_r: &[u8],
@@ -170,95 +238,69 @@ pub fn verify_commitment(
     commitment == *pedersen_commitment
 }
 
-#[allow(unused)]
-fn de_kernel(
-    root_commitment: &[u8],
-    root_nullifier: [u8; 32],
-    public_info: u64,
-    input_utxos: &[UTXO],
-    in_commitments_proof: &[Vec<u8>],
-    nullifiers_proof: &[[u8; 32]],
-    nullifier_secret_key: Scalar,
-) -> (Vec<u8>, Vec<Vec<u8>>) {
-    check_balances(public_info as u128, input_utxos);
-    let nullifiers: Vec<_> = input_utxos
-        .into_iter()
-        .map(|utxo| generate_nullifiers(&utxo, &nullifier_secret_key.to_bytes()))
-        .collect();
-    let in_commitments = generate_commitments(&input_utxos);
-    for in_commitment in in_commitments {
-        validate_in_commitments_proof(
-            &in_commitment,
-            root_commitment.to_vec(),
-            in_commitments_proof,
-        );
-    }
-    for nullifier in nullifiers.iter() {
-        validate_nullifier_not_present_in_set_items(
-            nullifier[0..32].try_into().unwrap(),
-            nullifiers_proof,
-        );
-    }
-    (vec![], nullifiers)
-}
-
-// Validate inclusion proof for in_commitments
-// takes the pedersen_commitment as a leaf, the root hash root_commitment and the path in_commitments_proof[],
-// returns True if the pedersen_commitment is in the tree with root hash root_commitment
-// otherwise
-// returns False, as membership proof.
+/// Validate inclusion proof for pedersen_commitment
+///
+/// ToDo: Solve it in a more scalable way
 pub fn validate_in_commitments_proof_se(
-    _pedersen_commitment: &PedersenCommitment,
-    _root_commitment: Vec<u8>,
-    _in_commitments_proof: &[Vec<u8>],
+    pedersen_commitment: &PedersenCommitment,
+    commitment_tree: &UTXOCommitmentsMerkleTree,
 ) -> bool {
-    // ToDo: Implement correct check
-    todo!()
+    let aligned_hash: [u8; 32] = pedersen_commitment.serialize()[0..32].try_into().unwrap();
+    commitment_tree.get_proof(aligned_hash).is_some()
 }
 
-// Generate nullifiers SE
-// takes the pedersen_commitment and nsk then
-// returns a list of nullifiers, where the nullifier = hash(pedersen_commitment || nsk) where the hash function will be determined
-pub fn generate_nullifiers_se(pedersen_commitment: &PedersenCommitment, nsk: &[u8]) -> Vec<u8> {
-    let mut input = pedersen_commitment.serialize().to_vec();
-    input.extend_from_slice(nsk);
-    hash(&input)
+/// Generate nullifier SE
+///
+/// takes the pedersen_commitment and npk then
+/// returns a nullifier, where the nullifier = poseidon_hash(pedersen_commitment || npk)
+pub fn generate_nullifiers_se(pedersen_commitment: &PedersenCommitment, npk: &[u8]) -> Vec<u8> {
+    let commitment_ser = pedersen_commitment.serialize().to_vec();
+    poseidon_hash(&[&commitment_ser, npk]).unwrap().to_vec()
 }
 
-#[allow(unused)]
-fn se_kernel(
-    root_commitment: &[u8],
-    root_nullifier: [u8; 32],
+/// Check balances SE
+///
+/// takes the input_balance and output_utxos[],
+///
+/// returns True if the token amount in input_balance matches the sum of all output_utxos[], otherwise returns False.
+pub fn check_balances_se(input_balance: u128, output_utxos: &[UTXO]) -> bool {
+    let total_output: u128 = output_utxos.iter().map(|utxo| utxo.amount).sum();
+    total_output == input_balance
+}
+
+pub fn shielded_circuit(
     public_info: u64,
+    output_utxos: &[UTXO],
     pedersen_commitment: PedersenCommitment,
     secret_r: &[u8],
-    output_utxos: &[UTXO],
-    in_commitments_proof: &[Vec<u8>],
-    nullifiers_proof: &[[u8; 32]],
-    nullifier_secret_key: Scalar,
-) -> (Vec<u8>, Vec<Vec<u8>>, Vec<u8>) {
-    check_balances(public_info as u128, output_utxos);
+    public_context: &PublicSCContext,
+) -> (Vec<Vec<u8>>, Vec<u8>) {
+    assert!(check_balances_se(public_info as u128, output_utxos));
     let out_commitments = generate_commitments(output_utxos);
-    let nullifier = generate_nullifiers_se(&pedersen_commitment, &nullifier_secret_key.to_bytes());
-    validate_in_commitments_proof_se(
+    let nullifier_public_key = public_context
+        .account_masks
+        .get(&public_context.caller_address)
+        .unwrap()
+        .nullifier_public_key;
+    let key_ser = serde_json::to_vec(&nullifier_public_key).unwrap();
+    let nullifier = generate_nullifiers_se(&pedersen_commitment, &key_ser);
+    assert!(validate_in_commitments_proof_se(
         &pedersen_commitment,
-        root_commitment.to_vec(),
-        in_commitments_proof,
-    );
-    verify_commitment(public_info, secret_r, &pedersen_commitment);
-    (vec![], out_commitments, nullifier)
+        &public_context.commitments_tree,
+    ));
+    assert!(verify_commitment(
+        public_info,
+        secret_r,
+        &pedersen_commitment
+    ));
+    (out_commitments, nullifier)
 }
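The substance of the nullifier fix: a nullifier is now bound to the UTXO's commitment and the owner's nullifier public key via Poseidon, rather than to sha256(bincode(utxo) || nsk). A minimal sketch restating the new derivation, written as if inside this module:

// Sketch: the fixed derivation, equivalent to what generate_nullifiers
// now computes for each input UTXO.
fn nullifier_matches(utxo: &UTXO, npk: &[u8]) -> bool {
    // The commitment is SHA-256 over the bincode-serialized UTXO...
    let commitment = generate_commitment(utxo);
    // ...and the nullifier is Poseidon over (commitment, npk).
    let expected = poseidon_hash(&[commitment.as_ref(), npk]).unwrap().to_vec();
    generate_nullifiers(utxo, npk) == expected
}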


sc_core/src/public_context.rs (new file, 194 lines)

@@ -0,0 +1,194 @@
use std::collections::{BTreeMap, HashSet};

use accounts::account_core::{AccountAddress, AccountPublicMask};
use common::merkle_tree_public::{merkle_tree::UTXOCommitmentsMerkleTree, TreeHashType};
use serde::{ser::SerializeStruct, Serialize};

pub const PUBLIC_SC_CONTEXT: &str = "PublicSCContext";
pub const CALLER_ADDRESS: &str = "caller_address";
pub const CALLER_BALANCE: &str = "caller_balance";
pub const ACCOUNT_MASKS_KEYS_SORTED: &str = "account_masks_keys_sorted";
pub const ACCOUNT_MASKS_VALUES_SORTED: &str = "account_masks_values_sorted";
pub const COMMITMENT_STORE_ROOT: &str = "commitment_store_root";
pub const PUT_TX_STORE_ROOT: &str = "put_tx_store_root";
pub const COMMITMENT_TREE: &str = "commitments_tree";
pub const NULLIFIERS_SET: &str = "nullifiers_set";

/// Structure representing the context given to a smart contract on a call
pub struct PublicSCContext {
    pub caller_address: AccountAddress,
    pub caller_balance: u64,
    pub account_masks: BTreeMap<AccountAddress, AccountPublicMask>,
    pub comitment_store_root: TreeHashType,
    pub pub_tx_store_root: TreeHashType,
    pub commitments_tree: UTXOCommitmentsMerkleTree,
    pub nullifiers_set: HashSet<[u8; 32]>,
}

impl Serialize for PublicSCContext {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        // Sort keys and values so serialization is deterministic.
        let mut account_masks_keys: Vec<[u8; 32]> = self.account_masks.keys().cloned().collect();
        account_masks_keys.sort();
        let mut account_mask_values: Vec<AccountPublicMask> =
            self.account_masks.values().cloned().collect();
        account_mask_values.sort_by(|left, right| left.address.cmp(&right.address));
        // Eight fields are serialized below.
        let mut s = serializer.serialize_struct(PUBLIC_SC_CONTEXT, 8)?;
        s.serialize_field(CALLER_ADDRESS, &self.caller_address)?;
        s.serialize_field(CALLER_BALANCE, &self.caller_balance)?;
        s.serialize_field(ACCOUNT_MASKS_KEYS_SORTED, &account_masks_keys)?;
        s.serialize_field(ACCOUNT_MASKS_VALUES_SORTED, &account_mask_values)?;
        s.serialize_field(COMMITMENT_STORE_ROOT, &self.comitment_store_root)?;
        s.serialize_field(PUT_TX_STORE_ROOT, &self.pub_tx_store_root)?;
        s.serialize_field(COMMITMENT_TREE, &self.commitments_tree)?;
        s.serialize_field(NULLIFIERS_SET, &self.nullifiers_set)?;
        s.end()
    }
}

impl PublicSCContext {
    /// Produces a `u64` from the bytes in a vector.
    ///
    /// Assumes the vector holds le_bytes.
    pub fn produce_u64_from_fit_vec(data: Vec<u8>) -> u64 {
        let data_len = data.len();
        assert!(data_len <= 8);
        let mut le_bytes: [u8; 8] = [0; 8];
        for (idx, item) in data.into_iter().enumerate() {
            le_bytes[idx] = item;
        }
        u64::from_le_bytes(le_bytes)
    }

    /// Produces a vector of `u64` from the context.
    pub fn produce_u64_list_from_context(&self) -> Result<Vec<u64>, serde_json::Error> {
        let mut u64_list = vec![];
        let ser_data = serde_json::to_vec(self)?;
        // `ToDo`: Replace with `next_chunk` usage, when the feature stabilizes in Rust.
        for i in 0..=(ser_data.len() / 8) {
            let next_chunk: Vec<u8> = if (i + 1) * 8 < ser_data.len() {
                ser_data[(i * 8)..((i + 1) * 8)].to_vec()
            } else {
                ser_data[(i * 8)..].to_vec()
            };
            u64_list.push(PublicSCContext::produce_u64_from_fit_vec(next_chunk));
        }
        Ok(u64_list)
    }
}

#[cfg(test)]
mod tests {
    use accounts::account_core::Account;
    use common::utxo_commitment::UTXOCommitment;

    use super::*;

    fn create_test_context() -> PublicSCContext {
        let caller_address = [1; 32];
        let comitment_store_root = [3; 32];
        let pub_tx_store_root = [4; 32];
        let commitments_tree =
            UTXOCommitmentsMerkleTree::new(vec![UTXOCommitment { hash: [5; 32] }]);
        let mut nullifiers_set = HashSet::new();
        nullifiers_set.insert([6; 32]);
        let mut account_masks = BTreeMap::new();
        let acc_1 = Account::new();
        let acc_2 = Account::new();
        let acc_3 = Account::new();
        account_masks.insert(acc_1.address, acc_1.make_account_public_mask());
        account_masks.insert(acc_2.address, acc_2.make_account_public_mask());
        account_masks.insert(acc_3.address, acc_3.make_account_public_mask());
        PublicSCContext {
            caller_address,
            caller_balance: 100,
            account_masks,
            comitment_store_root,
            pub_tx_store_root,
            commitments_tree,
            nullifiers_set,
        }
    }

    #[test]
    fn bin_ser_stability_test() {
        let test_context = create_test_context();
        let serialization_1 = serde_json::to_vec(&test_context).unwrap();
        let serialization_2 = serde_json::to_vec(&test_context).unwrap();
        assert_eq!(serialization_1, serialization_2);
    }

    #[test]
    fn correct_u64_production_from_fit_vec() {
        let le_vec = vec![1, 1, 1, 1, 2, 1, 1, 1];
        let num = PublicSCContext::produce_u64_from_fit_vec(le_vec);
        assert_eq!(num, 72340177133043969);
    }

    #[test]
    fn correct_u64_production_from_small_vec() {
        // 7 items instead of 8.
        let le_vec = vec![1, 1, 1, 1, 2, 1, 1];
        let num = PublicSCContext::produce_u64_from_fit_vec(le_vec);
        assert_eq!(num, 282583095116033);
    }

    #[test]
    fn correct_u64_production_from_small_vec_le_bytes() {
        // 7 items instead of 8.
        let le_vec = vec![1, 1, 1, 1, 2, 1, 1];
        let le_vec_res = [1, 1, 1, 1, 2, 1, 1, 0];
        let num = PublicSCContext::produce_u64_from_fit_vec(le_vec);
        assert_eq!(num.to_le_bytes(), le_vec_res);
    }

    #[test]
    #[should_panic]
    fn correct_u64_production_from_unfit_vec_should_panic() {
        // 9 items instead of 8.
        let le_vec = vec![1, 1, 1, 1, 2, 1, 1, 1, 1];
        PublicSCContext::produce_u64_from_fit_vec(le_vec);
    }

    #[test]
    fn consistent_len_of_context_commitments() {
        let test_context = create_test_context();
        let context_num_vec1 = test_context.produce_u64_list_from_context().unwrap();
        let context_num_vec2 = test_context.produce_u64_list_from_context().unwrap();
        assert_eq!(context_num_vec1.len(), context_num_vec2.len());
    }
}
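To make the limb packing concrete, a small sketch of `produce_u64_from_fit_vec` on a short chunk, written as if inside this module:

// Sketch: a 5-byte chunk is zero-padded to 8 little-endian bytes
// before being read as a u64.
fn limb_example() {
    let limb = PublicSCContext::produce_u64_from_fit_vec(vec![1, 0, 0, 0, 2]);
    assert_eq!(limb, u64::from_le_bytes([1, 0, 0, 0, 2, 0, 0, 0]));
    assert_eq!(limb, 1 + (2u64 << 32));
}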


@@ -49,7 +49,7 @@ impl DataBlob {
     }
 }
 
-#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
+#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)]
 pub enum DataBlobChangeVariant {
     Created {
         id: usize,
@@ -83,93 +83,66 @@ pub fn produce_blob_from_fit_vec(data: Vec<u8>) -> DataBlob {
     blob
 }
 
-///Creates blob list from generic serializable state
-///
-///`ToDo`: Find a way to align data in a way, to minimize read and write operations in db
-pub fn produce_blob_list_from_sc_public_state<S: Serialize>(
-    state: &S,
-) -> Result<Vec<DataBlob>, serde_json::Error> {
-    let mut blob_list = vec![];
-    let ser_data = serde_json::to_vec(state)?;
-    //`ToDo` Replace with `next_chunk` usage, when feature stabilizes in Rust
-    for i in 0..=(ser_data.len() / SC_DATA_BLOB_SIZE) {
-        let next_chunk: Vec<u8>;
-        if (i + 1) * SC_DATA_BLOB_SIZE < ser_data.len() {
-            next_chunk = ser_data[(i * SC_DATA_BLOB_SIZE)..((i + 1) * SC_DATA_BLOB_SIZE)]
-                .iter()
-                .cloned()
-                .collect();
-        } else {
-            next_chunk = ser_data[(i * SC_DATA_BLOB_SIZE)..(ser_data.len())]
-                .iter()
-                .cloned()
-                .collect();
-        }
-        blob_list.push(produce_blob_from_fit_vec(next_chunk));
-    }
-    Ok(blob_list)
-}
-
-///Compare two consecutive in time blob lists to produce list of modified ids
-pub fn compare_blob_lists(
-    blob_list_old: &[DataBlob],
-    blob_list_new: &[DataBlob],
-) -> Vec<DataBlobChangeVariant> {
-    let mut changed_ids = vec![];
-    let mut id_end = 0;
-    let old_len = blob_list_old.len();
-    let new_len = blob_list_new.len();
-    if old_len > new_len {
-        for id in new_len..old_len {
-            changed_ids.push(DataBlobChangeVariant::Deleted { id });
-        }
-    } else if new_len > old_len {
-        for id in old_len..new_len {
-            changed_ids.push(DataBlobChangeVariant::Created {
-                id,
-                blob: blob_list_new[id],
-            });
-        }
-    }
-    loop {
-        let old_blob = blob_list_old.get(id_end);
-        let new_blob = blob_list_new.get(id_end);
-        match (old_blob, new_blob) {
-            (Some(old), Some(new)) => {
-                if old != new {
-                    changed_ids.push(DataBlobChangeVariant::Modified {
-                        id: id_end,
-                        blob_old: *old,
-                        blob_new: *new,
-                    });
-                }
-            }
-            _ => break,
-        }
-        id_end += 1;
-    }
-    changed_ids
-}
-
 #[cfg(test)]
 mod tests {
     use super::*;
-    use serde::Serialize;
+    use serde_json;
 
     const TEST_BLOB_SIZE: usize = 256; // Define a test blob size for simplicity
     static SC_DATA_BLOB_SIZE: usize = TEST_BLOB_SIZE;
 
+    fn sample_vec() -> Vec<u8> {
+        (0..SC_DATA_BLOB_SIZE)
+            .collect::<Vec<usize>>()
+            .iter()
+            .map(|&x| x as u8)
+            .collect()
+    }
+
+    fn sample_data_blob() -> DataBlob {
+        let vec: Vec<u8> = sample_vec();
+        produce_blob_from_fit_vec(vec)
+    }
+
+    #[test]
+    fn test_serialize_data_blob() {
+        let blob = sample_data_blob();
+        let json = serde_json::to_string(&blob).unwrap();
+        let expected_json = serde_json::to_string(&sample_vec()).unwrap();
+        assert_eq!(json, expected_json);
+    }
+
+    #[test]
+    fn test_deserialize_data_blob() {
+        let data = sample_vec();
+        let json = serde_json::to_string(&data).unwrap();
+        let deserialized: DataBlob = serde_json::from_str(&json).unwrap();
+        assert_eq!(deserialized.to_vec(), data);
+    }
+
+    #[test]
+    fn test_serialize_deserialize_data_blob_change_variant() {
+        let blob1 = sample_data_blob();
+        let blob2 = produce_blob_from_fit_vec((50..50 + SC_DATA_BLOB_SIZE as u8).collect());
+        let variants = vec![
+            DataBlobChangeVariant::Created { id: 1, blob: blob1 },
+            DataBlobChangeVariant::Modified {
+                id: 2,
+                blob_old: blob1,
+                blob_new: blob2,
+            },
+            DataBlobChangeVariant::Deleted { id: 3 },
+        ];
+        for variant in variants {
+            let json = serde_json::to_string(&variant).unwrap();
+            let deserialized: DataBlobChangeVariant = serde_json::from_str(&json).unwrap();
+            assert_eq!(variant, deserialized);
+        }
+    }
+
     #[test]
     fn test_produce_blob_from_fit_vec() {
         let data = (0..0 + 255).collect();
@@ -183,47 +156,4 @@ mod tests {
         let data = vec![0; SC_DATA_BLOB_SIZE + 1];
         let _ = produce_blob_from_fit_vec(data);
     }
-
-    #[derive(Serialize)]
-    struct TestState {
-        a: u32,
-        b: u32,
-    }
-
-    #[test]
-    fn test_produce_blob_list_from_sc_public_state() {
-        let state = TestState { a: 42, b: 99 };
-        let result = produce_blob_list_from_sc_public_state(&state).unwrap();
-        assert!(!result.is_empty());
-    }
-
-    #[test]
-    fn test_compare_blob_lists_created() {
-        let old_list: Vec<DataBlob> = vec![];
-        let new_list: Vec<DataBlob> = vec![[1; SC_DATA_BLOB_SIZE].into()];
-        let changes = compare_blob_lists(&old_list, &new_list);
-        assert_eq!(changes.len(), 1);
-        assert!(matches!(changes[0], DataBlobChangeVariant::Created { .. }));
-    }
-
-    #[test]
-    fn test_compare_blob_lists_deleted() {
-        let old_list: Vec<DataBlob> = vec![[1; SC_DATA_BLOB_SIZE].into()];
-        let new_list: Vec<DataBlob> = vec![];
-        let changes = compare_blob_lists(&old_list, &new_list);
-        assert_eq!(changes.len(), 1);
-        assert!(matches!(changes[0], DataBlobChangeVariant::Deleted { .. }));
-    }
-
-    #[test]
-    fn test_compare_blob_lists_modified() {
-        let old_list: Vec<DataBlob> = vec![[1; SC_DATA_BLOB_SIZE].into()];
-        let new_list: Vec<DataBlob> = vec![[2; SC_DATA_BLOB_SIZE].into()];
-        let changes = compare_blob_lists(&old_list, &new_list);
-        assert_eq!(changes.len(), 1);
-        assert!(matches!(changes[0], DataBlobChangeVariant::Modified { .. }));
-    }
 }
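The `PartialEq, Eq` derive above is what lets the new round-trip test assert on variants directly; a caller could likewise compare whole change sets, as in this small sketch:

// Sketch: slice equality works because DataBlobChangeVariant is now PartialEq.
fn deltas_equal(recomputed: &[DataBlobChangeVariant], stored: &[DataBlobChangeVariant]) -> bool {
    recomputed == stored
}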