fix: comment fixes

This commit is contained in:
Oleksandr Pravdyvyi 2025-08-14 14:03:48 +03:00
parent e589ddae5a
commit 74f0c983d3
No known key found for this signature in database
GPG Key ID: 9F8955C63C443871
18 changed files with 49 additions and 1102 deletions

View File

@ -11,7 +11,6 @@ members = [
"wallet", "wallet",
"sequencer_core", "sequencer_core",
"common", "common",
"sc_core",
"nssa", "nssa",
] ]

View File

@ -106,7 +106,7 @@ impl AccountPublicMask {
} }
pub fn make_tag(&self) -> Tag { pub fn make_tag(&self) -> Tag {
self.address.tag() self.address.value()[0]
} }
} }
@ -199,7 +199,7 @@ impl Account {
} }
pub fn make_tag(&self) -> Tag { pub fn make_tag(&self) -> Tag {
self.address.tag() self.address.value()[0]
} }
///Produce account public mask ///Produce account public mask

View File

@ -14,7 +14,6 @@ secp256k1 = "0.31.1"
rand = "0.8" rand = "0.8"
hex = "0.4.3" hex = "0.4.3"
anyhow.workspace = true anyhow.workspace = true
serde_json.workspace = true
[dev-dependencies] [dev-dependencies]
test-program-methods = { path = "test_program_methods" } test-program-methods = { path = "test_program_methods" }

View File

@ -1,5 +1,7 @@
use std::{fmt::Display, str::FromStr};
use anyhow::anyhow; use anyhow::anyhow;
use serde::{Deserialize, Serialize, de::Visitor}; use serde::{Deserialize, Serialize};
use crate::signature::PublicKey; use crate::signature::PublicKey;
@ -15,10 +17,6 @@ impl Address {
Self { value } Self { value }
} }
pub fn tag(&self) -> u8 {
self.value[0]
}
pub fn value(&self) -> &[u8; 32] { pub fn value(&self) -> &[u8; 32] {
&self.value &self.value
} }
@ -49,90 +47,30 @@ impl From<&PublicKey> for Address {
} }
} }
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct HexString(String);
impl HexString {
pub fn inner(&self) -> &str {
&self.0
}
}
#[derive(Debug, thiserror::Error)] #[derive(Debug, thiserror::Error)]
pub enum HexStringConsistencyError { pub enum AddressError {
#[error("Hex decode error")] #[error("invalid hex")]
HexError(#[from] hex::FromHexError), InvalidHex(#[from] hex::FromHexError),
#[error("Decode slice does not fit 32 bytes")] #[error("invalid length: expected 32 bytes, got {0}")]
SizeError(#[from] anyhow::Error), InvalidLength(usize),
} }
impl TryFrom<&str> for HexString { impl FromStr for Address {
type Error = HexStringConsistencyError; type Err = AddressError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
fn try_from(value: &str) -> Result<Self, Self::Error> { let bytes = hex::decode(s)?;
let decoded_str = hex::decode(value)?; if bytes.len() != 32 {
let _: Address = decoded_str.try_into()?; return Err(AddressError::InvalidLength(bytes.len()));
}
Ok(Self(value.to_string())) let mut value = [0u8; 32];
value.copy_from_slice(&bytes);
Ok(Address { value })
} }
} }
impl Serialize for HexString { impl Display for Address {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
where write!(f, "{}", hex::encode(self.value))
S: serde::Serializer,
{
serializer.serialize_str(&self.0)
}
}
struct HexStringVisitor;
impl<'de> Visitor<'de> for HexStringVisitor {
type Value = String;
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
formatter.write_str("expected a valid string")
}
fn visit_string<E>(self, v: String) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(v)
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
Ok(v.to_string())
}
}
impl<'de> Deserialize<'de> for HexString {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
let str_cand = deserializer.deserialize_string(HexStringVisitor)?;
let hex_string =
HexString::try_from(str_cand.as_str()).map_err(serde::de::Error::custom)?;
Ok(hex_string)
}
}
impl From<HexString> for Address {
fn from(value: HexString) -> Self {
Address::try_from(hex::decode(value.inner()).unwrap()).unwrap()
}
}
impl From<Address> for HexString {
fn from(value: Address) -> Self {
HexString::try_from(hex::encode(value).as_str()).unwrap()
} }
} }
@ -141,7 +79,7 @@ impl Serialize for Address {
where where
S: serde::Serializer, S: serde::Serializer,
{ {
let hex_string: HexString = (*self).into(); let hex_string = self.to_string();
hex_string.serialize(serializer) hex_string.serialize(serializer)
} }
@ -152,82 +90,41 @@ impl<'de> Deserialize<'de> for Address {
where where
D: serde::Deserializer<'de>, D: serde::Deserializer<'de>,
{ {
let hex_sring = HexString::deserialize(deserializer)?; let hex_string = String::deserialize(deserializer)?;
Ok(hex_sring.into()) Address::from_str(&hex_string).map_err(serde::de::Error::custom)
} }
} }
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use crate::{Address, address::AddressError};
#[derive(Debug, Serialize, Deserialize)] #[test]
struct Ser1 { fn parse_valid_address() {
f1: String, let hex_str = "00".repeat(32); // 64 hex chars = 32 bytes
} let addr: Address = hex_str.parse().unwrap();
assert_eq!(addr.value, [0u8; 32]);
#[derive(Debug, Serialize, Deserialize, PartialEq)]
struct Ser2 {
f1: HexString,
}
#[derive(Debug, Serialize, Deserialize, PartialEq)]
struct Ser3 {
f1: Address,
} }
#[test] #[test]
fn test_hex_ser() { fn parse_invalid_hex() {
let str_for_tests = hex::encode([42; 32]); let hex_str = "zz".repeat(32); // invalid hex chars
let result = hex_str.parse::<Address>().unwrap_err();
let hex_str_for_tests = HexString::try_from(str_for_tests.as_str()).unwrap(); assert!(matches!(result, AddressError::InvalidHex(_)));
let ser1_str = Ser1 { f1: str_for_tests };
let ser2_str = Ser2 {
f1: hex_str_for_tests,
};
let ser1_str_ser = serde_json::to_string(&ser1_str).unwrap();
let ser2_str_ser = serde_json::to_string(&ser2_str).unwrap();
println!("{ser2_str_ser:#?}");
assert_eq!(ser1_str_ser, ser2_str_ser);
} }
#[test] #[test]
fn test_hex_deser() { fn parse_wrong_length_short() {
let raw_json = r#"{ let hex_str = "00".repeat(31); // 62 chars = 31 bytes
"f1": "2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a" let result = hex_str.parse::<Address>().unwrap_err();
}"#; assert!(matches!(result, AddressError::InvalidLength(_)));
let str_for_tests = hex::encode([42; 32]);
let hex_str_for_tests = HexString::try_from(str_for_tests.as_str()).unwrap();
let ser2_str = Ser2 {
f1: hex_str_for_tests,
};
let ser1_str: Ser2 = serde_json::from_str(raw_json).unwrap();
assert_eq!(ser1_str, ser2_str);
} }
#[test] #[test]
fn test_addr_deser() { fn parse_wrong_length_long() {
let raw_json = r#"{ let hex_str = "00".repeat(33); // 66 chars = 33 bytes
"f1": "2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a2a" let result = hex_str.parse::<Address>().unwrap_err();
}"#; assert!(matches!(result, AddressError::InvalidLength(_)));
let addr_for_tests = Address::new([42; 32]);
let ser2_str = Ser3 { f1: addr_for_tests };
let ser1_str: Ser3 = serde_json::from_str(raw_json).unwrap();
assert_eq!(ser1_str, ser2_str);
} }
} }

View File

@ -1,113 +0,0 @@
/// Computes gas usage and public-balance costs for smart-contract deployment
/// and runtime execution.
#[derive(Debug, Clone)]
pub struct GasCalculator {
    /// Gas spent per deploying one byte of data
    gas_fee_per_byte_deploy: u64,
    /// Gas spent per reading one byte of data in VM
    gas_fee_per_input_buffer_runtime: u64,
    /// Gas spent per one byte of contract data in runtime
    gas_fee_per_byte_runtime: u64,
    /// Cost of one gas of runtime in public balance
    gas_cost_runtime: u64,
    /// Cost of one gas of deployment in public balance
    gas_cost_deploy: u64,
    /// Gas limit for deployment
    gas_limit_deploy: u64,
    /// Gas limit for runtime
    gas_limit_runtime: u64,
}

impl GasCalculator {
    /// Builds a calculator from the raw fee, cost and limit parameters.
    #[allow(clippy::too_many_arguments)]
    pub fn new(
        gas_fee_per_byte_deploy: u64,
        gas_fee_per_input_buffer_runtime: u64,
        gas_fee_per_byte_runtime: u64,
        gas_cost_runtime: u64,
        gas_cost_deploy: u64,
        gas_limit_deploy: u64,
        gas_limit_runtime: u64,
    ) -> Self {
        Self {
            gas_fee_per_byte_deploy,
            gas_fee_per_input_buffer_runtime,
            gas_fee_per_byte_runtime,
            gas_cost_deploy,
            gas_cost_runtime,
            gas_limit_deploy,
            gas_limit_runtime,
        }
    }

    /// Gas fee charged per deployed byte.
    pub fn gas_fee_per_byte_deploy(&self) -> u64 {
        self.gas_fee_per_byte_deploy
    }

    /// Gas fee charged per input-buffer byte read in the VM.
    pub fn gas_fee_per_input_buffer_runtime(&self) -> u64 {
        self.gas_fee_per_input_buffer_runtime
    }

    /// Gas fee charged per contract byte at runtime.
    pub fn gas_fee_per_byte_runtime(&self) -> u64 {
        self.gas_fee_per_byte_runtime
    }

    /// Public-balance cost of one unit of runtime gas.
    pub fn gas_cost_runtime(&self) -> u64 {
        self.gas_cost_runtime
    }

    /// Public-balance cost of one unit of deployment gas.
    pub fn gas_cost_deploy(&self) -> u64 {
        self.gas_cost_deploy
    }

    /// Maximum gas allowed for a deployment.
    pub fn gas_limit_deploy(&self) -> u64 {
        self.gas_limit_deploy
    }

    /// Maximum gas allowed for a runtime execution.
    pub fn gas_limit_runtime(&self) -> u64 {
        self.gas_limit_runtime
    }

    /// Gas required to deploy `elf`.
    ///
    /// Returns `Some(gas)` when `gas` is strictly below `gas_limit_deploy`.
    /// Returns `None` when the limit is reached/exceeded, or when the fee
    /// multiplication overflows `u64` (an overflowing total necessarily
    /// exceeds any limit, so `None` is the consistent answer).
    pub fn gas_deploy(&self, elf: &[u8]) -> Option<u64> {
        let gas = self
            .gas_fee_per_byte_deploy()
            .checked_mul(elf.len() as u64)?;
        (gas < self.gas_limit_deploy()).then_some(gas)
    }

    /// Gas charged for the contract bytes themselves at runtime.
    pub fn gas_runtime(&self, elf: &[u8]) -> u64 {
        self.gas_fee_per_byte_runtime() * (elf.len() as u64)
    }

    /// Gas charged for reading an input buffer of `input_length` bytes.
    pub fn gas_input_buffer(&self, input_length: usize) -> u64 {
        self.gas_fee_per_input_buffer_runtime() * (input_length as u64)
    }

    /// Total runtime gas for `elf` plus an input buffer of `input_length`.
    ///
    /// Returns `Some(gas)` when `gas` is strictly below `gas_limit_runtime`.
    /// Returns `None` when the limit is reached/exceeded, or when the sum
    /// overflows `u64`.
    pub fn gas_runtime_full(&self, elf: &[u8], input_length: usize) -> Option<u64> {
        let gas = self
            .gas_runtime(elf)
            .checked_add(self.gas_input_buffer(input_length))?;
        (gas < self.gas_limit_runtime()).then_some(gas)
    }

    /// Public-balance cost of `deploy_gas` units of deployment gas.
    pub fn deploy_cost(&self, deploy_gas: u64) -> u64 {
        deploy_gas * self.gas_cost_deploy()
    }

    /// Public-balance cost of `runtime_gas` units of runtime gas.
    pub fn runtime_cost(&self, runtime_gas: u64) -> u64 {
        runtime_gas * self.gas_cost_runtime()
    }
}

View File

@ -1,6 +1,5 @@
pub mod address; pub mod address;
pub mod error; pub mod error;
pub mod gas_calculator;
pub mod program; pub mod program;
pub mod public_transaction; pub mod public_transaction;
mod signature; mod signature;

View File

@ -1,41 +0,0 @@
[package]
name = "sc_core"
version = "0.1.0"
edition = "2021"
[dependencies]
anyhow.workspace = true
serde_json.workspace = true
env_logger.workspace = true
log.workspace = true
serde.workspace = true
rand.workspace = true
k256.workspace = true
sha2.workspace = true
bincode.workspace = true
elliptic-curve.workspace = true
hex.workspace = true
light-poseidon.workspace = true
ark-bn254.workspace = true
ark-ff.workspace = true
risc0-zkvm = "2.3.1"
[dependencies.accounts]
path = "../accounts"
[dependencies.storage]
path = "../storage"
[dependencies.utxo]
path = "../utxo"
[dependencies.common]
path = "../common"
[dependencies.nssa]
path = "../nssa"
[dependencies.secp256k1-zkp]
workspace = true
features = ["std", "rand-std", "rand", "serde", "global-context"]

View File

@ -1,125 +0,0 @@
use serde::Serialize;
use storage::{
sc_db_utils::{produce_blob_from_fit_vec, DataBlob, DataBlobChangeVariant},
SC_DATA_BLOB_SIZE,
};
///Creates blob list from generic serializable state
///
/// Serializes `state` to JSON and splits the bytes into `SC_DATA_BLOB_SIZE`
/// chunks; the final (possibly partial) chunk is padded by
/// `produce_blob_from_fit_vec`.
///
/// NOTE(review): when the serialized length is an exact multiple of
/// `SC_DATA_BLOB_SIZE`, the `0..=len/SIZE` loop emits one extra, empty
/// trailing chunk — confirm that padding blob is intended.
///
///`ToDo`: Find a way to align data in a way, to minimize read and write operations in db
pub fn produce_blob_list_from_sc_public_state<S: Serialize>(
    state: &S,
) -> Result<Vec<DataBlob>, serde_json::Error> {
    let mut blob_list = vec![];
    let ser_data = serde_json::to_vec(state)?;
    //`ToDo` Replace with `next_chunk` usage, when feature stabilizes in Rust
    for i in 0..=(ser_data.len() / SC_DATA_BLOB_SIZE) {
        // Take a full-size chunk while one fits, otherwise the remaining tail.
        let next_chunk: Vec<u8> = if (i + 1) * SC_DATA_BLOB_SIZE < ser_data.len() {
            ser_data[(i * SC_DATA_BLOB_SIZE)..((i + 1) * SC_DATA_BLOB_SIZE)].to_vec()
        } else {
            ser_data[(i * SC_DATA_BLOB_SIZE)..(ser_data.len())].to_vec()
        };
        blob_list.push(produce_blob_from_fit_vec(next_chunk));
    }
    Ok(blob_list)
}
///Compare two consecutive in time blob lists to produce list of modified ids
///
/// First emits `Deleted` (old list longer) or `Created` (new list longer)
/// entries for the non-overlapping tail, then a `Modified` entry for every
/// index present in both lists whose blobs differ.
pub fn compare_blob_lists(
    blob_list_old: &[DataBlob],
    blob_list_new: &[DataBlob],
) -> Vec<DataBlobChangeVariant> {
    let mut changed_ids = vec![];
    let old_len = blob_list_old.len();
    let new_len = blob_list_new.len();
    if old_len > new_len {
        // Blobs past the new length were removed.
        for id in new_len..old_len {
            changed_ids.push(DataBlobChangeVariant::Deleted { id });
        }
    } else if new_len > old_len {
        // Blobs past the old length are new. (The original also had a
        // redundant `.take(new_len)` here — a no-op on a list of that length.)
        for (id, blob) in blob_list_new.iter().enumerate().skip(old_len) {
            changed_ids.push(DataBlobChangeVariant::Created { id, blob: *blob });
        }
    }
    // Compare the overlapping prefix element-wise; `zip` stops at the shorter
    // list, exactly like the original `loop`/`match`/`break` scan.
    for (id, (old, new)) in blob_list_old.iter().zip(blob_list_new.iter()).enumerate() {
        if old != new {
            changed_ids.push(DataBlobChangeVariant::Modified {
                id,
                blob_old: *old,
                blob_new: *new,
            });
        }
    }
    changed_ids
}
#[cfg(test)]
mod tests {
    use super::*;
    use serde::Serialize;

    const TEST_BLOB_SIZE: usize = 256; // Define a test blob size for simplicity

    // NOTE(review): this was declared `static`, but a `static` cannot be used
    // as an array length (`[1; SC_DATA_BLOB_SIZE]` fails to compile with
    // E0435), so it must be a `const`. It shadows the glob-imported
    // `storage::SC_DATA_BLOB_SIZE` only for the array sizes below — the
    // function under test still chunks by the storage crate's value, and the
    // `.into()` conversions below assume the sizes agree (TODO confirm).
    const SC_DATA_BLOB_SIZE: usize = TEST_BLOB_SIZE;

    #[derive(Serialize)]
    struct TestState {
        a: u32,
        b: u32,
    }

    #[test]
    fn test_produce_blob_list_from_sc_public_state() {
        let state = TestState { a: 42, b: 99 };
        let result = produce_blob_list_from_sc_public_state(&state).unwrap();
        assert!(!result.is_empty());
    }

    #[test]
    fn test_compare_blob_lists_created() {
        let old_list: Vec<DataBlob> = vec![];
        let new_list: Vec<DataBlob> = vec![[1; SC_DATA_BLOB_SIZE].into()];
        let changes = compare_blob_lists(&old_list, &new_list);
        assert_eq!(changes.len(), 1);
        assert!(matches!(changes[0], DataBlobChangeVariant::Created { .. }));
    }

    #[test]
    fn test_compare_blob_lists_deleted() {
        let old_list: Vec<DataBlob> = vec![[1; SC_DATA_BLOB_SIZE].into()];
        let new_list: Vec<DataBlob> = vec![];
        let changes = compare_blob_lists(&old_list, &new_list);
        assert_eq!(changes.len(), 1);
        assert!(matches!(changes[0], DataBlobChangeVariant::Deleted { .. }));
    }

    #[test]
    fn test_compare_blob_lists_modified() {
        let old_list: Vec<DataBlob> = vec![[1; SC_DATA_BLOB_SIZE].into()];
        let new_list: Vec<DataBlob> = vec![[2; SC_DATA_BLOB_SIZE].into()];
        let changes = compare_blob_lists(&old_list, &new_list);
        assert_eq!(changes.len(), 1);
        assert!(matches!(changes[0], DataBlobChangeVariant::Modified { .. }));
    }
}

View File

@ -1,10 +0,0 @@
use ark_bn254::Fr;
use light_poseidon::{Poseidon, PoseidonBytesHasher};
/// Poseidon hash (circom parameters, arity 2) over the given byte slices.
///
/// Returns the 32-byte big-endian hash.
///
/// # Errors
/// Returns an error if the hasher cannot be constructed or the inputs are
/// rejected by `hash_bytes_be`.
pub fn poseidon_hash(inputs: &[&[u8]]) -> anyhow::Result<[u8; 32]> {
    // Propagate construction failures instead of unwrapping — the function
    // already returns `anyhow::Result`, and `hash_bytes_be`'s `?` below shows
    // the error type converts.
    let mut poseidon = Poseidon::<Fr>::new_circom(2)?;
    let hash = poseidon.hash_bytes_be(inputs)?;
    Ok(hash)
}

View File

@ -1,5 +0,0 @@
pub mod blob_utils;
pub mod cryptography;
pub mod proofs_circuits;
pub mod public_context;
pub mod transaction_payloads_tools;

View File

@ -1,286 +0,0 @@
use bincode;
use common::merkle_tree_public::merkle_tree::UTXOCommitmentsMerkleTree;
use nssa::Address;
use rand::{thread_rng, RngCore};
use secp256k1_zkp::{CommitmentSecrets, Generator, PedersenCommitment, Tag, Tweak, SECP256K1};
use sha2::{Digest, Sha256};
use utxo::utxo_core::UTXO;
//
use crate::{cryptography::poseidon_hash, public_context::PublicSCContext};
/// SHA-256 digest of `input`, returned as an owned byte vector.
fn hash(input: &[u8]) -> Vec<u8> {
    // Single-shot digest: update + finalize in one call.
    let digest = Sha256::digest(input);
    digest.to_vec()
}
/// Generate nullifiers
///
/// takes the input_utxo and npk (the serialized nullifier public key)
///
/// returns the nullifier, computed as
/// `poseidon_hash(commitment(input_utxo) || npk)`
///
/// # Panics
/// Panics if the Poseidon hash fails.
pub fn generate_nullifiers(input_utxo: &UTXO, npk: &[u8]) -> Vec<u8> {
    // Bind the UTXO's commitment to the owner's nullifier key material.
    let commitment = generate_commitment(input_utxo);
    poseidon_hash(&[commitment.as_ref(), npk]).unwrap().to_vec()
}
/// Generate commitment for UTXO
///
/// uses the input_utxo
///
/// returns commitment, where commitment is `sha256(bincode(input_utxo))`
///
/// # Panics
/// Panics if bincode serialization of the UTXO fails.
pub fn generate_commitment(input_utxo: &UTXO) -> Vec<u8> {
    let serialized = bincode::serialize(input_utxo).unwrap(); // Serialize UTXO.
    hash(&serialized)
}
/// Generate commitments for UTXO
///
/// uses the input_utxos
///
/// returns commitments
pub fn generate_commitments(input_utxos: &[UTXO]) -> Vec<Vec<u8>> {
input_utxos
.iter()
.map(|utxo| {
let serialized = bincode::serialize(utxo).unwrap(); // Serialize UTXO.
hash(&serialized)
})
.collect()
}
/// Validate inclusion proof for in_commitments
///
/// A commitment is considered included when the tree can produce a proof
/// for its 32-byte hash.
///
/// ToDo: Solve it in more scalable way
pub fn validate_in_commitments_tree(
    in_commitment: &[u8],
    commitment_tree: &UTXOCommitmentsMerkleTree,
) -> bool {
    // The tree keys proofs by fixed 32-byte hashes; the slice must be exactly
    // that long.
    let fixed_hash: [u8; 32] = in_commitment.try_into().unwrap();
    commitment_tree.get_proof(fixed_hash).is_some()
}
/// Check, that input utxos balances is equal to out utxo balances
///
/// Returns `true` when the amounts on both sides sum to the same total.
pub fn check_balances_private(in_utxos: &[UTXO], out_utxos: &[UTXO]) -> bool {
    // Same summation style as `check_balances_de`/`check_balances_se`
    // (map + sum) instead of a hand-rolled fold, for consistency.
    let in_sum: u128 = in_utxos.iter().map(|utxo| utxo.amount).sum();
    let out_sum: u128 = out_utxos.iter().map(|utxo| utxo.amount).sum();
    in_sum == out_sum
}
/// Private transfer circuit checks.
///
/// Asserts that input and output UTXO amounts balance, derives a nullifier
/// for every input UTXO from its owner's nullifier public key (looked up in
/// `public_context.account_masks`), and asserts every input commitment is
/// present in the context's commitment tree.
///
/// Returns `(input_nullifiers, output_commitments)`.
///
/// # Panics
/// Panics on unbalanced amounts, on an owner address missing from
/// `account_masks`, or on an input commitment absent from the tree.
pub fn private_circuit(
    input_utxos: &[UTXO],
    output_utxos: &[UTXO],
    public_context: &PublicSCContext,
) -> (Vec<Vec<u8>>, Vec<Vec<u8>>) {
    assert!(check_balances_private(input_utxos, output_utxos));
    let in_commitments = generate_commitments(input_utxos);
    let mut in_nullifiers = vec![];
    for in_utxo in input_utxos {
        // Nullifier key comes from the UTXO owner's public account mask.
        let nullifier_public_key = public_context
            .account_masks
            .get(&Address::new(in_utxo.owner))
            .unwrap()
            .nullifier_public_key;
        let key_ser = serde_json::to_vec(&nullifier_public_key).unwrap();
        in_nullifiers.push(generate_nullifiers(in_utxo, &key_ser));
    }
    for in_commitment in in_commitments {
        // Every spent UTXO must already be committed in the tree.
        assert!(validate_in_commitments_tree(
            &in_commitment,
            &public_context.commitments_tree,
        ));
    }
    (in_nullifiers, generate_commitments(output_utxos))
}
/// Check balances DE
///
/// takes the input_utxos[] and output_balance,
///
/// returns True if the token amount in output_balance matches the sum of all
/// input_utxos[], otherwise returns False.
pub fn check_balances_de(input_utxos: &[UTXO], output_balance: u128) -> bool {
    let mut total_input: u128 = 0;
    for utxo in input_utxos {
        total_input += utxo.amount;
    }
    total_input == output_balance
}
/// Deshielding circuit checks.
///
/// Asserts the input UTXO amounts sum to `output_balance`, derives a
/// nullifier for every input UTXO from its owner's nullifier public key, and
/// asserts each input commitment is present in the context's commitment tree.
///
/// Returns the input nullifiers.
///
/// # Panics
/// Panics on unbalanced amounts, on an owner address missing from
/// `account_masks`, or on an input commitment absent from the tree.
pub fn deshielded_circuit(
    input_utxos: &[UTXO],
    output_balance: u128,
    public_context: &PublicSCContext,
) -> Vec<Vec<u8>> {
    assert!(check_balances_de(input_utxos, output_balance));
    let in_commitments = generate_commitments(input_utxos);
    let mut in_nullifiers = vec![];
    for in_utxo in input_utxos {
        // Nullifier key comes from the UTXO owner's public account mask.
        let nullifier_public_key = public_context
            .account_masks
            .get(&Address::new(in_utxo.owner))
            .unwrap()
            .nullifier_public_key;
        let key_ser = serde_json::to_vec(&nullifier_public_key).unwrap();
        in_nullifiers.push(generate_nullifiers(in_utxo, &key_ser));
    }
    for in_commitment in in_commitments {
        // Every spent UTXO must already be committed in the tree.
        assert!(validate_in_commitments_tree(
            &in_commitment,
            &public_context.commitments_tree,
        ));
    }
    in_nullifiers
}
/// Commitment secrets for `value` with fresh random value and generator
/// blinding factors. Currently unused (hence the `#[allow(unused)]`).
#[allow(unused)]
fn commitment_secrets_random(value: u64) -> CommitmentSecrets {
    CommitmentSecrets {
        value,
        value_blinding_factor: Tweak::new(&mut thread_rng()),
        generator_blinding_factor: Tweak::new(&mut thread_rng()),
    }
}
pub fn tag_random() -> Tag {
use rand::thread_rng;
use rand::RngCore;
let mut bytes = [0u8; 32];
thread_rng().fill_bytes(&mut bytes);
Tag::from(bytes)
}
/// Pedersen commitment to `comm.value` under a generator blinded with `tag`
/// and `comm.generator_blinding_factor`.
pub fn commit(comm: &CommitmentSecrets, tag: Tag) -> PedersenCommitment {
    // Blind the tagged generator first, then commit to the value with its own
    // blinding factor.
    let generator = Generator::new_blinded(SECP256K1, tag, comm.generator_blinding_factor);
    PedersenCommitment::new(SECP256K1, comm.value, comm.value_blinding_factor, generator)
}
/// new_commitment for a Vec of values
///
/// Commits to each value in `public_info_vec` using ONE shared random value
/// blinding factor (`random_val`), ONE shared generator blinding factor and
/// ONE random tag for the whole batch.
///
/// Returns `(generator_blinding_factor, value_blinding_bytes, commitments)`.
///
/// NOTE(review): sharing the same blinding factors across all commitments
/// links them together — confirm this is intended.
///
/// # Panics
/// Panics if `random_val` is not a valid tweak (`Tweak::from_slice`).
pub fn pedersen_commitment_vec(
    public_info_vec: Vec<u64>,
) -> (Tweak, [u8; 32], Vec<PedersenCommitment>) {
    let mut random_val: [u8; 32] = [0; 32];
    thread_rng().fill_bytes(&mut random_val);
    let generator_blinding_factor = Tweak::new(&mut thread_rng());
    let tag = tag_random();
    let vec_commitments = public_info_vec
        .into_iter()
        .map(|public_info| {
            let commitment_secrets = CommitmentSecrets {
                value: public_info,
                value_blinding_factor: Tweak::from_slice(&random_val).unwrap(),
                generator_blinding_factor,
            };
            commit(&commitment_secrets, tag)
        })
        .collect();
    (generator_blinding_factor, random_val, vec_commitments)
}
/// Verify Pedersen commitment
///
/// takes the public_info, secret_r and pedersen_commitment and
///
/// checks that commitment(public_info, secret_r) is equal pedersen_commitment
/// where the commitment is pedersen commitment.
///
/// NOTE(review): the recomputed commitment uses a freshly RANDOM generator
/// blinding factor and a freshly RANDOM tag, neither of which is derived from
/// the caller's commitment. Unless those happen to match the committer's
/// values, the comparison will effectively always be `false` — and the result
/// is non-deterministic. Confirm whether the tag and generator blinding
/// factor should be inputs instead.
///
/// # Panics
/// Panics if `secret_r` is not a valid tweak.
pub fn verify_commitment(
    public_info: u64,
    secret_r: &[u8],
    pedersen_commitment: &PedersenCommitment,
) -> bool {
    let commitment_secrets = CommitmentSecrets {
        value: public_info,
        value_blinding_factor: Tweak::from_slice(secret_r).unwrap(),
        generator_blinding_factor: Tweak::new(&mut thread_rng()),
    };
    let tag = tag_random();
    let commitment = commit(&commitment_secrets, tag);
    commitment == *pedersen_commitment
}
/// Validate inclusion proof for pedersen_commitment
///
/// Keys the tree lookup by the first 32 bytes of the serialized commitment.
///
/// ToDo: Solve it in more scalable way
pub fn validate_in_commitments_tree_se(
    pedersen_commitment: &PedersenCommitment,
    commitment_tree: &UTXOCommitmentsMerkleTree,
) -> bool {
    // Only the first 32 bytes of the serialized commitment are used as the
    // tree key.
    let alighned_hash: [u8; 32] = pedersen_commitment.serialize()[0..32].try_into().unwrap();
    commitment_tree.get_proof(alighned_hash).is_some()
}
/// Generate nullifier SE
///
/// takes the pedersen_commitment and npk then
/// returns a nullifier, where the nullifier = poseidon_hash(pedersen_commitment || npk)
///
/// # Panics
/// Panics if the Poseidon hash fails.
pub fn generate_nullifiers_se(pedersen_commitment: &PedersenCommitment, npk: &[u8]) -> Vec<u8> {
    let commitment_ser = pedersen_commitment.serialize().to_vec();
    poseidon_hash(&[&commitment_ser, npk]).unwrap().to_vec()
}
/// Check balances SE
///
/// takes the input_balance and output_utxos[],
///
/// returns True if the token amount in input_balance matches the sum of all
/// output_utxos[], otherwise returns False.
pub fn check_balances_se(input_balance: u128, output_utxos: &[UTXO]) -> bool {
    let total_output = output_utxos
        .iter()
        .fold(0u128, |acc, utxo| acc + utxo.amount);
    total_output == input_balance
}
/// Shielding circuit checks.
///
/// Asserts `public_info` equals the sum of `output_utxos` amounts, derives
/// the caller's nullifier for `pedersen_commitment`, asserts the commitment
/// is present in the context's commitment tree, and re-verifies the Pedersen
/// commitment against `public_info`/`secret_r`.
///
/// Returns `(output_commitments, nullifier)`.
///
/// # Panics
/// Panics if any of the checks fail or the caller is missing from
/// `account_masks`.
pub fn shielded_circuit(
    public_info: u64,
    output_utxos: &[UTXO],
    pedersen_commitment: PedersenCommitment,
    secret_r: &[u8],
    public_context: &PublicSCContext,
) -> (Vec<Vec<u8>>, Vec<u8>) {
    assert!(check_balances_se(public_info as u128, output_utxos));
    let out_commitments = generate_commitments(output_utxos);
    // Nullifier key comes from the caller's own account mask.
    let nullifier_public_key = public_context
        .account_masks
        .get(&public_context.caller_address)
        .unwrap()
        .nullifier_public_key;
    let key_ser = serde_json::to_vec(&nullifier_public_key).unwrap();
    let nullifier = generate_nullifiers_se(&pedersen_commitment, &key_ser);
    assert!(validate_in_commitments_tree_se(
        &pedersen_commitment,
        &public_context.commitments_tree,
    ));
    // NOTE(review): `verify_commitment` recomputes the commitment with a
    // fresh random tag and generator blinding factor, so this assertion is
    // non-deterministic — confirm the intended verification scheme.
    assert!(verify_commitment(
        public_info,
        secret_r,
        &pedersen_commitment
    ));
    (out_commitments, nullifier)
}

View File

@ -1,186 +0,0 @@
use std::collections::BTreeMap;
use accounts::account_core::AccountPublicMask;
use common::merkle_tree_public::{merkle_tree::UTXOCommitmentsMerkleTree, TreeHashType};
use nssa::Address;
use serde::{ser::SerializeStruct, Serialize};
// Struct/field name constants used by the manual `Serialize` impl for
// `PublicSCContext`. (`PUT_TX_STORE_ROOT` and `NULLIFIERS_SET` are not
// referenced by the visible impl — presumably reserved for future fields;
// confirm before removing.)
pub const PUBLIC_SC_CONTEXT: &str = "PublicSCContext";
pub const CALLER_ADDRESS: &str = "caller_address";
pub const CALLER_BALANCE: &str = "caller_balance";
pub const ACCOUNT_MASKS_KEYS_SORTED: &str = "account_masks_keys_sorted";
pub const ACCOUNT_MASKS_VALUES_SORTED: &str = "account_masks_values_sorted";
pub const COMMITMENT_STORE_ROOT: &str = "commitment_store_root";
pub const PUT_TX_STORE_ROOT: &str = "put_tx_store_root";
pub const COMMITMENT_TREE: &str = "commitments_tree";
pub const NULLIFIERS_SET: &str = "nullifiers_set";
///Structure, representing context, given to a smart contract on a call
pub struct PublicSCContext {
    /// Address of the account invoking the contract.
    pub caller_address: Address,
    /// Public balance of the caller.
    pub caller_balance: u64,
    /// Public masks of known accounts, keyed by address.
    pub account_masks: BTreeMap<Address, AccountPublicMask>,
    // NOTE(review): field name keeps the original "comitment" misspelling;
    // renaming would break users of this public field.
    pub comitment_store_root: TreeHashType,
    /// Merkle tree of UTXO commitments.
    pub commitments_tree: UTXOCommitmentsMerkleTree,
}
impl Serialize for PublicSCContext {
    /// Serializes the context with deterministically ordered account masks so
    /// that repeated serializations of the same context are byte-identical.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        // Sort addresses and masks explicitly so map iteration order cannot
        // influence the output.
        let mut account_masks_keys: Vec<[u8; 32]> = self
            .account_masks
            .keys()
            .cloned()
            .map(|addr| *addr.value())
            .collect();
        account_masks_keys.sort();
        let mut account_mask_values: Vec<AccountPublicMask> =
            self.account_masks.values().cloned().collect();
        account_mask_values.sort_by(|left, right| left.address.cmp(&right.address));
        // Exactly six fields are emitted below; the declared length must match
        // for formats that rely on it (the original declared 7).
        let mut s = serializer.serialize_struct(PUBLIC_SC_CONTEXT, 6)?;
        s.serialize_field(CALLER_ADDRESS, &self.caller_address)?;
        s.serialize_field(CALLER_BALANCE, &self.caller_balance)?;
        s.serialize_field(ACCOUNT_MASKS_KEYS_SORTED, &account_masks_keys)?;
        s.serialize_field(ACCOUNT_MASKS_VALUES_SORTED, &account_mask_values)?;
        s.serialize_field(COMMITMENT_STORE_ROOT, &self.comitment_store_root)?;
        s.serialize_field(COMMITMENT_TREE, &self.commitments_tree)?;
        s.end()
    }
}
impl PublicSCContext {
    ///Produces `u64` from bytes in a vector
    ///
    /// Assumes, that vector of le_bytes (little-endian), at most 8 bytes long;
    /// shorter vectors are zero-padded in the high-order bytes.
    ///
    /// # Panics
    /// Panics if `data` is longer than 8 bytes.
    pub fn produce_u64_from_fit_vec(data: Vec<u8>) -> u64 {
        let data_len = data.len();
        assert!(data_len <= 8);
        let mut le_bytes: [u8; 8] = [0; 8];
        for (idx, item) in data.into_iter().enumerate() {
            le_bytes[idx] = item
        }
        u64::from_le_bytes(le_bytes)
    }
    ///Produces vector of `u64` from context
    ///
    /// JSON-serializes `self` and packs the bytes into little-endian `u64`s,
    /// 8 bytes at a time.
    ///
    /// NOTE(review): when the serialized length is an exact multiple of 8,
    /// the `0..=len/8` loop appends one extra trailing `0` — confirm intended.
    pub fn produce_u64_list_from_context(&self) -> Result<Vec<u64>, serde_json::Error> {
        let mut u64_list = vec![];
        let ser_data = serde_json::to_vec(self)?;
        //`ToDo` Replace with `next_chunk` usage, when feature stabilizes in Rust
        for i in 0..=(ser_data.len() / 8) {
            // Take a full 8-byte chunk while one fits, otherwise the tail.
            let next_chunk: Vec<u8> = if (i + 1) * 8 < ser_data.len() {
                ser_data[(i * 8)..((i + 1) * 8)].to_vec()
            } else {
                ser_data[(i * 8)..(ser_data.len())].to_vec()
            };
            u64_list.push(PublicSCContext::produce_u64_from_fit_vec(next_chunk));
        }
        Ok(u64_list)
    }
}
#[cfg(test)]
mod tests {
    use accounts::account_core::Account;
    use common::utxo_commitment::UTXOCommitment;

    use super::*;

    // Builds a context with three freshly generated accounts and a one-leaf
    // commitment tree, shared by the tests below.
    fn create_test_context() -> PublicSCContext {
        let caller_address = Address::new([1; 32]);
        let comitment_store_root = [3; 32];
        let commitments_tree =
            UTXOCommitmentsMerkleTree::new(vec![UTXOCommitment { hash: [5; 32] }]);
        let mut account_masks = BTreeMap::new();
        let acc_1 = Account::new();
        let acc_2 = Account::new();
        let acc_3 = Account::new();
        account_masks.insert(acc_1.address, acc_1.make_account_public_mask());
        account_masks.insert(acc_2.address, acc_2.make_account_public_mask());
        account_masks.insert(acc_3.address, acc_3.make_account_public_mask());
        PublicSCContext {
            caller_address,
            caller_balance: 100,
            account_masks,
            comitment_store_root,
            commitments_tree,
        }
    }

    // Serializing the same context twice must be byte-identical — the manual
    // `Serialize` impl sorts the account masks to guarantee this.
    #[test]
    fn bin_ser_stability_test() {
        let test_context = create_test_context();
        let serialization_1 = serde_json::to_vec(&test_context).unwrap();
        let serialization_2 = serde_json::to_vec(&test_context).unwrap();
        assert_eq!(serialization_1, serialization_2);
    }

    #[test]
    fn correct_u64_production_from_fit_vec() {
        let le_vec = vec![1, 1, 1, 1, 2, 1, 1, 1];
        let num = PublicSCContext::produce_u64_from_fit_vec(le_vec);
        assert_eq!(num, 72340177133043969);
    }

    #[test]
    fn correct_u64_production_from_small_vec() {
        //7 items instead of 8
        let le_vec = vec![1, 1, 1, 1, 2, 1, 1];
        let num = PublicSCContext::produce_u64_from_fit_vec(le_vec);
        assert_eq!(num, 282583095116033);
    }

    // Short inputs are zero-padded in the high-order (trailing LE) bytes.
    #[test]
    fn correct_u64_production_from_small_vec_le_bytes() {
        //7 items instead of 8
        let le_vec = vec![1, 1, 1, 1, 2, 1, 1];
        let le_vec_res = [1, 1, 1, 1, 2, 1, 1, 0];
        let num = PublicSCContext::produce_u64_from_fit_vec(le_vec);
        assert_eq!(num.to_le_bytes(), le_vec_res);
    }

    #[test]
    #[should_panic]
    fn correct_u64_production_from_unfit_vec_should_panic() {
        //9 items instead of 8
        let le_vec = vec![1, 1, 1, 1, 2, 1, 1, 1, 1];
        PublicSCContext::produce_u64_from_fit_vec(le_vec);
    }

    #[test]
    fn consistent_len_of_context_commitments() {
        let test_context = create_test_context();
        let context_num_vec1 = test_context.produce_u64_list_from_context().unwrap();
        let context_num_vec2 = test_context.produce_u64_list_from_context().unwrap();
        assert_eq!(context_num_vec1.len(), context_num_vec2.len());
    }
}

View File

@ -1,100 +0,0 @@
use accounts::{account_core::Account, key_management::ephemeral_key_holder::EphemeralKeyHolder};
use anyhow::Result;
use common::transaction::{TransactionBody, TxKind};
use rand::thread_rng;
use risc0_zkvm::Receipt;
use secp256k1_zkp::{CommitmentSecrets, PedersenCommitment, Tweak};
use utxo::utxo_core::UTXO;
use crate::proofs_circuits::{commit, generate_nullifiers, tag_random};
/// Builds a `TransactionBody` for a public (non-shielded) transaction.
///
/// Only the execution input, Pedersen commitments, tweak, `secret_r` and the
/// smart-contract address are populated; all privacy-related fields are left
/// empty.
pub fn create_public_transaction_payload(
    execution_input: Vec<u8>,
    commitment: Vec<PedersenCommitment>,
    tweak: Tweak,
    secret_r: [u8; 32],
    sc_addr: String,
) -> TransactionBody {
    TransactionBody {
        tx_kind: TxKind::Public,
        execution_input,
        // No private execution artifacts for a public transaction.
        execution_output: vec![],
        utxo_commitments_spent_hashes: vec![],
        utxo_commitments_created_hashes: vec![],
        nullifier_created_hashes: vec![],
        execution_proof_private: "".to_string(),
        encoded_data: vec![],
        ephemeral_pub_key: vec![],
        commitment,
        tweak,
        secret_r,
        sc_addr,
    }
}
/// Encrypts each UTXO to its receiver's viewing public key.
///
/// A fresh ephemeral key is generated for every UTXO. Returns one pair per
/// input, taken directly from `Account::encrypt_data`'s two-element result
/// (presumably `(ciphertext, ephemeral key material)` — confirm against
/// `encrypt_data`).
///
/// # Panics
/// Panics if a UTXO fails to serialize to JSON.
pub fn encode_utxos_to_receivers(
    utxos_receivers: Vec<(UTXO, &Account)>,
) -> Vec<(Vec<u8>, Vec<u8>)> {
    let mut all_encoded_data = vec![];
    for (utxo, receiver) in utxos_receivers {
        // One fresh ephemeral key per UTXO.
        let ephm_key_holder = EphemeralKeyHolder::new_os_random();
        let encoded_data = Account::encrypt_data(
            &ephm_key_holder,
            receiver.key_holder.viewing_public_key,
            &serde_json::to_vec(&utxo).unwrap(),
        );
        let encoded_data_vec = (encoded_data.0, encoded_data.1.to_vec());
        all_encoded_data.push(encoded_data_vec);
    }
    all_encoded_data
}
/// Derives a nullifier for every spent UTXO from its spender's nullifier
/// secret key bytes.
pub fn generate_nullifiers_spent_utxos(utxos_spent: Vec<(UTXO, &Account)>) -> Vec<Vec<u8>> {
    utxos_spent
        .into_iter()
        .map(|(utxo, spender)| {
            // Key material comes from the spender's UTXO secret key holder.
            let nsk_bytes = spender
                .key_holder
                .utxo_secret_key_holder
                .nullifier_secret_key
                .to_bytes();
            generate_nullifiers(&utxo, &nsk_bytes)
        })
        .collect()
}
/// Hex-encodes the JSON serialization of a RISC Zero receipt.
pub fn encode_receipt(receipt: Receipt) -> Result<String> {
    let serialized = serde_json::to_vec(&receipt)?;
    Ok(hex::encode(serialized))
}
/// Pedersen commitment to `value`, using the account's viewing secret key
/// bytes as the value blinding factor, plus a random generator blinding
/// factor and a random tag.
///
/// # Errors
/// Returns an error if the viewing secret key bytes are not a valid tweak.
pub fn generate_secret_random_commitment(
    value: u64,
    account: &Account,
) -> Result<PedersenCommitment> {
    let commitment_secrets = CommitmentSecrets {
        value,
        // Value blinding is derived from the account's viewing secret key.
        value_blinding_factor: Tweak::from_slice(
            &account
                .key_holder
                .utxo_secret_key_holder
                .viewing_secret_key
                .to_bytes(),
        )?,
        generator_blinding_factor: Tweak::new(&mut thread_rng()),
    };
    let tag = tag_random();
    let commitment = commit(&commitment_secrets, tag);
    Ok(commitment)
}

View File

@ -23,9 +23,6 @@ hex.workspace = true
actix-rt.workspace = true actix-rt.workspace = true
clap.workspace = true clap.workspace = true
[dependencies.sc_core]
path = "../sc_core"
[dependencies.accounts] [dependencies.accounts]
path = "../accounts" path = "../accounts"

View File

@ -1,10 +1,9 @@
use std::collections::{BTreeMap, HashMap}; use std::collections::HashMap;
use accounts::account_core::Account; use accounts::account_core::Account;
use anyhow::Result; use anyhow::Result;
use common::merkle_tree_public::merkle_tree::UTXOCommitmentsMerkleTree; use common::merkle_tree_public::merkle_tree::UTXOCommitmentsMerkleTree;
use nssa::Address; use nssa::Address;
use sc_core::public_context::PublicSCContext;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use crate::config::WalletConfig; use crate::config::WalletConfig;
@ -54,22 +53,6 @@ impl WalletChainStore {
wallet_config: config, wallet_config: config,
}) })
} }
pub fn produce_context(&self, caller: Address) -> PublicSCContext {
let mut account_masks = BTreeMap::new();
for (acc_addr, acc) in &self.acc_map {
account_masks.insert(*acc_addr, acc.make_account_public_mask());
}
PublicSCContext {
caller_address: caller,
caller_balance: self.acc_map.get(&caller).unwrap().balance,
account_masks,
comitment_store_root: self.utxo_commitments_store.get_root().unwrap_or([0; 32]),
commitments_tree: self.utxo_commitments_store.clone(),
}
}
} }
#[cfg(test)] #[cfg(test)]

View File

@ -1,7 +1,6 @@
use std::path::PathBuf; use std::path::PathBuf;
use accounts::account_core::Account; use accounts::account_core::Account;
use nssa::gas_calculator::GasCalculator;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
@ -22,20 +21,6 @@ pub struct GasConfig {
pub gas_limit_runtime: u64, pub gas_limit_runtime: u64,
} }
impl From<GasConfig> for GasCalculator {
fn from(value: GasConfig) -> Self {
GasCalculator::new(
value.gas_fee_per_byte_deploy,
value.gas_fee_per_input_buffer_runtime,
value.gas_fee_per_byte_runtime,
value.gas_cost_runtime,
value.gas_cost_deploy,
value.gas_limit_deploy,
value.gas_limit_runtime,
)
}
}
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub struct WalletConfig { pub struct WalletConfig {
///Home dir of sequencer storage ///Home dir of sequencer storage

View File

@ -2,7 +2,7 @@ use std::{fs::File, io::BufReader, path::PathBuf, str::FromStr};
use accounts::account_core::Account; use accounts::account_core::Account;
use anyhow::Result; use anyhow::Result;
use nssa::{address::HexString, Address}; use nssa::Address;
use crate::{config::WalletConfig, HOME_DIR_ENV_VAR}; use crate::{config::WalletConfig, HOME_DIR_ENV_VAR};
@ -22,7 +22,7 @@ pub fn fetch_config() -> Result<WalletConfig> {
//ToDo: Replace with structures conversion in future //ToDo: Replace with structures conversion in future
pub fn produce_account_addr_from_hex(hex_str: String) -> Result<Address> { pub fn produce_account_addr_from_hex(hex_str: String) -> Result<Address> {
Ok(HexString::try_from(hex_str.as_str())?.into()) Ok(hex_str.parse()?)
} }
///Fetch list of accounts stored at `NSSA_WALLET_HOME_DIR/curr_accounts.json` ///Fetch list of accounts stored at `NSSA_WALLET_HOME_DIR/curr_accounts.json`

View File

@ -1,4 +1,4 @@
use std::{fs::File, io::Write, path::PathBuf, sync::Arc}; use std::sync::Arc;
use common::{ use common::{
sequencer_client::{json::SendTxResponse, SequencerClient}, sequencer_client::{json::SendTxResponse, SequencerClient},
@ -15,7 +15,7 @@ use nssa::Address;
use clap::{Parser, Subcommand}; use clap::{Parser, Subcommand};
use crate::helperfunctions::{ use crate::helperfunctions::{
fetch_config, fetch_persistent_accounts, get_home, produce_account_addr_from_hex, fetch_config, fetch_persistent_accounts, produce_account_addr_from_hex,
}; };
pub const HOME_DIR_ENV_VAR: &str = "NSSA_WALLET_HOME_DIR"; pub const HOME_DIR_ENV_VAR: &str = "NSSA_WALLET_HOME_DIR";
@ -96,33 +96,6 @@ impl WalletCore {
Err(ExecutionFailureKind::AmountMismatchError) Err(ExecutionFailureKind::AmountMismatchError)
} }
} }
///Dumps all accounts from acc_map at `path`
///
///Currently storing everything in one file
///
///ToDo: extend storage
pub fn store_present_accounts_at_path(&self, path: PathBuf) -> Result<PathBuf> {
let dump_path = path.join("curr_accounts.json");
let curr_accs: Vec<Account> = self.storage.acc_map.values().cloned().collect();
let accs_serialized = serde_json::to_vec_pretty(&curr_accs)?;
let mut acc_file = File::create(&dump_path).unwrap();
acc_file.write_all(&accs_serialized).unwrap();
Ok(dump_path)
}
///Dumps all accounts from acc_map at `NSSA_WALLET_HOME_DIR`
///
///Currently storing everything in one file
///
///ToDo: extend storage
pub fn store_present_accounts_at_home(&self) -> Result<PathBuf> {
let home = get_home()?;
self.store_present_accounts_at_path(home)
}
} }
///Represents CLI command for a wallet ///Represents CLI command for a wallet
@ -144,12 +117,6 @@ pub enum Command {
#[arg(long)] #[arg(long)]
amount: u128, amount: u128,
}, },
///Dump accounts at destination
DumpAccountsOnDisc {
///Dump path for accounts
#[arg(short, long)]
dump_path: PathBuf,
},
} }
///To execute commands, env var NSSA_WALLET_HOME_DIR must be set into directory with config ///To execute commands, env var NSSA_WALLET_HOME_DIR must be set into directory with config
@ -183,19 +150,6 @@ pub async fn execute_subcommand(command: Command) -> Result<()> {
info!("Results of tx send is {res:#?}"); info!("Results of tx send is {res:#?}");
//ToDo: Insert transaction polling logic here //ToDo: Insert transaction polling logic here
let acc_storage_path = wallet_core.store_present_accounts_at_home()?;
info!("Accounts stored at {acc_storage_path:#?}");
}
Command::DumpAccountsOnDisc { dump_path } => {
let node_config = fetch_config()?;
let wallet_core = WalletCore::start_from_config_update_chain(node_config).await?;
wallet_core.store_present_accounts_at_path(dump_path.clone())?;
info!("Accounts stored at path {dump_path:#?}");
} }
} }