Merge branch 'main' into Pravdyvy/state-transition-token-transfer

This commit is contained in:
Oleksandr Pravdyvyi 2025-07-18 12:35:34 +03:00
commit 140a051fc9
29 changed files with 963 additions and 380 deletions

61
Cargo.lock generated
View File

@ -767,7 +767,7 @@ dependencies = [
[[package]]
name = "bonsai-sdk"
version = "1.4.0"
source = "git+https://github.com/risc0/risc0.git?branch=release-2.1#f34d6913945ab9f214219f3cbee1703f63936cc4"
source = "git+https://github.com/risc0/risc0.git?branch=release-2.3#c6297fc2075cb66aadb733ee677223b5a7f8c85a"
dependencies = [
"duplicate",
"maybe-async",
@ -3715,8 +3715,8 @@ checksum = "3df6368f71f205ff9c33c076d170dd56ebf68e8161c733c0caa07a7a5509ed53"
[[package]]
name = "risc0-binfmt"
version = "2.0.1"
source = "git+https://github.com/risc0/risc0.git?branch=release-2.1#f34d6913945ab9f214219f3cbee1703f63936cc4"
version = "2.0.2"
source = "git+https://github.com/risc0/risc0.git?branch=release-2.3#c6297fc2075cb66aadb733ee677223b5a7f8c85a"
dependencies = [
"anyhow",
"borsh",
@ -3733,8 +3733,8 @@ dependencies = [
[[package]]
name = "risc0-build"
version = "2.1.2"
source = "git+https://github.com/risc0/risc0.git?branch=release-2.1#f34d6913945ab9f214219f3cbee1703f63936cc4"
version = "2.3.0"
source = "git+https://github.com/risc0/risc0.git?branch=release-2.3#c6297fc2075cb66aadb733ee677223b5a7f8c85a"
dependencies = [
"anyhow",
"cargo_metadata",
@ -3757,7 +3757,7 @@ dependencies = [
[[package]]
name = "risc0-build-kernel"
version = "2.0.0"
source = "git+https://github.com/risc0/risc0.git?branch=release-2.1#f34d6913945ab9f214219f3cbee1703f63936cc4"
source = "git+https://github.com/risc0/risc0.git?branch=release-2.3#c6297fc2075cb66aadb733ee677223b5a7f8c85a"
dependencies = [
"cc",
"directories",
@ -3770,8 +3770,8 @@ dependencies = [
[[package]]
name = "risc0-circuit-keccak"
version = "2.0.2"
source = "git+https://github.com/risc0/risc0.git?branch=release-2.1#f34d6913945ab9f214219f3cbee1703f63936cc4"
version = "3.0.0"
source = "git+https://github.com/risc0/risc0.git?branch=release-2.3#c6297fc2075cb66aadb733ee677223b5a7f8c85a"
dependencies = [
"anyhow",
"bytemuck",
@ -3791,8 +3791,8 @@ dependencies = [
[[package]]
name = "risc0-circuit-keccak-sys"
version = "2.0.0"
source = "git+https://github.com/risc0/risc0.git?branch=release-2.1#f34d6913945ab9f214219f3cbee1703f63936cc4"
version = "3.0.0"
source = "git+https://github.com/risc0/risc0.git?branch=release-2.3#c6297fc2075cb66aadb733ee677223b5a7f8c85a"
dependencies = [
"cc",
"cust",
@ -3806,8 +3806,8 @@ dependencies = [
[[package]]
name = "risc0-circuit-recursion"
version = "2.0.2"
source = "git+https://github.com/risc0/risc0.git?branch=release-2.1#f34d6913945ab9f214219f3cbee1703f63936cc4"
version = "3.0.0"
source = "git+https://github.com/risc0/risc0.git?branch=release-2.3#c6297fc2075cb66aadb733ee677223b5a7f8c85a"
dependencies = [
"anyhow",
"bytemuck",
@ -3831,8 +3831,8 @@ dependencies = [
[[package]]
name = "risc0-circuit-recursion-sys"
version = "2.0.0"
source = "git+https://github.com/risc0/risc0.git?branch=release-2.1#f34d6913945ab9f214219f3cbee1703f63936cc4"
version = "3.0.0"
source = "git+https://github.com/risc0/risc0.git?branch=release-2.3#c6297fc2075cb66aadb733ee677223b5a7f8c85a"
dependencies = [
"glob",
"risc0-build-kernel",
@ -3843,8 +3843,8 @@ dependencies = [
[[package]]
name = "risc0-circuit-rv32im"
version = "2.0.4"
source = "git+https://github.com/risc0/risc0.git?branch=release-2.1#f34d6913945ab9f214219f3cbee1703f63936cc4"
version = "3.0.0"
source = "git+https://github.com/risc0/risc0.git?branch=release-2.3#c6297fc2075cb66aadb733ee677223b5a7f8c85a"
dependencies = [
"anyhow",
"auto_ops",
@ -3874,8 +3874,8 @@ dependencies = [
[[package]]
name = "risc0-circuit-rv32im-sys"
version = "2.0.2"
source = "git+https://github.com/risc0/risc0.git?branch=release-2.1#f34d6913945ab9f214219f3cbee1703f63936cc4"
version = "3.0.0"
source = "git+https://github.com/risc0/risc0.git?branch=release-2.3#c6297fc2075cb66aadb733ee677223b5a7f8c85a"
dependencies = [
"cc",
"cust",
@ -3890,7 +3890,7 @@ dependencies = [
[[package]]
name = "risc0-core"
version = "2.0.0"
source = "git+https://github.com/risc0/risc0.git?branch=release-2.1#f34d6913945ab9f214219f3cbee1703f63936cc4"
source = "git+https://github.com/risc0/risc0.git?branch=release-2.3#c6297fc2075cb66aadb733ee677223b5a7f8c85a"
dependencies = [
"bytemuck",
"bytemuck_derive",
@ -3901,8 +3901,8 @@ dependencies = [
[[package]]
name = "risc0-groth16"
version = "2.0.1"
source = "git+https://github.com/risc0/risc0.git?branch=release-2.1#f34d6913945ab9f214219f3cbee1703f63936cc4"
version = "2.0.2"
source = "git+https://github.com/risc0/risc0.git?branch=release-2.3#c6297fc2075cb66aadb733ee677223b5a7f8c85a"
dependencies = [
"anyhow",
"ark-bn254",
@ -3926,7 +3926,7 @@ dependencies = [
[[package]]
name = "risc0-sys"
version = "1.4.0"
source = "git+https://github.com/risc0/risc0.git?branch=release-2.1#f34d6913945ab9f214219f3cbee1703f63936cc4"
source = "git+https://github.com/risc0/risc0.git?branch=release-2.3#c6297fc2075cb66aadb733ee677223b5a7f8c85a"
dependencies = [
"anyhow",
"cust",
@ -3937,7 +3937,7 @@ dependencies = [
[[package]]
name = "risc0-zkos-v1compat"
version = "2.0.1"
source = "git+https://github.com/risc0/risc0.git?branch=release-2.1#f34d6913945ab9f214219f3cbee1703f63936cc4"
source = "git+https://github.com/risc0/risc0.git?branch=release-2.3#c6297fc2075cb66aadb733ee677223b5a7f8c85a"
dependencies = [
"include_bytes_aligned",
"no_std_strings",
@ -3945,8 +3945,8 @@ dependencies = [
[[package]]
name = "risc0-zkp"
version = "2.0.1"
source = "git+https://github.com/risc0/risc0.git?branch=release-2.1#f34d6913945ab9f214219f3cbee1703f63936cc4"
version = "2.0.2"
source = "git+https://github.com/risc0/risc0.git?branch=release-2.3#c6297fc2075cb66aadb733ee677223b5a7f8c85a"
dependencies = [
"anyhow",
"blake2",
@ -3976,8 +3976,8 @@ dependencies = [
[[package]]
name = "risc0-zkvm"
version = "2.1.0"
source = "git+https://github.com/risc0/risc0.git?branch=release-2.1#f34d6913945ab9f214219f3cbee1703f63936cc4"
version = "2.3.0"
source = "git+https://github.com/risc0/risc0.git?branch=release-2.3#c6297fc2075cb66aadb733ee677223b5a7f8c85a"
dependencies = [
"addr2line 0.22.0",
"anyhow",
@ -4022,8 +4022,8 @@ dependencies = [
[[package]]
name = "risc0-zkvm-platform"
version = "2.0.2"
source = "git+https://github.com/risc0/risc0.git?branch=release-2.1#f34d6913945ab9f214219f3cbee1703f63936cc4"
version = "2.0.3"
source = "git+https://github.com/risc0/risc0.git?branch=release-2.3#c6297fc2075cb66aadb733ee677223b5a7f8c85a"
dependencies = [
"bytemuck",
"cfg-if",
@ -4185,7 +4185,7 @@ checksum = "28d3b2b1366ec20994f1fd18c3c594f05c5dd4bc44d8bb0c1c632c8d6829481f"
[[package]]
name = "rzup"
version = "0.4.1"
source = "git+https://github.com/risc0/risc0.git?branch=release-2.1#f34d6913945ab9f214219f3cbee1703f63936cc4"
source = "git+https://github.com/risc0/risc0.git?branch=release-2.3#c6297fc2075cb66aadb733ee677223b5a7f8c85a"
dependencies = [
"semver",
"serde",
@ -4336,6 +4336,7 @@ dependencies = [
"common",
"elliptic-curve",
"env_logger",
"hex",
"k256",
"log",
"mempool",

21
LICENSE Normal file
View File

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2025 Vac
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@ -85,6 +85,8 @@ impl<'de> Deserialize<'de> for Account {
///A structure, which represents all the visible (public) information
///
/// known to each node about account `address`
///
/// Main usage is to encode data for other account
#[derive(Serialize, Clone)]
pub struct AccountPublicMask {
pub nullifier_public_key: AffinePoint,
@ -93,6 +95,21 @@ pub struct AccountPublicMask {
pub balance: u64,
}
impl AccountPublicMask {
    /// Encrypt `data` for the receiver identified by
    /// `viewing_public_key_receiver`, using the sender's ephemeral key.
    ///
    /// Delegates to [`Account::encrypt_data`], so masked accounts and full
    /// accounts produce ciphertexts the same way.
    ///
    /// Returns the ciphertext together with the nonce used for encryption.
    pub fn encrypt_data(
        ephemeral_key_holder: &EphemeralKeyHolder,
        viewing_public_key_receiver: AffinePoint,
        data: &[u8],
    ) -> (CipherText, Nonce) {
        // Reuse the parent `Account` function (single source of truth for
        // the encryption scheme).
        Account::encrypt_data(ephemeral_key_holder, viewing_public_key_receiver, data)
    }

    /// Derive the account's tag: the first byte of its address.
    ///
    /// NOTE(review): a one-byte tag can collide across accounts — confirm
    /// callers treat it as a coarse filter, not a unique identifier.
    pub fn make_tag(&self) -> Tag {
        self.address[0]
    }
}
impl Account {
pub fn new() -> Self {
let key_holder = AddressKeyHolder::new_os_random();
@ -121,10 +138,6 @@ impl Account {
}
}
pub fn produce_ephemeral_key_holder(&self) -> EphemeralKeyHolder {
self.key_holder.produce_ephemeral_key_holder()
}
pub fn encrypt_data(
ephemeral_key_holder: &EphemeralKeyHolder,
viewing_public_key_receiver: AffinePoint,
@ -249,4 +262,13 @@ mod tests {
assert!(result.is_ok());
assert_eq!(account.utxos.len(), 1);
}
#[test]
fn accounts_accounts_mask_tag_consistency() {
    // The mask produced from an account must yield the same tag as the
    // account itself — presumably both derive it from the shared address;
    // verify against `Account::make_tag`.
    let account = Account::new();
    let account_mask = account.make_account_public_mask();
    assert_eq!(account.make_tag(), account_mask.make_tag());
}
}

View File

@ -2,7 +2,6 @@ use aes_gcm::{aead::Aead, Aes256Gcm, KeyInit};
use common::merkle_tree_public::TreeHashType;
use constants_types::{CipherText, Nonce};
use elliptic_curve::point::AffineCoordinates;
use ephemeral_key_holder::EphemeralKeyHolder;
use k256::AffinePoint;
use log::info;
use secret_holders::{SeedHolder, TopSecretKeyHolder, UTXOSecretKeyHolder};
@ -55,10 +54,6 @@ impl AddressKeyHolder {
(ephemeral_public_key_sender * self.utxo_secret_key_holder.viewing_secret_key).into()
}
pub fn produce_ephemeral_key_holder(&self) -> EphemeralKeyHolder {
EphemeralKeyHolder::new_os_random()
}
pub fn decrypt_data(
&self,
ephemeral_public_key_sender: AffinePoint,
@ -114,6 +109,8 @@ mod tests {
use elliptic_curve::point::AffineCoordinates;
use k256::{AffinePoint, ProjectivePoint, Scalar};
use crate::key_management::ephemeral_key_holder::EphemeralKeyHolder;
use super::*;
#[test]
@ -136,7 +133,7 @@ mod tests {
// Generate a random ephemeral public key sender
let scalar = Scalar::random(&mut OsRng);
let ephemeral_public_key_sender = (ProjectivePoint::generator() * scalar).to_affine();
let ephemeral_public_key_sender = (ProjectivePoint::GENERATOR * scalar).to_affine();
// Calculate shared secret
let shared_secret =
@ -151,9 +148,8 @@ mod tests {
let address_key_holder = AddressKeyHolder::new_os_random();
// Generate an ephemeral key and shared secret
let ephemeral_public_key_sender = address_key_holder
.produce_ephemeral_key_holder()
.generate_ephemeral_public_key();
let ephemeral_public_key_sender =
EphemeralKeyHolder::new_os_random().generate_ephemeral_public_key();
let shared_secret =
address_key_holder.calculate_shared_secret_receiver(ephemeral_public_key_sender);

View File

@ -9,7 +9,7 @@ thiserror.workspace = true
serde_json.workspace = true
serde.workspace = true
reqwest.workspace = true
risc0-zkvm = { git = "https://github.com/risc0/risc0.git", branch = "release-2.1" }
risc0-zkvm = { git = "https://github.com/risc0/risc0.git", branch = "release-2.3" }
rs_merkle.workspace = true
sha2.workspace = true

View File

@ -11,6 +11,7 @@ use crate::{transaction::Transaction, utxo_commitment::UTXOCommitment};
use super::{hasher::OwnHasher, tree_leav_item::TreeLeavItem, TreeHashType};
#[derive(Clone)]
pub struct HashStorageMerkleTree<Leav: TreeLeavItem + Clone> {
leaves: HashMap<usize, Leav>,
hash_to_id_map: HashMap<TreeHashType, usize>,

View File

@ -13,8 +13,6 @@ pub struct HelloRequest {}
#[derive(Serialize, Deserialize, Debug)]
pub struct RegisterAccountRequest {
pub nullifier_public_key: Vec<u8>,
pub viewing_public_key: Vec<u8>,
pub address: [u8; 32],
}

View File

@ -18,7 +18,7 @@ reqwest.workspace = true
thiserror.workspace = true
tokio.workspace = true
tempfile.workspace = true
risc0-zkvm = { git = "https://github.com/risc0/risc0.git", branch = "release-2.1" }
risc0-zkvm = { git = "https://github.com/risc0/risc0.git", branch = "release-2.3" }
hex.workspace = true
actix-rt.workspace = true

View File

@ -12,7 +12,7 @@ use common::{
};
use k256::AffinePoint;
use log::{info, warn};
use public_context::PublicSCContext;
use sc_core::public_context::PublicSCContext;
use serde::{Deserialize, Serialize};
use utxo::utxo_core::UTXO;
@ -20,7 +20,6 @@ use crate::{config::NodeConfig, ActionData};
pub mod accounts_store;
pub mod block_store;
pub mod public_context;
#[derive(Deserialize, Serialize)]
pub struct AccMap {
@ -282,6 +281,12 @@ impl NodeChainStore {
account_masks,
comitment_store_root: self.utxo_commitments_store.get_root().unwrap_or([0; 32]),
pub_tx_store_root: self.pub_tx_store.get_root().unwrap_or([0; 32]),
nullifiers_set: self
.nullifier_store
.iter()
.map(|item| item.utxo_hash)
.collect(),
commitments_tree: self.utxo_commitments_store.clone(),
}
}
}

View File

@ -5,7 +5,10 @@ use std::sync::{
use common::{public_transfer_receipts::PublicNativeTokenSend, ExecutionFailureKind};
use accounts::account_core::{Account, AccountAddress};
use accounts::{
account_core::{Account, AccountAddress},
key_management::ephemeral_key_holder::EphemeralKeyHolder,
};
use anyhow::Result;
use chain_storage::NodeChainStore;
use common::transaction::{Transaction, TransactionPayload, TxKind};
@ -197,7 +200,7 @@ impl NodeCore {
let account = acc_map_read_guard.acc_map.get(&acc).unwrap();
let ephm_key_holder = &account.produce_ephemeral_key_holder();
let ephm_key_holder = EphemeralKeyHolder::new_os_random();
ephm_key_holder.log();
let eph_pub_key =
@ -286,7 +289,7 @@ impl NodeCore {
let account = acc_map_read_guard.acc_map.get(&acc).unwrap();
let ephm_key_holder = &account.produce_ephemeral_key_holder();
let ephm_key_holder = EphemeralKeyHolder::new_os_random();
ephm_key_holder.log();
let eph_pub_key =
@ -401,7 +404,7 @@ impl NodeCore {
.map(|(utxo, _)| utxo.clone())
.collect();
let ephm_key_holder = &account.produce_ephemeral_key_holder();
let ephm_key_holder = EphemeralKeyHolder::new_os_random();
ephm_key_holder.log();
let eph_pub_key =
@ -523,7 +526,7 @@ impl NodeCore {
.map(|utxo| utxo.hash)
.collect();
let ephm_key_holder = &account.produce_ephemeral_key_holder();
let ephm_key_holder = EphemeralKeyHolder::new_os_random();
ephm_key_holder.log();
let eph_pub_key =
@ -669,7 +672,7 @@ impl NodeCore {
.map(|(utxo, _)| utxo.clone())
.collect();
let ephm_key_holder = &account.produce_ephemeral_key_holder();
let ephm_key_holder = EphemeralKeyHolder::new_os_random();
ephm_key_holder.log();
let eph_pub_key =
@ -1433,7 +1436,7 @@ impl NodeCore {
.map(|(utxo, _)| utxo.clone())
.collect();
let ephm_key_holder = &account.produce_ephemeral_key_holder();
let ephm_key_holder = EphemeralKeyHolder::new_os_random();
ephm_key_holder.log();
let eph_pub_key =

View File

@ -7,7 +7,6 @@ use common::rpc_primitives::requests::{
use common::transaction::Transaction;
use common::{SequencerClientError, SequencerRpcError};
use json::{SendTxRequest, SendTxResponse, SequencerRpcRequest, SequencerRpcResponse};
use k256::elliptic_curve::group::GroupEncoding;
use reqwest::Client;
use serde_json::Value;
@ -94,8 +93,6 @@ impl SequencerClient {
account: &Account,
) -> Result<RegisterAccountResponse, SequencerClientError> {
let acc_req = RegisterAccountRequest {
nullifier_public_key: account.key_holder.nullifer_public_key.to_bytes().to_vec(),
viewing_public_key: account.key_holder.viewing_public_key.to_bytes().to_vec(),
address: account.address,
};

View File

@ -19,7 +19,7 @@ light-poseidon.workspace = true
ark-bn254.workspace = true
ark-ff.workspace = true
risc0-zkvm = { git = "https://github.com/risc0/risc0.git", branch = "release-2.1" }
risc0-zkvm = { git = "https://github.com/risc0/risc0.git", branch = "release-2.3" }
[dependencies.accounts]
path = "../accounts"

136
sc_core/src/blob_utils.rs Normal file
View File

@ -0,0 +1,136 @@
use serde::Serialize;
use storage::{
sc_db_utils::{produce_blob_from_fit_vec, DataBlob, DataBlobChangeVariant},
SC_DATA_BLOB_SIZE,
};
///Creates blob list from generic serializable state.
///
///The JSON-serialized state is split into `SC_DATA_BLOB_SIZE`-byte chunks;
///the final chunk may be shorter and is padded/fitted by
///`produce_blob_from_fit_vec`.
///
///Fixes an off-by-one in the previous manual chunking: when the serialized
///length was an exact multiple of `SC_DATA_BLOB_SIZE`, the `0..=len/SIZE`
///loop appended a spurious empty trailing blob. `slice::chunks` never yields
///an empty chunk.
///
/// # Errors
/// Returns the underlying `serde_json::Error` if `state` fails to serialize.
///
///`ToDo`: Find a way to align data in a way, to minimize read and write operations in db
pub fn produce_blob_list_from_sc_public_state<S: Serialize>(
    state: &S,
) -> Result<Vec<DataBlob>, serde_json::Error> {
    let ser_data = serde_json::to_vec(state)?;
    // `chunks` replaces the former hand-rolled loop (the `next_chunk` ToDo):
    // it yields full-size pieces plus one shorter remainder, with no trailing
    // empty chunk on exact multiples.
    Ok(ser_data
        .chunks(SC_DATA_BLOB_SIZE)
        .map(|chunk| produce_blob_from_fit_vec(chunk.to_vec()))
        .collect())
}
///Compare two consecutive in time blob lists to produce the list of changes.
///
///Reports `Deleted`/`Created` entries for ids past the shorter list's end,
///then a `Modified` entry for every id in the common prefix whose blob
///differs.
pub fn compare_blob_lists(
    blob_list_old: &[DataBlob],
    blob_list_new: &[DataBlob],
) -> Vec<DataBlobChangeVariant> {
    let mut changes = Vec::new();

    // Ids only in the old list were deleted; ids only in the new list were
    // created. At most one of these ranges is non-empty.
    for id in blob_list_new.len()..blob_list_old.len() {
        changes.push(DataBlobChangeVariant::Deleted { id });
    }
    for id in blob_list_old.len()..blob_list_new.len() {
        changes.push(DataBlobChangeVariant::Created {
            id,
            blob: blob_list_new[id],
        });
    }

    // Walk the common prefix in ascending id order and record in-place edits.
    for (id, (old, new)) in blob_list_old.iter().zip(blob_list_new.iter()).enumerate() {
        if old != new {
            changes.push(DataBlobChangeVariant::Modified {
                id,
                blob_old: *old,
                blob_new: *new,
            });
        }
    }

    changes
}
#[cfg(test)]
mod tests {
    use super::*;
    use serde::Serialize;

    const TEST_BLOB_SIZE: usize = 256; // Define a test blob size for simplicity
    // Must be `const`, not `static`: array repeat expressions like
    // `[1; SC_DATA_BLOB_SIZE]` require a constant expression, and a `static`
    // cannot be read in const context (compile error). The local constant
    // shadows the `SC_DATA_BLOB_SIZE` brought in via `use super::*`.
    // NOTE(review): the `.into()` conversions below assume `DataBlob` is
    // convertible from a `[u8; 256]` array — confirm the real
    // `SC_DATA_BLOB_SIZE` matches `TEST_BLOB_SIZE`.
    const SC_DATA_BLOB_SIZE: usize = TEST_BLOB_SIZE;

    /// Minimal serializable state used to drive blob production.
    #[derive(Serialize)]
    struct TestState {
        a: u32,
        b: u32,
    }

    #[test]
    fn test_produce_blob_list_from_sc_public_state() {
        // Any non-empty serializable state must yield at least one blob.
        let state = TestState { a: 42, b: 99 };
        let result = produce_blob_list_from_sc_public_state(&state).unwrap();
        assert!(!result.is_empty());
    }

    #[test]
    fn test_compare_blob_lists_created() {
        // A blob present only in the new list is reported as `Created`.
        let old_list: Vec<DataBlob> = vec![];
        let new_list: Vec<DataBlob> = vec![[1; SC_DATA_BLOB_SIZE].into()];
        let changes = compare_blob_lists(&old_list, &new_list);
        assert_eq!(changes.len(), 1);
        assert!(matches!(changes[0], DataBlobChangeVariant::Created { .. }));
    }

    #[test]
    fn test_compare_blob_lists_deleted() {
        // A blob present only in the old list is reported as `Deleted`.
        let old_list: Vec<DataBlob> = vec![[1; SC_DATA_BLOB_SIZE].into()];
        let new_list: Vec<DataBlob> = vec![];
        let changes = compare_blob_lists(&old_list, &new_list);
        assert_eq!(changes.len(), 1);
        assert!(matches!(changes[0], DataBlobChangeVariant::Deleted { .. }));
    }

    #[test]
    fn test_compare_blob_lists_modified() {
        // Same position, different contents → reported as `Modified`.
        let old_list: Vec<DataBlob> = vec![[1; SC_DATA_BLOB_SIZE].into()];
        let new_list: Vec<DataBlob> = vec![[2; SC_DATA_BLOB_SIZE].into()];
        let changes = compare_blob_lists(&old_list, &new_list);
        assert_eq!(changes.len(), 1);
        assert!(matches!(changes[0], DataBlobChangeVariant::Modified { .. }));
    }
}

View File

@ -1,8 +1,7 @@
use ark_bn254::Fr;
use light_poseidon::{Poseidon, PoseidonBytesHasher};
#[allow(unused)]
fn poseidon_hash(inputs: &[&[u8]]) -> anyhow::Result<[u8; 32]> {
pub fn poseidon_hash(inputs: &[&[u8]]) -> anyhow::Result<[u8; 32]> {
let mut poseidon = Poseidon::<Fr>::new_circom(2).unwrap();
let hash = poseidon.hash_bytes_be(inputs)?;

View File

@ -1,3 +1,5 @@
pub mod blob_utils;
pub mod cryptography;
pub mod proofs_circuits;
pub mod public_context;
pub mod transaction_payloads_tools;

View File

@ -1,28 +1,43 @@
use bincode;
use k256::Scalar;
use common::merkle_tree_public::merkle_tree::UTXOCommitmentsMerkleTree;
use rand::{thread_rng, RngCore};
use secp256k1_zkp::{CommitmentSecrets, Generator, PedersenCommitment, Tag, Tweak, SECP256K1};
use sha2::{Digest, Sha256};
use utxo::utxo_core::UTXO;
//
use crate::{cryptography::poseidon_hash, public_context::PublicSCContext};
/// SHA-256 digest of `input`, returned as an owned 32-byte vector.
fn hash(input: &[u8]) -> Vec<u8> {
    Sha256::digest(input).to_vec()
}
// Generate nullifiers
// takes the input_utxo and nsk
// returns the nullifiers[i], where the nullifier[i] = hash(in_commitments[i] || nsk) where the hash function
pub fn generate_nullifiers(input_utxo: &UTXO, nsk: &[u8]) -> Vec<u8> {
let mut input = bincode::serialize(input_utxo).unwrap().to_vec();
input.extend_from_slice(nsk);
hash(&input)
/// Generate the nullifier for a single UTXO.
///
/// Takes the `input_utxo` and the serialized nullifier public key `npk`, and
/// returns `poseidon_hash(commitment(input_utxo) || npk)`, where the
/// commitment is produced by [`generate_commitment`].
///
/// # Panics
/// Panics if the Poseidon hash rejects the inputs (via `unwrap`).
pub fn generate_nullifiers(input_utxo: &UTXO, npk: &[u8]) -> Vec<u8> {
    // Hash the UTXO commitment together with the nullifier public key so the
    // nullifier is bound to both the note and its owner's key.
    let commitment = generate_commitment(input_utxo);
    poseidon_hash(&[commitment.as_ref(), npk]).unwrap().to_vec()
}
// Generate commitments for output UTXOs
/// Generate the commitment for a UTXO.
///
/// The commitment is `sha256(bincode(input_utxo))`.
///
/// # Panics
/// Panics if bincode serialization of the UTXO fails.
pub fn generate_commitment(input_utxo: &UTXO) -> Vec<u8> {
    let serialized = bincode::serialize(input_utxo).unwrap(); // Serialize UTXO.
    hash(&serialized)
}
// uses the list of input_utxos[]
// returns in_commitments[] where each in_commitments[i] = Commitment(in_utxos[i]) where the commitment
/// Generate commitments for UTXO
///
/// uses the input_utxos
///
/// returns commitments
pub fn generate_commitments(input_utxos: &[UTXO]) -> Vec<Vec<u8>> {
input_utxos
.iter()
@ -33,60 +48,112 @@ pub fn generate_commitments(input_utxos: &[UTXO]) -> Vec<Vec<u8>> {
.collect()
}
// Validate inclusion proof for in_commitments
// takes the in_commitments[i] as a leaf, the root hash root_commitment and the path in_commitments_proofs[i][],
// returns True if the in_commitments[i] is in the tree with root hash root_commitment otherwise returns False, as membership proof.
pub fn validate_in_commitments_proof(
_in_commitment: &Vec<u8>,
_root_commitment: Vec<u8>,
_in_commitments_proof: &[Vec<u8>],
/// Validate inclusion proof for in_commitments
///
/// ToDo: Solve this in a more scalable way
pub fn validate_in_commitments_tree(
in_commitment: &Vec<u8>,
commitment_tree: &UTXOCommitmentsMerkleTree,
) -> bool {
// ToDo: Implement correct check
let alighned_hash: [u8; 32] = in_commitment.clone().try_into().unwrap();
todo!()
commitment_tree.get_proof(alighned_hash).is_some()
}
// Validate that `nullifier` has not been present in set items before
pub fn validate_nullifier_not_present_in_set_items(
nullifier: [u8; 32],
nullifiers_items: &[[u8; 32]],
) -> bool {
!nullifiers_items.contains(&nullifier)
/// Check that the total balance of the input UTXOs equals the total balance
/// of the output UTXOs, i.e. the private transfer conserves value.
pub fn check_balances_private(in_utxos: &[UTXO], out_utxos: &[UTXO]) -> bool {
    // Accumulate each side imperatively and require exact equality.
    let mut in_sum = 0;
    for utxo in in_utxos {
        in_sum += utxo.amount;
    }
    let mut out_sum = 0;
    for utxo in out_utxos {
        out_sum += utxo.amount;
    }
    in_sum == out_sum
}
#[allow(unused)]
fn private_kernel(
root_commitment: &[u8],
root_nullifier: [u8; 32],
pub fn private_circuit(
input_utxos: &[UTXO],
in_commitments_proof: &[Vec<u8>],
nullifiers_proof: &[[u8; 32]],
nullifier_secret_key: Scalar,
) -> (Vec<u8>, Vec<Vec<u8>>) {
let nullifiers: Vec<_> = input_utxos
.into_iter()
.map(|utxo| generate_nullifiers(&utxo, &nullifier_secret_key.to_bytes()))
.collect();
output_utxos: &[UTXO],
public_context: &PublicSCContext,
) -> (Vec<Vec<u8>>, Vec<Vec<u8>>) {
assert!(check_balances_private(input_utxos, output_utxos));
let in_commitments = generate_commitments(&input_utxos);
let mut in_nullifiers = vec![];
for in_utxo in input_utxos {
let nullifier_public_key = public_context
.account_masks
.get(&in_utxo.owner)
.unwrap()
.nullifier_public_key;
let key_ser = serde_json::to_vec(&nullifier_public_key).unwrap();
in_nullifiers.push(generate_nullifiers(in_utxo, &key_ser));
}
for in_commitment in in_commitments {
validate_in_commitments_proof(
assert!(validate_in_commitments_tree(
&in_commitment,
root_commitment.to_vec(),
in_commitments_proof,
);
&public_context.commitments_tree,
));
}
for nullifier in nullifiers.iter() {
validate_nullifier_not_present_in_set_items(
nullifier[0..32].try_into().unwrap(),
nullifiers_proof,
);
for nullifier in in_nullifiers.iter() {
let nullifier: [u8; 32] = nullifier.clone().try_into().unwrap();
assert!(!public_context.nullifiers_set.contains(&nullifier));
}
(vec![], nullifiers)
(in_nullifiers, generate_commitments(&output_utxos))
}
/// Check balances DE (deshielding).
///
/// Takes the `input_utxos` and the public `output_balance`.
///
/// Returns `true` iff the sum of all input UTXO amounts equals
/// `output_balance`, otherwise `false`.
pub fn check_balances_de(input_utxos: &[UTXO], output_balance: u128) -> bool {
    let total_input = input_utxos
        .iter()
        .fold(0u128, |acc, utxo| acc + utxo.amount);
    total_input == output_balance
}
pub fn deshielded_circuit(
input_utxos: &[UTXO],
output_balance: u128,
public_context: &PublicSCContext,
) -> Vec<Vec<u8>> {
assert!(check_balances_de(input_utxos, output_balance));
let in_commitments = generate_commitments(&input_utxos);
let mut in_nullifiers = vec![];
for in_utxo in input_utxos {
let nullifier_public_key = public_context
.account_masks
.get(&in_utxo.owner)
.unwrap()
.nullifier_public_key;
let key_ser = serde_json::to_vec(&nullifier_public_key).unwrap();
in_nullifiers.push(generate_nullifiers(in_utxo, &key_ser));
}
for in_commitment in in_commitments {
assert!(validate_in_commitments_tree(
&in_commitment,
&public_context.commitments_tree,
));
}
for nullifier in in_nullifiers.iter() {
let nullifier: [u8; 32] = nullifier.clone().try_into().unwrap();
assert!(!public_context.nullifiers_set.contains(&nullifier));
}
in_nullifiers
}
#[allow(unused)]
@ -114,16 +181,7 @@ pub fn commit(comm: &CommitmentSecrets, tag: Tag) -> PedersenCommitment {
PedersenCommitment::new(SECP256K1, comm.value, comm.value_blinding_factor, generator)
}
// Check balances
// takes the public_info and output_utxos[],
// returns the True if the token amount in public_info matches the sum of all output_utxos[], otherwise return False.
pub fn check_balances(public_info: u128, output_utxos: &[UTXO]) -> bool {
let total_output: u128 = output_utxos.iter().map(|utxo| utxo.amount).sum();
public_info == total_output
}
// new_commitment for a Vec of values
/// new_commitment for a Vec of values
pub fn pedersen_commitment_vec(
public_info_vec: Vec<u64>,
) -> (Tweak, [u8; 32], Vec<PedersenCommitment>) {
@ -149,10 +207,11 @@ pub fn pedersen_commitment_vec(
(generator_blinding_factor, random_val, vec_commitments)
}
// Verify Pedersen commitment
// takes the public_info, secret_r and pedersen_commitment and
// checks that commitment(public_info,secret_r) is equal pedersen_commitment where the commitment is pedersen commitment.
/// Verify Pedersen commitment
///
/// takes the public_info, secret_r and pedersen_commitment and
///
/// checks that commitment(public_info,secret_r) is equal pedersen_commitment where the commitment is pedersen commitment.
pub fn verify_commitment(
public_info: u64,
secret_r: &[u8],
@ -170,95 +229,69 @@ pub fn verify_commitment(
commitment == *pedersen_commitment
}
#[allow(unused)]
fn de_kernel(
root_commitment: &[u8],
root_nullifier: [u8; 32],
public_info: u64,
input_utxos: &[UTXO],
in_commitments_proof: &[Vec<u8>],
nullifiers_proof: &[[u8; 32]],
nullifier_secret_key: Scalar,
) -> (Vec<u8>, Vec<Vec<u8>>) {
check_balances(public_info as u128, input_utxos);
let nullifiers: Vec<_> = input_utxos
.into_iter()
.map(|utxo| generate_nullifiers(&utxo, &nullifier_secret_key.to_bytes()))
.collect();
let in_commitments = generate_commitments(&input_utxos);
for in_commitment in in_commitments {
validate_in_commitments_proof(
&in_commitment,
root_commitment.to_vec(),
in_commitments_proof,
);
}
for nullifier in nullifiers.iter() {
validate_nullifier_not_present_in_set_items(
nullifier[0..32].try_into().unwrap(),
nullifiers_proof,
);
}
(vec![], nullifiers)
}
// Validate inclusion proof for in_commitments
// takes the pedersen_commitment as a leaf, the root hash root_commitment and the path in_commitments_proof[],
// returns True if the pedersen_commitment is in the tree with root hash root_commitment
// otherwise
// returns False, as membership proof.
pub fn validate_in_commitments_proof_se(
_pedersen_commitment: &PedersenCommitment,
_root_commitment: Vec<u8>,
_in_commitments_proof: &[Vec<u8>],
/// Validate inclusion proof for pedersen_commitment
///
/// ToDo: Solve this in a more scalable way
pub fn validate_in_commitments_tree_se(
pedersen_commitment: &PedersenCommitment,
commitment_tree: &UTXOCommitmentsMerkleTree,
) -> bool {
// ToDo: Implement correct check
let alighned_hash: [u8; 32] = pedersen_commitment.serialize()[0..32].try_into().unwrap();
todo!()
commitment_tree.get_proof(alighned_hash).is_some()
}
// Generate nullifiers SE
/// Generate nullifier SE
///
/// takes the pedersen_commitment and npk then
/// returns a nullifier, where the nullifier = poseidon_hash(pedersen_commitment || npk)
pub fn generate_nullifiers_se(pedersen_commitment: &PedersenCommitment, npk: &[u8]) -> Vec<u8> {
let commitment_ser = pedersen_commitment.serialize().to_vec();
// takes the pedersen_commitment and nsk then
// returns a list of nullifiers, where the nullifier = hash(pedersen_commitment || nsk) where the hash function will be determined
pub fn generate_nullifiers_se(pedersen_commitment: &PedersenCommitment, nsk: &[u8]) -> Vec<u8> {
let mut input = pedersen_commitment.serialize().to_vec();
input.extend_from_slice(nsk);
hash(&input)
poseidon_hash(&[&commitment_ser, npk]).unwrap().to_vec()
}
#[allow(unused)]
fn se_kernel(
root_commitment: &[u8],
root_nullifier: [u8; 32],
/// Check balances SE (shielding).
///
/// Takes the public `input_balance` and the `output_utxos`.
///
/// Returns `true` iff `input_balance` equals the sum of all output UTXO
/// amounts, otherwise `false`.
pub fn check_balances_se(input_balance: u128, output_utxos: &[UTXO]) -> bool {
    let mut total_output: u128 = 0;
    for utxo in output_utxos {
        total_output += utxo.amount;
    }
    input_balance == total_output
}
pub fn shielded_circuit(
public_info: u64,
output_utxos: &[UTXO],
pedersen_commitment: PedersenCommitment,
secret_r: &[u8],
output_utxos: &[UTXO],
in_commitments_proof: &[Vec<u8>],
nullifiers_proof: &[[u8; 32]],
nullifier_secret_key: Scalar,
) -> (Vec<u8>, Vec<Vec<u8>>, Vec<u8>) {
check_balances(public_info as u128, output_utxos);
public_context: &PublicSCContext,
) -> (Vec<Vec<u8>>, Vec<u8>) {
assert!(check_balances_se(public_info as u128, output_utxos));
let out_commitments = generate_commitments(output_utxos);
let nullifier = generate_nullifiers_se(&pedersen_commitment, &nullifier_secret_key.to_bytes());
let nullifier_public_key = public_context
.account_masks
.get(&public_context.caller_address)
.unwrap()
.nullifier_public_key;
validate_in_commitments_proof_se(
let key_ser = serde_json::to_vec(&nullifier_public_key).unwrap();
let nullifier = generate_nullifiers_se(&pedersen_commitment, &key_ser);
assert!(validate_in_commitments_tree_se(
&pedersen_commitment,
root_commitment.to_vec(),
in_commitments_proof,
);
&public_context.commitments_tree,
));
verify_commitment(public_info, secret_r, &pedersen_commitment);
assert!(verify_commitment(
public_info,
secret_r,
&pedersen_commitment
));
(vec![], out_commitments, nullifier)
(out_commitments, nullifier)
}

View File

@ -1,7 +1,7 @@
use std::collections::BTreeMap;
use std::collections::{BTreeMap, HashSet};
use accounts::account_core::{AccountAddress, AccountPublicMask};
use common::merkle_tree_public::TreeHashType;
use common::merkle_tree_public::{merkle_tree::UTXOCommitmentsMerkleTree, TreeHashType};
use serde::{ser::SerializeStruct, Serialize};
pub const PUBLIC_SC_CONTEXT: &str = "PublicSCContext";
@ -11,6 +11,8 @@ pub const ACCOUNT_MASKS_KEYS_SORTED: &str = "account_masks_keys_sorted";
pub const ACCOUNT_MASKS_VALUES_SORTED: &str = "account_masks_values_sorted";
pub const COMMITMENT_STORE_ROOT: &str = "commitment_store_root";
pub const PUT_TX_STORE_ROOT: &str = "put_tx_store_root";
pub const COMMITMENT_TREE: &str = "commitments_tree";
pub const NULLIFIERS_SET: &str = "nullifiers_set";
///Structure representing the context given to a smart contract on a call
pub struct PublicSCContext {
@ -19,6 +21,8 @@ pub struct PublicSCContext {
pub account_masks: BTreeMap<AccountAddress, AccountPublicMask>,
pub comitment_store_root: TreeHashType,
pub pub_tx_store_root: TreeHashType,
pub commitments_tree: UTXOCommitmentsMerkleTree,
pub nullifiers_set: HashSet<[u8; 32]>,
}
impl Serialize for PublicSCContext {
@ -41,6 +45,8 @@ impl Serialize for PublicSCContext {
s.serialize_field(ACCOUNT_MASKS_VALUES_SORTED, &account_mask_values)?;
s.serialize_field(COMMITMENT_STORE_ROOT, &self.comitment_store_root)?;
s.serialize_field(PUT_TX_STORE_ROOT, &self.pub_tx_store_root)?;
s.serialize_field(COMMITMENT_TREE, &self.commitments_tree)?;
s.serialize_field(NULLIFIERS_SET, &self.nullifiers_set)?;
s.end()
}
@ -92,6 +98,7 @@ impl PublicSCContext {
#[cfg(test)]
mod tests {
use accounts::account_core::Account;
use common::utxo_commitment::UTXOCommitment;
use super::*;
@ -100,6 +107,11 @@ mod tests {
let comitment_store_root = [3; 32];
let pub_tx_store_root = [4; 32];
let commitments_tree =
UTXOCommitmentsMerkleTree::new(vec![UTXOCommitment { hash: [5; 32] }]);
let mut nullifiers_set = HashSet::new();
nullifiers_set.insert([6; 32]);
let mut account_masks = BTreeMap::new();
let acc_1 = Account::new();
@ -116,6 +128,8 @@ mod tests {
account_masks,
comitment_store_root,
pub_tx_store_root,
commitments_tree,
nullifiers_set,
}
}

View File

@ -1,4 +1,4 @@
use accounts::account_core::Account;
use accounts::{account_core::Account, key_management::ephemeral_key_holder::EphemeralKeyHolder};
use anyhow::Result;
use common::transaction::{TransactionPayload, TxKind};
use rand::thread_rng;
@ -40,7 +40,7 @@ pub fn encode_utxos_to_receivers(
let mut all_encoded_data = vec![];
for (utxo, receiver) in utxos_receivers {
let ephm_key_holder = &receiver.produce_ephemeral_key_holder();
let ephm_key_holder = EphemeralKeyHolder::new_os_random();
let encoded_data = Account::encrypt_data(
&ephm_key_holder,

View File

@ -4,6 +4,7 @@ version = "0.1.0"
edition = "2021"
[dependencies]
hex.workspace = true
anyhow.workspace = true
serde_json.workspace = true
env_logger.workspace = true

View File

@ -1,6 +1,15 @@
use serde::{Deserialize, Serialize};
use std::path::PathBuf;
use serde::{Deserialize, Serialize};
//
#[derive(Debug, Serialize, Deserialize, Clone)]
///Helper struct for account serialization
pub struct AccountInitialData {
///Hex encoded `AccountAddress`
pub addr: String,
pub balance: u64,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SequencerConfig {
@ -18,4 +27,6 @@ pub struct SequencerConfig {
pub block_create_timeout_millis: u64,
///Port to listen
pub port: u16,
///List of pairs (account_address, initial_balance)
pub initial_accounts: Vec<AccountInitialData>,
}

View File

@ -1,5 +1,6 @@
use std::fmt::Display;
use accounts::account_core::AccountAddress;
use anyhow::Result;
use common::{
block::{Block, HashableBlockData},
@ -10,7 +11,7 @@ use common::{
};
use config::SequencerConfig;
use mempool::MemPool;
use sequencer_store::{accounts_store::AccountPublicData, SequecerChainStore};
use sequencer_store::SequecerChainStore;
use serde::{Deserialize, Serialize};
use transaction_mempool::TransactionMempool;
@ -52,6 +53,7 @@ impl SequencerCore {
&config.home,
config.genesis_id,
config.is_genesis_random,
&config.initial_accounts,
),
mempool: MemPool::<TransactionMempool>::default(),
chain_height: config.genesis_id,
@ -209,11 +211,8 @@ impl SequencerCore {
Ok(())
}
pub fn register_account(&mut self, acc_data: AccountPublicData) {
self.store
.acc_store
.accounts
.insert(acc_data.address, acc_data);
pub fn register_account(&mut self, account_addr: AccountAddress) {
self.store.acc_store.register_account(account_addr);
}
///Produces new block from transactions in mempool
@ -254,6 +253,8 @@ impl SequencerCore {
#[cfg(test)]
mod tests {
use crate::config::AccountInitialData;
use super::*;
use std::path::PathBuf;
@ -262,7 +263,9 @@ mod tests {
use secp256k1_zkp::Tweak;
use transaction_mempool::TransactionMempool;
fn setup_sequencer_config() -> SequencerConfig {
fn setup_sequencer_config_variable_initial_accounts(
initial_accounts: Vec<AccountInitialData>,
) -> SequencerConfig {
let mut rng = rand::thread_rng();
let random_u8: u8 = rng.gen();
@ -276,9 +279,27 @@ mod tests {
max_num_tx_in_block: 10,
block_create_timeout_millis: 1000,
port: 8080,
initial_accounts,
}
}
///Builds the default test config: two well-known accounts with distinct balances.
fn setup_sequencer_config() -> SequencerConfig {
    let default_accounts = vec![
        AccountInitialData {
            addr: String::from("bfd91e6703273a115ad7f099ef32f621243be69369d00ddef5d3a25117d09a8c"),
            balance: 10,
        },
        AccountInitialData {
            addr: String::from("20573479053979b98d2ad09ef31a0750f22c77709bed51c4e64946bd1e376f31"),
            balance: 100,
        },
    ];
    setup_sequencer_config_variable_initial_accounts(default_accounts)
}
fn create_dummy_transaction(
hash: TreeHashType,
nullifier_created_hashes: Vec<[u8; 32]>,
@ -306,12 +327,14 @@ mod tests {
}
}
fn common_setup(mut sequencer: &mut SequencerCore) {
fn common_setup(sequencer: &mut SequencerCore) {
let tx = create_dummy_transaction([12; 32], vec![[9; 32]], vec![[7; 32]], vec![[8; 32]]);
let tx_mempool = TransactionMempool { tx };
sequencer.mempool.push_item(tx_mempool);
sequencer.produce_new_block_with_mempool_transactions();
sequencer
.produce_new_block_with_mempool_transactions()
.unwrap();
}
#[test]
@ -322,6 +345,95 @@ mod tests {
assert_eq!(sequencer.chain_height, config.genesis_id);
assert_eq!(sequencer.sequencer_config.max_num_tx_in_block, 10);
assert_eq!(sequencer.sequencer_config.port, 8080);
let acc1_addr: [u8; 32] = hex::decode(
"bfd91e6703273a115ad7f099ef32f621243be69369d00ddef5d3a25117d09a8c".to_string(),
)
.unwrap()
.try_into()
.unwrap();
let acc2_addr: [u8; 32] = hex::decode(
"20573479053979b98d2ad09ef31a0750f22c77709bed51c4e64946bd1e376f31".to_string(),
)
.unwrap()
.try_into()
.unwrap();
assert!(sequencer.store.acc_store.contains_account(&acc1_addr));
assert!(sequencer.store.acc_store.contains_account(&acc2_addr));
assert_eq!(
10,
sequencer
.store
.acc_store
.get_account_balance(&acc1_addr)
.unwrap()
);
assert_eq!(
100,
sequencer
.store
.acc_store
.get_account_balance(&acc2_addr)
.unwrap()
);
}
#[test]
fn test_start_different_intial_accounts() {
    //Helper: turn a hex string into a raw 32-byte account address.
    let decode_addr =
        |hex_addr: &str| -> [u8; 32] { hex::decode(hex_addr).unwrap().try_into().unwrap() };

    let initial_accounts = vec![
        AccountInitialData {
            addr: "bfd91e6703273a115ad7f099ef32f621243be69369d00ddef5d3a25117ffffff"
                .to_string(),
            balance: 1000,
        },
        AccountInitialData {
            addr: "20573479053979b98d2ad09ef31a0750f22c77709bed51c4e64946bd1effffff"
                .to_string(),
            balance: 1000,
        },
    ];
    let intial_accounts_len = initial_accounts.len();
    let config = setup_sequencer_config_variable_initial_accounts(initial_accounts);
    let sequencer = SequencerCore::start_from_config(config.clone());

    let acc1_addr =
        decode_addr("bfd91e6703273a115ad7f099ef32f621243be69369d00ddef5d3a25117ffffff");
    let acc2_addr =
        decode_addr("20573479053979b98d2ad09ef31a0750f22c77709bed51c4e64946bd1effffff");

    //Exactly the configured accounts must exist, each holding its initial balance.
    assert_eq!(sequencer.store.acc_store.len(), intial_accounts_len);
    for addr in [acc1_addr, acc2_addr] {
        assert!(sequencer.store.acc_store.contains_account(&addr));
        assert_eq!(
            sequencer.store.acc_store.get_account_balance(&addr),
            Some(1000)
        );
    }
}
#[test]

View File

@ -1,53 +1,216 @@
use accounts::account_core::{AccountAddress, PublicKey};
use k256::AffinePoint;
use accounts::account_core::AccountAddress;
use anyhow::Result;
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
#[derive(Debug, Clone)]
pub struct AccountPublicData {
pub nullifier_public_key: PublicKey,
pub viewing_public_key: PublicKey,
#[derive(Debug, Clone, Serialize, Deserialize)]
pub(crate) struct AccountPublicData {
pub balance: u64,
pub address: AccountAddress,
}
impl AccountPublicData {
pub fn from_raw(
address: AccountAddress,
nullifier_public_key: Vec<u8>,
viewing_public_key: Vec<u8>,
) -> Self {
pub fn new(address: AccountAddress) -> Self {
Self {
nullifier_public_key: serde_json::from_slice::<AffinePoint>(&nullifier_public_key)
.unwrap(),
viewing_public_key: serde_json::from_slice::<AffinePoint>(&viewing_public_key).unwrap(),
balance: 0,
address,
}
}
///Build an `AccountPublicData` entry for `address` with a preset public `balance`.
fn new_with_balance(address: AccountAddress, balance: u64) -> Self {
    Self { balance, address }
}
}
#[derive(Debug, Clone)]
pub struct SequencerAccountsStore {
pub accounts: HashMap<AccountAddress, AccountPublicData>,
accounts: HashMap<AccountAddress, AccountPublicData>,
}
impl SequencerAccountsStore {
pub fn new() -> Self {
Self {
accounts: HashMap::new(),
pub fn new(initial_accounts: &[(AccountAddress, u64)]) -> Self {
let mut accounts = HashMap::new();
for (account_addr, balance) in initial_accounts {
accounts.insert(
*account_addr,
AccountPublicData::new_with_balance(*account_addr, *balance),
);
}
Self { accounts }
}
///Register new account in accounts store
///
///The account starts with a zero public balance; registering an address
///that is already present overwrites the existing entry.
pub fn register_account(&mut self, account_addr: AccountAddress) {
    let fresh_entry = AccountPublicData::new(account_addr);
    self.accounts.insert(account_addr, fresh_entry);
}
///Returns `true` when `account_addr` is registered in the account store.
pub fn contains_account(&self, account_addr: &AccountAddress) -> bool {
    self.accounts.contains_key(account_addr)
}
///Look up the public balance of `account_addr`.
///
///Returns `None` when the account address is not registered.
pub fn get_account_balance(&self, account_addr: &AccountAddress) -> Option<u64> {
    let entry = self.accounts.get(account_addr)?;
    Some(entry.balance)
}
///Remove account from storage
///
///Fails if the account's `balance` is != 0
///
///Returns `Ok(None)` when `account_addr` was not present in the store,
///otherwise `Ok(Some(address))` of the removed account.
pub fn unregister_account(
    &mut self,
    account_addr: AccountAddress,
) -> Result<Option<AccountAddress>> {
    match self.get_account_balance(&account_addr) {
        //Unknown account: nothing to remove, signalled via `None`.
        None => Ok(None),
        //Only empty accounts may be removed.
        Some(0) => Ok(self.accounts.remove(&account_addr).map(|data| data.address)),
        Some(_) => anyhow::bail!("Chain consistency violation: It is forbidden to remove account with nonzero balance"),
    }
}
pub fn register_account(&mut self, account_pub_data: AccountPublicData) {
self.accounts
.insert(account_pub_data.address, account_pub_data);
}
pub fn unregister_account(&mut self, account_addr: AccountAddress) {
self.accounts.remove(&account_addr);
///Number of accounts currently present in the store.
pub fn len(&self) -> usize {
    self.accounts.len()
}
}
impl Default for SequencerAccountsStore {
fn default() -> Self {
Self::new()
Self::new(&[])
}
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_zero_balance_account_data_creation() {
        //`new` always starts an account at balance 0.
        let acc = AccountPublicData::new([1; 32]);
        assert_eq!((acc.balance, acc.address), (0, [1; 32]));
    }

    #[test]
    fn test_non_zero_balance_account_data_creation() {
        let acc = AccountPublicData::new_with_balance([1; 32], 10);
        assert_eq!((acc.balance, acc.address), (10, [1; 32]));
    }

    #[test]
    fn default_account_sequencer_store() {
        //A defaulted store must start without any registered accounts.
        assert!(SequencerAccountsStore::default().accounts.is_empty());
    }

    #[test]
    fn account_sequencer_store_register_acc() {
        let mut store = SequencerAccountsStore::default();
        store.register_account([1; 32]);
        assert!(store.contains_account(&[1; 32]));
        //Freshly registered accounts always carry a zero balance.
        assert_eq!(store.get_account_balance(&[1; 32]), Some(0));
    }

    #[test]
    fn account_sequencer_store_unregister_acc_not_present() {
        let mut store = SequencerAccountsStore::default();
        store.register_account([1; 32]);
        //Removing an address that was never registered yields `Ok(None)`.
        assert!(store.unregister_account([2; 32]).unwrap().is_none());
    }

    #[test]
    fn account_sequencer_store_unregister_acc_not_zero_balance() {
        let mut store = SequencerAccountsStore::new(&[([1; 32], 12), ([2; 32], 100)]);
        //Accounts holding funds must not be removable.
        assert!(store.unregister_account([1; 32]).is_err());
    }

    #[test]
    fn account_sequencer_store_unregister_acc() {
        let mut store = SequencerAccountsStore::default();
        store.register_account([1; 32]);
        assert!(store.contains_account(&[1; 32]));
        store.unregister_account([1; 32]).unwrap().unwrap();
        assert!(!store.contains_account(&[1; 32]));
    }

    #[test]
    fn account_sequencer_store_with_preset_accounts_1() {
        let store = SequencerAccountsStore::new(&[([1; 32], 12), ([2; 32], 100)]);
        for (addr, expected) in [([1; 32], 12u64), ([2; 32], 100)] {
            assert!(store.contains_account(&addr));
            assert_eq!(store.get_account_balance(&addr), Some(expected));
        }
    }

    #[test]
    fn account_sequencer_store_with_preset_accounts_2() {
        let store =
            SequencerAccountsStore::new(&[([6; 32], 120), ([7; 32], 15), ([8; 32], 10)]);
        for (addr, expected) in [([6; 32], 120u64), ([7; 32], 15), ([8; 32], 10)] {
            assert!(store.contains_account(&addr));
            assert_eq!(store.get_account_balance(&addr), Some(expected));
        }
    }

    #[test]
    fn account_sequencer_store_fetch_unknown_account() {
        let store =
            SequencerAccountsStore::new(&[([6; 32], 120), ([7; 32], 15), ([8; 32], 10)]);
        //Unknown addresses resolve to `None`, not a zero balance.
        assert!(store.get_account_balance(&[9; 32]).is_none());
    }
}

View File

@ -9,6 +9,8 @@ use common::{
};
use rand::{rngs::OsRng, RngCore};
use crate::config::AccountInitialData;
pub mod accounts_store;
pub mod block_store;
@ -21,8 +23,28 @@ pub struct SequecerChainStore {
}
impl SequecerChainStore {
pub fn new_with_genesis(home_dir: &Path, genesis_id: u64, is_genesis_random: bool) -> Self {
let acc_store = SequencerAccountsStore::default();
pub fn new_with_genesis(
home_dir: &Path,
genesis_id: u64,
is_genesis_random: bool,
initial_accounts: &[AccountInitialData],
) -> Self {
let acc_data_decoded: Vec<([u8; 32], u64)> = initial_accounts
.iter()
.map(|acc_data| {
(
//ToDo: Handle this error for direct error message
//Failure to produce account address is critical, so error handling is needed only for clarity
hex::decode(acc_data.addr.clone())
.unwrap()
.try_into()
.unwrap(),
acc_data.balance,
)
})
.collect();
let acc_store = SequencerAccountsStore::new(&acc_data_decoded);
let nullifier_store = HashSet::new();
let utxo_commitments_store = UTXOCommitmentsMerkleTree::new(vec![]);
let pub_tx_store = PublicTransactionMerkleTree::new(vec![]);

View File

@ -1,5 +1,4 @@
use actix_web::Error as HttpError;
use sequencer_core::sequencer_store::accounts_store::AccountPublicData;
use serde_json::Value;
use common::rpc_primitives::{
@ -61,11 +60,7 @@ impl JsonHandler {
{
let mut acc_store = self.sequencer_state.lock().await;
acc_store.register_account(AccountPublicData::from_raw(
acc_req.address,
acc_req.nullifier_public_key,
acc_req.viewing_public_key,
));
acc_store.register_account(acc_req.address);
}
let helperstruct = RegisterAccountResponse {

View File

@ -5,5 +5,15 @@
"is_genesis_random": true,
"max_num_tx_in_block": 20,
"block_create_timeout_millis": 10000,
"port": 3040
"port": 3040,
"initial_accounts": [
{
"addr": "bfd91e6703273a115ad7f099ef32f621243be69369d00ddef5d3a25117d09a8c",
"balance": 10
},
{
"addr": "20573479053979b98d2ad09ef31a0750f22c77709bed51c4e64946bd1e376f31",
"balance": 100
}
]
}

View File

@ -49,7 +49,7 @@ impl DataBlob {
}
}
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)]
pub enum DataBlobChangeVariant {
Created {
id: usize,
@ -83,93 +83,66 @@ pub fn produce_blob_from_fit_vec(data: Vec<u8>) -> DataBlob {
blob
}
///Creates blob list from generic serializable state
///
///`ToDo`: Find a way to align data in a way, to minimize read and write operations in db
pub fn produce_blob_list_from_sc_public_state<S: Serialize>(
    state: &S,
) -> Result<Vec<DataBlob>, serde_json::Error> {
    let ser_data = serde_json::to_vec(state)?;
    //`ToDo` Replace with `next_chunk` usage, when feature stabilizes in Rust
    //NOTE: the chunk-index bound is inclusive, so when `ser_data.len()` is an
    //exact multiple of `SC_DATA_BLOB_SIZE` (including empty input) a trailing
    //empty blob is emitted — preserved here as-is.
    let chunk_count = ser_data.len() / SC_DATA_BLOB_SIZE + 1;
    let mut blob_list = Vec::with_capacity(chunk_count);
    for chunk_idx in 0..chunk_count {
        let start = chunk_idx * SC_DATA_BLOB_SIZE;
        let end = usize::min(start + SC_DATA_BLOB_SIZE, ser_data.len());
        blob_list.push(produce_blob_from_fit_vec(ser_data[start..end].to_vec()));
    }
    Ok(blob_list)
}
///Compare two consecutive in time blob lists to produce list of modified ids
pub fn compare_blob_lists(
    blob_list_old: &[DataBlob],
    blob_list_new: &[DataBlob],
) -> Vec<DataBlobChangeVariant> {
    let mut changes = Vec::new();
    let old_len = blob_list_old.len();
    let new_len = blob_list_new.len();
    //Slots past the end of the shorter list were wholly deleted or created.
    //These records are emitted before the per-slot comparisons, matching the
    //original output ordering.
    if old_len > new_len {
        changes.extend((new_len..old_len).map(|id| DataBlobChangeVariant::Deleted { id }));
    } else {
        changes.extend((old_len..new_len).map(|id| DataBlobChangeVariant::Created {
            id,
            blob: blob_list_new[id],
        }));
    }
    //Slots present in both lists: record a modification when contents differ.
    for (id, (old, new)) in blob_list_old.iter().zip(blob_list_new.iter()).enumerate() {
        if old != new {
            changes.push(DataBlobChangeVariant::Modified {
                id,
                blob_old: *old,
                blob_new: *new,
            });
        }
    }
    changes
}
#[cfg(test)]
mod tests {
use super::*;
use serde::Serialize;
use serde_json;
const TEST_BLOB_SIZE: usize = 256; // Define a test blob size for simplicity
static SC_DATA_BLOB_SIZE: usize = TEST_BLOB_SIZE;
//Ascending byte pattern, one byte per blob position (wraps past 255).
fn sample_vec() -> Vec<u8> {
    (0..SC_DATA_BLOB_SIZE).map(|i| i as u8).collect()
}

fn sample_data_blob() -> DataBlob {
    produce_blob_from_fit_vec(sample_vec())
}

#[test]
fn test_serialize_data_blob() {
    //A blob must serialize exactly like the byte vector it wraps.
    let blob_json = serde_json::to_string(&sample_data_blob()).unwrap();
    let vec_json = serde_json::to_string(&sample_vec()).unwrap();
    assert_eq!(blob_json, vec_json);
}

#[test]
fn test_deserialize_data_blob() {
    let bytes = sample_vec();
    let json = serde_json::to_string(&bytes).unwrap();
    let round_tripped: DataBlob = serde_json::from_str(&json).unwrap();
    assert_eq!(round_tripped.to_vec(), bytes);
}

#[test]
fn test_serialize_deserialize_data_blob_change_variant() {
    let blob_a = sample_data_blob();
    let blob_b = produce_blob_from_fit_vec((50..50 + SC_DATA_BLOB_SIZE as u8).collect());
    //Every change variant must survive a JSON round trip unchanged.
    for variant in [
        DataBlobChangeVariant::Created {
            id: 1,
            blob: blob_a,
        },
        DataBlobChangeVariant::Modified {
            id: 2,
            blob_old: blob_a,
            blob_new: blob_b,
        },
        DataBlobChangeVariant::Deleted { id: 3 },
    ] {
        let json = serde_json::to_string(&variant).unwrap();
        let back: DataBlobChangeVariant = serde_json::from_str(&json).unwrap();
        assert_eq!(variant, back);
    }
}
#[test]
fn test_produce_blob_from_fit_vec() {
let data = (0..0 + 255).collect();
@ -183,47 +156,4 @@ mod tests {
let data = vec![0; SC_DATA_BLOB_SIZE + 1];
let _ = produce_blob_from_fit_vec(data);
}
//Minimal serializable state used to exercise blob-list production.
#[derive(Serialize)]
struct TestState {
    a: u32,
    b: u32,
}

#[test]
fn test_produce_blob_list_from_sc_public_state() {
    let state = TestState { a: 42, b: 99 };
    let blobs = produce_blob_list_from_sc_public_state(&state).unwrap();
    //Even a tiny state must yield at least one blob.
    assert!(!blobs.is_empty());
}

#[test]
fn test_compare_blob_lists_created() {
    let before: Vec<DataBlob> = vec![];
    let after: Vec<DataBlob> = vec![[1; SC_DATA_BLOB_SIZE].into()];
    let changes = compare_blob_lists(&before, &after);
    assert_eq!(changes.len(), 1);
    assert!(matches!(changes[0], DataBlobChangeVariant::Created { .. }));
}

#[test]
fn test_compare_blob_lists_deleted() {
    let before: Vec<DataBlob> = vec![[1; SC_DATA_BLOB_SIZE].into()];
    let after: Vec<DataBlob> = vec![];
    let changes = compare_blob_lists(&before, &after);
    assert_eq!(changes.len(), 1);
    assert!(matches!(changes[0], DataBlobChangeVariant::Deleted { .. }));
}

#[test]
fn test_compare_blob_lists_modified() {
    let before: Vec<DataBlob> = vec![[1; SC_DATA_BLOB_SIZE].into()];
    let after: Vec<DataBlob> = vec![[2; SC_DATA_BLOB_SIZE].into()];
    let changes = compare_blob_lists(&before, &after);
    assert_eq!(changes.len(), 1);
    assert!(matches!(changes[0], DataBlobChangeVariant::Modified { .. }));
}
}

View File

@ -12,7 +12,7 @@ serde.workspace = true
thiserror.workspace = true
rand.workspace = true
risc0-zkvm = { git = "https://github.com/risc0/risc0.git", branch = "release-2.1" }
risc0-zkvm = { git = "https://github.com/risc0/risc0.git", branch = "release-2.3" }
test-methods = { path = "test_methods" }
[dependencies.accounts]

View File

@ -317,11 +317,16 @@ pub fn prove_mint_utxo_multiple_assets(
))
}
pub fn execute_mint_utxo(amount_to_mint: u128, owner: AccountAddress) -> anyhow::Result<UTXO> {
pub fn execute_mint_utxo(
amount_to_mint: u128,
owner: AccountAddress,
randomness: [u8; 32],
) -> anyhow::Result<UTXO> {
let mut builder = ExecutorEnv::builder();
builder.write(&amount_to_mint)?;
builder.write(&owner)?;
builder.write(&randomness)?;
let env = builder.build()?;
@ -420,6 +425,8 @@ pub fn verify(receipt: Receipt, image_id: impl Into<Digest>) -> anyhow::Result<(
#[cfg(test)]
mod tests {
use crate::gas_calculator::GasCalculator;
use super::*;
use test_methods::BIG_CALCULATION_ELF;
use test_methods::{MULTIPLICATION_ELF, MULTIPLICATION_ID};
@ -432,7 +439,7 @@ mod tests {
let (digest, receipt) = prove(vec![message, message_2], SUMMATION_ELF).unwrap();
verify(receipt, SUMMATION_ID);
verify(receipt, SUMMATION_ID).unwrap();
assert_eq!(digest, message + message_2);
}
@ -443,7 +450,7 @@ mod tests {
let (digest, receipt) = prove(vec![message, message_2], SUMMATION_ELF).unwrap();
verify(receipt, SUMMATION_ID);
verify(receipt, SUMMATION_ID).unwrap();
assert_eq!(digest, message + message_2);
}
@ -454,7 +461,7 @@ mod tests {
let (digest, receipt) = prove(vec![message, message_2], MULTIPLICATION_ELF).unwrap();
verify(receipt, MULTIPLICATION_ID);
verify(receipt, MULTIPLICATION_ID).unwrap();
assert_eq!(digest, message * message_2);
}
@ -465,7 +472,7 @@ mod tests {
let (digest, receipt) = prove(vec![message, message_2], MULTIPLICATION_ELF).unwrap();
verify(receipt, MULTIPLICATION_ID);
verify(receipt, MULTIPLICATION_ID).unwrap();
assert_eq!(digest, message * message_2);
}
@ -514,4 +521,108 @@ mod tests {
res
}
#[test]
fn test_gas_limits_check_sufficient_funds() {
    //A generous budget must allow the summation guest to execute.
    let inputs = vec![1, 2];
    let gas_calc = GasCalculator::new(1, 1, 1, 1, 1, 1000000, 1000000);
    assert!(gas_limits_check(inputs, SUMMATION_ELF, &gas_calc, 1000000).is_ok());
}

#[test]
fn test_gas_limits_check_insufficient_funds() {
    //A budget of a single gas unit cannot cover the summation guest.
    let inputs = vec![1, 2];
    let gas_calc = GasCalculator::new(1, 1, 1, 1, 1, 1000000, 1000000);
    let outcome = gas_limits_check(inputs, SUMMATION_ELF, &gas_calc, 1);
    assert!(matches!(
        outcome,
        Err(ExecutionFailureKind::InsufficientFundsError)
    ));
}

#[test]
fn test_execute_mint_utxo() {
    let owner = AccountAddress::default();
    let amount = 123456789;
    //Fresh OS randomness feeds the mint execution.
    let mut randomness = [0u8; 32];
    OsRng.fill_bytes(&mut randomness);
    let minted = execute_mint_utxo(amount, owner, randomness).expect("execution failed");
    assert_eq!((minted.amount, minted.owner), (amount, owner));
}
#[test]
fn test_prove_mint_utxo() {
    let owner = AccountAddress::default();
    let amount = 123456789;
    let (minted, _receipt) = prove_mint_utxo(amount, owner).expect("proof failed");
    assert_eq!(minted.amount, amount);
    assert_eq!(minted.owner, owner);
}

#[test]
fn test_prove_send_utxo() {
    let owner = AccountAddress::default();
    let amount = 100;
    let (input_utxo, _) = prove_mint_utxo(amount, owner).expect("mint failed");
    //Split the minted note into two parts that must add back up to `amount`.
    let parts = vec![(40, owner), (60, owner)];
    let (outputs, _receipt) = prove_send_utxo(input_utxo, parts.clone()).expect("send failed");
    assert_eq!(outputs.len(), 2);
    let conserved: u128 = outputs.iter().map(|(utxo, _)| utxo.amount).sum();
    assert_eq!(conserved, amount);
}

#[test]
fn test_prove_send_utxo_deshielded() {
    let owner = AccountAddress::default();
    let amount = 100;
    let (minted, _) = prove_mint_utxo(amount, owner).unwrap();
    let parts = vec![(60, owner), (40, owner)];
    let (outputs, _) = prove_send_utxo_deshielded(minted, parts.clone()).unwrap();
    assert_eq!(outputs.len(), 2);
    //Deshielded outputs carry plain amounts; total value is conserved.
    let conserved: u128 = outputs.iter().map(|(amt, _)| amt).sum();
    assert_eq!(conserved, amount);
}

#[test]
fn test_prove_send_utxo_shielded() {
    let owner = AccountAddress::default();
    let amount = 100;
    let parts = vec![(60, owner), (40, owner)];
    let (outputs, _) = prove_send_utxo_shielded(owner, amount, parts.clone()).unwrap();
    assert_eq!(outputs.len(), 2);
    let conserved: u128 = outputs.iter().map(|(utxo, _)| utxo.amount).sum();
    assert_eq!(conserved, amount);
}

#[test]
fn test_prove_send_utxo_multiple_assets_one_receiver() {
    let owner = AccountAddress::default();
    let receiver = AccountAddress::default();
    //Two minted notes of different denominations to be spent together.
    let utxos = vec![
        prove_mint_utxo(100, owner).unwrap().0,
        prove_mint_utxo(50, owner).unwrap().0,
    ];
    let (to_receiver, to_change, _receipt) =
        prove_send_utxo_multiple_assets_one_receiver(utxos, 1, receiver).unwrap();
    let receiver_total: u128 = to_receiver.iter().map(|u| u.amount).sum();
    assert!(receiver_total > 0);
    assert_eq!(to_receiver.len() + to_change.len(), 2);
}
}

View File

@ -4,7 +4,7 @@ version = "0.1.0"
edition = "2021"
[build-dependencies]
risc0-build = { git = "https://github.com/risc0/risc0.git", branch = "release-2.1" }
risc0-build = { git = "https://github.com/risc0/risc0.git", branch = "release-2.3" }
[package.metadata.risc0]
methods = ["guest"]