Merge branch 'main' into Pravdyvy/state-transition-token-transfer

This commit is contained in:
Oleksandr Pravdyvyi 2025-07-23 19:03:54 +03:00
commit 262531799b
No known key found for this signature in database
GPG Key ID: 9F8955C63C443871
20 changed files with 115 additions and 172 deletions

View File

@ -14,35 +14,13 @@ on:
name: General
jobs:
build-ubuntu-latest:
ubuntu-latest-pipeline:
runs-on: ubuntu-latest
timeout-minutes: 60
name: build - ubuntu-latest
name: ubuntu-latest-pipeline
steps:
- uses: actions/checkout@v3
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: nightly
override: true
- name: build - ubuntu-latest
if: success() || failure()
run: chmod 777 ./ci_scripts/build-ubuntu.sh && ./ci_scripts/build-ubuntu.sh
lint:
strategy:
matrix:
platform: [ ubuntu-latest ]
runs-on: ${{ matrix.platform }}
timeout-minutes: 60
name: lint - ${{ matrix.crate }} - ${{ matrix.platform }}
steps:
- name: Checkout sources
uses: actions/checkout@v3
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
@ -53,25 +31,6 @@ jobs:
- name: lint - ubuntu-latest
if: success() || failure()
run: chmod 777 ./ci_scripts/lint-ubuntu.sh && ./ci_scripts/lint-ubuntu.sh
test:
strategy:
matrix:
platform: [ ubuntu-latest ]
runs-on: ${{ matrix.platform }}
timeout-minutes: 60
name: test - ${{ matrix.crate }} - ${{ matrix.platform }}
steps:
- name: Checkout sources
uses: actions/checkout@v3
- name: Install stable toolchain
uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: nightly
override: true
- name: test ubuntu-latest
if: success() || failure()
run: chmod 777 ./ci_scripts/test-ubuntu.sh && ./ci_scripts/test-ubuntu.sh

View File

@ -61,7 +61,7 @@ impl AddressKeyHolder {
pub fn get_pub_account_signing_key(&self) -> SigningKey {
let field_bytes = FieldBytes::from_slice(&self.pub_account_signing_key);
// TODO: remove unwrap
SigningKey::from_bytes(&field_bytes).unwrap()
SigningKey::from_bytes(field_bytes).unwrap()
}
pub fn calculate_shared_secret_receiver(
@ -183,7 +183,7 @@ mod tests {
.decrypt_data(
ephemeral_public_key_sender,
CipherText::from(ciphertext),
nonce.clone(),
*nonce,
)
.unwrap();
@ -203,7 +203,7 @@ mod tests {
assert!(!Into::<bool>::into(
address_key_holder.viewing_public_key.is_identity()
));
assert!(address_key_holder.address.as_slice().len() > 0); // Assume TreeHashType has non-zero length for a valid address
assert!(!address_key_holder.address.as_slice().is_empty()); // Assume TreeHashType has non-zero length for a valid address
}
#[test]
@ -245,7 +245,7 @@ mod tests {
.decrypt_data(
ephemeral_public_key_sender,
CipherText::from(ciphertext.clone()),
incorrect_nonce.clone(),
*incorrect_nonce,
)
.unwrap();
@ -281,7 +281,7 @@ mod tests {
.decrypt_data(
ephemeral_public_key_sender,
CipherText::from(corrupted_ciphertext),
nonce.clone(),
*nonce,
)
.unwrap();
@ -314,7 +314,7 @@ mod tests {
.decrypt_data(
ephemeral_public_key_sender,
CipherText::from(ciphertext),
nonce.clone(),
*nonce,
)
.unwrap();

View File

@ -1,9 +1,10 @@
set -e
curl -L https://risczero.com/install | bash
/home/runner/.risc0/bin/rzup install
source env.sh
cargo install taplo-cli --locked
cargo fmt -- --check
taplo fmt --check
taplo fmt --check
export RISC0_SKIP_BUILD=1
cargo clippy --workspace --all-targets -- -D warnings

View File

@ -447,7 +447,7 @@ mod tests {
/// A helper for the `broken` test.
///
/// Check that the given JSON string parses, but is not recognized as a valid RPC message.
///
/// Tests that things which are almost, but not entirely, JSON-RPC are rejected.
///
/// The rejection is done by returning the input as `Unmatched`.
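
A hedged sketch of the check such a helper performs; `classify_message` and the `Message::Unmatched` variant are hypothetical stand-ins for the crate's actual API:

fn assert_parses_but_is_unmatched(raw: &str) {
    // The input must be syntactically valid JSON...
    let value: serde_json::Value =
        serde_json::from_str(raw).expect("input should parse as JSON");
    // ...yet must not be recognized as a JSON-RPC message, i.e. it is
    // classified as Unmatched (classify_message is hypothetical here).
    assert!(matches!(classify_message(value), Message::Unmatched(_)));
}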

View File

@ -54,7 +54,7 @@ mod tests {
let mut store = NodeAccountsStore::new();
let account = create_sample_account(100);
let account_addr = account.address.clone();
let account_addr = account.address;
store.register_account(account);
@ -68,7 +68,7 @@ mod tests {
let mut store = NodeAccountsStore::new();
let account = create_sample_account(100);
let account_addr = account.address.clone();
let account_addr = account.address;
store.register_account(account);
assert_eq!(store.accounts.len(), 1);
@ -94,8 +94,8 @@ mod tests {
let account1 = create_sample_account(100);
let account2 = create_sample_account(200);
let address_1 = account1.address.clone();
let address_2 = account2.address.clone();
let address_1 = account1.address;
let address_2 = account2.address;
store.register_account(account1);
store.register_account(account2);

View File

@ -145,8 +145,8 @@ mod tests {
fn create_sample_block(block_id: u64, prev_block_id: u64) -> Block {
Block {
block_id: block_id,
prev_block_id: prev_block_id,
block_id,
prev_block_id,
prev_block_hash: [0; 32],
hash: [1; 32],
transactions: vec![],
@ -211,7 +211,7 @@ mod tests {
// The genesis block should be available on reload
let result = node_store.get_block_at_id(0);
assert!(!result.is_err());
assert!(result.is_ok());
}
#[test]

View File

@ -80,7 +80,7 @@ impl NodeChainStore {
Ok((
Self {
acc_map: From::from(acc_map),
acc_map,
block_store,
nullifier_store,
utxo_commitments_store,
@ -226,7 +226,7 @@ impl NodeChainStore {
self.block_store.put_block_at_id(block)?;
//Snapshot
if block_id % self.node_config.shapshot_frequency_in_blocks == 0 {
if block_id.is_multiple_of(self.node_config.shapshot_frequency_in_blocks) {
//Serializing all important data structures
//If serialization fails, that is not a reason to stop running
@ -258,8 +258,7 @@ impl NodeChainStore {
);
info!(
"Snapshot executed at {:?} with results {snapshot_trace:#?}",
block_id
"Snapshot executed at {block_id:?} with results {snapshot_trace:#?}"
);
}
}
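
A side note on the pattern in this hunk — snapshot every N blocks, but never let a failed snapshot abort block processing. A minimal sketch, with `take_snapshot` as a hypothetical stand-in for the serialization calls:

if block_id.is_multiple_of(self.node_config.shapshot_frequency_in_blocks) {
    // Snapshot errors are logged and swallowed: a failed serialization is
    // not a reason to stop the node.
    if let Err(err) = take_snapshot(block_id) {
        warn!("Snapshot at block {block_id} failed: {err:?}");
    }
}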
@ -344,8 +343,8 @@ mod tests {
fn create_sample_block(block_id: u64, prev_block_id: u64) -> Block {
Block {
block_id: block_id,
prev_block_id: prev_block_id,
block_id,
prev_block_id,
prev_block_hash: [0; 32],
hash: [1; 32],
transactions: vec![],
@ -453,9 +452,6 @@ mod tests {
assert_eq!(block_id, 1);
assert_eq!(recovered_store.acc_map.len(), 1);
assert_eq!(
recovered_store.utxo_commitments_store.get_root().is_some(),
true
);
assert!(recovered_store.utxo_commitments_store.get_root().is_some());
}
}

View File

@ -41,7 +41,7 @@ pub mod sequencer_client;
fn vec_u8_to_vec_u64(bytes: Vec<u8>) -> Vec<u64> {
// Pad with zeros to make sure it's a multiple of 8
let mut padded = bytes.clone();
while padded.len() % 8 != 0 {
while !padded.len().is_multiple_of(8) {
padded.push(0);
}
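
For reference, a self-contained sketch of how the full helper presumably reads after this change; treating each 8-byte chunk as little-endian is an assumption, not something the hunk above confirms:

fn vec_u8_to_vec_u64(bytes: Vec<u8>) -> Vec<u64> {
    // Pad with zeros so the length is a multiple of 8.
    let mut padded = bytes;
    while !padded.len().is_multiple_of(8) {
        padded.push(0);
    }
    // Reinterpret each 8-byte chunk as a u64 (little-endian assumed).
    padded
        .chunks_exact(8)
        .map(|chunk| u64::from_le_bytes(chunk.try_into().expect("chunk is exactly 8 bytes")))
        .collect()
}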
@ -223,12 +223,11 @@ impl NodeCore {
let tag = account.make_tag();
let comm = generate_commitments(&vec![utxo]);
let comm = generate_commitments(&[utxo]);
let mint_utxo_addr_bytes: Vec<u8> = zkvm::test_methods::MINT_UTXO_ID
.iter()
.map(|num| num.to_le_bytes())
.flatten()
.flat_map(|num| num.to_le_bytes())
.collect();
let sc_addr = hex::encode(mint_utxo_addr_bytes);
@ -323,8 +322,7 @@ impl NodeCore {
let mint_multiple_utxo_addr_bytes: Vec<u8> =
zkvm::test_methods::MINT_UTXO_MULTIPLE_ASSETS_ID
.iter()
.map(|num| num.to_le_bytes())
.flatten()
.flat_map(|num| num.to_le_bytes())
.collect();
let sc_addr = hex::encode(mint_multiple_utxo_addr_bytes);
@ -397,14 +395,13 @@ impl NodeCore {
.key_holder
.utxo_secret_key_holder
.nullifier_secret_key
.to_bytes()
.to_vec(),
.to_bytes(),
);
let (resulting_utxos, receipt) = prove_send_utxo(utxo, receivers)?;
let utxo_hashes = resulting_utxos
.iter()
.map(|(utxo, addr)| (addr.clone(), utxo.hash))
.map(|(utxo, addr)| (*addr, utxo.hash))
.collect();
let utxos: Vec<UTXO> = resulting_utxos
@ -439,8 +436,7 @@ impl NodeCore {
let send_utxo_addr_bytes: Vec<u8> = zkvm::test_methods::SEND_UTXO_ID
.iter()
.map(|num| num.to_le_bytes())
.flatten()
.flat_map(|num| num.to_le_bytes())
.collect();
let sc_addr = hex::encode(send_utxo_addr_bytes);
@ -585,8 +581,7 @@ impl NodeCore {
let send_multiple_utxo_addr_bytes: Vec<u8> =
zkvm::test_methods::SEND_UTXO_MULTIPLE_ASSETS_ID
.iter()
.map(|num| num.to_le_bytes())
.flatten()
.flat_map(|num| num.to_le_bytes())
.collect();
let sc_addr = hex::encode(send_multiple_utxo_addr_bytes);
@ -667,14 +662,13 @@ impl NodeCore {
.key_holder
.utxo_secret_key_holder
.nullifier_secret_key
.to_bytes()
.to_vec(),
.to_bytes(),
);
let (resulting_utxos, receipt) = prove_send_utxo_shielded(acc, balance as u128, receivers)?;
let utxo_hashes = resulting_utxos
.iter()
.map(|(utxo, addr)| (addr.clone(), utxo.hash))
.map(|(utxo, addr)| (*addr, utxo.hash))
.collect();
let utxos: Vec<UTXO> = resulting_utxos
@ -709,8 +703,7 @@ impl NodeCore {
let mint_utxo_addr_bytes: Vec<u8> = zkvm::test_methods::SEND_UTXO_ID
.iter()
.map(|num| num.to_le_bytes())
.flatten()
.flat_map(|num| num.to_le_bytes())
.collect();
let sc_addr = hex::encode(mint_utxo_addr_bytes);
@ -796,16 +789,14 @@ impl NodeCore {
.key_holder
.utxo_secret_key_holder
.nullifier_secret_key
.to_bytes()
.to_vec(),
.to_bytes(),
);
let (resulting_balances, receipt) = prove_send_utxo_deshielded(utxo, receivers)?;
let send_utxo_addr_bytes: Vec<u8> = zkvm::test_methods::SEND_UTXO_ID
.iter()
.map(|num| num.to_le_bytes())
.flatten()
.flat_map(|num| num.to_le_bytes())
.collect();
let sc_addr = hex::encode(send_utxo_addr_bytes);
@ -1452,14 +1443,13 @@ impl NodeCore {
.key_holder
.utxo_secret_key_holder
.nullifier_secret_key
.to_bytes()
.to_vec(),
.to_bytes(),
);
let (resulting_utxos, receipt) = prove_send_utxo(utxo, receivers)?;
let utxo_hashes = resulting_utxos
.iter()
.map(|(utxo, addr)| (addr.clone(), utxo.hash))
.map(|(utxo, addr)| (*addr, utxo.hash))
.collect();
let utxos: Vec<UTXO> = resulting_utxos
@ -1508,8 +1498,7 @@ impl NodeCore {
let send_utxo_addr_bytes: Vec<u8> = zkvm::test_methods::SEND_UTXO_ID
.iter()
.map(|num| num.to_le_bytes())
.flatten()
.flat_map(|num| num.to_le_bytes())
.collect();
let sc_addr = hex::encode(send_utxo_addr_bytes);
@ -1608,10 +1597,7 @@ impl NodeCore {
.send_split_tx(
utxo.clone(),
comm_gen_hash,
addrs_receivers
.clone()
.map(|addr| (utxo.amount / 3, addr))
.to_vec(),
addrs_receivers.map(|addr| (utxo.amount / 3, addr)).to_vec(),
visibility_list,
)
.await?;

View File

@ -22,8 +22,7 @@ pub async fn setup_empty_sc_states(node: &NodeChainStore) -> Result<()> {
let mint_utxo_addr_bytes: Vec<u8> = zkvm::test_methods::MINT_UTXO_ID
.iter()
.map(|num| num.to_le_bytes())
.flatten()
.flat_map(|num| num.to_le_bytes())
.collect();
let mint_utxo_addr = hex::encode(mint_utxo_addr_bytes);
node.block_store
@ -32,8 +31,7 @@ pub async fn setup_empty_sc_states(node: &NodeChainStore) -> Result<()> {
let single_utxo_transfer_addr_bytes: Vec<u8> = zkvm::test_methods::SEND_UTXO_ID
.iter()
.map(|num| num.to_le_bytes())
.flatten()
.flat_map(|num| num.to_le_bytes())
.collect();
let single_utxo_transfer_addr = hex::encode(single_utxo_transfer_addr_bytes);
node.block_store.put_sc_sc_state(
@ -46,8 +44,7 @@ pub async fn setup_empty_sc_states(node: &NodeChainStore) -> Result<()> {
let mint_utxo_multiple_assets_addr_bytes: Vec<u8> =
zkvm::test_methods::MINT_UTXO_MULTIPLE_ASSETS_ID
.iter()
.map(|num| num.to_le_bytes())
.flatten()
.flat_map(|num| num.to_le_bytes())
.collect();
let mint_utxo_multiple_assets_addr = hex::encode(mint_utxo_multiple_assets_addr_bytes);
node.block_store.put_sc_sc_state(
@ -60,8 +57,7 @@ pub async fn setup_empty_sc_states(node: &NodeChainStore) -> Result<()> {
let multiple_assets_utxo_transfer_addr_bytes: Vec<u8> =
zkvm::test_methods::SEND_UTXO_MULTIPLE_ASSETS_ID
.iter()
.map(|num| num.to_le_bytes())
.flatten()
.flat_map(|num| num.to_le_bytes())
.collect();
let multiple_assets_utxo_transfer_addr = hex::encode(multiple_assets_utxo_transfer_addr_bytes);
node.block_store.put_sc_sc_state(

View File

@ -53,7 +53,7 @@ pub fn new_http_server(
polling_config,
limits_config,
} = config;
info!(target:"network", "Starting http server at {}", addr);
info!(target:"network", "Starting http server at {addr}");
let handler = web::Data::new(JsonHandler {
polling_config,
node_core_config: node_config,

View File

@ -321,19 +321,19 @@ impl JsonHandler {
.body()
.utxo_commitments_created_hashes
.iter()
.map(|val| hex::encode(val.clone()))
.map(hex::encode)
.collect::<Vec<_>>(),
utxo_commitments_spent_hashes: tx
.body()
.utxo_commitments_spent_hashes
.iter()
.map(|val| hex::encode(val.clone()))
.map(hex::encode)
.collect::<Vec<_>>(),
utxo_nullifiers_created_hashes: tx
.body()
.nullifier_created_hashes
.iter()
.map(|val| hex::encode(val.clone()))
.map(hex::encode)
.collect::<Vec<_>>(),
encoded_data: tx
.body()
@ -493,7 +493,7 @@ impl JsonHandler {
utxo_result: UTXOShortEssentialStruct {
hash: hex::encode(new_utxo_rec.hash),
asset: new_utxo_rec.asset.clone(),
commitment_hash: hex::encode(generate_commitments_helper(&vec![new_utxo_rec])[0]),
commitment_hash: hex::encode(generate_commitments_helper(&[new_utxo_rec])[0]),
},
};
@ -546,7 +546,7 @@ impl JsonHandler {
utxo_result: UTXOShortEssentialStruct {
hash: hex::encode(new_utxo_rec.hash),
asset: new_utxo_rec.asset.clone(),
commitment_hash: hex::encode(generate_commitments_helper(&vec![new_utxo_rec])[0]),
commitment_hash: hex::encode(generate_commitments_helper(&[new_utxo_rec])[0]),
},
};

View File

@ -16,19 +16,11 @@ pub fn produce_blob_list_from_sc_public_state<S: Serialize>(
//`ToDo`: replace with `next_chunk` once the feature stabilizes in Rust
for i in 0..=(ser_data.len() / SC_DATA_BLOB_SIZE) {
let next_chunk: Vec<u8>;
if (i + 1) * SC_DATA_BLOB_SIZE < ser_data.len() {
next_chunk = ser_data[(i * SC_DATA_BLOB_SIZE)..((i + 1) * SC_DATA_BLOB_SIZE)]
.iter()
.cloned()
.collect();
let next_chunk: Vec<u8> = if (i + 1) * SC_DATA_BLOB_SIZE < ser_data.len() {
ser_data[(i * SC_DATA_BLOB_SIZE)..((i + 1) * SC_DATA_BLOB_SIZE)].to_vec()
} else {
next_chunk = ser_data[(i * SC_DATA_BLOB_SIZE)..(ser_data.len())]
.iter()
.cloned()
.collect();
}
ser_data[(i * SC_DATA_BLOB_SIZE)..(ser_data.len())].to_vec()
};
blob_list.push(produce_blob_from_fit_vec(next_chunk));
}
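
Until `next_chunk` stabilizes, the stable `chunks` iterator already covers this; a hedged sketch (it does not emit the trailing empty blob the `0..=` loop produces when the length is an exact multiple of `SC_DATA_BLOB_SIZE`, so it is only a drop-in replacement if that empty blob is unintentional):

for chunk in ser_data.chunks(SC_DATA_BLOB_SIZE) {
    blob_list.push(produce_blob_from_fit_vec(chunk.to_vec()));
}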
@ -52,11 +44,8 @@ pub fn compare_blob_lists(
changed_ids.push(DataBlobChangeVariant::Deleted { id });
}
} else if new_len > old_len {
for id in old_len..new_len {
changed_ids.push(DataBlobChangeVariant::Created {
id,
blob: blob_list_new[id],
});
for (id, blob) in blob_list_new.iter().enumerate().take(new_len).skip(old_len) {
changed_ids.push(DataBlobChangeVariant::Created { id, blob: *blob });
}
}
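
Since the iterator here is `blob_list_new` itself, the `.take(new_len)` is redundant when `new_len == blob_list_new.len()`; under that assumption a slightly shorter equivalent is:

for (id, blob) in blob_list_new.iter().enumerate().skip(old_len) {
    changed_ids.push(DataBlobChangeVariant::Created { id, blob: *blob });
}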

View File

@ -52,10 +52,10 @@ pub fn generate_commitments(input_utxos: &[UTXO]) -> Vec<Vec<u8>> {
///
/// ToDo: Solve this in a more scalable way
pub fn validate_in_commitments_tree(
in_commitment: &Vec<u8>,
in_commitment: &[u8],
commitment_tree: &UTXOCommitmentsMerkleTree,
) -> bool {
let alighned_hash: [u8; 32] = in_commitment.clone().try_into().unwrap();
let alighned_hash: [u8; 32] = in_commitment.try_into().unwrap();
commitment_tree.get_proof(alighned_hash).is_some()
}
@ -75,7 +75,7 @@ pub fn private_circuit(
) -> (Vec<Vec<u8>>, Vec<Vec<u8>>) {
assert!(check_balances_private(input_utxos, output_utxos));
let in_commitments = generate_commitments(&input_utxos);
let in_commitments = generate_commitments(input_utxos);
let mut in_nullifiers = vec![];
@ -104,7 +104,7 @@ pub fn private_circuit(
assert!(!public_context.nullifiers_set.contains(&nullifier));
}
(in_nullifiers, generate_commitments(&output_utxos))
(in_nullifiers, generate_commitments(output_utxos))
}
/// Check balances DE
@ -124,7 +124,7 @@ pub fn deshielded_circuit(
) -> Vec<Vec<u8>> {
assert!(check_balances_de(input_utxos, output_balance));
let in_commitments = generate_commitments(&input_utxos);
let in_commitments = generate_commitments(input_utxos);
let mut in_nullifiers = vec![];

View File

@ -77,16 +77,11 @@ impl PublicSCContext {
//`ToDo`: replace with `next_chunk` once the feature stabilizes in Rust
for i in 0..=(ser_data.len() / 8) {
let next_chunk: Vec<u8>;
if (i + 1) * 8 < ser_data.len() {
next_chunk = ser_data[(i * 8)..((i + 1) * 8)].iter().cloned().collect();
let next_chunk: Vec<u8> = if (i + 1) * 8 < ser_data.len() {
ser_data[(i * 8)..((i + 1) * 8)].to_vec()
} else {
next_chunk = ser_data[(i * 8)..(ser_data.len())]
.iter()
.cloned()
.collect();
}
ser_data[(i * 8)..(ser_data.len())].to_vec()
};
u64_list.push(PublicSCContext::produce_u64_from_fit_vec(next_chunk));
}

View File

@ -66,8 +66,7 @@ pub fn generate_nullifiers_spent_utxos(utxos_spent: Vec<(UTXO, &Account)>) -> Ve
.key_holder
.utxo_secret_key_holder
.nullifier_secret_key
.to_bytes()
.to_vec(),
.to_bytes(),
);
all_nullifiers.push(nullifier);
@ -91,8 +90,7 @@ pub fn generate_secret_random_commitment(
.key_holder
.utxo_secret_key_holder
.viewing_secret_key
.to_bytes()
.to_vec(),
.to_bytes(),
)?,
generator_blinding_factor: Tweak::new(&mut thread_rng()),
};

View File

@ -95,6 +95,12 @@ impl SequencerCore {
let tx_hash = *tx.hash();
let mempool_size = self.mempool.len();
if mempool_size >= self.sequencer_config.max_num_tx_in_block {
return Err(TransactionMalformationErrorKind::MempoolFullForRound { tx: tx_hash });
}
let curr_sequencer_roots = self.get_tree_roots();
if tx_roots != curr_sequencer_roots {
@ -165,23 +171,20 @@ impl SequencerCore {
//Tree checks
let tx_tree_check = self.store.pub_tx_store.get_tx(tx_hash).is_some();
let nullifier_tree_check = nullifier_created_hashes
.iter()
.map(|nullifier_hash| {
self.store.nullifier_store.contains(&UTXONullifier {
utxo_hash: *nullifier_hash,
})
let nullifier_tree_check = nullifier_created_hashes.iter().any(|nullifier_hash| {
self.store.nullifier_store.contains(&UTXONullifier {
utxo_hash: *nullifier_hash,
})
.any(|check| check);
let utxo_commitments_check = utxo_commitments_created_hashes
.iter()
.map(|utxo_commitment_hash| {
self.store
.utxo_commitments_store
.get_tx(*utxo_commitment_hash)
.is_some()
})
.any(|check| check);
});
let utxo_commitments_check =
utxo_commitments_created_hashes
.iter()
.any(|utxo_commitment_hash| {
self.store
.utxo_commitments_store
.get_tx(*utxo_commitment_hash)
.is_some()
});
if tx_tree_check {
return Err(
@ -267,7 +270,7 @@ impl SequencerCore {
.pop_size(self.sequencer_config.max_num_tx_in_block);
for tx in &transactions {
self.execute_check_transaction_on_state(&tx)?;
self.execute_check_transaction_on_state(tx)?;
}
let prev_block_hash = self
@ -315,7 +318,7 @@ mod tests {
let mut rng = rand::thread_rng();
let random_u8: u8 = rng.gen();
let path_str = format!("/tmp/sequencer_{:?}", random_u8);
let path_str = format!("/tmp/sequencer_{random_u8:?}");
SequencerConfig {
home: PathBuf::from(path_str),

View File

@ -101,6 +101,11 @@ impl SequencerAccountsStore {
pub fn len(&self) -> usize {
self.accounts.len()
}
/// Is the accounts store empty?
pub fn is_empty(&self) -> bool {
self.accounts.is_empty()
}
}
impl Default for SequencerAccountsStore {
@ -250,4 +255,11 @@ mod tests {
assert_eq!(acc_balance, 0);
}
#[test]
fn account_sequencer_store_is_empty_test() {
let seq_acc_store = SequencerAccountsStore::default();
assert!(seq_acc_store.is_empty());
}
}

View File

@ -53,7 +53,7 @@ pub fn new_http_server(
polling_config,
limits_config,
} = config;
info!(target:NETWORK, "Starting http server at {}", addr);
info!(target:NETWORK, "Starting http server at {addr}");
let handler = web::Data::new(JsonHandler {
polling_config,
sequencer_state: seuquencer_core.clone(),

View File

@ -49,6 +49,7 @@ impl DataBlob {
}
}
#[allow(clippy::large_enum_variant)]
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)]
pub enum DataBlobChangeVariant {
Created {
@ -145,7 +146,7 @@ mod tests {
#[test]
fn test_produce_blob_from_fit_vec() {
let data = (0..0 + 255).collect();
let data = (0..255).collect();
let blob = produce_blob_from_fit_vec(data);
assert_eq!(blob.0[..4], [0, 1, 2, 3]);
}

View File

@ -9,6 +9,7 @@ pub mod gas_calculator;
pub use test_methods;
#[allow(clippy::result_large_err)]
pub fn gas_limits_check<INP: Serialize>(
input_buffer: INP,
elf: &[u8],
@ -31,6 +32,7 @@ pub fn gas_limits_check<INP: Serialize>(
Ok(())
}
#[allow(clippy::result_large_err)]
pub fn prove_mint_utxo(
amount_to_mint: u128,
owner: AccountAddress,
@ -66,6 +68,7 @@ pub fn prove_mint_utxo(
Ok((UTXO::create_utxo_from_payload(digest), receipt))
}
#[allow(clippy::result_large_err)]
pub fn prove_send_utxo(
spent_utxo: UTXO,
owners_parts: Vec<(u128, AccountAddress)>,
@ -118,6 +121,7 @@ pub fn prove_send_utxo(
))
}
#[allow(clippy::result_large_err)]
pub fn prove_send_utxo_multiple_assets_one_receiver(
spent_utxos: Vec<UTXO>,
number_to_send: usize,
@ -160,17 +164,18 @@ pub fn prove_send_utxo_multiple_assets_one_receiver(
digest
.0
.into_iter()
.map(|payload| UTXO::create_utxo_from_payload(payload))
.map(UTXO::create_utxo_from_payload)
.collect(),
digest
.1
.into_iter()
.map(|payload| UTXO::create_utxo_from_payload(payload))
.map(UTXO::create_utxo_from_payload)
.collect(),
receipt,
))
}
#[allow(clippy::result_large_err)]
pub fn prove_send_utxo_shielded(
owner: AccountAddress,
amount: u128,
@ -226,6 +231,7 @@ pub fn prove_send_utxo_shielded(
))
}
#[allow(clippy::result_large_err)]
pub fn prove_send_utxo_deshielded(
spent_utxo: UTXO,
owners_parts: Vec<(u128, AccountAddress)>,
@ -278,6 +284,7 @@ pub fn prove_send_utxo_deshielded(
))
}
#[allow(clippy::result_large_err)]
pub fn prove_mint_utxo_multiple_assets(
amount_to_mint: u128,
number_of_assets: usize,