fix: proper type for token program instruction

Author: Daniil Polyakov
Date: 2025-12-25 20:46:37 +03:00
parent de751952af
commit 9d37a88069
12 changed files with 130 additions and 65 deletions

Cargo.lock (generated)

@ -2658,6 +2658,7 @@ version = "0.1.0"
dependencies = [
"borsh",
"bytemuck",
"env_logger",
"hex",
"hex-literal 1.1.0",
"log",

Binary file not shown.

Binary file not shown.


@ -347,11 +347,14 @@ pub fn prepare_function_map() -> HashMap<String, TestFunction> {
assert_eq!(definition_acc.program_owner, Program::token().id());
// The data of a token definition account has the following layout:
// [ 0x00 || name (6 bytes) || total supply (little endian 16 bytes) ]
// [ 0x00 || name (6 bytes) || total supply (little endian 16 bytes) || metadata id (32
// bytes)]
assert_eq!(
definition_acc.data.as_ref(),
&[
0, 65, 32, 78, 65, 77, 69, 37, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
0, 65, 32, 78, 65, 77, 69, 37, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0
]
);
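
For reference, the 55-byte buffer asserted above follows the new layout exactly: a 0x00 type tag, the six ASCII bytes of "A NAME", a little-endian u128 total supply of 37, and an all-zero 32-byte metadata id. A minimal decoding sketch (the helper name is illustrative, not part of this change):

fn decode_definition(data: &[u8; 55]) -> (u8, [u8; 6], u128, [u8; 32]) {
    let account_type = data[0];                                               // 0x00 = token definition
    let name: [u8; 6] = data[1..7].try_into().unwrap();                       // b"A NAME"
    let total_supply = u128::from_le_bytes(data[7..23].try_into().unwrap());  // 37
    let metadata_id: [u8; 32] = data[23..55].try_into().unwrap();             // all zeros here
    (account_type, name, total_supply, metadata_id)
}
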
@ -506,11 +509,14 @@ pub fn prepare_function_map() -> HashMap<String, TestFunction> {
assert_eq!(definition_acc.program_owner, Program::token().id());
// The data of a token definition account has the following layout:
// [ 0x00 || name (6 bytes) || total supply (little endian 16 bytes) ]
// [ 0x00 || name (6 bytes) || total supply (little endian 16 bytes) || metadata id (32
// bytes)]
assert_eq!(
definition_acc.data.as_ref(),
&[
0, 65, 32, 78, 65, 77, 69, 37, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
0, 65, 32, 78, 65, 77, 69, 37, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0
]
);
@ -663,8 +669,8 @@ pub fn prepare_function_map() -> HashMap<String, TestFunction> {
.account;
assert_eq!(supply_acc.program_owner, Program::token().id());
// The data of a token definition account has the following layout:
// [ 0x00 || name (6 bytes) || total supply (little endian 16 bytes) ]
// The data of a token holding account has the following layout:
// [ 0x01 || definition id (32 bytes) || balance (little endian 16 bytes) ]
assert_eq!(
supply_acc.data.as_ref(),
&[
@ -754,17 +760,20 @@ pub fn prepare_function_map() -> HashMap<String, TestFunction> {
assert_eq!(definition_acc.program_owner, Program::token().id());
// The data of a token definition account has the following layout:
// [ 0x00 || name (6 bytes) || total supply (little endian 16 bytes) ]
// [ 0x00 || name (6 bytes) || total supply (little endian 16 bytes) || metadata id (32
// bytes)]
assert_eq!(
definition_acc.data.as_ref(),
&[
0, 65, 32, 78, 65, 77, 69, 37, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
0, 65, 32, 78, 65, 77, 69, 37, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0
]
);
assert_eq!(supply_acc.program_owner, Program::token().id());
// The data of a token definition account has the following layout:
// [ 0x00 || name (6 bytes) || total supply (little endian 16 bytes) ]
// The data of a token holding account has the following layout:
// [ 0x01 || definition id (32 bytes) || balance (little endian 16 bytes) ]
assert_eq!(
supply_acc.data.as_ref(),
&[
@ -844,11 +853,14 @@ pub fn prepare_function_map() -> HashMap<String, TestFunction> {
assert_eq!(definition_acc.program_owner, Program::token().id());
// The data of a token definition account has the following layout:
// [ 0x00 || name (6 bytes) || total supply (little endian 16 bytes) ]
// [ 0x00 || name (6 bytes) || total supply (little endian 16 bytes) || metadata id (32
// bytes)]
assert_eq!(
definition_acc.data.as_ref(),
&[
0, 65, 32, 78, 65, 77, 69, 37, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
0, 65, 32, 78, 65, 77, 69, 37, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0
]
);
@ -980,11 +992,14 @@ pub fn prepare_function_map() -> HashMap<String, TestFunction> {
assert_eq!(definition_acc.program_owner, Program::token().id());
// The data of a token definition account has the following layout:
// [ 0x00 || name (6 bytes) || total supply (little endian 16 bytes) ]
// [ 0x00 || name (6 bytes) || total supply (little endian 16 bytes) || metadata id (32
// bytes)]
assert_eq!(
definition_acc.data.as_ref(),
&[
0, 65, 32, 78, 65, 77, 69, 37, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
0, 65, 32, 78, 65, 77, 69, 37, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0
]
);
@ -1116,11 +1131,14 @@ pub fn prepare_function_map() -> HashMap<String, TestFunction> {
assert_eq!(definition_acc.program_owner, Program::token().id());
// The data of a token definition account has the following layout:
// [ 0x00 || name (6 bytes) || total supply (little endian 16 bytes) ]
// [ 0x00 || name (6 bytes) || total supply (little endian 16 bytes) || metadata id (32
// bytes)]
assert_eq!(
definition_acc.data.as_ref(),
&[
0, 65, 32, 78, 65, 77, 69, 37, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
0, 65, 32, 78, 65, 77, 69, 37, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0
]
);
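
The holding-account layout referenced in the comments above is [ 0x01 || definition id (32 bytes) || balance (little endian 16 bytes) ], 49 bytes in total. A standalone builder sketch (hypothetical helper, shown only to make the offsets explicit):

fn encode_holding(definition_id: [u8; 32], balance: u128) -> [u8; 49] {
    let mut data = [0u8; 49];
    data[0] = 0x01;                                        // holding-account tag
    data[1..33].copy_from_slice(&definition_id);           // definition id (32 bytes)
    data[33..49].copy_from_slice(&balance.to_le_bytes());  // balance, little endian (16 bytes)
    data
}
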


@ -25,6 +25,7 @@ risc0-binfmt = "3.0.2"
[dev-dependencies]
test_program_methods.workspace = true
hex-literal = "1.0.0"
env_logger.workspace = true
[features]
default = []


@ -2283,7 +2283,7 @@ pub mod tests {
// TODO: repeated code needs to be cleaned up
// from token.rs (also repeated in amm.rs)
const TOKEN_DEFINITION_DATA_SIZE: usize = 23;
const TOKEN_DEFINITION_DATA_SIZE: usize = 55;
const TOKEN_HOLDING_DATA_SIZE: usize = 49;
@ -2291,6 +2291,7 @@ pub mod tests {
account_type: u8,
name: [u8; 6],
total_supply: u128,
metadata_id: AccountId,
}
struct TokenHolding {
@ -2300,14 +2301,17 @@ pub mod tests {
}
impl TokenDefinition {
fn into_data(self) -> Data {
let mut bytes = [0; TOKEN_DEFINITION_DATA_SIZE];
bytes[0] = self.account_type;
bytes[1..7].copy_from_slice(&self.name);
bytes[7..].copy_from_slice(&self.total_supply.to_le_bytes());
bytes
.to_vec()
.try_into()
.expect("23 bytes should fit into Data")
let mut bytes = Vec::<u8>::new();
bytes.extend_from_slice(&[self.account_type]);
bytes.extend_from_slice(&self.name);
bytes.extend_from_slice(&self.total_supply.to_le_bytes());
bytes.extend_from_slice(&self.metadata_id.to_bytes());
if bytes.len() != TOKEN_DEFINITION_DATA_SIZE {
panic!("Invalid Token Definition data");
}
Data::try_from(bytes).expect("Token definition data size must fit into data")
}
}
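
The new constant follows directly from the extended layout: 1 (type tag) + 6 (name) + 16 (total supply) + 32 (metadata id) = 55 bytes, alongside the existing 1 + 32 + 16 = 49 for holdings. A compile-time sanity check one could keep next to the constants (illustrative only):

const _: () = assert!(TOKEN_DEFINITION_DATA_SIZE == 1 + 6 + 16 + 32); // 55
const _: () = assert!(TOKEN_HOLDING_DATA_SIZE == 1 + 32 + 16);        // 49
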
@ -2746,6 +2750,7 @@ pub mod tests {
account_type: 0u8,
name: [1u8; 6],
total_supply: BalanceForTests::token_a_supply(),
metadata_id: AccountId::new([0; 32]),
}),
nonce: 0,
}
@ -2759,6 +2764,7 @@ pub mod tests {
account_type: 0u8,
name: [1u8; 6],
total_supply: BalanceForTests::token_b_supply(),
metadata_id: AccountId::new([0; 32]),
}),
nonce: 0,
}
@ -2772,6 +2778,7 @@ pub mod tests {
account_type: 0u8,
name: [1u8; 6],
total_supply: BalanceForTests::token_lp_supply(),
metadata_id: AccountId::new([0; 32]),
}),
nonce: 0,
}
@ -3053,6 +3060,7 @@ pub mod tests {
account_type: 0u8,
name: [1u8; 6],
total_supply: BalanceForTests::token_lp_supply_add(),
metadata_id: AccountId::new([0; 32]),
}),
nonce: 0,
}
@ -3151,6 +3159,7 @@ pub mod tests {
account_type: 0u8,
name: [1u8; 6],
total_supply: BalanceForTests::token_lp_supply_remove(),
metadata_id: AccountId::new([0; 32]),
}),
nonce: 0,
}
@ -3164,6 +3173,7 @@ pub mod tests {
account_type: 0u8,
name: [1u8; 6],
total_supply: 0,
metadata_id: AccountId::new([0; 32]),
}),
nonce: 0,
}
@ -3262,6 +3272,7 @@ pub mod tests {
account_type: 0u8,
name: [1u8; 6],
total_supply: BalanceForTests::vault_a_balance_init(),
metadata_id: AccountId::new([0; 32]),
}),
nonce: 0,
}
@ -3667,6 +3678,7 @@ pub mod tests {
#[test]
fn test_simple_amm_add() {
env_logger::init();
let mut state = state_for_amm_tests();
let mut instruction: Vec<u8> = Vec::new();
@ -4072,7 +4084,7 @@ pub mod tests {
// definition and supply accounts
let total_supply: u128 = 10_000_000;
// instruction: [0x00 || total_supply (little-endian 16 bytes) || name (6 bytes)]
let mut instruction: [u8; 23] = [0; 23];
let mut instruction = vec![0; 23];
instruction[1..17].copy_from_slice(&total_supply.to_le_bytes());
instruction[17..].copy_from_slice(b"PINATA");
let message = public_transaction::Message::try_new(
@ -4087,7 +4099,7 @@ pub mod tests {
state.transition_from_public_transaction(&tx).unwrap();
// Execution of the token program transfer just to initialize the winner token account
let mut instruction: [u8; 23] = [0; 23];
let mut instruction = vec![0; 23];
instruction[0] = 2;
let message = public_transaction::Message::try_new(
token.id(),


@ -424,7 +424,7 @@ fn initialize_token_transfer_chained_call(
amount_to_move: u128,
pda_seed: Vec<PdaSeed>,
) -> ChainedCall {
let mut instruction_data = [0; 23];
let mut instruction_data = vec![0u8; 23];
instruction_data[0] = token_program_command;
instruction_data[1..17].copy_from_slice(&amount_to_move.to_le_bytes());
let instruction_data = risc0_zkvm::serde::to_vec(&instruction_data)
@ -563,7 +563,7 @@ fn new_definition(
);
// Chain call for liquidity token (TokenLP definition -> User LP Holding)
let mut instruction_data = [0; 23];
let mut instruction_data = vec![0u8; 23];
instruction_data[0] = if pool.account == Account::default() {
TOKEN_PROGRAM_NEW
} else {
@ -1093,24 +1093,28 @@ mod tests {
const TOKEN_PROGRAM_ID: ProgramId = [15; 8];
const AMM_PROGRAM_ID: ProgramId = [42; 8];
const TOKEN_DEFINITION_DATA_SIZE: usize = 23;
const TOKEN_DEFINITION_DATA_SIZE: usize = 55;
struct TokenDefinition {
account_type: u8,
name: [u8; 6],
total_supply: u128,
metadata_id: AccountId,
}
impl TokenDefinition {
fn into_data(self) -> Data {
let mut bytes = [0; TOKEN_DEFINITION_DATA_SIZE];
bytes[0] = self.account_type;
bytes[1..7].copy_from_slice(&self.name);
bytes[7..].copy_from_slice(&self.total_supply.to_le_bytes());
bytes
.to_vec()
.try_into()
.expect("23 bytes should fit into Data")
let mut bytes = Vec::<u8>::new();
bytes.extend_from_slice(&[self.account_type]);
bytes.extend_from_slice(&self.name);
bytes.extend_from_slice(&self.total_supply.to_le_bytes());
bytes.extend_from_slice(&self.metadata_id.to_bytes());
if bytes.len() != TOKEN_DEFINITION_DATA_SIZE {
panic!("Invalid Token Definition data");
}
Data::try_from(bytes).expect("Token definition data size must fit into data")
}
}
@ -1246,7 +1250,7 @@ mod tests {
impl ChainedCallForTests {
fn cc_swap_token_a_test_1() -> ChainedCall {
let mut instruction_data: [u8; 23] = [0; 23];
let mut instruction_data = vec![0; 23];
instruction_data[0] = 1;
instruction_data[1..17]
.copy_from_slice(&BalanceForTests::add_max_amount_a().to_le_bytes());
@ -1269,7 +1273,7 @@ mod tests {
let mut vault_b_auth = AccountForTests::vault_b_init();
vault_b_auth.is_authorized = true;
let mut instruction: [u8; 23] = [0; 23];
let mut instruction = vec![0; 23];
instruction[0] = 1;
instruction[1..17].copy_from_slice(&swap_amount.to_le_bytes());
let instruction_data = risc0_zkvm::serde::to_vec(&instruction)
@ -1291,7 +1295,7 @@ mod tests {
let mut vault_a_auth = AccountForTests::vault_a_init();
vault_a_auth.is_authorized = true;
let mut instruction_data: [u8; 23] = [0; 23];
let mut instruction_data = vec![0; 23];
instruction_data[0] = 1;
instruction_data[1..17].copy_from_slice(&swap_amount.to_le_bytes());
let instruction_data = risc0_zkvm::serde::to_vec(&instruction_data)
@ -1308,7 +1312,7 @@ mod tests {
}
fn cc_swap_token_b_test_2() -> ChainedCall {
let mut instruction: [u8; 23] = [0; 23];
let mut instruction = vec![0; 23];
instruction[0] = 1;
instruction[1..17].copy_from_slice(&BalanceForTests::add_max_amount_b().to_le_bytes());
let instruction_data = risc0_zkvm::serde::to_vec(&instruction)
@ -1325,7 +1329,7 @@ mod tests {
}
fn cc_add_token_a() -> ChainedCall {
let mut instruction: [u8; 23] = [0; 23];
let mut instruction = vec![0u8; 23];
instruction[0] = 1;
instruction[1..17]
.copy_from_slice(&BalanceForTests::add_successful_amount_a().to_le_bytes());
@ -1343,7 +1347,7 @@ mod tests {
}
fn cc_add_token_b() -> ChainedCall {
let mut instruction: [u8; 23] = [0; 23];
let mut instruction = vec![0u8; 23];
instruction[0] = 1;
instruction[1..17]
.copy_from_slice(&BalanceForTests::add_successful_amount_b().to_le_bytes());
@ -1364,7 +1368,7 @@ mod tests {
let mut pool_lp_auth = AccountForTests::pool_lp_init();
pool_lp_auth.is_authorized = true;
let mut instruction: [u8; 23] = [0; 23];
let mut instruction = vec![0u8; 23];
instruction[0] = 4;
instruction[1..17]
.copy_from_slice(&BalanceForTests::add_successful_amount_a().to_le_bytes());
@ -1384,7 +1388,7 @@ mod tests {
let mut vault_a_auth = AccountForTests::vault_a_init();
vault_a_auth.is_authorized = true;
let mut instruction: [u8; 23] = [0; 23];
let mut instruction = vec![0; 23];
instruction[0] = 1;
instruction[1..17]
.copy_from_slice(&BalanceForTests::remove_actual_a_successful().to_le_bytes());
@ -1405,7 +1409,7 @@ mod tests {
let mut vault_b_auth = AccountForTests::vault_b_init();
vault_b_auth.is_authorized = true;
let mut instruction: [u8; 23] = [0; 23];
let mut instruction = vec![0; 23];
instruction[0] = 1;
instruction[1..17]
.copy_from_slice(&BalanceForTests::remove_min_amount_b_low().to_le_bytes());
@ -1426,7 +1430,7 @@ mod tests {
let mut pool_lp_auth = AccountForTests::pool_lp_init();
pool_lp_auth.is_authorized = true;
let mut instruction: [u8; 23] = [0; 23];
let mut instruction = vec![0; 23];
instruction[0] = 3;
instruction[1..17]
.copy_from_slice(&BalanceForTests::remove_actual_a_successful().to_le_bytes());
@ -1446,7 +1450,7 @@ mod tests {
}
fn cc_new_definition_token_a() -> ChainedCall {
let mut instruction: [u8; 23] = [0; 23];
let mut instruction = vec![0; 23];
instruction[0] = 1;
instruction[1..17]
.copy_from_slice(&BalanceForTests::add_successful_amount_a().to_le_bytes());
@ -1464,7 +1468,7 @@ mod tests {
}
fn cc_new_definition_token_b() -> ChainedCall {
let mut instruction: [u8; 23] = [0; 23];
let mut instruction = vec![0; 23];
instruction[0] = 1;
instruction[1..17]
.copy_from_slice(&BalanceForTests::add_successful_amount_b().to_le_bytes());
@ -1482,7 +1486,7 @@ mod tests {
}
fn cc_new_definition_token_lp() -> ChainedCall {
let mut instruction: [u8; 23] = [0; 23];
let mut instruction = vec![0; 23];
instruction[0] = 1;
instruction[1..17]
.copy_from_slice(&BalanceForTests::add_successful_amount_a().to_le_bytes());
@ -1736,6 +1740,7 @@ mod tests {
account_type: 0u8,
name: [1; 6],
total_supply: BalanceForTests::vault_a_reserve_init(),
metadata_id: AccountId::new([0; 32]),
}),
nonce: 0,
},
@ -1753,6 +1758,7 @@ mod tests {
account_type: 0u8,
name: [1; 6],
total_supply: BalanceForTests::vault_a_reserve_init(),
metadata_id: AccountId::new([0; 32]),
}),
nonce: 0,
},


@ -82,7 +82,7 @@ fn main() {
let winner_token_holding_post = winner_token_holding.account.clone();
pinata_definition_post.data = data.next_data();
let mut instruction_data: [u8; 23] = [0; 23];
let mut instruction_data = vec![0; 23];
instruction_data[0] = 1;
instruction_data[1..17].copy_from_slice(&PRIZE.to_le_bytes());


@ -458,7 +458,7 @@ fn new_definition_with_metadata(
definition_id: definition_target_account.account_id,
uri,
creators,
primary_sale_date: 0u64, // TODO: future works to implement this
primary_sale_date: 0u64, // TODO #261: future work to implement this
};
let mut definition_target_account_post = definition_target_account.account.clone();


@ -4,6 +4,7 @@ use clap::Subcommand;
use itertools::Itertools as _;
use key_protocol::key_management::key_tree::chain_index::ChainIndex;
use nssa::{Account, AccountId, program::Program};
use nssa_core::account::Data;
use serde::Serialize;
use crate::{
@ -12,17 +13,20 @@ use crate::{
helperfunctions::{AccountPrivacyKind, HumanReadableAccount, parse_addr_with_privacy_prefix},
};
const TOKEN_DEFINITION_TYPE: u8 = 0;
const TOKEN_DEFINITION_DATA_SIZE: usize = 23;
const TOKEN_DEFINITION_DATA_SIZE: usize = 55;
const TOKEN_HOLDING_TYPE: u8 = 1;
const TOKEN_HOLDING_DATA_SIZE: usize = 49;
const TOKEN_STANDARD_FUNGIBLE_TOKEN: u8 = 0;
const TOKEN_STANDARD_NONFUNGIBLE: u8 = 2;
struct TokenDefinition {
#[allow(unused)]
account_type: u8,
name: [u8; 6],
total_supply: u128,
#[allow(unused)]
metadata_id: AccountId,
}
struct TokenHolding {
@ -33,19 +37,37 @@ struct TokenHolding {
}
impl TokenDefinition {
fn parse(data: &[u8]) -> Option<Self> {
if data.len() != TOKEN_DEFINITION_DATA_SIZE || data[0] != TOKEN_DEFINITION_TYPE {
fn parse(data: &Data) -> Option<Self> {
let data = Vec::<u8>::from(data.clone());
if data.len() != TOKEN_DEFINITION_DATA_SIZE {
None
} else {
let account_type = data[0];
let name = data[1..7].try_into().unwrap();
let total_supply = u128::from_le_bytes(data[7..].try_into().unwrap());
let name = data[1..7].try_into().expect("Name must be 6 bytes");
let total_supply = u128::from_le_bytes(
data[7..23]
.try_into()
.expect("Total supply must be 16 bytes little-endian"),
);
let metadata_id = AccountId::new(
data[23..TOKEN_DEFINITION_DATA_SIZE]
.try_into()
.expect("Token Program expects valid Account Id for Metadata"),
);
Some(Self {
let this = Some(Self {
account_type,
name,
total_supply,
})
metadata_id,
});
match account_type {
TOKEN_STANDARD_NONFUNGIBLE if total_supply != 1 => None,
TOKEN_STANDARD_FUNGIBLE_TOKEN if metadata_id != AccountId::new([0; 32]) => None,
_ => this,
}
}
}
}
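
The added match encodes the token-standard rules enforced here: a non-fungible definition (TOKEN_STANDARD_NONFUNGIBLE) must have a total supply of exactly 1, and a plain fungible definition (TOKEN_STANDARD_FUNGIBLE_TOKEN) must leave the metadata id zeroed; anything else parses through unchanged. The same rule stated on raw fields (a sketch, independent of the Data and AccountId wrappers used above):

fn definition_is_well_formed(account_type: u8, total_supply: u128, metadata_id: &[u8; 32]) -> bool {
    match account_type {
        2 => total_supply == 1,          // TOKEN_STANDARD_NONFUNGIBLE: exactly one unit
        0 => metadata_id == &[0u8; 32],  // TOKEN_STANDARD_FUNGIBLE_TOKEN: no metadata account
        _ => true,                       // other standards: no extra constraint checked here
    }
}
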
@ -344,6 +366,8 @@ impl WalletSubcommand for AccountSubcommand {
#[cfg(test)]
mod tests {
use nssa::AccountId;
use crate::cli::account::{TokedDefinitionAccountView, TokenDefinition};
#[test]
@ -352,6 +376,7 @@ mod tests {
account_type: 1,
name: [137, 12, 14, 3, 5, 4],
total_supply: 100,
metadata_id: AccountId::new([0; 32]),
};
let token_def_view: TokedDefinitionAccountView = token_def.into();
@ -365,6 +390,7 @@ mod tests {
account_type: 1,
name: [240, 159, 146, 150, 66, 66],
total_supply: 100,
metadata_id: AccountId::new([0; 32]),
};
let token_def_view: TokedDefinitionAccountView = token_def.into();
@ -378,6 +404,7 @@ mod tests {
account_type: 1,
name: [78, 65, 77, 69, 0, 0],
total_supply: 100,
metadata_id: AccountId::new([0; 32]),
};
let token_def_view: TokedDefinitionAccountView = token_def.into();


@ -20,7 +20,7 @@ impl Token<'_> {
let account_ids = vec![definition_account_id, supply_account_id];
let program_id = nssa::program::Program::token().id();
// Instruction must be: [0x00 || total_supply (little-endian 16 bytes) || name (6 bytes)]
let mut instruction = [0; 23];
let mut instruction = vec![0u8; 23];
instruction[1..17].copy_from_slice(&total_supply.to_le_bytes());
instruction[17..].copy_from_slice(&name);
let message = nssa::public_transaction::Message::try_new(
@ -131,7 +131,7 @@ impl Token<'_> {
let program_id = nssa::program::Program::token().id();
// Instruction must be: [0x01 || amount (little-endian 16 bytes) || 0x00 || 0x00 || 0x00 ||
// 0x00 || 0x00 || 0x00].
let mut instruction = [0; 23];
let mut instruction = vec![0u8; 23];
instruction[0] = 0x01;
instruction[1..17].copy_from_slice(&amount.to_le_bytes());
let Ok(nonces) = self.0.get_accounts_nonces(vec![sender_account_id]).await else {
@ -306,7 +306,7 @@ impl Token<'_> {
fn token_program_preparation_transfer(amount: u128) -> (InstructionData, Program) {
// Instruction must be: [0x01 || amount (little-endian 16 bytes) || 0x00 || 0x00 || 0x00 ||
// 0x00 || 0x00 || 0x00].
let mut instruction = [0; 23];
let mut instruction = vec![0u8; 23];
instruction[0] = 0x01;
instruction[1..17].copy_from_slice(&amount.to_le_bytes());
let instruction_data = Program::serialize_instruction(instruction).unwrap();
@ -320,7 +320,7 @@ fn token_program_preparation_definition(
total_supply: u128,
) -> (InstructionData, Program) {
// Instruction must be: [0x00 || total_supply (little-endian 16 bytes) || name (6 bytes)]
let mut instruction = [0; 23];
let mut instruction = vec![0u8; 23];
instruction[1..17].copy_from_slice(&total_supply.to_le_bytes());
instruction[17..].copy_from_slice(&name);
let instruction_data = Program::serialize_instruction(instruction).unwrap();
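
Across all of these call sites the token-program instruction stays 23 bytes; the change is only that it is built as a Vec<u8> (apparently the type the serialization helpers here expect) rather than a fixed [u8; 23] array. A pair of builder sketches assuming just the layouts from the comments above (helper names are illustrative):

fn new_definition_instruction(total_supply: u128, name: [u8; 6]) -> Vec<u8> {
    // [0x00 || total_supply (little-endian 16 bytes) || name (6 bytes)]
    let mut instruction = vec![0u8; 23];
    instruction[1..17].copy_from_slice(&total_supply.to_le_bytes());
    instruction[17..].copy_from_slice(&name);
    instruction
}

fn transfer_instruction(amount: u128) -> Vec<u8> {
    // [0x01 || amount (little-endian 16 bytes) || six 0x00 padding bytes]
    let mut instruction = vec![0u8; 23];
    instruction[0] = 0x01;
    instruction[1..17].copy_from_slice(&amount.to_le_bytes());
    instruction
}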