Mirror of https://github.com/logos-blockchain/lssa.git, synced 2026-01-02 13:23:10 +00:00
Merge branch 'main' into Pravdyvy/token-burn-mint-wallet-update
This commit is contained in: commit bece9a9108
Cargo.lock (generated, 2 changes)
@@ -2657,6 +2657,8 @@ name = "nssa"
 version = "0.1.0"
 dependencies = [
  "borsh",
  "bytemuck",
+ "env_logger",
  "hex",
+ "hex-literal 1.1.0",
  "log",
BIN  artifacts/program_methods/amm.bin  (Normal file)
Binary files not shown.
@@ -347,11 +347,14 @@ pub fn prepare_function_map() -> HashMap<String, TestFunction> {

    assert_eq!(definition_acc.program_owner, Program::token().id());
    // The data of a token definition account has the following layout:
-   // [ 0x00 || name (6 bytes) || total supply (little endian 16 bytes) ]
+   // [ 0x00 || name (6 bytes) || total supply (little endian 16 bytes) || metadata id (32
+   // bytes)]
    assert_eq!(
        definition_acc.data.as_ref(),
        &[
-           0, 65, 32, 78, 65, 77, 69, 37, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+           0, 65, 32, 78, 65, 77, 69, 37, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+           0, 0
        ]
    );

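The expected byte literal above decodes directly from the new 55-byte layout: tag 0x00, the ASCII name "A NAME" (65, 32, 78, 65, 77, 69), a little-endian total supply of 37, and an all-zero 32-byte metadata id. A minimal sketch of that encoding (the helper name is illustrative, not part of the repository):

// Sketch only: builds definition-account data in the layout the comment above describes.
fn encode_token_definition(name: &[u8; 6], total_supply: u128, metadata_id: &[u8; 32]) -> Vec<u8> {
    let mut data = Vec::with_capacity(55);
    data.push(0x00);                                      // token definition tag
    data.extend_from_slice(name);                         // 6-byte name
    data.extend_from_slice(&total_supply.to_le_bytes());  // 16-byte little-endian supply
    data.extend_from_slice(metadata_id);                  // 32-byte metadata account id
    data
}

#[test]
fn definition_data_matches_expected_literal() {
    let data = encode_token_definition(b"A NAME", 37, &[0; 32]);
    assert_eq!(data.len(), 55);
    assert_eq!(&data[..8], &[0u8, 65, 32, 78, 65, 77, 69, 37]);
    assert!(data[8..].iter().all(|&b| b == 0));
}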
@@ -590,11 +593,14 @@ pub fn prepare_function_map() -> HashMap<String, TestFunction> {

    assert_eq!(definition_acc.program_owner, Program::token().id());
    // The data of a token definition account has the following layout:
-   // [ 0x00 || name (6 bytes) || total supply (little endian 16 bytes) ]
+   // [ 0x00 || name (6 bytes) || total supply (little endian 16 bytes) || metadata id (32
+   // bytes)]
    assert_eq!(
        definition_acc.data.as_ref(),
        &[
-           0, 65, 32, 78, 65, 77, 69, 37, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+           0, 65, 32, 78, 65, 77, 69, 37, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+           0, 0
        ]
    );

@@ -929,8 +935,8 @@ pub fn prepare_function_map() -> HashMap<String, TestFunction> {
        .account;

    assert_eq!(supply_acc.program_owner, Program::token().id());
-   // The data of a token definition account has the following layout:
-   // [ 0x00 || name (6 bytes) || total supply (little endian 16 bytes) ]
+   // The data of a token holding account has the following layout:
+   // [ 0x01 || definition id (32 bytes) || balance (little endian 16 bytes) ]
    assert_eq!(
        supply_acc.data.as_ref(),
        &[
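The corrected comment gives the 49-byte holding layout, matching the wallet's TOKEN_HOLDING_DATA_SIZE further down in this diff. A minimal parsing sketch under that layout (type and function names are illustrative only, not the wallet's own types):

// Sketch only: decodes holding-account data laid out as
// [ 0x01 || definition id (32 bytes) || balance (little endian 16 bytes) ].
struct HoldingView {
    definition_id: [u8; 32],
    balance: u128,
}

fn parse_token_holding(data: &[u8]) -> Option<HoldingView> {
    if data.len() != 49 || data[0] != 0x01 {
        return None; // wrong size or not a holding account
    }
    Some(HoldingView {
        definition_id: data[1..33].try_into().ok()?,
        balance: u128::from_le_bytes(data[33..49].try_into().ok()?),
    })
}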
@@ -1291,17 +1297,20 @@ pub fn prepare_function_map() -> HashMap<String, TestFunction> {

    assert_eq!(definition_acc.program_owner, Program::token().id());
    // The data of a token definition account has the following layout:
-   // [ 0x00 || name (6 bytes) || total supply (little endian 16 bytes) ]
+   // [ 0x00 || name (6 bytes) || total supply (little endian 16 bytes) || metadata id (32
+   // bytes)]
    assert_eq!(
        definition_acc.data.as_ref(),
        &[
-           0, 65, 32, 78, 65, 77, 69, 37, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+           0, 65, 32, 78, 65, 77, 69, 37, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+           0, 0
        ]
    );

    assert_eq!(supply_acc.program_owner, Program::token().id());
-   // The data of a token definition account has the following layout:
-   // [ 0x00 || name (6 bytes) || total supply (little endian 16 bytes) ]
+   // The data of a token holding account has the following layout:
+   // [ 0x01 || definition id (32 bytes) || balance (little endian 16 bytes) ]
    assert_eq!(
        supply_acc.data.as_ref(),
        &[
@@ -1381,11 +1390,14 @@ pub fn prepare_function_map() -> HashMap<String, TestFunction> {

    assert_eq!(definition_acc.program_owner, Program::token().id());
    // The data of a token definition account has the following layout:
-   // [ 0x00 || name (6 bytes) || total supply (little endian 16 bytes) ]
+   // [ 0x00 || name (6 bytes) || total supply (little endian 16 bytes) || metadata id (32
+   // bytes)]
    assert_eq!(
        definition_acc.data.as_ref(),
        &[
-           0, 65, 32, 78, 65, 77, 69, 37, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+           0, 65, 32, 78, 65, 77, 69, 37, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+           0, 0
        ]
    );

@@ -1517,11 +1529,14 @@ pub fn prepare_function_map() -> HashMap<String, TestFunction> {

    assert_eq!(definition_acc.program_owner, Program::token().id());
    // The data of a token definition account has the following layout:
-   // [ 0x00 || name (6 bytes) || total supply (little endian 16 bytes) ]
+   // [ 0x00 || name (6 bytes) || total supply (little endian 16 bytes) || metadata id (32
+   // bytes)]
    assert_eq!(
        definition_acc.data.as_ref(),
        &[
-           0, 65, 32, 78, 65, 77, 69, 37, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+           0, 65, 32, 78, 65, 77, 69, 37, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+           0, 0
        ]
    );

@@ -1653,11 +1668,14 @@ pub fn prepare_function_map() -> HashMap<String, TestFunction> {

    assert_eq!(definition_acc.program_owner, Program::token().id());
    // The data of a token definition account has the following layout:
-   // [ 0x00 || name (6 bytes) || total supply (little endian 16 bytes) ]
+   // [ 0x00 || name (6 bytes) || total supply (little endian 16 bytes) || metadata id (32
+   // bytes)]
    assert_eq!(
        definition_acc.data.as_ref(),
        &[
-           0, 65, 32, 78, 65, 77, 69, 37, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+           0, 65, 32, 78, 65, 77, 69, 37, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+           0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+           0, 0
        ]
    );

@@ -15,6 +15,7 @@ borsh.workspace = true
hex.workspace = true
secp256k1 = "0.31.1"
risc0-binfmt = "3.0.2"
bytemuck = "1.24.0"
log.workspace = true

[build-dependencies]
@@ -24,6 +25,7 @@ risc0-binfmt = "3.0.2"

[dev-dependencies]
test_program_methods.workspace = true
hex-literal = "1.0.0"
env_logger.workspace = true

[features]
default = []
@@ -9,7 +9,7 @@ borsh.workspace = true
serde = { workspace = true }
thiserror.workspace = true
chacha20 = { version = "0.9", default-features = false }
- bytemuck = { workspace = true, optional = true }
+ bytemuck.workspace = true
k256 = { workspace = true, optional = true }
base58 = { workspace = true, optional = true }
anyhow = { workspace = true, optional = true }

@@ -19,4 +19,4 @@ serde_json.workspace = true

[features]
default = []
- host = ["dep:bytemuck", "dep:k256", "dep:base58", "dep:anyhow"]
+ host = ["dep:k256", "dep:base58", "dep:anyhow"]
@@ -15,7 +15,7 @@ pub type Nonce = u128;

/// Account to be used both in public and private contexts
#[derive(
-   Serialize, Deserialize, Clone, Default, PartialEq, Eq, BorshSerialize, BorshDeserialize,
+   Clone, Default, Eq, PartialEq, Serialize, Deserialize, BorshSerialize, BorshDeserialize,
)]
#[cfg_attr(any(feature = "host", test), derive(Debug))]
pub struct Account {
@@ -25,7 +25,7 @@ pub struct Account {
    pub nonce: Nonce,
}

- #[derive(Serialize, Deserialize, Clone, PartialEq, Eq)]
+ #[derive(Clone, Eq, PartialEq, Serialize, Deserialize)]
#[cfg_attr(any(feature = "host", test), derive(Debug))]
pub struct AccountWithMetadata {
    pub account: Account,
@@ -45,9 +45,18 @@ impl AccountWithMetadata {
}

#[derive(
-   Copy, Clone, Serialize, Deserialize, PartialEq, Eq, Hash, BorshSerialize, BorshDeserialize,
+   Copy,
+   Clone,
+   Default,
+   Serialize,
+   Deserialize,
+   PartialEq,
+   Eq,
+   Hash,
+   BorshSerialize,
+   BorshDeserialize,
)]
- #[cfg_attr(any(feature = "host", test), derive(Debug, PartialOrd, Ord, Default))]
+ #[cfg_attr(any(feature = "host", test), derive(Debug, PartialOrd, Ord))]
pub struct AccountId {
    value: [u8; 32],
}
@@ -180,4 +189,11 @@ mod tests {
        let result = base58_str.parse::<AccountId>().unwrap_err();
        assert!(matches!(result, AccountIdError::InvalidLength(_)));
    }
+
+   #[test]
+   fn default_account_id() {
+       let default_account_id = AccountId::default();
+       let expected_account_id = AccountId::new([0; 32]);
+       assert!(default_account_id == expected_account_id);
+   }
}
@@ -3,7 +3,7 @@ use std::collections::HashSet;
use risc0_zkvm::{DeserializeOwned, guest::env, serde::Deserializer};
use serde::{Deserialize, Serialize};

- #[cfg(feature = "host")]
+ //#[cfg(feature = "host")]
use crate::account::AccountId;
use crate::account::{Account, AccountWithMetadata};
@@ -22,8 +22,8 @@ pub struct ProgramInput<T> {
/// Each program can derive up to `2^256` unique account IDs by choosing different
/// seeds. PDAs allow programs to control namespaced account identifiers without
/// collisions between programs.
- #[derive(Serialize, Deserialize, Clone)]
- #[cfg_attr(any(feature = "host", test), derive(Debug, PartialEq, Eq))]
+ #[derive(Serialize, Deserialize, Clone, Eq, PartialEq)]
+ #[cfg_attr(any(feature = "host", test), derive(Debug))]
pub struct PdaSeed([u8; 32]);

impl PdaSeed {
@@ -32,7 +32,7 @@ impl PdaSeed {
    }
}

- #[cfg(feature = "host")]
+ //#[cfg(feature = "host")]
impl From<(&ProgramId, &PdaSeed)> for AccountId {
    fn from(value: (&ProgramId, &PdaSeed)) -> Self {
        use risc0_zkvm::sha::{Impl, Sha256};

@@ -54,8 +54,8 @@ impl From<(&ProgramId, &PdaSeed)> for AccountId {
    }
}

- #[derive(Serialize, Deserialize, Clone)]
- #[cfg_attr(any(feature = "host", test), derive(Debug, PartialEq, Eq))]
+ #[derive(Serialize, Deserialize, Clone, PartialEq, Eq)]
+ #[cfg_attr(any(feature = "host", test), derive(Debug,))]
pub struct ChainedCall {
    /// The program ID of the program to execute
    pub program_id: ProgramId,
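Per the doc comment above, a program-derived account id is computed from a program id plus a 32-byte seed, giving each program up to 2^256 collision-free ids. The hunk only shows that risc0's SHA-256 impl is pulled in, not the exact preimage, so the following is a sketch under the assumption that the id is the SHA-256 digest of the program id bytes followed by the seed bytes (illustrated with the sha2 crate rather than risc0_zkvm::sha):

use sha2::{Digest, Sha256};

// Sketch only: derive a 32-byte account id from (program_id, seed).
// The real impl hashes via risc0_zkvm::sha::Impl; the preimage layout here is assumed.
fn derive_pda(program_id: &[u8; 32], seed: &[u8; 32]) -> [u8; 32] {
    let mut hasher = Sha256::new();
    hasher.update(program_id); // namespace by the owning program...
    hasher.update(seed);       // ...then by the program-chosen seed
    hasher.finalize().into()
}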
@@ -7,7 +7,7 @@ use serde::Serialize;

use crate::{
    error::NssaError,
-   program_methods::{AUTHENTICATED_TRANSFER_ELF, PINATA_ELF, TOKEN_ELF},
+   program_methods::{AMM_ELF, AUTHENTICATED_TRANSFER_ELF, PINATA_ELF, TOKEN_ELF},
};

/// Maximum number of cycles for a public execution.

@@ -95,6 +95,10 @@ impl Program {
        // `program_methods`
        Self::new(TOKEN_ELF.to_vec()).unwrap()
    }
+
+   pub fn amm() -> Self {
+       Self::new(AMM_ELF.to_vec()).expect("The AMM program must be a valid Risc0 program")
+   }
}

// TODO: Testnet only. Refactor to prevent compilation on mainnet.
nssa/src/state.rs (1569 changes): file diff suppressed because it is too large
program_methods/guest/src/bin/amm.rs (Normal file, 3587 changes): file diff suppressed because it is too large
@@ -82,7 +82,7 @@ fn main() {
    let winner_token_holding_post = winner_token_holding.account.clone();
    pinata_definition_post.data = data.next_data();

-   let mut instruction_data: [u8; 23] = [0; 23];
+   let mut instruction_data = vec![0; 23];
    instruction_data[0] = 1;
    instruction_data[1..17].copy_from_slice(&PRIZE.to_le_bytes());

File diff suppressed because it is too large
@@ -4,6 +4,7 @@ use clap::Subcommand;
use itertools::Itertools as _;
use key_protocol::key_management::key_tree::chain_index::ChainIndex;
use nssa::{Account, AccountId, program::Program};
+ use nssa_core::account::Data;
use serde::Serialize;

use crate::{

@@ -12,17 +13,20 @@ use crate::{
    helperfunctions::{AccountPrivacyKind, HumanReadableAccount, parse_addr_with_privacy_prefix},
};

const TOKEN_DEFINITION_TYPE: u8 = 0;
- const TOKEN_DEFINITION_DATA_SIZE: usize = 23;
+ const TOKEN_DEFINITION_DATA_SIZE: usize = 55;

const TOKEN_HOLDING_TYPE: u8 = 1;
const TOKEN_HOLDING_DATA_SIZE: usize = 49;
+ const TOKEN_STANDARD_FUNGIBLE_TOKEN: u8 = 0;
+ const TOKEN_STANDARD_NONFUNGIBLE: u8 = 2;

struct TokenDefinition {
    #[allow(unused)]
    account_type: u8,
    name: [u8; 6],
    total_supply: u128,
+   #[allow(unused)]
+   metadata_id: AccountId,
}

struct TokenHolding {

@@ -33,19 +37,37 @@ struct TokenHolding {
}

impl TokenDefinition {
-   fn parse(data: &[u8]) -> Option<Self> {
-       if data.len() != TOKEN_DEFINITION_DATA_SIZE || data[0] != TOKEN_DEFINITION_TYPE {
+   fn parse(data: &Data) -> Option<Self> {
+       let data = Vec::<u8>::from(data.clone());
+
+       if data.len() != TOKEN_DEFINITION_DATA_SIZE {
            None
        } else {
            let account_type = data[0];
-           let name = data[1..7].try_into().unwrap();
-           let total_supply = u128::from_le_bytes(data[7..].try_into().unwrap());
+           let name = data[1..7].try_into().expect("Name must be a 6 bytes");
+           let total_supply = u128::from_le_bytes(
+               data[7..23]
+                   .try_into()
+                   .expect("Total supply must be 16 bytes little-endian"),
+           );
+           let metadata_id = AccountId::new(
+               data[23..TOKEN_DEFINITION_DATA_SIZE]
+                   .try_into()
+                   .expect("Token Program expects valid Account Id for Metadata"),
+           );

-           Some(Self {
+           let this = Some(Self {
                account_type,
                name,
                total_supply,
-           })
+               metadata_id,
+           });
+
+           match account_type {
+               TOKEN_STANDARD_NONFUNGIBLE if total_supply != 1 => None,
+               TOKEN_STANDARD_FUNGIBLE_TOKEN if metadata_id != AccountId::new([0; 32]) => None,
+               _ => this,
+           }
        }
    }
}
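The match at the end of parse encodes two consistency rules for the new standards: a non-fungible definition (type 2) must have a total supply of exactly 1, and a plain fungible definition (type 0) must carry an all-zero metadata id. A standalone sketch of just those rules (constants copied from the hunk, the function name is illustrative):

const TOKEN_STANDARD_FUNGIBLE_TOKEN: u8 = 0;
const TOKEN_STANDARD_NONFUNGIBLE: u8 = 2;

// Sketch only: the acceptance rules applied by TokenDefinition::parse above,
// expressed over raw layout fields instead of the wallet's own types.
fn definition_is_consistent(account_type: u8, total_supply: u128, metadata_id: [u8; 32]) -> bool {
    match account_type {
        TOKEN_STANDARD_NONFUNGIBLE => total_supply == 1,            // an NFT must have supply 1
        TOKEN_STANDARD_FUNGIBLE_TOKEN => metadata_id == [0u8; 32],  // a plain fungible token has no metadata id
        _ => true,                                                  // other standards pass through here
    }
}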
@@ -344,6 +366,8 @@ impl WalletSubcommand for AccountSubcommand {

#[cfg(test)]
mod tests {
+   use nssa::AccountId;
+
    use crate::cli::account::{TokedDefinitionAccountView, TokenDefinition};

    #[test]

@@ -352,6 +376,7 @@ mod tests {
            account_type: 1,
            name: [137, 12, 14, 3, 5, 4],
            total_supply: 100,
+           metadata_id: AccountId::new([0; 32]),
        };

        let token_def_view: TokedDefinitionAccountView = token_def.into();

@@ -365,6 +390,7 @@ mod tests {
            account_type: 1,
            name: [240, 159, 146, 150, 66, 66],
            total_supply: 100,
+           metadata_id: AccountId::new([0; 32]),
        };

        let token_def_view: TokedDefinitionAccountView = token_def.into();

@@ -378,6 +404,7 @@ mod tests {
            account_type: 1,
            name: [78, 65, 77, 69, 0, 0],
            total_supply: 100,
+           metadata_id: AccountId::new([0; 32]),
        };

        let token_def_view: TokedDefinitionAccountView = token_def.into();
@@ -20,7 +20,7 @@ impl Token<'_> {
        let account_ids = vec![definition_account_id, supply_account_id];
        let program_id = nssa::program::Program::token().id();
        // Instruction must be: [0x00 || total_supply (little-endian 16 bytes) || name (6 bytes)]
-       let mut instruction = [0; 23];
+       let mut instruction = vec![0u8; 23];
        instruction[1..17].copy_from_slice(&total_supply.to_le_bytes());
        instruction[17..].copy_from_slice(&name);
        let message = nssa::public_transaction::Message::try_new(

@@ -131,7 +131,7 @@ impl Token<'_> {
        let program_id = nssa::program::Program::token().id();
        // Instruction must be: [0x01 || amount (little-endian 16 bytes) || 0x00 || 0x00 || 0x00 ||
        // 0x00 || 0x00 || 0x00].
-       let mut instruction = [0; 23];
+       let mut instruction = vec![0u8; 23];
        instruction[0] = 0x01;
        instruction[1..17].copy_from_slice(&amount.to_le_bytes());
        let Ok(nonces) = self.0.get_accounts_nonces(vec![sender_account_id]).await else {

@@ -609,7 +609,7 @@ impl Token<'_> {
fn token_program_preparation_transfer(amount: u128) -> (InstructionData, Program) {
    // Instruction must be: [0x01 || amount (little-endian 16 bytes) || 0x00 || 0x00 || 0x00 ||
    // 0x00 || 0x00 || 0x00].
-   let mut instruction = [0; 23];
+   let mut instruction = vec![0u8; 23];
    instruction[0] = 0x01;
    instruction[1..17].copy_from_slice(&amount.to_le_bytes());
    let instruction_data = Program::serialize_instruction(instruction).unwrap();

@@ -623,7 +623,7 @@ fn token_program_preparation_definition(
    total_supply: u128,
) -> (InstructionData, Program) {
    // Instruction must be: [0x00 || total_supply (little-endian 16 bytes) || name (6 bytes)]
-   let mut instruction = [0; 23];
+   let mut instruction = vec![0u8; 23];
    instruction[1..17].copy_from_slice(&total_supply.to_le_bytes());
    instruction[17..].copy_from_slice(&name);
    let instruction_data = Program::serialize_instruction(instruction).unwrap();
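The two instruction formats repeated in the comments above are 23 bytes each: tag 0x00 followed by the little-endian 16-byte total supply and the 6-byte name for definition creation, and tag 0x01 followed by the little-endian 16-byte amount and six zero bytes for transfers. A consolidated sketch of both builders (helper names are illustrative, not wallet APIs):

// Sketch only: build the 23-byte token-program instructions described in the comments above.
fn new_definition_instruction(total_supply: u128, name: [u8; 6]) -> Vec<u8> {
    let mut instruction = vec![0u8; 23];      // tag byte stays 0x00
    instruction[1..17].copy_from_slice(&total_supply.to_le_bytes());
    instruction[17..].copy_from_slice(&name);
    instruction
}

fn transfer_instruction(amount: u128) -> Vec<u8> {
    let mut instruction = vec![0u8; 23];      // trailing 6 bytes stay 0x00
    instruction[0] = 0x01;
    instruction[1..17].copy_from_slice(&amount.to_le_bytes());
    instruction
}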