diff --git a/.deny.toml b/.deny.toml new file mode 100644 index 00000000..1cb3879c --- /dev/null +++ b/.deny.toml @@ -0,0 +1,50 @@ +# Config file reference can be found at https://embarkstudios.github.io/cargo-deny/checks/cfg.html. + +[graph] +all-features = true +exclude-dev = true +no-default-features = true + +[advisories] +ignore = [ + { id = "RUSTSEC-2023-0071", reason = "Marvin Attack: potential key recovery through timing sidechannels" }, + { id = "RUSTSEC-2024-0388", reason = "`derivative` is unmaintained; consider using an alternative. Use `cargo tree -p derivative -i > tmp.txt` to check the dependency tree." }, + { id = "RUSTSEC-2024-0436", reason = "`paste` has a security vulnerability; consider using an alternative. Use `cargo tree -p paste -i > tmp.txt` to check the dependency tree." }, + { id = "RUSTSEC-2025-0055", reason = "`tracing-subscriber` v0.2.25 pulled in by ark-relations v0.4.0 - will be addressed before mainnet" }, + { id = "RUSTSEC-2025-0141", reason = "`bincode` is unmaintained but continuing to use it." }, +] +yanked = "deny" +unused-ignored-advisory = "deny" + +[bans] +allow-wildcard-paths = false +multiple-versions = "allow" + +[licenses] +allow = [ + "Apache-2.0 WITH LLVM-exception", + "Apache-2.0", + "BSD-2-Clause", + "BSD-3-Clause", + "BSL-1.0", + "CC0-1.0", + "CDLA-Permissive-2.0", + "ISC", + "MIT", + "MPL-2.0", + "Unicode-3.0", + "Zlib", +] +private = { ignore = false } +unused-allowed-license = "deny" + +[sources] +allow-git = [ + "https://github.com/EspressoSystems/jellyfish.git", + "https://github.com/logos-blockchain/logos-blockchain.git", +] +unknown-git = "deny" +unknown-registry = "deny" + +[sources.allow-org] +github = ["logos-co"] diff --git a/.github/actions/install-logos-blockchain-circuits/action.yaml b/.github/actions/install-logos-blockchain-circuits/action.yaml new file mode 100644 index 00000000..e62aea6b --- /dev/null +++ b/.github/actions/install-logos-blockchain-circuits/action.yaml @@ -0,0 +1,19 @@ +name: Setup Logos Blockchain Circuits + +description: Set up Logos Blockchain Circom Circuits, Rapidsnark prover and Rapidsnark verifier using the setup-logos-blockchain-circuits.sh script. 
+ +inputs: + github-token: + description: GitHub token for downloading releases + required: true + +runs: + using: "composite" + steps: + - name: Setup logos-blockchain-circuits + shell: bash + working-directory: ${{ github.workspace }} + env: + GITHUB_TOKEN: ${{ inputs.github-token }} + run: | + curl -sSL https://raw.githubusercontent.com/logos-blockchain/logos-blockchain/main/scripts/setup-logos-blockchain-circuits.sh | bash diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a8efedd9..6292a786 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -56,6 +56,19 @@ jobs: - name: Check for unused dependencies run: cargo machete + deny: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v5 + with: + ref: ${{ github.head_ref }} + + - name: Install cargo-deny + run: cargo install --locked cargo-deny + + - name: Check licenses and advisories + run: cargo deny check + lint: runs-on: ubuntu-latest timeout-minutes: 60 @@ -70,6 +83,10 @@ jobs: - uses: ./.github/actions/install-risc0 + - uses: ./.github/actions/install-logos-blockchain-circuits + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + - name: Install active toolchain run: rustup install @@ -95,6 +112,10 @@ jobs: - uses: ./.github/actions/install-risc0 + - uses: ./.github/actions/install-logos-blockchain-circuits + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + - name: Install active toolchain run: rustup install @@ -119,6 +140,10 @@ jobs: - uses: ./.github/actions/install-risc0 + - uses: ./.github/actions/install-logos-blockchain-circuits + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + - name: Install active toolchain run: rustup install diff --git a/.gitignore b/.gitignore index 6162763b..8a454208 100644 --- a/.gitignore +++ b/.gitignore @@ -7,4 +7,5 @@ data/ .vscode/ rocksdb sequencer_runner/data/ -storage.json \ No newline at end of file +storage.json +result diff --git a/Cargo.lock b/Cargo.lock index c49395a7..fd633651 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -927,9 +927,14 @@ name = "bedrock_client" version = "0.1.0" dependencies = [ "anyhow", + "futures", + "log", + "logos-blockchain-chain-broadcast-service", "logos-blockchain-common-http-client", "logos-blockchain-core", "reqwest", + "serde", + "tokio-retry", ] [[package]] @@ -1165,6 +1170,25 @@ dependencies = [ "thiserror 2.0.17", ] +[[package]] +name = "cbindgen" +version = "0.29.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "befbfd072a8e81c02f8c507aefce431fe5e7d051f83d48a23ffc9b9fe5a11799" +dependencies = [ + "clap", + "heck", + "indexmap 2.12.1", + "log", + "proc-macro2", + "quote", + "serde", + "serde_json", + "syn 2.0.111", + "tempfile", + "toml 0.9.9+spec-1.0.0", +] + [[package]] name = "cc" version = "1.2.49" @@ -1177,6 +1201,12 @@ dependencies = [ "shlex", ] +[[package]] +name = "cesu8" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d43a04d8753f35258c91f8ec639f792891f748a1edbd759cf1dcea3382ad83c" + [[package]] name = "cexpr" version = "0.6.0" @@ -1310,6 +1340,16 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75" +[[package]] +name = "combine" +version = "4.6.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd" +dependencies = [ + "bytes", + "memchr", +] + [[package]] name = "common" version = "0.1.0" @@ -1319,6 +1359,7 @@ 
dependencies = [ "borsh", "hex", "log", + "logos-blockchain-common-http-client", "nssa", "nssa_core", "reqwest", @@ -1326,6 +1367,7 @@ dependencies = [ "serde_json", "sha2", "thiserror 2.0.17", + "url", ] [[package]] @@ -1399,6 +1441,16 @@ dependencies = [ "libc", ] +[[package]] +name = "core-foundation" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2a6cd9ae233e7f62ba4e9353e81a88df7fc8a5987b8d445b4d90c879bd156f6" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "core-foundation-sys" version = "0.8.7" @@ -1412,7 +1464,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "45390e6114f68f718cc7a830514a96f903cccd70d02a8f6d9f643ac4ba45afaf" dependencies = [ "bitflags 1.3.2", - "core-foundation", + "core-foundation 0.9.4", "libc", ] @@ -2151,6 +2203,10 @@ name = "futures-timer" version = "3.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24" +dependencies = [ + "gloo-timers", + "send_wrapper", +] [[package]] name = "futures-util" @@ -2235,6 +2291,52 @@ version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280" +[[package]] +name = "gloo-net" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c06f627b1a58ca3d42b45d6104bf1e1a03799df472df00988b6ba21accc10580" +dependencies = [ + "futures-channel", + "futures-core", + "futures-sink", + "gloo-utils", + "http 1.4.0", + "js-sys", + "pin-project", + "serde", + "serde_json", + "thiserror 1.0.69", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", +] + +[[package]] +name = "gloo-timers" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b995a66bb87bebce9a0f4a95aed01daca4872c050bfcb21653361c03bc35e5c" +dependencies = [ + "futures-channel", + "futures-core", + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "gloo-utils" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b5555354113b18c547c1d3a98fbf7fb32a9ff4f6fa112ce823a21641a0ba3aa" +dependencies = [ + "js-sys", + "serde", + "serde_json", + "wasm-bindgen", + "web-sys", +] + [[package]] name = "governor" version = "0.6.3" @@ -2507,6 +2609,7 @@ dependencies = [ "http 1.4.0", "hyper", "hyper-util", + "log", "rustls", "rustls-pki-types", "tokio", @@ -2549,7 +2652,7 @@ dependencies = [ "libc", "percent-encoding", "pin-project-lite", - "socket2 0.5.10", + "socket2 0.6.1", "system-configuration", "tokio", "tower-service", @@ -2695,6 +2798,62 @@ version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ee796ad498c8d9a1d68e477df8f754ed784ef875de1414ebdaf169f70a6a784" +[[package]] +name = "indexer_core" +version = "0.1.0" +dependencies = [ + "anyhow", + "bedrock_client", + "borsh", + "common", + "futures", + "log", + "logos-blockchain-core", + "serde", + "serde_json", + "tokio", + "url", +] + +[[package]] +name = "indexer_service" +version = "0.1.0" +dependencies = [ + "anyhow", + "async-trait", + "clap", + "env_logger", + "indexer_service_protocol", + "indexer_service_rpc", + "jsonrpsee", + "log", + "tokio", + "tokio-util", +] + +[[package]] +name = "indexer_service_protocol" +version = "0.1.0" +dependencies = [ + "base64", + "borsh", + "common", + "nssa", + "nssa_core", + "schemars 1.2.0", + "serde", +] + +[[package]] +name 
= "indexer_service_rpc" +version = "0.1.0" +dependencies = [ + "indexer_service_protocol", + "jsonrpsee", + "schemars 1.2.0", + "serde_json", +] + [[package]] name = "indexmap" version = "1.9.3" @@ -2753,15 +2912,20 @@ dependencies = [ "env_logger", "futures", "hex", + "indexer_core", "key_protocol", "log", "nssa", "nssa_core", "sequencer_core", "sequencer_runner", + "serde_json", "tempfile", + "token_core", "tokio", + "url", "wallet", + "wallet-ffi", ] [[package]] @@ -2855,6 +3019,28 @@ dependencies = [ "zeroize", ] +[[package]] +name = "jni" +version = "0.21.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a87aa2bb7d2af34197c04845522473242e1aa17c12f4935d5856491a7fb8c97" +dependencies = [ + "cesu8", + "cfg-if", + "combine", + "jni-sys", + "log", + "thiserror 1.0.69", + "walkdir", + "windows-sys 0.45.0", +] + +[[package]] +name = "jni-sys" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130" + [[package]] name = "jobserver" version = "0.1.34" @@ -2875,6 +3061,178 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "jsonrpsee" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f3f48dc3e6b8bd21e15436c1ddd0bc22a6a54e8ec46fedd6adf3425f396ec6a" +dependencies = [ + "jsonrpsee-client-transport", + "jsonrpsee-core", + "jsonrpsee-http-client", + "jsonrpsee-proc-macros", + "jsonrpsee-server", + "jsonrpsee-types", + "jsonrpsee-wasm-client", + "jsonrpsee-ws-client", + "tokio", + "tracing", +] + +[[package]] +name = "jsonrpsee-client-transport" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cf36eb27f8e13fa93dcb50ccb44c417e25b818cfa1a481b5470cd07b19c60b98" +dependencies = [ + "base64", + "futures-channel", + "futures-util", + "gloo-net", + "http 1.4.0", + "jsonrpsee-core", + "pin-project", + "rustls", + "rustls-pki-types", + "rustls-platform-verifier", + "soketto", + "thiserror 2.0.17", + "tokio", + "tokio-rustls", + "tokio-util", + "tracing", + "url", +] + +[[package]] +name = "jsonrpsee-core" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "316c96719901f05d1137f19ba598b5fe9c9bc39f4335f67f6be8613921946480" +dependencies = [ + "async-trait", + "bytes", + "futures-timer", + "futures-util", + "http 1.4.0", + "http-body", + "http-body-util", + "jsonrpsee-types", + "parking_lot", + "pin-project", + "rand 0.9.2", + "rustc-hash", + "serde", + "serde_json", + "thiserror 2.0.17", + "tokio", + "tokio-stream", + "tower 0.5.2", + "tracing", + "wasm-bindgen-futures", +] + +[[package]] +name = "jsonrpsee-http-client" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "790bedefcec85321e007ff3af84b4e417540d5c87b3c9779b9e247d1bcc3dab8" +dependencies = [ + "base64", + "http-body", + "hyper", + "hyper-rustls", + "hyper-util", + "jsonrpsee-core", + "jsonrpsee-types", + "rustls", + "rustls-platform-verifier", + "serde", + "serde_json", + "thiserror 2.0.17", + "tokio", + "tower 0.5.2", + "url", +] + +[[package]] +name = "jsonrpsee-proc-macros" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2da3f8ab5ce1bb124b6d082e62dffe997578ceaf0aeb9f3174a214589dc00f07" +dependencies = [ + "heck", + "proc-macro-crate", + "proc-macro2", + "quote", + "syn 2.0.111", +] + +[[package]] +name = "jsonrpsee-server" +version = "0.26.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c51b7c290bb68ce3af2d029648148403863b982f138484a73f02a9dd52dbd7f" +dependencies = [ + "futures-util", + "http 1.4.0", + "http-body", + "http-body-util", + "hyper", + "hyper-util", + "jsonrpsee-core", + "jsonrpsee-types", + "pin-project", + "route-recognizer", + "serde", + "serde_json", + "soketto", + "thiserror 2.0.17", + "tokio", + "tokio-stream", + "tokio-util", + "tower 0.5.2", + "tracing", +] + +[[package]] +name = "jsonrpsee-types" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc88ff4688e43cc3fa9883a8a95c6fa27aa2e76c96e610b737b6554d650d7fd5" +dependencies = [ + "http 1.4.0", + "serde", + "serde_json", + "thiserror 2.0.17", +] + +[[package]] +name = "jsonrpsee-wasm-client" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7902885de4779f711a95d82c8da2d7e5f9f3a7c7cfa44d51c067fd1c29d72a3c" +dependencies = [ + "jsonrpsee-client-transport", + "jsonrpsee-core", + "jsonrpsee-types", + "tower 0.5.2", +] + +[[package]] +name = "jsonrpsee-ws-client" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b6fceceeb05301cc4c065ab3bd2fa990d41ff4eb44e4ca1b30fa99c057c3e79" +dependencies = [ + "http 1.4.0", + "jsonrpsee-client-transport", + "jsonrpsee-core", + "jsonrpsee-types", + "tower 0.5.2", + "url", +] + [[package]] name = "k256" version = "0.13.4" @@ -3718,10 +4076,10 @@ dependencies = [ "libc", "log", "openssl", - "openssl-probe", + "openssl-probe 0.1.6", "openssl-sys", "schannel", - "security-framework", + "security-framework 2.11.1", "security-framework-sys", "tempfile", ] @@ -3808,6 +4166,7 @@ dependencies = [ "test-case", "test_program_methods", "thiserror 2.0.17", + "token_core", ] [[package]] @@ -3968,6 +4327,12 @@ version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" +[[package]] +name = "openssl-probe" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c87def4c32ab89d880effc9e097653c8da5d6ef28e6b539d313baaacfbafcbe" + [[package]] name = "openssl-sys" version = "0.9.111" @@ -4258,6 +4623,8 @@ dependencies = [ "nssa_core", "risc0-zkvm", "serde", + "token_core", + "token_program", ] [[package]] @@ -4325,7 +4692,7 @@ dependencies = [ "quinn-udp", "rustc-hash", "rustls", - "socket2 0.5.10", + "socket2 0.6.1", "thiserror 2.0.17", "tokio", "tracing", @@ -4362,7 +4729,7 @@ dependencies = [ "cfg_aliases", "libc", "once_cell", - "socket2 0.5.10", + "socket2 0.6.1", "tracing", "windows-sys 0.60.2", ] @@ -4830,6 +5197,12 @@ dependencies = [ "librocksdb-sys", ] +[[package]] +name = "route-recognizer" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "afab94fb28594581f62d981211a9a4d53cc8130bbcbbb89a0440d9b8e81a7746" + [[package]] name = "rpds" version = "1.2.0" @@ -4945,6 +5318,7 @@ version = "0.23.35" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "533f54bc6a7d4f647e46ad909549eda97bf5afc1585190ef692b4286b198bd8f" dependencies = [ + "log", "once_cell", "ring", "rustls-pki-types", @@ -4953,6 +5327,18 @@ dependencies = [ "zeroize", ] +[[package]] +name = "rustls-native-certs" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "612460d5f7bea540c490b2b6395d8e34a953e52b491accd6c86c8164c5932a63" +dependencies = [ + 
"openssl-probe 0.2.1", + "rustls-pki-types", + "schannel", + "security-framework 3.5.1", +] + [[package]] name = "rustls-pki-types" version = "1.13.2" @@ -4963,6 +5349,33 @@ dependencies = [ "zeroize", ] +[[package]] +name = "rustls-platform-verifier" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19787cda76408ec5404443dc8b31795c87cd8fec49762dc75fa727740d34acc1" +dependencies = [ + "core-foundation 0.10.1", + "core-foundation-sys", + "jni", + "log", + "once_cell", + "rustls", + "rustls-native-certs", + "rustls-platform-verifier-android", + "rustls-webpki", + "security-framework 3.5.1", + "security-framework-sys", + "webpki-root-certs 0.26.11", + "windows-sys 0.52.0", +] + +[[package]] +name = "rustls-platform-verifier-android" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f87165f0995f63a9fbeea62b64d10b4d9d8e78ec6d7d51fb2125fda7bb36788f" + [[package]] name = "rustls-webpki" version = "0.103.8" @@ -5001,10 +5414,19 @@ dependencies = [ "strum", "tempfile", "thiserror 2.0.17", - "toml", + "toml 0.8.23", "yaml-rust2", ] +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] + [[package]] name = "schannel" version = "0.1.28" @@ -5028,16 +5450,29 @@ dependencies = [ [[package]] name = "schemars" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9558e172d4e8533736ba97870c4b2cd63f84b382a3d6eb063da41b91cce17289" +checksum = "54e910108742c57a770f492731f99be216a52fadd361b06c8fb59d74ccc267d2" dependencies = [ "dyn-clone", "ref-cast", + "schemars_derive", "serde", "serde_json", ] +[[package]] +name = "schemars_derive" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4908ad288c5035a8eb12cfdf0d49270def0a268ee162b75eeee0f85d155a7c45" +dependencies = [ + "proc-macro2", + "quote", + "serde_derive_internals", + "syn 2.0.111", +] + [[package]] name = "scopeguard" version = "1.2.0" @@ -5086,7 +5521,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" dependencies = [ "bitflags 2.10.0", - "core-foundation", + "core-foundation 0.9.4", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework" +version = "3.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b3297343eaf830f66ede390ea39da1d462b6b0c1b000f420d0a83f898bbbe6ef" +dependencies = [ + "bitflags 2.10.0", + "core-foundation 0.10.1", "core-foundation-sys", "libc", "security-framework-sys", @@ -5112,6 +5560,12 @@ dependencies = [ "serde_core", ] +[[package]] +name = "send_wrapper" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f638d531eccd6e23b980caf34876660d38e265409d8e99b397ab71eb3612fad0" + [[package]] name = "sequencer_core" version = "0.1.0" @@ -5217,6 +5671,17 @@ dependencies = [ "syn 2.0.111", ] +[[package]] +name = "serde_derive_internals" +version = "0.29.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.111", +] + [[package]] name = "serde_json" version = "1.0.145" @@ -5250,6 
+5715,15 @@ dependencies = [ "serde", ] +[[package]] +name = "serde_spanned" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8bbf91e5a4d6315eee45e704372590b30e260ee83af6639d64557f51b067776" +dependencies = [ + "serde_core", +] + [[package]] name = "serde_urlencoded" version = "0.7.1" @@ -5274,7 +5748,7 @@ dependencies = [ "indexmap 1.9.3", "indexmap 2.12.1", "schemars 0.9.0", - "schemars 1.1.0", + "schemars 1.2.0", "serde_core", "serde_json", "serde_with_macros", @@ -5402,6 +5876,22 @@ dependencies = [ "windows-sys 0.60.2", ] +[[package]] +name = "soketto" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e859df029d160cb88608f5d7df7fb4753fd20fdfb4de5644f3d8b8440841721" +dependencies = [ + "base64", + "bytes", + "futures", + "http 1.4.0", + "httparse", + "log", + "rand 0.8.5", + "sha1", +] + [[package]] name = "spin" version = "0.9.8" @@ -5455,6 +5945,7 @@ version = "0.1.0" dependencies = [ "borsh", "common", + "nssa", "rocksdb", "thiserror 2.0.17", ] @@ -5541,7 +6032,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" dependencies = [ "bitflags 1.3.2", - "core-foundation", + "core-foundation 0.9.4", "system-configuration-sys", ] @@ -5730,6 +6221,23 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" +[[package]] +name = "token_core" +version = "0.1.0" +dependencies = [ + "borsh", + "nssa_core", + "serde", +] + +[[package]] +name = "token_program" +version = "0.1.0" +dependencies = [ + "nssa_core", + "token_core", +] + [[package]] name = "tokio" version = "1.48.0" @@ -5768,6 +6276,17 @@ dependencies = [ "tokio", ] +[[package]] +name = "tokio-retry" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f57eb36ecbe0fc510036adff84824dd3c24bb781e21bfa67b69d556aa85214f" +dependencies = [ + "pin-project", + "rand 0.8.5", + "tokio", +] + [[package]] name = "tokio-rustls" version = "0.26.4" @@ -5792,12 +6311,13 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.17" +version = "0.7.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2efa149fe76073d6e8fd97ef4f4eca7b67f599660115591483572e406e165594" +checksum = "9ae9cec805b01e8fc3fd2fe289f89149a9b66dd16786abd8b19cfa7b48cb0098" dependencies = [ "bytes", "futures-core", + "futures-io", "futures-sink", "pin-project-lite", "tokio", @@ -5810,11 +6330,26 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362" dependencies = [ "serde", - "serde_spanned", + "serde_spanned 0.6.9", "toml_datetime 0.6.11", "toml_edit 0.22.27", ] +[[package]] +name = "toml" +version = "0.9.9+spec-1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eb5238e643fc34a1d5d7e753e1532a91912d74b63b92b3ea51fde8d1b7bc79dd" +dependencies = [ + "indexmap 2.12.1", + "serde_core", + "serde_spanned 1.0.4", + "toml_datetime 0.7.4+spec-1.0.0", + "toml_parser", + "toml_writer", + "winnow", +] + [[package]] name = "toml_datetime" version = "0.6.11" @@ -5841,7 +6376,7 @@ checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a" dependencies = [ "indexmap 2.12.1", "serde", - "serde_spanned", + "serde_spanned 0.6.9", "toml_datetime 0.6.11", "toml_write", "winnow", @@ 
-5874,6 +6409,12 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801" +[[package]] +name = "toml_writer" +version = "1.0.6+spec-1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab16f14aed21ee8bfd8ec22513f7287cd4a91aa92e44edfe2c17ddd004e92607" + [[package]] name = "tower" version = "0.4.13" @@ -6126,6 +6667,16 @@ version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" +[[package]] +name = "walkdir" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" +dependencies = [ + "same-file", + "winapi-util", +] + [[package]] name = "wallet" version = "0.1.0" @@ -6153,7 +6704,22 @@ dependencies = [ "serde", "serde_json", "sha2", + "token_core", "tokio", + "url", +] + +[[package]] +name = "wallet-ffi" +version = "0.1.0" +dependencies = [ + "cbindgen", + "common", + "nssa", + "nssa_core", + "tempfile", + "tokio", + "wallet", ] [[package]] @@ -6271,6 +6837,24 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "webpki-root-certs" +version = "0.26.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75c7f0ef91146ebfb530314f5f1d24528d7f0767efbfd31dce919275413e393e" +dependencies = [ + "webpki-root-certs 1.0.5", +] + +[[package]] +name = "webpki-root-certs" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "36a29fc0408b113f68cf32637857ab740edfafdf460c326cd2afaa2d84cc05dc" +dependencies = [ + "rustls-pki-types", +] + [[package]] name = "webpki-roots" version = "1.0.4" @@ -6381,6 +6965,15 @@ dependencies = [ "windows-link", ] +[[package]] +name = "windows-sys" +version = "0.45.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" +dependencies = [ + "windows-targets 0.42.2", +] + [[package]] name = "windows-sys" version = "0.52.0" @@ -6408,6 +7001,21 @@ dependencies = [ "windows-link", ] +[[package]] +name = "windows-targets" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" +dependencies = [ + "windows_aarch64_gnullvm 0.42.2", + "windows_aarch64_msvc 0.42.2", + "windows_i686_gnu 0.42.2", + "windows_i686_msvc 0.42.2", + "windows_x86_64_gnu 0.42.2", + "windows_x86_64_gnullvm 0.42.2", + "windows_x86_64_msvc 0.42.2", +] + [[package]] name = "windows-targets" version = "0.52.6" @@ -6441,6 +7049,12 @@ dependencies = [ "windows_x86_64_msvc 0.53.1", ] +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" + [[package]] name = "windows_aarch64_gnullvm" version = "0.52.6" @@ -6453,6 +7067,12 @@ version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53" +[[package]] +name = "windows_aarch64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" + [[package]] name = "windows_aarch64_msvc" version = 
"0.52.6" @@ -6465,6 +7085,12 @@ version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006" +[[package]] +name = "windows_i686_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" + [[package]] name = "windows_i686_gnu" version = "0.52.6" @@ -6489,6 +7115,12 @@ version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c" +[[package]] +name = "windows_i686_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" + [[package]] name = "windows_i686_msvc" version = "0.52.6" @@ -6501,6 +7133,12 @@ version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2" +[[package]] +name = "windows_x86_64_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" + [[package]] name = "windows_x86_64_gnu" version = "0.52.6" @@ -6513,6 +7151,12 @@ version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499" +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" + [[package]] name = "windows_x86_64_gnullvm" version = "0.52.6" @@ -6525,6 +7169,12 @@ version = "0.53.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1" +[[package]] +name = "windows_x86_64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" + [[package]] name = "windows_x86_64_msvc" version = "0.52.6" diff --git a/Cargo.toml b/Cargo.toml index f14f2559..ad2d9276 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,17 +1,26 @@ +[workspace.package] +license = "MIT or Apache-2.0" + [workspace] resolver = "3" members = [ "integration_tests", - "sequencer_runner", "storage", "key_protocol", - "sequencer_rpc", "mempool", "wallet", - "sequencer_core", + "wallet-ffi", "common", "nssa", "nssa/core", + "sequencer_core", + "sequencer_rpc", + "sequencer_runner", + "indexer_service", + "indexer_service/protocol", + "indexer_service/rpc", + "programs/token/core", + "programs/token", "program_methods", "program_methods/guest", "test_program_methods", @@ -20,6 +29,7 @@ members = [ "examples/program_deployment/methods", "examples/program_deployment/methods/guest", "bedrock_client", + "indexer_core", ] [workspace.dependencies] @@ -32,9 +42,16 @@ key_protocol = { path = "key_protocol" } sequencer_core = { path = "sequencer_core" } sequencer_rpc = { path = "sequencer_rpc" } sequencer_runner = { path = "sequencer_runner" } +indexer_service = { path = "indexer_service" } +indexer_service_protocol = { path = "indexer_service/protocol" } +indexer_service_rpc = { path = "indexer_service/rpc" } wallet = { path = "wallet" } +wallet-ffi = { path = "wallet-ffi" } +token_core = { path = 
"programs/token/core" } +token_program = { path = "programs/token" } test_program_methods = { path = "test_program_methods" } bedrock_client = { path = "bedrock_client" } +indexer_core = { path = "indexer_core" } tokio = { version = "1.28.2", features = [ "net", @@ -42,6 +59,7 @@ tokio = { version = "1.28.2", features = [ "sync", "fs", ] } +tokio-util = "0.7.18" risc0-zkvm = { version = "3.0.3", features = ['std'] } risc0-build = "3.0.3" anyhow = "1.0.98" @@ -52,6 +70,7 @@ serde = { version = "1.0.60", default-features = false, features = ["derive"] } serde_json = "1.0.81" actix = "0.13.0" actix-cors = "0.6.1" +jsonrpsee = "0.26.0" futures = "0.3" actix-rt = "*" lazy_static = "1.5.0" @@ -77,11 +96,14 @@ chrono = "0.4.41" borsh = "1.5.7" base58 = "0.2.0" itertools = "0.14.0" -url = "2.5.4" +url = { version = "2.5.4", features = ["serde"] } +tokio-retry = "0.3.0" +schemars = "1.2.0" logos-blockchain-common-http-client = { git = "https://github.com/logos-blockchain/logos-blockchain.git" } logos-blockchain-key-management-system-service = { git = "https://github.com/logos-blockchain/logos-blockchain.git" } logos-blockchain-core = { git = "https://github.com/logos-blockchain/logos-blockchain.git" } +logos-blockchain-chain-broadcast-service = { git = "https://github.com/logos-blockchain/logos-blockchain.git" } rocksdb = { version = "0.24.0", default-features = false, features = [ "snappy", diff --git a/LEZ testnet v0.1 tutorials/amm.md b/LEZ testnet v0.1 tutorials/amm.md new file mode 100644 index 00000000..60751517 --- /dev/null +++ b/LEZ testnet v0.1 tutorials/amm.md @@ -0,0 +1,109 @@ +# Automated Market Maker (AMM) + +This tutorial covers the AMM program in LEZ. The AMM manages liquidity pools and enables swaps between custom tokens. By the end, you will have practiced: +1. Creating a liquidity pool for a token pair. +2. Swapping tokens. +3. Withdrawing liquidity from the pool. +4. Adding liquidity to the pool. + +## 1. Creating a liquidity pool for a token pair + +We start by creating a pool for the tokens created earlier. In return for providing liquidity, you receive liquidity provider (LP) tokens. LP tokens represent your share of the pool and are required to withdraw liquidity later. + +> [!NOTE] +> The AMM does not currently charge swap fees or distribute rewards to liquidity providers. LP tokens therefore represent only a proportional share of the pool reserves. Fee support will be added in future versions. + +### a. Create an LP holding account + +```bash +wallet account new public + +# Output: +Generated new account with account_id Public/FHgLW9jW4HXMV6egLWbwpTqVAGiCHw2vkg71KYSuimVf +``` + +### b. Initialize the pool + +Deposit tokens A and B and specify the account that will receive LP tokens: + +```bash +wallet amm new \ + --user-holding-a Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw \ + --user-holding-b Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6 \ + --user-holding-lp Public/FHgLW9jW4HXMV6egLWbwpTqVAGiCHw2vkg71KYSuimVf \ + --balance-a 100 \ + --balance-b 200 +``` + +> [!Important] +> The LP holding account is owned by the token program, so LP tokens are managed using the same token infrastructure as regular tokens. 
+ +```bash +wallet account get --account-id Public/FHgLW9jW4HXMV6egLWbwpTqVAGiCHw2vkg71KYSuimVf + +# Output: +Holding account owned by token program +{"account_type":"Token holding","definition_id":"7BeDS3e28MA5Err7gBswmR1fUKdHXqmUpTefNPu3pJ9i","balance":100} +``` + +> [!Tip] +> If you inspect the `user-holding-a` and `user-holding-b` accounts, you will see that 100 and 200 tokens were deducted. Those tokens now reside in the pool and are available for swaps by any user. + +## 2. Swapping + +Use `wallet amm swap` to perform a token swap: + +```bash +# --amount-in is the amount of tokens to swap. +# --min-amount-out is the minimum number of tokens expected in return. +# --token-definition is the definition ID of the token being provided to the swap. +# In this case, we are swapping from TOKENA to TOKENB, so this is the definition ID of TOKENA. +wallet amm swap \ + --user-holding-a Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw \ + --user-holding-b Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6 \ + --amount-in 5 \ + --min-amount-out 8 \ + --token-definition 4X9kAcnCZ1Ukkbm3nywW9xfCNPK8XaMWCk3zfs1sP4J7 +``` + +Once executed, 5 tokens are deducted from the Token A holding account and the corresponding amount (computed by the pool’s pricing function) is credited to the Token B holding account. + +## 3. Withdrawing liquidity from the pool + +Liquidity providers can withdraw assets by redeeming (burning) LP tokens. The amount received is proportional to the share of LP tokens redeemed relative to the total LP supply. + +Use `wallet amm remove-liquidity`: + +```bash +wallet amm remove-liquidity \ + --user-holding-a Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw \ + --user-holding-b Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6 \ + --user-holding-lp Public/FHgLW9jW4HXMV6egLWbwpTqVAGiCHw2vkg71KYSuimVf \ + --balance-lp 20 \ + --min-amount-a 1 \ + --min-amount-b 1 +``` + +> [!Important] +> This burns `balance-lp` LP tokens from the user’s LP holding account. In return, the AMM transfers tokens A and B from the pool vaults to the user’s holding accounts, based on current reserves. +> The `min-amount-a` and `min-amount-b` parameters set the minimum acceptable outputs. If the computed amounts fall below either threshold, the instruction fails to protect against unfavorable pool changes. + +## 4. Adding liquidity to the pool + +To add liquidity, deposit tokens A and B in the ratio implied by current pool reserves. In return, the AMM mints new LP tokens that represent your proportional share. + +Use `wallet amm add-liquidity`: + +```bash +wallet amm add-liquidity \ + --user-holding-a Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw \ + --user-holding-b Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6 \ + --user-holding-lp Public/FHgLW9jW4HXMV6egLWbwpTqVAGiCHw2vkg71KYSuimVf \ + --min-amount-lp 1 \ + --max-amount-a 10 \ + --max-amount-b 10 +``` + +> [!Important] +> `max-amount-a` and `max-amount-b` cap how many tokens A and B can be taken from the user’s accounts. The AMM computes the required amounts based on the pool’s reserve ratio. +> `min-amount-lp` sets the minimum LP tokens to mint. If the computed LP amount falls below this threshold, the instruction fails. diff --git a/LEZ testnet v0.1 tutorials/custom-tokens.md b/LEZ testnet v0.1 tutorials/custom-tokens.md new file mode 100644 index 00000000..ea647696 --- /dev/null +++ b/LEZ testnet v0.1 tutorials/custom-tokens.md @@ -0,0 +1,159 @@ +This tutorial focuses on custom tokens using the Token program. So far, you have used the authenticated-transfers program for native tokens.
The Token program is for creating and managing custom tokens. By the end, you will have practiced: +1. Creating new tokens. +2. Transferring custom tokens. + +> [!Important] +> The Token program is a single program that creates and manages all tokens, so you do not deploy a new program for each token. +> Token program accounts fall into two types: +> - Token definition accounts: store token metadata such as name and total supply. This account is the token’s identifier. +> - Token holding accounts: store balances and the definition ID they belong to. + +The CLI provides commands to execute the Token program. Run `wallet token` to see the options: + +```bash +Commands: + new Produce a new token + send Send tokens from one account to another with variable privacy + help Print this message or the help of the given subcommand(s) +``` + +## 1. Creating new tokens + +Use `wallet token new` to execute the `New` function of the Token program. The command expects: +- A token name. +- A total supply. +- Two uninitialized accounts: +- One for the token definition account. +- One for the token holding account that receives the initial supply. + +### a. Public definition account and public supply account + +1. Create two new public accounts: + +```bash +wallet account new public + +# Output: +Generated new account with account_id Public/4X9kAcnCZ1Ukkbm3nywW9xfCNPK8XaMWCk3zfs1sP4J7 +``` + +```bash +wallet account new public + +# Output: +Generated new account with account_id Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw +``` + +2. Create the token (Token A): + +```bash +wallet token new \ + --name TOKENA \ + --total-supply 1337 \ + --definition-account-id Public/4X9kAcnCZ1Ukkbm3nywW9xfCNPK8XaMWCk3zfs1sP4J7 \ + --supply-account-id Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw +``` + +3. Inspect the initialized accounts: + +```bash +wallet account get --account-id Public/4X9kAcnCZ1Ukkbm3nywW9xfCNPK8XaMWCk3zfs1sP4J7 + +# Output: +Definition account owned by token program +{"account_type":"Token definition","name":"TOKENA","total_supply":1337} +``` + +```bash +wallet account get --account-id Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw + +# Output: +Holding account owned by token program +{"account_type":"Token holding","definition_id":"4X9kAcnCZ1Ukkbm3nywW9xfCNPK8XaMWCk3zfs1sP4J7","balance":1337} +``` + +### b. Public definition account and private supply account + +1. Create fresh accounts for this example: + +> [!Important] +> You cannot reuse the accounts from the previous example. Create new ones here. + +```bash +wallet account new public + +# Output: +Generated new account with account_id Public/GQ3C8rbprTtQUCvkuVBRu3v9wvUvjafCMFqoSPvTEVii +``` + +```bash +wallet account new private + +# Output: +Generated new account with account_id Private/HMRHZdPw4pbyPVZHNGrV6K5AA95wACFsHTRST84fr3CF +With npk 6a2dfe433cf28e525aa0196d719be3c16146f7ee358ca39595323f94fde38f93 +With ipk 03d59abf4bee974cc12ddb44641c19f0b5441fef39191f047c988c29a77252a577 +``` + +2. Create the token (Token B): + +```bash +wallet token new \ + --name TOKENB \ + --total-supply 7331 \ + --definition-account-id Public/GQ3C8rbprTtQUCvkuVBRu3v9wvUvjafCMFqoSPvTEVii \ + --supply-account-id Private/HMRHZdPw4pbyPVZHNGrV6K5AA95wACFsHTRST84fr3CF +``` + +3. 
Inspect the accounts: + +```bash +wallet account get --account-id Public/GQ3C8rbprTtQUCvkuVBRu3v9wvUvjafCMFqoSPvTEVii + +# Output: +Definition account owned by token program +{"account_type":"Token definition","name":"TOKENB","total_supply":7331} +``` + +```bash +wallet account get --account-id Private/HMRHZdPw4pbyPVZHNGrV6K5AA95wACFsHTRST84fr3CF + +# Output: +Holding account owned by token program +{"account_type":"Token holding","definition_id":"GQ3C8rbprTtQUCvkuVBRu3v9wvUvjafCMFqoSPvTEVii","balance":7331} +``` + +> [!Important] +> As a private account, the supply account is visible only in your local wallet storage. + +## 2. Custom token transfers + +The Token program can move balances between token holding accounts. If the recipient account is uninitialized, the token program will automatically claim it. Use `wallet token send` to execute a transfer. + +### a. Create a recipient account + +```bash +wallet account new public + +# Output: +Generated new account with account_id Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6 +``` + +### b. Send 1000 TOKENB to the recipient + +```bash +wallet token send \ + --from Private/HMRHZdPw4pbyPVZHNGrV6K5AA95wACFsHTRST84fr3CF \ + --to Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6 \ + --amount 1000 +``` + +### c. Inspect the recipient account + +```bash +wallet account get --account-id Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6 + +# Output: +Holding account owned by token program +{"account_type":"Token holding","definition_id":"GQ3C8rbprTtQUCvkuVBRu3v9wvUvjafCMFqoSPvTEVii","balance":1000} +``` diff --git a/LEZ testnet v0.1 tutorials/token-transfer.md b/LEZ testnet v0.1 tutorials/token-transfer.md new file mode 100644 index 00000000..8019ae56 --- /dev/null +++ b/LEZ testnet v0.1 tutorials/token-transfer.md @@ -0,0 +1,250 @@ +This tutorial walks through native token transfers between public and private accounts using the Authenticated-Transfers program. You will create and initialize accounts, fund them with the Pinata program, and run transfers across different privacy combinations. By the end, you will have practiced: +1. Public account creation and initialization. +2. Account funding through the Pinata program. +3. Native token transfers between public accounts. +4. Private account creation. +5. Native token transfer from a public account to a private account. +6. Native token transfer from a public account to a private account owned by someone else. + +--- + +The CLI provides commands to manage accounts. Run `wallet account` to see the options available: +```bash +Commands: + get Get account data + new Produce new public or private account + sync-private Sync private accounts + help Print this message or the help of the given subcommand(s) +``` + +## 1. Public account creation and initialization +> [!Important] +> Public accounts live on-chain and are identified by a 32-byte Account ID. Running `wallet account new public` generates a fresh keypair for the signature scheme used in LEZ. +> The account ID is derived from the public key, and the private key signs transactions and authorizes program executions. +> The CLI can create both public and private accounts. + +### a. New public account creation +```bash +wallet account new public + +# Output: +Generated new account with account_id Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ +``` +> [!Tip] +> Save this account ID. You will use it in later commands. + +### b. 
Account initialization + +To query the account’s current status, run: + +```bash +# Replace the id with yours +wallet account get --account-id Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ + +# Output: +Account is Uninitialized +``` + +In this example, we initialize the account for the authenticated-transfer program, which manages native token transfers and enforces authenticated debits. + +1. Initialize the account: +```bash +# This command submits a public transaction executing the `init` function of the +# authenticated-transfer program. The wallet polls the sequencer until the +# transaction is included in a block, which may take several seconds. +wallet auth-transfer init --account-id Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ +``` + +2. Check the updated account status: +```bash +wallet account get --account-id Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ + +# Output: +Account owned by authenticated-transfer program +{"balance":0} +``` + +> [!NOTE] +> New accounts start uninitialized, meaning no program owns them yet. Any program may claim an uninitialized account; once claimed, that program owns it. +> Owned accounts can only be modified through executions of the owning program. The only exception is native-token credits: any program may credit native tokens to any account. +> Debiting native tokens must always be performed by the owning program. + +## 2. Account funding through the Piñata program +Now that the account is initialized under the authenticated-transfer program, fund it using the testnet Piñata program. + +```bash +# Replace with your id +wallet pinata claim --to Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ +``` + +After the claim succeeds, the account is funded: + +```bash +wallet account get --account-id Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ + +# Output: +Account owned by authenticated-transfer program +{"balance":150} +``` + +## 3. Native token transfers between public accounts +LEZ includes a program for managing native tokens. Run `wallet auth-transfer` to see the available commands: +```bash +Commands: + init Initialize account under the authenticated-transfer program + send Send native tokens from one account to another with variable privacy + help Print this message or the help of the given subcommand(s) +``` + +We already used `init`. Now use `send` to execute a transfer. + +### a. Create a recipient account +```bash +wallet account new public + +# Output: +Generated new account with account_id Public/Ev1JprP9BmhbFVQyBcbznU8bAXcwrzwRoPTetXdQPAWS +``` + +> [!NOTE] +> The new account is uninitialized. The authenticated-transfer program will claim any uninitialized account used in a transfer, so manual initialization isn’t required. + +### b. Send 37 tokens to the new account +```bash +wallet auth-transfer send \ + --from Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ \ + --to Public/Ev1JprP9BmhbFVQyBcbznU8bAXcwrzwRoPTetXdQPAWS \ + --amount 37 +``` + +### c. Check both accounts +```bash +# Sender account (use your sender ID) +wallet account get --account-id Public/HrA8TVjBS8UVf9akV7LRhyh6k4c7F6PS7PvqgtPmKAT8 + +# Output: +Account owned by authenticated-transfer program +{"balance":113} +``` + +```bash +# Recipient account +wallet account get --account-id Public/Ev1JprP9BmhbFVQyBcbznU8bAXcwrzwRoPTetXdQPAWS + +# Output: +Account owned by authenticated-transfer program +{"balance":37} +``` + +## 4.
Private account creation + +> [!Important] +> Private accounts are structurally identical to public accounts, but their values are stored off-chain. On-chain, only a 32-byte commitment is recorded. +> Transactions include encrypted private values so the owner can recover them, and the decryption keys are never shared. +> Private accounts use two keypairs: nullifier keys for privacy-preserving executions and viewing keys for encrypting and decrypting values. +> The private account ID is derived from the nullifier public key. +> Private accounts can be initialized by anyone, but once initialized they can only be modified by the owner’s keys. +> Updates include a new commitment and a nullifier for the old state, which prevents linkage between versions. + +### a. Create a private account + +```bash +wallet account new private + +# Output: +Generated new account with account_id Private/HacPU3hakLYzWtSqUPw6TUr8fqoMieVWovsUR6sJf7cL +With npk e6366f79d026c8bd64ae6b3d601f0506832ec682ab54897f205fffe64ec0d951 +With ipk 02ddc96d0eb56e00ce14994cfdaec5ae1f76244180a919545983156e3519940a17 +``` + +> [!Tip] +> Focus on the account ID for now. The `npk` and `ipk` values are stored locally and used to build privacy-preserving transactions. The private account ID is derived from `npk`. + +Just like public accounts, new private accounts start out uninitialized: + +```bash +wallet account get --account-id Private/HacPU3hakLYzWtSqUPw6TUr8fqoMieVWovsUR6sJf7cL + +# Output: +Account is Uninitialized +``` + +> [!Important] +> Private accounts are never visible to the network. They exist only in your local wallet storage. + +## 5. Native token transfer from a public account to a private account + +> [!Important] +> Sending tokens to an uninitialized private account causes the authenticated-transfer program to claim it, just like with public accounts. Program logic is the same regardless of account type. + +### a. Send 17 tokens to the private account + +> [!Note] +> The syntax matches public-to-public transfers, but the recipient is a private ID. This runs locally, generates a proof, and submits it to the sequencer. It may take 30 seconds to 4 minutes. + +```bash +wallet auth-transfer send \ + --from Public/Ev1JprP9BmhbFVQyBcbznU8bAXcwrzwRoPTetXdQPAWS \ + --to Private/HacPU3hakLYzWtSqUPw6TUr8fqoMieVWovsUR6sJf7cL \ + --amount 17 +``` + +### b. Check both accounts + +```bash +# Public sender account +wallet account get --account-id Public/Ev1JprP9BmhbFVQyBcbznU8bAXcwrzwRoPTetXdQPAWS + +# Output: +Account owned by authenticated-transfer program +{"balance":20} +``` + +```bash +# Private recipient account +wallet account get --account-id Private/HacPU3hakLYzWtSqUPw6TUr8fqoMieVWovsUR6sJf7cL + +# Output: +Account owned by authenticated-transfer program +{"balance":17} +``` + +> [!Note] +> The last command does not query the network. It works offline because private account data is stored locally. Other users cannot read your private balances. + +> [!Caution] +> Private accounts can only be modified by their owner’s keys. The exception is initialization: any user can initialize an uninitialized private account. This enables transfers to a private account owned by someone else, as long as that account is uninitialized. + +## 6. Native token transfer from a public account to a private account owned by someone else + +> [!Important] +> We’ll simulate transferring to someone else by creating a new private account we own and treating it as if it belonged to another user. + +### a. 
Create a new uninitialized private account + +```bash +wallet account new private + +# Output: +Generated new account with account_id Private/AukXPRBmrYVqoqEW2HTs7N3hvTn3qdNFDcxDHVr5hMm5 +With npk 0c95ebc4b3830f53da77bb0b80a276a776cdcf6410932acc718dcdb3f788a00e +With ipk 039fd12a3674a880d3e917804129141e4170d419d1f9e28a3dcf979c1f2369cb72 +``` + +> [!Tip] +> Ignore the private account ID here and use the `npk` and `ipk` values to send to a foreign private account. + +```bash +wallet auth-transfer send \ + --from Public/Ev1JprP9BmhbFVQyBcbznU8bAXcwrzwRoPTetXdQPAWS \ + --to-npk 0c95ebc4b3830f53da77bb0b80a276a776cdcf6410932acc718dcdb3f788a00e \ + --to-ipk 039fd12a3674a880d3e917804129141e4170d419d1f9e28a3dcf979c1f2369cb72 \ + --amount 3 +``` + +> [!Warning] +> This command creates a privacy-preserving transaction, which may take a few minutes. The updated values are encrypted and included in the transaction. +> Once accepted, the recipient must run `wallet account sync-private` to scan the chain for their encrypted updates and refresh local state. + +> [!Note] +> You have seen transfers between two public accounts and from a public sender to a private recipient. Transfers from a private sender, whether to a public account or to another private account, follow the same pattern. diff --git a/LEZ testnet v0.1 tutorials/wallet-setup.md b/LEZ testnet v0.1 tutorials/wallet-setup.md new file mode 100644 index 00000000..e5a43961 --- /dev/null +++ b/LEZ testnet v0.1 tutorials/wallet-setup.md @@ -0,0 +1,26 @@ +This repository includes a CLI for interacting with the Logos Blockchain. To install it, run the following command from the root of the repository: + +```bash +cargo install --path wallet --force +``` + +To check that everything is working, run `wallet help`. + +## Available Wallet Commands + +| Command | Description | +|------------------------|-------------------------------------------------------------| +| `wallet auth-transfer` | Authenticated transfer (init, send) | +| `wallet chain-info` | Chain info queries (current-block-id, block, transaction) | +| `wallet account` | Account management (get, list, new, sync-private) | +| `wallet pinata` | Piñata faucet (claim) | +| `wallet token` | Token operations (new, send) | +| `wallet amm` | AMM operations (new, swap, add-liquidity, remove-liquidity) | +| `wallet check-health` | Checks that the wallet is connected to the node | +| `wallet config` | Config setup (get, set) | +| `wallet restore-keys` | Key restore from a given password at a given `depth` | +| `wallet deploy-program`| Program deployment | +| `wallet help` | Help | + +Some completion scripts exist; see the [completions](./completions/README.md) folder. + diff --git a/README.md b/README.md index f7885be0..70d06fab 100644 --- a/README.md +++ b/README.md @@ -1,70 +1,75 @@ -# Nescience +# Logos Execution Zone (LEZ) + +Logos Execution Zone (LEZ) is a programmable blockchain that cleanly separates public and private state while keeping them fully interoperable. Developers can build apps that operate across transparent and privacy-preserving accounts without changing their logic. Privacy is enforced by the protocol itself through zero-knowledge proofs (ZKPs), so it is always available and automatic. -Nescience State Separation Architecture (NSSA) is a programmable blockchain system that introduces a clean separation between public and private states, while keeping them fully interoperable.
It lets developers build apps that can operate across both transparent and privacy-preserving accounts. Privacy is handled automatically by the protocol through zero-knowledge proofs (ZKPs). The result is a programmable blockchain where privacy comes built-in. ## Background -Typically, public blockchains maintain a fully transparent state, where the mapping from account IDs to account values is entirely visible. In NSSA, we introduce a parallel *private state*, a new layer of accounts that coexists with the public one. The public and private states can be viewed as a partition of the account ID space: accounts with public IDs are openly visible, while private accounts are accessible only to holders of the corresponding viewing keys. Consistency across both states is enforced through zero-knowledge proofs (ZKPs). +These features are provided by the Logos Execution Environment (LEE). Traditional public blockchains expose a fully transparent state: the mapping from account IDs to account values is entirely visible. LEE introduces a parallel *private state* that coexists with the public one. Together, public and private accounts form a partition of the account ID space: public IDs are visible on-chain, while private accounts are accessible only to holders of the corresponding viewing keys. Consistency across both states is enforced by ZKPs. + +Public accounts are stored on-chain as a visible map from IDs to account states, and their values are updated in place. Private accounts are never stored on-chain in raw form. Each update produces a new commitment that binds the current value while keeping it hidden. Previous commitments remain on-chain, but a nullifier set marks old versions as spent, ensuring that only the most recent private state can be used in execution. -Public accounts are represented on-chain as a visible map from IDs to account states and are modified in-place when their values change. Private accounts, by contrast, are never stored in raw form on-chain. Each update creates a new commitment, which cryptographically binds the current value of the account while preserving privacy. Commitments of previous valid versions remain on-chain, but a nullifier set is maintained to mark old versions as spent, ensuring that only the most up-to-date version of each private account can be used in any execution. ### Programmability and selective privacy -Our goal is to enable full programmability within this hybrid model, matching the flexibility and composability of public blockchains. Developers write and deploy programs in NSSA just as they would on any other blockchain. Privacy, along with the ability to execute programs involving any combination of public and private accounts, is handled entirely at the protocol level and available out of the box for all programs. From the program’s perspective, all accounts are indistinguishable. This abstraction allows developers to focus purely on business logic, while the system transparently enforces privacy and consistency guarantees. +LEZ aims to deliver full programmability in a hybrid public/private model, with the same flexibility and composability as public blockchains. Developers write and deploy programs in LEZ just as they would elsewhere. The protocol automatically supports executions that involve any combination of public and private accounts. From the program’s perspective, all accounts look the same, and privacy is enforced transparently. This lets developers focus on business logic while the system guarantees privacy and correctness. 
-To the best of our knowledge, this approach is unique to Nescience. Other programmable blockchains with a focus on privacy typically adopt a developer-driven model for private execution, meaning that dApp logic must explicitly handle private inputs correctly. In contrast, Nescience handles privacy at the protocol level, so developers do not need to modify their programs—private and public accounts are treated uniformly, and privacy-preserving execution is available out of the box. +To our knowledge, this design is unique to LEZ. Other privacy-focused programmable blockchains often require developers to explicitly handle private inputs inside their app logic. In LEZ, privacy is protocol-level: programs do not change, accounts are treated uniformly, and private execution works out of the box. -### Example: creating and transferring tokens across states +--- + +## Example: Creating and transferring tokens across states + +1. Token creation (public execution) + - Alice submits a transaction that executes the token program `New` function on-chain. + - A new public token definition account is created. + - The minted tokens are recorded on-chain in Alice’s public account. -1. Token creation (public execution): - - Alice submits a transaction to execute the token program `New` function on-chain. - - A new public token account is created, representing the token. - - The minted tokens are recorded on-chain and fully visible on Alice's public account. 2. Transfer from public to private (local / privacy-preserving execution) - - Alice executes the token program `Transfer` function locally, specifying a Bob’s private account as recipient. + - Alice runs the token program `Transfer` function locally, sending to Bob’s private account. - A ZKP of correct execution is generated. - - The proof is submitted to the blockchain, and validator nodes verify it. - - Alice's public account balance is modified accordingly. - - Bob’s private account and balance remain hidden, while the transfer is provably valid. + - The proof is submitted to the blockchain and verified by validators. + - Alice’s public balance is updated on-chain. + - Bob’s private balance remains hidden, while the transfer is provably correct. + 3. Transferring private to public (local / privacy-preserving execution) - - Bob executes the token program `Transfer` function locally, specifying a Charlie’s public account as recipient. + - Bob executes the token program `Transfer` function locally, sending to Charlie’s public account. - A ZKP of correct execution is generated. - - Bob’s private account and balance still remain hidden. - - Charlie's public account is modified with the new tokens added. -4. Transferring public to public (public execution): - - Alice submits a transaction to execute the token program `Transfer` function on-chain, specifying Charlie's public account as recipient. - - The execution is handled on-chain without ZKPs involved. - - Alice's and Charlie's accounts are modified according to the transaction. + - Bob’s private balance stays hidden. + - Charlie’s public account is updated on-chain. + +4. Transfer from public to public (public execution) + - Alice submits an on-chain transaction to run `Transfer`, sending to Charlie’s public account. + - Execution is handled fully on-chain without ZKPs. + - Alice’s and Charlie’s public balances are updated. + -#### Key points: -- The same token program is used in all executions. 
-- The difference lies in execution mode: public executions update visible accounts on-chain, while private executions rely on ZKPs. -- Validators only need to verify proofs for privacy-preserving transactions, keeping processing efficient. +### Key points: +- The same token program is used in every execution. +- The only difference is execution mode: public execution updates visible state on-chain, while private execution relies on ZKPs. +- Validators verify proofs only for privacy-preserving transactions, keeping processing efficient. -### The account’s model +--- -To achieve both state separation and full programmability, NSSA adopts a stateless program model. Programs do not hold internal state. Instead, all persistent data resides in accounts explicitly passed to the program during execution. This design enables fine-grained control over access and visibility while maintaining composability across public and private states. +## The account’s model + +To achieve both state separation and full programmability, LEZ uses a stateless program model. Programs hold no internal state. All persistent data is stored in accounts passed explicitly into each execution. This enables precise access control and visibility while preserving composability across public and private states. ### Execution types -Execution is divided into two fundamentally distinct types based on how they are processed: public execution, which is executed transparently on-chain, and private execution, which occurs off-chain. For private execution, the blockchain relies on ZKPs to verify the correctness of execution and ensure that all system invariants are preserved. +LEZ supports two execution types: +- Public execution runs transparently on-chain. +- Private execution runs off-chain and is verified on-chain with ZKPs. -Both public and private executions of the same program are enforced to use the same Risc0 VM bytecode. For public transactions, programs are executed directly on-chain like any standard RISC-V VM execution, without generating or verifying proofs. For privacy-preserving transactions, users generate Risc0 ZKPs of correct execution, and validator nodes only verify these proofs rather than re-executing the program. This design ensures that from a validator’s perspective, public transactions are processed as quickly as any RISC-V–based VM, while verification of ZKPs keeps privacy-preserving transactions efficient as well. Additionally, the system naturally supports parallel execution similar to Solana, further increasing throughput. The main computational bottleneck for privacy-preserving transactions lies on the user side, in generating zk proofs. +Both public and private executions use the same Risc0 VM bytecode. Public transactions are executed directly on-chain like any standard RISC-V VM call, without proof generation. Private transactions are executed locally by users, who generate Risc0 proofs that validators verify instead of re-executing the program. 
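+
+As a hedged illustration of this split (made-up types and function names, not the node's actual code), a validator effectively takes one of two paths per transaction: re-run the bytecode for a public execution, or verify a proof for a private one, never both:
+
+```rust
+// Illustrative only: stands in for the real VM runner and proof verifier.
+enum Execution {
+    /// Public: plain inputs are included and the validator re-runs the
+    /// same VM bytecode directly on-chain.
+    Public { program: Vec<u8>, inputs: Vec<u8> },
+    /// Private: the user already ran the program locally; only a ZK proof
+    /// of correct execution (plus public outputs) is submitted.
+    Private { proof: Vec<u8>, public_outputs: Vec<u8> },
+}
+
+fn validate(exec: &Execution) -> bool {
+    match exec {
+        Execution::Public { program, inputs } => run_vm(program, inputs),
+        Execution::Private { proof, public_outputs } => verify_proof(proof, public_outputs),
+    }
+}
+
+// Placeholders standing in for the actual Risc0 execution and proof verification.
+fn run_vm(_program: &[u8], _inputs: &[u8]) -> bool { true }
+fn verify_proof(_proof: &[u8], _outputs: &[u8]) -> bool { true }
+```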
-### Resources -- [IFT Research call](https://forum.vac.dev/t/ift-research-call-september-10th-2025-updates-on-the-development-of-nescience/566) -- [NSSA v0.2 specs](https://www.notion.so/NSSA-v0-2-specifications-2848f96fb65c800c9818e6f66d9be8f2) -- [Choice of VM/zkVM](https://www.notion.so/Conclusion-on-the-chosen-VM-and-zkVM-for-NSSA-2318f96fb65c806a810ed1300f56992d) -- [NSSA vs other privacy projects](https://www.notion.so/Privacy-projects-comparison-2688f96fb65c8096b694ecf7e4deca30) -- [NSSA state model](https://www.notion.so/Public-state-model-decision-2388f96fb65c80758b20c76de07b1fcc) -- [NSSA sequencer specs](https://www.notion.so/Sequencer-specs-2428f96fb65c802da2bfea7b0b214ecb) -- [NSSA sequencer code](https://www.notion.so/NSSA-sequencer-pseudocode-2508f96fb65c805e8859e047dffd6785) -- [NSSA Token program desing](https://www.notion.so/Token-program-design-2538f96fb65c80a1b4bdc4fd9dd162d7) -- [NSSA cross program calls](https://www.notion.so/NSSA-cross-program-calls-Tail-call-model-proposal-extended-version-2838f96fb65c8096b3a2d390444193b6) +This design keeps public transactions as fast as any RISC-V–based VM and makes private transactions efficient for validators. It also supports parallel execution similar to Solana, improving throughput. The main computational cost for privacy-preserving transactions is on the user side, where ZK proofs are generated. +--- +--- # Install dependencies -Install build dependencies +### Install build dependencies - On Linux Ubuntu / Debian @@ -72,7 +77,7 @@ Ubuntu / Debian apt install build-essential clang libclang-dev libssl-dev pkg-config ``` -Fedora +- On Fedora ```sh sudo dnf install clang clang-devel openssl-devel pkgconf ``` @@ -83,25 +88,26 @@ xcode-select --install brew install pkg-config openssl ``` -Install Rust +### Install Rust + ```sh curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh ``` -Install Risc0 +### Install Risc0 ```sh curl -L https://risczero.com/install | bash ``` -Then restart your shell and run +### Then restart your shell and run ```sh rzup install ``` # Run tests -The NSSA repository includes both unit and integration test suites. +The LEZ repository includes both unit and integration test suites. ### Unit tests @@ -119,635 +125,23 @@ cd integration_tests RUST_LOG=info RISC0_DEV_MODE=1 cargo run $(pwd)/configs/debug all ``` -# Run the sequencer +# Run the sequencer and node -The sequencer can be run locally: +The sequencer and node can be run locally: -```bash -cd sequencer_runner -RUST_LOG=info cargo run --release configs/debug -``` + 1. 
On one terminal go to the `logos-blockchain/logos-blockchain` repo and run a local logos blockchain node: + - `git checkout master; git pull` + - `cargo clean` + - `rm ~/.logos-blockchain-circuits` + - `./scripts/setup-logos-blockchain-circuits.sh` + - `cargo build --all-features` + - `./target/debug/logos-blockchain-node nodes/node/config-one-node.yaml` -If everything went well you should see an output similar to this: -```bash -[2025-11-13T19:50:29Z INFO sequencer_runner] Sequencer core set up -[2025-11-13T19:50:29Z INFO network] Starting http server at 0.0.0.0:3040 -[2025-11-13T19:50:29Z INFO actix_server::builder] starting 8 workers -[2025-11-13T19:50:29Z INFO sequencer_runner] HTTP server started -[2025-11-13T19:50:29Z INFO sequencer_runner] Starting main sequencer loop -[2025-11-13T19:50:29Z INFO actix_server::server] Tokio runtime found; starting in existing Tokio runtime -[2025-11-13T19:50:29Z INFO actix_server::server] starting service: "actix-web-service-0.0.0.0:3040", workers: 8, listening on: 0.0.0.0:3040 -[2025-11-13T19:50:39Z INFO sequencer_runner] Collecting transactions from mempool, block creation -[2025-11-13T19:50:39Z INFO sequencer_core] Created block with 0 transactions in 0 seconds -[2025-11-13T19:50:39Z INFO sequencer_runner] Block with id 2 created -[2025-11-13T19:50:39Z INFO sequencer_runner] Waiting for new transactions -``` + 2. On another terminal go to the `logos-blockchain/lssa` repo and run indexer service: + - `git checkout schouhy/full-bedrock-integration` + - `RUST_LOG=info cargo run --release -p indexer_service $(pwd)/integration_tests/configs/indexer/indexer_config.json` -# Try the Wallet CLI - -## Install - -This repository includes a CLI for interacting with the Nescience sequencer. To install it, run the following command from the root of the repository: - -```bash -cargo install --path wallet --force -``` - -Run `wallet help` to check everything went well. - -Some completion scripts exists, see the [completions](./completions/README.md) folder. - -## Tutorial - -This tutorial walks you through creating accounts and executing NSSA programs in both public and private contexts. - -> [!NOTE] -> The NSSA state is split into two separate but interconnected components: the public state and the private state. -> The public state is an on-chain, publicly visible record of accounts indexed by their Account IDs -> The private state mirrors this, but the actual account values are stored locally by each account owner. On-chain, only a hidden commitment to each private account state is recorded. This allows the chain to enforce freshness (i.e., prevent the reuse of stale private states) while preserving privacy and unlinkability across executions and private accounts. -> -> Every piece of state in NSSA is stored in an account (public or private). Accounts are either uninitialized or are owned by a program, and programs can only modify the accounts they own. -> -> In NSSA, accounts can only be modified through program execution. A program is the sole mechanism that can change an account’s value. -> Programs run publicly when all involved accounts are public, and privately when at least one private account participates. - -### Health-check - -Verify that the node is running and that the wallet can connect to it: - -```bash -wallet check-health -``` - -You should see `✅ All looks good!`. - -### The commands - -The wallet provides several commands to interact with the node and query state. 
To see the full list, run `wallet help`: - -```bash -Commands: - auth-transfer Authenticated transfer subcommand - chain-info Generic chain info subcommand - account Account view and sync subcommand - pinata Pinata program interaction subcommand - token Token program interaction subcommand - amm AMM program interaction subcommand - check-health Check the wallet can connect to the node and builtin local programs match the remote versions -``` - -### Accounts - -> [!NOTE] -> Accounts are the basic unit of state in NSSA. They essentially hold native tokens and arbitrary data managed by some program. - -The CLI provides commands to manage accounts. Run `wallet account` to see the options available: -```bash -Commands: - get Get account data - new Produce new public or private account - sync-private Sync private accounts - help Print this message or the help of the given subcommand(s) -``` - -#### Create a new public account - -You can create both public and private accounts through the CLI. For example: - -```bash -wallet account new public - -# Output: -Generated new account with account_id Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ -``` - -This id is required when executing any program that interacts with the account. - -> [!NOTE] -> Public accounts live on-chain and are identified by a 32-byte Account ID. -> Running `wallet account new public` generates a fresh keypair for the signature scheme used in NSSA. -> The account ID is derived from the public key. The private key is used to sign transactions and to authorize the account in program executions. - -#### Account initialization - -To query the account’s current status, run: - -```bash -# Replace the id with yours -wallet account get --account-id Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ - -# Output: -Account is Uninitialized -``` - -> [!NOTE] -> New accounts begin in an uninitialized state, meaning they are not yet owned by any program. A program may claim an uninitialized account; once claimed, the account becomes owned by that program. -> Owned accounts can only be modified through executions of the owning program. The only exception is native-token credits: any program may credit native tokens to any account. -> However, debiting native tokens from an account must always be performed by its owning program. - -In this example, we will initialize the account for the Authenticated transfer program, which securely manages native token transfers by requiring authentication for debits. - -Initialize the account by running: - -```bash -# This command submits a public transaction executing the `init` function of the -# Authenticated-transfer program. The wallet polls the sequencer until the -# transaction is included in a block, which may take several seconds. -wallet auth-transfer init --account-id Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ -``` - -After it completes, check the updated account status: - -```bash -wallet account get --account-id Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ - -# Output: -Account owned by authenticated transfer program -{"balance":0} -``` - -### Funding the account: executing the Piñata program - -Now that we have a public account initialized by the authenticated transfer program, we need to fund it. For that, the testnet provides the Piñata program. 
- -```bash -# Complete with your id -wallet pinata claim --to Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ -``` - -After the claim succeeds, the account will be funded with some tokens: - -```bash -wallet account get --account-id Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ - -# Output: -Account owned by authenticated transfer program -{"balance":150} -``` - -### Native token transfers: executing the Authenticated transfers program - -NSSA comes with a program for managing and transferring native tokens. Run `wallet auth-transfer` to see the options available: -```bash -Commands: - init Initialize account under authenticated transfer program - send Send native tokens from one account to another with variable privacy - help Print this message or the help of the given subcommand(s) -``` - -We have already used the `init` command. The `send` command is used to execute the `Transfer` function of the authenticated program. -Let's try it. For that we need to create another account for the recipient of the transfer. - -```bash -wallet account new public - -# Output: -Generated new account with account_id Public/Ev1JprP9BmhbFVQyBcbznU8bAXcwrzwRoPTetXdQPAWS -``` - - -> [!NOTE] -> The new account is uninitialized. The authenticated transfers program will claim any uninitialized account used in a transfer. So we don't need to manually initialize the recipient account. - -Let's send 37 tokens to the new account. - -```bash -wallet auth-transfer send \ - --from Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ \ - --to Public/Ev1JprP9BmhbFVQyBcbznU8bAXcwrzwRoPTetXdQPAWS \ - --amount 37 -``` - -Once that succeeds we can check the states. - -```bash -# Sender account -wallet account get --account-id Public/HrA8TVjBS8UVf9akV7LRhyh6k4c7F6PS7PvqgtPmKAT8 - -# Output: -Account owned by authenticated transfer program -{"balance":113} -``` - -```bash -# Recipient account -wallet account get --account-id Public/Ev1JprP9BmhbFVQyBcbznU8bAXcwrzwRoPTetXdQPAWS - -# Output: -Account owned by authenticated transfer program -{"balance":37} -``` - -#### Create a new private account - -> [!NOTE] -> Private accounts are structurally identical to public accounts; they differ only in how their state is stored off-chain and represented on-chain. -> The raw values of a private account are never stored on-chain. Instead, the chain only holds a 32-byte commitment (a hash-like binding to the actual values). Transactions include encrypted versions of the private values so that users can recover them from the blockchain. The decryption keys are known only to the user and are never shared. -> Private accounts are not managed through the usual signature mechanism used for public accounts. Instead, each private account is associated with two keypairs: -> - *Nullifier keys*, for using the corresponding private account in privacy preserving executions. -> - *Viewing keys*, used for encrypting and decrypting the values included in transactions. -> -> Private accounts also have a 32-byte identifier, derived from the nullifier public key. -> -> Just like public accounts, private accounts can only be initialized once. Any user can initialize them without knowing the owner's secret keys. However, modifying an initialized private account through an off-chain program execution requires knowledge of the owner’s secret keys. -> -> Transactions that modify the values of a private account include a commitment to the new values, which will be added to the on-chain commitment set. 
They also include a nullifier that marks the previous version as old. -> The nullifier is constructed so that it cannot be linked to any prior commitment, ensuring that updates to the same private account cannot be correlated. - -Now let’s switch to the private state and create a private account. - -```bash -wallet account new private - -# Output: -Generated new account with account_id Private/HacPU3hakLYzWtSqUPw6TUr8fqoMieVWovsUR6sJf7cL -With npk e6366f79d026c8bd64ae6b3d601f0506832ec682ab54897f205fffe64ec0d951 -With ipk 02ddc96d0eb56e00ce14994cfdaec5ae1f76244180a919545983156e3519940a17 -``` - -For now, focus only on the account id. Ignore the `npk` and `ipk` values. These are the Nullifier public key and the Viewing public key. They are stored locally in the wallet and are used internally to build privacy-preserving transactions. -Also, the account id for private accounts is derived from the `npk` value. But we won't need them now. - -Just like public accounts, new private accounts start out uninitialized: - -```bash -wallet account get --account-id Private/HacPU3hakLYzWtSqUPw6TUr8fqoMieVWovsUR6sJf7cL - -# Output: -Account is Uninitialized -``` -Unlike public accounts, private accounts are never visible to the network. They exist only in your local wallet storage. - -#### Sending tokens from the public account to the private account - -Sending tokens to an uninitialized private account causes the Authenticated-Transfers program to claim it. Just like with public accounts. -This happens because program execution logic does not depend on whether the involved accounts are public or private. - -Let’s send 17 tokens to the new private account. - -The syntax is identical to the public-to-public transfer; just set the private ID as the recipient. - -This command will run the Authenticated-Transfer program locally, generate a proof, and submit it to the sequencer. Depending on your machine, this can take from 30 seconds to 4 minutes. - -```bash -wallet auth-transfer send \ - --from Public/Ev1JprP9BmhbFVQyBcbznU8bAXcwrzwRoPTetXdQPAWS \ - --to Private/HacPU3hakLYzWtSqUPw6TUr8fqoMieVWovsUR6sJf7cL \ - --amount 17 -``` - -After it succeeds, check both accounts: - -```bash -# Public sender account -wallet account get --account-id Public/Ev1JprP9BmhbFVQyBcbznU8bAXcwrzwRoPTetXdQPAWS - -# Output: -Account owned by authenticated transfer program -{"balance":20} -``` - -```bash -# Private recipient account -wallet account get --account-id Private/HacPU3hakLYzWtSqUPw6TUr8fqoMieVWovsUR6sJf7cL - -# Output: -Account owned by authenticated transfer program -{"balance":17} -``` - -> [!NOTE] -> The last command does not query the network. -> It works even offline because private account data lives only in your wallet storage. Other users cannot read your private balances. - -#### Digression: modifying private accounts - -As a general rule, private accounts can only be modified through a program execution performed by their owner. That is, the person who holds the private key for that account. There is one exception: an uninitialized private account may be initialized by any user, without requiring the private key. After initialization, only the owner can modify it. - -This mechanism enables a common use case: transferring funds from any account (public or private) to a private account owned by someone else. For such transfers, the recipient’s private account must be uninitialized. 
- - -#### Sending tokens from the public account to a private account owned by someone else - -For this tutorial, we’ll simulate that scenario by creating a new private account that we own, but we’ll treat it as if it belonged to someone else. - -Let's create a new (uninitialized) private account like before: - -```bash -wallet account new private - -# Output: -Generated new account with account_id Private/AukXPRBmrYVqoqEW2HTs7N3hvTn3qdNFDcxDHVr5hMm5 -With npk 0c95ebc4b3830f53da77bb0b80a276a776cdcf6410932acc718dcdb3f788a00e -With ipk 039fd12a3674a880d3e917804129141e4170d419d1f9e28a3dcf979c1f2369cb72 -``` - -Now we'll ignore the private account ID and focus on the `npk` and `ipk` values. We'll need this to send tokens to a foreign private account. Syntax is very similar. - -```bash -wallet auth-transfer send \ - --from Public/Ev1JprP9BmhbFVQyBcbznU8bAXcwrzwRoPTetXdQPAWS \ - --to-npk 0c95ebc4b3830f53da77bb0b80a276a776cdcf6410932acc718dcdb3f788a00e \ - --to-ipk 039fd12a3674a880d3e917804129141e4170d419d1f9e28a3dcf979c1f2369cb72 \ - --amount 3 -``` - -The command above produces a privacy-preserving transaction, which may take a few minutes to complete. The updated values of the private account are encrypted and included in the transaction. - -Once the transaction is accepted, the recipient must run `wallet account sync-private`. This command scans the chain for encrypted values that belong to their private accounts and updates the local versions accordingly. - - -#### Transfers in other combinations of public and private accounts - -We’ve shown how to use the authenticated-transfers program for transfers between two public accounts, and for transfers from a public sender to a private recipient. Sending tokens from a private account (whether to a public account or to another private account) works in essentially the same way. - -### The token program - -So far, we’ve made transfers using the authenticated-transfers program, which handles native token transfers. The Token program, on the other hand, is used for creating and managing custom tokens. - -> [!NOTE] -> The token program is a single program responsible for creating and managing all tokens. There is no need to deploy new programs to introduce new tokens. All token-related operations are performed by invoking the appropriate functions of the token program. - -The CLI provides commands to execute the token program. To see the options available run `wallet token`: - -```bash -Commands: - new Produce a new token - send Send tokens from one account to another with variable privacy - help Print this message or the help of the given subcommand(s) -``` - - -> [!NOTE] -> The Token program manages its accounts in two categories. Meaning, all accounts owned by the Token program fall into one of these types. -> - Token definition accounts: these accounts store metadata about a token, such as its name, total supply, and other identifying properties. They act as the token’s unique identifier. -> - Token holding accounts: these accounts hold actual token balances. In addition to the balance, they also record which token definition they belong to. - -#### Creating a new token - -To create a new token, simply run `wallet token new`. This will create a transaction to execute the `New` function of the token program. -The command expects a name, the desired total supply, and two uninitialized accounts: -- One that will be initialized as the token definition account for the new token. 
-- Another that will be initialized as a token holding account and receive the token’s entire initial supply. - - -##### New token with both definition and supply accounts set as public - -For example, let's create two new (uninitialized) public accounts and then use them to create a new token. - -```bash -wallet account new public - -# Output: -Generated new account with account_id Public/4X9kAcnCZ1Ukkbm3nywW9xfCNPK8XaMWCk3zfs1sP4J7 -``` - -```bash -wallet account new public - -# Output: -Generated new account with account_id Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw -``` - -Now we use them to create a new token. Let's call it the "Token A" - -```bash -wallet token new \ - --name TOKENA \ - --total-supply 1337 \ - --definition-account-id Public/4X9kAcnCZ1Ukkbm3nywW9xfCNPK8XaMWCk3zfs1sP4J7 \ - --supply-account-id Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw -``` - -After it succeeds, we can inspect the two accounts to see how they were initialized. - -```bash -wallet account get --account-id Public/4X9kAcnCZ1Ukkbm3nywW9xfCNPK8XaMWCk3zfs1sP4J7 - -# Output: -Definition account owned by token program -{"account_type":"Token definition","name":"TOKENA","total_supply":1337} -``` - -```bash -wallet account get --account-id Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw - -# Output: -Holding account owned by token program -{"account_type":"Token holding","definition_id":"4X9kAcnCZ1Ukkbm3nywW9xfCNPK8XaMWCk3zfs1sP4J7","balance":1337} -``` - -##### New token with public account definition but private holding account for initial supply - -Let’s create a new token, but this time using a public definition account and a private holding account to store the entire supply. - -Since we can’t reuse the accounts from the previous example, we need to create fresh ones for this case. - -```bash -wallet account new public - -# Output: -Generated new account with account_id Public/GQ3C8rbprTtQUCvkuVBRu3v9wvUvjafCMFqoSPvTEVii -``` - -```bash -wallet account new private - - -# Output: -Generated new account with account_id Private/HMRHZdPw4pbyPVZHNGrV6K5AA95wACFsHTRST84fr3CF -With npk 6a2dfe433cf28e525aa0196d719be3c16146f7ee358ca39595323f94fde38f93 -With ipk 03d59abf4bee974cc12ddb44641c19f0b5441fef39191f047c988c29a77252a577 -``` - -And we use them to create the token. - -Now we use them to create a new token. Let's call it "Token B". - -```bash -wallet token new \ - --name TOKENB \ - --total-supply 7331 \ - --definition-account-id Public/GQ3C8rbprTtQUCvkuVBRu3v9wvUvjafCMFqoSPvTEVii \ - --supply-account-id Private/HMRHZdPw4pbyPVZHNGrV6K5AA95wACFsHTRST84fr3CF -``` - -After it succeeds, we can check their values - -```bash -wallet account get --account-id Public/GQ3C8rbprTtQUCvkuVBRu3v9wvUvjafCMFqoSPvTEVii - -# Output: -Definition account owned by token program -{"account_type":"Token definition","name":"TOKENB","total_supply":7331} -``` - -```bash -wallet account get --account-id Private/HMRHZdPw4pbyPVZHNGrV6K5AA95wACFsHTRST84fr3CF - -# Output: -Holding account owned by token program -{"account_type":"Token holding","definition_id":"GQ3C8rbprTtQUCvkuVBRu3v9wvUvjafCMFqoSPvTEVii","balance":7331} -``` - -Like any other private account owned by us, it cannot be seen by other users. - -#### Custom token transfers - -The Token program has a function to move funds from one token holding account to another one. If executed with an uninitialized account as the recipient, this will be automatically claimed by the token program. 
- -The transfer function can be executed with the `wallet token send` command. - -Let's create a new public account for the recipient. - -```bash -wallet account new public - -# Output: -Generated new account with account_id Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6 -``` - -Let's send 1000 B tokens to this new account. We'll debit this from the supply account used in the creation of the token. - -```bash -wallet token send \ - --from Private/HMRHZdPw4pbyPVZHNGrV6K5AA95wACFsHTRST84fr3CF \ - --to Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6 \ - --amount 1000 -``` - -Let's inspect the public account: - -```bash -wallet account get --account-id Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6 - -# Output: -Holding account owned by token program -{"account_type":"Token holding","definition_id":"GQ3C8rbprTtQUCvkuVBRu3v9wvUvjafCMFqoSPvTEVii","balance":1000} -``` - -### Chain information - -The wallet provides some commands to query information about the chain. These are under the `wallet chain-info` command. - -```bash -Commands: - current-block-id Get current block id from sequencer - block Get block at id from sequencer - transaction Get transaction at hash from sequencer -``` - -For example, run this to find the current block id. - -```bash -wallet chain-info current-block-id - -# Output: -Last block id is 65537 -``` - - -### Automated Market Maker (AMM) - -NSSA includes an AMM program that manages liquidity pools and enables swaps between custom tokens. To test this functionality, we first need to create a liquidity pool. - -#### Creating a liquidity pool for a token pair - -We start by creating a new pool for the tokens previously created. In return for providing liquidity, we will receive liquidity provider (LP) tokens, which represent our share of the pool and are required to withdraw liquidity later. - ->[!NOTE] -> The AMM program does not currently charge swap fees or distribute rewards to liquidity providers. LP tokens therefore only represent a proportional share of the pool reserves and do not provide additional value from swap activity. Fee support for liquidity providers will be added in future versions of the AMM program. - -To hold these LP tokens, we first create a new account: - -```bash -wallet account new public - -# Output: -Generated new account with account_id Public/FHgLW9jW4HXMV6egLWbwpTqVAGiCHw2vkg71KYSuimVf -``` - -Next, we initialize the liquidity pool by depositing tokens A and B and specifying the account that will receive the LP tokens: - -```bash -wallet amm new \ - --user-holding-a Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw \ - --user-holding-b Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6 \ - --user-holding-lp Public/FHgLW9jW4HXMV6egLWbwpTqVAGiCHw2vkg71KYSuimVf \ - --balance-a 100 \ - --balance-b 200 -``` - -The newly created account is owned by the token program, meaning that LP tokens are managed by the same token infrastructure as regular tokens. - -```bash -wallet account get --account-id Public/FHgLW9jW4HXMV6egLWbwpTqVAGiCHw2vkg71KYSuimVf - -# Output: -Holding account owned by token program -{"account_type":"Token holding","definition_id":"7BeDS3e28MA5Err7gBswmR1fUKdHXqmUpTefNPu3pJ9i","balance":100} -``` - -If you inspect the `user-holding-a` and `user-holding-b` accounts passed to the `wallet amm new` command, you will see that 100 and 200 tokens were deducted, respectively. These tokens now reside in the liquidity pool and are available for swaps by any user. 
- - -#### Swaping - -Token swaps can be performed using the wallet amm swap command: - -```bash -wallet amm swap \ - --user-holding-a Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw \ - --user-holding-b Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6 \ - # The amount of tokens to swap - --amount-in 5 \ - # The minimum number of tokens expected in return - --min-amount-out 8 \ - # The definition ID of the token being provided to the swap - # In this case, we are swapping from TOKENA to TOKENB, and so this is the definition ID of TOKENA - --token-definition 4X9kAcnCZ1Ukkbm3nywW9xfCNPK8XaMWCk3zfs1sP4J7 -``` - -Once executed, 5 tokens are deducted from the Token A holding account and the corresponding amount (determined by the pool’s pricing function) is credited to the Token B holding account. - - -#### Withdrawing liquidity from the pool - -Liquidity providers can withdraw assets from the pool by redeeming (burning) LP tokens. The amount of tokens received is proportional to the share of LP tokens being redeemed relative to the total LP supply. - -This operation is performed using the `wallet amm remove-liquidity` command: - -```bash -wallet amm remove-liquidity \ - --user-holding-a Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw \ - --user-holding-b Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6 \ - --user-holding-lp Public/FHgLW9jW4HXMV6egLWbwpTqVAGiCHw2vkg71KYSuimVf \ - --balance-lp 20 \ - --min-amount-a 1 \ - --min-amount-b 1 -``` - -This instruction burns `balance-lp` LP tokens from the user’s LP holding account. In exchange, the AMM transfers tokens A and B from the pool’s vault accounts to the user’s holding accounts, according to the current pool reserves. - -The `min-amount-a` and `min-amount-b` parameters specify the minimum acceptable amounts of tokens A and B to be received. If the computed outputs fall below either threshold, the instruction fails, protecting the user against unfavorable pool state changes. - -#### Adding liquidity to the pool - -Additional liquidity can be added to an existing pool by depositing tokens A and B in the ratio implied by the current pool reserves. In return, new LP tokens are minted to represent the user’s proportional share of the pool. - -This is done using the `wallet amm add-liquidity` command: - -```bash -wallet amm add-liquidity \ - --user-holding-a Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw \ - --user-holding-b Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6 \ - --user-holding-lp Public/FHgLW9jW4HXMV6egLWbwpTqVAGiCHw2vkg71KYSuimVf \ - --min-amount-lp 1 \ - --max-amount-a 10 \ - --max-amount-b 10 -``` - -In this instruction, `max-amount-a` and `max-amount-b` define upper bounds on the number of tokens A and B that may be withdrawn from the user’s accounts. The AMM computes the actual required amounts based on the pool’s reserve ratio. - -The `min-amount-lp` parameter specifies the minimum number of LP tokens that must be minted for the transaction to succeed. If the resulting LP token amount is below this threshold, the instruction fails. + 3. 
On another terminal go to the `logos-blockchain/lssa` repo and run the sequencer: + - `git checkout schouhy/full-bedrock-integration` + - `RUST_LOG=info RISC0_DEV_MODE=1 cargo run --release -p sequencer_runner sequencer_runner/configs/debug` diff --git a/artifacts/program_methods/amm.bin b/artifacts/program_methods/amm.bin index 1a2fabca..e7f89d53 100644 Binary files a/artifacts/program_methods/amm.bin and b/artifacts/program_methods/amm.bin differ diff --git a/artifacts/program_methods/authenticated_transfer.bin b/artifacts/program_methods/authenticated_transfer.bin index 73d5fec1..480ea8d0 100644 Binary files a/artifacts/program_methods/authenticated_transfer.bin and b/artifacts/program_methods/authenticated_transfer.bin differ diff --git a/artifacts/program_methods/pinata.bin b/artifacts/program_methods/pinata.bin index 278e88f4..87f779f8 100644 Binary files a/artifacts/program_methods/pinata.bin and b/artifacts/program_methods/pinata.bin differ diff --git a/artifacts/program_methods/pinata_token.bin b/artifacts/program_methods/pinata_token.bin index 4a4c8bb6..460dbee0 100644 Binary files a/artifacts/program_methods/pinata_token.bin and b/artifacts/program_methods/pinata_token.bin differ diff --git a/artifacts/program_methods/privacy_preserving_circuit.bin b/artifacts/program_methods/privacy_preserving_circuit.bin index d8f5915e..edd0f126 100644 Binary files a/artifacts/program_methods/privacy_preserving_circuit.bin and b/artifacts/program_methods/privacy_preserving_circuit.bin differ diff --git a/artifacts/program_methods/token.bin b/artifacts/program_methods/token.bin index 5f6d3781..f091c55e 100644 Binary files a/artifacts/program_methods/token.bin and b/artifacts/program_methods/token.bin differ diff --git a/artifacts/test_program_methods/burner.bin b/artifacts/test_program_methods/burner.bin index 96e339c2..bce7959d 100644 Binary files a/artifacts/test_program_methods/burner.bin and b/artifacts/test_program_methods/burner.bin differ diff --git a/artifacts/test_program_methods/chain_caller.bin b/artifacts/test_program_methods/chain_caller.bin index 731d2dc7..2f35f7f8 100644 Binary files a/artifacts/test_program_methods/chain_caller.bin and b/artifacts/test_program_methods/chain_caller.bin differ diff --git a/artifacts/test_program_methods/changer_claimer.bin b/artifacts/test_program_methods/changer_claimer.bin index 692d152b..baf09a17 100644 Binary files a/artifacts/test_program_methods/changer_claimer.bin and b/artifacts/test_program_methods/changer_claimer.bin differ diff --git a/artifacts/test_program_methods/claimer.bin b/artifacts/test_program_methods/claimer.bin index da0a9bef..37eb35d0 100644 Binary files a/artifacts/test_program_methods/claimer.bin and b/artifacts/test_program_methods/claimer.bin differ diff --git a/artifacts/test_program_methods/data_changer.bin b/artifacts/test_program_methods/data_changer.bin index 86fde894..457e1fd1 100644 Binary files a/artifacts/test_program_methods/data_changer.bin and b/artifacts/test_program_methods/data_changer.bin differ diff --git a/artifacts/test_program_methods/extra_output.bin b/artifacts/test_program_methods/extra_output.bin index 1ae1cd98..cccc75dd 100644 Binary files a/artifacts/test_program_methods/extra_output.bin and b/artifacts/test_program_methods/extra_output.bin differ diff --git a/artifacts/test_program_methods/malicious_authorization_changer.bin b/artifacts/test_program_methods/malicious_authorization_changer.bin index 8f80ab58..791c749f 100644 Binary files 
a/artifacts/test_program_methods/malicious_authorization_changer.bin and b/artifacts/test_program_methods/malicious_authorization_changer.bin differ diff --git a/artifacts/test_program_methods/minter.bin b/artifacts/test_program_methods/minter.bin index 50199403..81a79926 100644 Binary files a/artifacts/test_program_methods/minter.bin and b/artifacts/test_program_methods/minter.bin differ diff --git a/artifacts/test_program_methods/missing_output.bin b/artifacts/test_program_methods/missing_output.bin index 4994ae3f..6c62c017 100644 Binary files a/artifacts/test_program_methods/missing_output.bin and b/artifacts/test_program_methods/missing_output.bin differ diff --git a/artifacts/test_program_methods/modified_transfer.bin b/artifacts/test_program_methods/modified_transfer.bin index 796de2d3..801761d8 100644 Binary files a/artifacts/test_program_methods/modified_transfer.bin and b/artifacts/test_program_methods/modified_transfer.bin differ diff --git a/artifacts/test_program_methods/nonce_changer.bin b/artifacts/test_program_methods/nonce_changer.bin index 017de8b3..305a0d0e 100644 Binary files a/artifacts/test_program_methods/nonce_changer.bin and b/artifacts/test_program_methods/nonce_changer.bin differ diff --git a/artifacts/test_program_methods/noop.bin b/artifacts/test_program_methods/noop.bin index 23195ef2..667e575d 100644 Binary files a/artifacts/test_program_methods/noop.bin and b/artifacts/test_program_methods/noop.bin differ diff --git a/artifacts/test_program_methods/program_owner_changer.bin b/artifacts/test_program_methods/program_owner_changer.bin index db1f87fa..8c0c80b6 100644 Binary files a/artifacts/test_program_methods/program_owner_changer.bin and b/artifacts/test_program_methods/program_owner_changer.bin differ diff --git a/artifacts/test_program_methods/simple_balance_transfer.bin b/artifacts/test_program_methods/simple_balance_transfer.bin index 17c95475..e0e1ffed 100644 Binary files a/artifacts/test_program_methods/simple_balance_transfer.bin and b/artifacts/test_program_methods/simple_balance_transfer.bin differ diff --git a/bedrock_client/Cargo.toml b/bedrock_client/Cargo.toml index 50a54815..fec9f1c0 100644 --- a/bedrock_client/Cargo.toml +++ b/bedrock_client/Cargo.toml @@ -2,9 +2,15 @@ name = "bedrock_client" version = "0.1.0" edition = "2024" +license = { workspace = true } [dependencies] reqwest.workspace = true anyhow.workspace = true +tokio-retry.workspace = true +futures.workspace = true +log.workspace = true +serde.workspace = true logos-blockchain-common-http-client.workspace = true logos-blockchain-core.workspace = true +logos-blockchain-chain-broadcast-service.workspace = true diff --git a/bedrock_client/src/lib.rs b/bedrock_client/src/lib.rs index 530fdfc2..b34687c3 100644 --- a/bedrock_client/src/lib.rs +++ b/bedrock_client/src/lib.rs @@ -1,10 +1,24 @@ use anyhow::Result; +use futures::{Stream, TryFutureExt}; +use log::warn; +pub use logos_blockchain_chain_broadcast_service::BlockInfo; pub use logos_blockchain_common_http_client::{BasicAuthCredentials, CommonHttpClient, Error}; -use logos_blockchain_core::mantle::SignedMantleTx; +pub use logos_blockchain_core::{block::Block, header::HeaderId, mantle::SignedMantleTx}; use reqwest::{Client, Url}; +use serde::{Deserialize, Serialize}; +use tokio_retry::Retry; + +/// Fibonacci backoff retry strategy configuration +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct BackoffConfig { + pub start_delay_millis: u64, + pub max_retries: usize, +} // Simple wrapper // maybe extend in the future for our 
purposes +// `Clone` is cheap because `CommonHttpClient` is internally reference counted (`Arc`). +#[derive(Clone)] pub struct BedrockClient { http_client: CommonHttpClient, node_url: Url, @@ -29,4 +43,25 @@ impl BedrockClient { .post_transaction(self.node_url.clone(), tx) .await } + + pub async fn get_lib_stream(&self) -> Result, Error> { + self.http_client.get_lib_stream(self.node_url.clone()).await + } + + pub async fn get_block_by_id( + &self, + header_id: HeaderId, + backoff: &BackoffConfig, + ) -> Result>, Error> { + let strategy = + tokio_retry::strategy::FibonacciBackoff::from_millis(backoff.start_delay_millis) + .take(backoff.max_retries); + + Retry::spawn(strategy, || { + self.http_client + .get_block_by_id(self.node_url.clone(), header_id) + .inspect_err(|err| warn!("Block fetching failed with err: {err:#?}")) + }) + .await + } } diff --git a/common/Cargo.toml b/common/Cargo.toml index a6e26fad..09cb10fa 100644 --- a/common/Cargo.toml +++ b/common/Cargo.toml @@ -2,6 +2,7 @@ name = "common" version = "0.1.0" edition = "2024" +license = { workspace = true } [dependencies] nssa.workspace = true @@ -17,3 +18,5 @@ log.workspace = true hex.workspace = true borsh.workspace = true base64.workspace = true +url.workspace = true +logos-blockchain-common-http-client.workspace = true diff --git a/common/src/block.rs b/common/src/block.rs index 84b7a419..391bc57d 100644 --- a/common/src/block.rs +++ b/common/src/block.rs @@ -4,6 +4,7 @@ use sha2::{Digest, Sha256, digest::FixedOutput}; use crate::transaction::EncodedTransaction; pub type HashType = [u8; 32]; +pub type MantleMsgId = [u8; 32]; #[derive(Debug, Clone)] /// Our own hasher. @@ -49,6 +50,7 @@ pub struct Block { pub header: BlockHeader, pub body: BlockBody, pub bedrock_status: BedrockStatus, + pub bedrock_parent_id: MantleMsgId, } #[derive(Debug, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)] @@ -60,7 +62,11 @@ pub struct HashableBlockData { } impl HashableBlockData { - pub fn into_pending_block(self, signing_key: &nssa::PrivateKey) -> Block { + pub fn into_pending_block( + self, + signing_key: &nssa::PrivateKey, + bedrock_parent_id: MantleMsgId, + ) -> Block { let data_bytes = borsh::to_vec(&self).unwrap(); let signature = nssa::Signature::new(signing_key, &data_bytes); let hash = OwnHasher::hash(&data_bytes); @@ -76,8 +82,13 @@ impl HashableBlockData { transactions: self.transactions, }, bedrock_status: BedrockStatus::Pending, + bedrock_parent_id, } } + + pub fn block_hash(&self) -> BlockHash { + OwnHasher::hash(&borsh::to_vec(&self).unwrap()) + } } impl From for HashableBlockData { diff --git a/common/src/communication/indexer.rs b/common/src/communication/indexer.rs new file mode 100644 index 00000000..a0edc176 --- /dev/null +++ b/common/src/communication/indexer.rs @@ -0,0 +1,6 @@ +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub enum Message { + L2BlockFinalized { l2_block_height: u64 }, +} diff --git a/common/src/communication/mod.rs b/common/src/communication/mod.rs new file mode 100644 index 00000000..d99eb481 --- /dev/null +++ b/common/src/communication/mod.rs @@ -0,0 +1 @@ +pub mod indexer; diff --git a/common/src/lib.rs b/common/src/lib.rs index b64e6ef9..68902811 100644 --- a/common/src/lib.rs +++ b/common/src/lib.rs @@ -1,4 +1,5 @@ pub mod block; +pub mod communication; pub mod error; pub mod rpc_primitives; pub mod sequencer_client; diff --git a/common/src/rpc_primitives/requests.rs b/common/src/rpc_primitives/requests.rs index 71641936..6191df44 100644 --- 
a/common/src/rpc_primitives/requests.rs +++ b/common/src/rpc_primitives/requests.rs @@ -73,6 +73,11 @@ pub struct GetProofForCommitmentRequest { #[derive(Serialize, Deserialize, Debug)] pub struct GetProgramIdsRequest {} +#[derive(Serialize, Deserialize, Debug)] +pub struct PostIndexerMessageRequest { + pub message: crate::communication::indexer::Message, +} + parse_request!(HelloRequest); parse_request!(RegisterAccountRequest); parse_request!(SendTxRequest); @@ -87,6 +92,7 @@ parse_request!(GetAccountsNoncesRequest); parse_request!(GetProofForCommitmentRequest); parse_request!(GetAccountRequest); parse_request!(GetProgramIdsRequest); +parse_request!(PostIndexerMessageRequest); #[derive(Serialize, Deserialize, Debug)] pub struct HelloResponse { @@ -216,3 +222,8 @@ pub struct GetInitialTestnetAccountsResponse { pub account_id: String, pub balance: u64, } + +#[derive(Serialize, Deserialize, Debug)] +pub struct PostIndexerMessageResponse { + pub status: String, +} diff --git a/common/src/sequencer_client.rs b/common/src/sequencer_client.rs index 0cb03f6f..7a14d425 100644 --- a/common/src/sequencer_client.rs +++ b/common/src/sequencer_client.rs @@ -1,10 +1,12 @@ -use std::{collections::HashMap, ops::RangeInclusive}; +use std::{collections::HashMap, ops::RangeInclusive, str::FromStr}; use anyhow::Result; +use logos_blockchain_common_http_client::BasicAuthCredentials; use nssa_core::program::ProgramId; use reqwest::Client; -use serde::Deserialize; +use serde::{Deserialize, Serialize}; use serde_json::Value; +use url::Url; use super::rpc_primitives::requests::{ GetAccountBalanceRequest, GetAccountBalanceResponse, GetBlockDataRequest, GetBlockDataResponse, @@ -20,28 +22,75 @@ use crate::{ GetInitialTestnetAccountsResponse, GetLastBlockRequest, GetLastBlockResponse, GetProgramIdsRequest, GetProgramIdsResponse, GetProofForCommitmentRequest, GetProofForCommitmentResponse, GetTransactionByHashRequest, - GetTransactionByHashResponse, SendTxRequest, SendTxResponse, + GetTransactionByHashResponse, PostIndexerMessageRequest, PostIndexerMessageResponse, + SendTxRequest, SendTxResponse, }, }, transaction::{EncodedTransaction, NSSATransaction}, }; +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct BasicAuth { + pub username: String, + pub password: Option, +} + +impl std::fmt::Display for BasicAuth { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.username)?; + if let Some(password) = &self.password { + write!(f, ":{password}")?; + } + + Ok(()) + } +} + +impl FromStr for BasicAuth { + type Err = anyhow::Error; + + fn from_str(s: &str) -> Result { + let parse = || { + let mut parts = s.splitn(2, ':'); + let username = parts.next()?; + let password = parts.next().filter(|p| !p.is_empty()); + if parts.next().is_some() { + return None; + } + + Some((username, password)) + }; + + let (username, password) = parse().ok_or_else(|| { + anyhow::anyhow!("Invalid auth format. 
Expected 'user' or 'user:password'") + })?; + + Ok(Self { + username: username.to_string(), + password: password.map(|p| p.to_string()), + }) + } +} + +impl From for BasicAuthCredentials { + fn from(value: BasicAuth) -> Self { + BasicAuthCredentials::new(value.username, value.password) + } +} + #[derive(Clone)] pub struct SequencerClient { pub client: reqwest::Client, - pub sequencer_addr: String, - pub basic_auth: Option<(String, Option)>, + pub sequencer_addr: Url, + pub basic_auth: Option, } impl SequencerClient { - pub fn new(sequencer_addr: String) -> Result { + pub fn new(sequencer_addr: Url) -> Result { Self::new_with_auth(sequencer_addr, None) } - pub fn new_with_auth( - sequencer_addr: String, - basic_auth: Option<(String, Option)>, - ) -> Result { + pub fn new_with_auth(sequencer_addr: Url, basic_auth: Option) -> Result { Ok(Self { client: Client::builder() // Add more fields if needed @@ -66,9 +115,9 @@ impl SequencerClient { "Calling method {method} with payload {request:?} to sequencer at {}", self.sequencer_addr ); - let mut call_builder = self.client.post(&self.sequencer_addr); + let mut call_builder = self.client.post(self.sequencer_addr.clone()); - if let Some((username, password)) = &self.basic_auth { + if let Some(BasicAuth { username, password }) = &self.basic_auth { call_builder = call_builder.basic_auth(username, password.as_deref()); } @@ -347,4 +396,23 @@ impl SequencerClient { Ok(resp_deser) } + + /// Post indexer into sequencer + pub async fn post_indexer_message( + &self, + message: crate::communication::indexer::Message, + ) -> Result { + let last_req = PostIndexerMessageRequest { message }; + + let req = serde_json::to_value(last_req).unwrap(); + + let resp = self + .call_method_with_payload("post_indexer_message", req) + .await + .unwrap(); + + let resp_deser = serde_json::from_value(resp).unwrap(); + + Ok(resp_deser) + } } diff --git a/common/src/test_utils.rs b/common/src/test_utils.rs index 1125b86e..80703342 100644 --- a/common/src/test_utils.rs +++ b/common/src/test_utils.rs @@ -30,7 +30,7 @@ pub fn produce_dummy_block( transactions, }; - block_data.into_pending_block(&sequencer_sign_key_for_testing()) + block_data.into_pending_block(&sequencer_sign_key_for_testing(), [0; 32]) } pub fn produce_dummy_empty_transaction() -> EncodedTransaction { diff --git a/completions/zsh/_wallet b/completions/zsh/_wallet index d2831ef0..c90dadc4 100644 --- a/completions/zsh/_wallet +++ b/completions/zsh/_wallet @@ -150,10 +150,11 @@ _wallet_account() { subcommand) subcommands=( 'get:Get account data' - 'list:List all accounts' + 'list:List all accounts owned by the wallet' 'ls:List all accounts (alias for list)' 'new:Produce new public or private account' 'sync-private:Sync private accounts' + 'label:Set a label for an account' 'help:Print this message or the help of the given subcommand(s)' ) _describe -t subcommands 'account subcommands' subcommands @@ -184,6 +185,11 @@ _wallet_account() { ;; esac ;; + label) + _arguments \ + '(-a --account-id)'{-a,--account-id}'[Account ID to label]:account_id:_wallet_account_ids' \ + '(-l --label)'{-l,--label}'[The label to assign to the account]:label:' + ;; esac ;; esac diff --git a/examples/program_deployment/Cargo.toml b/examples/program_deployment/Cargo.toml index 6aff2d0f..2199fe21 100644 --- a/examples/program_deployment/Cargo.toml +++ b/examples/program_deployment/Cargo.toml @@ -2,6 +2,7 @@ name = "program_deployment" version = "0.1.0" edition = "2024" +license = { workspace = true } [dependencies] nssa.workspace = true diff 
--git a/examples/program_deployment/methods/Cargo.toml b/examples/program_deployment/methods/Cargo.toml index a25aecf2..95b10ea8 100644 --- a/examples/program_deployment/methods/Cargo.toml +++ b/examples/program_deployment/methods/Cargo.toml @@ -2,6 +2,7 @@ name = "example_program_deployment_methods" version = "0.1.0" edition = "2024" +license = { workspace = true } [build-dependencies] risc0-build.workspace = true diff --git a/examples/program_deployment/methods/guest/Cargo.toml b/examples/program_deployment/methods/guest/Cargo.toml index 245bc5db..1f4db355 100644 --- a/examples/program_deployment/methods/guest/Cargo.toml +++ b/examples/program_deployment/methods/guest/Cargo.toml @@ -2,6 +2,7 @@ name = "example_program_deployment_programs" version = "0.1.0" edition = "2024" +license = { workspace = true } [dependencies] nssa_core.workspace = true diff --git a/flake.lock b/flake.lock new file mode 100644 index 00000000..a12fbc85 --- /dev/null +++ b/flake.lock @@ -0,0 +1,64 @@ +{ + "nodes": { + "crane": { + "locked": { + "lastModified": 1769737823, + "narHash": "sha256-DrBaNpZ+sJ4stXm+0nBX7zqZT9t9P22zbk6m5YhQxS4=", + "owner": "ipetkov", + "repo": "crane", + "rev": "b2f45c3830aa96b7456a4c4bc327d04d7a43e1ba", + "type": "github" + }, + "original": { + "owner": "ipetkov", + "repo": "crane", + "type": "github" + } + }, + "nixpkgs": { + "locked": { + "lastModified": 1770019141, + "narHash": "sha256-VKS4ZLNx4PNrABoB0L8KUpc1fE7CLpQXQs985tGfaCU=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "cb369ef2efd432b3cdf8622b0ffc0a97a02f3137", + "type": "github" + }, + "original": { + "owner": "NixOS", + "ref": "nixos-unstable", + "repo": "nixpkgs", + "type": "github" + } + }, + "root": { + "inputs": { + "crane": "crane", + "nixpkgs": "nixpkgs", + "rust-overlay": "rust-overlay" + } + }, + "rust-overlay": { + "inputs": { + "nixpkgs": [ + "nixpkgs" + ] + }, + "locked": { + "lastModified": 1770088046, + "narHash": "sha256-4hfYDnUTvL1qSSZEA4CEThxfz+KlwSFQ30Z9jgDguO0=", + "owner": "oxalica", + "repo": "rust-overlay", + "rev": "71f9daa4e05e49c434d08627e755495ae222bc34", + "type": "github" + }, + "original": { + "owner": "oxalica", + "repo": "rust-overlay", + "type": "github" + } + } + }, + "root": "root", + "version": 7 +} diff --git a/flake.nix b/flake.nix new file mode 100644 index 00000000..f09d4f77 --- /dev/null +++ b/flake.nix @@ -0,0 +1,96 @@ +{ + description = "Logos Execution Zone"; + + inputs = { + nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable"; + + rust-overlay = { + url = "github:oxalica/rust-overlay"; + inputs.nixpkgs.follows = "nixpkgs"; + }; + + crane.url = "github:ipetkov/crane"; + }; + + outputs = + { + self, + nixpkgs, + rust-overlay, + crane, + ... 
+ }: + let + systems = [ + "x86_64-linux" + "aarch64-linux" + "aarch64-darwin" + "x86_64-windows" + ]; + + forAll = nixpkgs.lib.genAttrs systems; + + mkPkgs = + system: + import nixpkgs { + inherit system; + overlays = [ rust-overlay.overlays.default ]; + }; + in + { + packages = forAll ( + system: + let + pkgs = mkPkgs system; + rustToolchain = pkgs.rust-bin.stable.latest.default; + craneLib = (crane.mkLib pkgs).overrideToolchain rustToolchain; + src = ./.; + + commonArgs = { + inherit src; + buildInputs = [ pkgs.openssl ]; + nativeBuildInputs = [ + pkgs.pkg-config + pkgs.clang + pkgs.llvmPackages.libclang.lib + ]; + LIBCLANG_PATH = "${pkgs.llvmPackages.libclang.lib}/lib"; + }; + + walletFfiPackage = craneLib.buildPackage ( + commonArgs + // { + pname = "logos-execution-zone-wallet-ffi"; + version = "0.1.0"; + cargoExtraArgs = "-p wallet-ffi"; + postInstall = '' + mkdir -p $out/include + cp wallet-ffi/wallet_ffi.h $out/include/ + '' + + pkgs.lib.optionalString pkgs.stdenv.isDarwin '' + install_name_tool -id @rpath/libwallet_ffi.dylib $out/lib/libwallet_ffi.dylib + ''; + } + ); + in + { + wallet = walletFfiPackage; + default = walletFfiPackage; + } + ); + devShells = forAll ( + system: + let + pkgs = mkPkgs system; + walletFfiPackage = self.packages.${system}.wallet; + walletFfiShell = pkgs.mkShell { + inputsFrom = [ walletFfiPackage ]; + }; + in + { + wallet = walletFfiShell; + default = walletFfiShell; + } + ); + }; +} diff --git a/indexer_core/Cargo.toml b/indexer_core/Cargo.toml new file mode 100644 index 00000000..abe1e629 --- /dev/null +++ b/indexer_core/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "indexer_core" +version = "0.1.0" +edition = "2024" +license = { workspace = true } + +[dependencies] +common.workspace = true +bedrock_client.workspace = true + +anyhow.workspace = true +log.workspace = true +serde.workspace = true +tokio.workspace = true +borsh.workspace = true +futures.workspace = true +url.workspace = true +logos-blockchain-core.workspace = true +serde_json.workspace = true diff --git a/indexer_core/src/config.rs b/indexer_core/src/config.rs new file mode 100644 index 00000000..784f5840 --- /dev/null +++ b/indexer_core/src/config.rs @@ -0,0 +1,36 @@ +use std::{fs::File, io::BufReader, path::Path}; + +use anyhow::{Context, Result}; +use bedrock_client::BackoffConfig; +use common::sequencer_client::BasicAuth; +use logos_blockchain_core::mantle::ops::channel::ChannelId; +use serde::{Deserialize, Serialize}; +use url::Url; + +#[derive(Debug, Clone, Serialize, Deserialize)] +/// ToDo: Expand if necessary +pub struct ClientConfig { + pub addr: Url, + pub auth: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +/// Note: For individual RPC requests we use Fibonacci backoff retry strategy +pub struct IndexerConfig { + pub resubscribe_interval_millis: u64, + pub backoff: BackoffConfig, + pub bedrock_client_config: ClientConfig, + pub sequencer_client_config: ClientConfig, + pub channel_id: ChannelId, +} + +impl IndexerConfig { + pub fn from_path(config_home: &Path) -> Result { + let file = File::open(config_home) + .with_context(|| format!("Failed to open indexer config at {config_home:?}"))?; + let reader = BufReader::new(file); + + serde_json::from_reader(reader) + .with_context(|| format!("Failed to parse indexer config at {config_home:?}")) + } +} diff --git a/indexer_core/src/lib.rs b/indexer_core/src/lib.rs new file mode 100644 index 00000000..ca9ec22f --- /dev/null +++ b/indexer_core/src/lib.rs @@ -0,0 +1,124 @@ +use std::sync::Arc; + +use anyhow::Result; 
+use bedrock_client::BedrockClient; +use common::{ + block::HashableBlockData, communication::indexer::Message, + rpc_primitives::requests::PostIndexerMessageResponse, sequencer_client::SequencerClient, +}; +use futures::StreamExt; +use log::info; +use logos_blockchain_core::mantle::{ + Op, SignedMantleTx, + ops::channel::{ChannelId, inscribe::InscriptionOp}, +}; +use tokio::sync::RwLock; + +use crate::{config::IndexerConfig, state::IndexerState}; + +pub mod config; +pub mod state; + +pub struct IndexerCore { + pub bedrock_client: BedrockClient, + pub sequencer_client: SequencerClient, + pub config: IndexerConfig, + pub state: IndexerState, +} + +impl IndexerCore { + pub fn new(config: IndexerConfig) -> Result<Self> { + Ok(Self { + bedrock_client: BedrockClient::new( + config.bedrock_client_config.auth.clone().map(Into::into), + config.bedrock_client_config.addr.clone(), + )?, + sequencer_client: SequencerClient::new_with_auth( + config.sequencer_client_config.addr.clone(), + config.sequencer_client_config.auth.clone(), + )?, + config, + // No state setup for now; left as a future task. + state: IndexerState { + latest_seen_block: Arc::new(RwLock::new(0)), + }, + }) + } + + pub async fn subscribe_parse_block_stream(&self) -> Result<()> { + loop { + let mut stream_pinned = Box::pin(self.bedrock_client.get_lib_stream().await?); + + info!("Block stream joined"); + + while let Some(block_info) = stream_pinned.next().await { + let header_id = block_info.header_id; + + info!("Observed L1 block at height {}", block_info.height); + + if let Some(l1_block) = self + .bedrock_client + .get_block_by_id(header_id, &self.config.backoff) + .await? + { + info!("Extracted L1 block at height {}", block_info.height); + + let l2_blocks_parsed = parse_blocks( + l1_block.into_transactions().into_iter(), + &self.config.channel_id, + ); + + for l2_block in l2_blocks_parsed { + // State modification; will be expanded in the future. + { + let mut guard = self.state.latest_seen_block.write().await; + if l2_block.block_id > *guard { + *guard = l2_block.block_id; + } + } + + // Send the data to the sequencer; may need to be expanded. + let message = Message::L2BlockFinalized { + l2_block_height: l2_block.block_id, + }; + + let status = self.send_message_to_sequencer(message.clone()).await?; + + info!("Sent message {message:#?} to sequencer; status {status:#?}"); + } + } + } + + // Refetch the stream after a delay. + tokio::time::sleep(std::time::Duration::from_millis( + self.config.resubscribe_interval_millis, + )) + .await; + } + } + + pub async fn send_message_to_sequencer( + &self, + message: Message, + ) -> Result<PostIndexerMessageResponse> { + Ok(self.sequencer_client.post_indexer_message(message).await?) + } +} + +fn parse_blocks( + block_txs: impl Iterator<Item = SignedMantleTx>, + decoded_channel_id: &ChannelId, +) -> impl Iterator<Item = HashableBlockData> { + block_txs.flat_map(|tx| { + tx.mantle_tx.ops.into_iter().filter_map(|op| match op { + Op::ChannelInscribe(InscriptionOp { + channel_id, + inscription, + .. + }) if channel_id == *decoded_channel_id => { + borsh::from_slice::<HashableBlockData>(&inscription).ok() + } + _ => None, + }) + }) +} diff --git a/indexer_core/src/state.rs b/indexer_core/src/state.rs new file mode 100644 index 00000000..bd05971f --- /dev/null +++ b/indexer_core/src/state.rs @@ -0,0 +1,9 @@ +use std::sync::Arc; + +use tokio::sync::RwLock; + +#[derive(Debug, Clone)] +pub struct IndexerState { + // Only one field for now, for testing.
+ pub latest_seen_block: Arc>, +} diff --git a/indexer_service/Cargo.toml b/indexer_service/Cargo.toml new file mode 100644 index 00000000..361328cf --- /dev/null +++ b/indexer_service/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "indexer_service" +version = "0.1.0" +edition = "2024" +license = { workspace = true } + +[dependencies] +indexer_service_protocol.workspace = true +indexer_service_rpc = { workspace = true, features = ["server"] } + +clap = { workspace = true, features = ["derive"] } +anyhow.workspace = true +tokio.workspace = true +tokio-util.workspace = true +env_logger.workspace = true +log.workspace = true +jsonrpsee.workspace = true +async-trait = "0.1.89" diff --git a/indexer_service/Dockerfile b/indexer_service/Dockerfile new file mode 100644 index 00000000..b283e2ec --- /dev/null +++ b/indexer_service/Dockerfile @@ -0,0 +1,64 @@ +# Chef stage - uses pre-built cargo-chef image +FROM lukemathwalker/cargo-chef:latest-rust-1.91.1-slim-trixie AS chef + +# Install build dependencies +RUN apt-get update && apt-get install -y \ + pkg-config \ + libssl-dev \ + libclang-dev \ + clang \ + curl \ + && rm -rf /var/lib/apt/lists/* + +WORKDIR /indexer_service + +# Planner stage - generates dependency recipe +FROM chef AS planner +COPY . . +RUN cargo chef prepare --bin indexer_service --recipe-path recipe.json + +# Builder stage - builds dependencies and application +FROM chef AS builder +COPY --from=planner /indexer_service/recipe.json recipe.json +# Build dependencies only (this layer will be cached) +RUN cargo chef cook --bin indexer_service --release --recipe-path recipe.json + +# Copy source code +COPY . . + +# Build the actual application +RUN cargo build --release --bin indexer_service + +# Strip debug symbols to reduce binary size +RUN strip /indexer_service/target/release/indexer_service + +# Runtime stage - minimal image +FROM debian:trixie-slim + +# Create non-root user for security +RUN useradd -m -u 1000 -s /bin/bash indexer_service_user + +# Copy binary from builder +COPY --from=builder --chown=indexer_service_user:indexer_service_user /indexer_service/target/release/indexer_service /usr/local/bin/indexer_service + +# Expose default port +EXPOSE 8779 + +# Health check +HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \ + CMD curl http://localhost:8779 \ + -H "Content-Type: application/json" \ + -d "{ \ + \"jsonrpc\": \"2.0\", \ + \"method\": \"get_schema\", \ + \"params\": {}, \ + \"id\": 1 \ + }" || exit 1 + +# Run the application +ENV RUST_LOG=info + +USER indexer_service_user + +WORKDIR /indexer_service +CMD ["indexer_service"] diff --git a/indexer_service/docker-compose.yml b/indexer_service/docker-compose.yml new file mode 100644 index 00000000..81e68cfa --- /dev/null +++ b/indexer_service/docker-compose.yml @@ -0,0 +1,9 @@ +services: + indexer_service: + image: lssa/indexer_service + build: + context: .. 
+ dockerfile: indexer_service/Dockerfile + container_name: indexer_service + ports: + - "8779:8779" diff --git a/indexer_service/protocol/Cargo.toml b/indexer_service/protocol/Cargo.toml new file mode 100644 index 00000000..2646086c --- /dev/null +++ b/indexer_service/protocol/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "indexer_service_protocol" +version = "0.1.0" +edition = "2024" +license = { workspace = true } + +[dependencies] +nssa_core = { workspace = true, optional = true, features = ["host"] } +nssa = { workspace = true, optional = true } +common = { workspace = true, optional = true } + +serde = { workspace = true, features = ["derive"] } +schemars.workspace = true +base64.workspace = true +borsh = { workspace = true, optional = true } + +[features] +# Enable conversion to/from NSSA core types +convert = ["dep:nssa_core", "dep:nssa", "dep:common", "dep:borsh"] diff --git a/indexer_service/protocol/src/convert.rs b/indexer_service/protocol/src/convert.rs new file mode 100644 index 00000000..8c6de2f4 --- /dev/null +++ b/indexer_service/protocol/src/convert.rs @@ -0,0 +1,652 @@ +//! Conversions between indexer_service_protocol types and nssa/nssa_core types + +use crate::*; + +// ============================================================================ +// Account-related conversions +// ============================================================================ + +impl From for AccountId { + fn from(value: nssa_core::account::AccountId) -> Self { + Self { + value: value.into_value(), + } + } +} + +impl From for nssa_core::account::AccountId { + fn from(value: AccountId) -> Self { + let AccountId { value } = value; + nssa_core::account::AccountId::new(value) + } +} + +impl From for Account { + fn from(value: nssa_core::account::Account) -> Self { + let nssa_core::account::Account { + program_owner, + balance, + data, + nonce, + } = value; + + Self { + program_owner, + balance, + data: data.into(), + nonce, + } + } +} + +impl TryFrom for nssa_core::account::Account { + type Error = nssa_core::account::data::DataTooBigError; + + fn try_from(value: Account) -> Result { + let Account { + program_owner, + balance, + data, + nonce, + } = value; + + Ok(nssa_core::account::Account { + program_owner, + balance, + data: data.try_into()?, + nonce, + }) + } +} + +impl From for Data { + fn from(value: nssa_core::account::Data) -> Self { + Self(value.into_inner()) + } +} + +impl TryFrom for nssa_core::account::Data { + type Error = nssa_core::account::data::DataTooBigError; + + fn try_from(value: Data) -> Result { + nssa_core::account::Data::try_from(value.0) + } +} + +// ============================================================================ +// Commitment and Nullifier conversions +// ============================================================================ + +impl From for Commitment { + fn from(value: nssa_core::Commitment) -> Self { + Self(value.to_byte_array()) + } +} + +impl From for nssa_core::Commitment { + fn from(value: Commitment) -> Self { + nssa_core::Commitment::from_byte_array(value.0) + } +} + +impl From for Nullifier { + fn from(value: nssa_core::Nullifier) -> Self { + Self(value.to_byte_array()) + } +} + +impl From for nssa_core::Nullifier { + fn from(value: Nullifier) -> Self { + nssa_core::Nullifier::from_byte_array(value.0) + } +} + +impl From for CommitmentSetDigest { + fn from(value: nssa_core::CommitmentSetDigest) -> Self { + Self(value) + } +} + +impl From for nssa_core::CommitmentSetDigest { + fn from(value: CommitmentSetDigest) -> Self { + value.0 + } +} 
+ +// ============================================================================ +// Encryption-related conversions +// ============================================================================ + +impl From for Ciphertext { + fn from(value: nssa_core::encryption::Ciphertext) -> Self { + Self(value.into_inner()) + } +} + +impl From for nssa_core::encryption::Ciphertext { + fn from(value: Ciphertext) -> Self { + nssa_core::encryption::Ciphertext::from_inner(value.0) + } +} + +impl From for EphemeralPublicKey { + fn from(value: nssa_core::encryption::EphemeralPublicKey) -> Self { + Self(value.0) + } +} + +impl From for nssa_core::encryption::EphemeralPublicKey { + fn from(value: EphemeralPublicKey) -> Self { + nssa_core::encryption::shared_key_derivation::Secp256k1Point(value.0) + } +} + +// ============================================================================ +// Signature and PublicKey conversions +// ============================================================================ + +impl From for Signature { + fn from(value: nssa::Signature) -> Self { + let nssa::Signature { value } = value; + Self(value) + } +} + +impl From for nssa::Signature { + fn from(value: Signature) -> Self { + let Signature(sig_value) = value; + nssa::Signature { value: sig_value } + } +} + +impl From for PublicKey { + fn from(value: nssa::PublicKey) -> Self { + Self(*value.value()) + } +} + +impl TryFrom for nssa::PublicKey { + type Error = nssa::error::NssaError; + + fn try_from(value: PublicKey) -> Result { + nssa::PublicKey::try_new(value.0) + } +} + +// ============================================================================ +// Proof conversions +// ============================================================================ + +impl From for Proof { + fn from(value: nssa::privacy_preserving_transaction::circuit::Proof) -> Self { + Self(value.into_inner()) + } +} + +impl From for nssa::privacy_preserving_transaction::circuit::Proof { + fn from(value: Proof) -> Self { + nssa::privacy_preserving_transaction::circuit::Proof::from_inner(value.0) + } +} + +// ============================================================================ +// EncryptedAccountData conversions +// ============================================================================ + +impl From + for EncryptedAccountData +{ + fn from(value: nssa::privacy_preserving_transaction::message::EncryptedAccountData) -> Self { + Self { + ciphertext: value.ciphertext.into(), + epk: value.epk.into(), + view_tag: value.view_tag, + } + } +} + +impl From + for nssa::privacy_preserving_transaction::message::EncryptedAccountData +{ + fn from(value: EncryptedAccountData) -> Self { + Self { + ciphertext: value.ciphertext.into(), + epk: value.epk.into(), + view_tag: value.view_tag, + } + } +} + +// ============================================================================ +// Transaction Message conversions +// ============================================================================ + +impl From for PublicMessage { + fn from(value: nssa::public_transaction::Message) -> Self { + let nssa::public_transaction::Message { + program_id, + account_ids, + nonces, + instruction_data, + } = value; + Self { + program_id, + account_ids: account_ids.into_iter().map(Into::into).collect(), + nonces, + instruction_data, + } + } +} + +impl From for nssa::public_transaction::Message { + fn from(value: PublicMessage) -> Self { + let PublicMessage { + program_id, + account_ids, + nonces, + instruction_data, + } = value; + Self::new_preserialized( + program_id, + 
account_ids.into_iter().map(Into::into).collect(), + nonces, + instruction_data, + ) + } +} + +impl From for PrivacyPreservingMessage { + fn from(value: nssa::privacy_preserving_transaction::message::Message) -> Self { + let nssa::privacy_preserving_transaction::message::Message { + public_account_ids, + nonces, + public_post_states, + encrypted_private_post_states, + new_commitments, + new_nullifiers, + } = value; + Self { + public_account_ids: public_account_ids.into_iter().map(Into::into).collect(), + nonces, + public_post_states: public_post_states.into_iter().map(Into::into).collect(), + encrypted_private_post_states: encrypted_private_post_states + .into_iter() + .map(Into::into) + .collect(), + new_commitments: new_commitments.into_iter().map(Into::into).collect(), + new_nullifiers: new_nullifiers + .into_iter() + .map(|(n, d)| (n.into(), d.into())) + .collect(), + } + } +} + +impl TryFrom for nssa::privacy_preserving_transaction::message::Message { + type Error = nssa_core::account::data::DataTooBigError; + + fn try_from(value: PrivacyPreservingMessage) -> Result { + let PrivacyPreservingMessage { + public_account_ids, + nonces, + public_post_states, + encrypted_private_post_states, + new_commitments, + new_nullifiers, + } = value; + Ok(Self { + public_account_ids: public_account_ids.into_iter().map(Into::into).collect(), + nonces, + public_post_states: public_post_states + .into_iter() + .map(TryInto::try_into) + .collect::, _>>()?, + encrypted_private_post_states: encrypted_private_post_states + .into_iter() + .map(Into::into) + .collect(), + new_commitments: new_commitments.into_iter().map(Into::into).collect(), + new_nullifiers: new_nullifiers + .into_iter() + .map(|(n, d)| (n.into(), d.into())) + .collect(), + }) + } +} + +impl From for ProgramDeploymentMessage { + fn from(value: nssa::program_deployment_transaction::Message) -> Self { + Self { + bytecode: value.into_bytecode(), + } + } +} + +impl From for nssa::program_deployment_transaction::Message { + fn from(value: ProgramDeploymentMessage) -> Self { + let ProgramDeploymentMessage { bytecode } = value; + Self::new(bytecode) + } +} + +// ============================================================================ +// WitnessSet conversions +// ============================================================================ + +impl TryFrom for WitnessSet { + type Error = (); + + fn try_from(_value: nssa::public_transaction::WitnessSet) -> Result { + // Public transaction witness sets don't have proofs, so we can't convert them directly + Err(()) + } +} + +impl From for WitnessSet { + fn from(value: nssa::privacy_preserving_transaction::witness_set::WitnessSet) -> Self { + let (sigs_and_pks, proof) = value.into_raw_parts(); + Self { + signatures_and_public_keys: sigs_and_pks + .into_iter() + .map(|(sig, pk)| (sig.into(), pk.into())) + .collect(), + proof: proof.into(), + } + } +} + +impl TryFrom for nssa::privacy_preserving_transaction::witness_set::WitnessSet { + type Error = nssa::error::NssaError; + + fn try_from(value: WitnessSet) -> Result { + let WitnessSet { + signatures_and_public_keys, + proof, + } = value; + let signatures_and_public_keys = signatures_and_public_keys + .into_iter() + .map(|(sig, pk)| Ok((sig.into(), pk.try_into()?))) + .collect::, Self::Error>>()?; + + Ok(Self::from_raw_parts( + signatures_and_public_keys, + proof.into(), + )) + } +} + +// ============================================================================ +// Transaction conversions +// 
============================================================================ + +impl From for PublicTransaction { + fn from(value: nssa::PublicTransaction) -> Self { + Self { + message: value.message().clone().into(), + witness_set: WitnessSet { + signatures_and_public_keys: value + .witness_set() + .signatures_and_public_keys() + .iter() + .map(|(sig, pk)| (sig.clone().into(), pk.clone().into())) + .collect(), + proof: Proof(vec![]), // Public transactions don't have proofs + }, + } + } +} + +impl TryFrom for nssa::PublicTransaction { + type Error = nssa::error::NssaError; + + fn try_from(value: PublicTransaction) -> Result { + let PublicTransaction { + message, + witness_set, + } = value; + let WitnessSet { + signatures_and_public_keys, + proof: _, + } = witness_set; + Ok(Self::new( + message.into(), + nssa::public_transaction::WitnessSet::from_raw_parts( + signatures_and_public_keys + .into_iter() + .map(|(sig, pk)| Ok((sig.into(), pk.try_into()?))) + .collect::, Self::Error>>()?, + ), + )) + } +} + +impl From for PrivacyPreservingTransaction { + fn from(value: nssa::PrivacyPreservingTransaction) -> Self { + Self { + message: value.message().clone().into(), + witness_set: value.witness_set().clone().into(), + } + } +} + +impl TryFrom for nssa::PrivacyPreservingTransaction { + type Error = nssa::error::NssaError; + + fn try_from(value: PrivacyPreservingTransaction) -> Result { + let PrivacyPreservingTransaction { + message, + witness_set, + } = value; + Ok(Self::new( + message.try_into().map_err(|_| { + nssa::error::NssaError::InvalidInput("Data too big error".to_string()) + })?, + witness_set.try_into()?, + )) + } +} + +impl From for ProgramDeploymentTransaction { + fn from(value: nssa::ProgramDeploymentTransaction) -> Self { + Self { + message: value.into_message().into(), + } + } +} + +impl From for nssa::ProgramDeploymentTransaction { + fn from(value: ProgramDeploymentTransaction) -> Self { + let ProgramDeploymentTransaction { message } = value; + Self::new(message.into()) + } +} + +impl From for Transaction { + fn from(value: common::transaction::NSSATransaction) -> Self { + match value { + common::transaction::NSSATransaction::Public(tx) => Transaction::Public(tx.into()), + common::transaction::NSSATransaction::PrivacyPreserving(tx) => { + Transaction::PrivacyPreserving(tx.into()) + } + common::transaction::NSSATransaction::ProgramDeployment(tx) => { + Transaction::ProgramDeployment(tx.into()) + } + } + } +} + +impl TryFrom for common::transaction::NSSATransaction { + type Error = nssa::error::NssaError; + + fn try_from(value: Transaction) -> Result { + match value { + Transaction::Public(tx) => { + Ok(common::transaction::NSSATransaction::Public(tx.try_into()?)) + } + Transaction::PrivacyPreserving(tx) => Ok( + common::transaction::NSSATransaction::PrivacyPreserving(tx.try_into()?), + ), + Transaction::ProgramDeployment(tx) => Ok( + common::transaction::NSSATransaction::ProgramDeployment(tx.into()), + ), + } + } +} + +// ============================================================================ +// Block conversions +// ============================================================================ + +impl From for BlockHeader { + fn from(value: common::block::BlockHeader) -> Self { + let common::block::BlockHeader { + block_id, + prev_block_hash, + hash, + timestamp, + signature, + } = value; + Self { + block_id, + prev_block_hash: Hash(prev_block_hash), + hash: Hash(hash), + timestamp, + signature: signature.into(), + } + } +} + +impl TryFrom for common::block::BlockHeader { + 
type Error = nssa::error::NssaError; + + fn try_from(value: BlockHeader) -> Result { + let BlockHeader { + block_id, + prev_block_hash, + hash, + timestamp, + signature, + } = value; + Ok(Self { + block_id, + prev_block_hash: prev_block_hash.0, + hash: hash.0, + timestamp, + signature: signature.into(), + }) + } +} + +impl TryFrom for BlockBody { + type Error = std::io::Error; + + fn try_from(value: common::block::BlockBody) -> Result { + // Note: EncodedTransaction doesn't have a direct conversion to NSSATransaction + // This conversion will decode and re-encode the transactions + use borsh::BorshDeserialize as _; + + let common::block::BlockBody { transactions } = value; + + let transactions = transactions + .into_iter() + .map(|encoded_tx| match encoded_tx.tx_kind { + common::transaction::TxKind::Public => { + nssa::PublicTransaction::try_from_slice(&encoded_tx.encoded_transaction_data) + .map(|tx| Transaction::Public(tx.into())) + } + common::transaction::TxKind::PrivacyPreserving => { + nssa::PrivacyPreservingTransaction::try_from_slice( + &encoded_tx.encoded_transaction_data, + ) + .map(|tx| Transaction::PrivacyPreserving(tx.into())) + } + common::transaction::TxKind::ProgramDeployment => { + nssa::ProgramDeploymentTransaction::try_from_slice( + &encoded_tx.encoded_transaction_data, + ) + .map(|tx| Transaction::ProgramDeployment(tx.into())) + } + }) + .collect::, _>>()?; + + Ok(Self { transactions }) + } +} + +impl TryFrom for common::block::BlockBody { + type Error = nssa::error::NssaError; + + fn try_from(value: BlockBody) -> Result { + let BlockBody { transactions } = value; + + let transactions = transactions + .into_iter() + .map(|tx| { + let nssa_tx: common::transaction::NSSATransaction = tx.try_into()?; + Ok::<_, nssa::error::NssaError>(nssa_tx.into()) + }) + .collect::, _>>()?; + + Ok(Self { transactions }) + } +} + +impl TryFrom for Block { + type Error = std::io::Error; + + fn try_from(value: common::block::Block) -> Result { + let common::block::Block { + header, + body, + bedrock_status, + bedrock_parent_id, + } = value; + + Ok(Self { + header: header.into(), + body: body.try_into()?, + bedrock_status: bedrock_status.into(), + bedrock_parent_id: MantleMsgId(bedrock_parent_id), + }) + } +} + +impl TryFrom for common::block::Block { + type Error = nssa::error::NssaError; + + fn try_from(value: Block) -> Result { + let Block { + header, + body, + bedrock_status, + bedrock_parent_id, + } = value; + + Ok(Self { + header: header.try_into()?, + body: body.try_into()?, + bedrock_status: bedrock_status.into(), + bedrock_parent_id: bedrock_parent_id.0, + }) + } +} + +impl From for BedrockStatus { + fn from(value: common::block::BedrockStatus) -> Self { + match value { + common::block::BedrockStatus::Pending => Self::Pending, + common::block::BedrockStatus::Safe => Self::Safe, + common::block::BedrockStatus::Finalized => Self::Finalized, + } + } +} + +impl From for common::block::BedrockStatus { + fn from(value: BedrockStatus) -> Self { + match value { + BedrockStatus::Pending => Self::Pending, + BedrockStatus::Safe => Self::Safe, + BedrockStatus::Finalized => Self::Finalized, + } + } +} diff --git a/indexer_service/protocol/src/lib.rs b/indexer_service/protocol/src/lib.rs new file mode 100644 index 00000000..f12bdf5b --- /dev/null +++ b/indexer_service/protocol/src/lib.rs @@ -0,0 +1,238 @@ +//! This crate defines the protocol types used by the indexer service. +//! +//! Currently it mostly mimics types from `nssa_core`, but it's important to have a separate crate +//! 
to define a stable interface for the indexer service RPCs which evolves in its own way. + +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; + +#[cfg(feature = "convert")] +mod convert; + +pub type Nonce = u128; + +pub type ProgramId = [u32; 8]; + +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] +pub struct AccountId { + #[serde(with = "base64::arr")] + #[schemars(with = "String", description = "base64-encoded account ID")] + pub value: [u8; 32], +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] +pub struct Account { + pub program_owner: ProgramId, + pub balance: u128, + pub data: Data, + pub nonce: Nonce, +} + +pub type BlockId = u64; +pub type TimeStamp = u64; + +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] +pub struct Block { + pub header: BlockHeader, + pub body: BlockBody, + pub bedrock_status: BedrockStatus, + pub bedrock_parent_id: MantleMsgId, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] +pub struct BlockHeader { + pub block_id: BlockId, + pub prev_block_hash: Hash, + pub hash: Hash, + pub timestamp: TimeStamp, + pub signature: Signature, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] +pub struct Signature( + #[serde(with = "base64::arr")] + #[schemars(with = "String", description = "base64-encoded signature")] + pub [u8; 64], +); + +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] +pub struct BlockBody { + pub transactions: Vec, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] +pub enum Transaction { + Public(PublicTransaction), + PrivacyPreserving(PrivacyPreservingTransaction), + ProgramDeployment(ProgramDeploymentTransaction), +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] +pub struct PublicTransaction { + pub message: PublicMessage, + pub witness_set: WitnessSet, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] +pub struct PrivacyPreservingTransaction { + pub message: PrivacyPreservingMessage, + pub witness_set: WitnessSet, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] +pub struct PublicMessage { + pub program_id: ProgramId, + pub account_ids: Vec, + pub nonces: Vec, + pub instruction_data: InstructionData, +} + +pub type InstructionData = Vec; + +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] +pub struct PrivacyPreservingMessage { + pub public_account_ids: Vec, + pub nonces: Vec, + pub public_post_states: Vec, + pub encrypted_private_post_states: Vec, + pub new_commitments: Vec, + pub new_nullifiers: Vec<(Nullifier, CommitmentSetDigest)>, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] +pub struct WitnessSet { + pub signatures_and_public_keys: Vec<(Signature, PublicKey)>, + pub proof: Proof, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] +pub struct Proof( + #[serde(with = "base64")] + #[schemars(with = "String", description = "base64-encoded proof")] + pub Vec, +); + +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] +pub struct EncryptedAccountData { + pub ciphertext: Ciphertext, + pub epk: EphemeralPublicKey, + pub view_tag: ViewTag, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] +pub struct 
ProgramDeploymentTransaction { + pub message: ProgramDeploymentMessage, +} + +pub type ViewTag = u8; + +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] +pub struct Ciphertext( + #[serde(with = "base64")] + #[schemars(with = "String", description = "base64-encoded ciphertext")] + pub Vec, +); + +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] +pub struct PublicKey( + #[serde(with = "base64::arr")] + #[schemars(with = "String", description = "base64-encoded public key")] + pub [u8; 32], +); + +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] +pub struct EphemeralPublicKey( + #[serde(with = "base64")] + #[schemars(with = "String", description = "base64-encoded ephemeral public key")] + pub Vec, +); + +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] +pub struct Commitment( + #[serde(with = "base64::arr")] + #[schemars(with = "String", description = "base64-encoded commitment")] + pub [u8; 32], +); + +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] +pub struct Nullifier( + #[serde(with = "base64::arr")] + #[schemars(with = "String", description = "base64-encoded nullifier")] + pub [u8; 32], +); + +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] +pub struct CommitmentSetDigest( + #[serde(with = "base64::arr")] + #[schemars(with = "String", description = "base64-encoded commitment set digest")] + pub [u8; 32], +); + +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] +pub struct ProgramDeploymentMessage { + #[serde(with = "base64")] + #[schemars(with = "String", description = "base64-encoded program bytecode")] + pub bytecode: Vec, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] +pub struct Data( + #[serde(with = "base64")] + #[schemars(with = "String", description = "base64-encoded account data")] + pub Vec, +); + +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] +pub struct Hash( + #[serde(with = "base64::arr")] + #[schemars(with = "String", description = "base64-encoded hash")] + pub [u8; 32], +); + +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] +pub struct MantleMsgId( + #[serde(with = "base64::arr")] + #[schemars(with = "String", description = "base64-encoded Bedrock message id")] + pub [u8; 32], +); + +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] +pub enum BedrockStatus { + Pending, + Safe, + Finalized, +} + +mod base64 { + use base64::prelude::{BASE64_STANDARD, Engine as _}; + use serde::{Deserialize, Deserializer, Serialize, Serializer}; + + pub mod arr { + use super::*; + + pub fn serialize(v: &[u8], s: S) -> Result { + super::serialize(v, s) + } + + pub fn deserialize<'de, const N: usize, D: Deserializer<'de>>( + d: D, + ) -> Result<[u8; N], D::Error> { + let vec = super::deserialize(d)?; + vec.try_into().map_err(|_| { + serde::de::Error::custom(format!("Invalid length, expected {N} bytes")) + }) + } + } + + pub fn serialize(v: &[u8], s: S) -> Result { + let base64 = BASE64_STANDARD.encode(v); + String::serialize(&base64, s) + } + + pub fn deserialize<'de, D: Deserializer<'de>>(d: D) -> Result, D::Error> { + let base64 = String::deserialize(d)?; + BASE64_STANDARD + .decode(base64.as_bytes()) + .map_err(serde::de::Error::custom) + } +} diff --git a/indexer_service/rpc/Cargo.toml b/indexer_service/rpc/Cargo.toml new file mode 
100644 index 00000000..2bed63ae --- /dev/null +++ b/indexer_service/rpc/Cargo.toml @@ -0,0 +1,16 @@ +[package] +name = "indexer_service_rpc" +version = "0.1.0" +edition = "2024" +license = { workspace = true } + +[dependencies] +indexer_service_protocol = { workspace = true } + +jsonrpsee = { workspace = true, features = ["macros"] } +serde_json.workspace = true +schemars.workspace = true + +[features] +client = ["jsonrpsee/client"] +server = ["jsonrpsee/server"] diff --git a/indexer_service/rpc/src/lib.rs b/indexer_service/rpc/src/lib.rs new file mode 100644 index 00000000..c1c4a560 --- /dev/null +++ b/indexer_service/rpc/src/lib.rs @@ -0,0 +1,40 @@ +use indexer_service_protocol::{Account, AccountId, Block, BlockId, Hash, Transaction}; +use jsonrpsee::{core::SubscriptionResult, proc_macros::rpc, types::ErrorObjectOwned}; + +#[cfg(all(not(feature = "server"), not(feature = "client")))] +compile_error!("At least one of `server` or `client` features must be enabled."); + +#[cfg_attr(feature = "server", rpc(server))] +#[cfg_attr(feature = "client", rpc(client))] +pub trait Rpc { + #[method(name = "get_schema")] + fn get_schema(&self) -> Result<serde_json::Value, ErrorObjectOwned> { + // TODO: The canonical solution would be to provide a `describe` method returning an OpenRPC spec, + // but for now that is painful to implement, although it can be done if really needed. + // Until it can be auto-generated (https://github.com/paritytech/jsonrpsee/issues/737), + // we just return the JSON schema. + + // The Block schema contains all other types used in the protocol, so it's sufficient to return + // its schema. + let block_schema = schemars::schema_for!(Block); + Ok(serde_json::to_value(block_schema).expect("Schema serialization should not fail")) + } + + #[subscription(name = "subscribeToBlocks", item = Vec<Block>)] + async fn subscribe_to_blocks(&self, from: BlockId) -> SubscriptionResult; + + #[method(name = "getBlockById")] + async fn get_block_by_id(&self, block_id: BlockId) -> Result<Block, ErrorObjectOwned>; + + #[method(name = "getBlockByHash")] + async fn get_block_by_hash(&self, block_hash: Hash) -> Result<Block, ErrorObjectOwned>; + + #[method(name = "getLastBlockId")] + async fn get_last_block_id(&self) -> Result<BlockId, ErrorObjectOwned>; + + #[method(name = "getAccount")] + async fn get_account(&self, account_id: AccountId) -> Result<Account, ErrorObjectOwned>; + + #[method(name = "getTransaction")] + async fn get_transaction(&self, tx_hash: Hash) -> Result<Transaction, ErrorObjectOwned>; +} diff --git a/indexer_service/src/lib.rs b/indexer_service/src/lib.rs new file mode 100644 index 00000000..1f278a4d --- /dev/null +++ b/indexer_service/src/lib.rs @@ -0,0 +1 @@ +pub mod service; diff --git a/indexer_service/src/main.rs b/indexer_service/src/main.rs new file mode 100644 index 00000000..bfdd3259 --- /dev/null +++ b/indexer_service/src/main.rs @@ -0,0 +1,72 @@ +use std::net::SocketAddr; + +use anyhow::{Context as _, Result}; +use clap::Parser; +use indexer_service_rpc::RpcServer as _; +use jsonrpsee::server::Server; +use log::{error, info}; +use tokio_util::sync::CancellationToken; + +#[derive(Debug, Parser)] +#[clap(version)] +struct Args { + #[clap(short, long, default_value = "8779")] + port: u16, +} + +#[tokio::main] +async fn main() -> Result<()> { + env_logger::init(); + + let args = Args::parse(); + + let cancellation_token = listen_for_shutdown_signal(); + + let handle = run_server(args.port).await?; + let handle_clone = handle.clone(); + + tokio::select!
{ + _ = cancellation_token.cancelled() => { + info!("Shutting down server..."); + } + _ = handle_clone.stopped() => { + error!("Server stopped unexpectedly"); + } + } + + info!("Server shutdown complete"); + + Ok(()) +} + +async fn run_server(port: u16) -> Result { + let server = Server::builder() + .build(SocketAddr::from(([0, 0, 0, 0], port))) + .await + .context("Failed to build RPC server")?; + + let addr = server + .local_addr() + .context("Failed to get local address of RPC server")?; + + info!("Starting Indexer Service RPC server on {addr}"); + + let handle = server.start(indexer_service::service::IndexerService.into_rpc()); + Ok(handle) +} + +fn listen_for_shutdown_signal() -> CancellationToken { + let cancellation_token = CancellationToken::new(); + let cancellation_token_clone = cancellation_token.clone(); + + tokio::spawn(async move { + if let Err(err) = tokio::signal::ctrl_c().await { + error!("Failed to listen for Ctrl-C signal: {err}"); + return; + } + info!("Received Ctrl-C signal"); + cancellation_token_clone.cancel(); + }); + + cancellation_token +} diff --git a/indexer_service/src/service.rs b/indexer_service/src/service.rs new file mode 100644 index 00000000..46c5fb2d --- /dev/null +++ b/indexer_service/src/service.rs @@ -0,0 +1,36 @@ +use indexer_service_protocol::{Account, AccountId, Block, BlockId, Hash, Transaction}; +use jsonrpsee::{core::SubscriptionResult, types::ErrorObjectOwned}; + +pub struct IndexerService; + +// `async_trait` is required by `jsonrpsee` +#[async_trait::async_trait] +impl indexer_service_rpc::RpcServer for IndexerService { + async fn subscribe_to_blocks( + &self, + _subscription_sink: jsonrpsee::PendingSubscriptionSink, + _from: BlockId, + ) -> SubscriptionResult { + todo!() + } + + async fn get_block_by_id(&self, _block_id: BlockId) -> Result { + todo!() + } + + async fn get_block_by_hash(&self, _block_hash: Hash) -> Result { + todo!() + } + + async fn get_last_block_id(&self) -> Result { + todo!() + } + + async fn get_account(&self, _account_id: AccountId) -> Result { + todo!() + } + + async fn get_transaction(&self, _tx_hash: Hash) -> Result { + todo!() + } +} diff --git a/integration_tests/Cargo.toml b/integration_tests/Cargo.toml index b888c177..142ad78d 100644 --- a/integration_tests/Cargo.toml +++ b/integration_tests/Cargo.toml @@ -2,6 +2,7 @@ name = "integration_tests" version = "0.1.0" edition = "2024" +license = { workspace = true } [dependencies] nssa_core = { workspace = true, features = ["host"] } @@ -11,7 +12,12 @@ sequencer_runner.workspace = true wallet.workspace = true common.workspace = true key_protocol.workspace = true +indexer_core.workspace = true +wallet-ffi.workspace = true +serde_json.workspace = true +token_core.workspace = true +url.workspace = true anyhow.workspace = true env_logger.workspace = true log.workspace = true diff --git a/integration_tests/configs/indexer/indexer_config.json b/integration_tests/configs/indexer/indexer_config.json new file mode 100644 index 00000000..fd5309b2 --- /dev/null +++ b/integration_tests/configs/indexer/indexer_config.json @@ -0,0 +1,17 @@ +{ + "bedrock_client_config": { + "addr": "http://127.0.0.1:8080", + "auth": { + "username": "user" + } + }, + "channel_id": "0101010101010101010101010101010101010101010101010101010101010101", + "backoff": { + "max_retries": 10, + "start_delay_millis": 100 + }, + "resubscribe_interval_millis": 1000, + "sequencer_client_config": { + "addr": "will_be_replaced_in_runtime" + } +} \ No newline at end of file diff --git 
a/integration_tests/configs/sequencer/bedrock_local_attached/sequencer_config.json b/integration_tests/configs/sequencer/bedrock_local_attached/sequencer_config.json new file mode 100644 index 00000000..3253115b --- /dev/null +++ b/integration_tests/configs/sequencer/bedrock_local_attached/sequencer_config.json @@ -0,0 +1,165 @@ +{ + "home": "", + "override_rust_log": null, + "genesis_id": 1, + "is_genesis_random": true, + "max_num_tx_in_block": 20, + "mempool_max_size": 10000, + "block_create_timeout_millis": 10000, + "port": 0, + "initial_accounts": [ + { + "account_id": "BLgCRDXYdQPMMWVHYRFGQZbgeHx9frkipa8GtpG2Syqy", + "balance": 10000 + }, + { + "account_id": "Gj1mJy5W7J5pfmLRujmQaLfLMWidNxQ6uwnhb666ZwHw", + "balance": 20000 + } + ], + "initial_commitments": [ + { + "npk": [ + 63, + 202, + 178, + 231, + 183, + 82, + 237, + 212, + 216, + 221, + 215, + 255, + 153, + 101, + 177, + 161, + 254, + 210, + 128, + 122, + 54, + 190, + 230, + 151, + 183, + 64, + 225, + 229, + 113, + 1, + 228, + 97 + ], + "account": { + "program_owner": [ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0 + ], + "balance": 10000, + "data": [], + "nonce": 0 + } + }, + { + "npk": [ + 192, + 251, + 166, + 243, + 167, + 236, + 84, + 249, + 35, + 136, + 130, + 172, + 219, + 225, + 161, + 139, + 229, + 89, + 243, + 125, + 194, + 213, + 209, + 30, + 23, + 174, + 100, + 244, + 124, + 74, + 140, + 47 + ], + "account": { + "program_owner": [ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0 + ], + "balance": 20000, + "data": [], + "nonce": 0 + } + } + ], + "signing_key": [ + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37 + ], + "bedrock_config": { + "channel_id": "0101010101010101010101010101010101010101010101010101010101010101", + "node_url": "http://127.0.0.1:8080", + "auth": { + "username": "user" + } + } +} diff --git a/integration_tests/configs/sequencer/sequencer_config.json b/integration_tests/configs/sequencer/detached/sequencer_config.json similarity index 98% rename from integration_tests/configs/sequencer/sequencer_config.json rename to integration_tests/configs/sequencer/detached/sequencer_config.json index af8d4249..b4927af1 100644 --- a/integration_tests/configs/sequencer/sequencer_config.json +++ b/integration_tests/configs/sequencer/detached/sequencer_config.json @@ -6,6 +6,7 @@ "max_num_tx_in_block": 20, "mempool_max_size": 10000, "block_create_timeout_millis": 10000, + "retry_pending_blocks_timeout_millis": 240000, "port": 0, "initial_accounts": [ { diff --git a/integration_tests/src/lib.rs b/integration_tests/src/lib.rs index ecccc99f..a38da565 100644 --- a/integration_tests/src/lib.rs +++ b/integration_tests/src/lib.rs @@ -3,19 +3,21 @@ use std::{net::SocketAddr, path::PathBuf, sync::LazyLock}; use actix_web::dev::ServerHandle; -use anyhow::{Context as _, Result}; +use anyhow::{Context, Result}; use base64::{Engine, engine::general_purpose::STANDARD as BASE64}; use common::{ sequencer_client::SequencerClient, transaction::{EncodedTransaction, NSSATransaction}, }; use futures::FutureExt as _; +use indexer_core::{IndexerCore, config::IndexerConfig}; use log::debug; use nssa::PrivacyPreservingTransaction; use nssa_core::Commitment; use sequencer_core::config::SequencerConfig; use tempfile::TempDir; use tokio::task::JoinHandle; +use url::Url; use wallet::{WalletCore, config::WalletConfigOverrides}; // TODO: Remove this and control time from tests @@ -38,40 +40,71 @@ static LOGGER: 
LazyLock<()> = LazyLock::new(env_logger::init); pub struct TestContext { sequencer_server_handle: ServerHandle, sequencer_loop_handle: JoinHandle>, + sequencer_retry_pending_blocks_handle: JoinHandle>, + indexer_loop_handle: Option>>, sequencer_client: SequencerClient, wallet: WalletCore, + wallet_password: String, _temp_sequencer_dir: TempDir, _temp_wallet_dir: TempDir, } impl TestContext { - /// Create new test context. + /// Create new test context in detached mode. Default. pub async fn new() -> Result { let manifest_dir = env!("CARGO_MANIFEST_DIR"); let sequencer_config_path = - PathBuf::from(manifest_dir).join("configs/sequencer/sequencer_config.json"); + PathBuf::from(manifest_dir).join("configs/sequencer/detached/sequencer_config.json"); let sequencer_config = SequencerConfig::from_path(&sequencer_config_path) .context("Failed to create sequencer config from file")?; - Self::new_with_sequencer_config(sequencer_config).await + Self::new_with_sequencer_and_maybe_indexer_configs(sequencer_config, None).await } - /// Create new test context with custom sequencer config. + /// Create new test context in local bedrock node attached mode. + pub async fn new_bedrock_local_attached() -> Result { + let manifest_dir = env!("CARGO_MANIFEST_DIR"); + + let sequencer_config_path = PathBuf::from(manifest_dir) + .join("configs/sequencer/bedrock_local_attached/sequencer_config.json"); + + let sequencer_config = SequencerConfig::from_path(&sequencer_config_path) + .context("Failed to create sequencer config from file")?; + + let indexer_config_path = + PathBuf::from(manifest_dir).join("configs/indexer/indexer_config.json"); + + let indexer_config = IndexerConfig::from_path(&indexer_config_path) + .context("Failed to create indexer config from file")?; + + Self::new_with_sequencer_and_maybe_indexer_configs(sequencer_config, Some(indexer_config)) + .await + } + + /// Create new test context with custom sequencer config and maybe indexer config. /// /// `home` and `port` fields of the provided config will be overridden to meet tests parallelism /// requirements. 
- pub async fn new_with_sequencer_config(sequencer_config: SequencerConfig) -> Result { + pub async fn new_with_sequencer_and_maybe_indexer_configs( + sequencer_config: SequencerConfig, + indexer_config: Option, + ) -> Result { // Ensure logger is initialized only once *LOGGER; debug!("Test context setup"); - let (sequencer_server_handle, sequencer_addr, sequencer_loop_handle, temp_sequencer_dir) = - Self::setup_sequencer(sequencer_config) - .await - .context("Failed to setup sequencer")?; + let ( + sequencer_server_handle, + sequencer_addr, + sequencer_loop_handle, + sequencer_retry_pending_blocks_handle, + temp_sequencer_dir, + ) = Self::setup_sequencer(sequencer_config) + .await + .context("Failed to setup sequencer")?; // Convert 0.0.0.0 to 127.0.0.1 for client connections // When binding to port 0, the server binds to 0.0.0.0: @@ -82,26 +115,60 @@ impl TestContext { format!("http://{sequencer_addr}") }; - let (wallet, temp_wallet_dir) = Self::setup_wallet(sequencer_addr.clone()) + let (wallet, temp_wallet_dir, wallet_password) = Self::setup_wallet(sequencer_addr.clone()) .await .context("Failed to setup wallet")?; - let sequencer_client = - SequencerClient::new(sequencer_addr).context("Failed to create sequencer client")?; + let sequencer_client = SequencerClient::new( + Url::parse(&sequencer_addr).context("Failed to parse sequencer addr")?, + ) + .context("Failed to create sequencer client")?; - Ok(Self { - sequencer_server_handle, - sequencer_loop_handle, - sequencer_client, - wallet, - _temp_sequencer_dir: temp_sequencer_dir, - _temp_wallet_dir: temp_wallet_dir, - }) + if let Some(mut indexer_config) = indexer_config { + indexer_config.sequencer_client_config.addr = + Url::parse(&sequencer_addr).context("Failed to parse sequencer addr")?; + + let indexer_core = IndexerCore::new(indexer_config)?; + + let indexer_loop_handle = Some(tokio::spawn(async move { + indexer_core.subscribe_parse_block_stream().await + })); + + Ok(Self { + sequencer_server_handle, + sequencer_loop_handle, + sequencer_retry_pending_blocks_handle, + indexer_loop_handle, + sequencer_client, + wallet, + _temp_sequencer_dir: temp_sequencer_dir, + _temp_wallet_dir: temp_wallet_dir, + wallet_password, + }) + } else { + Ok(Self { + sequencer_server_handle, + sequencer_loop_handle, + sequencer_retry_pending_blocks_handle, + indexer_loop_handle: None, + sequencer_client, + wallet, + _temp_sequencer_dir: temp_sequencer_dir, + _temp_wallet_dir: temp_wallet_dir, + wallet_password, + }) + } } async fn setup_sequencer( mut config: SequencerConfig, - ) -> Result<(ServerHandle, SocketAddr, JoinHandle>, TempDir)> { + ) -> Result<( + ServerHandle, + SocketAddr, + JoinHandle>, + JoinHandle>, + TempDir, + )> { let temp_sequencer_dir = tempfile::tempdir().context("Failed to create temp dir for sequencer home")?; @@ -113,18 +180,23 @@ impl TestContext { // Setting port to 0 lets the OS choose a free port for us config.port = 0; - let (sequencer_server_handle, sequencer_addr, sequencer_loop_handle) = - sequencer_runner::startup_sequencer(config).await?; + let ( + sequencer_server_handle, + sequencer_addr, + sequencer_loop_handle, + sequencer_retry_pending_blocks_handle, + ) = sequencer_runner::startup_sequencer(config).await?; Ok(( sequencer_server_handle, sequencer_addr, sequencer_loop_handle, + sequencer_retry_pending_blocks_handle, temp_sequencer_dir, )) } - async fn setup_wallet(sequencer_addr: String) -> Result<(WalletCore, TempDir)> { + async fn setup_wallet(sequencer_addr: String) -> Result<(WalletCore, TempDir, String)> { 
let manifest_dir = env!("CARGO_MANIFEST_DIR"); let wallet_config_source_path = PathBuf::from(manifest_dir).join("configs/wallet/wallet_config.json"); @@ -142,11 +214,12 @@ impl TestContext { ..Default::default() }; + let wallet_password = "test_pass".to_owned(); let wallet = WalletCore::new_init_storage( config_path, storage_path, Some(config_overrides), - "test_pass".to_owned(), + wallet_password.clone(), ) .context("Failed to init wallet")?; wallet @@ -154,7 +227,7 @@ impl TestContext { .await .context("Failed to store wallet persistent data")?; - Ok((wallet, temp_wallet_dir)) + Ok((wallet, temp_wallet_dir, wallet_password)) } /// Get reference to the wallet. @@ -162,6 +235,10 @@ impl TestContext { &self.wallet } + pub fn wallet_password(&self) -> &str { + &self.wallet_password + } + /// Get mutable reference to the wallet. pub fn wallet_mut(&mut self) -> &mut WalletCore { &mut self.wallet @@ -180,19 +257,40 @@ impl Drop for TestContext { let Self { sequencer_server_handle, sequencer_loop_handle, + sequencer_retry_pending_blocks_handle, + indexer_loop_handle, sequencer_client: _, wallet: _, _temp_sequencer_dir, _temp_wallet_dir, + wallet_password: _, } = self; sequencer_loop_handle.abort(); + sequencer_retry_pending_blocks_handle.abort(); + if let Some(indexer_loop_handle) = indexer_loop_handle { + indexer_loop_handle.abort(); + } // Can't wait here as Drop can't be async, but anyway stop signal should be sent sequencer_server_handle.stop(true).now_or_never(); } } +/// A test context to be used in regular, non-async #[test] tests. +pub struct BlockingTestContext { + pub ctx: TestContext, + pub runtime: tokio::runtime::Runtime, +} + +impl BlockingTestContext { + pub fn new() -> Result<Self> { + let runtime = tokio::runtime::Runtime::new().unwrap(); + let ctx = runtime.block_on(TestContext::new())?; + Ok(Self { ctx, runtime }) + } +} + pub fn format_public_account_id(account_id: &str) -> String { format!("Public/{account_id}") } diff --git a/integration_tests/tests/indexer.rs b/integration_tests/tests/indexer.rs new file mode 100644 index 00000000..b25c887b --- /dev/null +++ b/integration_tests/tests/indexer.rs @@ -0,0 +1,23 @@ +use anyhow::Result; +use integration_tests::TestContext; +use log::info; +use tokio::test; + +#[ignore = "needs complicated setup"] +#[test] +// To run this test properly, you need a nomos node running in the background. +// For instructions on building a nomos node, refer to [this](https://github.com/logos-blockchain/logos-blockchain?tab=readme-ov-file#running-a-logos-blockchain-node). +// +// It is recommended to run the node locally from a built binary. +async fn indexer_run_local_node() -> Result<()> { + let _ctx = TestContext::new_bedrock_local_attached().await?; + + info!("Let's observe behaviour"); + + tokio::time::sleep(std::time::Duration::from_secs(180)).await; + + // There is no way to check the indexer state yet. + // Once the indexer becomes a service, this will be possible.
+ + Ok(()) +} diff --git a/integration_tests/tests/token.rs b/integration_tests/tests/token.rs index 9a8b714a..2dd1d90d 100644 --- a/integration_tests/tests/token.rs +++ b/integration_tests/tests/token.rs @@ -8,6 +8,7 @@ use integration_tests::{ use key_protocol::key_management::key_tree::chain_index::ChainIndex; use log::info; use nssa::program::Program; +use token_core::{TokenDefinition, TokenHolding}; use tokio::test; use wallet::cli::{ Command, SubcommandReturnValue, @@ -59,11 +60,13 @@ async fn create_and_transfer_public_token() -> Result<()> { }; // Create new token + let name = "A NAME".to_string(); + let total_supply = 37; let subcommand = TokenProgramAgnosticSubcommand::New { definition_account_id: format_public_account_id(&definition_account_id.to_string()), supply_account_id: format_public_account_id(&supply_account_id.to_string()), - name: "A NAME".to_string(), - total_supply: 37, + name: name.clone(), + total_supply, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; @@ -76,16 +79,16 @@ async fn create_and_transfer_public_token() -> Result<()> { .get_account(definition_account_id.to_string()) .await? .account; + let token_definition = TokenDefinition::try_from(&definition_acc.data)?; assert_eq!(definition_acc.program_owner, Program::token().id()); - // The data of a token definition account has the following layout: - // [ 0x00 || name (6 bytes) || total supply (little endian 16 bytes) || metadata id (32 bytes)] assert_eq!( - definition_acc.data.as_ref(), - &[ - 0, 65, 32, 78, 65, 77, 69, 37, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 - ] + token_definition, + TokenDefinition::Fungible { + name: name.clone(), + total_supply, + metadata_id: None + } ); // Check the status of the token holding account with the total supply @@ -97,24 +100,23 @@ async fn create_and_transfer_public_token() -> Result<()> { // The account must be owned by the token program assert_eq!(supply_acc.program_owner, Program::token().id()); - // The data of a token holding account has the following layout: - // [ 0x01 || corresponding_token_definition_id (32 bytes) || balance (little endian 16 bytes) ] - // First byte of the data equal to 1 means it's a token holding account - assert_eq!(supply_acc.data.as_ref()[0], 1); - // Bytes from 1 to 33 represent the id of the token this account is associated with + let token_holding = TokenHolding::try_from(&supply_acc.data)?; assert_eq!( - &supply_acc.data.as_ref()[1..33], - definition_account_id.to_bytes() + token_holding, + TokenHolding::Fungible { + definition_id: definition_account_id, + balance: total_supply + } ); - assert_eq!(u128::from_le_bytes(supply_acc.data[33..].try_into()?), 37); // Transfer 7 tokens from supply_acc to recipient_account_id + let transfer_amount = 7; let subcommand = TokenProgramAgnosticSubcommand::Send { from: format_public_account_id(&supply_account_id.to_string()), to: Some(format_public_account_id(&recipient_account_id.to_string())), to_npk: None, to_ipk: None, - amount: 7, + amount: transfer_amount, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; @@ -129,9 +131,14 @@ async fn create_and_transfer_public_token() -> Result<()> { .await? 
.account; assert_eq!(supply_acc.program_owner, Program::token().id()); - assert_eq!(supply_acc.data[0], 1); - assert_eq!(&supply_acc.data[1..33], definition_account_id.to_bytes()); - assert_eq!(u128::from_le_bytes(supply_acc.data[33..].try_into()?), 30); + let token_holding = TokenHolding::try_from(&supply_acc.data)?; + assert_eq!( + token_holding, + TokenHolding::Fungible { + definition_id: definition_account_id, + balance: total_supply - transfer_amount + } + ); // Check the status of the recipient account after transfer let recipient_acc = ctx @@ -140,15 +147,21 @@ async fn create_and_transfer_public_token() -> Result<()> { .await? .account; assert_eq!(recipient_acc.program_owner, Program::token().id()); - assert_eq!(recipient_acc.data[0], 1); - assert_eq!(&recipient_acc.data[1..33], definition_account_id.to_bytes()); - assert_eq!(u128::from_le_bytes(recipient_acc.data[33..].try_into()?), 7); + let token_holding = TokenHolding::try_from(&recipient_acc.data)?; + assert_eq!( + token_holding, + TokenHolding::Fungible { + definition_id: definition_account_id, + balance: transfer_amount + } + ); // Burn 3 tokens from recipient_acc + let burn_amount = 3; let subcommand = TokenProgramAgnosticSubcommand::Burn { definition: format_public_account_id(&definition_account_id.to_string()), holder: format_public_account_id(&recipient_account_id.to_string()), - amount: 3, + amount: burn_amount, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; @@ -162,13 +175,15 @@ async fn create_and_transfer_public_token() -> Result<()> { .get_account(definition_account_id.to_string()) .await? .account; + let token_definition = TokenDefinition::try_from(&definition_acc.data)?; assert_eq!( - definition_acc.data.as_ref(), - &[ - 0, 65, 32, 78, 65, 77, 69, 34, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 - ] + token_definition, + TokenDefinition::Fungible { + name: name.clone(), + total_supply: total_supply - burn_amount, + metadata_id: None + } ); // Check the status of the recipient account after burn @@ -177,16 +192,24 @@ async fn create_and_transfer_public_token() -> Result<()> { .get_account(recipient_account_id.to_string()) .await? .account; + let token_holding = TokenHolding::try_from(&recipient_acc.data)?; - assert_eq!(u128::from_le_bytes(recipient_acc.data[33..].try_into()?), 4); + assert_eq!( + token_holding, + TokenHolding::Fungible { + definition_id: definition_account_id, + balance: transfer_amount - burn_amount + } + ); // Mint 10 tokens at recipient_acc + let mint_amount = 10; let subcommand = TokenProgramAgnosticSubcommand::Mint { definition: format_public_account_id(&definition_account_id.to_string()), holder: Some(format_public_account_id(&recipient_account_id.to_string())), holder_npk: None, holder_ipk: None, - amount: 10, + amount: mint_amount, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; @@ -200,13 +223,15 @@ async fn create_and_transfer_public_token() -> Result<()> { .get_account(definition_account_id.to_string()) .await? 
.account; + let token_definition = TokenDefinition::try_from(&definition_acc.data)?; assert_eq!( - definition_acc.data.as_ref(), - &[ - 0, 65, 32, 78, 65, 77, 69, 44, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 - ] + token_definition, + TokenDefinition::Fungible { + name, + total_supply: total_supply - burn_amount + mint_amount, + metadata_id: None + } ); // Check the status of the recipient account after mint @@ -215,10 +240,14 @@ async fn create_and_transfer_public_token() -> Result<()> { .get_account(recipient_account_id.to_string()) .await? .account; + let token_holding = TokenHolding::try_from(&recipient_acc.data)?; assert_eq!( - u128::from_le_bytes(recipient_acc.data[33..].try_into()?), - 14 + token_holding, + TokenHolding::Fungible { + definition_id: definition_account_id, + balance: transfer_amount - burn_amount + mint_amount + } ); info!("Successfully created and transferred public token"); @@ -270,11 +299,13 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> { }; // Create new token + let name = "A NAME".to_string(); + let total_supply = 37; let subcommand = TokenProgramAgnosticSubcommand::New { definition_account_id: format_public_account_id(&definition_account_id.to_string()), supply_account_id: format_private_account_id(&supply_account_id.to_string()), - name: "A NAME".to_string(), - total_supply: 37, + name: name.clone(), + total_supply, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; @@ -288,14 +319,16 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> { .get_account(definition_account_id.to_string()) .await? .account; + let token_definition = TokenDefinition::try_from(&definition_acc.data)?; assert_eq!(definition_acc.program_owner, Program::token().id()); assert_eq!( - definition_acc.data.as_ref(), - &[ - 0, 65, 32, 78, 65, 77, 69, 37, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 - ] + token_definition, + TokenDefinition::Fungible { + name: name.clone(), + total_supply, + metadata_id: None + } ); let new_commitment1 = ctx @@ -305,12 +338,13 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> { assert!(verify_commitment_is_in_state(new_commitment1, ctx.sequencer_client()).await); // Transfer 7 tokens from supply_acc to recipient_account_id + let transfer_amount = 7; let subcommand = TokenProgramAgnosticSubcommand::Send { from: format_private_account_id(&supply_account_id.to_string()), to: Some(format_private_account_id(&recipient_account_id.to_string())), to_npk: None, to_ipk: None, - amount: 7, + amount: transfer_amount, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; @@ -331,10 +365,11 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> { assert!(verify_commitment_is_in_state(new_commitment2, ctx.sequencer_client()).await); // Burn 3 tokens from recipient_acc + let burn_amount = 3; let subcommand = TokenProgramAgnosticSubcommand::Burn { definition: format_public_account_id(&definition_account_id.to_string()), holder: format_private_account_id(&recipient_account_id.to_string()), - amount: 3, + amount: burn_amount, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; @@ -348,13 +383,15 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> { 
.get_account(definition_account_id.to_string()) .await? .account; + let token_definition = TokenDefinition::try_from(&definition_acc.data)?; assert_eq!( - definition_acc.data.as_ref(), - &[ - 0, 65, 32, 78, 65, 77, 69, 34, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 - ] + token_definition, + TokenDefinition::Fungible { + name, + total_supply: total_supply - burn_amount, + metadata_id: None + } ); let new_commitment2 = ctx @@ -368,10 +405,14 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> { .wallet() .get_account_private(&recipient_account_id) .context("Failed to get recipient account")?; + let token_holding = TokenHolding::try_from(&recipient_acc.data)?; assert_eq!( - u128::from_le_bytes(recipient_acc.data[33..].try_into()?), - 4 // 7 - 3 + token_holding, + TokenHolding::Fungible { + definition_id: definition_account_id, + balance: transfer_amount - burn_amount + } ); info!("Successfully created and transferred token with private supply"); @@ -414,11 +455,13 @@ async fn create_token_with_private_definition() -> Result<()> { }; // Create token with private definition + let name = "A NAME".to_string(); + let total_supply = 37; let subcommand = TokenProgramAgnosticSubcommand::New { definition_account_id: format_private_account_id(&definition_account_id.to_string()), supply_account_id: format_public_account_id(&supply_account_id.to_string()), - name: "A NAME".to_string(), - total_supply: 37, + name: name.clone(), + total_supply, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; @@ -441,8 +484,14 @@ async fn create_token_with_private_definition() -> Result<()> { .account; assert_eq!(supply_acc.program_owner, Program::token().id()); - assert_eq!(supply_acc.data.as_ref()[0], 1); - assert_eq!(u128::from_le_bytes(supply_acc.data[33..].try_into()?), 37); + let token_holding = TokenHolding::try_from(&supply_acc.data)?; + assert_eq!( + token_holding, + TokenHolding::Fungible { + definition_id: definition_account_id, + balance: total_supply + } + ); // Create private recipient account let result = wallet::cli::execute_subcommand( @@ -471,6 +520,7 @@ async fn create_token_with_private_definition() -> Result<()> { }; // Mint to public account + let mint_amount_public = 10; let subcommand = TokenProgramAgnosticSubcommand::Mint { definition: format_private_account_id(&definition_account_id.to_string()), holder: Some(format_public_account_id( @@ -478,7 +528,7 @@ async fn create_token_with_private_definition() -> Result<()> { )), holder_npk: None, holder_ipk: None, - amount: 10, + amount: mint_amount_public, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; @@ -491,10 +541,15 @@ async fn create_token_with_private_definition() -> Result<()> { .wallet() .get_account_private(&definition_account_id) .context("Failed to get definition account")?; + let token_definition = TokenDefinition::try_from(&definition_acc.data)?; assert_eq!( - u128::from_le_bytes(definition_acc.data[7..23].try_into()?), - 47 // 37 + 10 + token_definition, + TokenDefinition::Fungible { + name: name.clone(), + total_supply: total_supply + mint_amount_public, + metadata_id: None + } ); // Verify public recipient received tokens @@ -503,13 +558,18 @@ async fn create_token_with_private_definition() -> Result<()> { .get_account(recipient_account_id_public.to_string()) .await? 
.account; + let token_holding = TokenHolding::try_from(&recipient_acc.data)?; assert_eq!( - u128::from_le_bytes(recipient_acc.data[33..].try_into()?), - 10 + token_holding, + TokenHolding::Fungible { + definition_id: definition_account_id, + balance: mint_amount_public + } ); // Mint to private account + let mint_amount_private = 5; let subcommand = TokenProgramAgnosticSubcommand::Mint { definition: format_private_account_id(&definition_account_id.to_string()), holder: Some(format_private_account_id( @@ -517,7 +577,7 @@ async fn create_token_with_private_definition() -> Result<()> { )), holder_npk: None, holder_ipk: None, - amount: 5, + amount: mint_amount_private, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; @@ -537,10 +597,14 @@ async fn create_token_with_private_definition() -> Result<()> { .wallet() .get_account_private(&recipient_account_id_private) .context("Failed to get private recipient account")?; + let token_holding = TokenHolding::try_from(&recipient_acc_private.data)?; assert_eq!( - u128::from_le_bytes(recipient_acc_private.data[33..].try_into()?), - 5 + token_holding, + TokenHolding::Fungible { + definition_id: definition_account_id, + balance: mint_amount_private + } ); info!("Successfully created token with private definition and minted to both account types"); @@ -579,11 +643,13 @@ async fn create_token_with_private_definition_and_supply() -> Result<()> { }; // Create token with both private definition and supply + let name = "A NAME".to_string(); + let total_supply = 37; let subcommand = TokenProgramAgnosticSubcommand::New { definition_account_id: format_private_account_id(&definition_account_id.to_string()), supply_account_id: format_private_account_id(&supply_account_id.to_string()), - name: "A NAME".to_string(), - total_supply: 37, + name, + total_supply, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; @@ -610,8 +676,15 @@ async fn create_token_with_private_definition_and_supply() -> Result<()> { .wallet() .get_account_private(&supply_account_id) .context("Failed to get supply account")?; + let token_holding = TokenHolding::try_from(&supply_acc.data)?; - assert_eq!(u128::from_le_bytes(supply_acc.data[33..].try_into()?), 37); + assert_eq!( + token_holding, + TokenHolding::Fungible { + definition_id: definition_account_id, + balance: total_supply + } + ); // Create recipient account let result = wallet::cli::execute_subcommand( @@ -627,12 +700,13 @@ async fn create_token_with_private_definition_and_supply() -> Result<()> { }; // Transfer tokens + let transfer_amount = 7; let subcommand = TokenProgramAgnosticSubcommand::Send { from: format_private_account_id(&supply_account_id.to_string()), to: Some(format_private_account_id(&recipient_account_id.to_string())), to_npk: None, to_ipk: None, - amount: 7, + amount: transfer_amount, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; @@ -658,13 +732,27 @@ async fn create_token_with_private_definition_and_supply() -> Result<()> { .wallet() .get_account_private(&supply_account_id) .context("Failed to get supply account")?; - assert_eq!(u128::from_le_bytes(supply_acc.data[33..].try_into()?), 30); + let token_holding = TokenHolding::try_from(&supply_acc.data)?; + assert_eq!( + token_holding, + TokenHolding::Fungible { + definition_id: definition_account_id, + balance: total_supply - transfer_amount + } + ); let recipient_acc = ctx .wallet() .get_account_private(&recipient_account_id) .context("Failed to get 
recipient account")?; - assert_eq!(u128::from_le_bytes(recipient_acc.data[33..].try_into()?), 7); + let token_holding = TokenHolding::try_from(&recipient_acc.data)?; + assert_eq!( + token_holding, + TokenHolding::Fungible { + definition_id: definition_account_id, + balance: transfer_amount + } + ); info!("Successfully created and transferred token with both private definition and supply"); @@ -715,11 +803,13 @@ async fn shielded_token_transfer() -> Result<()> { }; // Create token + let name = "A NAME".to_string(); + let total_supply = 37; let subcommand = TokenProgramAgnosticSubcommand::New { definition_account_id: format_public_account_id(&definition_account_id.to_string()), supply_account_id: format_public_account_id(&supply_account_id.to_string()), - name: "A NAME".to_string(), - total_supply: 37, + name, + total_supply, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; @@ -728,12 +818,13 @@ async fn shielded_token_transfer() -> Result<()> { tokio::time::sleep(Duration::from_secs(TIME_TO_WAIT_FOR_BLOCK_SECONDS)).await; // Perform shielded transfer: public supply -> private recipient + let transfer_amount = 7; let subcommand = TokenProgramAgnosticSubcommand::Send { from: format_public_account_id(&supply_account_id.to_string()), to: Some(format_private_account_id(&recipient_account_id.to_string())), to_npk: None, to_ipk: None, - amount: 7, + amount: transfer_amount, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; @@ -747,7 +838,14 @@ async fn shielded_token_transfer() -> Result<()> { .get_account(supply_account_id.to_string()) .await? .account; - assert_eq!(u128::from_le_bytes(supply_acc.data[33..].try_into()?), 30); + let token_holding = TokenHolding::try_from(&supply_acc.data)?; + assert_eq!( + token_holding, + TokenHolding::Fungible { + definition_id: definition_account_id, + balance: total_supply - transfer_amount + } + ); // Verify recipient commitment exists let new_commitment = ctx @@ -761,7 +859,14 @@ async fn shielded_token_transfer() -> Result<()> { .wallet() .get_account_private(&recipient_account_id) .context("Failed to get recipient account")?; - assert_eq!(u128::from_le_bytes(recipient_acc.data[33..].try_into()?), 7); + let token_holding = TokenHolding::try_from(&recipient_acc.data)?; + assert_eq!( + token_holding, + TokenHolding::Fungible { + definition_id: definition_account_id, + balance: transfer_amount + } + ); info!("Successfully performed shielded token transfer"); @@ -812,11 +917,13 @@ async fn deshielded_token_transfer() -> Result<()> { }; // Create token with private supply + let name = "A NAME".to_string(); + let total_supply = 37; let subcommand = TokenProgramAgnosticSubcommand::New { definition_account_id: format_public_account_id(&definition_account_id.to_string()), supply_account_id: format_private_account_id(&supply_account_id.to_string()), - name: "A NAME".to_string(), - total_supply: 37, + name, + total_supply, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; @@ -825,12 +932,13 @@ async fn deshielded_token_transfer() -> Result<()> { tokio::time::sleep(Duration::from_secs(TIME_TO_WAIT_FOR_BLOCK_SECONDS)).await; // Perform deshielded transfer: private supply -> public recipient + let transfer_amount = 7; let subcommand = TokenProgramAgnosticSubcommand::Send { from: format_private_account_id(&supply_account_id.to_string()), to: Some(format_public_account_id(&recipient_account_id.to_string())), to_npk: None, to_ipk: None, - amount: 7, + 
amount: transfer_amount, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; @@ -850,7 +958,14 @@ async fn deshielded_token_transfer() -> Result<()> { .wallet() .get_account_private(&supply_account_id) .context("Failed to get supply account")?; - assert_eq!(u128::from_le_bytes(supply_acc.data[33..].try_into()?), 30); + let token_holding = TokenHolding::try_from(&supply_acc.data)?; + assert_eq!( + token_holding, + TokenHolding::Fungible { + definition_id: definition_account_id, + balance: total_supply - transfer_amount + } + ); // Verify recipient balance let recipient_acc = ctx @@ -858,7 +973,14 @@ async fn deshielded_token_transfer() -> Result<()> { .get_account(recipient_account_id.to_string()) .await? .account; - assert_eq!(u128::from_le_bytes(recipient_acc.data[33..].try_into()?), 7); + let token_holding = TokenHolding::try_from(&recipient_acc.data)?; + assert_eq!( + token_holding, + TokenHolding::Fungible { + definition_id: definition_account_id, + balance: transfer_amount + } + ); info!("Successfully performed deshielded token transfer"); @@ -896,11 +1018,13 @@ async fn token_claiming_path_with_private_accounts() -> Result<()> { }; // Create token + let name = "A NAME".to_string(); + let total_supply = 37; let subcommand = TokenProgramAgnosticSubcommand::New { definition_account_id: format_private_account_id(&definition_account_id.to_string()), supply_account_id: format_private_account_id(&supply_account_id.to_string()), - name: "A NAME".to_string(), - total_supply: 37, + name, + total_supply, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; @@ -931,12 +1055,13 @@ async fn token_claiming_path_with_private_accounts() -> Result<()> { .context("Failed to get private account keys")?; // Mint using claiming path (foreign account) + let mint_amount = 9; let subcommand = TokenProgramAgnosticSubcommand::Mint { definition: format_private_account_id(&definition_account_id.to_string()), holder: None, holder_npk: Some(hex::encode(holder_keys.nullifer_public_key.0)), holder_ipk: Some(hex::encode(holder_keys.incoming_viewing_public_key.0)), - amount: 9, + amount: mint_amount, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; @@ -960,7 +1085,14 @@ async fn token_claiming_path_with_private_accounts() -> Result<()> { .wallet() .get_account_private(&recipient_account_id) .context("Failed to get recipient account")?; - assert_eq!(u128::from_le_bytes(recipient_acc.data[33..].try_into()?), 9); + let token_holding = TokenHolding::try_from(&recipient_acc.data)?; + assert_eq!( + token_holding, + TokenHolding::Fungible { + definition_id: definition_account_id, + balance: mint_amount + } + ); info!("Successfully minted tokens using claiming path"); diff --git a/integration_tests/tests/tps.rs b/integration_tests/tests/tps.rs index 3fdc8ac8..5fc09c4c 100644 --- a/integration_tests/tests/tps.rs +++ b/integration_tests/tests/tps.rs @@ -25,7 +25,11 @@ pub async fn tps_test() -> Result<()> { let target_tps = 12; let tps_test = TpsTestManager::new(target_tps, num_transactions); - let ctx = TestContext::new_with_sequencer_config(tps_test.generate_sequencer_config()).await?; + let ctx = TestContext::new_with_sequencer_and_maybe_indexer_configs( + tps_test.generate_sequencer_config(), + None, + ) + .await?; let target_time = tps_test.target_time(); info!( @@ -186,6 +190,7 @@ impl TpsTestManager { initial_commitments: vec![initial_commitment], signing_key: [37; 32], bedrock_config: None, + 
retry_pending_blocks_timeout_millis: 1000 * 60 * 4, } } } diff --git a/integration_tests/tests/wallet_ffi.rs b/integration_tests/tests/wallet_ffi.rs new file mode 100644 index 00000000..bb6b9805 --- /dev/null +++ b/integration_tests/tests/wallet_ffi.rs @@ -0,0 +1,618 @@ +use std::{ + collections::HashSet, + ffi::{CStr, CString, c_char}, + io::Write, + time::Duration, +}; + +use anyhow::Result; +use integration_tests::{ + ACC_RECEIVER, ACC_SENDER, ACC_SENDER_PRIVATE, BlockingTestContext, + TIME_TO_WAIT_FOR_BLOCK_SECONDS, +}; +use log::info; +use nssa::{Account, AccountId, PublicKey, program::Program}; +use nssa_core::program::DEFAULT_PROGRAM_ID; +use tempfile::tempdir; +use wallet::WalletCore; +use wallet_ffi::{ + FfiAccount, FfiAccountList, FfiBytes32, FfiPrivateAccountKeys, FfiPublicAccountKey, + FfiTransferResult, WalletHandle, error, +}; + +unsafe extern "C" { + fn wallet_ffi_create_new( + config_path: *const c_char, + storage_path: *const c_char, + password: *const c_char, + ) -> *mut WalletHandle; + + fn wallet_ffi_destroy(handle: *mut WalletHandle); + + fn wallet_ffi_create_account_public( + handle: *mut WalletHandle, + out_account_id: *mut FfiBytes32, + ) -> error::WalletFfiError; + + fn wallet_ffi_create_account_private( + handle: *mut WalletHandle, + out_account_id: *mut FfiBytes32, + ) -> error::WalletFfiError; + + fn wallet_ffi_list_accounts( + handle: *mut WalletHandle, + out_list: *mut FfiAccountList, + ) -> error::WalletFfiError; + + fn wallet_ffi_free_account_list(list: *mut FfiAccountList); + + fn wallet_ffi_get_balance( + handle: *mut WalletHandle, + account_id: *const FfiBytes32, + is_public: bool, + out_balance: *mut [u8; 16], + ) -> error::WalletFfiError; + + fn wallet_ffi_get_account_public( + handle: *mut WalletHandle, + account_id: *const FfiBytes32, + out_account: *mut FfiAccount, + ) -> error::WalletFfiError; + + fn wallet_ffi_free_account_data(account: *mut FfiAccount); + + fn wallet_ffi_get_public_account_key( + handle: *mut WalletHandle, + account_id: *const FfiBytes32, + out_public_key: *mut FfiPublicAccountKey, + ) -> error::WalletFfiError; + + fn wallet_ffi_get_private_account_keys( + handle: *mut WalletHandle, + account_id: *const FfiBytes32, + out_keys: *mut FfiPrivateAccountKeys, + ) -> error::WalletFfiError; + + fn wallet_ffi_free_private_account_keys(keys: *mut FfiPrivateAccountKeys); + + fn wallet_ffi_account_id_to_base58(account_id: *const FfiBytes32) -> *mut std::ffi::c_char; + + fn wallet_ffi_free_string(ptr: *mut c_char); + + fn wallet_ffi_account_id_from_base58( + base58_str: *const std::ffi::c_char, + out_account_id: *mut FfiBytes32, + ) -> error::WalletFfiError; + + fn wallet_ffi_transfer_public( + handle: *mut WalletHandle, + from: *const FfiBytes32, + to: *const FfiBytes32, + amount: *const [u8; 16], + out_result: *mut FfiTransferResult, + ) -> error::WalletFfiError; + + fn wallet_ffi_free_transfer_result(result: *mut FfiTransferResult); + + fn wallet_ffi_register_public_account( + handle: *mut WalletHandle, + account_id: *const FfiBytes32, + out_result: *mut FfiTransferResult, + ) -> error::WalletFfiError; +} + +fn new_wallet_ffi_with_test_context_config(ctx: &BlockingTestContext) -> *mut WalletHandle { + let tempdir = tempfile::tempdir().unwrap(); + let config_path = tempdir.path().join("wallet_config.json"); + let storage_path = tempdir.path().join("storage.json"); + let mut config = ctx.ctx.wallet().config().to_owned(); + if let Some(config_overrides) = ctx.ctx.wallet().config_overrides().clone() { + 
config.apply_overrides(config_overrides); + } + let mut file = std::fs::OpenOptions::new() + .write(true) + .create(true) + .truncate(true) + .open(&config_path) + .unwrap(); + + let config_with_overrides_serialized = serde_json::to_vec_pretty(&config).unwrap(); + + file.write_all(&config_with_overrides_serialized).unwrap(); + + let config_path = CString::new(config_path.to_str().unwrap()).unwrap(); + let storage_path = CString::new(storage_path.to_str().unwrap()).unwrap(); + let password = CString::new(ctx.ctx.wallet_password()).unwrap(); + + unsafe { + wallet_ffi_create_new( + config_path.as_ptr(), + storage_path.as_ptr(), + password.as_ptr(), + ) + } +} + +fn new_wallet_ffi_with_default_config(password: &str) -> *mut WalletHandle { + let tempdir = tempdir().unwrap(); + let config_path = tempdir.path().join("wallet_config.json"); + let storage_path = tempdir.path().join("storage.json"); + let config_path_c = CString::new(config_path.to_str().unwrap()).unwrap(); + let storage_path_c = CString::new(storage_path.to_str().unwrap()).unwrap(); + let password = CString::new(password).unwrap(); + + unsafe { + wallet_ffi_create_new( + config_path_c.as_ptr(), + storage_path_c.as_ptr(), + password.as_ptr(), + ) + } +} + +fn new_wallet_rust_with_default_config(password: &str) -> WalletCore { + let tempdir = tempdir().unwrap(); + let config_path = tempdir.path().join("wallet_config.json"); + let storage_path = tempdir.path().join("storage.json"); + + WalletCore::new_init_storage( + config_path.to_path_buf(), + storage_path.to_path_buf(), + None, + password.to_string(), + ) + .unwrap() +} + +#[test] +fn test_wallet_ffi_create_public_accounts() { + let password = "password_for_tests"; + let n_accounts = 10; + // First `n_accounts` public accounts created with Rust wallet + let new_public_account_ids_rust = { + let mut account_ids = Vec::new(); + + let mut wallet_rust = new_wallet_rust_with_default_config(password); + for _ in 0..n_accounts { + let account_id = wallet_rust.create_new_account_public(None).0; + account_ids.push(*account_id.value()); + } + account_ids + }; + + // First `n_accounts` public accounts created with wallet FFI + let new_public_account_ids_ffi = unsafe { + let mut account_ids = Vec::new(); + + let wallet_ffi_handle = new_wallet_ffi_with_default_config(password); + for _ in 0..n_accounts { + let mut out_account_id = FfiBytes32::from_bytes([0; 32]); + wallet_ffi_create_account_public( + wallet_ffi_handle, + (&mut out_account_id) as *mut FfiBytes32, + ); + account_ids.push(out_account_id.data); + } + wallet_ffi_destroy(wallet_ffi_handle); + account_ids + }; + + assert_eq!(new_public_account_ids_ffi, new_public_account_ids_rust); +} + +#[test] +fn test_wallet_ffi_create_private_accounts() { + let password = "password_for_tests"; + let n_accounts = 10; + // First `n_accounts` private accounts created with Rust wallet + let new_private_account_ids_rust = { + let mut account_ids = Vec::new(); + + let mut wallet_rust = new_wallet_rust_with_default_config(password); + for _ in 0..n_accounts { + let account_id = wallet_rust.create_new_account_private(None).0; + account_ids.push(*account_id.value()); + } + account_ids + }; + + // First `n_accounts` private accounts created with wallet FFI + let new_private_account_ids_ffi = unsafe { + let mut account_ids = Vec::new(); + + let wallet_ffi_handle = new_wallet_ffi_with_default_config(password); + for _ in 0..n_accounts { + let mut out_account_id = FfiBytes32::from_bytes([0; 32]); + wallet_ffi_create_account_private( + wallet_ffi_handle, + (&mut 
out_account_id) as *mut FfiBytes32, + ); + account_ids.push(out_account_id.data); + } + wallet_ffi_destroy(wallet_ffi_handle); + account_ids + }; + + assert_eq!(new_private_account_ids_ffi, new_private_account_ids_rust) +} + +#[test] +fn test_wallet_ffi_list_accounts() { + let password = "password_for_tests"; + + // Create the wallet FFI + let wallet_ffi_handle = unsafe { + let handle = new_wallet_ffi_with_default_config(password); + // Create 5 public accounts and 5 private accounts + for _ in 0..5 { + let mut out_account_id = FfiBytes32::from_bytes([0; 32]); + wallet_ffi_create_account_public(handle, (&mut out_account_id) as *mut FfiBytes32); + wallet_ffi_create_account_private(handle, (&mut out_account_id) as *mut FfiBytes32); + } + + handle + }; + + // Create the wallet Rust + let wallet_rust = { + let mut wallet = new_wallet_rust_with_default_config(password); + // Create 5 public accounts and 5 private accounts + for _ in 0..5 { + wallet.create_new_account_public(None); + wallet.create_new_account_private(None); + } + wallet + }; + + // Get the account list with FFI method + let mut wallet_ffi_account_list = unsafe { + let mut out_list = FfiAccountList::default(); + wallet_ffi_list_accounts(wallet_ffi_handle, (&mut out_list) as *mut FfiAccountList); + out_list + }; + + let wallet_rust_account_ids = wallet_rust + .storage() + .user_data + .account_ids() + .collect::>(); + + // Assert same number of elements between Rust and FFI result + assert_eq!(wallet_rust_account_ids.len(), wallet_ffi_account_list.count); + + let wallet_ffi_account_list_slice = unsafe { + core::slice::from_raw_parts( + wallet_ffi_account_list.entries, + wallet_ffi_account_list.count, + ) + }; + + // Assert same account ids between Rust and FFI result + assert_eq!( + wallet_rust_account_ids + .iter() + .map(|id| id.value()) + .collect::>(), + wallet_ffi_account_list_slice + .iter() + .map(|entry| &entry.account_id.data) + .collect::>() + ); + + // Assert `is_pub` flag is correct in the FFI result + for entry in wallet_ffi_account_list_slice.iter() { + let account_id = AccountId::new(entry.account_id.data); + let is_pub_default_in_rust_wallet = wallet_rust + .storage() + .user_data + .default_pub_account_signing_keys + .contains_key(&account_id); + let is_pub_key_tree_wallet_rust = wallet_rust + .storage() + .user_data + .public_key_tree + .account_id_map + .contains_key(&account_id); + + let is_public_in_rust_wallet = is_pub_default_in_rust_wallet || is_pub_key_tree_wallet_rust; + + assert_eq!(entry.is_public, is_public_in_rust_wallet); + } + + unsafe { + wallet_ffi_free_account_list((&mut wallet_ffi_account_list) as *mut FfiAccountList); + wallet_ffi_destroy(wallet_ffi_handle); + } +} + +#[test] +fn test_wallet_ffi_get_balance_public() -> Result<()> { + let ctx = BlockingTestContext::new()?; + let account_id: AccountId = ACC_SENDER.parse().unwrap(); + let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx); + + let balance = unsafe { + let mut out_balance: [u8; 16] = [0; 16]; + let ffi_account_id = FfiBytes32::from(&account_id); + let _result = wallet_ffi_get_balance( + wallet_ffi_handle, + (&ffi_account_id) as *const FfiBytes32, + true, + (&mut out_balance) as *mut [u8; 16], + ); + u128::from_le_bytes(out_balance) + }; + assert_eq!(balance, 10000); + + info!("Successfully retrieved account balance"); + + unsafe { + wallet_ffi_destroy(wallet_ffi_handle); + } + + Ok(()) +} + +#[test] +fn test_wallet_ffi_get_account_public() -> Result<()> { + let ctx = BlockingTestContext::new()?; + let account_id: 
AccountId = ACC_SENDER.parse().unwrap(); + let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx); + let mut out_account = FfiAccount::default(); + + let account: Account = unsafe { + let ffi_account_id = FfiBytes32::from(&account_id); + let _result = wallet_ffi_get_account_public( + wallet_ffi_handle, + (&ffi_account_id) as *const FfiBytes32, + (&mut out_account) as *mut FfiAccount, + ); + (&out_account).try_into().unwrap() + }; + + assert_eq!( + account.program_owner, + Program::authenticated_transfer_program().id() + ); + assert_eq!(account.balance, 10000); + assert!(account.data.is_empty()); + assert_eq!(account.nonce, 0); + + unsafe { + wallet_ffi_free_account_data((&mut out_account) as *mut FfiAccount); + wallet_ffi_destroy(wallet_ffi_handle); + } + + info!("Successfully retrieved account with correct details"); + + Ok(()) +} + +#[test] +fn test_wallet_ffi_get_public_account_keys() -> Result<()> { + let ctx = BlockingTestContext::new()?; + let account_id: AccountId = ACC_SENDER.parse().unwrap(); + let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx); + let mut out_key = FfiPublicAccountKey::default(); + + let key: PublicKey = unsafe { + let ffi_account_id = FfiBytes32::from(&account_id); + let _result = wallet_ffi_get_public_account_key( + wallet_ffi_handle, + (&ffi_account_id) as *const FfiBytes32, + (&mut out_key) as *mut FfiPublicAccountKey, + ); + (&out_key).try_into().unwrap() + }; + + let expected_key = { + let private_key = ctx + .ctx + .wallet() + .get_account_public_signing_key(&account_id) + .unwrap(); + PublicKey::new_from_private_key(private_key) + }; + + assert_eq!(key, expected_key); + + info!("Successfully retrieved account key"); + + unsafe { + wallet_ffi_destroy(wallet_ffi_handle); + } + + Ok(()) +} + +#[test] +fn test_wallet_ffi_get_private_account_keys() -> Result<()> { + let ctx = BlockingTestContext::new()?; + let account_id: AccountId = ACC_SENDER_PRIVATE.parse().unwrap(); + let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx); + let mut keys = FfiPrivateAccountKeys::default(); + + unsafe { + let ffi_account_id = FfiBytes32::from(&account_id); + let _result = wallet_ffi_get_private_account_keys( + wallet_ffi_handle, + (&ffi_account_id) as *const FfiBytes32, + (&mut keys) as *mut FfiPrivateAccountKeys, + ); + }; + + let key_chain = &ctx + .ctx + .wallet() + .storage() + .user_data + .get_private_account(&account_id) + .unwrap() + .0; + + let expected_npk = &key_chain.nullifer_public_key; + let expected_ivk = &key_chain.incoming_viewing_public_key; + + assert_eq!(&keys.npk(), expected_npk); + assert_eq!(&keys.ivk().unwrap(), expected_ivk); + + unsafe { + wallet_ffi_free_private_account_keys((&mut keys) as *mut FfiPrivateAccountKeys); + wallet_ffi_destroy(wallet_ffi_handle); + } + + info!("Successfully retrieved account keys"); + + Ok(()) +} + +#[test] +fn test_wallet_ffi_account_id_to_base58() { + let account_id_str = ACC_SENDER; + let account_id: AccountId = account_id_str.parse().unwrap(); + let ffi_bytes: FfiBytes32 = (&account_id).into(); + let ptr = unsafe { wallet_ffi_account_id_to_base58((&ffi_bytes) as *const FfiBytes32) }; + + let ffi_result = unsafe { CStr::from_ptr(ptr).to_str().unwrap() }; + + assert_eq!(account_id_str, ffi_result); + + unsafe { + wallet_ffi_free_string(ptr); + } +} + +#[test] +fn test_wallet_ffi_base58_to_account_id() { + let account_id_str = ACC_SENDER; + let account_id_c_str = CString::new(account_id_str).unwrap(); + let account_id: AccountId = unsafe { + let mut out_account_id_bytes 
= FfiBytes32::default(); + wallet_ffi_account_id_from_base58( + account_id_c_str.as_ptr(), + (&mut out_account_id_bytes) as *mut FfiBytes32, + ); + out_account_id_bytes.into() + }; + + let expected_account_id = account_id_str.parse().unwrap(); + + assert_eq!(account_id, expected_account_id); +} + +#[test] +fn test_wallet_ffi_init_public_account_auth_transfer() -> Result<()> { + let ctx = BlockingTestContext::new().unwrap(); + let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx); + + // Create a new uninitialized public account + let mut out_account_id = FfiBytes32::from_bytes([0; 32]); + unsafe { + wallet_ffi_create_account_public( + wallet_ffi_handle, + (&mut out_account_id) as *mut FfiBytes32, + ); + } + + // Check its program owner is the default program id + let account: Account = unsafe { + let mut out_account = FfiAccount::default(); + let _result = wallet_ffi_get_account_public( + wallet_ffi_handle, + (&out_account_id) as *const FfiBytes32, + (&mut out_account) as *mut FfiAccount, + ); + (&out_account).try_into().unwrap() + }; + assert_eq!(account.program_owner, DEFAULT_PROGRAM_ID); + + // Call the init function + let mut transfer_result = FfiTransferResult::default(); + unsafe { + wallet_ffi_register_public_account( + wallet_ffi_handle, + (&out_account_id) as *const FfiBytes32, + (&mut transfer_result) as *mut FfiTransferResult, + ); + } + + info!("Waiting for next block creation"); + std::thread::sleep(Duration::from_secs(TIME_TO_WAIT_FOR_BLOCK_SECONDS)); + + // Check that the program owner is now the authenticated transfer program + let account: Account = unsafe { + let mut out_account = FfiAccount::default(); + let _result = wallet_ffi_get_account_public( + wallet_ffi_handle, + (&out_account_id) as *const FfiBytes32, + (&mut out_account) as *mut FfiAccount, + ); + (&out_account).try_into().unwrap() + }; + assert_eq!( + account.program_owner, + Program::authenticated_transfer_program().id() + ); + + unsafe { + wallet_ffi_free_transfer_result((&mut transfer_result) as *mut FfiTransferResult); + wallet_ffi_destroy(wallet_ffi_handle); + } + + Ok(()) +} + +#[test] +fn test_wallet_ffi_transfer_public() -> Result<()> { + let ctx = BlockingTestContext::new().unwrap(); + let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx); + let from: FfiBytes32 = (&ACC_SENDER.parse::<AccountId>().unwrap()).into(); + let to: FfiBytes32 = (&ACC_RECEIVER.parse::<AccountId>().unwrap()).into(); + let amount: [u8; 16] = 100u128.to_le_bytes(); + + let mut transfer_result = FfiTransferResult::default(); + unsafe { + wallet_ffi_transfer_public( + wallet_ffi_handle, + (&from) as *const FfiBytes32, + (&to) as *const FfiBytes32, + (&amount) as *const [u8; 16], + (&mut transfer_result) as *mut FfiTransferResult, + ); + } + + info!("Waiting for next block creation"); + std::thread::sleep(Duration::from_secs(TIME_TO_WAIT_FOR_BLOCK_SECONDS)); + + let from_balance = unsafe { + let mut out_balance: [u8; 16] = [0; 16]; + let _result = wallet_ffi_get_balance( + wallet_ffi_handle, + (&from) as *const FfiBytes32, + true, + (&mut out_balance) as *mut [u8; 16], + ); + u128::from_le_bytes(out_balance) + }; + + let to_balance = unsafe { + let mut out_balance: [u8; 16] = [0; 16]; + let _result = wallet_ffi_get_balance( + wallet_ffi_handle, + (&to) as *const FfiBytes32, + true, + (&mut out_balance) as *mut [u8; 16], + ); + u128::from_le_bytes(out_balance) + }; + + assert_eq!(from_balance, 9900); + assert_eq!(to_balance, 20100); + + unsafe { + wallet_ffi_free_transfer_result((&mut transfer_result) as *mut 
FfiTransferResult); + wallet_ffi_destroy(wallet_ffi_handle); + } + + Ok(()) +} diff --git a/key_protocol/Cargo.toml b/key_protocol/Cargo.toml index 91bea9be..de0a3bf2 100644 --- a/key_protocol/Cargo.toml +++ b/key_protocol/Cargo.toml @@ -2,6 +2,7 @@ name = "key_protocol" version = "0.1.0" edition = "2024" +license = { workspace = true } [dependencies] secp256k1 = "0.31.1" diff --git a/mempool/Cargo.toml b/mempool/Cargo.toml index 46014389..ee7e884c 100644 --- a/mempool/Cargo.toml +++ b/mempool/Cargo.toml @@ -2,6 +2,7 @@ name = "mempool" version = "0.1.0" edition = "2024" +license = { workspace = true } [dependencies] tokio = { workspace = true, features = ["sync"] } diff --git a/nssa/Cargo.toml b/nssa/Cargo.toml index a508cc08..e6952eee 100644 --- a/nssa/Cargo.toml +++ b/nssa/Cargo.toml @@ -2,6 +2,7 @@ name = "nssa" version = "0.1.0" edition = "2024" +license = { workspace = true } [dependencies] nssa_core = { workspace = true, features = ["host"] } @@ -23,7 +24,9 @@ risc0-build = "3.0.3" risc0-binfmt = "3.0.2" [dev-dependencies] +token_core.workspace = true test_program_methods.workspace = true + env_logger.workspace = true hex-literal = "1.0.0" test-case = "3.3.1" diff --git a/nssa/core/Cargo.toml b/nssa/core/Cargo.toml index 473cde90..f00f2857 100644 --- a/nssa/core/Cargo.toml +++ b/nssa/core/Cargo.toml @@ -2,6 +2,7 @@ name = "nssa_core" version = "0.1.0" edition = "2024" +license = { workspace = true } [dependencies] risc0-zkvm.workspace = true diff --git a/nssa/core/src/account.rs b/nssa/core/src/account.rs index 55ab0ded..b1f41b65 100644 --- a/nssa/core/src/account.rs +++ b/nssa/core/src/account.rs @@ -68,6 +68,10 @@ impl AccountId { pub fn value(&self) -> &[u8; 32] { &self.value } + + pub fn into_value(self) -> [u8; 32] { + self.value + } } impl AsRef<[u8]> for AccountId { diff --git a/nssa/core/src/commitment.rs b/nssa/core/src/commitment.rs index 52344177..b08e3005 100644 --- a/nssa/core/src/commitment.rs +++ b/nssa/core/src/commitment.rs @@ -5,7 +5,10 @@ use serde::{Deserialize, Serialize}; use crate::{NullifierPublicKey, account::Account}; #[derive(Serialize, Deserialize, BorshSerialize, BorshDeserialize)] -#[cfg_attr(any(feature = "host", test), derive(Debug, Clone, PartialEq, Eq, Hash))] +#[cfg_attr( + any(feature = "host", test), + derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord) +)] pub struct Commitment(pub(super) [u8; 32]); /// A commitment to all zero data. 
diff --git a/nssa/core/src/encoding.rs b/nssa/core/src/encoding.rs index 24ac050c..34be3782 100644 --- a/nssa/core/src/encoding.rs +++ b/nssa/core/src/encoding.rs @@ -69,6 +69,11 @@ impl Commitment { self.0 } + #[cfg(feature = "host")] + pub fn from_byte_array(bytes: [u8; 32]) -> Self { + Self(bytes) + } + #[cfg(feature = "host")] pub fn from_cursor(cursor: &mut Cursor<&[u8]>) -> Result { let mut bytes = [0u8; 32]; @@ -89,6 +94,11 @@ impl Nullifier { self.0 } + #[cfg(feature = "host")] + pub fn from_byte_array(bytes: [u8; 32]) -> Self { + Self(bytes) + } + pub fn from_cursor(cursor: &mut Cursor<&[u8]>) -> Result { let mut bytes = [0u8; 32]; cursor.read_exact(&mut bytes)?; @@ -106,6 +116,16 @@ impl Ciphertext { bytes } + #[cfg(feature = "host")] + pub fn into_inner(self) -> Vec { + self.0 + } + + #[cfg(feature = "host")] + pub fn from_inner(inner: Vec) -> Self { + Self(inner) + } + #[cfg(feature = "host")] pub fn from_cursor(cursor: &mut Cursor<&[u8]>) -> Result { let mut u32_bytes = [0; 4]; diff --git a/nssa/core/src/nullifier.rs b/nssa/core/src/nullifier.rs index 8d9d59fa..5c420cb1 100644 --- a/nssa/core/src/nullifier.rs +++ b/nssa/core/src/nullifier.rs @@ -42,7 +42,10 @@ impl From<&NullifierSecretKey> for NullifierPublicKey { pub type NullifierSecretKey = [u8; 32]; #[derive(Serialize, Deserialize, BorshSerialize, BorshDeserialize)] -#[cfg_attr(any(feature = "host", test), derive(Debug, Clone, PartialEq, Eq, Hash))] +#[cfg_attr( + any(feature = "host", test), + derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash) +)] pub struct Nullifier(pub(super) [u8; 32]); impl Nullifier { diff --git a/nssa/core/src/program.rs b/nssa/core/src/program.rs index 32b3e2c0..a6a04425 100644 --- a/nssa/core/src/program.rs +++ b/nssa/core/src/program.rs @@ -20,8 +20,7 @@ pub struct ProgramInput { /// Each program can derive up to `2^256` unique account IDs by choosing different /// seeds. PDAs allow programs to control namespaced account identifiers without /// collisions between programs. -#[derive(Serialize, Deserialize, Clone, Eq, PartialEq)] -#[cfg_attr(any(feature = "host", test), derive(Debug))] +#[derive(Debug, Serialize, Deserialize, Clone, Eq, PartialEq)] pub struct PdaSeed([u8; 32]); impl PdaSeed { @@ -65,23 +64,44 @@ impl From<(&ProgramId, &PdaSeed)> for AccountId { } } -#[derive(Serialize, Deserialize, Clone, PartialEq, Eq)] -#[cfg_attr(any(feature = "host", test), derive(Debug,))] +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)] pub struct ChainedCall { /// The program ID of the program to execute pub program_id: ProgramId, + pub pre_states: Vec, /// The instruction data to pass pub instruction_data: InstructionData, - pub pre_states: Vec, pub pda_seeds: Vec, } +impl ChainedCall { + /// Creates a new chained call serializing the given instruction. + pub fn new( + program_id: ProgramId, + pre_states: Vec, + instruction: &I, + ) -> Self { + Self { + program_id, + pre_states, + instruction_data: risc0_zkvm::serde::to_vec(instruction) + .expect("Serialization to Vec should not fail"), + pda_seeds: Vec::new(), + } + } + + pub fn with_pda_seeds(mut self, pda_seeds: Vec) -> Self { + self.pda_seeds = pda_seeds; + self + } +} + /// Represents the final state of an `Account` after a program execution. /// A post state may optionally request that the executing program /// becomes the owner of the account (a “claim”). This is used to signal /// that the program intends to take ownership of the account. 
-#[derive(Serialize, Deserialize, Clone)] -#[cfg_attr(any(feature = "host", test), derive(Debug, PartialEq, Eq))] +#[derive(Debug, Serialize, Deserialize, Clone)] +#[cfg_attr(any(feature = "host", test), derive(PartialEq, Eq))] pub struct AccountPostState { account: Account, claim: bool, diff --git a/nssa/src/lib.rs b/nssa/src/lib.rs index de4b65b2..47a0eadb 100644 --- a/nssa/src/lib.rs +++ b/nssa/src/lib.rs @@ -14,7 +14,7 @@ mod state; pub use nssa_core::{ SharedSecretKey, - account::{Account, AccountId}, + account::{Account, AccountId, Data}, encryption::EphemeralPublicKey, program::ProgramId, }; diff --git a/nssa/src/merkle_tree/mod.rs b/nssa/src/merkle_tree/mod.rs index c4501cf8..b3637b13 100644 --- a/nssa/src/merkle_tree/mod.rs +++ b/nssa/src/merkle_tree/mod.rs @@ -1,3 +1,4 @@ +use borsh::{BorshDeserialize, BorshSerialize}; use sha2::{Digest, Sha256}; mod default_values; @@ -20,6 +21,7 @@ fn hash_value(value: &Value) -> Node { } #[cfg_attr(test, derive(Debug, PartialEq, Eq))] +#[derive(BorshSerialize, BorshDeserialize)] pub struct MerkleTree { nodes: Vec, capacity: usize, diff --git a/nssa/src/privacy_preserving_transaction/circuit.rs b/nssa/src/privacy_preserving_transaction/circuit.rs index 1b490de8..1ebe90f3 100644 --- a/nssa/src/privacy_preserving_transaction/circuit.rs +++ b/nssa/src/privacy_preserving_transaction/circuit.rs @@ -20,6 +20,16 @@ use crate::{ #[derive(Debug, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)] pub struct Proof(pub(crate) Vec); +impl Proof { + pub fn into_inner(self) -> Vec { + self.0 + } + + pub fn from_inner(inner: Vec) -> Self { + Self(inner) + } +} + #[derive(Clone)] pub struct ProgramWithDependencies { pub program: Program, diff --git a/nssa/src/privacy_preserving_transaction/message.rs b/nssa/src/privacy_preserving_transaction/message.rs index 6d195321..f507e65c 100644 --- a/nssa/src/privacy_preserving_transaction/message.rs +++ b/nssa/src/privacy_preserving_transaction/message.rs @@ -45,12 +45,12 @@ impl EncryptedAccountData { #[derive(Debug, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)] pub struct Message { - pub(crate) public_account_ids: Vec, - pub(crate) nonces: Vec, - pub(crate) public_post_states: Vec, + pub public_account_ids: Vec, + pub nonces: Vec, + pub public_post_states: Vec, pub encrypted_private_post_states: Vec, pub new_commitments: Vec, - pub(crate) new_nullifiers: Vec<(Nullifier, CommitmentSetDigest)>, + pub new_nullifiers: Vec<(Nullifier, CommitmentSetDigest)>, } impl Message { diff --git a/nssa/src/privacy_preserving_transaction/transaction.rs b/nssa/src/privacy_preserving_transaction/transaction.rs index 2cb0889b..34649d2d 100644 --- a/nssa/src/privacy_preserving_transaction/transaction.rs +++ b/nssa/src/privacy_preserving_transaction/transaction.rs @@ -16,7 +16,7 @@ use crate::{ #[derive(Debug, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)] pub struct PrivacyPreservingTransaction { pub message: Message, - witness_set: WitnessSet, + pub witness_set: WitnessSet, } impl PrivacyPreservingTransaction { diff --git a/nssa/src/privacy_preserving_transaction/witness_set.rs b/nssa/src/privacy_preserving_transaction/witness_set.rs index b38b0fb9..365b61b9 100644 --- a/nssa/src/privacy_preserving_transaction/witness_set.rs +++ b/nssa/src/privacy_preserving_transaction/witness_set.rs @@ -46,4 +46,18 @@ impl WitnessSet { pub fn proof(&self) -> &Proof { &self.proof } + + pub fn into_raw_parts(self) -> (Vec<(Signature, PublicKey)>, Proof) { + (self.signatures_and_public_keys, self.proof) + } + + pub fn 
from_raw_parts( + signatures_and_public_keys: Vec<(Signature, PublicKey)>, + proof: Proof, + ) -> Self { + Self { + signatures_and_public_keys, + proof, + } + } } diff --git a/nssa/src/program.rs b/nssa/src/program.rs index 943b16ed..06c7ad29 100644 --- a/nssa/src/program.rs +++ b/nssa/src/program.rs @@ -1,3 +1,4 @@ +use borsh::{BorshDeserialize, BorshSerialize}; use nssa_core::{ account::AccountWithMetadata, program::{InstructionData, ProgramId, ProgramOutput}, @@ -14,7 +15,7 @@ use crate::{ /// TODO: Make this variable when fees are implemented const MAX_NUM_CYCLES_PUBLIC_EXECUTION: u64 = 1024 * 1024 * 32; // 32M cycles -#[derive(Clone, Debug, PartialEq, Eq)] +#[derive(Clone, Debug, PartialEq, Eq, BorshSerialize, BorshDeserialize)] pub struct Program { id: ProgramId, elf: Vec, diff --git a/nssa/src/program_deployment_transaction/message.rs b/nssa/src/program_deployment_transaction/message.rs index 65e9ec27..41c4e10a 100644 --- a/nssa/src/program_deployment_transaction/message.rs +++ b/nssa/src/program_deployment_transaction/message.rs @@ -9,4 +9,8 @@ impl Message { pub fn new(bytecode: Vec) -> Self { Self { bytecode } } + + pub fn into_bytecode(self) -> Vec { + self.bytecode + } } diff --git a/nssa/src/program_deployment_transaction/transaction.rs b/nssa/src/program_deployment_transaction/transaction.rs index c5f31a1c..6002aded 100644 --- a/nssa/src/program_deployment_transaction/transaction.rs +++ b/nssa/src/program_deployment_transaction/transaction.rs @@ -14,6 +14,10 @@ impl ProgramDeploymentTransaction { Self { message } } + pub fn into_message(self) -> Message { + self.message + } + pub(crate) fn validate_and_produce_public_state_diff( &self, state: &V02State, diff --git a/nssa/src/public_transaction/message.rs b/nssa/src/public_transaction/message.rs index d8bd2da0..36a20fbb 100644 --- a/nssa/src/public_transaction/message.rs +++ b/nssa/src/public_transaction/message.rs @@ -9,10 +9,10 @@ use crate::{AccountId, error::NssaError, program::Program}; #[derive(Debug, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)] pub struct Message { - pub(crate) program_id: ProgramId, - pub(crate) account_ids: Vec, - pub(crate) nonces: Vec, - pub(crate) instruction_data: InstructionData, + pub program_id: ProgramId, + pub account_ids: Vec, + pub nonces: Vec, + pub instruction_data: InstructionData, } impl Message { diff --git a/nssa/src/public_transaction/witness_set.rs b/nssa/src/public_transaction/witness_set.rs index 09a35a4e..9b9cd290 100644 --- a/nssa/src/public_transaction/witness_set.rs +++ b/nssa/src/public_transaction/witness_set.rs @@ -37,6 +37,16 @@ impl WitnessSet { pub fn signatures_and_public_keys(&self) -> &[(Signature, PublicKey)] { &self.signatures_and_public_keys } + + pub fn into_raw_parts(self) -> Vec<(Signature, PublicKey)> { + self.signatures_and_public_keys + } + + pub fn from_raw_parts(signatures_and_public_keys: Vec<(Signature, PublicKey)>) -> Self { + Self { + signatures_and_public_keys, + } + } } #[cfg(test)] diff --git a/nssa/src/signature/mod.rs b/nssa/src/signature/mod.rs index 780ad634..f76c480a 100644 --- a/nssa/src/signature/mod.rs +++ b/nssa/src/signature/mod.rs @@ -8,7 +8,7 @@ use rand::{RngCore, rngs::OsRng}; #[derive(Debug, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)] pub struct Signature { - value: [u8; 64], + pub value: [u8; 64], } impl Signature { diff --git a/nssa/src/state.rs b/nssa/src/state.rs index 1a384b2f..08cc4e02 100644 --- a/nssa/src/state.rs +++ b/nssa/src/state.rs @@ -1,5 +1,6 @@ -use std::collections::{HashMap, HashSet}; +use 
std::collections::{BTreeSet, HashMap, HashSet}; +use borsh::{BorshDeserialize, BorshSerialize}; use nssa_core::{ Commitment, CommitmentSetDigest, DUMMY_COMMITMENT, MembershipProof, Nullifier, account::{Account, AccountId}, @@ -15,6 +16,8 @@ use crate::{ pub const MAX_NUMBER_CHAINED_CALLS: usize = 10; +#[derive(BorshSerialize, BorshDeserialize)] +#[cfg_attr(test, derive(Debug, PartialEq, Eq))] pub(crate) struct CommitmentSet { merkle_tree: MerkleTree, commitments: HashMap, @@ -60,8 +63,49 @@ impl CommitmentSet { } } -type NullifierSet = HashSet<Nullifier>; +#[cfg_attr(test, derive(Debug, PartialEq, Eq))] +struct NullifierSet(BTreeSet<Nullifier>); +impl NullifierSet { + fn new() -> Self { + Self(BTreeSet::new()) + } + + fn extend(&mut self, new_nullifiers: Vec<Nullifier>) { + self.0.extend(new_nullifiers); + } + + fn contains(&self, nullifier: &Nullifier) -> bool { + self.0.contains(nullifier) + } +} + +impl BorshSerialize for NullifierSet { + fn serialize<W: std::io::Write>(&self, writer: &mut W) -> std::io::Result<()> { + self.0.iter().collect::<Vec<_>>().serialize(writer) + } +} + +impl BorshDeserialize for NullifierSet { + fn deserialize_reader<R: std::io::Read>(reader: &mut R) -> std::io::Result<Self> { + let vec = Vec::<Nullifier>::deserialize_reader(reader)?; + + let mut set = BTreeSet::new(); + for n in vec { + if !set.insert(n) { + return Err(std::io::Error::new( + std::io::ErrorKind::InvalidData, + "duplicate nullifier in NullifierSet", + )); + } + } + + Ok(Self(set)) + } +} + +#[derive(BorshSerialize, BorshDeserialize)] +#[cfg_attr(test, derive(Debug, PartialEq, Eq))] pub struct V02State { public_state: HashMap, private_state: (CommitmentSet, NullifierSet), @@ -273,6 +317,7 @@ pub mod tests { encryption::{EphemeralPublicKey, IncomingViewingPublicKey, Scalar}, program::{PdaSeed, ProgramId}, }; + use token_core::{TokenDefinition, TokenHolding}; use crate::{ PublicKey, PublicTransaction, V02State, @@ -2284,53 +2329,6 @@ pub mod tests { )); } - // TODO: repeated code needs to be cleaned up - // from token.rs (also repeated in amm.rs) - const TOKEN_DEFINITION_DATA_SIZE: usize = 55; - - const TOKEN_HOLDING_DATA_SIZE: usize = 49; - - struct TokenDefinition { - account_type: u8, - name: [u8; 6], - total_supply: u128, - metadata_id: AccountId, - } - - struct TokenHolding { - account_type: u8, - definition_id: AccountId, - balance: u128, - } - impl TokenDefinition { - fn into_data(self) -> Data { - let mut bytes = Vec::<u8>::new(); - bytes.extend_from_slice(&[self.account_type]); - bytes.extend_from_slice(&self.name); - bytes.extend_from_slice(&self.total_supply.to_le_bytes()); - bytes.extend_from_slice(&self.metadata_id.to_bytes()); - - if bytes.len() != TOKEN_DEFINITION_DATA_SIZE { - panic!("Invalid Token Definition data"); - } - - Data::try_from(bytes).expect("Token definition data size must fit into data") - } - } - - impl TokenHolding { - fn into_data(self) -> Data { - let mut bytes = [0; TOKEN_HOLDING_DATA_SIZE]; - bytes[0] = self.account_type; - bytes[1..33].copy_from_slice(&self.definition_id.to_bytes()); - bytes[33..].copy_from_slice(&self.balance.to_le_bytes()); - bytes - .to_vec() - .try_into() - .expect("33 bytes should fit into Data") - } - } - // TODO repeated code should ultimately be removed; fn compute_pool_pda( amm_program_id: ProgramId, @@ -2703,8 +2701,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_a_definition_id(), balance: BalanceForTests::user_token_a_holding_init(), }), @@ -2716,8 
+2713,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_b_definition_id(), balance: BalanceForTests::user_token_b_holding_init(), }), @@ -2749,11 +2745,10 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenDefinition::into_data(TokenDefinition { - account_type: 0u8, - name: [1u8; 6], + data: Data::from(&TokenDefinition::Fungible { + name: String::from("test"), total_supply: BalanceForTests::token_a_supply(), - metadata_id: AccountId::new([0; 32]), + metadata_id: None, }), nonce: 0, } @@ -2763,11 +2758,10 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenDefinition::into_data(TokenDefinition { - account_type: 0u8, - name: [1u8; 6], + data: Data::from(&TokenDefinition::Fungible { + name: String::from("test"), total_supply: BalanceForTests::token_b_supply(), - metadata_id: AccountId::new([0; 32]), + metadata_id: None, }), nonce: 0, } @@ -2777,11 +2771,10 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenDefinition::into_data(TokenDefinition { - account_type: 0u8, - name: [1u8; 6], + data: Data::from(&TokenDefinition::Fungible { + name: String::from("LP Token"), total_supply: BalanceForTests::token_lp_supply(), - metadata_id: AccountId::new([0; 32]), + metadata_id: None, }), nonce: 0, } @@ -2791,8 +2784,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_a_definition_id(), balance: BalanceForTests::vault_a_balance_init(), }), @@ -2804,8 +2796,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_b_definition_id(), balance: BalanceForTests::vault_b_balance_init(), }), @@ -2817,8 +2808,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_lp_definition_id(), balance: BalanceForTests::user_token_lp_holding_init(), }), @@ -2830,8 +2820,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_a_definition_id(), balance: BalanceForTests::vault_a_balance_swap_1(), }), @@ -2843,8 +2832,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_b_definition_id(), balance: BalanceForTests::vault_b_balance_swap_1(), }), @@ -2876,8 +2864,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_a_definition_id(), balance: BalanceForTests::user_token_a_holding_swap_1(), }), @@ -2889,8 +2876,7 @@ pub mod tests { Account { program_owner: Program::token().id(), 
balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_b_definition_id(), balance: BalanceForTests::user_token_b_holding_swap_1(), }), @@ -2902,8 +2888,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_a_definition_id(), balance: BalanceForTests::vault_a_balance_swap_2(), }), @@ -2915,8 +2900,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_b_definition_id(), balance: BalanceForTests::vault_b_balance_swap_2(), }), @@ -2948,8 +2932,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_a_definition_id(), balance: BalanceForTests::user_token_a_holding_swap_2(), }), @@ -2961,8 +2944,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_b_definition_id(), balance: BalanceForTests::user_token_b_holding_swap_2(), }), @@ -2974,8 +2956,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_a_definition_id(), balance: BalanceForTests::vault_a_balance_add(), }), @@ -2987,8 +2968,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_b_definition_id(), balance: BalanceForTests::vault_b_balance_add(), }), @@ -3020,8 +3000,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_a_definition_id(), balance: BalanceForTests::user_token_a_holding_add(), }), @@ -3033,8 +3012,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_b_definition_id(), balance: BalanceForTests::user_token_b_holding_add(), }), @@ -3046,8 +3024,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_lp_definition_id(), balance: BalanceForTests::user_token_lp_holding_add(), }), @@ -3059,11 +3036,10 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenDefinition::into_data(TokenDefinition { - account_type: 0u8, - name: [1u8; 6], + data: Data::from(&TokenDefinition::Fungible { + name: String::from("LP Token"), total_supply: BalanceForTests::token_lp_supply_add(), - metadata_id: AccountId::new([0; 32]), + metadata_id: None, 
}), nonce: 0, } @@ -3073,8 +3049,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_a_definition_id(), balance: BalanceForTests::vault_a_balance_remove(), }), @@ -3086,8 +3061,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_b_definition_id(), balance: BalanceForTests::vault_b_balance_remove(), }), @@ -3119,8 +3093,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_a_definition_id(), balance: BalanceForTests::user_token_a_holding_remove(), }), @@ -3132,8 +3105,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_b_definition_id(), balance: BalanceForTests::user_token_b_holding_remove(), }), @@ -3145,8 +3117,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_lp_definition_id(), balance: BalanceForTests::user_token_lp_holding_remove(), }), @@ -3158,11 +3129,10 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenDefinition::into_data(TokenDefinition { - account_type: 0u8, - name: [1u8; 6], + data: Data::from(&TokenDefinition::Fungible { + name: String::from("LP Token"), total_supply: BalanceForTests::token_lp_supply_remove(), - metadata_id: AccountId::new([0; 32]), + metadata_id: None, }), nonce: 0, } @@ -3172,11 +3142,10 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenDefinition::into_data(TokenDefinition { - account_type: 0u8, - name: [1u8; 6], + data: Data::from(&TokenDefinition::Fungible { + name: String::from("LP Token"), total_supply: 0, - metadata_id: AccountId::new([0; 32]), + metadata_id: None, }), nonce: 0, } @@ -3186,8 +3155,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_a_definition_id(), balance: 0, }), @@ -3199,8 +3167,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_b_definition_id(), balance: 0, }), @@ -3232,8 +3199,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_a_definition_id(), balance: BalanceForTests::user_token_a_holding_new_definition(), }), @@ -3245,8 +3211,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { 
definition_id: IdForTests::token_b_definition_id(), balance: BalanceForTests::user_token_b_holding_new_definition(), }), @@ -3258,8 +3223,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_lp_definition_id(), balance: BalanceForTests::user_token_a_holding_new_definition(), }), @@ -3271,11 +3235,10 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenDefinition::into_data(TokenDefinition { - account_type: 0u8, - name: [1u8; 6], + data: Data::from(&TokenDefinition::Fungible { + name: String::from("LP Token"), total_supply: BalanceForTests::vault_a_balance_init(), - metadata_id: AccountId::new([0; 32]), + metadata_id: None, }), nonce: 0, } @@ -3305,8 +3268,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_lp_definition_id(), balance: 0, }), @@ -4071,13 +4033,13 @@ pub mod tests { let pinata_token_holding_id = AccountId::from((&pinata_token.id(), &PdaSeed::new([0; 32]))); let winner_token_holding_id = AccountId::new([3; 32]); - let mut expected_winner_account_data = [0; 49]; - expected_winner_account_data[0] = 1; - expected_winner_account_data[1..33].copy_from_slice(pinata_token_definition_id.value()); - expected_winner_account_data[33..].copy_from_slice(&150u128.to_le_bytes()); + let expected_winner_account_holding = token_core::TokenHolding::Fungible { + definition_id: pinata_token_definition_id, + balance: 150, + }; let expected_winner_token_holding_post = Account { program_owner: token.id(), - data: expected_winner_account_data.to_vec().try_into().unwrap(), + data: Data::from(&expected_winner_account_holding), ..Account::default() }; @@ -4087,10 +4049,10 @@ pub mod tests { // Execution of the token program to create new token for the pinata token // definition and supply accounts let total_supply: u128 = 10_000_000; - // instruction: [0x00 || total_supply (little-endian 16 bytes) || name (6 bytes)] - let mut instruction = vec![0; 23]; - instruction[1..17].copy_from_slice(&total_supply.to_le_bytes()); - instruction[17..].copy_from_slice(b"PINATA"); + let instruction = token_core::Instruction::NewFungibleDefinition { + name: String::from("PINATA"), + total_supply, + }; let message = public_transaction::Message::try_new( token.id(), vec![pinata_token_definition_id, pinata_token_holding_id], @@ -4102,9 +4064,8 @@ pub mod tests { let tx = PublicTransaction::new(message, witness_set); state.transition_from_public_transaction(&tx).unwrap(); - // Execution of the token program transfer just to initialize the winner token account - let mut instruction = vec![0; 23]; - instruction[0] = 2; + // Execution of winner's token holding account initialization + let instruction = token_core::Instruction::InitializeAccount; let message = public_transaction::Message::try_new( token.id(), vec![pinata_token_definition_id, winner_token_holding_id], @@ -4528,4 +4489,15 @@ pub mod tests { // Assert - should fail because the malicious program tries to manipulate is_authorized assert!(matches!(result, Err(NssaError::CircuitProvingError(_)))); } + + #[test] + fn test_state_serialization_roundtrip() { + let account_id_1 = AccountId::new([1; 32]); + let account_id_2 = AccountId::new([2; 32]); + let initial_data = 
[(account_id_1, 100u128), (account_id_2, 151u128)]; + let state = V02State::new_with_genesis_accounts(&initial_data, &[]).with_test_programs(); + let bytes = borsh::to_vec(&state).unwrap(); + let state_from_bytes: V02State = borsh::from_slice(&bytes).unwrap(); + assert_eq!(state, state_from_bytes); + } } diff --git a/program_methods/Cargo.toml b/program_methods/Cargo.toml index 5f0688a4..999c1522 100644 --- a/program_methods/Cargo.toml +++ b/program_methods/Cargo.toml @@ -2,6 +2,7 @@ name = "program_methods" version = "0.1.0" edition = "2024" +license = { workspace = true } [build-dependencies] risc0-build.workspace = true diff --git a/program_methods/guest/Cargo.toml b/program_methods/guest/Cargo.toml index 37c1a8d9..eda23348 100644 --- a/program_methods/guest/Cargo.toml +++ b/program_methods/guest/Cargo.toml @@ -2,9 +2,11 @@ name = "programs" version = "0.1.0" edition = "2024" +license = { workspace = true } [dependencies] nssa_core.workspace = true - +token_core.workspace = true +token_program.workspace = true risc0-zkvm.workspace = true serde = { workspace = true, default-features = false } diff --git a/program_methods/guest/src/bin/amm.rs b/program_methods/guest/src/bin/amm.rs index 9488db13..2c7a7f8f 100644 --- a/program_methods/guest/src/bin/amm.rs +++ b/program_methods/guest/src/bin/amm.rs @@ -152,56 +152,6 @@ impl PoolDefinition { } } -// TODO: remove repeated code for Token_Definition and TokenHoldling - -const TOKEN_HOLDING_TYPE: u8 = 1; -const TOKEN_HOLDING_DATA_SIZE: usize = 49; - -struct TokenHolding { - #[cfg_attr(not(test), expect(dead_code, reason = "TODO: fix later"))] - account_type: u8, - definition_id: AccountId, - balance: u128, -} - -impl TokenHolding { - fn parse(data: &[u8]) -> Option { - if data.len() != TOKEN_HOLDING_DATA_SIZE || data[0] != TOKEN_HOLDING_TYPE { - None - } else { - let account_type = data[0]; - let definition_id = AccountId::new( - data[1..33] - .try_into() - .expect("Defintion ID must be 32 bytes long"), - ); - let balance = u128::from_le_bytes( - data[33..] 
- .try_into() - .expect("balance must be 16 bytes little-endian"), - ); - Some(Self { - definition_id, - balance, - account_type, - }) - } - } - - #[cfg(test)] - fn into_data(self) -> Data { - let mut bytes = [0; TOKEN_HOLDING_DATA_SIZE]; - bytes[0] = self.account_type; - bytes[1..33].copy_from_slice(&self.definition_id.to_bytes()); - bytes[33..].copy_from_slice(&self.balance.to_le_bytes()); - - bytes - .to_vec() - .try_into() - .expect("49 bytes should fit into Data") - } -} - type Instruction = Vec; fn main() { let ( @@ -412,32 +362,6 @@ fn compute_liquidity_token_pda_seed(pool_id: AccountId) -> PdaSeed { ) } -const TOKEN_PROGRAM_NEW: u8 = 0; -const TOKEN_PROGRAM_TRANSFER: u8 = 1; -const TOKEN_PROGRAM_MINT: u8 = 4; -const TOKEN_PROGRAM_BURN: u8 = 3; - -fn initialize_token_transfer_chained_call( - token_program_command: u8, - sender: AccountWithMetadata, - recipient: AccountWithMetadata, - amount_to_move: u128, - pda_seed: Vec, -) -> ChainedCall { - let mut instruction_data = vec![0u8; 23]; - instruction_data[0] = token_program_command; - instruction_data[1..17].copy_from_slice(&amount_to_move.to_le_bytes()); - let instruction_data = risc0_zkvm::serde::to_vec(&instruction_data) - .expect("AMM Program expects valid token transfer instruction data"); - - ChainedCall { - program_id: sender.account.program_owner, - instruction_data, - pre_states: vec![sender, recipient], - pda_seeds: pda_seed, - } -} - fn new_definition( pre_states: &[AccountWithMetadata], balance_in: &[u128], @@ -471,12 +395,12 @@ fn new_definition( } // Verify token_a and token_b are different - let definition_token_a_id = TokenHolding::parse(&user_holding_a.account.data) + let definition_token_a_id = token_core::TokenHolding::try_from(&user_holding_a.account.data) .expect("New definition: AMM Program expects valid Token Holding account for Token A") - .definition_id; - let definition_token_b_id = TokenHolding::parse(&user_holding_b.account.data) + .definition_id(); + let definition_token_b_id = token_core::TokenHolding::try_from(&user_holding_b.account.data) .expect("New definition: AMM Program expects valid Token Holding account for Token B") - .definition_id; + .definition_id(); // both instances of the same token program let token_program = user_holding_a.account.program_owner; @@ -543,57 +467,48 @@ fn new_definition( AccountPostState::new(pool_post.clone()) }; - let mut chained_calls = Vec::::new(); + let token_program_id = user_holding_a.account.program_owner; // Chain call for Token A (user_holding_a -> Vault_A) - let call_token_a = initialize_token_transfer_chained_call( - TOKEN_PROGRAM_TRANSFER, - user_holding_a.clone(), - vault_a.clone(), - amount_a, - Vec::::new(), + let call_token_a = ChainedCall::new( + token_program_id, + vec![user_holding_a.clone(), vault_a.clone()], + &token_core::Instruction::Transfer { + amount_to_transfer: amount_a, + }, ); // Chain call for Token B (user_holding_b -> Vault_B) - let call_token_b = initialize_token_transfer_chained_call( - TOKEN_PROGRAM_TRANSFER, - user_holding_b.clone(), - vault_b.clone(), - amount_b, - Vec::::new(), + let call_token_b = ChainedCall::new( + token_program_id, + vec![user_holding_b.clone(), vault_b.clone()], + &token_core::Instruction::Transfer { + amount_to_transfer: amount_b, + }, ); // Chain call for liquidity token (TokenLP definition -> User LP Holding) - let mut instruction_data = vec![0u8; 23]; - instruction_data[0] = if pool.account == Account::default() { - TOKEN_PROGRAM_NEW + let instruction = if pool.account == Account::default() { + 
token_core::Instruction::NewFungibleDefinition { + name: String::from("LP Token"), + total_supply: amount_a, + } } else { - TOKEN_PROGRAM_MINT - }; //new or mint - let nme = if pool.account == Account::default() { - [1u8; 6] - } else { - [0u8; 6] + token_core::Instruction::Mint { + amount_to_mint: amount_a, + } }; - instruction_data[1..17].copy_from_slice(&amount_a.to_le_bytes()); - instruction_data[17..].copy_from_slice(&nme); - let instruction_data = risc0_zkvm::serde::to_vec(&instruction_data) - .expect("New definition: AMM Program expects valid instruction_data"); - let mut pool_lp_auth = pool_lp.clone(); pool_lp_auth.is_authorized = true; - let token_program_id = user_holding_a.account.program_owner; - let call_token_lp = ChainedCall { - program_id: token_program_id, - instruction_data, - pre_states: vec![pool_lp_auth.clone(), user_holding_lp.clone()], - pda_seeds: vec![compute_liquidity_token_pda_seed(pool.account_id)], - }; + let call_token_lp = ChainedCall::new( + token_program_id, + vec![pool_lp_auth.clone(), user_holding_lp.clone()], + &instruction, + ) + .with_pda_seeds(vec![compute_liquidity_token_pda_seed(pool.account_id)]); - chained_calls.push(call_token_lp); - chained_calls.push(call_token_b); - chained_calls.push(call_token_a); + let chained_calls = vec![call_token_lp, call_token_b, call_token_a]; let post_states = vec![ pool_post.clone(), @@ -645,18 +560,30 @@ fn swap( // fetch pool reserves // validates reserves is at least the vaults' balances - if TokenHolding::parse(&vault_a.account.data) - .expect("Swap: AMM Program expects a valid Token Holding Account for Vault A") - .balance - < pool_def_data.reserve_a - { + let vault_a_token_holding = token_core::TokenHolding::try_from(&vault_a.account.data) + .expect("Swap: AMM Program expects a valid Token Holding Account for Vault A"); + let token_core::TokenHolding::Fungible { + definition_id: _, + balance: vault_a_balance, + } = vault_a_token_holding + else { + panic!("Swap: AMM Program expects a valid Fungible Token Holding Account for Vault A"); + }; + if vault_a_balance < pool_def_data.reserve_a { panic!("Reserve for Token A exceeds vault balance"); } - if TokenHolding::parse(&vault_b.account.data) - .expect("Swap: AMM Program expects a valid Token Holding Account for Vault B") - .balance - < pool_def_data.reserve_b - { + + let vault_b_token_holding = token_core::TokenHolding::try_from(&vault_b.account.data) + .expect("Swap: AMM Program expects a valid Token Holding Account for Vault B"); + let token_core::TokenHolding::Fungible { + definition_id: _, + balance: vault_b_balance, + } = vault_b_token_holding + else { + panic!("Swap: AMM Program expects a valid Fungible Token Holding Account for Vault B"); + }; + + if vault_b_balance < pool_def_data.reserve_b { panic!("Reserve for Token B exceeds vault balance"); } @@ -741,30 +668,37 @@ fn swap_logic( panic!("Withdraw amount should be nonzero"); } + let token_program_id = user_deposit.account.program_owner; + let mut chained_calls = Vec::new(); - chained_calls.push(initialize_token_transfer_chained_call( - TOKEN_PROGRAM_TRANSFER, - user_deposit.clone(), - vault_deposit.clone(), - deposit_amount, - Vec::::new(), + chained_calls.push(ChainedCall::new( + token_program_id, + vec![user_deposit, vault_deposit], + &token_core::Instruction::Transfer { + amount_to_transfer: deposit_amount, + }, )); let mut vault_withdraw = vault_withdraw.clone(); vault_withdraw.is_authorized = true; - chained_calls.push(initialize_token_transfer_chained_call( - TOKEN_PROGRAM_TRANSFER, - 
vault_withdraw.clone(), - user_withdraw.clone(), - withdraw_amount, - vec![compute_vault_pda_seed( - pool_id, - TokenHolding::parse(&vault_withdraw.account.data) - .expect("Swap Logic: AMM Program expects valid token data") - .definition_id, - )], - )); + let pda_seed = compute_vault_pda_seed( + pool_id, + token_core::TokenHolding::try_from(&vault_withdraw.account.data) + .expect("Swap Logic: AMM Program expects valid token data") + .definition_id(), + ); + + chained_calls.push( + ChainedCall::new( + token_program_id, + vec![vault_withdraw, user_withdraw], + &token_core::Instruction::Transfer { + amount_to_transfer: withdraw_amount, + }, + ) + .with_pda_seeds(vec![pda_seed]), + ); (chained_calls, deposit_amount, withdraw_amount) } @@ -816,12 +750,29 @@ fn add_liquidity( } // 2. Determine deposit amount - let vault_b_balance = TokenHolding::parse(&vault_b.account.data) - .expect("Add liquidity: AMM Program expects valid Token Holding Account for Vault B") - .balance; - let vault_a_balance = TokenHolding::parse(&vault_a.account.data) - .expect("Add liquidity: AMM Program expects valid Token Holding Account for Vault A") - .balance; + let vault_b_token_holding = token_core::TokenHolding::try_from(&vault_b.account.data) + .expect("Add liquidity: AMM Program expects valid Token Holding Account for Vault B"); + let token_core::TokenHolding::Fungible { + definition_id: _, + balance: vault_b_balance, + } = vault_b_token_holding + else { + panic!( + "Add liquidity: AMM Program expects valid Fungible Token Holding Account for Vault B" + ); + }; + + let vault_a_token_holding = token_core::TokenHolding::try_from(&vault_a.account.data) + .expect("Add liquidity: AMM Program expects valid Token Holding Account for Vault A"); + let token_core::TokenHolding::Fungible { + definition_id: _, + balance: vault_a_balance, + } = vault_a_token_holding + else { + panic!( + "Add liquidity: AMM Program expects valid Fungible Token Holding Account for Vault A" + ); + }; if pool_def_data.reserve_a == 0 || pool_def_data.reserve_b == 0 { panic!("Reserves must be nonzero"); @@ -879,38 +830,37 @@ fn add_liquidity( }; pool_post.data = pool_post_definition.into_data(); - let mut chained_call = Vec::new(); + let token_program_id = user_holding_a.account.program_owner; // Chain call for Token A (UserHoldingA -> Vault_A) - let call_token_a = initialize_token_transfer_chained_call( - TOKEN_PROGRAM_TRANSFER, - user_holding_a.clone(), - vault_a.clone(), - actual_amount_a, - Vec::::new(), + let call_token_a = ChainedCall::new( + token_program_id, + vec![user_holding_a.clone(), vault_a.clone()], + &token_core::Instruction::Transfer { + amount_to_transfer: actual_amount_a, + }, ); // Chain call for Token B (UserHoldingB -> Vault_B) - let call_token_b = initialize_token_transfer_chained_call( - TOKEN_PROGRAM_TRANSFER, - user_holding_b.clone(), - vault_b.clone(), - actual_amount_b, - Vec::::new(), + let call_token_b = ChainedCall::new( + token_program_id, + vec![user_holding_b.clone(), vault_b.clone()], + &token_core::Instruction::Transfer { + amount_to_transfer: actual_amount_b, + }, ); // Chain call for LP (mint new tokens for user_holding_lp) let mut pool_definition_lp_auth = pool_definition_lp.clone(); pool_definition_lp_auth.is_authorized = true; - let call_token_lp = initialize_token_transfer_chained_call( - TOKEN_PROGRAM_MINT, - pool_definition_lp_auth.clone(), - user_holding_lp.clone(), - delta_lp, - vec![compute_liquidity_token_pda_seed(pool.account_id)], - ); + let call_token_lp = ChainedCall::new( + token_program_id, + 
vec![pool_definition_lp_auth.clone(), user_holding_lp.clone()], + &token_core::Instruction::Mint { + amount_to_mint: delta_lp, + }, + ) + .with_pda_seeds(vec![compute_liquidity_token_pda_seed(pool.account_id)]); - chained_call.push(call_token_lp); - chained_call.push(call_token_b); - chained_call.push(call_token_a); + let chained_calls = vec![call_token_lp, call_token_b, call_token_a]; let post_states = vec![ AccountPostState::new(pool_post), @@ -922,7 +872,7 @@ fn add_liquidity( AccountPostState::new(pre_states[6].account.clone()), ]; - (post_states, chained_call) + (post_states, chained_calls) } fn remove_liquidity( @@ -986,11 +936,20 @@ fn remove_liquidity( } // 2. Compute withdrawal amounts - let user_holding_lp_data = TokenHolding::parse(&user_holding_lp.account.data) + let user_holding_lp_data = token_core::TokenHolding::try_from(&user_holding_lp.account.data) .expect("Remove liquidity: AMM Program expects a valid Token Account for liquidity token"); + let token_core::TokenHolding::Fungible { + definition_id: _, + balance: user_lp_balance, + } = user_holding_lp_data + else { + panic!( + "Remove liquidity: AMM Program expects a valid Fungible Token Holding Account for liquidity token" + ); + }; - if user_holding_lp_data.balance > pool_def_data.liquidity_pool_supply - || user_holding_lp_data.definition_id != pool_def_data.liquidity_pool_id + if user_lp_balance > pool_def_data.liquidity_pool_supply + || user_holding_lp_data.definition_id() != pool_def_data.liquidity_pool_id { panic!("Invalid liquidity account provided"); } @@ -1026,44 +985,45 @@ fn remove_liquidity( pool_post.data = pool_post_definition.into_data(); - let mut chained_calls = Vec::new(); + let token_program_id = user_holding_a.account.program_owner; // Chaincall for Token A withdraw - let call_token_a = initialize_token_transfer_chained_call( - TOKEN_PROGRAM_TRANSFER, - running_vault_a, - user_holding_a.clone(), - withdraw_amount_a, - vec![compute_vault_pda_seed( - pool.account_id, - pool_def_data.definition_token_a_id, - )], - ); + let call_token_a = ChainedCall::new( + token_program_id, + vec![running_vault_a, user_holding_a.clone()], + &token_core::Instruction::Transfer { + amount_to_transfer: withdraw_amount_a, + }, + ) + .with_pda_seeds(vec![compute_vault_pda_seed( + pool.account_id, + pool_def_data.definition_token_a_id, + )]); // Chaincall for Token B withdraw - let call_token_b = initialize_token_transfer_chained_call( - TOKEN_PROGRAM_TRANSFER, - running_vault_b, - user_holding_b.clone(), - withdraw_amount_b, - vec![compute_vault_pda_seed( - pool.account_id, - pool_def_data.definition_token_b_id, - )], - ); + let call_token_b = ChainedCall::new( + token_program_id, + vec![running_vault_b, user_holding_b.clone()], + &token_core::Instruction::Transfer { + amount_to_transfer: withdraw_amount_b, + }, + ) + .with_pda_seeds(vec![compute_vault_pda_seed( + pool.account_id, + pool_def_data.definition_token_b_id, + )]); // Chaincall for LP adjustment let mut pool_definition_lp_auth = pool_definition_lp.clone(); pool_definition_lp_auth.is_authorized = true; - let call_token_lp = initialize_token_transfer_chained_call( - TOKEN_PROGRAM_BURN, - pool_definition_lp_auth.clone(), - user_holding_lp.clone(), - delta_lp, - vec![compute_liquidity_token_pda_seed(pool.account_id)], - ); + let call_token_lp = ChainedCall::new( + token_program_id, + vec![pool_definition_lp_auth, user_holding_lp.clone()], + &token_core::Instruction::Burn { + amount_to_burn: delta_lp, + }, + ) + 
.with_pda_seeds(vec![compute_liquidity_token_pda_seed(pool.account_id)]); - chained_calls.push(call_token_lp); - chained_calls.push(call_token_b); - chained_calls.push(call_token_a); + let chained_calls = vec![call_token_lp, call_token_b, call_token_a]; let post_states = vec![ AccountPostState::new(pool_post.clone()), @@ -1082,41 +1042,18 @@ fn remove_liquidity( mod tests { use nssa_core::{ account::{Account, AccountId, AccountWithMetadata, Data}, - program::{ChainedCall, PdaSeed, ProgramId}, + program::{ChainedCall, ProgramId}, }; + use token_core::{TokenDefinition, TokenHolding}; use crate::{ - PoolDefinition, TokenHolding, add_liquidity, compute_liquidity_token_pda, + PoolDefinition, add_liquidity, compute_liquidity_token_pda, compute_liquidity_token_pda_seed, compute_pool_pda, compute_vault_pda, compute_vault_pda_seed, new_definition, remove_liquidity, swap, }; const TOKEN_PROGRAM_ID: ProgramId = [15; 8]; const AMM_PROGRAM_ID: ProgramId = [42; 8]; - const TOKEN_DEFINITION_DATA_SIZE: usize = 55; - - struct TokenDefinition { - account_type: u8, - name: [u8; 6], - total_supply: u128, - metadata_id: AccountId, - } - - impl TokenDefinition { - fn into_data(self) -> Data { - let mut bytes = Vec::::new(); - bytes.extend_from_slice(&[self.account_type]); - bytes.extend_from_slice(&self.name); - bytes.extend_from_slice(&self.total_supply.to_le_bytes()); - bytes.extend_from_slice(&self.metadata_id.to_bytes()); - - if bytes.len() != TOKEN_DEFINITION_DATA_SIZE { - panic!("Invalid Token Definition data"); - } - - Data::try_from(bytes).expect("Token definition data size must fit into data") - } - } struct BalanceForTests; @@ -1250,21 +1187,16 @@ mod tests { impl ChainedCallForTests { fn cc_swap_token_a_test_1() -> ChainedCall { - let mut instruction_data = vec![0; 23]; - instruction_data[0] = 1; - instruction_data[1..17] - .copy_from_slice(&BalanceForTests::add_max_amount_a().to_le_bytes()); - let instruction_data = risc0_zkvm::serde::to_vec(&instruction_data) - .expect("AMM Program expects valid transaction instruction data"); - ChainedCall { - program_id: TOKEN_PROGRAM_ID, - instruction_data, - pre_states: vec![ + ChainedCall::new( + TOKEN_PROGRAM_ID, + vec![ AccountForTests::user_holding_a(), AccountForTests::vault_a_init(), ], - pda_seeds: Vec::::new(), - } + &token_core::Instruction::Transfer { + amount_to_transfer: BalanceForTests::add_max_amount_a(), + }, + ) } fn cc_swap_token_b_test_1() -> ChainedCall { @@ -1273,20 +1205,17 @@ mod tests { let mut vault_b_auth = AccountForTests::vault_b_init(); vault_b_auth.is_authorized = true; - let mut instruction = vec![0; 23]; - instruction[0] = 1; - instruction[1..17].copy_from_slice(&swap_amount.to_le_bytes()); - let instruction_data = risc0_zkvm::serde::to_vec(&instruction) - .expect("AMM Program expects valid transaction instruction data"); - ChainedCall { - program_id: TOKEN_PROGRAM_ID, - instruction_data, - pre_states: vec![vault_b_auth, AccountForTests::user_holding_b()], - pda_seeds: vec![compute_vault_pda_seed( - IdForTests::pool_definition_id(), - IdForTests::token_b_definition_id(), - )], - } + ChainedCall::new( + TOKEN_PROGRAM_ID, + vec![vault_b_auth, AccountForTests::user_holding_b()], + &token_core::Instruction::Transfer { + amount_to_transfer: swap_amount, + }, + ) + .with_pda_seeds(vec![compute_vault_pda_seed( + IdForTests::pool_definition_id(), + IdForTests::token_b_definition_id(), + )]) } fn cc_swap_token_a_test_2() -> ChainedCall { @@ -1295,214 +1224,164 @@ mod tests { let mut vault_a_auth = AccountForTests::vault_a_init(); 
vault_a_auth.is_authorized = true; - let mut instruction_data = vec![0; 23]; - instruction_data[0] = 1; - instruction_data[1..17].copy_from_slice(&swap_amount.to_le_bytes()); - let instruction_data = risc0_zkvm::serde::to_vec(&instruction_data) - .expect("AMM Program expects valid transaction instruction data"); - ChainedCall { - program_id: TOKEN_PROGRAM_ID, - instruction_data, - pre_states: vec![vault_a_auth, AccountForTests::user_holding_a()], - pda_seeds: vec![compute_vault_pda_seed( - IdForTests::pool_definition_id(), - IdForTests::token_a_definition_id(), - )], - } + ChainedCall::new( + TOKEN_PROGRAM_ID, + vec![vault_a_auth, AccountForTests::user_holding_a()], + &token_core::Instruction::Transfer { + amount_to_transfer: swap_amount, + }, + ) + .with_pda_seeds(vec![compute_vault_pda_seed( + IdForTests::pool_definition_id(), + IdForTests::token_a_definition_id(), + )]) } fn cc_swap_token_b_test_2() -> ChainedCall { - let mut instruction = vec![0; 23]; - instruction[0] = 1; - instruction[1..17].copy_from_slice(&BalanceForTests::add_max_amount_b().to_le_bytes()); - let instruction_data = risc0_zkvm::serde::to_vec(&instruction) - .expect("AMM Program expects valid transaction instruction data"); - ChainedCall { - program_id: TOKEN_PROGRAM_ID, - instruction_data, - pre_states: vec![ + ChainedCall::new( + TOKEN_PROGRAM_ID, + vec![ AccountForTests::user_holding_b(), AccountForTests::vault_b_init(), ], - pda_seeds: Vec::::new(), - } + &token_core::Instruction::Transfer { + amount_to_transfer: BalanceForTests::add_max_amount_b(), + }, + ) } fn cc_add_token_a() -> ChainedCall { - let mut instruction = vec![0u8; 23]; - instruction[0] = 1; - instruction[1..17] - .copy_from_slice(&BalanceForTests::add_successful_amount_a().to_le_bytes()); - let instruction_data = risc0_zkvm::serde::to_vec(&instruction) - .expect("AMM Program expects valid transaction instruction data"); - ChainedCall { - program_id: TOKEN_PROGRAM_ID, - instruction_data, - pre_states: vec![ + ChainedCall::new( + TOKEN_PROGRAM_ID, + vec![ AccountForTests::user_holding_a(), AccountForTests::vault_a_init(), ], - pda_seeds: Vec::::new(), - } + &token_core::Instruction::Transfer { + amount_to_transfer: BalanceForTests::add_successful_amount_a(), + }, + ) } fn cc_add_token_b() -> ChainedCall { - let mut instruction = vec![0u8; 23]; - instruction[0] = 1; - instruction[1..17] - .copy_from_slice(&BalanceForTests::add_successful_amount_b().to_le_bytes()); - let instruction_data = risc0_zkvm::serde::to_vec(&instruction) - .expect("Swap Logic: AMM Program expects valid transaction instruction data"); - ChainedCall { - program_id: TOKEN_PROGRAM_ID, - instruction_data, - pre_states: vec![ + ChainedCall::new( + TOKEN_PROGRAM_ID, + vec![ AccountForTests::user_holding_b(), AccountForTests::vault_b_init(), ], - pda_seeds: Vec::::new(), - } + &token_core::Instruction::Transfer { + amount_to_transfer: BalanceForTests::add_successful_amount_b(), + }, + ) } fn cc_add_pool_lp() -> ChainedCall { let mut pool_lp_auth = AccountForTests::pool_lp_init(); pool_lp_auth.is_authorized = true; - let mut instruction = vec![0u8; 23]; - instruction[0] = 4; - instruction[1..17] - .copy_from_slice(&BalanceForTests::add_successful_amount_a().to_le_bytes()); - let instruction_data = risc0_zkvm::serde::to_vec(&instruction) - .expect("Swap Logic: AMM Program expects valid transaction instruction data"); - ChainedCall { - program_id: TOKEN_PROGRAM_ID, - instruction_data, - pre_states: vec![pool_lp_auth, AccountForTests::user_holding_lp_init()], - pda_seeds: 
vec![compute_liquidity_token_pda_seed( - IdForTests::pool_definition_id(), - )], - } + ChainedCall::new( + TOKEN_PROGRAM_ID, + vec![pool_lp_auth, AccountForTests::user_holding_lp_init()], + &token_core::Instruction::Mint { + amount_to_mint: BalanceForTests::add_successful_amount_a(), + }, + ) + .with_pda_seeds(vec![compute_liquidity_token_pda_seed( + IdForTests::pool_definition_id(), + )]) } fn cc_remove_token_a() -> ChainedCall { let mut vault_a_auth = AccountForTests::vault_a_init(); vault_a_auth.is_authorized = true; - let mut instruction = vec![0; 23]; - instruction[0] = 1; - instruction[1..17] - .copy_from_slice(&BalanceForTests::remove_actual_a_successful().to_le_bytes()); - let instruction_data = risc0_zkvm::serde::to_vec(&instruction) - .expect("AMM Program expects valid transaction instruction data"); - ChainedCall { - program_id: TOKEN_PROGRAM_ID, - instruction_data, - pre_states: vec![vault_a_auth, AccountForTests::user_holding_a()], - pda_seeds: vec![compute_vault_pda_seed( - IdForTests::pool_definition_id(), - IdForTests::token_a_definition_id(), - )], - } + ChainedCall::new( + TOKEN_PROGRAM_ID, + vec![vault_a_auth, AccountForTests::user_holding_a()], + &token_core::Instruction::Transfer { + amount_to_transfer: BalanceForTests::remove_actual_a_successful(), + }, + ) + .with_pda_seeds(vec![compute_vault_pda_seed( + IdForTests::pool_definition_id(), + IdForTests::token_a_definition_id(), + )]) } fn cc_remove_token_b() -> ChainedCall { let mut vault_b_auth = AccountForTests::vault_b_init(); vault_b_auth.is_authorized = true; - let mut instruction = vec![0; 23]; - instruction[0] = 1; - instruction[1..17] - .copy_from_slice(&BalanceForTests::remove_min_amount_b_low().to_le_bytes()); - let instruction_data = risc0_zkvm::serde::to_vec(&instruction) - .expect("AMM Program expects valid transaction instruction data"); - ChainedCall { - program_id: TOKEN_PROGRAM_ID, - instruction_data, - pre_states: vec![vault_b_auth, AccountForTests::user_holding_b()], - pda_seeds: vec![compute_vault_pda_seed( - IdForTests::pool_definition_id(), - IdForTests::token_b_definition_id(), - )], - } + ChainedCall::new( + TOKEN_PROGRAM_ID, + vec![vault_b_auth, AccountForTests::user_holding_b()], + &token_core::Instruction::Transfer { + amount_to_transfer: BalanceForTests::remove_min_amount_b_low(), + }, + ) + .with_pda_seeds(vec![compute_vault_pda_seed( + IdForTests::pool_definition_id(), + IdForTests::token_b_definition_id(), + )]) } fn cc_remove_pool_lp() -> ChainedCall { let mut pool_lp_auth = AccountForTests::pool_lp_init(); pool_lp_auth.is_authorized = true; - let mut instruction = vec![0; 23]; - instruction[0] = 3; - instruction[1..17] - .copy_from_slice(&BalanceForTests::remove_actual_a_successful().to_le_bytes()); - let instruction_data = risc0_zkvm::serde::to_vec(&instruction) - .expect("AMM Program expects valid transaction instruction data"); - ChainedCall { - program_id: TOKEN_PROGRAM_ID, - instruction_data, - pre_states: vec![ - AccountForTests::pool_lp_init(), - AccountForTests::user_holding_lp_init(), - ], - pda_seeds: vec![compute_liquidity_token_pda_seed( - IdForTests::pool_definition_id(), - )], - } + ChainedCall::new( + TOKEN_PROGRAM_ID, + vec![pool_lp_auth, AccountForTests::user_holding_lp_init()], + &token_core::Instruction::Burn { + amount_to_burn: BalanceForTests::remove_amount_lp(), + }, + ) + .with_pda_seeds(vec![compute_liquidity_token_pda_seed( + IdForTests::pool_definition_id(), + )]) } fn cc_new_definition_token_a() -> ChainedCall { - let mut instruction = vec![0; 23]; - 
instruction[0] = 1; - instruction[1..17] - .copy_from_slice(&BalanceForTests::add_successful_amount_a().to_le_bytes()); - let instruction_data = risc0_zkvm::serde::to_vec(&instruction) - .expect("AMM Program expects valid transaction instruction data"); - ChainedCall { - program_id: TOKEN_PROGRAM_ID, - instruction_data, - pre_states: vec![ + ChainedCall::new( + TOKEN_PROGRAM_ID, + vec![ AccountForTests::user_holding_a(), AccountForTests::vault_a_init(), ], - pda_seeds: Vec::::new(), - } + &token_core::Instruction::Transfer { + amount_to_transfer: BalanceForTests::add_successful_amount_a(), + }, + ) } fn cc_new_definition_token_b() -> ChainedCall { - let mut instruction = vec![0; 23]; - instruction[0] = 1; - instruction[1..17] - .copy_from_slice(&BalanceForTests::add_successful_amount_b().to_le_bytes()); - let instruction_data = risc0_zkvm::serde::to_vec(&instruction) - .expect("Swap Logic: AMM Program expects valid transaction instruction data"); - ChainedCall { - program_id: TOKEN_PROGRAM_ID, - instruction_data, - pre_states: vec![ + ChainedCall::new( + TOKEN_PROGRAM_ID, + vec![ AccountForTests::user_holding_b(), AccountForTests::vault_b_init(), ], - pda_seeds: Vec::::new(), - } + &token_core::Instruction::Transfer { + amount_to_transfer: BalanceForTests::add_successful_amount_b(), + }, + ) } fn cc_new_definition_token_lp() -> ChainedCall { - let mut instruction = vec![0; 23]; - instruction[0] = 1; - instruction[1..17] - .copy_from_slice(&BalanceForTests::add_successful_amount_a().to_le_bytes()); - let instruction_data = risc0_zkvm::serde::to_vec(&instruction) - .expect("AMM Program expects valid transaction instruction data"); - ChainedCall { - program_id: TOKEN_PROGRAM_ID, - instruction_data, - pre_states: vec![ + ChainedCall::new( + TOKEN_PROGRAM_ID, + vec![ AccountForTests::pool_lp_init(), AccountForTests::user_holding_lp_uninit(), ], - pda_seeds: vec![compute_liquidity_token_pda_seed( - IdForTests::pool_definition_id(), - )], - } + &token_core::Instruction::Mint { + amount_to_mint: BalanceForTests::add_successful_amount_a(), + }, + ) + .with_pda_seeds(vec![compute_liquidity_token_pda_seed( + IdForTests::pool_definition_id(), + )]) } } @@ -1566,8 +1445,7 @@ mod tests { account: Account { program_owner: TOKEN_PROGRAM_ID, balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_a_definition_id(), balance: BalanceForTests::user_token_a_balance(), }), @@ -1583,8 +1461,7 @@ mod tests { account: Account { program_owner: TOKEN_PROGRAM_ID, balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_b_definition_id(), balance: BalanceForTests::user_token_b_balance(), }), @@ -1600,8 +1477,7 @@ mod tests { account: Account { program_owner: TOKEN_PROGRAM_ID, balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_a_definition_id(), balance: BalanceForTests::vault_a_reserve_init(), }), @@ -1617,8 +1493,7 @@ mod tests { account: Account { program_owner: TOKEN_PROGRAM_ID, balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_b_definition_id(), balance: BalanceForTests::vault_b_reserve_init(), }), @@ -1634,8 +1509,7 @@ mod tests { account: Account { program_owner: TOKEN_PROGRAM_ID, 
balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_a_definition_id(), balance: BalanceForTests::vault_a_reserve_high(), }), @@ -1651,8 +1525,7 @@ mod tests { account: Account { program_owner: TOKEN_PROGRAM_ID, balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_b_definition_id(), balance: BalanceForTests::vault_b_reserve_high(), }), @@ -1668,8 +1541,7 @@ mod tests { account: Account { program_owner: TOKEN_PROGRAM_ID, balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_a_definition_id(), balance: BalanceForTests::vault_a_reserve_low(), }), @@ -1685,8 +1557,7 @@ mod tests { account: Account { program_owner: TOKEN_PROGRAM_ID, balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_b_definition_id(), balance: BalanceForTests::vault_b_reserve_low(), }), @@ -1702,8 +1573,7 @@ mod tests { account: Account { program_owner: TOKEN_PROGRAM_ID, balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_a_definition_id(), balance: 0, }), @@ -1719,8 +1589,7 @@ mod tests { account: Account { program_owner: TOKEN_PROGRAM_ID, balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_b_definition_id(), balance: 0, }), @@ -1736,11 +1605,10 @@ mod tests { account: Account { program_owner: TOKEN_PROGRAM_ID, balance: 0u128, - data: TokenDefinition::into_data(TokenDefinition { - account_type: 0u8, - name: [1; 6], + data: Data::from(&TokenDefinition::Fungible { + name: String::from("test"), total_supply: BalanceForTests::vault_a_reserve_init(), - metadata_id: AccountId::new([0; 32]), + metadata_id: None, }), nonce: 0, }, @@ -1754,11 +1622,10 @@ mod tests { account: Account { program_owner: TOKEN_PROGRAM_ID, balance: 0u128, - data: TokenDefinition::into_data(TokenDefinition { - account_type: 0u8, - name: [1; 6], + data: Data::from(&TokenDefinition::Fungible { + name: String::from("test"), total_supply: BalanceForTests::vault_a_reserve_init(), - metadata_id: AccountId::new([0; 32]), + metadata_id: None, }), nonce: 0, }, @@ -1772,8 +1639,7 @@ mod tests { account: Account { program_owner: TOKEN_PROGRAM_ID, balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_lp_definition_id(), balance: 0, }), @@ -1789,8 +1655,7 @@ mod tests { account: Account { program_owner: TOKEN_PROGRAM_ID, balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_lp_definition_id(), balance: BalanceForTests::user_token_lp_balance(), }), @@ -2102,8 +1967,7 @@ mod tests { account: Account { program_owner: TOKEN_PROGRAM_ID, balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_a_definition_id(), balance: BalanceForTests::vault_a_reserve_init(), }), @@ -2119,8 +1983,7 @@ mod tests { account: Account { 
program_owner: TOKEN_PROGRAM_ID, balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_b_definition_id(), balance: BalanceForTests::vault_b_reserve_init(), }), @@ -3552,8 +3415,14 @@ mod tests { let chained_call_a = chained_calls[0].clone(); let chained_call_b = chained_calls[1].clone(); - assert!(chained_call_a == ChainedCallForTests::cc_swap_token_a_test_1()); - assert!(chained_call_b == ChainedCallForTests::cc_swap_token_b_test_1()); + assert_eq!( + chained_call_a, + ChainedCallForTests::cc_swap_token_a_test_1() + ); + assert_eq!( + chained_call_b, + ChainedCallForTests::cc_swap_token_b_test_1() + ); } #[test] @@ -3581,7 +3450,13 @@ mod tests { let chained_call_a = chained_calls[1].clone(); let chained_call_b = chained_calls[0].clone(); - assert!(chained_call_a == ChainedCallForTests::cc_swap_token_a_test_2()); - assert!(chained_call_b == ChainedCallForTests::cc_swap_token_b_test_2()); + assert_eq!( + chained_call_a, + ChainedCallForTests::cc_swap_token_a_test_2() + ); + assert_eq!( + chained_call_b, + ChainedCallForTests::cc_swap_token_b_test_2() + ); } } diff --git a/program_methods/guest/src/bin/pinata_token.rs b/program_methods/guest/src/bin/pinata_token.rs index 04613791..188597cb 100644 --- a/program_methods/guest/src/bin/pinata_token.rs +++ b/program_methods/guest/src/bin/pinata_token.rs @@ -5,10 +5,7 @@ use nssa_core::{ write_nssa_outputs_with_chained_call, }, }; -use risc0_zkvm::{ - serde::to_vec, - sha::{Impl, Sha256}, -}; +use risc0_zkvm::sha::{Impl, Sha256}; const PRIZE: u128 = 150; @@ -82,23 +79,21 @@ fn main() { let winner_token_holding_post = winner_token_holding.account.clone(); pinata_definition_post.data = data.next_data(); - let mut instruction_data = vec![0; 23]; - instruction_data[0] = 1; - instruction_data[1..17].copy_from_slice(&PRIZE.to_le_bytes()); - // Flip authorization to true for chained call let mut pinata_token_holding_for_chain_call = pinata_token_holding.clone(); pinata_token_holding_for_chain_call.is_authorized = true; - let chained_calls = vec![ChainedCall { - program_id: pinata_token_holding_post.program_owner, - instruction_data: to_vec(&instruction_data).unwrap(), - pre_states: vec![ + let chained_call = ChainedCall::new( + pinata_token_holding_post.program_owner, + vec![ pinata_token_holding_for_chain_call, winner_token_holding.clone(), ], - pda_seeds: vec![PdaSeed::new([0; 32])], - }]; + &token_core::Instruction::Transfer { + amount_to_transfer: PRIZE, + }, + ) + .with_pda_seeds(vec![PdaSeed::new([0; 32])]); write_nssa_outputs_with_chained_call( instruction_words, @@ -112,6 +107,6 @@ fn main() { AccountPostState::new(pinata_token_holding_post), AccountPostState::new(winner_token_holding_post), ], - chained_calls, + vec![chained_call], ); } diff --git a/program_methods/guest/src/bin/token.rs b/program_methods/guest/src/bin/token.rs index 0f7b6287..0bc3d245 100644 --- a/program_methods/guest/src/bin/token.rs +++ b/program_methods/guest/src/bin/token.rs @@ -1,700 +1,13 @@ -use nssa_core::{ - account::{Account, AccountId, AccountWithMetadata, Data}, - program::{ - AccountPostState, DEFAULT_PROGRAM_ID, ProgramInput, read_nssa_inputs, write_nssa_outputs, - }, -}; - -// The token program has three functions: -// 1. New token definition. Arguments to this function are: -// * Two **default** accounts: [definition_account, holding_account]. The first default account -// will be initialized with the token definition account values. 
The second account will be -// initialized to a token holding account for the new token, holding the entire total supply. -// * An instruction data of 23-bytes, indicating the total supply and the token name, with the -// following layout: [0x00 || total_supply (little-endian 16 bytes) || name (6 bytes)] The -// name cannot be equal to [0x00, 0x00, 0x00, 0x00, 0x00, 0x00] -// 2. Token transfer Arguments to this function are: -// * Two accounts: [sender_account, recipient_account]. -// * An instruction data byte string of length 23, indicating the total supply with the -// following layout [0x01 || amount (little-endian 16 bytes) || 0x00 || 0x00 || 0x00 || 0x00 -// || 0x00 || 0x00]. -// 3. Initialize account with zero balance Arguments to this function are: -// * Two accounts: [definition_account, account_to_initialize]. -// * An dummy byte string of length 23, with the following layout [0x02 || 0x00 || 0x00 || 0x00 -// || ... || 0x00 || 0x00]. -// 4. Burn tokens from a Token Holding account (thus lowering total supply) Arguments to this -// function are: -// * Two accounts: [definition_account, holding_account]. -// * Authorization required: holding_account -// * An instruction data byte string of length 23, indicating the balance to burn with the -// folloiwng layout -// [0x03 || amount (little-endian 16 bytes) || 0x00 || 0x00 || 0x00 || 0x00 || 0x00 || 0x00]. -// 5. Mint additional supply of tokens tokens to a Token Holding account (thus increasing total -// supply) Arguments to this function are: -// * Two accounts: [definition_account, holding_account]. -// * Authorization required: definition_account -// * An instruction data byte string of length 23, indicating the balance to mint with the -// folloiwng layout -// [0x04 || amount (little-endian 16 bytes) || 0x00 || 0x00 || 0x00 || 0x00 || 0x00 || 0x00]. -// 6. New token definition with metadata. Arguments to this function are: -// * Three **default** accounts: [definition_account, metadata_account. holding_account]. The -// first default account will be initialized with the token definition account values. The -// second account will be initialized to a token metadata account for the new token -// definition. The third account will be initialized to a token holding account for the new -// token, holding the entire total supply. -// * An instruction data of 474-bytes, indicating the token name, total supply, token standard, -// metadata standard and metadata_values (uri and creators). the following layout: [0x05 || -// total_supply (little-endian 16 bytes) || name (6 bytes) || token_standard || -// metadata_standard || metadata_values] The name cannot be equal to [0x00, 0x00, 0x00, 0x00, -// 0x00, 0x00] -// 7. Print NFT copy from Master NFT Arguments to this function are: -// * Two accounts: [master_nft, printed_account (default)]. -// * Authorization required: master_nft -// * An dummy byte string of length 23, with the following layout [0x06 || 0x00 || 0x00 || 0x00 -// || ... || 0x00 || 0x00]. 
-const TOKEN_STANDARD_FUNGIBLE_TOKEN: u8 = 0; -const TOKEN_STANDARD_FUNGIBLE_ASSET: u8 = 1; -const TOKEN_STANDARD_NONFUNGIBLE: u8 = 2; -const TOKEN_STANDARD_NONFUNGIBLE_PRINTABLE: u8 = 3; - -const METADATA_TYPE_SIMPLE: u8 = 0; -const METADATA_TYPE_EXPANDED: u8 = 1; - -const TOKEN_DEFINITION_DATA_SIZE: usize = 55; - -const TOKEN_HOLDING_STANDARD: u8 = 1; -const TOKEN_HOLDING_NFT_MASTER: u8 = 2; -const TOKEN_HOLDING_NFT_PRINTED_COPY: u8 = 3; - -const TOKEN_HOLDING_DATA_SIZE: usize = 49; -const CURRENT_VERSION: u8 = 1; - -const TOKEN_METADATA_DATA_SIZE: usize = 463; - -fn is_token_standard_valid(standard: u8) -> bool { - matches!( - standard, - TOKEN_STANDARD_FUNGIBLE_TOKEN - | TOKEN_STANDARD_FUNGIBLE_ASSET - | TOKEN_STANDARD_NONFUNGIBLE - | TOKEN_STANDARD_NONFUNGIBLE_PRINTABLE - ) -} - -fn is_metadata_type_valid(standard: u8) -> bool { - matches!(standard, METADATA_TYPE_SIMPLE | METADATA_TYPE_EXPANDED) -} - -fn is_token_holding_type_valid(standard: u8) -> bool { - matches!(standard, |TOKEN_HOLDING_STANDARD| TOKEN_HOLDING_NFT_MASTER - | TOKEN_HOLDING_NFT_PRINTED_COPY) -} - -struct TokenDefinition { - account_type: u8, - name: [u8; 6], - total_supply: u128, - metadata_id: AccountId, -} - -impl TokenDefinition { - fn into_data(self) -> Data { - let mut bytes = Vec::::new(); - bytes.extend_from_slice(&[self.account_type]); - bytes.extend_from_slice(&self.name); - bytes.extend_from_slice(&self.total_supply.to_le_bytes()); - bytes.extend_from_slice(&self.metadata_id.to_bytes()); - - if bytes.len() != TOKEN_DEFINITION_DATA_SIZE { - panic!("Invalid Token Definition data"); - } - - Data::try_from(bytes).expect("Token definition data size must fit into data") - } - - fn parse(data: &Data) -> Option { - let data = Vec::::from(data.clone()); - - if data.len() != TOKEN_DEFINITION_DATA_SIZE { - None - } else { - let account_type = data[0]; - let name = data[1..7].try_into().expect("Name must be a 6 bytes"); - let total_supply = u128::from_le_bytes( - data[7..23] - .try_into() - .expect("Total supply must be 16 bytes little-endian"), - ); - let metadata_id = AccountId::new( - data[23..TOKEN_DEFINITION_DATA_SIZE] - .try_into() - .expect("Token Program expects valid Account Id for Metadata"), - ); - - let this = Some(Self { - account_type, - name, - total_supply, - metadata_id, - }); - - match account_type { - TOKEN_STANDARD_NONFUNGIBLE if total_supply != 1 => None, - TOKEN_STANDARD_FUNGIBLE_TOKEN if metadata_id != AccountId::new([0; 32]) => None, - _ => this, - } - } - } -} - -struct TokenHolding { - account_type: u8, - definition_id: AccountId, - balance: u128, -} - -impl TokenHolding { - fn new(definition_id: &AccountId) -> Self { - Self { - account_type: TOKEN_HOLDING_STANDARD, - definition_id: *definition_id, - balance: 0, - } - } - - fn parse(data: &Data) -> Option { - let data = Vec::::from(data.clone()); - - if data.len() != TOKEN_HOLDING_DATA_SIZE { - return None; - } - - // Check account_type - if !is_token_holding_type_valid(data[0]) { - return None; - } - - let account_type = data[0]; - let definition_id = AccountId::new( - data[1..33] - .try_into() - .expect("Defintion ID must be 32 bytes long"), - ); - let balance = u128::from_le_bytes( - data[33..] 
- .try_into() - .expect("balance must be 16 bytes little-endian"), - ); - - Some(Self { - definition_id, - balance, - account_type, - }) - } - - fn into_data(self) -> Data { - if !is_token_holding_type_valid(self.account_type) { - panic!("Invalid Token Holding type"); - } - - let mut bytes = Vec::::new(); - bytes.extend_from_slice(&[self.account_type]); - bytes.extend_from_slice(&self.definition_id.to_bytes()); - bytes.extend_from_slice(&self.balance.to_le_bytes()); - - if bytes.len() != TOKEN_HOLDING_DATA_SIZE { - panic!("Invalid Token Holding data"); - } - - Data::try_from(bytes).expect("Invalid data") - } -} - -struct TokenMetadata { - account_type: u8, - version: u8, - definition_id: AccountId, - uri: [u8; 200], - creators: [u8; 250], - /// Block id - primary_sale_date: u64, -} - -impl TokenMetadata { - fn into_data(self) -> Data { - if !is_metadata_type_valid(self.account_type) { - panic!("Invalid Metadata type"); - } - - let mut bytes = Vec::::new(); - bytes.extend_from_slice(&[self.account_type]); - bytes.extend_from_slice(&[self.version]); - bytes.extend_from_slice(&self.definition_id.to_bytes()); - bytes.extend_from_slice(&self.uri); - bytes.extend_from_slice(&self.creators); - bytes.extend_from_slice(&self.primary_sale_date.to_le_bytes()); - - if bytes.len() != TOKEN_METADATA_DATA_SIZE { - panic!("Invalid Token Definition data length"); - } - - Data::try_from(bytes).expect("Invalid data") - } -} - -fn transfer(pre_states: &[AccountWithMetadata], balance_to_move: u128) -> Vec { - if pre_states.len() != 2 { - panic!("Invalid number of input accounts"); - } - let sender = &pre_states[0]; - let recipient = &pre_states[1]; - - if !sender.is_authorized { - panic!("Sender authorization is missing"); - } - - let sender_holding = TokenHolding::parse(&sender.account.data).expect("Invalid sender data"); - - let recipient_holding = if recipient.account == Account::default() { - TokenHolding::new(&sender_holding.definition_id) - } else { - TokenHolding::parse(&recipient.account.data).expect("Invalid recipient data") - }; - - if sender_holding.definition_id != recipient_holding.definition_id { - panic!("Sender and recipient definition id mismatch"); - } - - let (sender_holding, recipient_holding) = - if sender_holding.account_type != TOKEN_HOLDING_NFT_MASTER { - standard_transfer(sender_holding, recipient_holding, balance_to_move) - } else { - nft_master_transfer(sender_holding, recipient_holding, balance_to_move) - }; - - let sender_post = { - let mut this = sender.account.clone(); - this.data = sender_holding.into_data(); - AccountPostState::new(this) - }; - - let recipient_post = { - let mut this = recipient.account.clone(); - this.data = recipient_holding.into_data(); - - // Claim the recipient account if it has default program owner - if this.program_owner == DEFAULT_PROGRAM_ID { - AccountPostState::new_claimed(this) - } else { - AccountPostState::new(this) - } - }; - - vec![sender_post, recipient_post] -} - -fn standard_transfer( - sender_holding: TokenHolding, - recipient_holding: TokenHolding, - balance_to_move: u128, -) -> (TokenHolding, TokenHolding) { - let mut sender_holding = sender_holding; - let mut recipient_holding = recipient_holding; - - if sender_holding.balance < balance_to_move { - panic!("Insufficient balance"); - } - - sender_holding.balance = sender_holding - .balance - .checked_sub(balance_to_move) - .expect("Checked above"); - recipient_holding.balance = recipient_holding - .balance - .checked_add(balance_to_move) - .expect("Recipient balance overflow"); - - 
recipient_holding.account_type = sender_holding.account_type; - - (sender_holding, recipient_holding) -} - -fn nft_master_transfer( - sender_holding: TokenHolding, - recipient_holding: TokenHolding, - balance_to_move: u128, -) -> (TokenHolding, TokenHolding) { - let mut sender_holding = sender_holding; - let mut recipient_holding = recipient_holding; - - if recipient_holding.balance != 0 { - panic!("Invalid balance in recipient account for NFT transfer"); - } - - if sender_holding.balance != balance_to_move { - panic!("Invalid balance for NFT Master transfer"); - } - - sender_holding.balance = 0; - recipient_holding.balance = balance_to_move; - recipient_holding.account_type = sender_holding.account_type; - - (sender_holding, recipient_holding) -} - -fn new_definition( - pre_states: &[AccountWithMetadata], - name: [u8; 6], - total_supply: u128, -) -> Vec { - if pre_states.len() != 2 { - panic!("Invalid number of input accounts"); - } - - let definition_target_account = &pre_states[0]; - let holding_target_account = &pre_states[1]; - - if definition_target_account.account != Account::default() { - panic!("Definition target account must have default values"); - } - - if holding_target_account.account != Account::default() { - panic!("Holding target account must have default values"); - } - - let token_definition = TokenDefinition { - account_type: TOKEN_STANDARD_FUNGIBLE_TOKEN, - name, - total_supply, - metadata_id: AccountId::new([0; 32]), - }; - - let token_holding = TokenHolding { - account_type: TOKEN_HOLDING_STANDARD, - definition_id: definition_target_account.account_id, - balance: total_supply, - }; - - let mut definition_target_account_post = definition_target_account.account.clone(); - definition_target_account_post.data = token_definition.into_data(); - - let mut holding_target_account_post = holding_target_account.account.clone(); - holding_target_account_post.data = token_holding.into_data(); - - vec![ - AccountPostState::new_claimed(definition_target_account_post), - AccountPostState::new_claimed(holding_target_account_post), - ] -} - -fn new_definition_with_metadata( - pre_states: &[AccountWithMetadata], - name: [u8; 6], - total_supply: u128, - token_standard: u8, - metadata_standard: u8, - metadata_values: &Data, -) -> Vec { - if pre_states.len() != 3 { - panic!("Invalid number of input accounts"); - } - - let definition_target_account = &pre_states[0]; - let metadata_target_account = &pre_states[1]; - let holding_target_account = &pre_states[2]; - - if definition_target_account.account != Account::default() { - panic!("Definition target account must have default values"); - } - - if metadata_target_account.account != Account::default() { - panic!("Metadata target account must have default values"); - } - - if holding_target_account.account != Account::default() { - panic!("Holding target account must have default values"); - } - - if !is_token_standard_valid(token_standard) { - panic!("Invalid Token Standard provided"); - } - - if !is_metadata_type_valid(metadata_standard) { - panic!("Invalid Metadata Standadard provided"); - } - - if !valid_total_supply_for_token_standard(total_supply, token_standard) { - panic!("Invalid total supply for the specified token supply"); - } - - let token_definition = TokenDefinition { - account_type: token_standard, - name, - total_supply, - metadata_id: metadata_target_account.account_id, - }; - - let token_holding = TokenHolding { - account_type: TOKEN_HOLDING_STANDARD, - definition_id: definition_target_account.account_id, - balance: 
total_supply, - }; - - if metadata_values.len() != 450 { - panic!("Metadata values data should be 450 bytes"); - } - - let uri: [u8; 200] = metadata_values[0..200] - .try_into() - .expect("Token program expects valid uri for Metadata"); - let creators: [u8; 250] = metadata_values[200..450] - .try_into() - .expect("Token program expects valid creators for Metadata"); - - let token_metadata = TokenMetadata { - account_type: metadata_standard, - version: CURRENT_VERSION, - definition_id: definition_target_account.account_id, - uri, - creators, - primary_sale_date: 0u64, // TODO #261: future works to implement this - }; - - let mut definition_target_account_post = definition_target_account.account.clone(); - definition_target_account_post.data = token_definition.into_data(); - - let mut holding_target_account_post = holding_target_account.account.clone(); - holding_target_account_post.data = token_holding.into_data(); - - let mut metadata_target_account_post = metadata_target_account.account.clone(); - metadata_target_account_post.data = token_metadata.into_data(); - - vec![ - AccountPostState::new_claimed(definition_target_account_post), - AccountPostState::new_claimed(holding_target_account_post), - AccountPostState::new_claimed(metadata_target_account_post), - ] -} - -fn valid_total_supply_for_token_standard(total_supply: u128, token_standard: u8) -> bool { - token_standard != TOKEN_STANDARD_NONFUNGIBLE || total_supply == 1 -} - -fn initialize_account(pre_states: &[AccountWithMetadata]) -> Vec { - if pre_states.len() != 2 { - panic!("Invalid number of accounts"); - } - - let definition = &pre_states[0]; - let account_to_initialize = &pre_states[1]; - - if account_to_initialize.account != Account::default() { - panic!("Only Uninitialized accounts can be initialized"); - } - - // TODO: #212 We should check that this is an account owned by the token program. 
- // This check can't be done here since the ID of the program is known only after compiling it - // - // Check definition account is valid - let _definition_values = - TokenDefinition::parse(&definition.account.data).expect("Definition account must be valid"); - let holding_values = TokenHolding::new(&definition.account_id); - - let definition_post = definition.account.clone(); - let mut account_to_initialize = account_to_initialize.account.clone(); - account_to_initialize.data = holding_values.into_data(); - - vec![ - AccountPostState::new(definition_post), - AccountPostState::new_claimed(account_to_initialize), - ] -} - -fn burn(pre_states: &[AccountWithMetadata], balance_to_burn: u128) -> Vec { - if pre_states.len() != 2 { - panic!("Invalid number of accounts"); - } - - let definition = &pre_states[0]; - let user_holding = &pre_states[1]; - - if !user_holding.is_authorized { - panic!("Authorization is missing"); - } - - let definition_values = TokenDefinition::parse(&definition.account.data) - .expect("Token Definition account must be valid"); - let user_values = TokenHolding::parse(&user_holding.account.data) - .expect("Token Holding account must be valid"); - - if definition.account_id != user_values.definition_id { - panic!("Mismatch Token Definition and Token Holding"); - } - - if user_values.balance < balance_to_burn { - panic!("Insufficient balance to burn"); - } - - let mut post_user_holding = user_holding.account.clone(); - let mut post_definition = definition.account.clone(); - - post_user_holding.data = TokenHolding::into_data(TokenHolding { - account_type: user_values.account_type, - definition_id: user_values.definition_id, - balance: user_values - .balance - .checked_sub(balance_to_burn) - .expect("Checked above"), - }); - - post_definition.data = TokenDefinition::into_data(TokenDefinition { - account_type: definition_values.account_type, - name: definition_values.name, - total_supply: definition_values - .total_supply - .checked_sub(balance_to_burn) - .expect("Total supply underflow"), - metadata_id: definition_values.metadata_id, - }); - - vec![ - AccountPostState::new(post_definition), - AccountPostState::new(post_user_holding), - ] -} - -fn is_mintable(account_type: u8) -> bool { - account_type != TOKEN_STANDARD_NONFUNGIBLE -} - -fn mint_additional_supply( - pre_states: &[AccountWithMetadata], - amount_to_mint: u128, -) -> Vec { - if pre_states.len() != 2 { - panic!("Invalid number of accounts"); - } - - let definition = &pre_states[0]; - let token_holding = &pre_states[1]; - - if !definition.is_authorized { - panic!("Definition authorization is missing"); - } - - let definition_values = - TokenDefinition::parse(&definition.account.data).expect("Definition account must be valid"); - - let token_holding_values: TokenHolding = if token_holding.account == Account::default() { - TokenHolding::new(&definition.account_id) - } else { - TokenHolding::parse(&token_holding.account.data).expect("Holding account must be valid") - }; - - if !is_mintable(definition_values.account_type) { - panic!("Token Definition's standard does not permit minting additional supply"); - } - - if definition.account_id != token_holding_values.definition_id { - panic!("Mismatch Token Definition and Token Holding"); - } - - let token_holding_post_data = TokenHolding { - account_type: token_holding_values.account_type, - definition_id: token_holding_values.definition_id, - balance: token_holding_values - .balance - .checked_add(amount_to_mint) - .expect("New balance overflow"), - }; - - let 
post_total_supply = definition_values - .total_supply - .checked_add(amount_to_mint) - .expect("Total supply overflow"); - - let post_definition_data = TokenDefinition { - account_type: definition_values.account_type, - name: definition_values.name, - total_supply: post_total_supply, - metadata_id: definition_values.metadata_id, - }; - - let post_definition = { - let mut this = definition.account.clone(); - this.data = post_definition_data.into_data(); - AccountPostState::new(this) - }; - - let token_holding_post = { - let mut this = token_holding.account.clone(); - this.data = token_holding_post_data.into_data(); - - // Claim the recipient account if it has default program owner - if this.program_owner == DEFAULT_PROGRAM_ID { - AccountPostState::new_claimed(this) - } else { - AccountPostState::new(this) - } - }; - vec![post_definition, token_holding_post] -} - -fn print_nft(pre_states: &[AccountWithMetadata]) -> Vec { - if pre_states.len() != 2 { - panic!("Invalid number of accounts"); - } - - let master_account = &pre_states[0]; - let printed_account = &pre_states[1]; - - if !master_account.is_authorized { - panic!("Master NFT Account must be authorized"); - } - - if printed_account.account != Account::default() { - panic!("Printed Account must be uninitialized"); - } - - let mut master_account_data = - TokenHolding::parse(&master_account.account.data).expect("Invalid Token Holding data"); - - if master_account_data.account_type != TOKEN_HOLDING_NFT_MASTER { - panic!("Invalid Token Holding provided as NFT Master Account"); - } - - if master_account_data.balance < 2 { - panic!("Insufficient balance to print another NFT copy"); - } - - let definition_id = master_account_data.definition_id; - - let post_master_account = { - let mut this = master_account.account.clone(); - master_account_data.balance -= 1; - this.data = master_account_data.into_data(); - AccountPostState::new(this) - }; - - let post_printed_account = { - let mut this = printed_account.account.clone(); - - let printed_data = TokenHolding { - account_type: TOKEN_HOLDING_NFT_PRINTED_COPY, - definition_id, - balance: 1, - }; - - this.data = TokenHolding::into_data(printed_data); - - AccountPostState::new_claimed(this) - }; - - vec![post_master_account, post_printed_account] -} - -type Instruction = Vec; +//! The Token Program. +//! +//! This program implements a simple token system supporting both fungible and non-fungible tokens +//! (NFTs). +//! +//! Token program accepts [`Instruction`] as input, refer to the corresponding documentation +//! for more details. + +use nssa_core::program::{ProgramInput, read_nssa_inputs, write_nssa_outputs}; +use token_program::core::Instruction; fn main() { let ( @@ -705,1622 +18,68 @@ fn main() { instruction_words, ) = read_nssa_inputs::(); - let post_states = match instruction[0] { - 0 => { - // Parse instruction - let total_supply = u128::from_le_bytes( - instruction[1..17] - .try_into() - .expect("Total supply must be 16 bytes little-endian"), - ); - let name: [u8; 6] = instruction[17..] 
+ let pre_states_clone = pre_states.clone(); + + let post_states = match instruction { + Instruction::Transfer { + amount_to_transfer: balance_to_move, + } => { + let [sender, recipient] = pre_states .try_into() - .expect("Name must be 6 bytes long"); - assert_ne!(name, [0; 6]); - - // Execute - new_definition(&pre_states, name, total_supply) + .expect("Transfer instruction requires exactly two accounts"); + token_program::transfer::transfer(sender, recipient, balance_to_move) } - 1 => { - // Parse instruction - let balance_to_move = u128::from_le_bytes( - instruction[1..17] - .try_into() - .expect("Balance to move must be 16 bytes little-endian"), - ); - let name: [u8; 6] = instruction[17..] + Instruction::NewFungibleDefinition { name, total_supply } => { + let [definition_account, holding_account] = pre_states .try_into() - .expect("Name must be 6 bytes long"); - assert_eq!(name, [0; 6]); - - // Execute - transfer(&pre_states, balance_to_move) - } - 2 => { - // Initialize account - if instruction[1..] != [0; 22] { - panic!("Invalid instruction for initialize account"); - } - initialize_account(&pre_states) - } - 3 => { - let balance_to_burn = u128::from_le_bytes( - instruction[1..17] - .try_into() - .expect("Balance to burn must be 16 bytes little-endian"), - ); - let name: [u8; 6] = instruction[17..] - .try_into() - .expect("Name must be 6 bytes long"); - assert_eq!(name, [0; 6]); - - // Execute - burn(&pre_states, balance_to_burn) - } - 4 => { - let balance_to_mint = u128::from_le_bytes( - instruction[1..17] - .try_into() - .expect("Balance to burn must be 16 bytes little-endian"), - ); - let name: [u8; 6] = instruction[17..] - .try_into() - .expect("Name must be 6 bytes long"); - assert_eq!(name, [0; 6]); - - // Execute - mint_additional_supply(&pre_states, balance_to_mint) - } - 5 => { - if instruction.len() != 474 { - panic!("Invalid instruction length") - } - - // Parse instruction - let total_supply = u128::from_le_bytes( - instruction[1..17] - .try_into() - .expect("Total supply must be 16 bytes little-endian"), - ); - let name = instruction[17..23] - .try_into() - .expect("Name must be 6 bytes long"); - assert_ne!(name, [0; 6]); - let token_standard = instruction[23]; - let metadata_standard = instruction[24]; - let metadata_values: Data = - Data::try_from(instruction[25..474].to_vec()).expect("Invalid metadata"); - - // Execute - new_definition_with_metadata( - &pre_states, + .expect("NewFungibleDefinition instruction requires exactly two accounts"); + token_program::new_definition::new_fungible_definition( + definition_account, + holding_account, name, total_supply, - token_standard, - metadata_standard, - &metadata_values, ) } - 6 => { - if instruction.len() != 23 { - panic!("Invalid instruction length"); - } - - // Initialize account - if instruction[1..] 
!= [0; 22] { - panic!("Invalid instruction for initialize account"); - } - - print_nft(&pre_states) + Instruction::NewDefinitionWithMetadata { + new_definition, + metadata, + } => { + let [definition_account, holding_account, metadata_account] = pre_states + .try_into() + .expect("NewDefinitionWithMetadata instruction requires exactly three accounts"); + token_program::new_definition::new_definition_with_metadata( + definition_account, + holding_account, + metadata_account, + new_definition, + *metadata, + ) + } + Instruction::InitializeAccount => { + let [definition_account, account_to_initialize] = pre_states + .try_into() + .expect("InitializeAccount instruction requires exactly two accounts"); + token_program::initialize::initialize_account(definition_account, account_to_initialize) + } + Instruction::Burn { amount_to_burn } => { + let [definition_account, user_holding_account] = pre_states + .try_into() + .expect("Burn instruction requires exactly two accounts"); + token_program::burn::burn(definition_account, user_holding_account, amount_to_burn) + } + Instruction::Mint { amount_to_mint } => { + let [definition_account, user_holding_account] = pre_states + .try_into() + .expect("Mint instruction requires exactly two accounts"); + token_program::mint::mint(definition_account, user_holding_account, amount_to_mint) + } + Instruction::PrintNft => { + let [master_account, printed_account] = pre_states + .try_into() + .expect("PrintNft instruction requires exactly two accounts"); + token_program::print_nft::print_nft(master_account, printed_account) } - _ => panic!("Invalid instruction"), }; - write_nssa_outputs(instruction_words, pre_states, post_states); -} - -#[cfg(test)] -mod tests { - use nssa_core::account::{Account, AccountId, AccountWithMetadata, Data}; - - use crate::{ - TOKEN_DEFINITION_DATA_SIZE, TOKEN_HOLDING_DATA_SIZE, TOKEN_HOLDING_NFT_MASTER, - TOKEN_HOLDING_NFT_PRINTED_COPY, TOKEN_HOLDING_STANDARD, TOKEN_STANDARD_FUNGIBLE_TOKEN, - TOKEN_STANDARD_NONFUNGIBLE, TokenDefinition, TokenHolding, burn, mint_additional_supply, - new_definition, new_definition_with_metadata, print_nft, transfer, - }; - - struct BalanceForTests; - struct IdForTests; - - struct AccountForTests; - - impl AccountForTests { - fn definition_account_auth() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [5u32; 8], - balance: 0u128, - data: TokenDefinition::into_data(TokenDefinition { - account_type: TOKEN_STANDARD_FUNGIBLE_TOKEN, - name: [2; 6], - total_supply: BalanceForTests::init_supply(), - metadata_id: AccountId::new([0; 32]), - }), - nonce: 0, - }, - is_authorized: true, - account_id: IdForTests::pool_definition_id(), - } - } - - fn definition_account_without_auth() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [5u32; 8], - balance: 0u128, - data: TokenDefinition::into_data(TokenDefinition { - account_type: TOKEN_STANDARD_FUNGIBLE_TOKEN, - name: [2; 6], - total_supply: BalanceForTests::init_supply(), - metadata_id: AccountId::new([0; 32]), - }), - nonce: 0, - }, - is_authorized: false, - account_id: IdForTests::pool_definition_id(), - } - } - - fn holding_different_definition() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [5u32; 8], - balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: TOKEN_HOLDING_STANDARD, - definition_id: IdForTests::pool_definition_id_diff(), - balance: BalanceForTests::holding_balance(), - }), - nonce: 0, - }, - is_authorized: true, 
- account_id: IdForTests::holding_id(), - } - } - - fn holding_same_definition_with_authorization() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [5u32; 8], - balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: TOKEN_HOLDING_STANDARD, - definition_id: IdForTests::pool_definition_id(), - balance: BalanceForTests::holding_balance(), - }), - nonce: 0, - }, - is_authorized: true, - account_id: IdForTests::holding_id(), - } - } - - fn holding_same_definition_without_authorization() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [5u32; 8], - balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: TOKEN_HOLDING_STANDARD, - definition_id: IdForTests::pool_definition_id(), - balance: BalanceForTests::holding_balance(), - }), - nonce: 0, - }, - is_authorized: false, - account_id: IdForTests::holding_id(), - } - } - - fn holding_same_definition_without_authorization_overflow() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [5u32; 8], - balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: TOKEN_HOLDING_STANDARD, - definition_id: IdForTests::pool_definition_id(), - balance: BalanceForTests::init_supply(), - }), - nonce: 0, - }, - is_authorized: false, - account_id: IdForTests::holding_id(), - } - } - - fn definition_account_post_burn() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [5u32; 8], - balance: 0u128, - data: TokenDefinition::into_data(TokenDefinition { - account_type: TOKEN_STANDARD_FUNGIBLE_TOKEN, - name: [2; 6], - total_supply: BalanceForTests::init_supply_burned(), - metadata_id: AccountId::new([0; 32]), - }), - nonce: 0, - }, - is_authorized: true, - account_id: IdForTests::pool_definition_id(), - } - } - - fn holding_account_post_burn() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [5u32; 8], - balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: TOKEN_HOLDING_STANDARD, - definition_id: IdForTests::pool_definition_id(), - balance: BalanceForTests::holding_balance_burned(), - }), - nonce: 0, - }, - is_authorized: false, - account_id: IdForTests::holding_id(), - } - } - - fn holding_account_uninit() -> AccountWithMetadata { - AccountWithMetadata { - account: Account::default(), - is_authorized: false, - account_id: IdForTests::holding_id_2(), - } - } - - fn init_mint() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [0u32; 8], - balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: TOKEN_HOLDING_STANDARD, - definition_id: IdForTests::pool_definition_id(), - balance: BalanceForTests::mint_success(), - }), - nonce: 0, - }, - is_authorized: false, - account_id: IdForTests::holding_id(), - } - } - - fn holding_account_same_definition_mint() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [5u32; 8], - balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: TOKEN_HOLDING_STANDARD, - definition_id: IdForTests::pool_definition_id(), - balance: BalanceForTests::holding_balance_mint(), - }), - nonce: 0, - }, - is_authorized: true, - account_id: IdForTests::pool_definition_id(), - } - } - - fn definition_account_mint() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [5u32; 8], - balance: 0u128, - data: 
TokenDefinition::into_data(TokenDefinition { - account_type: TOKEN_STANDARD_FUNGIBLE_TOKEN, - name: [2; 6], - total_supply: BalanceForTests::init_supply_mint(), - metadata_id: AccountId::new([0; 32]), - }), - nonce: 0, - }, - is_authorized: true, - account_id: IdForTests::pool_definition_id(), - } - } - - fn holding_same_definition_with_authorization_and_large_balance() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [5u32; 8], - balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: TOKEN_HOLDING_STANDARD, - definition_id: IdForTests::pool_definition_id(), - balance: BalanceForTests::mint_overflow(), - }), - nonce: 0, - }, - is_authorized: true, - account_id: IdForTests::pool_definition_id(), - } - } - - fn definition_account_with_authorization_nonfungible() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [5u32; 8], - balance: 0u128, - data: TokenDefinition::into_data(TokenDefinition { - account_type: TOKEN_STANDARD_NONFUNGIBLE, - name: [2; 6], - total_supply: 1, - metadata_id: AccountId::new([0; 32]), - }), - nonce: 0, - }, - is_authorized: true, - account_id: IdForTests::pool_definition_id(), - } - } - - fn definition_account_uninit() -> AccountWithMetadata { - AccountWithMetadata { - account: Account::default(), - is_authorized: false, - account_id: IdForTests::pool_definition_id(), - } - } - - fn holding_account_init() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [5u32; 8], - balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: TOKEN_HOLDING_STANDARD, - definition_id: IdForTests::pool_definition_id(), - balance: BalanceForTests::init_supply(), - }), - nonce: 0, - }, - is_authorized: true, - account_id: IdForTests::holding_id(), - } - } - - fn definition_account_unclaimed() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [0u32; 8], - balance: 0u128, - data: TokenDefinition::into_data(TokenDefinition { - account_type: TOKEN_STANDARD_FUNGIBLE_TOKEN, - name: [2; 6], - total_supply: BalanceForTests::init_supply(), - metadata_id: AccountId::new([0; 32]), - }), - nonce: 0, - }, - is_authorized: true, - account_id: IdForTests::pool_definition_id(), - } - } - - fn holding_account_unclaimed() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [0u32; 8], - balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: TOKEN_HOLDING_STANDARD, - definition_id: IdForTests::pool_definition_id(), - balance: BalanceForTests::init_supply(), - }), - nonce: 0, - }, - is_authorized: true, - account_id: IdForTests::holding_id(), - } - } - - fn holding_account2_init() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [5u32; 8], - balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: TOKEN_HOLDING_STANDARD, - definition_id: IdForTests::pool_definition_id(), - balance: BalanceForTests::init_supply(), - }), - nonce: 0, - }, - is_authorized: true, - account_id: IdForTests::holding_id_2(), - } - } - - fn holding_account2_init_post_transfer() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [5u32; 8], - balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: TOKEN_HOLDING_STANDARD, - definition_id: IdForTests::pool_definition_id(), - balance: BalanceForTests::recipient_post_transfer(), - }), - nonce: 0, - }, - is_authorized: true, - 
account_id: IdForTests::holding_id_2(), - } - } - - fn holding_account_init_post_transfer() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [5u32; 8], - balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: TOKEN_HOLDING_STANDARD, - definition_id: IdForTests::pool_definition_id(), - balance: BalanceForTests::sender_post_transfer(), - }), - nonce: 0, - }, - is_authorized: true, - account_id: IdForTests::holding_id(), - } - } - - fn holding_account_master_nft() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [5u32; 8], - balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: TOKEN_HOLDING_NFT_MASTER, - definition_id: IdForTests::pool_definition_id(), - balance: BalanceForTests::printable_copies(), - }), - nonce: 0, - }, - is_authorized: true, - account_id: IdForTests::holding_id(), - } - } - - fn holding_account_master_nft_insufficient_balance() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [5u32; 8], - balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: TOKEN_HOLDING_NFT_MASTER, - definition_id: IdForTests::pool_definition_id(), - balance: 1, - }), - nonce: 0, - }, - is_authorized: true, - account_id: IdForTests::holding_id(), - } - } - - fn holding_account_master_nft_after_print() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [5u32; 8], - balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: TOKEN_HOLDING_NFT_MASTER, - definition_id: IdForTests::pool_definition_id(), - balance: BalanceForTests::printable_copies() - 1, - }), - nonce: 0, - }, - is_authorized: true, - account_id: IdForTests::holding_id(), - } - } - - fn holding_account_printed_nft() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [0u32; 8], - balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: TOKEN_HOLDING_NFT_PRINTED_COPY, - definition_id: IdForTests::pool_definition_id(), - balance: 1, - }), - nonce: 0, - }, - is_authorized: false, - account_id: IdForTests::holding_id(), - } - } - - fn holding_account_with_master_nft_transferred_to() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [0u32; 8], - balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: TOKEN_HOLDING_NFT_MASTER, - definition_id: IdForTests::pool_definition_id(), - balance: BalanceForTests::printable_copies(), - }), - nonce: 0, - }, - is_authorized: true, - account_id: IdForTests::holding_id_2(), - } - } - - fn holding_account_master_nft_post_transfer() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [5u32; 8], - balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: TOKEN_HOLDING_NFT_MASTER, - definition_id: IdForTests::pool_definition_id(), - balance: 0, - }), - nonce: 0, - }, - is_authorized: true, - account_id: IdForTests::holding_id(), - } - } - } - - impl BalanceForTests { - fn init_supply() -> u128 { - 100_000 - } - - fn holding_balance() -> u128 { - 1_000 - } - - fn init_supply_burned() -> u128 { - 99_500 - } - - fn holding_balance_burned() -> u128 { - 500 - } - - fn burn_success() -> u128 { - 500 - } - - fn burn_insufficient() -> u128 { - 1_500 - } - - fn mint_success() -> u128 { - 50_000 - } - - fn holding_balance_mint() -> u128 { - 51_000 - } - - fn mint_overflow() -> u128 { - u128::MAX - 40_000 - } - - fn 
init_supply_mint() -> u128 { - 150_000 - } - - fn sender_post_transfer() -> u128 { - 95_000 - } - - fn recipient_post_transfer() -> u128 { - 105_000 - } - - fn transfer_amount() -> u128 { - 5_000 - } - - fn printable_copies() -> u128 { - 10 - } - } - - impl IdForTests { - fn pool_definition_id() -> AccountId { - AccountId::new([15; 32]) - } - - fn pool_definition_id_diff() -> AccountId { - AccountId::new([16; 32]) - } - - fn holding_id() -> AccountId { - AccountId::new([17; 32]) - } - - fn holding_id_2() -> AccountId { - AccountId::new([42; 32]) - } - } - - #[should_panic(expected = "Invalid number of input accounts")] - #[test] - fn test_call_new_definition_with_invalid_number_of_accounts_1() { - let pre_states = vec![AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([1; 32]), - }]; - let _post_states = new_definition(&pre_states, [0xca, 0xfe, 0xca, 0xfe, 0xca, 0xfe], 10); - } - - #[should_panic(expected = "Invalid number of input accounts")] - #[test] - fn test_call_new_definition_with_invalid_number_of_accounts_2() { - let pre_states = vec![ - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([1; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([2; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([3; 32]), - }, - ]; - let _post_states = new_definition(&pre_states, [0xca, 0xfe, 0xca, 0xfe, 0xca, 0xfe], 10); - } - - #[should_panic(expected = "Definition target account must have default values")] - #[test] - fn test_new_definition_non_default_first_account_should_fail() { - let pre_states = vec![ - AccountWithMetadata { - account: Account { - program_owner: [1, 2, 3, 4, 5, 6, 7, 8], - ..Account::default() - }, - is_authorized: true, - account_id: AccountId::new([1; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([2; 32]), - }, - ]; - let _post_states = new_definition(&pre_states, [0xca, 0xfe, 0xca, 0xfe, 0xca, 0xfe], 10); - } - - #[should_panic(expected = "Holding target account must have default values")] - #[test] - fn test_new_definition_non_default_second_account_should_fail() { - let pre_states = vec![ - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([1; 32]), - }, - AccountWithMetadata { - account: Account { - program_owner: [1, 2, 3, 4, 5, 6, 7, 8], - ..Account::default() - }, - is_authorized: true, - account_id: AccountId::new([2; 32]), - }, - ]; - let _post_states = new_definition(&pre_states, [0xca, 0xfe, 0xca, 0xfe, 0xca, 0xfe], 10); - } - - #[test] - fn test_new_definition_with_valid_inputs_succeeds() { - let pre_states = vec![ - AccountForTests::definition_account_uninit(), - AccountForTests::holding_account_uninit(), - ]; - - let post_states = new_definition(&pre_states, [2u8; 6], BalanceForTests::init_supply()); - - let [definition_account, holding_account] = post_states.try_into().ok().unwrap(); - assert!( - *definition_account.account() - == AccountForTests::definition_account_unclaimed().account - ); - - assert!(*holding_account.account() == AccountForTests::holding_account_unclaimed().account); - } - - #[should_panic(expected = "Invalid number of input accounts")] - #[test] - fn test_call_transfer_with_invalid_number_of_accounts_1() { - let pre_states = vec![AccountWithMetadata { - account: 
Account::default(), - is_authorized: true, - account_id: AccountId::new([1; 32]), - }]; - let _post_states = transfer(&pre_states, 10); - } - - #[should_panic(expected = "Invalid number of input accounts")] - #[test] - fn test_call_transfer_with_invalid_number_of_accounts_2() { - let pre_states = vec![ - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([1; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([2; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([3; 32]), - }, - ]; - let _post_states = transfer(&pre_states, 10); - } - - #[should_panic(expected = "Invalid sender data")] - #[test] - fn test_transfer_invalid_instruction_type_should_fail() { - let invalid_type = TOKEN_HOLDING_STANDARD ^ 1; - let pre_states = vec![ - AccountWithMetadata { - account: Account { - // First byte should be `TOKEN_HOLDING_STANDARD` for token holding accounts - data: Data::try_from(vec![invalid_type; TOKEN_HOLDING_DATA_SIZE]) - .expect("Invalid data"), - ..Account::default() - }, - is_authorized: true, - account_id: AccountId::new([1; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([2; 32]), - }, - ]; - let _post_states = transfer(&pre_states, 10); - } - - #[should_panic(expected = "Invalid sender data")] - #[test] - fn test_transfer_invalid_data_size_should_fail_1() { - let pre_states = vec![ - AccountWithMetadata { - account: Account { - // Data must be of exact length `TOKEN_HOLDING_DATA_SIZE` - data: Data::try_from(vec![1; TOKEN_HOLDING_DATA_SIZE - 1]).unwrap(), - ..Account::default() - }, - is_authorized: true, - account_id: AccountId::new([1; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([2; 32]), - }, - ]; - let _post_states = transfer(&pre_states, 10); - } - - #[should_panic(expected = "Invalid sender data")] - #[test] - fn test_transfer_invalid_data_size_should_fail_2() { - let pre_states = vec![ - AccountWithMetadata { - account: Account { - // Data must be of exact length `TOKEN_HOLDING_DATA_SIZE` - data: Data::try_from(vec![1; TOKEN_HOLDING_DATA_SIZE - 1]).unwrap(), - ..Account::default() - }, - is_authorized: true, - account_id: AccountId::new([1; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([2; 32]), - }, - ]; - let _post_states = transfer(&pre_states, 10); - } - - #[should_panic(expected = "Sender and recipient definition id mismatch")] - #[test] - fn test_transfer_with_different_definition_ids_should_fail() { - let pre_states = vec![ - AccountForTests::holding_same_definition_with_authorization(), - AccountForTests::holding_different_definition(), - ]; - let _post_states = transfer(&pre_states, 10); - } - - #[should_panic(expected = "Insufficient balance")] - #[test] - fn test_transfer_with_insufficient_balance_should_fail() { - let pre_states = vec![ - AccountForTests::holding_same_definition_with_authorization(), - AccountForTests::holding_account_same_definition_mint(), - ]; - // Attempt to transfer 38 tokens - let _post_states = transfer(&pre_states, BalanceForTests::burn_insufficient()); - } - - #[should_panic(expected = "Sender authorization is missing")] - #[test] - fn test_transfer_without_sender_authorization_should_fail() { - let mut def_data = Vec::::new(); - 
def_data.extend_from_slice(&[1; TOKEN_DEFINITION_DATA_SIZE - 16]); - def_data.extend_from_slice(&u128::to_le_bytes(37)); - - let pre_states = vec![ - AccountWithMetadata { - account: Account { - // Account with balance 37 - data: Data::try_from(def_data).unwrap(), - ..Account::default() - }, - is_authorized: false, - account_id: AccountId::new([1; 32]), - }, - AccountWithMetadata { - account: Account { - data: Data::try_from(vec![1; TOKEN_HOLDING_DATA_SIZE - 1]).unwrap(), - ..Account::default() - }, - is_authorized: true, - account_id: AccountId::new([2; 32]), - }, - ]; - let _post_states = transfer(&pre_states, 37); - } - - #[test] - fn test_transfer_with_valid_inputs_succeeds() { - let pre_states = vec![ - AccountForTests::holding_account_init(), - AccountForTests::holding_account2_init(), - ]; - let post_states = transfer(&pre_states, BalanceForTests::transfer_amount()); - let [sender_post, recipient_post] = post_states.try_into().ok().unwrap(); - - assert!( - *sender_post.account() == AccountForTests::holding_account_init_post_transfer().account - ); - assert!( - *recipient_post.account() - == AccountForTests::holding_account2_init_post_transfer().account - ); - } - - #[should_panic(expected = "Invalid balance for NFT Master transfer")] - #[test] - fn test_transfer_with_master_nft_invalid_balance() { - let pre_states = vec![ - AccountForTests::holding_account_master_nft(), - AccountForTests::holding_account_uninit(), - ]; - let _post_states = transfer(&pre_states, BalanceForTests::transfer_amount()); - } - - #[should_panic(expected = "Invalid balance in recipient account for NFT transfer")] - #[test] - fn test_transfer_with_master_nft_invalid_recipient_balance() { - let pre_states = vec![ - AccountForTests::holding_account_master_nft(), - AccountForTests::holding_account_with_master_nft_transferred_to(), - ]; - let _post_states = transfer(&pre_states, BalanceForTests::printable_copies()); - } - - #[test] - fn test_transfer_with_master_nft_success() { - let pre_states = vec![ - AccountForTests::holding_account_master_nft(), - AccountForTests::holding_account_uninit(), - ]; - let post_states = transfer(&pre_states, BalanceForTests::printable_copies()); - let [sender_post, recipient_post] = post_states.try_into().ok().unwrap(); - - assert!( - *sender_post.account() - == AccountForTests::holding_account_master_nft_post_transfer().account - ); - assert!( - *recipient_post.account() - == AccountForTests::holding_account_with_master_nft_transferred_to().account - ); - } - - #[test] - fn test_token_initialize_account_succeeds() { - let pre_states = vec![ - AccountForTests::holding_account_init(), - AccountForTests::holding_account2_init(), - ]; - let post_states = transfer(&pre_states, BalanceForTests::transfer_amount()); - let [sender_post, recipient_post] = post_states.try_into().ok().unwrap(); - - assert!( - *sender_post.account() == AccountForTests::holding_account_init_post_transfer().account - ); - assert!( - *recipient_post.account() - == AccountForTests::holding_account2_init_post_transfer().account - ); - } - - #[test] - #[should_panic(expected = "Invalid number of accounts")] - fn test_burn_invalid_number_of_accounts() { - let pre_states = vec![AccountForTests::definition_account_auth()]; - let _post_states = burn(&pre_states, BalanceForTests::burn_success()); - } - - #[test] - #[should_panic(expected = "Mismatch Token Definition and Token Holding")] - fn test_burn_mismatch_def() { - let pre_states = vec![ - AccountForTests::definition_account_auth(), - 
AccountForTests::holding_different_definition(), - ]; - let _post_states = burn(&pre_states, BalanceForTests::burn_success()); - } - - #[test] - #[should_panic(expected = "Authorization is missing")] - fn test_burn_missing_authorization() { - let pre_states = vec![ - AccountForTests::definition_account_auth(), - AccountForTests::holding_same_definition_without_authorization(), - ]; - let _post_states = burn(&pre_states, BalanceForTests::burn_success()); - } - - #[test] - #[should_panic(expected = "Insufficient balance to burn")] - fn test_burn_insufficient_balance() { - let pre_states = vec![ - AccountForTests::definition_account_auth(), - AccountForTests::holding_same_definition_with_authorization(), - ]; - let _post_states = burn(&pre_states, BalanceForTests::burn_insufficient()); - } - - #[test] - #[should_panic(expected = "Total supply underflow")] - fn test_burn_total_supply_underflow() { - let pre_states = vec![ - AccountForTests::definition_account_auth(), - AccountForTests::holding_same_definition_with_authorization_and_large_balance(), - ]; - let _post_states = burn(&pre_states, BalanceForTests::mint_overflow()); - } - - #[test] - fn test_burn_success() { - let pre_states = vec![ - AccountForTests::definition_account_auth(), - AccountForTests::holding_same_definition_with_authorization(), - ]; - let post_states = burn(&pre_states, BalanceForTests::burn_success()); - - let def_post = post_states[0].clone(); - let holding_post = post_states[1].clone(); - - assert!(*def_post.account() == AccountForTests::definition_account_post_burn().account); - assert!(*holding_post.account() == AccountForTests::holding_account_post_burn().account); - } - - #[test] - #[should_panic(expected = "Invalid number of accounts")] - fn test_mint_invalid_number_of_accounts_1() { - let pre_states = vec![AccountForTests::definition_account_auth()]; - let _post_states = mint_additional_supply(&pre_states, BalanceForTests::mint_success()); - } - - #[test] - #[should_panic(expected = "Invalid number of accounts")] - fn test_mint_invalid_number_of_accounts_2() { - let pre_states = vec![ - AccountForTests::definition_account_auth(), - AccountForTests::holding_account_same_definition_mint(), - AccountForTests::holding_same_definition_with_authorization(), - ]; - let _post_states = mint_additional_supply(&pre_states, BalanceForTests::mint_success()); - } - - #[test] - #[should_panic(expected = "Holding account must be valid")] - fn test_mint_not_valid_holding_account() { - let pre_states = vec![ - AccountForTests::definition_account_auth(), - AccountForTests::definition_account_without_auth(), - ]; - let _post_states = mint_additional_supply(&pre_states, BalanceForTests::mint_success()); - } - - #[test] - #[should_panic(expected = "Definition account must be valid")] - fn test_mint_not_valid_definition_account() { - let pre_states = vec![ - AccountForTests::holding_same_definition_with_authorization(), - AccountForTests::holding_same_definition_without_authorization(), - ]; - let _post_states = mint_additional_supply(&pre_states, BalanceForTests::mint_success()); - } - - #[test] - #[should_panic(expected = "Definition authorization is missing")] - fn test_mint_missing_authorization() { - let pre_states = vec![ - AccountForTests::definition_account_without_auth(), - AccountForTests::holding_same_definition_without_authorization(), - ]; - let _post_states = mint_additional_supply(&pre_states, BalanceForTests::mint_success()); - } - - #[test] - #[should_panic(expected = "Mismatch Token Definition and Token Holding")] - 
fn test_mint_mismatched_token_definition() { - let pre_states = vec![ - AccountForTests::definition_account_auth(), - AccountForTests::holding_different_definition(), - ]; - let _post_states = mint_additional_supply(&pre_states, BalanceForTests::mint_success()); - } - - #[test] - fn test_mint_success() { - let pre_states = vec![ - AccountForTests::definition_account_auth(), - AccountForTests::holding_same_definition_without_authorization(), - ]; - let post_states = mint_additional_supply(&pre_states, BalanceForTests::mint_success()); - - let def_post = post_states[0].clone(); - let holding_post = post_states[1].clone(); - - assert!(*def_post.account() == AccountForTests::definition_account_mint().account); - assert!( - *holding_post.account() - == AccountForTests::holding_account_same_definition_mint().account - ); - } - - #[test] - fn test_mint_uninit_holding_success() { - let pre_states = vec![ - AccountForTests::definition_account_auth(), - AccountForTests::holding_account_uninit(), - ]; - let post_states = mint_additional_supply(&pre_states, BalanceForTests::mint_success()); - - let def_post = post_states[0].clone(); - let holding_post = post_states[1].clone(); - - assert!(*def_post.account() == AccountForTests::definition_account_mint().account); - assert!(*holding_post.account() == AccountForTests::init_mint().account); - assert!(holding_post.requires_claim()); - } - - #[test] - #[should_panic(expected = "Total supply overflow")] - fn test_mint_total_supply_overflow() { - let pre_states = vec![ - AccountForTests::definition_account_auth(), - AccountForTests::holding_same_definition_without_authorization(), - ]; - let _post_states = mint_additional_supply(&pre_states, BalanceForTests::mint_overflow()); - } - - #[test] - #[should_panic(expected = "New balance overflow")] - fn test_mint_holding_account_overflow() { - let pre_states = vec![ - AccountForTests::definition_account_auth(), - AccountForTests::holding_same_definition_without_authorization_overflow(), - ]; - let _post_states = mint_additional_supply(&pre_states, BalanceForTests::mint_overflow()); - } - - #[test] - #[should_panic( - expected = "Token Definition's standard does not permit minting additional supply" - )] - fn test_mint_cannot_mint_unmintable_tokens() { - let pre_states = vec![ - AccountForTests::definition_account_with_authorization_nonfungible(), - AccountForTests::holding_same_definition_without_authorization(), - ]; - let _post_states = mint_additional_supply(&pre_states, BalanceForTests::mint_success()); - } - - #[should_panic(expected = "Invalid number of input accounts")] - #[test] - fn test_call_new_definition_metadata_with_invalid_number_of_accounts_1() { - let name = [0xca, 0xfe, 0xca, 0xfe, 0xca, 0xfe]; - let total_supply = 15u128; - let token_standard = 0u8; - let metadata_standard = 0u8; - let metadata_values: Data = Data::try_from([1u8; 450].to_vec()).unwrap(); - - let pre_states = vec![AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([1; 32]), - }]; - let _post_states = new_definition_with_metadata( - &pre_states, - name, - total_supply, - token_standard, - metadata_standard, - &metadata_values, - ); - } - - #[should_panic(expected = "Invalid number of input accounts")] - #[test] - fn test_call_new_definition_metadata_with_invalid_number_of_accounts_2() { - let name = [0xca, 0xfe, 0xca, 0xfe, 0xca, 0xfe]; - let total_supply = 15u128; - let token_standard = 0u8; - let metadata_standard = 0u8; - let metadata_values: Data = Data::try_from([1u8; 
450].to_vec()).unwrap(); - - let pre_states = vec![ - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([1; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([2; 32]), - }, - ]; - let _post_states = new_definition_with_metadata( - &pre_states, - name, - total_supply, - token_standard, - metadata_standard, - &metadata_values, - ); - } - - #[should_panic(expected = "Invalid number of input accounts")] - #[test] - fn test_call_new_definition_metadata_with_invalid_number_of_accounts_3() { - let name = [0xca, 0xfe, 0xca, 0xfe, 0xca, 0xfe]; - let total_supply = 15u128; - let token_standard = 0u8; - let metadata_standard = 0u8; - let metadata_values: Data = Data::try_from([1u8; 450].to_vec()).unwrap(); - - let pre_states = vec![ - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([1; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([2; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([3; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([4; 32]), - }, - ]; - let _post_states = new_definition_with_metadata( - &pre_states, - name, - total_supply, - token_standard, - metadata_standard, - &metadata_values, - ); - } - - #[should_panic(expected = "Definition target account must have default values")] - #[test] - fn test_call_new_definition_metadata_with_init_definition() { - let name = [0xca, 0xfe, 0xca, 0xfe, 0xca, 0xfe]; - let total_supply = 15u128; - let token_standard = 0u8; - let metadata_standard = 0u8; - let metadata_values: Data = Data::try_from([1u8; 450].to_vec()).unwrap(); - - let pre_states = vec![ - AccountForTests::definition_account_auth(), - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([2; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([3; 32]), - }, - ]; - let _post_states = new_definition_with_metadata( - &pre_states, - name, - total_supply, - token_standard, - metadata_standard, - &metadata_values, - ); - } - - #[should_panic(expected = "Metadata target account must have default values")] - #[test] - fn test_call_new_definition_metadata_with_init_metadata() { - let name = [0xca, 0xfe, 0xca, 0xfe, 0xca, 0xfe]; - let total_supply = 15u128; - let token_standard = 0u8; - let metadata_standard = 0u8; - let metadata_values: Data = Data::try_from([1u8; 450].to_vec()).unwrap(); - - let pre_states = vec![ - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([1; 32]), - }, - AccountForTests::holding_account_same_definition_mint(), - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([3; 32]), - }, - ]; - let _post_states = new_definition_with_metadata( - &pre_states, - name, - total_supply, - token_standard, - metadata_standard, - &metadata_values, - ); - } - - #[should_panic(expected = "Holding target account must have default values")] - #[test] - fn test_call_new_definition_metadata_with_init_holding() { - let name = [0xca, 0xfe, 0xca, 0xfe, 0xca, 0xfe]; - let total_supply = 15u128; - let token_standard = 0u8; - let metadata_standard = 0u8; - let metadata_values: Data = 
Data::try_from([1u8; 450].to_vec()).unwrap(); - - let pre_states = vec![ - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([1; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([2; 32]), - }, - AccountForTests::holding_account_same_definition_mint(), - ]; - let _post_states = new_definition_with_metadata( - &pre_states, - name, - total_supply, - token_standard, - metadata_standard, - &metadata_values, - ); - } - - #[should_panic(expected = "Metadata values data should be 450 bytes")] - #[test] - fn test_call_new_definition_metadata_with_too_short_metadata_length() { - let name = [0xca, 0xfe, 0xca, 0xfe, 0xca, 0xfe]; - let total_supply = 15u128; - let token_standard = 0u8; - let metadata_standard = 0u8; - let metadata_values: Data = Data::try_from([1u8; 449].to_vec()).unwrap(); - - let pre_states = vec![ - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([1; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([2; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([3; 32]), - }, - ]; - let _post_states = new_definition_with_metadata( - &pre_states, - name, - total_supply, - token_standard, - metadata_standard, - &metadata_values, - ); - } - - #[should_panic(expected = "Metadata values data should be 450 bytes")] - #[test] - fn test_call_new_definition_metadata_with_too_long_metadata_length() { - let name = [0xca, 0xfe, 0xca, 0xfe, 0xca, 0xfe]; - let total_supply = 15u128; - let token_standard = 0u8; - let metadata_standard = 0u8; - let metadata_values: Data = Data::try_from([1u8; 451].to_vec()).unwrap(); - - let pre_states = vec![ - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([1; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([2; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([3; 32]), - }, - ]; - let _post_states = new_definition_with_metadata( - &pre_states, - name, - total_supply, - token_standard, - metadata_standard, - &metadata_values, - ); - } - - #[should_panic(expected = "Invalid Token Standard provided")] - #[test] - fn test_call_new_definition_metadata_with_invalid_token_standard() { - let name = [0xca, 0xfe, 0xca, 0xfe, 0xca, 0xfe]; - let total_supply = 15u128; - let token_standard = 14u8; - let metadata_standard = 0u8; - let metadata_values: Data = Data::try_from([1u8; 450].to_vec()).unwrap(); - - let pre_states = vec![ - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([1; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([2; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([3; 32]), - }, - ]; - let _post_states = new_definition_with_metadata( - &pre_states, - name, - total_supply, - token_standard, - metadata_standard, - &metadata_values, - ); - } - - #[should_panic(expected = "Invalid Metadata Standadard provided")] - #[test] - fn test_call_new_definition_metadata_with_invalid_metadata_standard() { - let name = [0xca, 0xfe, 0xca, 0xfe, 0xca, 0xfe]; - let total_supply = 15u128; - 
let token_standard = 0u8; - let metadata_standard = 14u8; - let metadata_values: Data = Data::try_from([1u8; 450].to_vec()).unwrap(); - - let pre_states = vec![ - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([1; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([2; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([3; 32]), - }, - ]; - let _post_states = new_definition_with_metadata( - &pre_states, - name, - total_supply, - token_standard, - metadata_standard, - &metadata_values, - ); - } - - #[should_panic(expected = "Invalid total supply for the specified token supply")] - #[test] - fn test_call_new_definition_metadata_invalid_supply_for_nonfungible() { - let name = [0xca, 0xfe, 0xca, 0xfe, 0xca, 0xfe]; - let total_supply = 15u128; - let token_standard = TOKEN_STANDARD_NONFUNGIBLE; - let metadata_standard = 0u8; - let metadata_values: Data = Data::try_from([1u8; 450].to_vec()).unwrap(); - - let pre_states = vec![ - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([1; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([2; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([3; 32]), - }, - ]; - let _post_states = new_definition_with_metadata( - &pre_states, - name, - total_supply, - token_standard, - metadata_standard, - &metadata_values, - ); - } - - #[should_panic(expected = "Invalid number of accounts")] - #[test] - fn test_print_nft_invalid_number_of_accounts_1() { - let pre_states = vec![AccountForTests::holding_account_master_nft()]; - let _post_states = print_nft(&pre_states); - } - - #[should_panic(expected = "Invalid number of accounts")] - #[test] - fn test_print_nft_invalid_number_of_accounts_2() { - let pre_states = vec![ - AccountForTests::holding_account_master_nft(), - AccountForTests::definition_account_auth(), - AccountForTests::holding_account_uninit(), - ]; - let _post_states = print_nft(&pre_states); - } - - #[should_panic(expected = "Master NFT Account must be authorized")] - #[test] - fn test_print_nft_master_account_must_be_authorized() { - let pre_states = vec![ - AccountForTests::holding_account_uninit(), - AccountForTests::holding_account_uninit(), - ]; - let _post_states = print_nft(&pre_states); - } - - #[should_panic(expected = "Printed Account must be uninitialized")] - #[test] - fn test_print_nft_print_account_initialized() { - let pre_states = vec![ - AccountForTests::holding_account_master_nft(), - AccountForTests::holding_account_init(), - ]; - let _post_states = print_nft(&pre_states); - } - - #[should_panic(expected = "Invalid Token Holding data")] - #[test] - fn test_print_nft_master_nft_invalid_token_holding() { - let pre_states = vec![ - AccountForTests::definition_account_auth(), - AccountForTests::holding_account_uninit(), - ]; - let _post_states = print_nft(&pre_states); - } - - #[should_panic(expected = "Invalid Token Holding provided as NFT Master Account")] - #[test] - fn test_print_nft_master_nft_not_nft_master_account() { - let pre_states = vec![ - AccountForTests::holding_account_init(), - AccountForTests::holding_account_uninit(), - ]; - let _post_states = print_nft(&pre_states); - } - - #[should_panic(expected = "Insufficient balance to print another NFT copy")] 
- #[test] - fn test_print_nft_master_nft_insufficient_balance() { - let pre_states = vec![ - AccountForTests::holding_account_master_nft_insufficient_balance(), - AccountForTests::holding_account_uninit(), - ]; - let _post_states = print_nft(&pre_states); - } - - #[test] - fn test_print_nft_success() { - let pre_states = vec![ - AccountForTests::holding_account_master_nft(), - AccountForTests::holding_account_uninit(), - ]; - let post_states = print_nft(&pre_states); - - let post_master_nft = post_states[0].account(); - let post_printed = post_states[1].account(); - - assert!( - *post_master_nft == AccountForTests::holding_account_master_nft_after_print().account - ); - assert!(*post_printed == AccountForTests::holding_account_printed_nft().account); - } + write_nssa_outputs(instruction_words, pre_states_clone, post_states); } diff --git a/programs/token/Cargo.toml b/programs/token/Cargo.toml new file mode 100644 index 00000000..39beb96a --- /dev/null +++ b/programs/token/Cargo.toml @@ -0,0 +1,9 @@ +[package] +name = "token_program" +version = "0.1.0" +edition = "2024" +license = { workspace = true } + +[dependencies] +nssa_core.workspace = true +token_core.workspace = true diff --git a/programs/token/core/Cargo.toml b/programs/token/core/Cargo.toml new file mode 100644 index 00000000..cf61a35f --- /dev/null +++ b/programs/token/core/Cargo.toml @@ -0,0 +1,10 @@ +[package] +name = "token_core" +version = "0.1.0" +edition = "2024" +license = { workspace = true } + +[dependencies] +nssa_core.workspace = true +serde.workspace = true +borsh.workspace = true diff --git a/programs/token/core/src/lib.rs b/programs/token/core/src/lib.rs new file mode 100644 index 00000000..140ae38f --- /dev/null +++ b/programs/token/core/src/lib.rs @@ -0,0 +1,241 @@ +//! This crate contains core data structures and utilities for the Token Program. + +use borsh::{BorshDeserialize, BorshSerialize}; +use nssa_core::account::{AccountId, Data}; +use serde::{Deserialize, Serialize}; + +/// Token Program Instruction. +#[derive(Serialize, Deserialize)] +pub enum Instruction { + /// Transfer tokens from sender to recipient. + /// + /// Required accounts: + /// - Sender's Token Holding account (authorized), + /// - Recipient's Token Holding account. + Transfer { amount_to_transfer: u128 }, + + /// Create a new fungible token definition without metadata. + /// + /// Required accounts: + /// - Token Definition account (uninitialized), + /// - Token Holding account (uninitialized). + NewFungibleDefinition { name: String, total_supply: u128 }, + + /// Create a new fungible or non-fungible token definition with metadata. + /// + /// Required accounts: + /// - Token Definition account (uninitialized), + /// - Token Holding account (uninitialized), + /// - Token Metadata account (uninitialized). + NewDefinitionWithMetadata { + new_definition: NewTokenDefinition, + /// Boxed to avoid large enum variant size + metadata: Box, + }, + + /// Initialize a token holding account for a given token definition. + /// + /// Required accounts: + /// - Token Definition account (initialized), + /// - Token Holding account (uninitialized), + InitializeAccount, + + /// Burn tokens from the holder's account. + /// + /// Required accounts: + /// - Token Definition account (initialized), + /// - Token Holding account (authorized). + Burn { amount_to_burn: u128 }, + + /// Mint new tokens to the holder's account. + /// + /// Required accounts: + /// - Token Definition account (authorized), + /// - Token Holding account (uninitialized or initialized). 
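An aside on the dispatcher in the new main.rs above: each match arm converts the Vec of pre-state accounts into a fixed-size array with try_into, so the conversion itself doubles as the account-count check. The sketch below illustrates that idiom in isolation; the helper name take_two and the generic parameter are illustrative only and are not part of the program.

    fn take_two<T>(pre_states: Vec<T>) -> (T, T) {
        // Vec<T> -> [T; 2] succeeds only when the length is exactly 2,
        // so a wrong number of accounts is rejected right here.
        let [first, second]: [T; 2] = pre_states
            .try_into()
            .unwrap_or_else(|_| panic!("instruction requires exactly two accounts"));
        (first, second)
    }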
+ Mint { amount_to_mint: u128 }, + + /// Print a new NFT from the master copy. + /// + /// Required accounts: + /// - NFT Master Token Holding account (authorized), + /// - NFT Printed Copy Token Holding account (uninitialized). + PrintNft, +} + +#[derive(Serialize, Deserialize)] +pub enum NewTokenDefinition { + Fungible { + name: String, + total_supply: u128, + }, + NonFungible { + name: String, + printable_supply: u128, + }, +} + +#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, BorshSerialize, BorshDeserialize)] +pub enum TokenDefinition { + Fungible { + name: String, + total_supply: u128, + metadata_id: Option, + }, + NonFungible { + name: String, + printable_supply: u128, + metadata_id: AccountId, + }, +} + +impl TryFrom<&Data> for TokenDefinition { + type Error = std::io::Error; + + fn try_from(data: &Data) -> Result { + TokenDefinition::try_from_slice(data.as_ref()) + } +} + +impl From<&TokenDefinition> for Data { + fn from(definition: &TokenDefinition) -> Self { + // Using size_of_val as size hint for Vec allocation + let mut data = Vec::with_capacity(std::mem::size_of_val(definition)); + + BorshSerialize::serialize(definition, &mut data) + .expect("Serialization to Vec should not fail"); + + Data::try_from(data).expect("Token definition encoded data should fit into Data") + } +} + +#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, BorshSerialize, BorshDeserialize)] +pub enum TokenHolding { + Fungible { + definition_id: AccountId, + balance: u128, + }, + NftMaster { + definition_id: AccountId, + /// The amount of printed copies left - 1 (1 reserved for master copy itself). + print_balance: u128, + }, + NftPrintedCopy { + definition_id: AccountId, + /// Whether nft is owned by the holder. + owned: bool, + }, +} + +impl TokenHolding { + pub fn zeroized_clone_from(other: &Self) -> Self { + match other { + TokenHolding::Fungible { definition_id, .. } => TokenHolding::Fungible { + definition_id: *definition_id, + balance: 0, + }, + TokenHolding::NftMaster { definition_id, .. } => TokenHolding::NftMaster { + definition_id: *definition_id, + print_balance: 0, + }, + TokenHolding::NftPrintedCopy { definition_id, .. } => TokenHolding::NftPrintedCopy { + definition_id: *definition_id, + owned: false, + }, + } + } + + pub fn zeroized_from_definition( + definition_id: AccountId, + definition: &TokenDefinition, + ) -> Self { + match definition { + TokenDefinition::Fungible { .. } => TokenHolding::Fungible { + definition_id, + balance: 0, + }, + TokenDefinition::NonFungible { .. } => TokenHolding::NftPrintedCopy { + definition_id, + owned: false, + }, + } + } + + pub fn definition_id(&self) -> AccountId { + match self { + TokenHolding::Fungible { definition_id, .. } => *definition_id, + TokenHolding::NftMaster { definition_id, .. } => *definition_id, + TokenHolding::NftPrintedCopy { definition_id, .. } => *definition_id, + } + } +} + +impl TryFrom<&Data> for TokenHolding { + type Error = std::io::Error; + + fn try_from(data: &Data) -> Result { + TokenHolding::try_from_slice(data.as_ref()) + } +} + +impl From<&TokenHolding> for Data { + fn from(holding: &TokenHolding) -> Self { + // Using size_of_val as size hint for Vec allocation + let mut data = Vec::with_capacity(std::mem::size_of_val(holding)); + + BorshSerialize::serialize(holding, &mut data) + .expect("Serialization to Vec should not fail"); + + Data::try_from(data).expect("Token holding encoded data should fit into Data") + } +} + +#[derive(Serialize, Deserialize)] +pub struct NewTokenMetadata { + /// Metadata standard. 
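The TokenDefinition and TokenHolding types above cross the account boundary through the Data wrapper using the Borsh-based From/TryFrom pair. A small round-trip sketch of how those conversions are expected to compose; the function name and the placeholder account id and balance are illustrative, not part of the crate:

    use nssa_core::account::{AccountId, Data};
    use token_core::TokenHolding;

    fn holding_roundtrip_sketch() {
        let holding = TokenHolding::Fungible {
            definition_id: AccountId::new([15; 32]),
            balance: 1_000,
        };
        // Borsh-encode into the on-account `Data` wrapper...
        let data = Data::from(&holding);
        // ...and decode it back; the derives above make this a lossless round trip.
        let decoded = TokenHolding::try_from(&data).expect("decoding freshly encoded data");
        assert_eq!(decoded, holding);
    }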
+ pub standard: MetadataStandard, + /// Pointer to off-chain metadata + pub uri: String, + /// Creators of the token. + pub creators: String, +} + +#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, BorshSerialize, BorshDeserialize)] +pub struct TokenMetadata { + /// Token Definition account id. + pub definition_id: AccountId, + /// Metadata standard . + pub standard: MetadataStandard, + /// Pointer to off-chain metadata. + pub uri: String, + /// Creators of the token. + pub creators: String, + /// Block id of primary sale. + pub primary_sale_date: u64, +} + +/// Metadata standard defining the expected format of JSON located off-chain. +#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, BorshSerialize, BorshDeserialize)] +pub enum MetadataStandard { + Simple, + Expanded, +} + +impl TryFrom<&Data> for TokenMetadata { + type Error = std::io::Error; + + fn try_from(data: &Data) -> Result { + TokenMetadata::try_from_slice(data.as_ref()) + } +} + +impl From<&TokenMetadata> for Data { + fn from(metadata: &TokenMetadata) -> Self { + // Using size_of_val as size hint for Vec allocation + let mut data = Vec::with_capacity(std::mem::size_of_val(metadata)); + + BorshSerialize::serialize(metadata, &mut data) + .expect("Serialization to Vec should not fail"); + + Data::try_from(data).expect("Token metadata encoded data should fit into Data") + } +} diff --git a/programs/token/src/burn.rs b/programs/token/src/burn.rs new file mode 100644 index 00000000..94637d92 --- /dev/null +++ b/programs/token/src/burn.rs @@ -0,0 +1,104 @@ +use nssa_core::{ + account::{AccountWithMetadata, Data}, + program::AccountPostState, +}; +use token_core::{TokenDefinition, TokenHolding}; + +pub fn burn( + definition_account: AccountWithMetadata, + user_holding_account: AccountWithMetadata, + amount_to_burn: u128, +) -> Vec { + assert!( + user_holding_account.is_authorized, + "Authorization is missing" + ); + + let mut definition = TokenDefinition::try_from(&definition_account.account.data) + .expect("Token Definition account must be valid"); + let mut holding = TokenHolding::try_from(&user_holding_account.account.data) + .expect("Token Holding account must be valid"); + + assert_eq!( + definition_account.account_id, + holding.definition_id(), + "Mismatch Token Definition and Token Holding" + ); + + match (&mut definition, &mut holding) { + ( + TokenDefinition::Fungible { + name: _, + metadata_id: _, + total_supply, + }, + TokenHolding::Fungible { + definition_id: _, + balance, + }, + ) => { + *balance = balance + .checked_sub(amount_to_burn) + .expect("Insufficient balance to burn"); + + *total_supply = total_supply + .checked_sub(amount_to_burn) + .expect("Total supply underflow"); + } + ( + TokenDefinition::NonFungible { + name: _, + printable_supply, + metadata_id: _, + }, + TokenHolding::NftMaster { + definition_id: _, + print_balance, + }, + ) => { + *printable_supply = printable_supply + .checked_sub(amount_to_burn) + .expect("Printable supply underflow"); + + *print_balance = print_balance + .checked_sub(amount_to_burn) + .expect("Insufficient balance to burn"); + } + ( + TokenDefinition::NonFungible { + name: _, + printable_supply, + metadata_id: _, + }, + TokenHolding::NftPrintedCopy { + definition_id: _, + owned, + }, + ) => { + assert_eq!( + amount_to_burn, 1, + "Invalid balance to burn for NFT Printed Copy" + ); + + assert!(*owned, "Cannot burn unowned NFT Printed Copy"); + + *printable_supply = printable_supply + .checked_sub(1) + .expect("Printable supply underflow"); + + *owned = false; + } + _ => 
panic!("Mismatched Token Definition and Token Holding types"), + } + + let mut definition_post = definition_account.account; + definition_post.data = Data::from(&definition); + + let mut holding_post = user_holding_account.account; + holding_post.data = Data::from(&holding); + + vec![ + AccountPostState::new(definition_post), + AccountPostState::new(holding_post), + ] +} diff --git a/programs/token/src/initialize.rs b/programs/token/src/initialize.rs new file mode 100644 index 00000000..744fdb64 --- /dev/null +++ b/programs/token/src/initialize.rs @@ -0,0 +1,34 @@ +use nssa_core::{ + account::{Account, AccountWithMetadata, Data}, + program::AccountPostState, +}; +use token_core::{TokenDefinition, TokenHolding}; + +pub fn initialize_account( + definition_account: AccountWithMetadata, + account_to_initialize: AccountWithMetadata, +) -> Vec { + assert_eq!( + account_to_initialize.account, + Account::default(), + "Only Uninitialized accounts can be initialized" + ); + + // TODO: #212 We should check that this is an account owned by the token program. + // This check can't be done here since the ID of the program is known only after compiling it + // + // Check definition account is valid + let definition = TokenDefinition::try_from(&definition_account.account.data) + .expect("Definition account must be valid"); + let holding = + TokenHolding::zeroized_from_definition(definition_account.account_id, &definition); + + let definition_post = definition_account.account; + let mut account_to_initialize = account_to_initialize.account; + account_to_initialize.data = Data::from(&holding); + + vec![ + AccountPostState::new(definition_post), + AccountPostState::new_claimed(account_to_initialize), + ] +} diff --git a/programs/token/src/lib.rs b/programs/token/src/lib.rs new file mode 100644 index 00000000..8b0698c5 --- /dev/null +++ b/programs/token/src/lib.rs @@ -0,0 +1,12 @@ +//! The Token Program implementation. 
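// Editor's sketch (not part of the patch): a minimal illustration of how the entry points
// exposed by this crate are intended to be driven. The runtime is assumed to supply the
// `AccountWithMetadata` inputs and to apply the returned `AccountPostState`s; the function
// name and the literal amount below are hypothetical.
fn _transfer_usage_sketch(
    sender: nssa_core::account::AccountWithMetadata,
    recipient: nssa_core::account::AccountWithMetadata,
) -> Vec<nssa_core::program::AccountPostState> {
    // Both holdings must reference the same token definition and the sender must be
    // authorized; `transfer` panics otherwise (see programs/token/src/transfer.rs).
    crate::transfer::transfer(sender, recipient, 100)
}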
+ +pub use token_core as core; + +pub mod burn; +pub mod initialize; +pub mod mint; +pub mod new_definition; +pub mod print_nft; +pub mod transfer; + +mod tests; diff --git a/programs/token/src/mint.rs b/programs/token/src/mint.rs new file mode 100644 index 00000000..2f17cc62 --- /dev/null +++ b/programs/token/src/mint.rs @@ -0,0 +1,71 @@ +use nssa_core::{ + account::{Account, AccountWithMetadata, Data}, + program::AccountPostState, +}; +use token_core::{TokenDefinition, TokenHolding}; + +pub fn mint( + definition_account: AccountWithMetadata, + user_holding_account: AccountWithMetadata, + amount_to_mint: u128, +) -> Vec { + assert!( + definition_account.is_authorized, + "Definition authorization is missing" + ); + + let mut definition = TokenDefinition::try_from(&definition_account.account.data) + .expect("Token Definition account must be valid"); + let mut holding = if user_holding_account.account == Account::default() { + TokenHolding::zeroized_from_definition(definition_account.account_id, &definition) + } else { + TokenHolding::try_from(&user_holding_account.account.data) + .expect("Token Holding account must be valid") + }; + + assert_eq!( + definition_account.account_id, + holding.definition_id(), + "Mismatch Token Definition and Token Holding" + ); + + match (&mut definition, &mut holding) { + ( + TokenDefinition::Fungible { + name: _, + metadata_id: _, + total_supply, + }, + TokenHolding::Fungible { + definition_id: _, + balance, + }, + ) => { + *balance = balance + .checked_add(amount_to_mint) + .expect("Balance overflow on minting"); + + *total_supply = total_supply + .checked_add(amount_to_mint) + .expect("Total supply overflow"); + } + ( + TokenDefinition::NonFungible { .. }, + TokenHolding::NftMaster { .. } | TokenHolding::NftPrintedCopy { .. 
}, + ) => { + panic!("Cannot mint additional supply for Non-Fungible Tokens"); + } + _ => panic!("Mismatched Token Definition and Token Holding types"), + } + + let mut definition_post = definition_account.account; + definition_post.data = Data::from(&definition); + + let mut holding_post = user_holding_account.account; + holding_post.data = Data::from(&holding); + + vec![ + AccountPostState::new(definition_post), + AccountPostState::new_claimed_if_default(holding_post), + ] +} diff --git a/programs/token/src/new_definition.rs b/programs/token/src/new_definition.rs new file mode 100644 index 00000000..b2a9ae9f --- /dev/null +++ b/programs/token/src/new_definition.rs @@ -0,0 +1,124 @@ +use nssa_core::{ + account::{Account, AccountWithMetadata, Data}, + program::AccountPostState, +}; +use token_core::{ + NewTokenDefinition, NewTokenMetadata, TokenDefinition, TokenHolding, TokenMetadata, +}; + +pub fn new_fungible_definition( + definition_target_account: AccountWithMetadata, + holding_target_account: AccountWithMetadata, + name: String, + total_supply: u128, +) -> Vec { + assert_eq!( + definition_target_account.account, + Account::default(), + "Definition target account must have default values" + ); + + assert_eq!( + holding_target_account.account, + Account::default(), + "Holding target account must have default values" + ); + + let token_definition = TokenDefinition::Fungible { + name, + total_supply, + metadata_id: None, + }; + let token_holding = TokenHolding::Fungible { + definition_id: definition_target_account.account_id, + balance: total_supply, + }; + + let mut definition_target_account_post = definition_target_account.account; + definition_target_account_post.data = Data::from(&token_definition); + + let mut holding_target_account_post = holding_target_account.account; + holding_target_account_post.data = Data::from(&token_holding); + + vec![ + AccountPostState::new_claimed(definition_target_account_post), + AccountPostState::new_claimed(holding_target_account_post), + ] +} + +pub fn new_definition_with_metadata( + definition_target_account: AccountWithMetadata, + holding_target_account: AccountWithMetadata, + metadata_target_account: AccountWithMetadata, + new_definition: NewTokenDefinition, + metadata: NewTokenMetadata, +) -> Vec { + assert_eq!( + definition_target_account.account, + Account::default(), + "Definition target account must have default values" + ); + + assert_eq!( + holding_target_account.account, + Account::default(), + "Holding target account must have default values" + ); + + assert_eq!( + metadata_target_account.account, + Account::default(), + "Metadata target account must have default values" + ); + + let (token_definition, token_holding) = match new_definition { + NewTokenDefinition::Fungible { name, total_supply } => ( + TokenDefinition::Fungible { + name, + total_supply, + metadata_id: Some(metadata_target_account.account_id), + }, + TokenHolding::Fungible { + definition_id: definition_target_account.account_id, + balance: total_supply, + }, + ), + NewTokenDefinition::NonFungible { + name, + printable_supply, + } => ( + TokenDefinition::NonFungible { + name, + printable_supply, + metadata_id: metadata_target_account.account_id, + }, + TokenHolding::NftMaster { + definition_id: definition_target_account.account_id, + print_balance: printable_supply, + }, + ), + }; + + let token_metadata = TokenMetadata { + definition_id: definition_target_account.account_id, + standard: metadata.standard, + uri: metadata.uri, + creators: metadata.creators, + 
primary_sale_date: 0u64, // TODO #261: future works to implement this
+    };
+
+    let mut definition_target_account_post = definition_target_account.account.clone();
+    definition_target_account_post.data = Data::from(&token_definition);
+
+    let mut holding_target_account_post = holding_target_account.account.clone();
+    holding_target_account_post.data = Data::from(&token_holding);
+
+    let mut metadata_target_account_post = metadata_target_account.account.clone();
+    metadata_target_account_post.data = Data::from(&token_metadata);
+
+    vec![
+        AccountPostState::new_claimed(definition_target_account_post),
+        AccountPostState::new_claimed(holding_target_account_post),
+        AccountPostState::new_claimed(metadata_target_account_post),
+    ]
+}
diff --git a/programs/token/src/print_nft.rs b/programs/token/src/print_nft.rs
new file mode 100644
index 00000000..d10533c1
--- /dev/null
+++ b/programs/token/src/print_nft.rs
@@ -0,0 +1,54 @@
+use nssa_core::{
+    account::{Account, AccountWithMetadata, Data},
+    program::AccountPostState,
+};
+use token_core::TokenHolding;
+
+pub fn print_nft(
+    master_account: AccountWithMetadata,
+    printed_account: AccountWithMetadata,
+) -> Vec<AccountPostState> {
+    assert!(
+        master_account.is_authorized,
+        "Master NFT Account must be authorized"
+    );
+
+    assert_eq!(
+        printed_account.account,
+        Account::default(),
+        "Printed Account must be uninitialized"
+    );
+
+    let mut master_account_data =
+        TokenHolding::try_from(&master_account.account.data).expect("Invalid Token Holding data");
+
+    let TokenHolding::NftMaster {
+        definition_id,
+        print_balance,
+    } = &mut master_account_data
+    else {
+        panic!("Invalid Token Holding provided as NFT Master Account");
+    };
+
+    let definition_id = *definition_id;
+
+    assert!(
+        *print_balance > 1,
+        "Insufficient balance to print another NFT copy"
+    );
+    *print_balance -= 1;
+
+    let mut master_account_post = master_account.account;
+    master_account_post.data = Data::from(&master_account_data);
+
+    let mut printed_account_post = printed_account.account;
+    printed_account_post.data = Data::from(&TokenHolding::NftPrintedCopy {
+        definition_id,
+        owned: true,
+    });
+
+    vec![
+        AccountPostState::new(master_account_post),
+        AccountPostState::new_claimed(printed_account_post),
+    ]
+}
diff --git a/programs/token/src/tests.rs b/programs/token/src/tests.rs
new file mode 100644
index 00000000..cf95c4d4
--- /dev/null
+++ b/programs/token/src/tests.rs
@@ -0,0 +1,1040 @@
+#![cfg(test)]
+
+use nssa_core::account::{Account, AccountId, AccountWithMetadata, Data};
+use token_core::{
+    MetadataStandard, NewTokenDefinition, NewTokenMetadata, TokenDefinition, TokenHolding,
+};
+
+use crate::{
+    burn::burn,
+    mint::mint,
+    new_definition::{new_definition_with_metadata, new_fungible_definition},
+    print_nft::print_nft,
+    transfer::transfer,
+};
+
+// TODO: Move tests to proper modules like burn, mint, transfer, etc., so that they are more
+// unit-test-like.
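// Editor's sketch (not part of the patch): the fixture constants declared below in
// `BalanceForTests` are related by simple arithmetic, which is what the burn/mint/transfer
// assertions rely on. A check like this one spells those relationships out; the helper names
// are the ones defined further down in this module.
#[test]
fn fixture_constants_are_arithmetically_consistent() {
    // burn: 100_000 - 500 = 99_500 and 1_000 - 500 = 500
    assert_eq!(
        BalanceForTests::init_supply() - BalanceForTests::burn_success(),
        BalanceForTests::init_supply_burned()
    );
    assert_eq!(
        BalanceForTests::holding_balance() - BalanceForTests::burn_success(),
        BalanceForTests::holding_balance_burned()
    );
    // mint: 100_000 + 50_000 = 150_000 and 1_000 + 50_000 = 51_000
    assert_eq!(
        BalanceForTests::init_supply() + BalanceForTests::mint_success(),
        BalanceForTests::init_supply_mint()
    );
    assert_eq!(
        BalanceForTests::holding_balance() + BalanceForTests::mint_success(),
        BalanceForTests::holding_balance_mint()
    );
    // transfer: both holdings start at 100_000; 100_000 - 5_000 = 95_000 and 100_000 + 5_000 = 105_000
    assert_eq!(
        BalanceForTests::init_supply() - BalanceForTests::transfer_amount(),
        BalanceForTests::sender_post_transfer()
    );
    assert_eq!(
        BalanceForTests::init_supply() + BalanceForTests::transfer_amount(),
        BalanceForTests::recipient_post_transfer()
    );
}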
+ +struct BalanceForTests; +struct IdForTests; + +struct AccountForTests; + +impl AccountForTests { + fn definition_account_auth() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [5u32; 8], + balance: 0u128, + data: Data::from(&TokenDefinition::Fungible { + name: String::from("test"), + total_supply: BalanceForTests::init_supply(), + metadata_id: None, + }), + nonce: 0, + }, + is_authorized: true, + account_id: IdForTests::pool_definition_id(), + } + } + + fn definition_account_without_auth() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [5u32; 8], + balance: 0u128, + data: Data::from(&TokenDefinition::Fungible { + name: String::from("test"), + total_supply: BalanceForTests::init_supply(), + metadata_id: None, + }), + nonce: 0, + }, + is_authorized: false, + account_id: IdForTests::pool_definition_id(), + } + } + + fn holding_different_definition() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [5u32; 8], + balance: 0u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForTests::pool_definition_id_diff(), + balance: BalanceForTests::holding_balance(), + }), + nonce: 0, + }, + is_authorized: true, + account_id: IdForTests::holding_id(), + } + } + + fn holding_same_definition_with_authorization() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [5u32; 8], + balance: 0u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForTests::pool_definition_id(), + balance: BalanceForTests::holding_balance(), + }), + nonce: 0, + }, + is_authorized: true, + account_id: IdForTests::holding_id(), + } + } + + fn holding_same_definition_without_authorization() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [5u32; 8], + balance: 0u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForTests::pool_definition_id(), + balance: BalanceForTests::holding_balance(), + }), + nonce: 0, + }, + is_authorized: false, + account_id: IdForTests::holding_id(), + } + } + + fn holding_same_definition_without_authorization_overflow() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [5u32; 8], + balance: 0u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForTests::pool_definition_id(), + balance: BalanceForTests::init_supply(), + }), + nonce: 0, + }, + is_authorized: false, + account_id: IdForTests::holding_id(), + } + } + + fn definition_account_post_burn() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [5u32; 8], + balance: 0u128, + data: Data::from(&TokenDefinition::Fungible { + name: String::from("test"), + total_supply: BalanceForTests::init_supply_burned(), + metadata_id: None, + }), + nonce: 0, + }, + is_authorized: true, + account_id: IdForTests::pool_definition_id(), + } + } + + fn holding_account_post_burn() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [5u32; 8], + balance: 0u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForTests::pool_definition_id(), + balance: BalanceForTests::holding_balance_burned(), + }), + nonce: 0, + }, + is_authorized: false, + account_id: IdForTests::holding_id(), + } + } + + fn holding_account_uninit() -> AccountWithMetadata { + AccountWithMetadata { + account: Account::default(), + is_authorized: false, + account_id: IdForTests::holding_id_2(), + } + } + + fn init_mint() -> 
AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [0u32; 8], + balance: 0u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForTests::pool_definition_id(), + balance: BalanceForTests::mint_success(), + }), + nonce: 0, + }, + is_authorized: false, + account_id: IdForTests::holding_id(), + } + } + + fn holding_account_same_definition_mint() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [5u32; 8], + balance: 0u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForTests::pool_definition_id(), + balance: BalanceForTests::holding_balance_mint(), + }), + nonce: 0, + }, + is_authorized: true, + account_id: IdForTests::pool_definition_id(), + } + } + + fn definition_account_mint() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [5u32; 8], + balance: 0u128, + data: Data::from(&TokenDefinition::Fungible { + name: String::from("test"), + total_supply: BalanceForTests::init_supply_mint(), + metadata_id: None, + }), + nonce: 0, + }, + is_authorized: true, + account_id: IdForTests::pool_definition_id(), + } + } + + fn holding_same_definition_with_authorization_and_large_balance() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [5u32; 8], + balance: 0u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForTests::pool_definition_id(), + balance: BalanceForTests::mint_overflow(), + }), + nonce: 0, + }, + is_authorized: true, + account_id: IdForTests::pool_definition_id(), + } + } + + fn definition_account_with_authorization_nonfungible() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [5u32; 8], + balance: 0u128, + data: Data::from(&TokenDefinition::NonFungible { + name: String::from("test"), + printable_supply: BalanceForTests::printable_copies(), + metadata_id: AccountId::new([0; 32]), + }), + nonce: 0, + }, + is_authorized: true, + account_id: IdForTests::pool_definition_id(), + } + } + + fn definition_account_uninit() -> AccountWithMetadata { + AccountWithMetadata { + account: Account::default(), + is_authorized: false, + account_id: IdForTests::pool_definition_id(), + } + } + + fn holding_account_init() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [5u32; 8], + balance: 0u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForTests::pool_definition_id(), + balance: BalanceForTests::init_supply(), + }), + nonce: 0, + }, + is_authorized: true, + account_id: IdForTests::holding_id(), + } + } + + fn definition_account_unclaimed() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [0u32; 8], + balance: 0u128, + data: Data::from(&TokenDefinition::Fungible { + name: String::from("test"), + total_supply: BalanceForTests::init_supply(), + metadata_id: None, + }), + nonce: 0, + }, + is_authorized: true, + account_id: IdForTests::pool_definition_id(), + } + } + + fn holding_account_unclaimed() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [0u32; 8], + balance: 0u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForTests::pool_definition_id(), + balance: BalanceForTests::init_supply(), + }), + nonce: 0, + }, + is_authorized: true, + account_id: IdForTests::holding_id(), + } + } + + fn holding_account2_init() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [5u32; 8], + balance: 0u128, 
+ data: Data::from(&TokenHolding::Fungible { + definition_id: IdForTests::pool_definition_id(), + balance: BalanceForTests::init_supply(), + }), + nonce: 0, + }, + is_authorized: true, + account_id: IdForTests::holding_id_2(), + } + } + + fn holding_account2_init_post_transfer() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [5u32; 8], + balance: 0u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForTests::pool_definition_id(), + balance: BalanceForTests::recipient_post_transfer(), + }), + nonce: 0, + }, + is_authorized: true, + account_id: IdForTests::holding_id_2(), + } + } + + fn holding_account_init_post_transfer() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [5u32; 8], + balance: 0u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForTests::pool_definition_id(), + balance: BalanceForTests::sender_post_transfer(), + }), + nonce: 0, + }, + is_authorized: true, + account_id: IdForTests::holding_id(), + } + } + + fn holding_account_master_nft() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [5u32; 8], + balance: 0u128, + data: Data::from(&TokenHolding::NftMaster { + definition_id: IdForTests::pool_definition_id(), + print_balance: BalanceForTests::printable_copies(), + }), + nonce: 0, + }, + is_authorized: true, + account_id: IdForTests::holding_id(), + } + } + + fn holding_account_master_nft_insufficient_balance() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [5u32; 8], + balance: 0u128, + data: Data::from(&TokenHolding::NftMaster { + definition_id: IdForTests::pool_definition_id(), + print_balance: 1, + }), + nonce: 0, + }, + is_authorized: true, + account_id: IdForTests::holding_id(), + } + } + + fn holding_account_master_nft_after_print() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [5u32; 8], + balance: 0u128, + data: Data::from(&TokenHolding::NftMaster { + definition_id: IdForTests::pool_definition_id(), + print_balance: BalanceForTests::printable_copies() - 1, + }), + nonce: 0, + }, + is_authorized: true, + account_id: IdForTests::holding_id(), + } + } + + fn holding_account_printed_nft() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [0u32; 8], + balance: 0u128, + data: Data::from(&TokenHolding::NftPrintedCopy { + definition_id: IdForTests::pool_definition_id(), + owned: true, + }), + nonce: 0, + }, + is_authorized: false, + account_id: IdForTests::holding_id(), + } + } + + fn holding_account_with_master_nft_transferred_to() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [0u32; 8], + balance: 0u128, + data: Data::from(&TokenHolding::NftMaster { + definition_id: IdForTests::pool_definition_id(), + print_balance: BalanceForTests::printable_copies(), + }), + nonce: 0, + }, + is_authorized: true, + account_id: IdForTests::holding_id_2(), + } + } + + fn holding_account_master_nft_post_transfer() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [5u32; 8], + balance: 0u128, + data: Data::from(&TokenHolding::NftMaster { + definition_id: IdForTests::pool_definition_id(), + print_balance: 0, + }), + nonce: 0, + }, + is_authorized: true, + account_id: IdForTests::holding_id(), + } + } +} + +impl BalanceForTests { + fn init_supply() -> u128 { + 100_000 + } + + fn holding_balance() -> u128 { + 1_000 + } + + fn init_supply_burned() -> u128 
{ + 99_500 + } + + fn holding_balance_burned() -> u128 { + 500 + } + + fn burn_success() -> u128 { + 500 + } + + fn burn_insufficient() -> u128 { + 1_500 + } + + fn mint_success() -> u128 { + 50_000 + } + + fn holding_balance_mint() -> u128 { + 51_000 + } + + fn mint_overflow() -> u128 { + u128::MAX - 40_000 + } + + fn init_supply_mint() -> u128 { + 150_000 + } + + fn sender_post_transfer() -> u128 { + 95_000 + } + + fn recipient_post_transfer() -> u128 { + 105_000 + } + + fn transfer_amount() -> u128 { + 5_000 + } + + fn printable_copies() -> u128 { + 10 + } +} + +impl IdForTests { + fn pool_definition_id() -> AccountId { + AccountId::new([15; 32]) + } + + fn pool_definition_id_diff() -> AccountId { + AccountId::new([16; 32]) + } + + fn holding_id() -> AccountId { + AccountId::new([17; 32]) + } + + fn holding_id_2() -> AccountId { + AccountId::new([42; 32]) + } +} + +#[should_panic(expected = "Definition target account must have default values")] +#[test] +fn test_new_definition_non_default_first_account_should_fail() { + let definition_account = AccountWithMetadata { + account: Account { + program_owner: [1, 2, 3, 4, 5, 6, 7, 8], + ..Account::default() + }, + is_authorized: true, + account_id: AccountId::new([1; 32]), + }; + let holding_account = AccountWithMetadata { + account: Account::default(), + is_authorized: true, + account_id: AccountId::new([2; 32]), + }; + let _post_states = new_fungible_definition( + definition_account, + holding_account, + String::from("test"), + 10, + ); +} + +#[should_panic(expected = "Holding target account must have default values")] +#[test] +fn test_new_definition_non_default_second_account_should_fail() { + let definition_account = AccountWithMetadata { + account: Account::default(), + is_authorized: true, + account_id: AccountId::new([1; 32]), + }; + let holding_account = AccountWithMetadata { + account: Account { + program_owner: [1, 2, 3, 4, 5, 6, 7, 8], + ..Account::default() + }, + is_authorized: true, + account_id: AccountId::new([2; 32]), + }; + let _post_states = new_fungible_definition( + definition_account, + holding_account, + String::from("test"), + 10, + ); +} + +#[test] +fn test_new_definition_with_valid_inputs_succeeds() { + let definition_account = AccountForTests::definition_account_uninit(); + let holding_account = AccountForTests::holding_account_uninit(); + + let post_states = new_fungible_definition( + definition_account, + holding_account, + String::from("test"), + BalanceForTests::init_supply(), + ); + + let [definition_account, holding_account] = post_states.try_into().unwrap(); + assert_eq!( + *definition_account.account(), + AccountForTests::definition_account_unclaimed().account + ); + + assert_eq!( + *holding_account.account(), + AccountForTests::holding_account_unclaimed().account + ); +} + +#[should_panic(expected = "Sender and recipient definition id mismatch")] +#[test] +fn test_transfer_with_different_definition_ids_should_fail() { + let sender = AccountForTests::holding_same_definition_with_authorization(); + let recipient = AccountForTests::holding_different_definition(); + let _post_states = transfer(sender, recipient, 10); +} + +#[should_panic(expected = "Insufficient balance")] +#[test] +fn test_transfer_with_insufficient_balance_should_fail() { + let sender = AccountForTests::holding_same_definition_with_authorization(); + let recipient = AccountForTests::holding_account_same_definition_mint(); + // Attempt to transfer more than balance + let _post_states = transfer(sender, recipient, 
BalanceForTests::burn_insufficient()); +} + +#[should_panic(expected = "Sender authorization is missing")] +#[test] +fn test_transfer_without_sender_authorization_should_fail() { + let sender = AccountForTests::holding_same_definition_without_authorization(); + let recipient = AccountForTests::holding_account_uninit(); + let _post_states = transfer(sender, recipient, 37); +} + +#[test] +fn test_transfer_with_valid_inputs_succeeds() { + let sender = AccountForTests::holding_account_init(); + let recipient = AccountForTests::holding_account2_init(); + let post_states = transfer(sender, recipient, BalanceForTests::transfer_amount()); + let [sender_post, recipient_post] = post_states.try_into().unwrap(); + + assert_eq!( + *sender_post.account(), + AccountForTests::holding_account_init_post_transfer().account + ); + assert_eq!( + *recipient_post.account(), + AccountForTests::holding_account2_init_post_transfer().account + ); +} + +#[should_panic(expected = "Invalid balance for NFT Master transfer")] +#[test] +fn test_transfer_with_master_nft_invalid_balance() { + let sender = AccountForTests::holding_account_master_nft(); + let recipient = AccountForTests::holding_account_uninit(); + let _post_states = transfer(sender, recipient, BalanceForTests::transfer_amount()); +} + +#[should_panic(expected = "Invalid balance in recipient account for NFT transfer")] +#[test] +fn test_transfer_with_master_nft_invalid_recipient_balance() { + let sender = AccountForTests::holding_account_master_nft(); + let recipient = AccountForTests::holding_account_with_master_nft_transferred_to(); + let _post_states = transfer(sender, recipient, BalanceForTests::printable_copies()); +} + +#[test] +fn test_transfer_with_master_nft_success() { + let sender = AccountForTests::holding_account_master_nft(); + let recipient = AccountForTests::holding_account_uninit(); + let post_states = transfer(sender, recipient, BalanceForTests::printable_copies()); + let [sender_post, recipient_post] = post_states.try_into().unwrap(); + + assert_eq!( + *sender_post.account(), + AccountForTests::holding_account_master_nft_post_transfer().account + ); + assert_eq!( + *recipient_post.account(), + AccountForTests::holding_account_with_master_nft_transferred_to().account + ); +} + +#[test] +fn test_token_initialize_account_succeeds() { + let sender = AccountForTests::holding_account_init(); + let recipient = AccountForTests::holding_account2_init(); + let post_states = transfer(sender, recipient, BalanceForTests::transfer_amount()); + let [sender_post, recipient_post] = post_states.try_into().unwrap(); + + assert_eq!( + *sender_post.account(), + AccountForTests::holding_account_init_post_transfer().account + ); + assert_eq!( + *recipient_post.account(), + AccountForTests::holding_account2_init_post_transfer().account + ); +} + +#[test] +#[should_panic(expected = "Mismatch Token Definition and Token Holding")] +fn test_burn_mismatch_def() { + let definition_account = AccountForTests::definition_account_auth(); + let holding_account = AccountForTests::holding_different_definition(); + let _post_states = burn( + definition_account, + holding_account, + BalanceForTests::burn_success(), + ); +} + +#[test] +#[should_panic(expected = "Authorization is missing")] +fn test_burn_missing_authorization() { + let definition_account = AccountForTests::definition_account_auth(); + let holding_account = AccountForTests::holding_same_definition_without_authorization(); + let _post_states = burn( + definition_account, + holding_account, + 
BalanceForTests::burn_success(), + ); +} + +#[test] +#[should_panic(expected = "Insufficient balance to burn")] +fn test_burn_insufficient_balance() { + let definition_account = AccountForTests::definition_account_auth(); + let holding_account = AccountForTests::holding_same_definition_with_authorization(); + let _post_states = burn( + definition_account, + holding_account, + BalanceForTests::burn_insufficient(), + ); +} + +#[test] +#[should_panic(expected = "Total supply underflow")] +fn test_burn_total_supply_underflow() { + let definition_account = AccountForTests::definition_account_auth(); + let holding_account = + AccountForTests::holding_same_definition_with_authorization_and_large_balance(); + let _post_states = burn( + definition_account, + holding_account, + BalanceForTests::mint_overflow(), + ); +} + +#[test] +fn test_burn_success() { + let definition_account = AccountForTests::definition_account_auth(); + let holding_account = AccountForTests::holding_same_definition_with_authorization(); + let post_states = burn( + definition_account, + holding_account, + BalanceForTests::burn_success(), + ); + + let [def_post, holding_post] = post_states.try_into().unwrap(); + + assert_eq!( + *def_post.account(), + AccountForTests::definition_account_post_burn().account + ); + assert_eq!( + *holding_post.account(), + AccountForTests::holding_account_post_burn().account + ); +} + +#[test] +#[should_panic(expected = "Holding account must be valid")] +fn test_mint_not_valid_holding_account() { + let definition_account = AccountForTests::definition_account_auth(); + let holding_account = AccountForTests::definition_account_without_auth(); + let _post_states = mint( + definition_account, + holding_account, + BalanceForTests::mint_success(), + ); +} + +#[test] +#[should_panic(expected = "Definition account must be valid")] +fn test_mint_not_valid_definition_account() { + let definition_account = AccountForTests::holding_same_definition_with_authorization(); + let holding_account = AccountForTests::holding_same_definition_without_authorization(); + let _post_states = mint( + definition_account, + holding_account, + BalanceForTests::mint_success(), + ); +} + +#[test] +#[should_panic(expected = "Definition authorization is missing")] +fn test_mint_missing_authorization() { + let definition_account = AccountForTests::definition_account_without_auth(); + let holding_account = AccountForTests::holding_same_definition_without_authorization(); + let _post_states = mint( + definition_account, + holding_account, + BalanceForTests::mint_success(), + ); +} + +#[test] +#[should_panic(expected = "Mismatch Token Definition and Token Holding")] +fn test_mint_mismatched_token_definition() { + let definition_account = AccountForTests::definition_account_auth(); + let holding_account = AccountForTests::holding_different_definition(); + let _post_states = mint( + definition_account, + holding_account, + BalanceForTests::mint_success(), + ); +} + +#[test] +fn test_mint_success() { + let definition_account = AccountForTests::definition_account_auth(); + let holding_account = AccountForTests::holding_same_definition_without_authorization(); + let post_states = mint( + definition_account, + holding_account, + BalanceForTests::mint_success(), + ); + + let [def_post, holding_post] = post_states.try_into().unwrap(); + + assert_eq!( + *def_post.account(), + AccountForTests::definition_account_mint().account + ); + assert_eq!( + *holding_post.account(), + AccountForTests::holding_account_same_definition_mint().account + ); +} 
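// Editor's note (not part of the patch): several of the overflow and underflow tests in this
// module lean on the size of `mint_overflow()` = u128::MAX - 40_000. Adding it to
// `init_supply()` (100_000) exceeds u128::MAX, so `checked_add` yields None ("Total supply
// overflow"); subtracting it from `init_supply()` would go below zero, so `checked_sub` yields
// None ("Total supply underflow"). This sketch just spells out that arithmetic.
#[test]
fn mint_overflow_fixture_overflows_and_underflows() {
    let supply = BalanceForTests::init_supply();
    let huge = BalanceForTests::mint_overflow();
    assert!(supply.checked_add(huge).is_none());
    assert!(supply.checked_sub(huge).is_none());
}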
+ +#[test] +fn test_mint_uninit_holding_success() { + let definition_account = AccountForTests::definition_account_auth(); + let holding_account = AccountForTests::holding_account_uninit(); + let post_states = mint( + definition_account, + holding_account, + BalanceForTests::mint_success(), + ); + + let [def_post, holding_post] = post_states.try_into().unwrap(); + + assert_eq!( + *def_post.account(), + AccountForTests::definition_account_mint().account + ); + assert_eq!( + *holding_post.account(), + AccountForTests::init_mint().account + ); + assert!(holding_post.requires_claim()); +} + +#[test] +#[should_panic(expected = "Total supply overflow")] +fn test_mint_total_supply_overflow() { + let definition_account = AccountForTests::definition_account_auth(); + let holding_account = AccountForTests::holding_same_definition_without_authorization(); + let _post_states = mint( + definition_account, + holding_account, + BalanceForTests::mint_overflow(), + ); +} + +#[test] +#[should_panic(expected = "Balance overflow on minting")] +fn test_mint_holding_account_overflow() { + let definition_account = AccountForTests::definition_account_auth(); + let holding_account = AccountForTests::holding_same_definition_without_authorization_overflow(); + let _post_states = mint( + definition_account, + holding_account, + BalanceForTests::mint_overflow(), + ); +} + +#[test] +#[should_panic(expected = "Cannot mint additional supply for Non-Fungible Tokens")] +fn test_mint_cannot_mint_unmintable_tokens() { + let definition_account = AccountForTests::definition_account_with_authorization_nonfungible(); + let holding_account = AccountForTests::holding_account_master_nft(); + let _post_states = mint( + definition_account, + holding_account, + BalanceForTests::mint_success(), + ); +} + +#[should_panic(expected = "Definition target account must have default values")] +#[test] +fn test_call_new_definition_metadata_with_init_definition() { + let definition_account = AccountForTests::definition_account_auth(); + let metadata_account = AccountWithMetadata { + account: Account::default(), + is_authorized: true, + account_id: AccountId::new([2; 32]), + }; + let holding_account = AccountWithMetadata { + account: Account::default(), + is_authorized: true, + account_id: AccountId::new([3; 32]), + }; + let new_definition = NewTokenDefinition::Fungible { + name: String::from("test"), + total_supply: 15u128, + }; + let metadata = NewTokenMetadata { + standard: MetadataStandard::Simple, + uri: "test_uri".to_string(), + creators: "test_creators".to_string(), + }; + let _post_states = new_definition_with_metadata( + definition_account, + metadata_account, + holding_account, + new_definition, + metadata, + ); +} + +#[should_panic(expected = "Metadata target account must have default values")] +#[test] +fn test_call_new_definition_metadata_with_init_metadata() { + let definition_account = AccountWithMetadata { + account: Account::default(), + is_authorized: true, + account_id: AccountId::new([1; 32]), + }; + let holding_account = AccountWithMetadata { + account: Account::default(), + is_authorized: true, + account_id: AccountId::new([3; 32]), + }; + let metadata_account = AccountForTests::holding_account_same_definition_mint(); + let new_definition = NewTokenDefinition::Fungible { + name: String::from("test"), + total_supply: 15u128, + }; + let metadata = NewTokenMetadata { + standard: MetadataStandard::Simple, + uri: "test_uri".to_string(), + creators: "test_creators".to_string(), + }; + let _post_states = 
new_definition_with_metadata( + definition_account, + holding_account, + metadata_account, + new_definition, + metadata, + ); +} + +#[should_panic(expected = "Holding target account must have default values")] +#[test] +fn test_call_new_definition_metadata_with_init_holding() { + let definition_account = AccountWithMetadata { + account: Account::default(), + is_authorized: true, + account_id: AccountId::new([1; 32]), + }; + let metadata_account = AccountWithMetadata { + account: Account::default(), + is_authorized: true, + account_id: AccountId::new([2; 32]), + }; + let holding_account = AccountForTests::holding_account_same_definition_mint(); + let new_definition = NewTokenDefinition::Fungible { + name: String::from("test"), + total_supply: 15u128, + }; + let metadata = NewTokenMetadata { + standard: MetadataStandard::Simple, + uri: "test_uri".to_string(), + creators: "test_creators".to_string(), + }; + let _post_states = new_definition_with_metadata( + definition_account, + holding_account, + metadata_account, + new_definition, + metadata, + ); +} + +#[should_panic(expected = "Master NFT Account must be authorized")] +#[test] +fn test_print_nft_master_account_must_be_authorized() { + let master_account = AccountForTests::holding_account_uninit(); + let printed_account = AccountForTests::holding_account_uninit(); + let _post_states = print_nft(master_account, printed_account); +} + +#[should_panic(expected = "Printed Account must be uninitialized")] +#[test] +fn test_print_nft_print_account_initialized() { + let master_account = AccountForTests::holding_account_master_nft(); + let printed_account = AccountForTests::holding_account_init(); + let _post_states = print_nft(master_account, printed_account); +} + +#[should_panic(expected = "Invalid Token Holding data")] +#[test] +fn test_print_nft_master_nft_invalid_token_holding() { + let master_account = AccountForTests::definition_account_auth(); + let printed_account = AccountForTests::holding_account_uninit(); + let _post_states = print_nft(master_account, printed_account); +} + +#[should_panic(expected = "Invalid Token Holding provided as NFT Master Account")] +#[test] +fn test_print_nft_master_nft_not_nft_master_account() { + let master_account = AccountForTests::holding_account_init(); + let printed_account = AccountForTests::holding_account_uninit(); + let _post_states = print_nft(master_account, printed_account); +} + +#[should_panic(expected = "Insufficient balance to print another NFT copy")] +#[test] +fn test_print_nft_master_nft_insufficient_balance() { + let master_account = AccountForTests::holding_account_master_nft_insufficient_balance(); + let printed_account = AccountForTests::holding_account_uninit(); + let _post_states = print_nft(master_account, printed_account); +} + +#[test] +fn test_print_nft_success() { + let master_account = AccountForTests::holding_account_master_nft(); + let printed_account = AccountForTests::holding_account_uninit(); + let post_states = print_nft(master_account, printed_account); + + let [post_master_nft, post_printed] = post_states.try_into().unwrap(); + + assert_eq!( + *post_master_nft.account(), + AccountForTests::holding_account_master_nft_after_print().account + ); + assert_eq!( + *post_printed.account(), + AccountForTests::holding_account_printed_nft().account + ); +} diff --git a/programs/token/src/transfer.rs b/programs/token/src/transfer.rs new file mode 100644 index 00000000..a1087bb1 --- /dev/null +++ b/programs/token/src/transfer.rs @@ -0,0 +1,110 @@ +use nssa_core::{ + 
account::{Account, AccountWithMetadata, Data}, + program::AccountPostState, +}; +use token_core::TokenHolding; + +pub fn transfer( + sender: AccountWithMetadata, + recipient: AccountWithMetadata, + balance_to_move: u128, +) -> Vec { + assert!(sender.is_authorized, "Sender authorization is missing"); + + let mut sender_holding = + TokenHolding::try_from(&sender.account.data).expect("Invalid sender data"); + + let mut recipient_holding = if recipient.account == Account::default() { + TokenHolding::zeroized_clone_from(&sender_holding) + } else { + TokenHolding::try_from(&recipient.account.data).expect("Invalid recipient data") + }; + + assert_eq!( + sender_holding.definition_id(), + recipient_holding.definition_id(), + "Sender and recipient definition id mismatch" + ); + + match (&mut sender_holding, &mut recipient_holding) { + ( + TokenHolding::Fungible { + definition_id: _, + balance: sender_balance, + }, + TokenHolding::Fungible { + definition_id: _, + balance: recipient_balance, + }, + ) => { + *sender_balance = sender_balance + .checked_sub(balance_to_move) + .expect("Insufficient balance"); + + *recipient_balance = recipient_balance + .checked_add(balance_to_move) + .expect("Recipient balance overflow"); + } + ( + TokenHolding::NftMaster { + definition_id: _, + print_balance: sender_print_balance, + }, + TokenHolding::NftMaster { + definition_id: _, + print_balance: recipient_print_balance, + }, + ) => { + assert_eq!( + *recipient_print_balance, 0, + "Invalid balance in recipient account for NFT transfer" + ); + + assert_eq!( + *sender_print_balance, balance_to_move, + "Invalid balance for NFT Master transfer" + ); + + std::mem::swap(sender_print_balance, recipient_print_balance); + } + ( + TokenHolding::NftPrintedCopy { + definition_id: _, + owned: sender_owned, + }, + TokenHolding::NftPrintedCopy { + definition_id: _, + owned: recipient_owned, + }, + ) => { + assert_eq!( + balance_to_move, 1, + "Invalid balance for NFT Printed Copy transfer" + ); + + assert!(*sender_owned, "Sender does not own the NFT Printed Copy"); + + assert!( + !*recipient_owned, + "Recipient already owns the NFT Printed Copy" + ); + + *sender_owned = false; + *recipient_owned = true; + } + _ => { + panic!("Mismatched token holding types for transfer"); + } + }; + + let mut sender_post = sender.account; + sender_post.data = Data::from(&sender_holding); + + let mut recipient_post = recipient.account; + recipient_post.data = Data::from(&recipient_holding); + + vec![ + AccountPostState::new(sender_post), + AccountPostState::new_claimed_if_default(recipient_post), + ] +} diff --git a/sequencer_core/Cargo.toml b/sequencer_core/Cargo.toml index 8d2886ce..fb900252 100644 --- a/sequencer_core/Cargo.toml +++ b/sequencer_core/Cargo.toml @@ -2,6 +2,7 @@ name = "sequencer_core" version = "0.1.0" edition = "2024" +license = { workspace = true } [dependencies] nssa.workspace = true @@ -17,6 +18,7 @@ serde_json.workspace = true tempfile.workspace = true chrono.workspace = true log.workspace = true +tokio = { workspace = true, features = ["rt-multi-thread", "macros"] } bedrock_client.workspace = true logos-blockchain-key-management-system-service.workspace = true logos-blockchain-core.workspace = true @@ -29,5 +31,4 @@ default = [] testnet = [] [dev-dependencies] -tokio = { workspace = true, features = ["rt-multi-thread", "macros"] } futures.workspace = true diff --git a/sequencer_core/src/block_settlement_client.rs b/sequencer_core/src/block_settlement_client.rs index 0aa22420..f99a116e 100644 --- 
a/sequencer_core/src/block_settlement_client.rs +++ b/sequencer_core/src/block_settlement_client.rs @@ -1,8 +1,8 @@ -use std::{fs, path::Path}; +use std::{fs, path::Path, str::FromStr}; use anyhow::{Context, Result, anyhow}; use bedrock_client::BedrockClient; -use common::block::HashableBlockData; +use common::block::Block; use logos_blockchain_core::mantle::{ MantleTx, Op, OpProof, SignedMantleTx, Transaction, TxHash, ledger, ops::channel::{ChannelId, MsgId, inscribe::InscriptionOp}, @@ -10,43 +10,44 @@ use logos_blockchain_core::mantle::{ use logos_blockchain_key_management_system_service::keys::{ ED25519_SECRET_KEY_SIZE, Ed25519Key, Ed25519PublicKey, }; +use reqwest::Url; use crate::config::BedrockConfig; /// A component that posts block data to logos blockchain +#[derive(Clone)] pub struct BlockSettlementClient { bedrock_client: BedrockClient, bedrock_signing_key: Ed25519Key, bedrock_channel_id: ChannelId, - last_message_id: MsgId, } impl BlockSettlementClient { pub fn try_new(home: &Path, config: &BedrockConfig) -> Result { let bedrock_signing_key = load_or_create_signing_key(&home.join("bedrock_signing_key")) .context("Failed to load or create signing key")?; - let bedrock_channel_id = ChannelId::from(config.channel_id); - let bedrock_client = BedrockClient::new(None, config.node_url.clone()) - .context("Failed to initialize bedrock client")?; - let channel_genesis_msg = MsgId::from([0; 32]); + let bedrock_url = Url::from_str(config.node_url.as_ref()) + .context("Bedrock node address is not a valid url")?; + let bedrock_client = + BedrockClient::new(None, bedrock_url).context("Failed to initialize bedrock client")?; Ok(Self { bedrock_client, bedrock_signing_key, - bedrock_channel_id, - last_message_id: channel_genesis_msg, + bedrock_channel_id: config.channel_id, }) } /// Create and sign a transaction for inscribing data - pub fn create_inscribe_tx(&self, data: Vec) -> (SignedMantleTx, MsgId) { + pub fn create_inscribe_tx(&self, block: &Block) -> Result<(SignedMantleTx, MsgId)> { + let inscription_data = borsh::to_vec(block)?; let verifying_key_bytes = self.bedrock_signing_key.public_key().to_bytes(); let verifying_key = Ed25519PublicKey::from_bytes(&verifying_key_bytes).expect("valid ed25519 public key"); let inscribe_op = InscriptionOp { channel_id: self.bedrock_channel_id, - inscription: data, - parent: self.last_message_id, + inscription: inscription_data, + parent: block.bedrock_parent_id.into(), signer: verifying_key, }; let inscribe_op_id = inscribe_op.id(); @@ -76,20 +77,17 @@ impl BlockSettlementClient { ledger_tx_proof: empty_ledger_signature(&tx_hash), mantle_tx: inscribe_tx, }; - (signed_mantle_tx, inscribe_op_id) + Ok((signed_mantle_tx, inscribe_op_id)) } - /// Post a transaction to the node and wait for inclusion - pub async fn post_and_wait(&mut self, block_data: &HashableBlockData) -> Result { - let inscription_data = borsh::to_vec(&block_data)?; - let (tx, new_msg_id) = self.create_inscribe_tx(inscription_data); + /// Post a transaction to the node + pub async fn submit_block_to_bedrock(&self, block: &Block) -> Result { + let (tx, new_msg_id) = self.create_inscribe_tx(block)?; // Post the transaction self.bedrock_client.post_transaction(tx).await?; - self.last_message_id = new_msg_id; - - Ok(block_data.block_id) + Ok(new_msg_id) } } diff --git a/sequencer_core/src/block_store.rs b/sequencer_core/src/block_store.rs index cd9aa194..a0b07445 100644 --- a/sequencer_core/src/block_store.rs +++ b/sequencer_core/src/block_store.rs @@ -2,9 +2,10 @@ use 
std::{collections::HashMap, path::Path}; use anyhow::Result; use common::{HashType, block::Block, transaction::EncodedTransaction}; +use nssa::V02State; use storage::RocksDBIO; -pub struct SequencerBlockStore { +pub struct SequencerStore { dbio: RocksDBIO, // TODO: Consider adding the hashmap to the database for faster recovery. tx_hash_to_block_map: HashMap, @@ -12,7 +13,7 @@ pub struct SequencerBlockStore { signing_key: nssa::PrivateKey, } -impl SequencerBlockStore { +impl SequencerStore { /// Starting database at the start of new chain. /// Creates files if necessary. /// @@ -42,18 +43,15 @@ impl SequencerBlockStore { /// Reopening existing database pub fn open_db_restart(location: &Path, signing_key: nssa::PrivateKey) -> Result { - SequencerBlockStore::open_db_with_genesis(location, None, signing_key) + SequencerStore::open_db_with_genesis(location, None, signing_key) } pub fn get_block_at_id(&self, id: u64) -> Result { Ok(self.dbio.get_block(id)?) } - pub fn put_block_at_id(&mut self, block: Block) -> Result<()> { - let new_transactions_map = block_to_transactions_map(&block); - self.dbio.put_block(block, false)?; - self.tx_hash_to_block_map.extend(new_transactions_map); - Ok(()) + pub fn delete_block_at_id(&mut self, block_id: u64) -> Result<()> { + Ok(self.dbio.delete_block(block_id)?) } /// Returns the transaction corresponding to the given hash, if it exists in the blockchain. @@ -81,6 +79,21 @@ impl SequencerBlockStore { pub fn signing_key(&self) -> &nssa::PrivateKey { &self.signing_key } + + pub fn get_all_blocks(&self) -> impl Iterator> { + self.dbio.get_all_blocks().map(|res| Ok(res?)) + } + + pub(crate) fn update(&mut self, block: Block, state: &V02State) -> Result<()> { + let new_transactions_map = block_to_transactions_map(&block); + self.dbio.atomic_update(block, state)?; + self.tx_hash_to_block_map.extend(new_transactions_map); + Ok(()) + } + + pub fn get_nssa_state(&self) -> Option { + self.dbio.get_nssa_state().ok() + } } pub(crate) fn block_to_transactions_map(block: &Block) -> HashMap { @@ -113,11 +126,10 @@ mod tests { transactions: vec![], }; - let genesis_block = genesis_block_hashable_data.into_pending_block(&signing_key); + let genesis_block = genesis_block_hashable_data.into_pending_block(&signing_key, [0; 32]); // Start an empty node store let mut node_store = - SequencerBlockStore::open_db_with_genesis(path, Some(genesis_block), signing_key) - .unwrap(); + SequencerStore::open_db_with_genesis(path, Some(genesis_block), signing_key).unwrap(); let tx = common::test_utils::produce_dummy_empty_transaction(); let block = common::test_utils::produce_dummy_block(1, None, vec![tx.clone()]); @@ -126,7 +138,8 @@ mod tests { let retrieved_tx = node_store.get_transaction_by_hash(tx.hash()); assert_eq!(None, retrieved_tx); // Add the block with the transaction - node_store.put_block_at_id(block).unwrap(); + let dummy_state = V02State::new_with_genesis_accounts(&[], &[]); + node_store.update(block, &dummy_state).unwrap(); // Try again let retrieved_tx = node_store.get_transaction_by_hash(tx.hash()); assert_eq!(Some(tx), retrieved_tx); diff --git a/sequencer_core/src/config.rs b/sequencer_core/src/config.rs index 5911cc52..3d69e8af 100644 --- a/sequencer_core/src/config.rs +++ b/sequencer_core/src/config.rs @@ -5,7 +5,8 @@ use std::{ }; use anyhow::Result; -use reqwest::Url; +use common::sequencer_client::BasicAuth; +use logos_blockchain_core::mantle::ops::channel::ChannelId; use serde::{Deserialize, Serialize}; #[derive(Debug, Serialize, Deserialize, Clone)] @@ -40,6 +41,8 
@@ pub struct SequencerConfig { pub mempool_max_size: usize, /// Interval in which blocks produced pub block_create_timeout_millis: u64, + /// Interval in which pending blocks are retried + pub retry_pending_blocks_timeout_millis: u64, /// Port to listen pub port: u16, /// List of initial accounts data @@ -55,9 +58,11 @@ pub struct SequencerConfig { #[derive(Clone, Serialize, Deserialize)] pub struct BedrockConfig { /// Bedrock channel ID - pub channel_id: [u8; 32], + pub channel_id: ChannelId, /// Bedrock Url - pub node_url: Url, + pub node_url: String, + /// Bedrock auth + pub auth: Option, } impl SequencerConfig { diff --git a/sequencer_core/src/lib.rs b/sequencer_core/src/lib.rs index ee4b66a0..f0bb9dbf 100644 --- a/sequencer_core/src/lib.rs +++ b/sequencer_core/src/lib.rs @@ -5,15 +5,15 @@ use anyhow::Result; use common::PINATA_BASE58; use common::{ HashType, - block::HashableBlockData, + block::{BedrockStatus, Block, HashableBlockData, MantleMsgId}, transaction::{EncodedTransaction, NSSATransaction}, }; use config::SequencerConfig; -use log::warn; +use log::{info, warn}; use mempool::{MemPool, MemPoolHandle}; use serde::{Deserialize, Serialize}; -use crate::{block_settlement_client::BlockSettlementClient, block_store::SequencerBlockStore}; +use crate::{block_settlement_client::BlockSettlementClient, block_store::SequencerStore}; mod block_settlement_client; pub mod block_store; @@ -21,11 +21,12 @@ pub mod config; pub struct SequencerCore { state: nssa::V02State, - block_store: SequencerBlockStore, + store: SequencerStore, mempool: MemPool, sequencer_config: SequencerConfig, chain_height: u64, block_settlement_client: Option, + last_bedrock_msg_id: MantleMsgId, } #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] @@ -43,7 +44,11 @@ impl Display for TransactionMalformationError { impl std::error::Error for TransactionMalformationError {} impl SequencerCore { - /// Start Sequencer from configuration and construct transaction sender + /// Starts the sequencer using the provided configuration. + /// If an existing database is found, the sequencer state is loaded from it and + /// assumed to represent the correct latest state consistent with Bedrock-finalized data. + /// If no database is found, the sequencer performs a fresh start from genesis, + /// initializing its state with the accounts defined in the configuration file. pub fn start_from_config(config: SequencerConfig) -> (Self, MemPoolHandle) { let hashable_data = HashableBlockData { block_id: config.genesis_id, @@ -53,37 +58,51 @@ impl SequencerCore { }; let signing_key = nssa::PrivateKey::try_new(config.signing_key).unwrap(); - let genesis_block = hashable_data.into_pending_block(&signing_key); + let channel_genesis_msg_id = [0; 32]; + let genesis_block = hashable_data.into_pending_block(&signing_key, channel_genesis_msg_id); // Sequencer should panic if unable to open db, // as fixing this issue may require actions non-native to program scope - let block_store = SequencerBlockStore::open_db_with_genesis( + let store = SequencerStore::open_db_with_genesis( &config.home.join("rocksdb"), Some(genesis_block), signing_key, ) .unwrap(); - let mut initial_commitments = vec![]; - for init_comm_data in config.initial_commitments.clone() { - let npk = init_comm_data.npk; + let mut state = match store.get_nssa_state() { + Some(state) => { + info!("Found local database. Loading state and pending blocks from it."); + state + } + None => { + info!( + "No database found when starting the sequencer. 
Creating a fresh new with the initial data in config" + ); + let initial_commitments: Vec = config + .initial_commitments + .iter() + .map(|init_comm_data| { + let npk = &init_comm_data.npk; - let mut acc = init_comm_data.account; + let mut acc = init_comm_data.account.clone(); - acc.program_owner = nssa::program::Program::authenticated_transfer_program().id(); + acc.program_owner = + nssa::program::Program::authenticated_transfer_program().id(); - let comm = nssa_core::Commitment::new(&npk, &acc); + nssa_core::Commitment::new(npk, &acc) + }) + .collect(); - initial_commitments.push(comm); - } + let init_accs: Vec<(nssa::AccountId, u128)> = config + .initial_accounts + .iter() + .map(|acc_data| (acc_data.account_id.parse().unwrap(), acc_data.balance)) + .collect(); - let init_accs: Vec<(nssa::AccountId, u128)> = config - .initial_accounts - .iter() - .map(|acc_data| (acc_data.account_id.parse().unwrap(), acc_data.balance)) - .collect(); - - let mut state = nssa::V02State::new_with_genesis_accounts(&init_accs, &initial_commitments); + nssa::V02State::new_with_genesis_accounts(&init_accs, &initial_commitments) + } + }; #[cfg(feature = "testnet")] state.add_pinata_program(PINATA_BASE58.parse().unwrap()); @@ -94,37 +113,17 @@ impl SequencerCore { .expect("Block settlement client should be constructible") }); - let mut this = Self { + let sequencer_core = Self { state, - block_store, + store, mempool, chain_height: config.genesis_id, sequencer_config: config, block_settlement_client, + last_bedrock_msg_id: channel_genesis_msg_id, }; - this.sync_state_with_stored_blocks(); - - (this, mempool_handle) - } - - /// If there are stored blocks ahead of the current height, this method will load and process - /// all transaction in them in the order they are stored. The NSSA state will be updated - /// accordingly. - fn sync_state_with_stored_blocks(&mut self) { - let mut next_block_id = self.sequencer_config.genesis_id + 1; - while let Ok(block) = self.block_store.get_block_at_id(next_block_id) { - for encoded_transaction in block.body.transactions { - let transaction = NSSATransaction::try_from(&encoded_transaction).unwrap(); - // Process transaction and update state - self.execute_check_transaction_on_state(transaction) - .unwrap(); - // Update the tx hash to block id map. - self.block_store.insert(&encoded_transaction, next_block_id); - } - self.chain_height = next_block_id; - next_block_id += 1; - } + (sequencer_core, mempool_handle) } fn execute_check_transaction_on_state( @@ -148,8 +147,11 @@ impl SequencerCore { pub async fn produce_new_block_and_post_to_settlement_layer(&mut self) -> Result { let block_data = self.produce_new_block_with_mempool_transactions()?; - if let Some(block_settlement) = self.block_settlement_client.as_mut() { - block_settlement.post_and_wait(&block_data).await?; + if let Some(client) = self.block_settlement_client.as_mut() { + let block = + block_data.into_pending_block(self.store.signing_key(), self.last_bedrock_msg_id); + let msg_id = client.submit_block_to_bedrock(&block).await?; + self.last_bedrock_msg_id = msg_id.into(); log::info!("Posted block data to Bedrock"); } @@ -179,11 +181,7 @@ impl SequencerCore { } } - let prev_block_hash = self - .block_store - .get_block_at_id(self.chain_height)? 
- .header - .hash; + let prev_block_hash = self.store.get_block_at_id(self.chain_height)?.header.hash; let curr_time = chrono::Utc::now().timestamp_millis() as u64; @@ -196,9 +194,9 @@ impl SequencerCore { let block = hashable_data .clone() - .into_pending_block(self.block_store.signing_key()); + .into_pending_block(self.store.signing_key(), self.last_bedrock_msg_id); - self.block_store.put_block_at_id(block)?; + self.store.update(block, &self.state)?; self.chain_height = new_block_height; @@ -224,8 +222,8 @@ impl SequencerCore { &self.state } - pub fn block_store(&self) -> &SequencerBlockStore { - &self.block_store + pub fn block_store(&self) -> &SequencerStore { + &self.store } pub fn chain_height(&self) -> u64 { @@ -235,6 +233,39 @@ impl SequencerCore { pub fn sequencer_config(&self) -> &SequencerConfig { &self.sequencer_config } + + /// Deletes finalized blocks from the sequencer's pending block list. + /// This method must be called when new blocks are finalized on Bedrock. + /// All pending blocks with an ID less than or equal to `last_finalized_block_id` + /// are removed from the database. + pub fn clean_finalized_blocks_from_db(&mut self, last_finalized_block_id: u64) -> Result<()> { + if let Some(first_pending_block_id) = self + .get_pending_blocks()? + .iter() + .map(|block| block.header.block_id) + .min() + { + (first_pending_block_id..=last_finalized_block_id) + .try_for_each(|id| self.store.delete_block_at_id(id)) + } else { + Ok(()) + } + } + + /// Returns the list of stored pending blocks. + pub fn get_pending_blocks(&self) -> Result> { + Ok(self + .store + .get_all_blocks() + .collect::>>()? + .into_iter() + .filter(|block| matches!(block.bedrock_status, BedrockStatus::Pending)) + .collect()) + } + + pub fn block_settlement_client(&self) -> Option { + self.block_settlement_client.clone() + } } // TODO: Introduce type-safe wrapper around checked transaction, e.g. 
AuthenticatedTransaction @@ -297,6 +328,7 @@ mod tests { initial_commitments: vec![], signing_key: *sequencer_sign_key_for_testing().value(), bedrock_config: None, + retry_pending_blocks_timeout_millis: 1000 * 60 * 4, } } @@ -680,10 +712,7 @@ mod tests { .produce_new_block_with_mempool_transactions() .unwrap() .block_id; - let block = sequencer - .block_store - .get_block_at_id(current_height) - .unwrap(); + let block = sequencer.store.get_block_at_id(current_height).unwrap(); // Only one should be included in the block assert_eq!(block.body.transactions, vec![tx.clone()]); @@ -720,10 +749,7 @@ mod tests { .produce_new_block_with_mempool_transactions() .unwrap() .block_id; - let block = sequencer - .block_store - .get_block_at_id(current_height) - .unwrap(); + let block = sequencer.store.get_block_at_id(current_height).unwrap(); assert_eq!(block.body.transactions, vec![tx.clone()]); // Add same transaction should fail @@ -732,10 +758,7 @@ mod tests { .produce_new_block_with_mempool_transactions() .unwrap() .block_id; - let block = sequencer - .block_store - .get_block_at_id(current_height) - .unwrap(); + let block = sequencer.store.get_block_at_id(current_height).unwrap(); assert!(block.body.transactions.is_empty()); } @@ -768,10 +791,7 @@ mod tests { .produce_new_block_with_mempool_transactions() .unwrap() .block_id; - let block = sequencer - .block_store - .get_block_at_id(current_height) - .unwrap(); + let block = sequencer.store.get_block_at_id(current_height).unwrap(); assert_eq!(block.body.transactions, vec![tx.clone()]); } @@ -791,4 +811,42 @@ mod tests { config.initial_accounts[1].balance + balance_to_move ); } + + #[test] + fn test_get_pending_blocks() { + let config = setup_sequencer_config(); + let (mut sequencer, _mempool_handle) = SequencerCore::start_from_config(config); + sequencer + .produce_new_block_with_mempool_transactions() + .unwrap(); + sequencer + .produce_new_block_with_mempool_transactions() + .unwrap(); + sequencer + .produce_new_block_with_mempool_transactions() + .unwrap(); + assert_eq!(sequencer.get_pending_blocks().unwrap().len(), 4); + } + + #[test] + fn test_delete_blocks() { + let config = setup_sequencer_config(); + let (mut sequencer, _mempool_handle) = SequencerCore::start_from_config(config); + sequencer + .produce_new_block_with_mempool_transactions() + .unwrap(); + sequencer + .produce_new_block_with_mempool_transactions() + .unwrap(); + sequencer + .produce_new_block_with_mempool_transactions() + .unwrap(); + + let last_finalized_block = 3; + sequencer + .clean_finalized_blocks_from_db(last_finalized_block) + .unwrap(); + + assert_eq!(sequencer.get_pending_blocks().unwrap().len(), 1); + } } diff --git a/sequencer_rpc/Cargo.toml b/sequencer_rpc/Cargo.toml index 2abd5400..1a2b2a0a 100644 --- a/sequencer_rpc/Cargo.toml +++ b/sequencer_rpc/Cargo.toml @@ -2,6 +2,7 @@ name = "sequencer_rpc" version = "0.1.0" edition = "2024" +license = { workspace = true } [dependencies] nssa.workspace = true diff --git a/sequencer_rpc/src/process.rs b/sequencer_rpc/src/process.rs index dd3278a5..4f63915a 100644 --- a/sequencer_rpc/src/process.rs +++ b/sequencer_rpc/src/process.rs @@ -18,8 +18,8 @@ use common::{ GetInitialTestnetAccountsRequest, GetLastBlockRequest, GetLastBlockResponse, GetProgramIdsRequest, GetProgramIdsResponse, GetProofForCommitmentRequest, GetProofForCommitmentResponse, GetTransactionByHashRequest, - GetTransactionByHashResponse, HelloRequest, HelloResponse, SendTxRequest, - SendTxResponse, + GetTransactionByHashResponse, HelloRequest, HelloResponse, 
PostIndexerMessageRequest, + PostIndexerMessageResponse, SendTxRequest, SendTxResponse, }, }, transaction::{EncodedTransaction, NSSATransaction}, @@ -44,6 +44,7 @@ pub const GET_ACCOUNTS_NONCES: &str = "get_accounts_nonces"; pub const GET_ACCOUNT: &str = "get_account"; pub const GET_PROOF_FOR_COMMITMENT: &str = "get_proof_for_commitment"; pub const GET_PROGRAM_IDS: &str = "get_program_ids"; +pub const POST_INDEXER_MESSAGE: &str = "post_indexer_message"; pub const HELLO_FROM_SEQUENCER: &str = "HELLO_FROM_SEQUENCER"; @@ -314,6 +315,18 @@ impl JsonHandler { respond(response) } + async fn process_indexer_message(&self, request: Request) -> Result { + let _indexer_post_req = PostIndexerMessageRequest::parse(Some(request.params))?; + + // ToDo: Add indexer messages handling + + let response = PostIndexerMessageResponse { + status: "Success".to_string(), + }; + + respond(response) + } + pub async fn process_request_internal(&self, request: Request) -> Result { match request.method.as_ref() { HELLO => self.process_temp_hello(request).await, @@ -329,6 +342,7 @@ impl JsonHandler { GET_TRANSACTION_BY_HASH => self.process_get_transaction_by_hash(request).await, GET_PROOF_FOR_COMMITMENT => self.process_get_proof_by_commitment(request).await, GET_PROGRAM_IDS => self.process_get_program_ids(request).await, + POST_INDEXER_MESSAGE => self.process_indexer_message(request).await, _ => Err(RpcErr(RpcError::method_not_found(request.method))), } } @@ -340,10 +354,13 @@ mod tests { use base58::ToBase58; use base64::{Engine, engine::general_purpose}; - use common::{test_utils::sequencer_sign_key_for_testing, transaction::EncodedTransaction}; + use common::{ + sequencer_client::BasicAuth, test_utils::sequencer_sign_key_for_testing, + transaction::EncodedTransaction, + }; use sequencer_core::{ SequencerCore, - config::{AccountInitialData, SequencerConfig}, + config::{AccountInitialData, BedrockConfig, SequencerConfig}, }; use serde_json::Value; use tempfile::tempdir; @@ -388,12 +405,21 @@ mod tests { initial_accounts, initial_commitments: vec![], signing_key: *sequencer_sign_key_for_testing().value(), - bedrock_config: None, + retry_pending_blocks_timeout_millis: 1000 * 60 * 4, + bedrock_config: Some(BedrockConfig { + channel_id: [42; 32].into(), + node_url: "http://localhost:8080".to_string(), + auth: Some(BasicAuth { + username: "user".to_string(), + password: None, + }), + }), } } async fn components_for_tests() -> (JsonHandler, Vec, EncodedTransaction) { let config = sequencer_config_for_tests(); + let (mut sequencer_core, mempool_handle) = SequencerCore::start_from_config(config); let initial_accounts = sequencer_core.sequencer_config().initial_accounts.clone(); diff --git a/sequencer_runner/Cargo.toml b/sequencer_runner/Cargo.toml index 55f56dec..346c57b2 100644 --- a/sequencer_runner/Cargo.toml +++ b/sequencer_runner/Cargo.toml @@ -2,6 +2,7 @@ name = "sequencer_runner" version = "0.1.0" edition = "2024" +license = { workspace = true } [dependencies] common.workspace = true diff --git a/sequencer_runner/configs/debug/sequencer_config.json b/sequencer_runner/configs/debug/sequencer_config.json index ad43ba65..80bfe0a4 100644 --- a/sequencer_runner/configs/debug/sequencer_config.json +++ b/sequencer_runner/configs/debug/sequencer_config.json @@ -5,7 +5,8 @@ "is_genesis_random": true, "max_num_tx_in_block": 20, "mempool_max_size": 1000, - "block_create_timeout_millis": 10000, + "block_create_timeout_millis": 5000, + "retry_pending_blocks_timeout_millis": 7000, "port": 3040, "initial_accounts": [ { @@ -156,7 
+157,10 @@ 37 ], "bedrock_config": { - "channel_id": [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1], - "node_url": "http://localhost:8080" + "channel_id": "0101010101010101010101010101010101010101010101010101010101010101", + "node_url": "http://localhost:8080", + "auth": { + "username": "user" + } } } diff --git a/sequencer_runner/src/lib.rs b/sequencer_runner/src/lib.rs index fd4a6c08..8dbea525 100644 --- a/sequencer_runner/src/lib.rs +++ b/sequencer_runner/src/lib.rs @@ -4,7 +4,7 @@ use actix_web::dev::ServerHandle; use anyhow::Result; use clap::Parser; use common::rpc_primitives::RpcConfig; -use log::info; +use log::{info, warn}; use sequencer_core::{SequencerCore, config::SequencerConfig}; use sequencer_rpc::new_http_server; use tokio::{sync::Mutex, task::JoinHandle}; @@ -20,8 +20,14 @@ struct Args { pub async fn startup_sequencer( app_config: SequencerConfig, -) -> Result<(ServerHandle, SocketAddr, JoinHandle>)> { +) -> Result<( + ServerHandle, + SocketAddr, + JoinHandle>, + JoinHandle>, +)> { let block_timeout = app_config.block_create_timeout_millis; + let retry_pending_blocks_timeout = app_config.retry_pending_blocks_timeout_millis; let port = app_config.port; let (sequencer_core, mempool_handle) = SequencerCore::start_from_config(app_config); @@ -39,8 +45,41 @@ pub async fn startup_sequencer( let http_server_handle = http_server.handle(); tokio::spawn(http_server); - info!("Starting main sequencer loop"); + info!("Starting pending block retry loop"); + let seq_core_wrapped_for_block_retry = seq_core_wrapped.clone(); + let retry_pending_blocks_handle = tokio::spawn(async move { + loop { + tokio::time::sleep(std::time::Duration::from_millis( + retry_pending_blocks_timeout, + )) + .await; + let (pending_blocks, block_settlement_client) = { + let sequencer_core = seq_core_wrapped_for_block_retry.lock().await; + let client = sequencer_core.block_settlement_client(); + let pending_blocks = sequencer_core + .get_pending_blocks() + .expect("Sequencer should be able to retrieve pending blocks"); + (pending_blocks, client) + }; + + let Some(client) = block_settlement_client else { + continue; + }; + + info!("Resubmitting {} pending blocks", pending_blocks.len()); + for block in &pending_blocks { + if let Err(e) = client.submit_block_to_bedrock(block).await { + warn!( + "Failed to resubmit block with id {} with error {}", + block.header.block_id, e + ); + } + } + } + }); + + info!("Starting main sequencer loop"); let main_loop_handle = tokio::spawn(async move { loop { tokio::time::sleep(std::time::Duration::from_millis(block_timeout)).await; @@ -61,7 +100,12 @@ pub async fn startup_sequencer( } }); - Ok((http_server_handle, addr, main_loop_handle)) + Ok(( + http_server_handle, + addr, + main_loop_handle, + retry_pending_blocks_handle, + )) } pub async fn main_runner() -> Result<()> { @@ -81,9 +125,26 @@ pub async fn main_runner() -> Result<()> { } // ToDo: Add restart on failures - let (_, _, main_loop_handle) = startup_sequencer(app_config).await?; + let (_, _, main_loop_handle, retry_loop_handle) = startup_sequencer(app_config).await?; - main_loop_handle.await??; + info!("Sequencer running. Monitoring concurrent tasks..."); + + tokio::select! 
{ + res = main_loop_handle => { + match res { + Ok(inner_res) => warn!("Main loop exited unexpectedly: {:?}", inner_res), + Err(e) => warn!("Main loop task panicked: {:?}", e), + } + } + res = retry_loop_handle => { + match res { + Ok(inner_res) => warn!("Retry loop exited unexpectedly: {:?}", inner_res), + Err(e) => warn!("Retry loop task panicked: {:?}", e), + } + } + } + + info!("Shutting down sequencer..."); Ok(()) } diff --git a/storage/Cargo.toml b/storage/Cargo.toml index 4678560e..8da47de3 100644 --- a/storage/Cargo.toml +++ b/storage/Cargo.toml @@ -2,6 +2,7 @@ name = "storage" version = "0.1.0" edition = "2024" +license = { workspace = true } [dependencies] common.workspace = true @@ -9,3 +10,4 @@ common.workspace = true thiserror.workspace = true borsh.workspace = true rocksdb.workspace = true +nssa.workspace = true diff --git a/storage/src/lib.rs b/storage/src/lib.rs index 883684c2..b96e0d61 100644 --- a/storage/src/lib.rs +++ b/storage/src/lib.rs @@ -2,8 +2,9 @@ use std::{path::Path, sync::Arc}; use common::block::Block; use error::DbError; +use nssa::V02State; use rocksdb::{ - BoundColumnFamily, ColumnFamilyDescriptor, DBWithThreadMode, MultiThreaded, Options, + BoundColumnFamily, ColumnFamilyDescriptor, DBWithThreadMode, MultiThreaded, Options, WriteBatch, }; pub mod error; @@ -29,15 +30,15 @@ pub const DB_META_FIRST_BLOCK_SET_KEY: &str = "first_block_set"; /// Key base for storing metainformation about the last finalized block on Bedrock pub const DB_META_LAST_FINALIZED_BLOCK_ID: &str = "last_finalized_block_id"; -/// Key base for storing snapshot which describe block id -pub const DB_SNAPSHOT_BLOCK_ID_KEY: &str = "block_id"; +/// Key base for storing the NSSA state +pub const DB_NSSA_STATE_KEY: &str = "nssa_state"; /// Name of block column family pub const CF_BLOCK_NAME: &str = "cf_block"; /// Name of meta column family pub const CF_META_NAME: &str = "cf_meta"; -/// Name of snapshot column family -pub const CF_SNAPSHOT_NAME: &str = "cf_snapshot"; +/// Name of state column family +pub const CF_NSSA_STATE_NAME: &str = "cf_nssa_state"; pub type DbResult = Result; @@ -52,7 +53,7 @@ impl RocksDBIO { // ToDo: Add more column families for different data let cfb = ColumnFamilyDescriptor::new(CF_BLOCK_NAME, cf_opts.clone()); let cfmeta = ColumnFamilyDescriptor::new(CF_META_NAME, cf_opts.clone()); - let cfsnapshot = ColumnFamilyDescriptor::new(CF_SNAPSHOT_NAME, cf_opts.clone()); + let cfstate = ColumnFamilyDescriptor::new(CF_NSSA_STATE_NAME, cf_opts.clone()); let mut db_opts = Options::default(); db_opts.create_missing_column_families(true); @@ -60,7 +61,7 @@ impl RocksDBIO { let db = DBWithThreadMode::::open_cf_descriptors( &db_opts, path, - vec![cfb, cfmeta, cfsnapshot], + vec![cfb, cfmeta, cfstate], ); let dbio = Self { @@ -92,7 +93,7 @@ impl RocksDBIO { // ToDo: Add more column families for different data let _cfb = ColumnFamilyDescriptor::new(CF_BLOCK_NAME, cf_opts.clone()); let _cfmeta = ColumnFamilyDescriptor::new(CF_META_NAME, cf_opts.clone()); - let _cfsnapshot = ColumnFamilyDescriptor::new(CF_SNAPSHOT_NAME, cf_opts.clone()); + let _cfstate = ColumnFamilyDescriptor::new(CF_NSSA_STATE_NAME, cf_opts.clone()); let mut db_opts = Options::default(); db_opts.create_missing_column_families(true); @@ -109,8 +110,8 @@ impl RocksDBIO { self.db.cf_handle(CF_BLOCK_NAME).unwrap() } - pub fn snapshot_column(&self) -> Arc> { - self.db.cf_handle(CF_SNAPSHOT_NAME).unwrap() + pub fn nssa_state_column(&self) -> Arc> { + self.db.cf_handle(CF_NSSA_STATE_NAME).unwrap() } pub fn 
get_meta_first_block_in_db(&self) -> DbResult { @@ -189,6 +190,24 @@ impl RocksDBIO { Ok(res.is_some()) } + pub fn put_nssa_state_in_db(&self, state: &V02State, batch: &mut WriteBatch) -> DbResult<()> { + let cf_nssa_state = self.nssa_state_column(); + batch.put_cf( + &cf_nssa_state, + borsh::to_vec(&DB_NSSA_STATE_KEY).map_err(|err| { + DbError::borsh_cast_message( + err, + Some("Failed to serialize DB_NSSA_STATE_KEY".to_string()), + ) + })?, + borsh::to_vec(state).map_err(|err| { + DbError::borsh_cast_message(err, Some("Failed to serialize NSSA state".to_string())) + })?, + ); + + Ok(()) + } + pub fn put_meta_first_block_in_db(&self, block: Block) -> DbResult<()> { let cf_meta = self.meta_column(); self.db @@ -209,7 +228,15 @@ impl RocksDBIO { ) .map_err(|rerr| DbError::rocksdb_cast_message(rerr, None))?; - self.put_block(block, true)?; + let mut batch = WriteBatch::default(); + self.put_block(block, true, &mut batch)?; + self.db.write(batch).map_err(|rerr| { + DbError::rocksdb_cast_message( + rerr, + Some("Failed to write first block in db".to_string()), + ) + })?; + Ok(()) } @@ -274,7 +301,7 @@ impl RocksDBIO { Ok(()) } - pub fn put_block(&self, block: Block, first: bool) -> DbResult<()> { + pub fn put_block(&self, block: Block, first: bool, batch: &mut WriteBatch) -> DbResult<()> { let cf_block = self.block_column(); if !first { @@ -285,23 +312,15 @@ impl RocksDBIO { } } - self.db - .put_cf( - &cf_block, - borsh::to_vec(&block.header.block_id).map_err(|err| { - DbError::borsh_cast_message( - err, - Some("Failed to serialize block id".to_string()), - ) - })?, - borsh::to_vec(&block).map_err(|err| { - DbError::borsh_cast_message( - err, - Some("Failed to serialize block data".to_string()), - ) - })?, - ) - .map_err(|rerr| DbError::rocksdb_cast_message(rerr, None))?; + batch.put_cf( + &cf_block, + borsh::to_vec(&block.header.block_id).map_err(|err| { + DbError::borsh_cast_message(err, Some("Failed to serialize block id".to_string())) + })?, + borsh::to_vec(&block).map_err(|err| { + DbError::borsh_cast_message(err, Some("Failed to serialize block data".to_string())) + })?, + ); Ok(()) } @@ -334,32 +353,90 @@ impl RocksDBIO { } } - pub fn get_snapshot_block_id(&self) -> DbResult { - let cf_snapshot = self.snapshot_column(); + pub fn get_nssa_state(&self) -> DbResult { + let cf_nssa_state = self.nssa_state_column(); let res = self .db .get_cf( - &cf_snapshot, - borsh::to_vec(&DB_SNAPSHOT_BLOCK_ID_KEY).map_err(|err| { + &cf_nssa_state, + borsh::to_vec(&DB_NSSA_STATE_KEY).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize DB_SNAPSHOT_BLOCK_ID_KEY".to_string()), + Some("Failed to serialize block id".to_string()), ) })?, ) .map_err(|rerr| DbError::rocksdb_cast_message(rerr, None))?; if let Some(data) = res { - Ok(borsh::from_slice::(&data).map_err(|err| { + Ok(borsh::from_slice::(&data).map_err(|serr| { DbError::borsh_cast_message( - err, - Some("Failed to deserialize last block".to_string()), + serr, + Some("Failed to deserialize block data".to_string()), ) })?) } else { Err(DbError::db_interaction_error( - "Snapshot block ID not found".to_string(), + "Block on this id not found".to_string(), )) } } + + pub fn delete_block(&self, block_id: u64) -> DbResult<()> { + let cf_block = self.block_column(); + let key = borsh::to_vec(&block_id).map_err(|err| { + DbError::borsh_cast_message(err, Some("Failed to serialize block id".to_string())) + })?; + + if self + .db + .get_cf(&cf_block, &key) + .map_err(|rerr| DbError::rocksdb_cast_message(rerr, None))? 
+ .is_none() + { + return Err(DbError::db_interaction_error( + "Block on this id not found".to_string(), + )); + } + + self.db + .delete_cf(&cf_block, key) + .map_err(|rerr| DbError::rocksdb_cast_message(rerr, None))?; + + Ok(()) + } + + pub fn get_all_blocks(&self) -> impl Iterator> { + let cf_block = self.block_column(); + self.db + .iterator_cf(&cf_block, rocksdb::IteratorMode::Start) + .map(|res| { + let (_key, value) = res.map_err(|rerr| { + DbError::rocksdb_cast_message( + rerr, + Some("Failed to get key value pair".to_string()), + ) + })?; + + borsh::from_slice::(&value).map_err(|err| { + DbError::borsh_cast_message( + err, + Some("Failed to deserialize block data".to_string()), + ) + }) + }) + } + + pub fn atomic_update(&self, block: Block, state: &V02State) -> DbResult<()> { + let block_id = block.header.block_id; + let mut batch = WriteBatch::default(); + self.put_block(block, false, &mut batch)?; + self.put_nssa_state_in_db(state, &mut batch)?; + self.db.write(batch).map_err(|rerr| { + DbError::rocksdb_cast_message( + rerr, + Some(format!("Failed to update db with block {block_id}")), + ) + }) + } +} diff --git a/test_program_methods/Cargo.toml b/test_program_methods/Cargo.toml index 345c479f..1c3368c7 100644 --- a/test_program_methods/Cargo.toml +++ b/test_program_methods/Cargo.toml @@ -2,6 +2,7 @@ name = "test_program_methods" version = "0.1.0" edition = "2024" +license = { workspace = true } [build-dependencies] risc0-build.workspace = true diff --git a/test_program_methods/guest/Cargo.toml b/test_program_methods/guest/Cargo.toml index 17613351..21c4fdc7 100644 --- a/test_program_methods/guest/Cargo.toml +++ b/test_program_methods/guest/Cargo.toml @@ -2,6 +2,7 @@ name = "test_programs" version = "0.1.0" edition = "2024" +license = { workspace = true } [dependencies] nssa_core.workspace = true diff --git a/wallet-ffi/Cargo.toml b/wallet-ffi/Cargo.toml new file mode 100644 index 00000000..4305226b --- /dev/null +++ b/wallet-ffi/Cargo.toml @@ -0,0 +1,21 @@ +[package] +name = "wallet-ffi" +version = "0.1.0" +edition = "2021" +license = { workspace = true } + +[lib] +crate-type = ["rlib", "cdylib", "staticlib"] + +[dependencies] +wallet.workspace = true +nssa.workspace = true +common.workspace = true +nssa_core.workspace = true +tokio.workspace = true + +[build-dependencies] +cbindgen = "0.29" + +[dev-dependencies] +tempfile = "3" diff --git a/wallet-ffi/build.rs b/wallet-ffi/build.rs new file mode 100644 index 00000000..63ee0d9e --- /dev/null +++ b/wallet-ffi/build.rs @@ -0,0 +1,13 @@ +fn main() { + let crate_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap(); + + let config = + cbindgen::Config::from_file("cbindgen.toml").expect("Unable to read cbindgen.toml"); + + cbindgen::Builder::new() + .with_crate(crate_dir) + .with_config(config) + .generate() + .expect("Unable to generate bindings") + .write_to_file("wallet_ffi.h"); +} diff --git a/wallet-ffi/cbindgen.toml b/wallet-ffi/cbindgen.toml new file mode 100644 index 00000000..42c46543 --- /dev/null +++ b/wallet-ffi/cbindgen.toml @@ -0,0 +1,40 @@ +language = "C" +header = """ +/** + * NSSA Wallet FFI Bindings + * + * Thread Safety: All functions are thread-safe. The wallet handle can be + * shared across threads, but operations are serialized internally.
+ * + * Memory Management: + * - Functions returning pointers allocate memory that must be freed + * - Use the corresponding wallet_ffi_free_* function to free memory + * - Never free memory returned by FFI using standard C free() + * + * Error Handling: + * - Functions return WalletFfiError codes + * - On error, call wallet_ffi_get_last_error() for detailed message + * - The error string must be freed with wallet_ffi_free_error_string() + * + * Initialization: + * 1. Call wallet_ffi_init_runtime() before any other function + * 2. Create wallet with wallet_ffi_create_new() or wallet_ffi_open() + * 3. Destroy wallet with wallet_ffi_destroy() when done + */ +""" + +include_guard = "WALLET_FFI_H" +include_version = true +no_includes = false + +[export] +include = ["Ffi.*", "WalletFfiError", "WalletHandle"] + +[enum] +rename_variants = "ScreamingSnakeCase" + +[fn] +rename_args = "None" + +[struct] +rename_fields = "None" diff --git a/wallet-ffi/src/account.rs b/wallet-ffi/src/account.rs new file mode 100644 index 00000000..cf237276 --- /dev/null +++ b/wallet-ffi/src/account.rs @@ -0,0 +1,375 @@ +//! Account management functions. + +use std::ptr; + +use nssa::AccountId; + +use crate::{ + block_on, + error::{print_error, WalletFfiError}, + types::{FfiAccount, FfiAccountList, FfiAccountListEntry, FfiBytes32, WalletHandle}, + wallet::get_wallet, +}; + +/// Create a new public account. +/// +/// Public accounts use standard transaction signing and are suitable for +/// non-private operations. +/// +/// # Parameters +/// - `handle`: Valid wallet handle +/// - `out_account_id`: Output pointer for the new account ID (32 bytes) +/// +/// # Returns +/// - `Success` on successful creation +/// - Error code on failure +/// +/// # Safety +/// - `handle` must be a valid wallet handle from `wallet_ffi_create_new` or `wallet_ffi_open` +/// - `out_account_id` must be a valid pointer to a `FfiBytes32` struct +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_create_account_public( + handle: *mut WalletHandle, + out_account_id: *mut FfiBytes32, +) -> WalletFfiError { + let wrapper = match get_wallet(handle) { + Ok(w) => w, + Err(e) => return e, + }; + + if out_account_id.is_null() { + print_error("Null output pointer for account_id"); + return WalletFfiError::NullPointer; + } + + let mut wallet = match wrapper.core.lock() { + Ok(w) => w, + Err(e) => { + print_error(format!("Failed to lock wallet: {}", e)); + return WalletFfiError::InternalError; + } + }; + + let (account_id, _chain_index) = wallet.create_new_account_public(None); + + unsafe { + (*out_account_id).data = *account_id.value(); + } + + WalletFfiError::Success +} + +/// Create a new private account. +/// +/// Private accounts use privacy-preserving transactions with nullifiers +/// and commitments. 
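+///
+/// # Example
+///
+/// A minimal sketch; `handle` is assumed to come from `wallet_ffi_create_new`
+/// or `wallet_ffi_open`:
+///
+/// ```ignore
+/// let mut account_id = FfiBytes32::default();
+/// let err = unsafe { wallet_ffi_create_account_private(handle, &mut account_id) };
+/// assert_eq!(err, WalletFfiError::Success);
+/// ```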
+/// +/// # Parameters +/// - `handle`: Valid wallet handle +/// - `out_account_id`: Output pointer for the new account ID (32 bytes) +/// +/// # Returns +/// - `Success` on successful creation +/// - Error code on failure +/// +/// # Safety +/// - `handle` must be a valid wallet handle from `wallet_ffi_create_new` or `wallet_ffi_open` +/// - `out_account_id` must be a valid pointer to a `FfiBytes32` struct +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_create_account_private( + handle: *mut WalletHandle, + out_account_id: *mut FfiBytes32, +) -> WalletFfiError { + let wrapper = match get_wallet(handle) { + Ok(w) => w, + Err(e) => return e, + }; + + if out_account_id.is_null() { + print_error("Null output pointer for account_id"); + return WalletFfiError::NullPointer; + } + + let mut wallet = match wrapper.core.lock() { + Ok(w) => w, + Err(e) => { + print_error(format!("Failed to lock wallet: {}", e)); + return WalletFfiError::InternalError; + } + }; + + let (account_id, _chain_index) = wallet.create_new_account_private(None); + + unsafe { + (*out_account_id).data = *account_id.value(); + } + + WalletFfiError::Success +} + +/// List all accounts in the wallet. +/// +/// Returns both public and private accounts managed by this wallet. +/// +/// # Parameters +/// - `handle`: Valid wallet handle +/// - `out_list`: Output pointer for the account list +/// +/// # Returns +/// - `Success` on successful listing +/// - Error code on failure +/// +/// # Memory +/// The returned list must be freed with `wallet_ffi_free_account_list()`. +/// +/// # Safety +/// - `handle` must be a valid wallet handle from `wallet_ffi_create_new` or `wallet_ffi_open` +/// - `out_list` must be a valid pointer to a `FfiAccountList` struct +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_list_accounts( + handle: *mut WalletHandle, + out_list: *mut FfiAccountList, +) -> WalletFfiError { + let wrapper = match get_wallet(handle) { + Ok(w) => w, + Err(e) => return e, + }; + + if out_list.is_null() { + print_error("Null output pointer for account list"); + return WalletFfiError::NullPointer; + } + + let wallet = match wrapper.core.lock() { + Ok(w) => w, + Err(e) => { + print_error(format!("Failed to lock wallet: {}", e)); + return WalletFfiError::InternalError; + } + }; + + let user_data = &wallet.storage().user_data; + let mut entries = Vec::new(); + + // Public accounts from default signing keys (preconfigured) + for account_id in user_data.default_pub_account_signing_keys.keys() { + entries.push(FfiAccountListEntry { + account_id: FfiBytes32::from_account_id(account_id), + is_public: true, + }); + } + + // Public accounts from key tree (generated) + for account_id in user_data.public_key_tree.account_id_map.keys() { + entries.push(FfiAccountListEntry { + account_id: FfiBytes32::from_account_id(account_id), + is_public: true, + }); + } + + // Private accounts from default accounts (preconfigured) + for account_id in user_data.default_user_private_accounts.keys() { + entries.push(FfiAccountListEntry { + account_id: FfiBytes32::from_account_id(account_id), + is_public: false, + }); + } + + // Private accounts from key tree (generated) + for account_id in user_data.private_key_tree.account_id_map.keys() { + entries.push(FfiAccountListEntry { + account_id: FfiBytes32::from_account_id(account_id), + is_public: false, + }); + } + + let count = entries.len(); + + if count == 0 { + unsafe { + (*out_list).entries = ptr::null_mut(); + (*out_list).count = 0; + } + } else { + let entries_boxed = entries.into_boxed_slice(); + let 
entries_ptr = Box::into_raw(entries_boxed) as *mut FfiAccountListEntry; + + unsafe { + (*out_list).entries = entries_ptr; + (*out_list).count = count; + } + } + + WalletFfiError::Success +} + +/// Free an account list returned by `wallet_ffi_list_accounts`. +/// +/// # Safety +/// The list must be either null or a valid list returned by `wallet_ffi_list_accounts`. +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_free_account_list(list: *mut FfiAccountList) { + if list.is_null() { + return; + } + + unsafe { + let list = &*list; + if !list.entries.is_null() && list.count > 0 { + let slice = std::slice::from_raw_parts_mut(list.entries, list.count); + drop(Box::from_raw(slice as *mut [FfiAccountListEntry])); + } + } +} + +/// Get account balance. +/// +/// For public accounts, this fetches the balance from the network. +/// For private accounts, this returns the locally cached balance. +/// +/// # Parameters +/// - `handle`: Valid wallet handle +/// - `account_id`: The account ID (32 bytes) +/// - `is_public`: Whether this is a public account +/// - `out_balance`: Output for balance as little-endian [u8; 16] +/// +/// # Returns +/// - `Success` on successful query +/// - Error code on failure +/// +/// # Safety +/// - `handle` must be a valid wallet handle from `wallet_ffi_create_new` or `wallet_ffi_open` +/// - `account_id` must be a valid pointer to a `FfiBytes32` struct +/// - `out_balance` must be a valid pointer to a `[u8; 16]` array +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_get_balance( + handle: *mut WalletHandle, + account_id: *const FfiBytes32, + is_public: bool, + out_balance: *mut [u8; 16], +) -> WalletFfiError { + let wrapper = match get_wallet(handle) { + Ok(w) => w, + Err(e) => return e, + }; + + if account_id.is_null() || out_balance.is_null() { + print_error("Null pointer argument"); + return WalletFfiError::NullPointer; + } + + let wallet = match wrapper.core.lock() { + Ok(w) => w, + Err(e) => { + print_error(format!("Failed to lock wallet: {}", e)); + return WalletFfiError::InternalError; + } + }; + + let account_id = AccountId::new(unsafe { (*account_id).data }); + + let balance = if is_public { + match block_on(wallet.get_account_balance(account_id)) { + Ok(Ok(b)) => b, + Ok(Err(e)) => { + print_error(format!("Failed to get balance: {}", e)); + return WalletFfiError::NetworkError; + } + Err(e) => return e, + } + } else { + match wallet.get_account_private(&account_id) { + Some(account) => account.balance, + None => { + print_error("Private account not found"); + return WalletFfiError::AccountNotFound; + } + } + }; + + unsafe { + *out_balance = balance.to_le_bytes(); + } + + WalletFfiError::Success +} + +/// Get full public account data from the network. +/// +/// # Parameters +/// - `handle`: Valid wallet handle +/// - `account_id`: The account ID (32 bytes) +/// - `out_account`: Output pointer for account data +/// +/// # Returns +/// - `Success` on successful query +/// - Error code on failure +/// +/// # Memory +/// The account data must be freed with `wallet_ffi_free_account_data()`. 
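+///
+/// # Example
+///
+/// A minimal sketch; `handle` and a registered `account_id` are assumed to exist:
+///
+/// ```ignore
+/// let mut account = FfiAccount::default();
+/// let err = unsafe { wallet_ffi_get_account_public(handle, &account_id, &mut account) };
+/// if err == WalletFfiError::Success {
+///     // Inspect account.balance / account.data here, then release the buffer.
+///     unsafe { wallet_ffi_free_account_data(&mut account) };
+/// }
+/// ```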
+/// +/// # Safety +/// - `handle` must be a valid wallet handle from `wallet_ffi_create_new` or `wallet_ffi_open` +/// - `account_id` must be a valid pointer to a `FfiBytes32` struct +/// - `out_account` must be a valid pointer to a `FfiAccount` struct +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_get_account_public( + handle: *mut WalletHandle, + account_id: *const FfiBytes32, + out_account: *mut FfiAccount, +) -> WalletFfiError { + let wrapper = match get_wallet(handle) { + Ok(w) => w, + Err(e) => return e, + }; + + if account_id.is_null() || out_account.is_null() { + print_error("Null pointer argument"); + return WalletFfiError::NullPointer; + } + + let wallet = match wrapper.core.lock() { + Ok(w) => w, + Err(e) => { + print_error(format!("Failed to lock wallet: {}", e)); + return WalletFfiError::InternalError; + } + }; + + let account_id = AccountId::new(unsafe { (*account_id).data }); + + let account = match block_on(wallet.get_account_public(account_id)) { + Ok(Ok(a)) => a, + Ok(Err(e)) => { + print_error(format!("Failed to get account: {}", e)); + return WalletFfiError::NetworkError; + } + Err(e) => return e, + }; + + unsafe { + *out_account = account.into(); + } + + WalletFfiError::Success +} + +/// Free account data returned by `wallet_ffi_get_account_public`. +/// +/// # Safety +/// The account must be either null or a valid account returned by +/// `wallet_ffi_get_account_public`. +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_free_account_data(account: *mut FfiAccount) { + if account.is_null() { + return; + } + + unsafe { + let account = &*account; + if !account.data.is_null() && account.data_len > 0 { + let slice = std::slice::from_raw_parts_mut(account.data as *mut u8, account.data_len); + drop(Box::from_raw(slice as *mut [u8])); + } + } +} diff --git a/wallet-ffi/src/error.rs b/wallet-ffi/src/error.rs new file mode 100644 index 00000000..ab9ce6dd --- /dev/null +++ b/wallet-ffi/src/error.rs @@ -0,0 +1,50 @@ +//! Error handling for the FFI layer. +//! +//! Uses numeric error codes with error messages printed to stderr. + +/// Error codes returned by FFI functions. +#[repr(C)] +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum WalletFfiError { + /// Operation completed successfully + Success = 0, + /// A null pointer was passed where a valid pointer was expected + NullPointer = 1, + /// Invalid UTF-8 string + InvalidUtf8 = 2, + /// Wallet handle is not initialized + WalletNotInitialized = 3, + /// Configuration error + ConfigError = 4, + /// Storage/persistence error + StorageError = 5, + /// Network/RPC error + NetworkError = 6, + /// Account not found + AccountNotFound = 7, + /// Key not found for account + KeyNotFound = 8, + /// Insufficient funds for operation + InsufficientFunds = 9, + /// Invalid account ID format + InvalidAccountId = 10, + /// Tokio runtime error + RuntimeError = 11, + /// Password required but not provided + PasswordRequired = 12, + /// Block synchronization error + SyncError = 13, + /// Serialization/deserialization error + SerializationError = 14, + /// Invalid conversion from FFI types to NSSA types + InvalidTypeConversion = 15, + /// Invalid Key value + InvalidKeyValue = 16, + /// Internal error (catch-all) + InternalError = 99, +} + +/// Log an error message to stderr. +pub fn print_error(msg: impl Into) { + eprintln!("[wallet-ffi] {}", msg.into()); +} diff --git a/wallet-ffi/src/keys.rs b/wallet-ffi/src/keys.rs new file mode 100644 index 00000000..08661a50 --- /dev/null +++ b/wallet-ffi/src/keys.rs @@ -0,0 +1,253 @@ +//! 
Key retrieval functions. + +use std::ptr; + +use nssa::{AccountId, PublicKey}; + +use crate::{ + error::{print_error, WalletFfiError}, + types::{FfiBytes32, FfiPrivateAccountKeys, FfiPublicAccountKey, WalletHandle}, + wallet::get_wallet, +}; + +/// Get the public key for a public account. +/// +/// This returns the public key derived from the account's signing key. +/// +/// # Parameters +/// - `handle`: Valid wallet handle +/// - `account_id`: The account ID (32 bytes) +/// - `out_public_key`: Output pointer for the public key +/// +/// # Returns +/// - `Success` on successful retrieval +/// - `KeyNotFound` if the account's key is not in this wallet +/// - Error code on other failures +/// +/// # Safety +/// - `handle` must be a valid wallet handle from `wallet_ffi_create_new` or `wallet_ffi_open` +/// - `account_id` must be a valid pointer to a `FfiBytes32` struct +/// - `out_public_key` must be a valid pointer to a `FfiPublicAccountKey` struct +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_get_public_account_key( + handle: *mut WalletHandle, + account_id: *const FfiBytes32, + out_public_key: *mut FfiPublicAccountKey, +) -> WalletFfiError { + let wrapper = match get_wallet(handle) { + Ok(w) => w, + Err(e) => return e, + }; + + if account_id.is_null() || out_public_key.is_null() { + print_error("Null pointer argument"); + return WalletFfiError::NullPointer; + } + + let wallet = match wrapper.core.lock() { + Ok(w) => w, + Err(e) => { + print_error(format!("Failed to lock wallet: {}", e)); + return WalletFfiError::InternalError; + } + }; + + let account_id = AccountId::new(unsafe { (*account_id).data }); + + let private_key = match wallet.get_account_public_signing_key(&account_id) { + Some(k) => k, + None => { + print_error("Public account key not found in wallet"); + return WalletFfiError::KeyNotFound; + } + }; + + let public_key = PublicKey::new_from_private_key(private_key); + + unsafe { + *out_public_key = public_key.into(); + } + + WalletFfiError::Success +} + +/// Get keys for a private account. +/// +/// Returns the nullifier public key (NPK) and incoming viewing public key (IPK) +/// for the specified private account. These keys are safe to share publicly. +/// +/// # Parameters +/// - `handle`: Valid wallet handle +/// - `account_id`: The account ID (32 bytes) +/// - `out_keys`: Output pointer for the key data +/// +/// # Returns +/// - `Success` on successful retrieval +/// - `AccountNotFound` if the private account is not in this wallet +/// - Error code on other failures +/// +/// # Memory +/// The keys structure must be freed with `wallet_ffi_free_private_account_keys()`. 
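+///
+/// # Example
+///
+/// A minimal sketch; `handle` and a private `account_id` are assumed to exist:
+///
+/// ```ignore
+/// let mut keys = FfiPrivateAccountKeys::default();
+/// let err = unsafe { wallet_ffi_get_private_account_keys(handle, &account_id, &mut keys) };
+/// if err == WalletFfiError::Success {
+///     // keys.nullifier_public_key and the IPK bytes can now be shared with a counterparty.
+///     unsafe { wallet_ffi_free_private_account_keys(&mut keys) };
+/// }
+/// ```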
+/// +/// # Safety +/// - `handle` must be a valid wallet handle from `wallet_ffi_create_new` or `wallet_ffi_open` +/// - `account_id` must be a valid pointer to a `FfiBytes32` struct +/// - `out_keys` must be a valid pointer to a `FfiPrivateAccountKeys` struct +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_get_private_account_keys( + handle: *mut WalletHandle, + account_id: *const FfiBytes32, + out_keys: *mut FfiPrivateAccountKeys, +) -> WalletFfiError { + let wrapper = match get_wallet(handle) { + Ok(w) => w, + Err(e) => return e, + }; + + if account_id.is_null() || out_keys.is_null() { + print_error("Null pointer argument"); + return WalletFfiError::NullPointer; + } + + let wallet = match wrapper.core.lock() { + Ok(w) => w, + Err(e) => { + print_error(format!("Failed to lock wallet: {}", e)); + return WalletFfiError::InternalError; + } + }; + + let account_id = AccountId::new(unsafe { (*account_id).data }); + + let (key_chain, _account) = match wallet.storage().user_data.get_private_account(&account_id) { + Some(k) => k, + None => { + print_error("Private account not found in wallet"); + return WalletFfiError::AccountNotFound; + } + }; + + // NPK is a 32-byte array + let npk_bytes = key_chain.nullifer_public_key.0; + + // IPK is a compressed secp256k1 point (33 bytes) + let ipk_bytes = key_chain.incoming_viewing_public_key.to_bytes(); + let ipk_len = ipk_bytes.len(); + let ipk_vec = ipk_bytes.to_vec(); + let ipk_boxed = ipk_vec.into_boxed_slice(); + let ipk_ptr = Box::into_raw(ipk_boxed) as *const u8; + + unsafe { + (*out_keys).nullifier_public_key.data = npk_bytes; + (*out_keys).incoming_viewing_public_key = ipk_ptr; + (*out_keys).incoming_viewing_public_key_len = ipk_len; + } + + WalletFfiError::Success +} + +/// Free private account keys returned by `wallet_ffi_get_private_account_keys`. +/// +/// # Safety +/// The keys must be either null or valid keys returned by +/// `wallet_ffi_get_private_account_keys`. +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_free_private_account_keys(keys: *mut FfiPrivateAccountKeys) { + if keys.is_null() { + return; + } + + unsafe { + let keys = &*keys; + if !keys.incoming_viewing_public_key.is_null() && keys.incoming_viewing_public_key_len > 0 { + let slice = std::slice::from_raw_parts_mut( + keys.incoming_viewing_public_key as *mut u8, + keys.incoming_viewing_public_key_len, + ); + drop(Box::from_raw(slice as *mut [u8])); + } + } +} + +/// Convert an account ID to a Base58 string. +/// +/// # Parameters +/// - `account_id`: The account ID (32 bytes) +/// +/// # Returns +/// - Pointer to null-terminated Base58 string on success +/// - Null pointer on error +/// +/// # Memory +/// The returned string must be freed with `wallet_ffi_free_string()`. +/// +/// # Safety +/// - `account_id` must be a valid pointer to a `FfiBytes32` struct +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_account_id_to_base58( + account_id: *const FfiBytes32, +) -> *mut std::ffi::c_char { + if account_id.is_null() { + print_error("Null account_id pointer"); + return ptr::null_mut(); + } + + let account_id = AccountId::new(unsafe { (*account_id).data }); + let base58_str = account_id.to_string(); + + match std::ffi::CString::new(base58_str) { + Ok(s) => s.into_raw(), + Err(e) => { + print_error(format!("Failed to create C string: {}", e)); + ptr::null_mut() + } + } +} + +/// Parse a Base58 string into an account ID. 
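+///
+/// # Example
+///
+/// A minimal sketch; `base58_id` is assumed to hold the Base58 encoding of a
+/// 32-byte account ID (e.g. a string produced by `wallet_ffi_account_id_to_base58`):
+///
+/// ```ignore
+/// let c_str = std::ffi::CString::new(base58_id).unwrap();
+/// let mut id = FfiBytes32::default();
+/// let err = unsafe { wallet_ffi_account_id_from_base58(c_str.as_ptr(), &mut id) };
+/// assert_eq!(err, WalletFfiError::Success);
+/// ```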
+/// +/// # Parameters +/// - `base58_str`: Null-terminated Base58 string +/// - `out_account_id`: Output pointer for the account ID (32 bytes) +/// +/// # Returns +/// - `Success` on successful parsing +/// - `InvalidAccountId` if the string is not valid Base58 +/// - Error code on other failures +/// +/// # Safety +/// - `base58_str` must be a valid pointer to a null-terminated C string +/// - `out_account_id` must be a valid pointer to a `FfiBytes32` struct +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_account_id_from_base58( + base58_str: *const std::ffi::c_char, + out_account_id: *mut FfiBytes32, +) -> WalletFfiError { + if base58_str.is_null() || out_account_id.is_null() { + print_error("Null pointer argument"); + return WalletFfiError::NullPointer; + } + + let c_str = unsafe { std::ffi::CStr::from_ptr(base58_str) }; + let str_slice = match c_str.to_str() { + Ok(s) => s, + Err(e) => { + print_error(format!("Invalid UTF-8: {}", e)); + return WalletFfiError::InvalidUtf8; + } + }; + + let account_id: AccountId = match str_slice.parse() { + Ok(id) => id, + Err(e) => { + print_error(format!("Invalid Base58 account ID: {}", e)); + return WalletFfiError::InvalidAccountId; + } + }; + + unsafe { + (*out_account_id).data = *account_id.value(); + } + + WalletFfiError::Success +} diff --git a/wallet-ffi/src/lib.rs b/wallet-ffi/src/lib.rs new file mode 100644 index 00000000..e7f2ce98 --- /dev/null +++ b/wallet-ffi/src/lib.rs @@ -0,0 +1,63 @@ +//! NSSA Wallet FFI Library +//! +//! This crate provides C-compatible bindings for the NSSA wallet functionality. +//! +//! # Usage +//! +//! 1. Initialize the runtime with `wallet_ffi_init_runtime()` +//! 2. Create or open a wallet with `wallet_ffi_create_new()` or `wallet_ffi_open()` +//! 3. Use the wallet functions to manage accounts and transfers +//! 4. Destroy the wallet with `wallet_ffi_destroy()` when done +//! +//! # Thread Safety +//! +//! All functions are thread-safe. The wallet handle uses internal locking +//! to ensure safe concurrent access. +//! +//! # Memory Management +//! +//! - Functions returning pointers allocate memory that must be freed +//! - Use the corresponding `wallet_ffi_free_*` function to free memory +//! - Never free memory returned by FFI using standard C `free()` + +pub mod account; +pub mod error; +pub mod keys; +pub mod sync; +pub mod transfer; +pub mod types; +pub mod wallet; + +use std::sync::OnceLock; + +// Re-export public types for cbindgen +pub use error::WalletFfiError as FfiError; +use tokio::runtime::Handle; +pub use types::*; + +use crate::error::{print_error, WalletFfiError}; + +static TOKIO_RUNTIME: OnceLock = OnceLock::new(); + +/// Get a reference to the global runtime. +pub(crate) fn get_runtime() -> Result<&'static Handle, WalletFfiError> { + let runtime = TOKIO_RUNTIME.get_or_init(|| { + match tokio::runtime::Builder::new_multi_thread() + .enable_all() + .build() + { + Ok(runtime) => runtime, + Err(e) => { + print_error(format!("{e}")); + panic!("Error initializing tokio runtime"); + } + } + }); + Ok(runtime.handle()) +} + +/// Run an async future on the global runtime, blocking until completion. +pub(crate) fn block_on(future: F) -> Result { + let runtime = get_runtime()?; + Ok(runtime.block_on(future)) +} diff --git a/wallet-ffi/src/sync.rs b/wallet-ffi/src/sync.rs new file mode 100644 index 00000000..3979f935 --- /dev/null +++ b/wallet-ffi/src/sync.rs @@ -0,0 +1,151 @@ +//! Block synchronization functions. 
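+//!
+//! A typical flow (illustrative sketch, assuming a valid `handle`): query the
+//! sequencer height with `wallet_ffi_get_current_block_height`, then sync the
+//! wallet up to that height with `wallet_ffi_sync_to_block`.
+//!
+//! ```ignore
+//! let mut height = 0u64;
+//! if unsafe { wallet_ffi_get_current_block_height(handle, &mut height) } == WalletFfiError::Success {
+//!     let _ = unsafe { wallet_ffi_sync_to_block(handle, height) };
+//! }
+//! ```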
+ +use crate::{ + block_on, + error::{print_error, WalletFfiError}, + types::WalletHandle, + wallet::get_wallet, +}; + +/// Synchronize private accounts to a specific block. +/// +/// This scans the blockchain from the last synced block to the specified block, +/// updating private account balances based on any relevant transactions. +/// +/// # Parameters +/// - `handle`: Valid wallet handle +/// - `block_id`: Target block number to sync to +/// +/// # Returns +/// - `Success` if synchronization completed +/// - `SyncError` if synchronization failed +/// - Error code on other failures +/// +/// # Note +/// This operation can take a while for large block ranges. The wallet +/// internally uses a progress bar which may output to stdout. +/// +/// # Safety +/// - `handle` must be a valid wallet handle from `wallet_ffi_create_new` or `wallet_ffi_open` +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_sync_to_block( + handle: *mut WalletHandle, + block_id: u64, +) -> WalletFfiError { + let wrapper = match get_wallet(handle) { + Ok(w) => w, + Err(e) => return e, + }; + + let mut wallet = match wrapper.core.lock() { + Ok(w) => w, + Err(e) => { + print_error(format!("Failed to lock wallet: {}", e)); + return WalletFfiError::InternalError; + } + }; + + match block_on(wallet.sync_to_block(block_id)) { + Ok(Ok(())) => WalletFfiError::Success, + Ok(Err(e)) => { + print_error(format!("Sync failed: {}", e)); + WalletFfiError::SyncError + } + Err(e) => e, + } +} + +/// Get the last synced block number. +/// +/// # Parameters +/// - `handle`: Valid wallet handle +/// - `out_block_id`: Output pointer for the block number +/// +/// # Returns +/// - `Success` on success +/// - Error code on failure +/// +/// # Safety +/// - `handle` must be a valid wallet handle from `wallet_ffi_create_new` or `wallet_ffi_open` +/// - `out_block_id` must be a valid pointer to a `u64` +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_get_last_synced_block( + handle: *mut WalletHandle, + out_block_id: *mut u64, +) -> WalletFfiError { + let wrapper = match get_wallet(handle) { + Ok(w) => w, + Err(e) => return e, + }; + + if out_block_id.is_null() { + print_error("Null output pointer"); + return WalletFfiError::NullPointer; + } + + let wallet = match wrapper.core.lock() { + Ok(w) => w, + Err(e) => { + print_error(format!("Failed to lock wallet: {}", e)); + return WalletFfiError::InternalError; + } + }; + + unsafe { + *out_block_id = wallet.last_synced_block; + } + + WalletFfiError::Success +} + +/// Get the current block height from the sequencer. 
+/// +/// # Parameters +/// - `handle`: Valid wallet handle +/// - `out_block_height`: Output pointer for the current block height +/// +/// # Returns +/// - `Success` on success +/// - `NetworkError` if the sequencer is unreachable +/// - Error code on other failures +/// +/// # Safety +/// - `handle` must be a valid wallet handle from `wallet_ffi_create_new` or `wallet_ffi_open` +/// - `out_block_height` must be a valid pointer to a `u64` +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_get_current_block_height( + handle: *mut WalletHandle, + out_block_height: *mut u64, +) -> WalletFfiError { + let wrapper = match get_wallet(handle) { + Ok(w) => w, + Err(e) => return e, + }; + + if out_block_height.is_null() { + print_error("Null output pointer"); + return WalletFfiError::NullPointer; + } + + let wallet = match wrapper.core.lock() { + Ok(w) => w, + Err(e) => { + print_error(format!("Failed to lock wallet: {}", e)); + return WalletFfiError::InternalError; + } + }; + + match block_on(wallet.sequencer_client.get_last_block()) { + Ok(Ok(response)) => { + unsafe { + *out_block_height = response.last_block; + } + WalletFfiError::Success + } + Ok(Err(e)) => { + print_error(format!("Failed to get block height: {:?}", e)); + WalletFfiError::NetworkError + } + Err(e) => e, + } +} diff --git a/wallet-ffi/src/transfer.rs b/wallet-ffi/src/transfer.rs new file mode 100644 index 00000000..055f0c32 --- /dev/null +++ b/wallet-ffi/src/transfer.rs @@ -0,0 +1,199 @@ +//! Token transfer functions. + +use std::{ffi::CString, ptr}; + +use common::error::ExecutionFailureKind; +use nssa::AccountId; +use wallet::program_facades::native_token_transfer::NativeTokenTransfer; + +use crate::{ + block_on, + error::{print_error, WalletFfiError}, + types::{FfiBytes32, FfiTransferResult, WalletHandle}, + wallet::get_wallet, +}; + +/// Send a public token transfer. +/// +/// Transfers tokens from one public account to another on the network. +/// +/// # Parameters +/// - `handle`: Valid wallet handle +/// - `from`: Source account ID (must be owned by this wallet) +/// - `to`: Destination account ID +/// - `amount`: Amount to transfer as little-endian [u8; 16] +/// - `out_result`: Output pointer for transfer result +/// +/// # Returns +/// - `Success` if the transfer was submitted successfully +/// - `InsufficientFunds` if the source account doesn't have enough balance +/// - `KeyNotFound` if the source account's signing key is not in this wallet +/// - Error code on other failures +/// +/// # Memory +/// The result must be freed with `wallet_ffi_free_transfer_result()`. 
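+///
+/// # Example
+///
+/// A minimal sketch; `handle`, a funded `from` account and a `to` account are
+/// assumed to exist:
+///
+/// ```ignore
+/// let amount = 100u128.to_le_bytes();
+/// let mut result = FfiTransferResult::default();
+/// let err = unsafe { wallet_ffi_transfer_public(handle, &from, &to, &amount, &mut result) };
+/// if err == WalletFfiError::Success {
+///     // result.tx_hash is a NUL-terminated string with the transaction hash.
+///     unsafe { wallet_ffi_free_transfer_result(&mut result) };
+/// }
+/// ```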
+/// +/// # Safety +/// - `handle` must be a valid wallet handle from `wallet_ffi_create_new` or `wallet_ffi_open` +/// - `from` must be a valid pointer to a `FfiBytes32` struct +/// - `to` must be a valid pointer to a `FfiBytes32` struct +/// - `amount` must be a valid pointer to a `[u8; 16]` array +/// - `out_result` must be a valid pointer to a `FfiTransferResult` struct +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_transfer_public( + handle: *mut WalletHandle, + from: *const FfiBytes32, + to: *const FfiBytes32, + amount: *const [u8; 16], + out_result: *mut FfiTransferResult, +) -> WalletFfiError { + let wrapper = match get_wallet(handle) { + Ok(w) => w, + Err(e) => return e, + }; + + if from.is_null() || to.is_null() || amount.is_null() || out_result.is_null() { + print_error("Null pointer argument"); + return WalletFfiError::NullPointer; + } + + let wallet = match wrapper.core.lock() { + Ok(w) => w, + Err(e) => { + print_error(format!("Failed to lock wallet: {}", e)); + return WalletFfiError::InternalError; + } + }; + + let from_id = AccountId::new(unsafe { (*from).data }); + let to_id = AccountId::new(unsafe { (*to).data }); + let amount = u128::from_le_bytes(unsafe { *amount }); + + let transfer = NativeTokenTransfer(&wallet); + + match block_on(transfer.send_public_transfer(from_id, to_id, amount)) { + Ok(Ok(response)) => { + let tx_hash = CString::new(response.tx_hash) + .map(|s| s.into_raw()) + .unwrap_or(ptr::null_mut()); + + unsafe { + (*out_result).tx_hash = tx_hash; + (*out_result).success = true; + } + WalletFfiError::Success + } + Ok(Err(e)) => { + print_error(format!("Transfer failed: {:?}", e)); + unsafe { + (*out_result).tx_hash = ptr::null_mut(); + (*out_result).success = false; + } + match e { + ExecutionFailureKind::InsufficientFundsError => WalletFfiError::InsufficientFunds, + ExecutionFailureKind::KeyNotFoundError => WalletFfiError::KeyNotFound, + ExecutionFailureKind::SequencerError => WalletFfiError::NetworkError, + ExecutionFailureKind::SequencerClientError(_) => WalletFfiError::NetworkError, + _ => WalletFfiError::InternalError, + } + } + Err(e) => e, + } +} + +/// Register a public account on the network. +/// +/// This initializes a public account on the blockchain. The account must be +/// owned by this wallet. +/// +/// # Parameters +/// - `handle`: Valid wallet handle +/// - `account_id`: Account ID to register +/// - `out_result`: Output pointer for registration result +/// +/// # Returns +/// - `Success` if the registration was submitted successfully +/// - Error code on failure +/// +/// # Memory +/// The result must be freed with `wallet_ffi_free_transfer_result()`. 
+/// +/// # Safety +/// - `handle` must be a valid wallet handle from `wallet_ffi_create_new` or `wallet_ffi_open` +/// - `account_id` must be a valid pointer to a `FfiBytes32` struct +/// - `out_result` must be a valid pointer to a `FfiTransferResult` struct +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_register_public_account( + handle: *mut WalletHandle, + account_id: *const FfiBytes32, + out_result: *mut FfiTransferResult, +) -> WalletFfiError { + let wrapper = match get_wallet(handle) { + Ok(w) => w, + Err(e) => return e, + }; + + if account_id.is_null() || out_result.is_null() { + print_error("Null pointer argument"); + return WalletFfiError::NullPointer; + } + + let wallet = match wrapper.core.lock() { + Ok(w) => w, + Err(e) => { + print_error(format!("Failed to lock wallet: {}", e)); + return WalletFfiError::InternalError; + } + }; + + let account_id = AccountId::new(unsafe { (*account_id).data }); + + let transfer = NativeTokenTransfer(&wallet); + + match block_on(transfer.register_account(account_id)) { + Ok(Ok(response)) => { + let tx_hash = CString::new(response.tx_hash) + .map(|s| s.into_raw()) + .unwrap_or(ptr::null_mut()); + + unsafe { + (*out_result).tx_hash = tx_hash; + (*out_result).success = true; + } + WalletFfiError::Success + } + Ok(Err(e)) => { + print_error(format!("Registration failed: {:?}", e)); + unsafe { + (*out_result).tx_hash = ptr::null_mut(); + (*out_result).success = false; + } + match e { + ExecutionFailureKind::KeyNotFoundError => WalletFfiError::KeyNotFound, + ExecutionFailureKind::SequencerError => WalletFfiError::NetworkError, + ExecutionFailureKind::SequencerClientError(_) => WalletFfiError::NetworkError, + _ => WalletFfiError::InternalError, + } + } + Err(e) => e, + } +} + +/// Free a transfer result returned by `wallet_ffi_transfer_public` or +/// `wallet_ffi_register_public_account`. +/// +/// # Safety +/// The result must be either null or a valid result from a transfer function. +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_free_transfer_result(result: *mut FfiTransferResult) { + if result.is_null() { + return; + } + + unsafe { + let result = &*result; + if !result.tx_hash.is_null() { + drop(CString::from_raw(result.tx_hash)); + } + } +} diff --git a/wallet-ffi/src/types.rs b/wallet-ffi/src/types.rs new file mode 100644 index 00000000..a1d70181 --- /dev/null +++ b/wallet-ffi/src/types.rs @@ -0,0 +1,262 @@ +//! C-compatible type definitions for the FFI layer. + +use core::slice; +use std::{ffi::c_char, ptr}; + +use nssa::{Account, Data}; +use nssa_core::encryption::shared_key_derivation::Secp256k1Point; + +use crate::error::WalletFfiError; + +/// Opaque pointer to the Wallet instance. +/// +/// This type is never instantiated directly - it's used as an opaque handle +/// to hide the internal wallet structure from C code. +#[repr(C)] +pub struct WalletHandle { + _private: [u8; 0], +} + +/// 32-byte array type for AccountId, keys, hashes, etc. +#[repr(C)] +#[derive(Clone, Copy, Default)] +pub struct FfiBytes32 { + pub data: [u8; 32], +} + +/// Program ID - 8 u32 values (32 bytes total). +#[repr(C)] +#[derive(Clone, Copy, Default)] +pub struct FfiProgramId { + pub data: [u32; 8], +} + +/// U128 - 16 bytes little endian +#[repr(C)] +#[derive(Clone, Copy, Default)] +pub struct FfiU128 { + pub data: [u8; 16], +} + +/// Account data structure - C-compatible version of nssa Account. +/// +/// Note: `balance` and `nonce` are u128 values represented as little-endian +/// byte arrays since C doesn't have native u128 support. 
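+///
+/// For example, a balance of 1_000 crosses the boundary as `1_000u128.to_le_bytes()`;
+/// on the Rust side the `FfiU128` conversions below recover the value (illustrative
+/// round-trip sketch):
+///
+/// ```ignore
+/// let ffi: FfiU128 = 1_000u128.into();
+/// assert_eq!(u128::from(ffi), 1_000u128);
+/// ```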
+#[repr(C)] +pub struct FfiAccount { + pub program_owner: FfiProgramId, + /// Balance as little-endian [u8; 16] + pub balance: FfiU128, + /// Pointer to account data bytes + pub data: *const u8, + /// Length of account data + pub data_len: usize, + /// Nonce as little-endian [u8; 16] + pub nonce: FfiU128, +} + +impl Default for FfiAccount { + fn default() -> Self { + Self { + program_owner: FfiProgramId::default(), + balance: FfiU128::default(), + data: std::ptr::null(), + data_len: 0, + nonce: FfiU128::default(), + } + } +} + +/// Public keys for a private account (safe to expose). +#[repr(C)] +pub struct FfiPrivateAccountKeys { + /// Nullifier public key (32 bytes) + pub nullifier_public_key: FfiBytes32, + /// Incoming viewing public key (compressed secp256k1 point) + pub incoming_viewing_public_key: *const u8, + /// Length of incoming viewing public key (typically 33 bytes) + pub incoming_viewing_public_key_len: usize, +} + +impl Default for FfiPrivateAccountKeys { + fn default() -> Self { + Self { + nullifier_public_key: FfiBytes32::default(), + incoming_viewing_public_key: std::ptr::null(), + incoming_viewing_public_key_len: 0, + } + } +} + +/// Public key info for a public account. +#[repr(C)] +#[derive(Clone, Copy, Default)] +pub struct FfiPublicAccountKey { + pub public_key: FfiBytes32, +} + +/// Single entry in the account list. +#[repr(C)] +#[derive(Clone, Copy)] +pub struct FfiAccountListEntry { + pub account_id: FfiBytes32, + pub is_public: bool, +} + +/// List of accounts returned by wallet_ffi_list_accounts. +#[repr(C)] +pub struct FfiAccountList { + pub entries: *mut FfiAccountListEntry, + pub count: usize, +} + +impl Default for FfiAccountList { + fn default() -> Self { + Self { + entries: std::ptr::null_mut(), + count: 0, + } + } +} + +/// Result of a transfer operation. +#[repr(C)] +pub struct FfiTransferResult { + /// Transaction hash (null-terminated string, or null on failure) + pub tx_hash: *mut c_char, + /// Whether the transfer succeeded + pub success: bool, +} + +impl Default for FfiTransferResult { + fn default() -> Self { + Self { + tx_hash: std::ptr::null_mut(), + success: false, + } + } +} + +// Helper functions to convert between Rust and FFI types + +impl FfiBytes32 { + /// Create from a 32-byte array. + pub fn from_bytes(bytes: [u8; 32]) -> Self { + Self { data: bytes } + } + + /// Create from an AccountId. 
+ pub fn from_account_id(id: &nssa::AccountId) -> Self { + Self { data: *id.value() } + } +} + +impl FfiPrivateAccountKeys { + pub fn npk(&self) -> nssa_core::NullifierPublicKey { + nssa_core::NullifierPublicKey(self.nullifier_public_key.data) + } + + pub fn ivk(&self) -> Result { + if self.incoming_viewing_public_key_len == 33 { + let slice = unsafe { + slice::from_raw_parts( + self.incoming_viewing_public_key, + self.incoming_viewing_public_key_len, + ) + }; + Ok(Secp256k1Point(slice.to_vec())) + } else { + Err(WalletFfiError::InvalidKeyValue) + } + } +} + +impl From for FfiU128 { + fn from(value: u128) -> Self { + Self { + data: value.to_le_bytes(), + } + } +} + +impl From for u128 { + fn from(value: FfiU128) -> Self { + u128::from_le_bytes(value.data) + } +} + +impl From<&nssa::AccountId> for FfiBytes32 { + fn from(id: &nssa::AccountId) -> Self { + Self::from_account_id(id) + } +} + +impl From for nssa::AccountId { + fn from(bytes: FfiBytes32) -> Self { + nssa::AccountId::new(bytes.data) + } +} + +impl From for FfiAccount { + fn from(value: nssa::Account) -> Self { + // Convert account data to FFI type + let data_vec: Vec = value.data.into(); + let data_len = data_vec.len(); + let data = if data_len > 0 { + let data_boxed = data_vec.into_boxed_slice(); + Box::into_raw(data_boxed) as *const u8 + } else { + ptr::null() + }; + + let program_owner = FfiProgramId { + data: value.program_owner, + }; + FfiAccount { + program_owner, + balance: value.balance.into(), + data, + data_len, + nonce: value.nonce.into(), + } + } +} + +impl TryFrom<&FfiAccount> for nssa::Account { + type Error = WalletFfiError; + + fn try_from(value: &FfiAccount) -> Result { + let data = if value.data_len > 0 { + unsafe { + let slice = slice::from_raw_parts(value.data, value.data_len); + Data::try_from(slice.to_vec()).map_err(|_| WalletFfiError::InvalidTypeConversion)? + } + } else { + Data::default() + }; + Ok(Account { + program_owner: value.program_owner.data, + balance: value.balance.into(), + data, + nonce: value.nonce.into(), + }) + } +} + +impl From for FfiPublicAccountKey { + fn from(value: nssa::PublicKey) -> Self { + Self { + public_key: FfiBytes32::from_bytes(*value.value()), + } + } +} + +impl TryFrom<&FfiPublicAccountKey> for nssa::PublicKey { + type Error = WalletFfiError; + + fn try_from(value: &FfiPublicAccountKey) -> Result { + let public_key = nssa::PublicKey::try_new(value.public_key.data) + .map_err(|_| WalletFfiError::InvalidTypeConversion)?; + Ok(public_key) + } +} diff --git a/wallet-ffi/src/wallet.rs b/wallet-ffi/src/wallet.rs new file mode 100644 index 00000000..6f817f8e --- /dev/null +++ b/wallet-ffi/src/wallet.rs @@ -0,0 +1,279 @@ +//! Wallet lifecycle management functions. + +use std::{ + ffi::{c_char, CStr}, + path::PathBuf, + ptr, + sync::Mutex, +}; + +use wallet::WalletCore; + +use crate::{ + block_on, + error::{print_error, WalletFfiError}, + types::WalletHandle, +}; + +/// Internal wrapper around WalletCore with mutex for thread safety. +pub(crate) struct WalletWrapper { + pub core: Mutex, +} + +/// Helper to get the wallet wrapper from an opaque handle. +pub(crate) fn get_wallet( + handle: *mut WalletHandle, +) -> Result<&'static WalletWrapper, WalletFfiError> { + if handle.is_null() { + print_error("Null wallet handle"); + return Err(WalletFfiError::NullPointer); + } + Ok(unsafe { &*(handle as *mut WalletWrapper) }) +} + +/// Helper to get a mutable reference to the wallet wrapper. 
+#[allow(dead_code)]
+pub(crate) fn get_wallet_mut(
+    handle: *mut WalletHandle,
+) -> Result<&'static mut WalletWrapper, WalletFfiError> {
+    if handle.is_null() {
+        print_error("Null wallet handle");
+        return Err(WalletFfiError::NullPointer);
+    }
+    Ok(unsafe { &mut *(handle as *mut WalletWrapper) })
+}
+
+/// Helper to convert a C string to a Rust PathBuf.
+fn c_str_to_path(ptr: *const c_char, name: &str) -> Result<PathBuf, WalletFfiError> {
+    if ptr.is_null() {
+        print_error(format!("Null pointer for {}", name));
+        return Err(WalletFfiError::NullPointer);
+    }
+
+    let c_str = unsafe { CStr::from_ptr(ptr) };
+    match c_str.to_str() {
+        Ok(s) => Ok(PathBuf::from(s)),
+        Err(e) => {
+            print_error(format!("Invalid UTF-8 in {}: {}", name, e));
+            Err(WalletFfiError::InvalidUtf8)
+        }
+    }
+}
+
+/// Helper to convert a C string to a Rust String.
+fn c_str_to_string(ptr: *const c_char, name: &str) -> Result<String, WalletFfiError> {
+    if ptr.is_null() {
+        print_error(format!("Null pointer for {}", name));
+        return Err(WalletFfiError::NullPointer);
+    }
+
+    let c_str = unsafe { CStr::from_ptr(ptr) };
+    match c_str.to_str() {
+        Ok(s) => Ok(s.to_string()),
+        Err(e) => {
+            print_error(format!("Invalid UTF-8 in {}: {}", name, e));
+            Err(WalletFfiError::InvalidUtf8)
+        }
+    }
+}
+
+/// Create a new wallet with fresh storage.
+///
+/// This initializes a new wallet with a new seed derived from the password.
+/// Use this for first-time wallet creation.
+///
+/// # Parameters
+/// - `config_path`: Path to the wallet configuration file (JSON)
+/// - `storage_path`: Path where wallet data will be stored
+/// - `password`: Password for encrypting the wallet seed
+///
+/// # Returns
+/// - Opaque wallet handle on success
+/// - Null pointer on error (call `wallet_ffi_get_last_error()` for details)
+///
+/// # Safety
+/// All string parameters must be valid null-terminated UTF-8 strings.
+#[no_mangle]
+pub unsafe extern "C" fn wallet_ffi_create_new(
+    config_path: *const c_char,
+    storage_path: *const c_char,
+    password: *const c_char,
+) -> *mut WalletHandle {
+    let config_path = match c_str_to_path(config_path, "config_path") {
+        Ok(p) => p,
+        Err(_) => return ptr::null_mut(),
+    };
+
+    let storage_path = match c_str_to_path(storage_path, "storage_path") {
+        Ok(p) => p,
+        Err(_) => return ptr::null_mut(),
+    };
+
+    let password = match c_str_to_string(password, "password") {
+        Ok(s) => s,
+        Err(_) => return ptr::null_mut(),
+    };
+
+    match WalletCore::new_init_storage(config_path, storage_path, None, password) {
+        Ok(core) => {
+            let wrapper = Box::new(WalletWrapper {
+                core: Mutex::new(core),
+            });
+            Box::into_raw(wrapper) as *mut WalletHandle
+        }
+        Err(e) => {
+            print_error(format!("Failed to create wallet: {}", e));
+            ptr::null_mut()
+        }
+    }
+}
+
+/// Open an existing wallet from storage.
+///
+/// This loads a wallet that was previously created with `wallet_ffi_create_new()`.
+///
+/// # Parameters
+/// - `config_path`: Path to the wallet configuration file (JSON)
+/// - `storage_path`: Path where wallet data is stored
+///
+/// # Returns
+/// - Opaque wallet handle on success
+/// - Null pointer on error (call `wallet_ffi_get_last_error()` for details)
+///
+/// # Safety
+/// All string parameters must be valid null-terminated UTF-8 strings.
+#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_open( + config_path: *const c_char, + storage_path: *const c_char, +) -> *mut WalletHandle { + let config_path = match c_str_to_path(config_path, "config_path") { + Ok(p) => p, + Err(_) => return ptr::null_mut(), + }; + + let storage_path = match c_str_to_path(storage_path, "storage_path") { + Ok(p) => p, + Err(_) => return ptr::null_mut(), + }; + + match WalletCore::new_update_chain(config_path, storage_path, None) { + Ok(core) => { + let wrapper = Box::new(WalletWrapper { + core: Mutex::new(core), + }); + Box::into_raw(wrapper) as *mut WalletHandle + } + Err(e) => { + print_error(format!("Failed to open wallet: {}", e)); + ptr::null_mut() + } + } +} + +/// Destroy a wallet handle and free its resources. +/// +/// After calling this function, the handle is invalid and must not be used. +/// +/// # Safety +/// - The handle must be either null or a valid handle from `wallet_ffi_create_new()` or +/// `wallet_ffi_open()`. +/// - The handle must not be used after this call. +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_destroy(handle: *mut WalletHandle) { + if !handle.is_null() { + unsafe { + drop(Box::from_raw(handle as *mut WalletWrapper)); + } + } +} + +/// Save wallet state to persistent storage. +/// +/// This should be called periodically or after important operations to ensure +/// wallet data is persisted to disk. +/// +/// # Parameters +/// - `handle`: Valid wallet handle +/// +/// # Returns +/// - `Success` on successful save +/// - Error code on failure +/// +/// # Safety +/// - `handle` must be a valid wallet handle from `wallet_ffi_create_new` or `wallet_ffi_open` +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_save(handle: *mut WalletHandle) -> WalletFfiError { + let wrapper = match get_wallet(handle) { + Ok(w) => w, + Err(e) => return e, + }; + + let wallet = match wrapper.core.lock() { + Ok(w) => w, + Err(e) => { + print_error(format!("Failed to lock wallet: {}", e)); + return WalletFfiError::InternalError; + } + }; + + match block_on(wallet.store_persistent_data()) { + Ok(Ok(())) => WalletFfiError::Success, + Ok(Err(e)) => { + print_error(format!("Failed to save wallet: {}", e)); + WalletFfiError::StorageError + } + Err(e) => e, + } +} + +/// Get the sequencer address from the wallet configuration. +/// +/// # Parameters +/// - `handle`: Valid wallet handle +/// +/// # Returns +/// - Pointer to null-terminated string on success (caller must free with +/// `wallet_ffi_free_string()`) +/// - Null pointer on error +/// +/// # Safety +/// - `handle` must be a valid wallet handle from `wallet_ffi_create_new` or `wallet_ffi_open` +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_get_sequencer_addr(handle: *mut WalletHandle) -> *mut c_char { + let wrapper = match get_wallet(handle) { + Ok(w) => w, + Err(_) => return ptr::null_mut(), + }; + + let wallet = match wrapper.core.lock() { + Ok(w) => w, + Err(e) => { + print_error(format!("Failed to lock wallet: {}", e)); + return ptr::null_mut(); + } + }; + + let addr = wallet.config().sequencer_addr.clone(); + + match std::ffi::CString::new(addr) { + Ok(s) => s.into_raw(), + Err(e) => { + print_error(format!("Invalid sequencer address: {}", e)); + ptr::null_mut() + } + } +} + +/// Free a string returned by wallet FFI functions. +/// +/// # Safety +/// The pointer must be either null or a valid string returned by an FFI function. 
+#[no_mangle]
+pub unsafe extern "C" fn wallet_ffi_free_string(ptr: *mut c_char) {
+    if !ptr.is_null() {
+        unsafe {
+            drop(std::ffi::CString::from_raw(ptr));
+        }
+    }
+}
diff --git a/wallet/Cargo.toml b/wallet/Cargo.toml
index bef25007..ca8548bc 100644
--- a/wallet/Cargo.toml
+++ b/wallet/Cargo.toml
@@ -2,12 +2,14 @@
 name = "wallet"
 version = "0.1.0"
 edition = "2024"
+license = { workspace = true }
 
 [dependencies]
 nssa_core.workspace = true
 nssa.workspace = true
 common.workspace = true
 key_protocol.workspace = true
+token_core.workspace = true
 
 anyhow.workspace = true
 serde_json.workspace = true
@@ -29,3 +31,4 @@ risc0-zkvm.workspace = true
 async-stream = "0.3.6"
 indicatif = { version = "0.18.3", features = ["improved_unicode"] }
 optfield = "0.4.0"
+url.workspace = true
diff --git a/wallet/src/chain_storage.rs b/wallet/src/chain_storage.rs
index 0eba1816..3e967e9c 100644
--- a/wallet/src/chain_storage.rs
+++ b/wallet/src/chain_storage.rs
@@ -11,17 +11,19 @@ use key_protocol::{
 use log::debug;
 use nssa::program::Program;
 
-use crate::config::{InitialAccountData, PersistentAccountData, WalletConfig};
+use crate::config::{InitialAccountData, Label, PersistentAccountData, WalletConfig};
 
 pub struct WalletChainStore {
     pub user_data: NSSAUserData,
     pub wallet_config: WalletConfig,
+    pub labels: HashMap<String, Label>,
 }
 
 impl WalletChainStore {
     pub fn new(
         config: WalletConfig,
         persistent_accounts: Vec<PersistentAccountData>,
+        labels: HashMap<String, Label>,
     ) -> Result<Self> {
         if persistent_accounts.is_empty() {
             anyhow::bail!("Roots not found; please run setup beforehand");
@@ -85,6 +87,7 @@ impl WalletChainStore {
                 private_tree,
             )?,
             wallet_config: config,
+            labels,
         })
     }
 
@@ -120,6 +123,7 @@ impl WalletChainStore {
                 private_tree,
             )?,
             wallet_config: config,
+            labels: HashMap::new(),
         })
     }
 
@@ -291,6 +295,6 @@ mod tests {
         let config = create_sample_wallet_config();
        let accs = create_sample_persistent_accounts();
 
-        let _ = WalletChainStore::new(config.clone(), accs).unwrap();
+        let _ = WalletChainStore::new(config.clone(), accs, HashMap::new()).unwrap();
     }
 }
diff --git a/wallet/src/cli/account.rs b/wallet/src/cli/account.rs
index 21e59366..9407c8c5 100644
--- a/wallet/src/cli/account.rs
+++ b/wallet/src/cli/account.rs
@@ -4,11 +4,12 @@ use clap::Subcommand;
 use itertools::Itertools as _;
 use key_protocol::key_management::key_tree::chain_index::ChainIndex;
 use nssa::{Account, PublicKey, program::Program};
-use serde::Serialize;
+use token_core::{TokenDefinition, TokenHolding};
 
 use crate::{
-    TokenDefinition, TokenHolding, WalletCore,
+    WalletCore,
     cli::{SubcommandReturnValue, WalletSubcommand},
+    config::Label,
     helperfunctions::{AccountPrivacyKind, HumanReadableAccount, parse_addr_with_privacy_prefix},
 };
 
@@ -39,6 +40,15 @@ pub enum AccountSubcommand {
         #[arg(short, long)]
         long: bool,
     },
+    /// Set a label for an account
+    Label {
+        /// Valid 32 byte base58 string with privacy prefix
+        #[arg(short, long)]
+        account_id: String,
+        /// The label to assign to the account
+        #[arg(short, long)]
+        label: String,
+    },
 }
 
 /// Represents generic register CLI subcommand
@@ -111,83 +121,26 @@ impl WalletSubcommand for NewSubcommand {
     }
 }
 
-#[derive(Debug, Serialize)]
-pub struct AuthenticatedTransferAccountView {
-    pub balance: u128,
-}
-
-impl From<nssa::Account> for AuthenticatedTransferAccountView {
-    fn from(value: nssa::Account) -> Self {
-        Self {
-            balance: value.balance,
-        }
-    }
-}
-
-#[derive(Debug, Serialize)]
-pub struct TokedDefinitionAccountView {
-    pub account_type: String,
-    pub name: String,
-    pub total_supply: u128,
-}
-
-impl From<TokenDefinition> for TokedDefinitionAccountView {
-    fn from(value:
TokenDefinition) -> Self { - Self { - account_type: "Token definition".to_string(), - name: { - // Assuming, that name does not have UTF-8 NULL and all zeroes are padding. - let name_trimmed: Vec<_> = - value.name.into_iter().take_while(|ch| *ch != 0).collect(); - String::from_utf8(name_trimmed).unwrap_or(hex::encode(value.name)) - }, - total_supply: value.total_supply, - } - } -} - -#[derive(Debug, Serialize)] -pub struct TokedHoldingAccountView { - pub account_type: String, - pub definition_id: String, - pub balance: u128, -} - -impl From for TokedHoldingAccountView { - fn from(value: TokenHolding) -> Self { - Self { - account_type: "Token holding".to_string(), - definition_id: value.definition_id.to_string(), - balance: value.balance, - } - } -} - /// Formats account details for display, returning (description, json_view) fn format_account_details(account: &Account) -> (String, String) { let auth_tr_prog_id = Program::authenticated_transfer_program().id(); let token_prog_id = Program::token().id(); match &account.program_owner { - _ if account.program_owner == auth_tr_prog_id => { - let acc_view: AuthenticatedTransferAccountView = account.clone().into(); - ( - "Account owned by authenticated transfer program".to_string(), - serde_json::to_string(&acc_view).unwrap(), - ) - } - _ if account.program_owner == token_prog_id => { - if let Some(token_def) = TokenDefinition::parse(&account.data) { - let acc_view: TokedDefinitionAccountView = token_def.into(); + o if *o == auth_tr_prog_id => ( + "Account owned by authenticated transfer program".to_string(), + serde_json::to_string(&account).unwrap(), + ), + o if *o == token_prog_id => { + if let Ok(token_def) = TokenDefinition::try_from(&account.data) { ( "Definition account owned by token program".to_string(), - serde_json::to_string(&acc_view).unwrap(), + serde_json::to_string(&token_def).unwrap(), ) - } else if let Some(token_hold) = TokenHolding::parse(&account.data) { - let acc_view: TokedHoldingAccountView = token_hold.into(); + } else if let Ok(token_hold) = TokenHolding::try_from(&account.data) { ( "Holding account owned by token program".to_string(), - serde_json::to_string(&acc_view).unwrap(), + serde_json::to_string(&token_hold).unwrap(), ) } else { let account_hr: HumanReadableAccount = account.clone().into(); @@ -218,9 +171,13 @@ impl WalletSubcommand for AccountSubcommand { keys, account_id, } => { - let (account_id, addr_kind) = parse_addr_with_privacy_prefix(&account_id)?; + let (account_id_str, addr_kind) = parse_addr_with_privacy_prefix(&account_id)?; - let account_id = account_id.parse()?; + let account_id: nssa::AccountId = account_id_str.parse()?; + + if let Some(label) = wallet_core.storage.labels.get(&account_id_str) { + println!("Label: {label}"); + } let account = match addr_kind { AccountPrivacyKind::Public => { @@ -316,32 +273,35 @@ impl WalletSubcommand for AccountSubcommand { } AccountSubcommand::List { long } => { let user_data = &wallet_core.storage.user_data; + let labels = &wallet_core.storage.labels; + + let format_with_label = |prefix: &str, id: &nssa::AccountId| { + let id_str = id.to_string(); + if let Some(label) = labels.get(&id_str) { + format!("{prefix} [{label}]") + } else { + prefix.to_string() + } + }; if !long { let accounts = user_data .default_pub_account_signing_keys .keys() - .map(|id| format!("Preconfigured Public/{id}")) - .chain( - user_data - .default_user_private_accounts - .keys() - .map(|id| format!("Preconfigured Private/{id}")), - ) - .chain( - user_data - .public_key_tree - 
.account_id_map - .iter() - .map(|(id, chain_index)| format!("{chain_index} Public/{id}")), - ) - .chain( - user_data - .private_key_tree - .account_id_map - .iter() - .map(|(id, chain_index)| format!("{chain_index} Private/{id}")), - ) + .map(|id| format_with_label(&format!("Preconfigured Public/{id}"), id)) + .chain(user_data.default_user_private_accounts.keys().map(|id| { + format_with_label(&format!("Preconfigured Private/{id}"), id) + })) + .chain(user_data.public_key_tree.account_id_map.iter().map( + |(id, chain_index)| { + format_with_label(&format!("{chain_index} Public/{id}"), id) + }, + )) + .chain(user_data.private_key_tree.account_id_map.iter().map( + |(id, chain_index)| { + format_with_label(&format!("{chain_index} Private/{id}"), id) + }, + )) .format("\n"); println!("{accounts}"); @@ -351,7 +311,10 @@ impl WalletSubcommand for AccountSubcommand { // Detailed listing with --long flag // Preconfigured public accounts for id in user_data.default_pub_account_signing_keys.keys() { - println!("Preconfigured Public/{id}"); + println!( + "{}", + format_with_label(&format!("Preconfigured Public/{id}"), id) + ); match wallet_core.get_account_public(*id).await { Ok(account) if account != Account::default() => { let (description, json_view) = format_account_details(&account); @@ -365,7 +328,10 @@ impl WalletSubcommand for AccountSubcommand { // Preconfigured private accounts for id in user_data.default_user_private_accounts.keys() { - println!("Preconfigured Private/{id}"); + println!( + "{}", + format_with_label(&format!("Preconfigured Private/{id}"), id) + ); match wallet_core.get_account_private(id) { Some(account) if account != Account::default() => { let (description, json_view) = format_account_details(&account); @@ -379,7 +345,10 @@ impl WalletSubcommand for AccountSubcommand { // Public key tree accounts for (id, chain_index) in user_data.public_key_tree.account_id_map.iter() { - println!("{chain_index} Public/{id}"); + println!( + "{}", + format_with_label(&format!("{chain_index} Public/{id}"), id) + ); match wallet_core.get_account_public(*id).await { Ok(account) if account != Account::default() => { let (description, json_view) = format_account_details(&account); @@ -393,7 +362,10 @@ impl WalletSubcommand for AccountSubcommand { // Private key tree accounts for (id, chain_index) in user_data.private_key_tree.account_id_map.iter() { - println!("{chain_index} Private/{id}"); + println!( + "{}", + format_with_label(&format!("{chain_index} Private/{id}"), id) + ); match wallet_core.get_account_private(id) { Some(account) if account != Account::default() => { let (description, json_view) = format_account_details(&account); @@ -405,57 +377,25 @@ impl WalletSubcommand for AccountSubcommand { } } + Ok(SubcommandReturnValue::Empty) + } + AccountSubcommand::Label { account_id, label } => { + let (account_id_str, _) = parse_addr_with_privacy_prefix(&account_id)?; + + let old_label = wallet_core + .storage + .labels + .insert(account_id_str.clone(), Label::new(label.clone())); + + wallet_core.store_persistent_data().await?; + + if let Some(old) = old_label { + eprintln!("Warning: overriding existing label '{old}'"); + } + println!("Label '{label}' set for account {account_id_str}"); + Ok(SubcommandReturnValue::Empty) } } } } - -#[cfg(test)] -mod tests { - use nssa::AccountId; - - use crate::cli::account::{TokedDefinitionAccountView, TokenDefinition}; - - #[test] - fn test_invalid_utf_8_name_of_token() { - let token_def = TokenDefinition { - account_type: 1, - name: [137, 12, 14, 3, 5, 
4], - total_supply: 100, - metadata_id: AccountId::new([0; 32]), - }; - - let token_def_view: TokedDefinitionAccountView = token_def.into(); - - assert_eq!(token_def_view.name, "890c0e030504"); - } - - #[test] - fn test_valid_utf_8_name_of_token_all_bytes() { - let token_def = TokenDefinition { - account_type: 1, - name: [240, 159, 146, 150, 66, 66], - total_supply: 100, - metadata_id: AccountId::new([0; 32]), - }; - - let token_def_view: TokedDefinitionAccountView = token_def.into(); - - assert_eq!(token_def_view.name, "💖BB"); - } - - #[test] - fn test_valid_utf_8_name_of_token_less_bytes() { - let token_def = TokenDefinition { - account_type: 1, - name: [78, 65, 77, 69, 0, 0], - total_supply: 100, - metadata_id: AccountId::new([0; 32]), - }; - - let token_def_view: TokedDefinitionAccountView = token_def.into(); - - assert_eq!(token_def_view.name, "NAME"); - } -} diff --git a/wallet/src/cli/programs/token.rs b/wallet/src/cli/programs/token.rs index b5ea1b34..c16c0c94 100644 --- a/wallet/src/cli/programs/token.rs +++ b/wallet/src/cli/programs/token.rs @@ -1258,14 +1258,6 @@ impl WalletSubcommand for CreateNewTokenProgramSubcommand { name, total_supply, } => { - let name = name.as_bytes(); - if name.len() > 6 { - // TODO: return error - panic!("Name length mismatch"); - } - let mut name_bytes = [0; 6]; - name_bytes[..name.len()].copy_from_slice(name); - let definition_account_id: AccountId = definition_account_id.parse().unwrap(); let supply_account_id: AccountId = supply_account_id.parse().unwrap(); @@ -1273,7 +1265,7 @@ impl WalletSubcommand for CreateNewTokenProgramSubcommand { .send_new_definition_private_owned_definiton_and_supply( definition_account_id, supply_account_id, - name_bytes, + name, total_supply, ) .await?; @@ -1307,14 +1299,6 @@ impl WalletSubcommand for CreateNewTokenProgramSubcommand { name, total_supply, } => { - let name = name.as_bytes(); - if name.len() > 6 { - // TODO: return error - panic!("Name length mismatch"); - } - let mut name_bytes = [0; 6]; - name_bytes[..name.len()].copy_from_slice(name); - let definition_account_id: AccountId = definition_account_id.parse().unwrap(); let supply_account_id: AccountId = supply_account_id.parse().unwrap(); @@ -1322,7 +1306,7 @@ impl WalletSubcommand for CreateNewTokenProgramSubcommand { .send_new_definition_private_owned_definiton( definition_account_id, supply_account_id, - name_bytes, + name, total_supply, ) .await?; @@ -1353,14 +1337,6 @@ impl WalletSubcommand for CreateNewTokenProgramSubcommand { name, total_supply, } => { - let name = name.as_bytes(); - if name.len() > 6 { - // TODO: return error - panic!("Name length mismatch"); - } - let mut name_bytes = [0; 6]; - name_bytes[..name.len()].copy_from_slice(name); - let definition_account_id: AccountId = definition_account_id.parse().unwrap(); let supply_account_id: AccountId = supply_account_id.parse().unwrap(); @@ -1368,7 +1344,7 @@ impl WalletSubcommand for CreateNewTokenProgramSubcommand { .send_new_definition_private_owned_supply( definition_account_id, supply_account_id, - name_bytes, + name, total_supply, ) .await?; @@ -1399,18 +1375,11 @@ impl WalletSubcommand for CreateNewTokenProgramSubcommand { name, total_supply, } => { - let name = name.as_bytes(); - if name.len() > 6 { - // TODO: return error - panic!(); - } - let mut name_bytes = [0; 6]; - name_bytes[..name.len()].copy_from_slice(name); Token(wallet_core) .send_new_definition( definition_account_id.parse().unwrap(), supply_account_id.parse().unwrap(), - name_bytes, + name, total_supply, ) .await?; diff 
--git a/wallet/src/config.rs b/wallet/src/config.rs
index 2ea88f0f..456d9344 100644
--- a/wallet/src/config.rs
+++ b/wallet/src/config.rs
@@ -1,10 +1,11 @@
 use std::{
+    collections::HashMap,
     io::{BufReader, Write as _},
     path::Path,
-    str::FromStr,
 };
 
 use anyhow::{Context as _, Result};
+use common::sequencer_client::BasicAuth;
 use key_protocol::key_management::{
     KeyChain,
     key_tree::{
@@ -14,49 +15,6 @@ use key_protocol::key_management::{
 use log::warn;
 use serde::{Deserialize, Serialize};
 
-#[derive(Debug, Clone, Serialize, Deserialize)]
-pub struct BasicAuth {
-    pub username: String,
-    pub password: Option<String>,
-}
-
-impl std::fmt::Display for BasicAuth {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(f, "{}", self.username)?;
-        if let Some(password) = &self.password {
-            write!(f, ":{password}")?;
-        }
-
-        Ok(())
-    }
-}
-
-impl FromStr for BasicAuth {
-    type Err = anyhow::Error;
-
-    fn from_str(s: &str) -> Result<Self, Self::Err> {
-        let parse = || {
-            let mut parts = s.splitn(2, ':');
-            let username = parts.next()?;
-            let password = parts.next().filter(|p| !p.is_empty());
-            if parts.next().is_some() {
-                return None;
-            }
-
-            Some((username, password))
-        };
-
-        let (username, password) = parse().ok_or_else(|| {
-            anyhow::anyhow!("Invalid auth format. Expected 'user' or 'user:password'")
-        })?;
-
-        Ok(Self {
-            username: username.to_string(),
-            password: password.map(|p| p.to_string()),
-        })
-    }
-}
-
 #[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct InitialAccountDataPublic {
     pub account_id: String,
@@ -105,10 +63,30 @@ pub enum PersistentAccountData {
     Preconfigured(InitialAccountData),
 }
 
+/// A human-readable label for an account.
+#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
+pub struct Label(String);
+
+impl Label {
+    pub fn new(label: String) -> Self {
+        Self(label)
+    }
+}
+
+impl std::fmt::Display for Label {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "{}", self.0)
+    }
+}
+
 #[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct PersistentStorage {
     pub accounts: Vec<PersistentAccountData>,
     pub last_synced_block: u64,
+    /// Account labels keyed by account ID string (e.g.,
+    /// "2rnKprXqWGWJTkDZKsQbFXa4ctKRbapsdoTKQFnaVGG8")
+    #[serde(default)]
+    pub labels: HashMap<String, Label>,
 }
 
 impl PersistentStorage {
@@ -197,7 +175,7 @@ pub struct GasConfig {
     pub gas_limit_runtime: u64,
 }
 
-#[optfield::optfield(pub WalletConfigOverrides, rewrap, attrs = (derive(Debug, Default)))]
+#[optfield::optfield(pub WalletConfigOverrides, rewrap, attrs = (derive(Debug, Default, Clone)))]
 #[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct WalletConfig {
     /// Override rust log (env var logging level)
diff --git a/wallet/src/helperfunctions.rs b/wallet/src/helperfunctions.rs
index 23bf4bb8..0162ef18 100644
--- a/wallet/src/helperfunctions.rs
+++ b/wallet/src/helperfunctions.rs
@@ -1,4 +1,4 @@
-use std::{path::PathBuf, str::FromStr};
+use std::{collections::HashMap, path::PathBuf, str::FromStr};
 
 use anyhow::Result;
 use base64::{Engine, engine::general_purpose::STANDARD as BASE64};
@@ -11,7 +11,7 @@ use serde::Serialize;
 use crate::{
     HOME_DIR_ENV_VAR,
     config::{
-        InitialAccountData, InitialAccountDataPrivate, InitialAccountDataPublic,
+        InitialAccountData, InitialAccountDataPrivate, InitialAccountDataPublic, Label,
         PersistentAccountDataPrivate, PersistentAccountDataPublic, PersistentStorage,
     },
 };
@@ -57,6 +57,7 @@ pub fn fetch_persistent_storage_path() -> Result {
 pub fn produce_data_for_storage(
     user_data: &NSSAUserData,
     last_synced_block: u64,
+    labels: HashMap<String, Label>,
 ) -> PersistentStorage {
     let mut vec_for_storage = vec![];
@@ -110,6 +111,7 @@ pub fn produce_data_for_storage(
     PersistentStorage {
         accounts: vec_for_storage,
         last_synced_block,
+        labels,
     }
 }
diff --git a/wallet/src/lib.rs b/wallet/src/lib.rs
index 45709d05..baf263ce 100644
--- a/wallet/src/lib.rs
+++ b/wallet/src/lib.rs
@@ -18,11 +18,10 @@ use nssa::{
         circuit::ProgramWithDependencies, message::EncryptedAccountData,
     },
 };
-use nssa_core::{
-    Commitment, MembershipProof, SharedSecretKey, account::Data, program::InstructionData,
-};
+use nssa_core::{Commitment, MembershipProof, SharedSecretKey, program::InstructionData};
 pub use privacy_preserving_tx::PrivacyPreservingAccount;
 use tokio::io::AsyncWriteExt;
+use url::Url;
 use crate::{
     config::{PersistentStorage, WalletConfigOverrides},
@@ -45,84 +44,9 @@ pub enum AccDecodeData {
     Decode(nssa_core::SharedSecretKey, AccountId),
 }
 
-const TOKEN_DEFINITION_DATA_SIZE: usize = 55;
-
-const TOKEN_HOLDING_TYPE: u8 = 1;
-const TOKEN_HOLDING_DATA_SIZE: usize = 49;
-const TOKEN_STANDARD_FUNGIBLE_TOKEN: u8 = 0;
-const TOKEN_STANDARD_NONFUNGIBLE: u8 = 2;
-
-struct TokenDefinition {
-    #[allow(unused)]
-    account_type: u8,
-    name: [u8; 6],
-    total_supply: u128,
-    #[allow(unused)]
-    metadata_id: AccountId,
-}
-
-struct TokenHolding {
-    #[allow(unused)]
-    account_type: u8,
-    definition_id: AccountId,
-    balance: u128,
-}
-
-impl TokenDefinition {
-    fn parse(data: &Data) -> Option<Self> {
-        let data = Vec::<u8>::from(data.clone());
-
-        if data.len() != TOKEN_DEFINITION_DATA_SIZE {
-            None
-        } else {
-            let account_type = data[0];
-            let name = data[1..7].try_into().expect("Name must be a 6 bytes");
-            let total_supply = u128::from_le_bytes(
-                data[7..23]
-                    .try_into()
-                    .expect("Total supply must be 16 bytes little-endian"),
-            );
-            let metadata_id = AccountId::new(
-                data[23..TOKEN_DEFINITION_DATA_SIZE]
-                    .try_into()
-                    .expect("Token Program expects valid Account Id for Metadata"),
-            );
-
-            let this = Some(Self {
-                account_type,
-                name,
-                total_supply,
-                metadata_id,
-            });
-
-            match account_type {
-                TOKEN_STANDARD_NONFUNGIBLE if total_supply != 1 => None,
-                TOKEN_STANDARD_FUNGIBLE_TOKEN if metadata_id != AccountId::new([0; 32]) => None,
-                _ => this,
-            }
-        }
-    }
-}
-
-impl TokenHolding {
-    fn parse(data: &[u8]) -> Option<Self> {
-        if data.len() != TOKEN_HOLDING_DATA_SIZE || data[0] != TOKEN_HOLDING_TYPE {
-            None
-        } else {
-            let account_type = data[0];
-            let definition_id = AccountId::new(data[1..33].try_into().unwrap());
-            let balance = u128::from_le_bytes(data[33..].try_into().unwrap());
-            Some(Self {
-                definition_id,
-                balance,
-                account_type,
-            })
-        }
-    }
-}
-
 pub struct WalletCore {
     config_path: PathBuf,
+    config_overrides: Option<WalletConfigOverrides>,
     storage: WalletChainStore,
     storage_path: PathBuf,
     poller: TxPoller,
@@ -148,6 +72,7 @@ impl WalletCore {
         let PersistentStorage {
             accounts: persistent_accounts,
             last_synced_block,
+            labels,
         } = PersistentStorage::from_path(&storage_path)
             .with_context(|| format!("Failed to read persistent storage at {storage_path:#?}"))?;
 
@@ -155,7 +80,7 @@ impl WalletCore {
             config_path,
             storage_path,
             config_overrides,
-            |config| WalletChainStore::new(config, persistent_accounts),
+            |config| WalletChainStore::new(config, persistent_accounts, labels),
             last_synced_block,
         )
     }
@@ -184,17 +109,13 @@ impl WalletCore {
     ) -> Result<Self> {
         let mut config = WalletConfig::from_path_or_initialize_default(&config_path)
             .with_context(|| format!("Failed to deserialize wallet config at {config_path:#?}"))?;
-        if let Some(config_overrides) = config_overrides {
+        if let Some(config_overrides)
= config_overrides.clone() {
             config.apply_overrides(config_overrides);
         }
-        let basic_auth = config
-            .basic_auth
-            .as_ref()
-            .map(|auth| (auth.username.clone(), auth.password.clone()));
 
         let sequencer_client = Arc::new(SequencerClient::new_with_auth(
-            config.sequencer_addr.clone(),
-            basic_auth,
+            Url::parse(&config.sequencer_addr)?,
+            config.basic_auth.clone(),
         )?);
 
         let tx_poller = TxPoller::new(config.clone(), Arc::clone(&sequencer_client));
@@ -207,6 +128,7 @@ impl WalletCore {
             poller: tx_poller,
             sequencer_client,
             last_synced_block,
+            config_overrides,
         })
     }
 
@@ -228,7 +150,11 @@ impl WalletCore {
     /// Store persistent data at home
     pub async fn store_persistent_data(&self) -> Result<()> {
-        let data = produce_data_for_storage(&self.storage.user_data, self.last_synced_block);
+        let data = produce_data_for_storage(
+            &self.storage.user_data,
+            self.last_synced_block,
+            self.storage.labels.clone(),
+        );
         let storage = serde_json::to_vec_pretty(&data)?;
 
         let mut storage_file = tokio::fs::File::create(&self.storage_path).await?;
@@ -541,4 +467,16 @@ impl WalletCore {
             .insert_private_account_data(affected_account_id, new_acc);
         }
     }
+
+    pub fn config_path(&self) -> &PathBuf {
+        &self.config_path
+    }
+
+    pub fn storage_path(&self) -> &PathBuf {
+        &self.storage_path
+    }
+
+    pub fn config_overrides(&self) -> &Option<WalletConfigOverrides> {
+        &self.config_overrides
+    }
 }
diff --git a/wallet/src/program_facades/amm.rs b/wallet/src/program_facades/amm.rs
index 3beb92cb..7039a937 100644
--- a/wallet/src/program_facades/amm.rs
+++ b/wallet/src/program_facades/amm.rs
@@ -1,8 +1,9 @@
 use common::{error::ExecutionFailureKind, rpc_primitives::requests::SendTxResponse};
 use nssa::{AccountId, ProgramId, program::Program};
 use nssa_core::program::PdaSeed;
+use token_core::TokenHolding;
 
-use crate::{TokenHolding, WalletCore};
+use crate::WalletCore;
 
 fn compute_pool_pda(
     amm_program_id: ProgramId,
@@ -123,12 +124,12 @@ impl Amm<'_> {
         .await
         .map_err(|_| ExecutionFailureKind::SequencerError)?;
 
-        let definition_token_a_id = TokenHolding::parse(&user_a_acc.data)
-            .ok_or(ExecutionFailureKind::AccountDataError(user_holding_a))?
-            .definition_id;
-        let definition_token_b_id = TokenHolding::parse(&user_b_acc.data)
-            .ok_or(ExecutionFailureKind::AccountDataError(user_holding_a))?
-            .definition_id;
+        let definition_token_a_id = TokenHolding::try_from(&user_a_acc.data)
+            .map_err(|_| ExecutionFailureKind::AccountDataError(user_holding_a))?
+            .definition_id();
+        let definition_token_b_id = TokenHolding::try_from(&user_b_acc.data)
+            .map_err(|_| ExecutionFailureKind::AccountDataError(user_holding_b))?
+            .definition_id();
 
         let amm_pool =
             compute_pool_pda(amm_program_id, definition_token_a_id, definition_token_b_id);
@@ -208,12 +209,12 @@ impl Amm<'_> {
         .await
         .map_err(|_| ExecutionFailureKind::SequencerError)?;
 
-        let definition_token_a_id = TokenHolding::parse(&user_a_acc.data)
-            .ok_or(ExecutionFailureKind::AccountDataError(user_holding_a))?
-            .definition_id;
-        let definition_token_b_id = TokenHolding::parse(&user_b_acc.data)
-            .ok_or(ExecutionFailureKind::AccountDataError(user_holding_b))?
-            .definition_id;
+        let definition_token_a_id = TokenHolding::try_from(&user_a_acc.data)
+            .map_err(|_| ExecutionFailureKind::AccountDataError(user_holding_a))?
+            .definition_id();
+        let definition_token_b_id = TokenHolding::try_from(&user_b_acc.data)
+            .map_err(|_| ExecutionFailureKind::AccountDataError(user_holding_b))?
+ .definition_id(); let amm_pool = compute_pool_pda(amm_program_id, definition_token_a_id, definition_token_b_id); @@ -242,14 +243,14 @@ impl Amm<'_> { .await .map_err(|_| ExecutionFailureKind::SequencerError)?; - let token_holder_a = TokenHolding::parse(&token_holder_acc_a.data) - .ok_or(ExecutionFailureKind::AccountDataError(user_holding_a))?; - let token_holder_b = TokenHolding::parse(&token_holder_acc_b.data) - .ok_or(ExecutionFailureKind::AccountDataError(user_holding_b))?; + let token_holder_a = TokenHolding::try_from(&token_holder_acc_a.data) + .map_err(|_| ExecutionFailureKind::AccountDataError(user_holding_a))?; + let token_holder_b = TokenHolding::try_from(&token_holder_acc_b.data) + .map_err(|_| ExecutionFailureKind::AccountDataError(user_holding_b))?; - if token_holder_a.definition_id == token_definition_id { + if token_holder_a.definition_id() == token_definition_id { account_id_auth = user_holding_a; - } else if token_holder_b.definition_id == token_definition_id { + } else if token_holder_b.definition_id() == token_definition_id { account_id_auth = user_holding_b; } else { return Err(ExecutionFailureKind::AccountDataError(token_definition_id)); @@ -309,12 +310,12 @@ impl Amm<'_> { .await .map_err(|_| ExecutionFailureKind::SequencerError)?; - let definition_token_a_id = TokenHolding::parse(&user_a_acc.data) - .ok_or(ExecutionFailureKind::AccountDataError(user_holding_a))? - .definition_id; - let definition_token_b_id = TokenHolding::parse(&user_b_acc.data) - .ok_or(ExecutionFailureKind::AccountDataError(user_holding_a))? - .definition_id; + let definition_token_a_id = TokenHolding::try_from(&user_a_acc.data) + .map_err(|_| ExecutionFailureKind::AccountDataError(user_holding_a))? + .definition_id(); + let definition_token_b_id = TokenHolding::try_from(&user_b_acc.data) + .map_err(|_| ExecutionFailureKind::AccountDataError(user_holding_b))? + .definition_id(); let amm_pool = compute_pool_pda(amm_program_id, definition_token_a_id, definition_token_b_id); @@ -395,12 +396,12 @@ impl Amm<'_> { .await .map_err(|_| ExecutionFailureKind::SequencerError)?; - let definition_token_a_id = TokenHolding::parse(&user_a_acc.data) - .ok_or(ExecutionFailureKind::AccountDataError(user_holding_a))? - .definition_id; - let definition_token_b_id = TokenHolding::parse(&user_b_acc.data) - .ok_or(ExecutionFailureKind::AccountDataError(user_holding_a))? - .definition_id; + let definition_token_a_id = TokenHolding::try_from(&user_a_acc.data) + .map_err(|_| ExecutionFailureKind::AccountDataError(user_holding_a))? + .definition_id(); + let definition_token_b_id = TokenHolding::try_from(&user_b_acc.data) + .map_err(|_| ExecutionFailureKind::AccountDataError(user_holding_b))? 
+ .definition_id(); let amm_pool = compute_pool_pda(amm_program_id, definition_token_a_id, definition_token_b_id); diff --git a/wallet/src/program_facades/token.rs b/wallet/src/program_facades/token.rs index 0d3f79d7..4160e255 100644 --- a/wallet/src/program_facades/token.rs +++ b/wallet/src/program_facades/token.rs @@ -1,6 +1,7 @@ use common::{error::ExecutionFailureKind, rpc_primitives::requests::SendTxResponse}; use nssa::{AccountId, program::Program}; use nssa_core::{NullifierPublicKey, SharedSecretKey, encryption::IncomingViewingPublicKey}; +use token_core::Instruction; use crate::{PrivacyPreservingAccount, WalletCore}; @@ -11,15 +12,12 @@ impl Token<'_> { &self, definition_account_id: AccountId, supply_account_id: AccountId, - name: [u8; 6], + name: String, total_supply: u128, ) -> Result { let account_ids = vec![definition_account_id, supply_account_id]; let program_id = nssa::program::Program::token().id(); - // Instruction must be: [0x00 || total_supply (little-endian 16 bytes) || name (6 bytes)] - let mut instruction = vec![0u8; 23]; - instruction[1..17].copy_from_slice(&total_supply.to_le_bytes()); - instruction[17..].copy_from_slice(&name); + let instruction = Instruction::NewFungibleDefinition { name, total_supply }; let message = nssa::public_transaction::Message::try_new( program_id, account_ids, @@ -39,10 +37,10 @@ impl Token<'_> { &self, definition_account_id: AccountId, supply_account_id: AccountId, - name: [u8; 6], + name: String, total_supply: u128, ) -> Result<(SendTxResponse, SharedSecretKey), ExecutionFailureKind> { - let instruction = token_program_preparation_definition(name, total_supply); + let instruction = Instruction::NewFungibleDefinition { name, total_supply }; let instruction_data = Program::serialize_instruction(instruction).expect("Instruction should serialize"); @@ -69,10 +67,10 @@ impl Token<'_> { &self, definition_account_id: AccountId, supply_account_id: AccountId, - name: [u8; 6], + name: String, total_supply: u128, ) -> Result<(SendTxResponse, SharedSecretKey), ExecutionFailureKind> { - let instruction = token_program_preparation_definition(name, total_supply); + let instruction = Instruction::NewFungibleDefinition { name, total_supply }; let instruction_data = Program::serialize_instruction(instruction).expect("Instruction should serialize"); @@ -99,10 +97,10 @@ impl Token<'_> { &self, definition_account_id: AccountId, supply_account_id: AccountId, - name: [u8; 6], + name: String, total_supply: u128, ) -> Result<(SendTxResponse, [SharedSecretKey; 2]), ExecutionFailureKind> { - let instruction = token_program_preparation_definition(name, total_supply); + let instruction = Instruction::NewFungibleDefinition { name, total_supply }; let instruction_data = Program::serialize_instruction(instruction).expect("Instruction should serialize"); @@ -132,11 +130,9 @@ impl Token<'_> { ) -> Result { let account_ids = vec![sender_account_id, recipient_account_id]; let program_id = nssa::program::Program::token().id(); - // Instruction must be: [0x01 || amount (little-endian 16 bytes) || 0x00 || 0x00 || 0x00 || - // 0x00 || 0x00 || 0x00]. 
- let mut instruction = vec![0u8; 23]; - instruction[0] = 0x01; - instruction[1..17].copy_from_slice(&amount.to_le_bytes()); + let instruction = Instruction::Transfer { + amount_to_transfer: amount, + }; let Ok(nonces) = self.0.get_accounts_nonces(vec![sender_account_id]).await else { return Err(ExecutionFailureKind::SequencerError); }; @@ -170,7 +166,9 @@ impl Token<'_> { recipient_account_id: AccountId, amount: u128, ) -> Result<(SendTxResponse, [SharedSecretKey; 2]), ExecutionFailureKind> { - let instruction = token_program_preparation_transfer(amount); + let instruction = Instruction::Transfer { + amount_to_transfer: amount, + }; let instruction_data = Program::serialize_instruction(instruction).expect("Instruction should serialize"); @@ -199,7 +197,9 @@ impl Token<'_> { recipient_ipk: IncomingViewingPublicKey, amount: u128, ) -> Result<(SendTxResponse, [SharedSecretKey; 2]), ExecutionFailureKind> { - let instruction = token_program_preparation_transfer(amount); + let instruction = Instruction::Transfer { + amount_to_transfer: amount, + }; let instruction_data = Program::serialize_instruction(instruction).expect("Instruction should serialize"); @@ -230,7 +230,9 @@ impl Token<'_> { recipient_account_id: AccountId, amount: u128, ) -> Result<(SendTxResponse, SharedSecretKey), ExecutionFailureKind> { - let instruction = token_program_preparation_transfer(amount); + let instruction = Instruction::Transfer { + amount_to_transfer: amount, + }; let instruction_data = Program::serialize_instruction(instruction).expect("Instruction should serialize"); @@ -259,7 +261,9 @@ impl Token<'_> { recipient_account_id: AccountId, amount: u128, ) -> Result<(SendTxResponse, SharedSecretKey), ExecutionFailureKind> { - let instruction = token_program_preparation_transfer(amount); + let instruction = Instruction::Transfer { + amount_to_transfer: amount, + }; let instruction_data = Program::serialize_instruction(instruction).expect("Instruction should serialize"); @@ -289,7 +293,9 @@ impl Token<'_> { recipient_ipk: IncomingViewingPublicKey, amount: u128, ) -> Result<(SendTxResponse, SharedSecretKey), ExecutionFailureKind> { - let instruction = token_program_preparation_transfer(amount); + let instruction = Instruction::Transfer { + amount_to_transfer: amount, + }; let instruction_data = Program::serialize_instruction(instruction).expect("Instruction should serialize"); @@ -322,7 +328,9 @@ impl Token<'_> { amount: u128, ) -> Result { let account_ids = vec![definition_account_id, holder_account_id]; - let instruction = token_program_preparation_burn(amount); + let instruction = Instruction::Burn { + amount_to_burn: amount, + }; let Ok(nonces) = self.0.get_accounts_nonces(vec![holder_account_id]).await else { return Err(ExecutionFailureKind::SequencerError); @@ -355,7 +363,9 @@ impl Token<'_> { holder_account_id: AccountId, amount: u128, ) -> Result<(SendTxResponse, [SharedSecretKey; 2]), ExecutionFailureKind> { - let instruction = token_program_preparation_burn(amount); + let instruction = Instruction::Burn { + amount_to_burn: amount, + }; let instruction_data = Program::serialize_instruction(instruction).expect("Instruction should serialize"); @@ -383,7 +393,9 @@ impl Token<'_> { holder_account_id: AccountId, amount: u128, ) -> Result<(SendTxResponse, SharedSecretKey), ExecutionFailureKind> { - let instruction = token_program_preparation_burn(amount); + let instruction = Instruction::Burn { + amount_to_burn: amount, + }; let instruction_data = Program::serialize_instruction(instruction).expect("Instruction 
should serialize"); @@ -412,7 +424,9 @@ impl Token<'_> { holder_account_id: AccountId, amount: u128, ) -> Result<(SendTxResponse, SharedSecretKey), ExecutionFailureKind> { - let instruction = token_program_preparation_burn(amount); + let instruction = Instruction::Burn { + amount_to_burn: amount, + }; let instruction_data = Program::serialize_instruction(instruction).expect("Instruction should serialize"); @@ -442,7 +456,9 @@ impl Token<'_> { amount: u128, ) -> Result { let account_ids = vec![definition_account_id, holder_account_id]; - let instruction = token_program_preparation_mint(amount); + let instruction = Instruction::Mint { + amount_to_mint: amount, + }; let Ok(nonces) = self .0 @@ -481,7 +497,9 @@ impl Token<'_> { holder_account_id: AccountId, amount: u128, ) -> Result<(SendTxResponse, [SharedSecretKey; 2]), ExecutionFailureKind> { - let instruction = token_program_preparation_mint(amount); + let instruction = Instruction::Mint { + amount_to_mint: amount, + }; let instruction_data = Program::serialize_instruction(instruction).expect("Instruction should serialize"); @@ -510,7 +528,9 @@ impl Token<'_> { holder_ipk: IncomingViewingPublicKey, amount: u128, ) -> Result<(SendTxResponse, [SharedSecretKey; 2]), ExecutionFailureKind> { - let instruction = token_program_preparation_mint(amount); + let instruction = Instruction::Mint { + amount_to_mint: amount, + }; let instruction_data = Program::serialize_instruction(instruction).expect("Instruction should serialize"); @@ -541,7 +561,9 @@ impl Token<'_> { holder_account_id: AccountId, amount: u128, ) -> Result<(SendTxResponse, SharedSecretKey), ExecutionFailureKind> { - let instruction = token_program_preparation_mint(amount); + let instruction = Instruction::Mint { + amount_to_mint: amount, + }; let instruction_data = Program::serialize_instruction(instruction).expect("Instruction should serialize"); @@ -570,7 +592,9 @@ impl Token<'_> { holder_account_id: AccountId, amount: u128, ) -> Result<(SendTxResponse, SharedSecretKey), ExecutionFailureKind> { - let instruction = token_program_preparation_mint(amount); + let instruction = Instruction::Mint { + amount_to_mint: amount, + }; let instruction_data = Program::serialize_instruction(instruction).expect("Instruction should serialize"); @@ -600,7 +624,9 @@ impl Token<'_> { holder_ipk: IncomingViewingPublicKey, amount: u128, ) -> Result<(SendTxResponse, SharedSecretKey), ExecutionFailureKind> { - let instruction = token_program_preparation_mint(amount); + let instruction = Instruction::Mint { + amount_to_mint: amount, + }; let instruction_data = Program::serialize_instruction(instruction).expect("Instruction should serialize"); @@ -626,42 +652,3 @@ impl Token<'_> { }) } } - -fn token_program_preparation_transfer(amount: u128) -> Vec { - // Instruction must be: [0x01 || amount (little-endian 16 bytes) || 0x00 || 0x00 || 0x00 || - // 0x00 || 0x00 || 0x00]. 
-    let mut instruction = vec![0u8; 23];
-    instruction[0] = 0x01;
-    instruction[1..17].copy_from_slice(&amount.to_le_bytes());
-
-    instruction
-}
-
-fn token_program_preparation_definition(name: [u8; 6], total_supply: u128) -> Vec<u8> {
-    // Instruction must be: [0x00 || total_supply (little-endian 16 bytes) || name (6 bytes)]
-    let mut instruction = vec![0u8; 23];
-    instruction[1..17].copy_from_slice(&total_supply.to_le_bytes());
-    instruction[17..].copy_from_slice(&name);
-
-    instruction
-}
-
-fn token_program_preparation_burn(amount: u128) -> Vec<u8> {
-    // Instruction must be: [0x03 || amount (little-endian 16 bytes) || 0x00 || 0x00 || 0x00 ||
-    // 0x00 || 0x00 || 0x00].
-    let mut instruction = vec![0; 23];
-    instruction[0] = 0x03;
-    instruction[1..17].copy_from_slice(&amount.to_le_bytes());
-
-    instruction
-}
-
-fn token_program_preparation_mint(amount: u128) -> Vec<u8> {
-    // Instruction must be: [0x04 || amount (little-endian 16 bytes) || 0x00 || 0x00 || 0x00 ||
-    // 0x00 || 0x00 || 0x00].
-    let mut instruction = vec![0; 23];
-    instruction[0] = 0x04;
-    instruction[1..17].copy_from_slice(&amount.to_le_bytes());
-
-    instruction
-}
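
For reference, the helpers deleted above were the only place the token program's raw instruction layout was spelled out: a 23-byte buffer of `[tag || amount/total_supply as little-endian u128 || 6 trailing bytes]`, with tags 0x00 (new definition), 0x01 (transfer), 0x03 (burn), 0x04 (mint), where the trailing bytes carry the 6-byte token name for definitions and are zero padding otherwise. A minimal sketch of that legacy encoding is kept below; the helper name is hypothetical, and whether `Program::serialize_instruction(token_core::Instruction)` emits these exact bytes is an assumption this diff does not confirm.

```rust
/// Sketch of the legacy 23-byte token instruction wire format
/// (hypothetical helper, mirroring the removed functions above).
fn encode_legacy_token_instruction(tag: u8, amount: u128, name: Option<[u8; 6]>) -> Vec<u8> {
    let mut bytes = vec![0u8; 23];
    // Byte 0: operation tag (0x00 definition, 0x01 transfer, 0x03 burn, 0x04 mint).
    bytes[0] = tag;
    // Bytes 1..17: amount or total supply, little-endian u128.
    bytes[1..17].copy_from_slice(&amount.to_le_bytes());
    // Bytes 17..23: 6-byte name for definitions, zero padding for the other ops.
    if let Some(name) = name {
        bytes[17..23].copy_from_slice(&name);
    }
    bytes
}
```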
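The new `labels` map in `PersistentStorage` is `#[serde(default)]`, so storage files written before this change (with no `"labels"` key) still deserialize to an empty map. A small sketch of attaching a label is below; the `wallet::config` module path and field visibility are assumptions based on the diff, not verified against the crate's public API.

```rust
use std::collections::HashMap;

// Assumed paths: `Label` and `PersistentStorage` as declared in wallet/src/config.rs.
use wallet::config::{Label, PersistentStorage};

/// Attach a human-readable label to one account ID in persistent storage.
fn label_account(mut storage: PersistentStorage) -> PersistentStorage {
    let mut labels: HashMap<String, Label> = HashMap::new();
    labels.insert(
        "2rnKprXqWGWJTkDZKsQbFXa4ctKRbapsdoTKQFnaVGG8".to_string(),
        Label::new("savings".to_string()),
    );
    storage.labels = labels;
    storage
}
```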
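The intended call sequence for the new C ABI in wallet-ffi is create (or open), save, query, free any returned strings, then destroy the handle. The sketch below exercises that order from a Rust caller for brevity; the crate-root re-exports and the file paths are assumptions, and error handling is reduced to null checks.

```rust
use std::ffi::CString;

// Assumed re-exports from the wallet-ffi crate root; the real module layout may differ.
use wallet_ffi::{
    wallet_ffi_create_new, wallet_ffi_destroy, wallet_ffi_free_string,
    wallet_ffi_get_sequencer_addr, wallet_ffi_save,
};

fn main() {
    // Hypothetical paths and password for illustration only.
    let config = CString::new("wallet_config.json").unwrap();
    let storage = CString::new("wallet_storage.json").unwrap();
    let password = CString::new("correct horse battery staple").unwrap();

    unsafe {
        // Create a fresh wallet; a null handle signals failure.
        let handle = wallet_ffi_create_new(config.as_ptr(), storage.as_ptr(), password.as_ptr());
        assert!(!handle.is_null(), "wallet creation failed");

        // Persist state, then read the configured sequencer address.
        let _ = wallet_ffi_save(handle);
        let addr = wallet_ffi_get_sequencer_addr(handle);
        if !addr.is_null() {
            // Strings returned by the FFI must be released by the FFI.
            wallet_ffi_free_string(addr);
        }

        // The handle must not be used after this call.
        wallet_ffi_destroy(handle);
    }
}
```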