diff --git a/.deny.toml b/.deny.toml
new file mode 100644
index 00000000..1cb3879c
--- /dev/null
+++ b/.deny.toml
@@ -0,0 +1,50 @@
+# Config file reference can be found at https://embarkstudios.github.io/cargo-deny/checks/cfg.html.
+
+[graph]
+all-features = true
+exclude-dev = true
+no-default-features = true
+
+[advisories]
+ignore = [
+  { id = "RUSTSEC-2023-0071", reason = "Marvin Attack: potential key recovery through timing sidechannels" },
+  { id = "RUSTSEC-2024-0388", reason = "`derivative` is unmaintained; consider using an alternative. Use `cargo tree -p derivative -i > tmp.txt` to check the dependency tree." },
+  { id = "RUSTSEC-2024-0436", reason = "`paste` is unmaintained; consider using an alternative. Use `cargo tree -p paste -i > tmp.txt` to check the dependency tree." },
+  { id = "RUSTSEC-2025-0055", reason = "`tracing-subscriber` v0.2.25 pulled in by ark-relations v0.4.0 - will be addressed before mainnet" },
+  { id = "RUSTSEC-2025-0141", reason = "`bincode` is unmaintained but continuing to use it." },
+]
+yanked = "deny"
+unused-ignored-advisory = "deny"
+
+[bans]
+allow-wildcard-paths = false
+multiple-versions = "allow"
+
+[licenses]
+allow = [
+  "Apache-2.0 WITH LLVM-exception",
+  "Apache-2.0",
+  "BSD-2-Clause",
+  "BSD-3-Clause",
+  "BSL-1.0",
+  "CC0-1.0",
+  "CDLA-Permissive-2.0",
+  "ISC",
+  "MIT",
+  "MPL-2.0",
+  "Unicode-3.0",
+  "Zlib",
+]
+private = { ignore = false }
+unused-allowed-license = "deny"
+
+[sources]
+allow-git = [
+  "https://github.com/EspressoSystems/jellyfish.git",
+  "https://github.com/logos-blockchain/logos-blockchain.git",
+]
+unknown-git = "deny"
+unknown-registry = "deny"
+
+[sources.allow-org]
+github = ["logos-co"]
diff --git a/.github/actions/install-logos-blockchain-circuits/action.yaml b/.github/actions/install-logos-blockchain-circuits/action.yaml
new file mode 100644
index 00000000..e62aea6b
--- /dev/null
+++ b/.github/actions/install-logos-blockchain-circuits/action.yaml
@@ -0,0 +1,19 @@
+name: Setup Logos Blockchain Circuits
+
+description: Set up Logos Blockchain Circom Circuits, Rapidsnark prover and Rapidsnark verifier using the setup-logos-blockchain-circuits.sh script.
+ +inputs: + github-token: + description: GitHub token for downloading releases + required: true + +runs: + using: "composite" + steps: + - name: Setup logos-blockchain-circuits + shell: bash + working-directory: ${{ github.workspace }} + env: + GITHUB_TOKEN: ${{ inputs.github-token }} + run: | + curl -sSL https://raw.githubusercontent.com/logos-blockchain/logos-blockchain/main/scripts/setup-logos-blockchain-circuits.sh | bash diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index a8efedd9..6292a786 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -56,6 +56,19 @@ jobs: - name: Check for unused dependencies run: cargo machete + deny: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v5 + with: + ref: ${{ github.head_ref }} + + - name: Install cargo-deny + run: cargo install --locked cargo-deny + + - name: Check licenses and advisories + run: cargo deny check + lint: runs-on: ubuntu-latest timeout-minutes: 60 @@ -70,6 +83,10 @@ jobs: - uses: ./.github/actions/install-risc0 + - uses: ./.github/actions/install-logos-blockchain-circuits + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + - name: Install active toolchain run: rustup install @@ -95,6 +112,10 @@ jobs: - uses: ./.github/actions/install-risc0 + - uses: ./.github/actions/install-logos-blockchain-circuits + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + - name: Install active toolchain run: rustup install @@ -119,6 +140,10 @@ jobs: - uses: ./.github/actions/install-risc0 + - uses: ./.github/actions/install-logos-blockchain-circuits + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + - name: Install active toolchain run: rustup install diff --git a/.gitignore b/.gitignore index 6162763b..8a454208 100644 --- a/.gitignore +++ b/.gitignore @@ -7,4 +7,5 @@ data/ .vscode/ rocksdb sequencer_runner/data/ -storage.json \ No newline at end of file +storage.json +result diff --git a/Cargo.lock b/Cargo.lock index b71a3f2b..0bb78b5b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -361,6 +361,18 @@ dependencies = [ "windows-sys 0.61.2", ] +[[package]] +name = "any_spawner" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1384d3fe1eecb464229fcf6eebb72306591c56bf27b373561489458a7c73027d" +dependencies = [ + "futures", + "thiserror 2.0.17", + "tokio", + "wasm-bindgen-futures", +] + [[package]] name = "anyhow" version = "1.0.100" @@ -782,6 +794,23 @@ version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" +[[package]] +name = "async-lock" +version = "3.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "290f7f2596bd5b78a9fec8088ccd89180d7f9f55b94b0576823bbbdc72ee8311" +dependencies = [ + "event-listener", + "event-listener-strategy", + "pin-project-lite", +] + +[[package]] +name = "async-once-cell" +version = "0.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4288f83726785267c6f2ef073a3d83dc3f9b81464e9f99898240cced85fce35a" + [[package]] name = "async-stream" version = "0.3.6" @@ -821,6 +850,36 @@ version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" +[[package]] +name = "attribute-derive" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05832cdddc8f2650cc2cc187cc2e952b8c133a48eb055f35211f61ee81502d77" +dependencies = [ + 
"attribute-derive-macro", + "derive-where", + "manyhow", + "proc-macro2", + "quote", + "syn 2.0.111", +] + +[[package]] +name = "attribute-derive-macro" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a7cdbbd4bd005c5d3e2e9c885e6fa575db4f4a3572335b974d8db853b6beb61" +dependencies = [ + "collection_literals", + "interpolator", + "manyhow", + "proc-macro-utils", + "proc-macro2", + "quote", + "quote-use", + "syn 2.0.111", +] + [[package]] name = "autocfg" version = "1.5.0" @@ -834,7 +893,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "edca88bc138befd0323b20752846e6587272d3b03b0343c8ea28a6f819e6e71f" dependencies = [ "async-trait", - "axum-core", + "axum-core 0.4.5", "bytes", "futures-util", "http 1.4.0", @@ -843,7 +902,7 @@ dependencies = [ "hyper", "hyper-util", "itoa", - "matchit", + "matchit 0.7.3", "memchr", "mime", "percent-encoding", @@ -861,6 +920,43 @@ dependencies = [ "tracing", ] +[[package]] +name = "axum" +version = "0.8.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b52af3cb4058c895d37317bb27508dccc8e5f2d39454016b297bf4a400597b8" +dependencies = [ + "axum-core 0.5.6", + "base64", + "bytes", + "form_urlencoded", + "futures-util", + "http 1.4.0", + "http-body", + "http-body-util", + "hyper", + "hyper-util", + "itoa", + "matchit 0.8.4", + "memchr", + "mime", + "multer", + "percent-encoding", + "pin-project-lite", + "serde_core", + "serde_json", + "serde_path_to_error", + "serde_urlencoded", + "sha1", + "sync_wrapper", + "tokio", + "tokio-tungstenite", + "tower 0.5.2", + "tower-layer", + "tower-service", + "tracing", +] + [[package]] name = "axum-core" version = "0.4.5" @@ -882,12 +978,37 @@ dependencies = [ "tracing", ] +[[package]] +name = "axum-core" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08c78f31d7b1291f7ee735c1c6780ccde7785daae9a9206026862dab7d8792d1" +dependencies = [ + "bytes", + "futures-core", + "http 1.4.0", + "http-body", + "http-body-util", + "mime", + "pin-project-lite", + "sync_wrapper", + "tower-layer", + "tower-service", + "tracing", +] + [[package]] name = "base-x" version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4cbbc9d0964165b47557570cce6c952866c2678457aca742aafc9fb771d30270" +[[package]] +name = "base16" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d27c3610c36aee21ce8ac510e6224498de4228ad772a171ed65643a24693a5a8" + [[package]] name = "base16ct" version = "0.2.0" @@ -900,7 +1021,7 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b5e9430d9a245a77c92176e649af6e275f20839a48389859d1661e9a128d077c" dependencies = [ - "const-str", + "const-str 0.4.3", "match-lookup", ] @@ -927,9 +1048,14 @@ name = "bedrock_client" version = "0.1.0" dependencies = [ "anyhow", + "futures", + "log", + "logos-blockchain-chain-broadcast-service", "logos-blockchain-common-http-client", "logos-blockchain-core", "reqwest", + "serde", + "tokio-retry", ] [[package]] @@ -1107,9 +1233,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.11.0" +version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b35204fbdc0b3f4446b89fc1ac2cf84a8a68971995d0bf2e925ec7cd960f9cb3" +checksum = "1e748733b7cbc798e1434b6ac524f0c1ff2ab456fe201501e6497c8417a4fc33" dependencies = [ "serde", 
] @@ -1165,6 +1291,25 @@ dependencies = [ "thiserror 2.0.17", ] +[[package]] +name = "cbindgen" +version = "0.29.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "befbfd072a8e81c02f8c507aefce431fe5e7d051f83d48a23ffc9b9fe5a11799" +dependencies = [ + "clap", + "heck", + "indexmap 2.12.1", + "log", + "proc-macro2", + "quote", + "serde", + "serde_json", + "syn 2.0.111", + "tempfile", + "toml 0.9.11+spec-1.1.0", +] + [[package]] name = "cc" version = "1.2.49" @@ -1310,6 +1455,23 @@ dependencies = [ "thiserror 2.0.17", ] +[[package]] +name = "codee" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9dbbdc4b4d349732bc6690de10a9de952bd39ba6a065c586e26600b6b0b91f5" +dependencies = [ + "serde", + "serde_json", + "thiserror 2.0.17", +] + +[[package]] +name = "collection_literals" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2550f75b8cfac212855f6b1885455df8eaee8fe8e246b647d69146142e016084" + [[package]] name = "colorchoice" version = "1.0.4" @@ -1335,6 +1497,7 @@ dependencies = [ "borsh", "hex", "log", + "logos-blockchain-common-http-client", "nssa", "nssa_core", "reqwest", @@ -1342,6 +1505,29 @@ dependencies = [ "serde_json", "sha2", "thiserror 2.0.17", + "url", +] + +[[package]] +name = "concurrent-queue" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "config" +version = "0.15.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b30fa8254caad766fc03cb0ccae691e14bf3bd72bfff27f72802ce729551b3d6" +dependencies = [ + "convert_case 0.6.0", + "pathdiff", + "serde_core", + "toml 0.9.11+spec-1.1.0", + "winnow", ] [[package]] @@ -1357,6 +1543,26 @@ dependencies = [ "windows-sys 0.61.2", ] +[[package]] +name = "console_error_panic_hook" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a06aeb73f470f66dcdbf7223caeebb85984942f22f1adb2a088cf9668146bbbc" +dependencies = [ + "cfg-if", + "wasm-bindgen", +] + +[[package]] +name = "console_log" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be8aed40e4edbf4d3b4431ab260b63fdc40f5780a4766824329ea0f1eefe3c0f" +dependencies = [ + "log", + "web-sys", +] + [[package]] name = "const-hex" version = "1.17.0" @@ -1381,12 +1587,53 @@ version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2f421161cb492475f1661ddc9815a745a1c894592070661180fdec3d4872e9c3" +[[package]] +name = "const-str" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0664d2867b4a32697dfe655557f5c3b187e9b605b38612a748e5ec99811d160" + +[[package]] +name = "const_format" +version = "0.2.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7faa7469a93a566e9ccc1c73fe783b4a65c274c5ace346038dca9c39fe0030ad" +dependencies = [ + "const_format_proc_macros", +] + +[[package]] +name = "const_format_proc_macros" +version = "0.2.34" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d57c2eccfb16dbac1f4e61e206105db5820c9d26c3c472bc17c774259ef7744" +dependencies = [ + "proc-macro2", + "quote", + "unicode-xid", +] + +[[package]] +name = "const_str_slice_concat" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "f67855af358fcb20fac58f9d714c94e2b228fe5694c1c9b4ead4a366343eda1b" + [[package]] name = "convert_case" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" +[[package]] +name = "convert_case" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca" +dependencies = [ + "unicode-segmentation", +] + [[package]] name = "convert_case" version = "0.8.0" @@ -1624,6 +1871,20 @@ dependencies = [ "parking_lot_core", ] +[[package]] +name = "dashmap" +version = "6.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5041cc499144891f3790297212f32a74fb938e5136a14943f338ef9e0ae276cf" +dependencies = [ + "cfg-if", + "crossbeam-utils", + "hashbrown 0.14.5", + "lock_api", + "once_cell", + "parking_lot_core", +] + [[package]] name = "data-encoding" version = "2.9.0" @@ -1647,7 +1908,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8d162beedaa69905488a8da94f5ac3edb4dd4788b732fadb7bd120b2625c1976" dependencies = [ "data-encoding", - "syn 2.0.111", + "syn 1.0.109", ] [[package]] @@ -1682,6 +1943,17 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "derive-where" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef941ded77d15ca19b40374869ac6000af1c9f2a4c0f3d4c70926287e6364a8f" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.111", +] + [[package]] name = "derive_builder" version = "0.20.2" @@ -1805,6 +2077,12 @@ version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "75b325c5dbd37f80359721ad39aca5a29fb04c89279657cffdda8736d0c0b9d2" +[[package]] +name = "drain_filter_polyfill" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "669a445ee724c5c69b1b06fe0b63e70a1c84bc9bb7d9696cd4f4e3ec45050408" + [[package]] name = "duplicate" version = "2.0.1" @@ -1880,6 +2158,16 @@ version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" +[[package]] +name = "either_of" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "216d23e0ec69759a17f05e1c553f3a6870e5ec73420fbb07807a6f34d5d1d5a4" +dependencies = [ + "paste", + "pin-project-lite", +] + [[package]] name = "elf" version = "0.7.4" @@ -1973,6 +2261,12 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" +[[package]] +name = "erased" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1731451909bde27714eacba19c2566362a7f35224f52b153d3f42cf60f72472" + [[package]] name = "errno" version = "0.3.14" @@ -1983,6 +2277,27 @@ dependencies = [ "windows-sys 0.61.2", ] +[[package]] +name = "event-listener" +version = "5.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e13b66accf52311f30a0db42147dadea9850cb48cd070028831ae5f5d4b856ab" +dependencies = [ + "concurrent-queue", + "parking", + "pin-project-lite", +] + +[[package]] +name = "event-listener-strategy" +version = "0.5.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8be9f3dfaaffdae2972880079a491a1a8bb7cbed0b8dd7a347f668b4150a3b93" 
+dependencies = [ + "event-listener", + "pin-project-lite", +] + [[package]] name = "example_program_deployment_methods" version = "0.1.0" @@ -2000,6 +2315,33 @@ dependencies = [ "risc0-zkvm", ] +[[package]] +name = "explorer_service" +version = "0.1.0" +dependencies = [ + "axum 0.8.8", + "chrono", + "clap", + "console_error_panic_hook", + "console_log", + "env_logger", + "hex", + "indexer_service_protocol", + "indexer_service_rpc", + "jsonrpsee", + "leptos", + "leptos_axum", + "leptos_meta", + "leptos_router", + "log", + "serde", + "tokio", + "url", + "urlencoding", + "wasm-bindgen", + "web-sys", +] + [[package]] name = "fastrand" version = "2.3.0" @@ -2141,6 +2483,7 @@ dependencies = [ "futures-core", "futures-task", "futures-util", + "num_cpus", ] [[package]] @@ -2179,7 +2522,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24" dependencies = [ "gloo-timers", - "send_wrapper", + "send_wrapper 0.4.0", ] [[package]] @@ -2318,7 +2661,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "68a7f542ee6b35af73b06abc0dad1c1bae89964e4e253bc4b587b91c9637867b" dependencies = [ "cfg-if", - "dashmap", + "dashmap 5.5.3", "futures", "futures-timer", "no-std-compat", @@ -2342,6 +2685,12 @@ dependencies = [ "subtle", ] +[[package]] +name = "guardian" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17e2ac29387b1aa07a1e448f7bb4f35b500787971e965b02842b900afa5c8f6f" + [[package]] name = "h2" version = "0.3.27" @@ -2489,6 +2838,15 @@ version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e89e8d20b3799fa526152a5301a771eaaad80857f83e01b23216ceaafb2d9280" +[[package]] +name = "html-escape" +version = "0.2.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d1ad449764d627e22bfd7cd5e8868264fc9236e07c752972b4080cd351cb476" +dependencies = [ + "utf8-width", +] + [[package]] name = "http" version = "0.2.12" @@ -2533,6 +2891,12 @@ dependencies = [ "pin-project-lite", ] +[[package]] +name = "http-range-header" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9171a2ea8a68358193d15dd5d70c1c10a2afc3e7e4c5bc92bc9f025cebd7359c" + [[package]] name = "httparse" version = "1.10.1" @@ -2551,6 +2915,22 @@ version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "135b12329e5e3ce057a9f972339ea52bc954fe1e9358ef27f95e89716fbc5424" +[[package]] +name = "hydration_context" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8714ae4adeaa846d838f380fbd72f049197de629948f91bf045329e0cf0a283" +dependencies = [ + "futures", + "js-sys", + "once_cell", + "or_poisoned", + "pin-project-lite", + "serde", + "throw_error", + "wasm-bindgen", +] + [[package]] name = "hyper" version = "1.8.1" @@ -2626,7 +3006,7 @@ dependencies = [ "libc", "percent-encoding", "pin-project-lite", - "socket2 0.5.10", + "socket2 0.6.1", "system-configuration", "tokio", "tower-service", @@ -2772,6 +3152,23 @@ version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ee796ad498c8d9a1d68e477df8f754ed784ef875de1414ebdaf169f70a6a784" +[[package]] +name = "indexer_core" +version = "0.1.0" +dependencies = [ + "anyhow", + "bedrock_client", + "borsh", + "common", + "futures", + "log", + "logos-blockchain-core", + "serde", + "serde_json", + "tokio", + "url", +] + 
[[package]] name = "indexer_service" version = "0.1.0" @@ -2869,15 +3266,35 @@ dependencies = [ "env_logger", "futures", "hex", + "indexer_core", "key_protocol", "log", "nssa", "nssa_core", "sequencer_core", "sequencer_runner", + "serde_json", "tempfile", + "token_core", "tokio", + "url", "wallet", + "wallet-ffi", +] + +[[package]] +name = "interpolator" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "71dd52191aae121e8611f1e8dc3e324dd0dd1dee1e6dd91d10ee07a3cfb4d9d8" + +[[package]] +name = "inventory" +version = "0.3.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc61209c082fbeb19919bee74b176221b27223e27b65d781eb91af24eb1fb46e" +dependencies = [ + "rustversion", ] [[package]] @@ -3225,6 +3642,7 @@ dependencies = [ "nssa", "nssa_core", "rand 0.8.5", + "secp256k1", "serde", "sha2", "thiserror 2.0.17", @@ -3268,6 +3686,228 @@ dependencies = [ "spin", ] +[[package]] +name = "leptos" +version = "0.8.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f9569fc37575a5d64c0512145af7630bf651007237ef67a8a77328199d315bb" +dependencies = [ + "any_spawner", + "base64", + "cfg-if", + "either_of", + "futures", + "getrandom 0.3.4", + "hydration_context", + "leptos_config", + "leptos_dom", + "leptos_hot_reload", + "leptos_macro", + "leptos_server", + "oco_ref", + "or_poisoned", + "paste", + "rand 0.9.2", + "reactive_graph", + "rustc-hash", + "rustc_version", + "send_wrapper 0.6.0", + "serde", + "serde_json", + "serde_qs", + "server_fn", + "slotmap", + "tachys", + "thiserror 2.0.17", + "throw_error", + "typed-builder 0.23.2", + "typed-builder-macro 0.23.2", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm_split_helpers", + "web-sys", +] + +[[package]] +name = "leptos_axum" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f0caa95760f87f3067e05025140becefdbdfd36cbc2adac4519f06e1f1edf4af" +dependencies = [ + "any_spawner", + "axum 0.8.8", + "dashmap 6.1.0", + "futures", + "hydration_context", + "leptos", + "leptos_integration_utils", + "leptos_macro", + "leptos_meta", + "leptos_router", + "parking_lot", + "server_fn", + "tachys", + "tokio", + "tower 0.5.2", + "tower-http", +] + +[[package]] +name = "leptos_config" +version = "0.8.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "071fc40aeb9fcab885965bad1887990477253ad51f926cd19068f45a44c59e89" +dependencies = [ + "config", + "regex", + "serde", + "thiserror 2.0.17", + "typed-builder 0.21.2", +] + +[[package]] +name = "leptos_dom" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78f4330c88694c5575e0bfe4eecf81b045d14e76a4f8b00d5fd2a63f8779f895" +dependencies = [ + "js-sys", + "or_poisoned", + "reactive_graph", + "send_wrapper 0.6.0", + "tachys", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "leptos_hot_reload" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d61ec3e1ff8aaee8c5151688550c0363f85bc37845450764c31ff7584a33f38" +dependencies = [ + "anyhow", + "camino", + "indexmap 2.12.1", + "parking_lot", + "proc-macro2", + "quote", + "rstml", + "serde", + "syn 2.0.111", + "walkdir", +] + +[[package]] +name = "leptos_integration_utils" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "13cccc9305df53757bae61bf15641bfa6a667b5f78456ace4879dfe0591ae0e8" +dependencies = [ + "futures", + "hydration_context", + "leptos", + 
"leptos_config", + "leptos_meta", + "leptos_router", + "reactive_graph", +] + +[[package]] +name = "leptos_macro" +version = "0.8.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c86ffd2e9cf3e264e9b3e16bdb086cefa26bd0fa7bc6a26b0cc5f6c1fd3178ed" +dependencies = [ + "attribute-derive", + "cfg-if", + "convert_case 0.10.0", + "html-escape", + "itertools 0.14.0", + "leptos_hot_reload", + "prettyplease", + "proc-macro-error2", + "proc-macro2", + "quote", + "rstml", + "rustc_version", + "server_fn_macro", + "syn 2.0.111", + "uuid", +] + +[[package]] +name = "leptos_meta" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2d489e38d3f541e9e43ecc2e3a815527840345a2afca629b3e23fcc1dd254578" +dependencies = [ + "futures", + "indexmap 2.12.1", + "leptos", + "or_poisoned", + "send_wrapper 0.6.0", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "leptos_router" +version = "0.8.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "01e573711f2fb9ab5d655ec38115220d359eaaf1dcb93cc0ea624543b6dba959" +dependencies = [ + "any_spawner", + "either_of", + "futures", + "gloo-net", + "js-sys", + "leptos", + "leptos_router_macro", + "or_poisoned", + "percent-encoding", + "reactive_graph", + "rustc_version", + "send_wrapper 0.6.0", + "tachys", + "thiserror 2.0.17", + "url", + "wasm-bindgen", + "web-sys", +] + +[[package]] +name = "leptos_router_macro" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "409c0bd99f986c3cfa1a4db2443c835bc602ded1a12784e22ecb28c3ed5a2ae2" +dependencies = [ + "proc-macro-error2", + "proc-macro2", + "quote", + "syn 2.0.111", +] + +[[package]] +name = "leptos_server" +version = "0.8.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbf1045af93050bf3388d1c138426393fc131f6d9e46a65519da884c033ed730" +dependencies = [ + "any_spawner", + "base64", + "codee", + "futures", + "hydration_context", + "or_poisoned", + "reactive_graph", + "send_wrapper 0.6.0", + "serde", + "serde_json", + "server_fn", + "tachys", +] + [[package]] name = "libc" version = "0.2.178" @@ -3350,6 +3990,12 @@ dependencies = [ "thiserror 1.0.69", ] +[[package]] +name = "linear-map" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bfae20f6b19ad527b550c223fddc3077a547fc70cda94b9b566575423fd303ee" + [[package]] name = "linux-raw-sys" version = "0.11.0" @@ -3609,7 +4255,7 @@ name = "logos-blockchain-http-api-common" version = "0.1.0" source = "git+https://github.com/logos-blockchain/logos-blockchain.git#451df112f8574aea2840d04fffb7e16e76d24f42" dependencies = [ - "axum", + "axum 0.7.9", "governor", "logos-blockchain-core", "logos-blockchain-key-management-system-keys", @@ -3886,6 +4532,29 @@ dependencies = [ "libc", ] +[[package]] +name = "manyhow" +version = "0.11.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b33efb3ca6d3b07393750d4030418d594ab1139cee518f0dc88db70fec873587" +dependencies = [ + "manyhow-macros", + "proc-macro2", + "quote", + "syn 2.0.111", +] + +[[package]] +name = "manyhow-macros" +version = "0.11.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "46fce34d199b78b6e6073abf984c9cf5fd3e9330145a93ee0738a7443e371495" +dependencies = [ + "proc-macro-utils", + "proc-macro2", + "quote", +] + [[package]] name = "match-lookup" version = "0.1.1" @@ -3903,6 +4572,12 @@ version = "0.7.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" +[[package]] +name = "matchit" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47e1ffaa40ddd1f3ed91f717a33c8c0ee23fff369e3aa8772b9605cc1d22f4c3" + [[package]] name = "maybe-async" version = "0.2.10" @@ -3960,6 +4635,16 @@ version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" +[[package]] +name = "mime_guess" +version = "2.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f7c44f8e672c00fe5308fa235f821cb4198414e1c77935c1ab6948d3fd78550e" +dependencies = [ + "mime", + "unicase", +] + [[package]] name = "minimal-lexical" version = "0.2.1" @@ -3978,6 +4663,23 @@ dependencies = [ "windows-sys 0.61.2", ] +[[package]] +name = "multer" +version = "3.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83e87776546dc87511aa5ee218730c92b666d7264ab6ed41f9d215af9cd5224b" +dependencies = [ + "bytes", + "encoding_rs", + "futures-util", + "http 1.4.0", + "httparse", + "memchr", + "mime", + "spin", + "version_check", +] + [[package]] name = "multiaddr" version = "0.18.2" @@ -4035,6 +4737,12 @@ dependencies = [ "tempfile", ] +[[package]] +name = "next_tuple" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "60993920e071b0c9b66f14e2b32740a4e27ffc82854dcd72035887f336a09a28" + [[package]] name = "nimue" version = "0.1.1" @@ -4117,6 +4825,7 @@ dependencies = [ "test-case", "test_program_methods", "thiserror 2.0.17", + "token_core", ] [[package]] @@ -4163,9 +4872,9 @@ dependencies = [ [[package]] name = "num-conv" -version = "0.1.0" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" +checksum = "cf97ec579c3c42f953ef76dbf8d55ac91fb219dde70e49aa4a6b7d74e9919050" [[package]] name = "num-integer" @@ -4197,6 +4906,16 @@ dependencies = [ "libm", ] +[[package]] +name = "num_cpus" +version = "1.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91df4bbde75afed763b708b7eee1e8e7651e02d97f6d5dd763e89367e957b23b" +dependencies = [ + "hermit-abi", + "libc", +] + [[package]] name = "num_enum" version = "0.7.5" @@ -4227,6 +4946,16 @@ dependencies = [ "malloc_buf", ] +[[package]] +name = "oco_ref" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed0423ff9973dea4d6bd075934fdda86ebb8c05bdf9d6b0507067d4a1226371d" +dependencies = [ + "serde", + "thiserror 2.0.17", +] + [[package]] name = "once_cell" version = "1.21.3" @@ -4312,6 +5041,12 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" +[[package]] +name = "or_poisoned" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c04f5d74368e4d0dfe06c45c8627c81bd7c317d52762d118fb9b3076f6420fd" + [[package]] name = "overwatch" version = "0.1.0" @@ -4339,6 +5074,12 @@ dependencies = [ "syn 2.0.111", ] +[[package]] +name = "parking" +version = "2.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" + [[package]] name = "parking_lot" version = "0.12.5" 
@@ -4368,6 +5109,12 @@ version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" +[[package]] +name = "pathdiff" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df94ce210e5bc13cb6651479fa48d14f601d9858cfe0467f43ae157023b938d3" + [[package]] name = "pem-rfc7468" version = "0.7.0" @@ -4496,6 +5243,16 @@ dependencies = [ "zerocopy", ] +[[package]] +name = "prettyplease" +version = "0.2.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b" +dependencies = [ + "proc-macro2", + "syn 2.0.111", +] + [[package]] name = "proc-macro-crate" version = "3.4.0" @@ -4527,6 +5284,17 @@ dependencies = [ "syn 2.0.111", ] +[[package]] +name = "proc-macro-utils" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eeaf08a13de400bc215877b5bdc088f241b12eb42f0a548d3390dc1c56bb7071" +dependencies = [ + "proc-macro2", + "quote", + "smallvec", +] + [[package]] name = "proc-macro2" version = "1.0.103" @@ -4546,6 +5314,7 @@ dependencies = [ "quote", "syn 2.0.111", "version_check", + "yansi", ] [[package]] @@ -4573,6 +5342,8 @@ dependencies = [ "nssa_core", "risc0-zkvm", "serde", + "token_core", + "token_program", ] [[package]] @@ -4677,7 +5448,7 @@ dependencies = [ "cfg_aliases", "libc", "once_cell", - "socket2 0.5.10", + "socket2 0.6.1", "tracing", "windows-sys 0.60.2", ] @@ -4691,6 +5462,28 @@ dependencies = [ "proc-macro2", ] +[[package]] +name = "quote-use" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9619db1197b497a36178cfc736dc96b271fe918875fbf1344c436a7e93d0321e" +dependencies = [ + "quote", + "quote-use-macros", +] + +[[package]] +name = "quote-use-macros" +version = "0.8.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82ebfb7faafadc06a7ab141a6f67bcfb24cb8beb158c6fe933f2f035afa99f35" +dependencies = [ + "proc-macro-utils", + "proc-macro2", + "quote", + "syn 2.0.111", +] + [[package]] name = "r-efi" version = "5.3.0" @@ -4774,6 +5567,60 @@ dependencies = [ "bitflags 2.10.0", ] +[[package]] +name = "reactive_graph" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17f0df355582937223ea403e52490201d65295bd6981383c69bfae5a1f8730c2" +dependencies = [ + "any_spawner", + "async-lock", + "futures", + "guardian", + "hydration_context", + "indexmap 2.12.1", + "or_poisoned", + "paste", + "pin-project-lite", + "rustc-hash", + "rustc_version", + "send_wrapper 0.6.0", + "serde", + "slotmap", + "thiserror 2.0.17", + "web-sys", +] + +[[package]] +name = "reactive_stores" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35372f05664a62a3dd389503371a15b8feb3396f99f6ec000de651fddb030942" +dependencies = [ + "dashmap 6.1.0", + "guardian", + "itertools 0.14.0", + "or_poisoned", + "paste", + "reactive_graph", + "reactive_stores_macro", + "rustc-hash", + "send_wrapper 0.6.0", +] + +[[package]] +name = "reactive_stores_macro" +version = "0.2.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fa40919eb2975100283b2a70e68eafce1e8bcf81f0622ff168e4c2b3f8d46bb" +dependencies = [ + "convert_case 0.8.0", + "proc-macro-error2", + "proc-macro2", + "quote", + "syn 2.0.111", +] + [[package]] name = "redox_syscall" version = "0.5.18" @@ -5211,10 
+6058,25 @@ dependencies = [ ] [[package]] -name = "ruint" -version = "1.17.0" +name = "rstml" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a68df0380e5c9d20ce49534f292a36a7514ae21350726efe1865bdb1fa91d278" +checksum = "61cf4616de7499fc5164570d40ca4e1b24d231c6833a88bff0fe00725080fd56" +dependencies = [ + "derive-where", + "proc-macro2", + "proc-macro2-diagnostics", + "quote", + "syn 2.0.111", + "syn_derive", + "thiserror 2.0.17", +] + +[[package]] +name = "ruint" +version = "1.17.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c141e807189ad38a07276942c6623032d3753c8859c146104ac2e4d68865945a" dependencies = [ "borsh", "proptest", @@ -5362,7 +6224,7 @@ dependencies = [ "strum", "tempfile", "thiserror 2.0.17", - "toml", + "toml 0.8.23", "yaml-rust2", ] @@ -5514,6 +6376,15 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f638d531eccd6e23b980caf34876660d38e265409d8e99b397ab71eb3612fad0" +[[package]] +name = "send_wrapper" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd0b0ec5f1c1ca621c432a25813d8d60c88abe6d3e08a3eb9cf37d97a0fe3d73" +dependencies = [ + "futures-core", +] + [[package]] name = "sequencer_core" version = "0.1.0" @@ -5654,6 +6525,17 @@ dependencies = [ "serde_core", ] +[[package]] +name = "serde_qs" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3faaf9e727533a19351a43cc5a8de957372163c7d35cc48c90b75cdda13c352" +dependencies = [ + "percent-encoding", + "serde", + "thiserror 2.0.17", +] + [[package]] name = "serde_spanned" version = "0.6.9" @@ -5663,6 +6545,15 @@ dependencies = [ "serde", ] +[[package]] +name = "serde_spanned" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8bbf91e5a4d6315eee45e704372590b30e260ee83af6639d64557f51b067776" +dependencies = [ + "serde_core", +] + [[package]] name = "serde_urlencoded" version = "0.7.1" @@ -5716,6 +6607,71 @@ dependencies = [ "serde", ] +[[package]] +name = "server_fn" +version = "0.8.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "353d02fa2886cd8dae0b8da0965289fa8f2ecc7df633d1ce965f62fdf9644d29" +dependencies = [ + "axum 0.8.8", + "base64", + "bytes", + "const-str 0.7.1", + "const_format", + "dashmap 6.1.0", + "futures", + "gloo-net", + "http 1.4.0", + "http-body-util", + "hyper", + "inventory", + "js-sys", + "pin-project-lite", + "rustc_version", + "rustversion", + "send_wrapper 0.6.0", + "serde", + "serde_json", + "serde_qs", + "server_fn_macro_default", + "thiserror 2.0.17", + "throw_error", + "tokio", + "tower 0.5.2", + "tower-layer", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-streams", + "web-sys", + "xxhash-rust", +] + +[[package]] +name = "server_fn_macro" +version = "0.8.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "950b8cfc9ff5f39ca879c5a7c5e640de2695a199e18e424c3289d0964cabe642" +dependencies = [ + "const_format", + "convert_case 0.8.0", + "proc-macro2", + "quote", + "rustc_version", + "syn 2.0.111", + "xxhash-rust", +] + +[[package]] +name = "server_fn_macro_default" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "63eb08f80db903d3c42f64e60ebb3875e0305be502bdc064ec0a0eab42207f00" +dependencies = [ + "server_fn_macro", + "syn 2.0.111", +] + [[package]] name = "sha1" version = "0.10.6" @@ -5769,6 +6725,15 @@ 
version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589" +[[package]] +name = "slotmap" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bdd58c3c93c3d278ca835519292445cb4b0d4dc59ccfdf7ceadaab3f8aeb4038" +dependencies = [ + "version_check", +] + [[package]] name = "smallvec" version = "1.15.1" @@ -5884,6 +6849,7 @@ version = "0.1.0" dependencies = [ "borsh", "common", + "nssa", "rocksdb", "thiserror 2.0.17", ] @@ -5943,6 +6909,18 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "syn_derive" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdb066a04799e45f5d582e8fc6ec8e6d6896040d00898eb4e6a835196815b219" +dependencies = [ + "proc-macro-error2", + "proc-macro2", + "quote", + "syn 2.0.111", +] + [[package]] name = "sync_wrapper" version = "1.0.2" @@ -5984,6 +6962,40 @@ dependencies = [ "libc", ] +[[package]] +name = "tachys" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2b2db11e455f7e84e2cc3e76f8a3f3843f7956096265d5ecff781eabe235077" +dependencies = [ + "any_spawner", + "async-trait", + "const_str_slice_concat", + "drain_filter_polyfill", + "either_of", + "erased", + "futures", + "html-escape", + "indexmap 2.12.1", + "itertools 0.14.0", + "js-sys", + "linear-map", + "next_tuple", + "oco_ref", + "or_poisoned", + "parking_lot", + "paste", + "reactive_graph", + "reactive_stores", + "rustc-hash", + "rustc_version", + "send_wrapper 0.6.0", + "slotmap", + "throw_error", + "wasm-bindgen", + "web-sys", +] + [[package]] name = "tempfile" version = "3.23.0" @@ -6095,31 +7107,40 @@ dependencies = [ ] [[package]] -name = "time" -version = "0.3.44" +name = "throw_error" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91e7d9e3bb61134e77bde20dd4825b97c010155709965fedf0f49bb138e52a9d" +checksum = "dc0ed6038fcbc0795aca7c92963ddda636573b956679204e044492d2b13c8f64" +dependencies = [ + "pin-project-lite", +] + +[[package]] +name = "time" +version = "0.3.47" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "743bd48c283afc0388f9b8827b976905fb217ad9e647fae3a379a9283c4def2c" dependencies = [ "deranged", "itoa", "num-conv", "powerfmt", - "serde", + "serde_core", "time-core", "time-macros", ] [[package]] name = "time-core" -version = "0.1.6" +version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40868e7c1d2f0b8d73e4a8c7f0ff63af4f6d19be117e90bd73eb1d62cf831c6b" +checksum = "7694e1cfe791f8d31026952abf09c69ca6f6fa4e1a1229e18988f06a04a12dca" [[package]] name = "time-macros" -version = "0.2.24" +version = "0.2.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30cfb0125f12d9c277f35663a0a33f8c30190f4e4574868a330595412d34ebf3" +checksum = "2e70e4c5a0e0a8a4823ad65dfe1a6930e4f4d756dcd9dd7939022b5e8c501215" dependencies = [ "num-conv", "time-core", @@ -6159,6 +7180,23 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" +[[package]] +name = "token_core" +version = "0.1.0" +dependencies = [ + "borsh", + "nssa_core", + "serde", +] + +[[package]] +name = "token_program" +version = "0.1.0" +dependencies = [ + "nssa_core", + "token_core", +] + [[package]] name = "tokio" version = "1.48.0" @@ -6197,6 +7235,17 @@ 
dependencies = [ "tokio", ] +[[package]] +name = "tokio-retry" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f57eb36ecbe0fc510036adff84824dd3c24bb781e21bfa67b69d556aa85214f" +dependencies = [ + "pin-project", + "rand 0.8.5", + "tokio", +] + [[package]] name = "tokio-rustls" version = "0.26.4" @@ -6219,6 +7268,18 @@ dependencies = [ "tokio-util", ] +[[package]] +name = "tokio-tungstenite" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d25a406cddcc431a75d3d9afc6a7c0f7428d4891dd973e4d54c56b46127bf857" +dependencies = [ + "futures-util", + "log", + "tokio", + "tungstenite", +] + [[package]] name = "tokio-util" version = "0.7.18" @@ -6240,11 +7301,26 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc1beb996b9d83529a9e75c17a1686767d148d70663143c7854d8b4a09ced362" dependencies = [ "serde", - "serde_spanned", + "serde_spanned 0.6.9", "toml_datetime 0.6.11", "toml_edit 0.22.27", ] +[[package]] +name = "toml" +version = "0.9.11+spec-1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f3afc9a848309fe1aaffaed6e1546a7a14de1f935dc9d89d32afd9a44bab7c46" +dependencies = [ + "indexmap 2.12.1", + "serde_core", + "serde_spanned 1.0.4", + "toml_datetime 0.7.5+spec-1.1.0", + "toml_parser", + "toml_writer", + "winnow", +] + [[package]] name = "toml_datetime" version = "0.6.11" @@ -6256,9 +7332,9 @@ dependencies = [ [[package]] name = "toml_datetime" -version = "0.7.4+spec-1.0.0" +version = "0.7.5+spec-1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe3cea6b2aa3b910092f6abd4053ea464fab5f9c170ba5e9a6aead16ec4af2b6" +checksum = "92e1cfed4a3038bc5a127e35a2d360f145e1f4b971b551a2ba5fd7aedf7e1347" dependencies = [ "serde_core", ] @@ -6271,7 +7347,7 @@ checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a" dependencies = [ "indexmap 2.12.1", "serde", - "serde_spanned", + "serde_spanned 0.6.9", "toml_datetime 0.6.11", "toml_write", "winnow", @@ -6284,16 +7360,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "84c8b9f757e028cee9fa244aea147aab2a9ec09d5325a9b01e0a49730c2b5269" dependencies = [ "indexmap 2.12.1", - "toml_datetime 0.7.4+spec-1.0.0", + "toml_datetime 0.7.5+spec-1.1.0", "toml_parser", "winnow", ] [[package]] name = "toml_parser" -version = "1.0.5+spec-1.0.0" +version = "1.0.6+spec-1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c03bee5ce3696f31250db0bbaff18bc43301ce0e8db2ed1f07cbb2acf89984c" +checksum = "a3198b4b0a8e11f09dd03e133c0280504d0801269e9afa46362ffde1cbeebf44" dependencies = [ "winnow", ] @@ -6304,6 +7380,12 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5d99f8c9a7727884afe522e9bd5edbfc91a3312b36a77b5fb8926e4c31a41801" +[[package]] +name = "toml_writer" +version = "1.0.6+spec-1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab16f14aed21ee8bfd8ec22513f7287cd4a91aa92e44edfe2c17ddd004e92607" + [[package]] name = "tower" version = "0.4.13" @@ -6339,14 +7421,24 @@ checksum = "d4e6559d53cc268e5031cd8429d05415bc4cb4aefc4aa5d6cc35fbf5b924a1f8" dependencies = [ "bitflags 2.10.0", "bytes", + "futures-core", "futures-util", "http 1.4.0", "http-body", + "http-body-util", + "http-range-header", + "httpdate", "iri-string", + "mime", + "mime_guess", + "percent-encoding", "pin-project-lite", + "tokio", + "tokio-util", "tower 0.5.2", 
"tower-layer", "tower-service", + "tracing", ] [[package]] @@ -6367,7 +7459,7 @@ version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3790eac6ad3fb8d9d96c2b040ae06e2517aa24b067545d1078b96ae72f7bb9a7" dependencies = [ - "axum", + "axum 0.7.9", "forwarded-header-value", "governor", "http 1.4.0", @@ -6441,6 +7533,63 @@ version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" +[[package]] +name = "tungstenite" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8628dcc84e5a09eb3d8423d6cb682965dea9133204e8fb3efee74c2a0c259442" +dependencies = [ + "bytes", + "data-encoding", + "http 1.4.0", + "httparse", + "log", + "rand 0.9.2", + "sha1", + "thiserror 2.0.17", + "utf-8", +] + +[[package]] +name = "typed-builder" +version = "0.21.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fef81aec2ca29576f9f6ae8755108640d0a86dd3161b2e8bca6cfa554e98f77d" +dependencies = [ + "typed-builder-macro 0.21.2", +] + +[[package]] +name = "typed-builder" +version = "0.23.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "31aa81521b70f94402501d848ccc0ecaa8f93c8eb6999eb9747e72287757ffda" +dependencies = [ + "typed-builder-macro 0.23.2", +] + +[[package]] +name = "typed-builder-macro" +version = "0.21.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ecb9ecf7799210407c14a8cfdfe0173365780968dc57973ed082211958e0b18" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.111", +] + +[[package]] +name = "typed-builder-macro" +version = "0.23.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "076a02dc54dd46795c2e9c8282ed40bcfb1e22747e955de9389a1de28190fb26" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.111", +] + [[package]] name = "typenum" version = "1.19.0" @@ -6453,6 +7602,12 @@ version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" +[[package]] +name = "unicase" +version = "2.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbc4bc3a9f746d862c45cb89d705aa10f187bb96c76001afab07a0d35ce60142" + [[package]] name = "unicode-ident" version = "1.0.22" @@ -6526,6 +7681,24 @@ dependencies = [ "serde", ] +[[package]] +name = "urlencoding" +version = "2.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da" + +[[package]] +name = "utf-8" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" + +[[package]] +name = "utf8-width" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1292c0d970b54115d14f2492fe0170adf21d68a1de108eebc51c1df4f346a091" + [[package]] name = "utf8_iter" version = "1.0.4" @@ -6538,6 +7711,17 @@ version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" +[[package]] +name = "uuid" +version = "1.20.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee48d38b119b0cd71fe4141b30f5ba9c7c5d9f4e7a3a8b4a674e4b6ef789976f" +dependencies = [ + "getrandom 0.3.4", + "js-sys", + 
"wasm-bindgen", +] + [[package]] name = "valuable" version = "0.1.1" @@ -6593,7 +7777,22 @@ dependencies = [ "serde", "serde_json", "sha2", + "token_core", "tokio", + "url", +] + +[[package]] +name = "wallet-ffi" +version = "0.1.0" +dependencies = [ + "cbindgen", + "common", + "nssa", + "nssa_core", + "tempfile", + "tokio", + "wallet", ] [[package]] @@ -6691,6 +7890,28 @@ dependencies = [ "web-sys", ] +[[package]] +name = "wasm_split_helpers" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a114b3073258dd5de3d812cdd048cca6842342755e828a14dbf15f843f2d1b84" +dependencies = [ + "async-once-cell", + "wasm_split_macros", +] + +[[package]] +name = "wasm_split_macros" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56481f8ed1a9f9ae97ea7b08a5e2b12e8adf9a7818a6ba952b918e09c7be8bf0" +dependencies = [ + "base16", + "quote", + "sha2", + "syn 2.0.111", +] + [[package]] name = "web-sys" version = "0.3.83" @@ -7094,6 +8315,12 @@ dependencies = [ "zeroize", ] +[[package]] +name = "xxhash-rust" +version = "0.8.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fdd20c5420375476fbd4394763288da7eb0cc0b8c11deed431a91562af7335d3" + [[package]] name = "yaml-rust2" version = "0.10.4" @@ -7105,6 +8332,12 @@ dependencies = [ "hashlink", ] +[[package]] +name = "yansi" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" + [[package]] name = "yoke" version = "0.8.1" diff --git a/Cargo.toml b/Cargo.toml index 3b1c9f12..bf17e885 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,3 +1,6 @@ +[workspace.package] +license = "MIT or Apache-2.0" + [workspace] resolver = "3" members = [ @@ -6,6 +9,7 @@ members = [ "key_protocol", "mempool", "wallet", + "wallet-ffi", "common", "nssa", "nssa/core", @@ -15,6 +19,9 @@ members = [ "indexer_service", "indexer_service/protocol", "indexer_service/rpc", + "explorer_service", + "programs/token/core", + "programs/token", "program_methods", "program_methods/guest", "test_program_methods", @@ -23,6 +30,7 @@ members = [ "examples/program_deployment/methods", "examples/program_deployment/methods/guest", "bedrock_client", + "indexer_core", ] [workspace.dependencies] @@ -39,8 +47,12 @@ indexer_service = { path = "indexer_service" } indexer_service_protocol = { path = "indexer_service/protocol" } indexer_service_rpc = { path = "indexer_service/rpc" } wallet = { path = "wallet" } +wallet-ffi = { path = "wallet-ffi" } +token_core = { path = "programs/token/core" } +token_program = { path = "programs/token" } test_program_methods = { path = "test_program_methods" } bedrock_client = { path = "bedrock_client" } +indexer_core = { path = "indexer_core" } tokio = { version = "1.28.2", features = [ "net", @@ -85,12 +97,14 @@ chrono = "0.4.41" borsh = "1.5.7" base58 = "0.2.0" itertools = "0.14.0" -url = "2.5.4" +url = { version = "2.5.4", features = ["serde"] } +tokio-retry = "0.3.0" schemars = "1.2.0" logos-blockchain-common-http-client = { git = "https://github.com/logos-blockchain/logos-blockchain.git" } logos-blockchain-key-management-system-service = { git = "https://github.com/logos-blockchain/logos-blockchain.git" } logos-blockchain-core = { git = "https://github.com/logos-blockchain/logos-blockchain.git" } +logos-blockchain-chain-broadcast-service = { git = "https://github.com/logos-blockchain/logos-blockchain.git" } rocksdb = { version = "0.24.0", 
default-features = false, features = [
     "snappy",
 ] }
@@ -110,3 +124,10 @@ actix-web = { version = "=4.1.0", default-features = false, features = [
 ] }
 clap = { version = "4.5.42", features = ["derive", "env"] }
 reqwest = { version = "0.12", features = ["json", "rustls-tls", "stream"] }
+
+# Profile for leptos WASM release builds
+[profile.wasm-release]
+inherits = "release"
+opt-level = 'z'
+lto = true
+codegen-units = 1
diff --git a/LEZ testnet v0.1 tutorials/amm.md b/LEZ testnet v0.1 tutorials/amm.md
new file mode 100644
index 00000000..60751517
--- /dev/null
+++ b/LEZ testnet v0.1 tutorials/amm.md
@@ -0,0 +1,109 @@
+# Automated Market Maker (AMM)
+
+This tutorial covers the AMM program in LEZ. The AMM manages liquidity pools and enables swaps between custom tokens. By the end, you will have practiced:
+1. Creating a liquidity pool for a token pair.
+2. Swapping tokens.
+3. Withdrawing liquidity from the pool.
+4. Adding liquidity to the pool.
+
+## 1. Creating a liquidity pool for a token pair
+
+We start by creating a pool for the tokens created earlier. In return for providing liquidity, you receive liquidity provider (LP) tokens. LP tokens represent your share of the pool and are required to withdraw liquidity later.
+
+> [!NOTE]
+> The AMM does not currently charge swap fees or distribute rewards to liquidity providers. LP tokens therefore represent only a proportional share of the pool reserves. Fee support will be added in future versions.
+
+### a. Create an LP holding account
+
+```bash
+wallet account new public
+
+# Output:
+Generated new account with account_id Public/FHgLW9jW4HXMV6egLWbwpTqVAGiCHw2vkg71KYSuimVf
+```
+
+### b. Initialize the pool
+
+Deposit tokens A and B and specify the account that will receive LP tokens:
+
+```bash
+wallet amm new \
+  --user-holding-a Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw \
+  --user-holding-b Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6 \
+  --user-holding-lp Public/FHgLW9jW4HXMV6egLWbwpTqVAGiCHw2vkg71KYSuimVf \
+  --balance-a 100 \
+  --balance-b 200
+```
+
+> [!Important]
+> The LP holding account is owned by the token program, so LP tokens are managed using the same token infrastructure as regular tokens.
+
+```bash
+wallet account get --account-id Public/FHgLW9jW4HXMV6egLWbwpTqVAGiCHw2vkg71KYSuimVf
+
+# Output:
+Holding account owned by token program
+{"account_type":"Token holding","definition_id":"7BeDS3e28MA5Err7gBswmR1fUKdHXqmUpTefNPu3pJ9i","balance":100}
+```
+
+> [!Tip]
+> If you inspect the `user-holding-a` and `user-holding-b` accounts, you will see that 100 and 200 tokens were deducted. Those tokens now reside in the pool and are available for swaps by any user.
+
+## 2. Swapping
+
+Use `wallet amm swap` to perform a token swap:
+
+```bash
+# --amount-in: the amount of tokens to swap
+# --min-amount-out: the minimum number of tokens expected in return
+# --token-definition: the definition ID of the token being provided to the swap.
+#   In this case, we are swapping from TOKENA to TOKENB, and so this is the definition ID of TOKENA.
+wallet amm swap \
+  --user-holding-a Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw \
+  --user-holding-b Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6 \
+  --amount-in 5 \
+  --min-amount-out 8 \
+  --token-definition 4X9kAcnCZ1Ukkbm3nywW9xfCNPK8XaMWCk3zfs1sP4J7
+```
+
+Once executed, 5 tokens are deducted from the Token A holding account and the corresponding amount (computed by the pool’s pricing function) is credited to the Token B holding account.
+
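+The tutorial does not state which pricing function the pool uses, so purely as a mental model, the sketch below assumes a constant-product curve with no fees (consistent with the no-fee note in section 1, but still an assumption rather than the documented behaviour of the AMM program). With the reserves created above (100 TOKENA / 200 TOKENB), swapping 5 TOKENA in would yield about 9 TOKENB, which is why a `--min-amount-out` of 8 can be satisfied:
+
+```rust
+/// Illustrative constant-product quote with no fees. This is an assumption made
+/// for the example, not the actual pricing code of the LEZ AMM program.
+fn amount_out(reserve_in: u128, reserve_out: u128, amount_in: u128) -> u128 {
+    // Keeping reserve_in * reserve_out constant gives:
+    // out = reserve_out * amount_in / (reserve_in + amount_in), rounded down.
+    (reserve_out * amount_in) / (reserve_in + amount_in)
+}
+
+fn main() {
+    // Pool reserves right after `wallet amm new`: 100 TOKENA and 200 TOKENB.
+    let out = amount_out(100, 200, 5);
+    println!("{out}"); // 9, which clears the --min-amount-out 8 threshold
+}
+```
+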
+## 3. Withdrawing liquidity from the pool
+
+Liquidity providers can withdraw assets by redeeming (burning) LP tokens. The amount received is proportional to the share of LP tokens redeemed relative to the total LP supply (see the sketch at the end of this tutorial).
+
+Use `wallet amm remove-liquidity`:
+
+```bash
+wallet amm remove-liquidity \
+  --user-holding-a Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw \
+  --user-holding-b Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6 \
+  --user-holding-lp Public/FHgLW9jW4HXMV6egLWbwpTqVAGiCHw2vkg71KYSuimVf \
+  --balance-lp 20 \
+  --min-amount-a 1 \
+  --min-amount-b 1
+```
+
+> [!Important]
+> This burns `balance-lp` LP tokens from the user’s LP holding account. In return, the AMM transfers tokens A and B from the pool vaults to the user’s holding accounts, based on current reserves.
+> The `min-amount-a` and `min-amount-b` parameters set the minimum acceptable outputs. If the computed amounts fall below either threshold, the instruction fails to protect against unfavorable pool changes.
+
+## 4. Adding liquidity to the pool
+
+To add liquidity, deposit tokens A and B in the ratio implied by current pool reserves. In return, the AMM mints new LP tokens that represent your proportional share.
+
+Use `wallet amm add-liquidity`:
+
+```bash
+wallet amm add-liquidity \
+  --user-holding-a Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw \
+  --user-holding-b Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6 \
+  --user-holding-lp Public/FHgLW9jW4HXMV6egLWbwpTqVAGiCHw2vkg71KYSuimVf \
+  --min-amount-lp 1 \
+  --max-amount-a 10 \
+  --max-amount-b 10
+```
+
+> [!Important]
+> `max-amount-a` and `max-amount-b` cap how many tokens A and B can be taken from the user’s accounts. The AMM computes the required amounts based on the pool’s reserve ratio.
+> `min-amount-lp` sets the minimum LP tokens to mint. If the computed LP amount falls below this threshold, the instruction fails.
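+
+The exact amounts in sections 3 and 4 are computed by the AMM program, but the proportional rule described above can be sketched as follows. This is a simplification that ignores rounding and the pool’s initial LP-supply convention, which this tutorial does not specify; the `Pool` type and the numbers in `main` are hypothetical.
+
+```rust
+/// Illustrative only: proportional LP accounting as described in sections 3 and 4,
+/// not the actual arithmetic of the LEZ AMM program.
+struct Pool {
+    reserve_a: u128,
+    reserve_b: u128,
+    lp_supply: u128,
+}
+
+impl Pool {
+    /// remove-liquidity: burning `lp_burned` returns the same share of both reserves.
+    fn withdraw_amounts(&self, lp_burned: u128) -> (u128, u128) {
+        (
+            self.reserve_a * lp_burned / self.lp_supply,
+            self.reserve_b * lp_burned / self.lp_supply,
+        )
+    }
+
+    /// add-liquidity: depositing `amount_a` requires B at the current reserve ratio
+    /// and mints LP tokens for the same proportional share.
+    fn deposit_for(&self, amount_a: u128) -> (u128, u128) {
+        let required_b = amount_a * self.reserve_b / self.reserve_a;
+        let lp_minted = self.lp_supply * amount_a / self.reserve_a;
+        (required_b, lp_minted)
+    }
+}
+
+fn main() {
+    // Hypothetical pool state; the initial LP-supply convention is not documented here.
+    let pool = Pool { reserve_a: 100, reserve_b: 200, lp_supply: 100 };
+    assert_eq!(pool.withdraw_amounts(20), (20, 40)); // 20% of the LP supply -> 20% of each reserve
+    assert_eq!(pool.deposit_for(10), (20, 10)); // deposits must follow the 1:2 reserve ratio
+}
+```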
Create two new public accounts: + +```bash +wallet account new public + +# Output: +Generated new account with account_id Public/4X9kAcnCZ1Ukkbm3nywW9xfCNPK8XaMWCk3zfs1sP4J7 +``` + +```bash +wallet account new public + +# Output: +Generated new account with account_id Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw +``` + +2. Create the token (Token A): + +```bash +wallet token new \ + --name TOKENA \ + --total-supply 1337 \ + --definition-account-id Public/4X9kAcnCZ1Ukkbm3nywW9xfCNPK8XaMWCk3zfs1sP4J7 \ + --supply-account-id Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw +``` + +3. Inspect the initialized accounts: + +```bash +wallet account get --account-id Public/4X9kAcnCZ1Ukkbm3nywW9xfCNPK8XaMWCk3zfs1sP4J7 + +# Output: +Definition account owned by token program +{"account_type":"Token definition","name":"TOKENA","total_supply":1337} +``` + +```bash +wallet account get --account-id Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw + +# Output: +Holding account owned by token program +{"account_type":"Token holding","definition_id":"4X9kAcnCZ1Ukkbm3nywW9xfCNPK8XaMWCk3zfs1sP4J7","balance":1337} +``` + +### b. Public definition account and private supply account + +1. Create fresh accounts for this example: + +> [!Important] +> You cannot reuse the accounts from the previous example. Create new ones here. + +```bash +wallet account new public + +# Output: +Generated new account with account_id Public/GQ3C8rbprTtQUCvkuVBRu3v9wvUvjafCMFqoSPvTEVii +``` + +```bash +wallet account new private + +# Output: +Generated new account with account_id Private/HMRHZdPw4pbyPVZHNGrV6K5AA95wACFsHTRST84fr3CF +With npk 6a2dfe433cf28e525aa0196d719be3c16146f7ee358ca39595323f94fde38f93 +With ipk 03d59abf4bee974cc12ddb44641c19f0b5441fef39191f047c988c29a77252a577 +``` + +2. Create the token (Token B): + +```bash +wallet token new \ + --name TOKENB \ + --total-supply 7331 \ + --definition-account-id Public/GQ3C8rbprTtQUCvkuVBRu3v9wvUvjafCMFqoSPvTEVii \ + --supply-account-id Private/HMRHZdPw4pbyPVZHNGrV6K5AA95wACFsHTRST84fr3CF +``` + +3. Inspect the accounts: + +```bash +wallet account get --account-id Public/GQ3C8rbprTtQUCvkuVBRu3v9wvUvjafCMFqoSPvTEVii + +# Output: +Definition account owned by token program +{"account_type":"Token definition","name":"TOKENB","total_supply":7331} +``` + +```bash +wallet account get --account-id Private/HMRHZdPw4pbyPVZHNGrV6K5AA95wACFsHTRST84fr3CF + +# Output: +Holding account owned by token program +{"account_type":"Token holding","definition_id":"GQ3C8rbprTtQUCvkuVBRu3v9wvUvjafCMFqoSPvTEVii","balance":7331} +``` + +> [!Important] +> As a private account, the supply account is visible only in your local wallet storage. + +## 2. Custom token transfers + +The Token program can move balances between token holding accounts. If the recipient account is uninitialized, the token program will automatically claim it. Use `wallet token send` to execute a transfer. + +### a. Create a recipient account + +```bash +wallet account new public + +# Output: +Generated new account with account_id Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6 +``` + +### b. Send 1000 TOKENB to the recipient + +```bash +wallet token send \ + --from Private/HMRHZdPw4pbyPVZHNGrV6K5AA95wACFsHTRST84fr3CF \ + --to Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6 \ + --amount 1000 +``` + +### c. 
Inspect the recipient account + +```bash +wallet account get --account-id Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6 + +# Output: +Holding account owned by token program +{"account_type":"Token holding","definition_id":"GQ3C8rbprTtQUCvkuVBRu3v9wvUvjafCMFqoSPvTEVii","balance":1000} +``` diff --git a/LEZ testnet v0.1 tutorials/token-transfer.md b/LEZ testnet v0.1 tutorials/token-transfer.md new file mode 100644 index 00000000..8019ae56 --- /dev/null +++ b/LEZ testnet v0.1 tutorials/token-transfer.md @@ -0,0 +1,250 @@ +This tutorial walks through native token transfers between public and private accounts using the Authenticated-Transfers program. You will create and initialize accounts, fund them with the Pinata program, and run transfers across different privacy combinations. By the end, you will have practiced: +1. Public account creation and initialization. +2. Account funding through the Pinata program. +3. Native token transfers between public accounts. +4. Private account creation. +5. Native token transfer from a public account to a private account. +6. Native token transfer from a public account to a private account owned by someone else. + +--- + +The CLI provides commands to manage accounts. Run `wallet account` to see the options available: +```bash +Commands: + get Get account data + new Produce new public or private account + sync-private Sync private accounts + help Print this message or the help of the given subcommand(s) +``` + +## 1. Public account creation and initialization +> [!Important] +> Public accounts live on-chain and are identified by a 32-byte Account ID. Running `wallet account new public` generates a fresh keypair for the signature scheme used in LEZ. +> The account ID is derived from the public key, and the private key signs transactions and authorizes program executions. +> The CLI can create both public and private accounts. + +### a. New public account creation +```bash +wallet account new public + +# Output: +Generated new account with account_id Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ +``` +> [!Tip] +> Save this account ID. You will use it in later commands. + +### b. Account initialization + +To query the account’s current status, run: + +```bash +# Replace the id with yours +wallet account get --account-id Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ + +# Output: +Account is Uninitialized +``` + +In this example, we initialize the account for the authenticated-transfer program, which manages native token transfers and enforces authenticated debits. + +1. Initialize the account: +```bash +# This command submits a public transaction executing the `init` function of the +# authenticated-transfer program. The wallet polls the sequencer until the +# transaction is included in a block, which may take several seconds. +wallet auth-transfer init --account-id Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ +``` + +2. Check the updated account status: +```bash +wallet account get --account-id Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ + +# Output: +Account owned by authenticated-transfer program +{"balance":0} +``` + +> [!NOTE] +> New accounts start uninitialized, meaning no program owns them yet. Any program may claim an uninitialized account; once claimed, that program owns it. +> Owned accounts can only be modified through executions of the owning program. The only exception is native-token credits: any program may credit native tokens to any account. 
+> Debiting native tokens must always be performed by the owning program.
+
+## 2. Account funding through the Piñata program
+Now that the account is initialized under the authenticated-transfer program, fund it using the testnet Piñata program.
+
+```bash
+# Replace with your id
+wallet pinata claim --to Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ
+```
+
+After the claim succeeds, the account is funded:
+
+```bash
+wallet account get --account-id Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ
+
+# Output:
+Account owned by authenticated-transfer program
+{"balance":150}
+```
+
+## 3. Native token transfers between public accounts
+LEZ includes a program for managing native tokens. Run `wallet auth-transfer` to see the available commands:
+```bash
+Commands:
+  init  Initialize account under the authenticated-transfer program
+  send  Send native tokens from one account to another with variable privacy
+  help  Print this message or the help of the given subcommand(s)
+```
+
+We already used `init`. Now use `send` to execute a transfer.
+
+### a. Create a recipient account
+```bash
+wallet account new public
+
+# Output:
+Generated new account with account_id Public/Ev1JprP9BmhbFVQyBcbznU8bAXcwrzwRoPTetXdQPAWS
+```
+
+> [!NOTE]
+> The new account is uninitialized. The authenticated-transfer program will claim any uninitialized account used in a transfer, so manual initialization isn’t required.
+
+### b. Send 37 tokens to the new account
+```bash
+wallet auth-transfer send \
+  --from Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ \
+  --to Public/Ev1JprP9BmhbFVQyBcbznU8bAXcwrzwRoPTetXdQPAWS \
+  --amount 37
+```
+
+### c. Check both accounts
+```bash
+# Sender account (use your sender ID)
+wallet account get --account-id Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ
+
+# Output:
+Account owned by authenticated-transfer program
+{"balance":113}
+```
+
+```bash
+# Recipient account
+wallet account get --account-id Public/Ev1JprP9BmhbFVQyBcbznU8bAXcwrzwRoPTetXdQPAWS
+
+# Output:
+Account owned by authenticated-transfer program
+{"balance":37}
+```
+
+## 4. Private account creation
+
+> [!Important]
+> Private accounts are structurally identical to public accounts, but their values are stored off-chain. On-chain, only a 32-byte commitment is recorded.
+> Transactions include encrypted private values so the owner can recover them, and the decryption keys are never shared.
+> Private accounts use two keypairs: nullifier keys for privacy-preserving executions and viewing keys for encrypting and decrypting values.
+> The private account ID is derived from the nullifier public key.
+> Private accounts can be initialized by anyone, but once initialized they can only be modified by the owner’s keys.
+> Updates include a new commitment and a nullifier for the old state, which prevents linkage between versions.
+
+### a. Create a private account
+
+```bash
+wallet account new private
+
+# Output:
+Generated new account with account_id Private/HacPU3hakLYzWtSqUPw6TUr8fqoMieVWovsUR6sJf7cL
+With npk e6366f79d026c8bd64ae6b3d601f0506832ec682ab54897f205fffe64ec0d951
+With ipk 02ddc96d0eb56e00ce14994cfdaec5ae1f76244180a919545983156e3519940a17
+```
+
+> [!Tip]
+> Focus on the account ID for now. The `npk` and `ipk` values are stored locally and used to build privacy-preserving transactions. The private account ID is derived from `npk`.
+ +Just like public accounts, new private accounts start out uninitialized: + +```bash +wallet account get --account-id Private/HacPU3hakLYzWtSqUPw6TUr8fqoMieVWovsUR6sJf7cL + +# Output: +Account is Uninitialized +``` + +> [!Important] +> Private accounts are never visible to the network. They exist only in your local wallet storage. + +## 5. Native token transfer from a public account to a private account + +> [!Important] +> Sending tokens to an uninitialized private account causes the authenticated-transfer program to claim it, just like with public accounts. Program logic is the same regardless of account type. + +### a. Send 17 tokens to the private account + +> [!Note] +> The syntax matches public-to-public transfers, but the recipient is a private ID. This runs locally, generates a proof, and submits it to the sequencer. It may take 30 seconds to 4 minutes. + +```bash +wallet auth-transfer send \ + --from Public/Ev1JprP9BmhbFVQyBcbznU8bAXcwrzwRoPTetXdQPAWS \ + --to Private/HacPU3hakLYzWtSqUPw6TUr8fqoMieVWovsUR6sJf7cL \ + --amount 17 +``` + +### b. Check both accounts + +```bash +# Public sender account +wallet account get --account-id Public/Ev1JprP9BmhbFVQyBcbznU8bAXcwrzwRoPTetXdQPAWS + +# Output: +Account owned by authenticated-transfer program +{"balance":20} +``` + +```bash +# Private recipient account +wallet account get --account-id Private/HacPU3hakLYzWtSqUPw6TUr8fqoMieVWovsUR6sJf7cL + +# Output: +Account owned by authenticated-transfer program +{"balance":17} +``` + +> [!Note] +> The last command does not query the network. It works offline because private account data is stored locally. Other users cannot read your private balances. + +> [!Caution] +> Private accounts can only be modified by their owner’s keys. The exception is initialization: any user can initialize an uninitialized private account. This enables transfers to a private account owned by someone else, as long as that account is uninitialized. + +## 6. Native token transfer from a public account to a private account owned by someone else + +> [!Important] +> We’ll simulate transferring to someone else by creating a new private account we own and treating it as if it belonged to another user. + +### a. Create a new uninitialized private account + +```bash +wallet account new private + +# Output: +Generated new account with account_id Private/AukXPRBmrYVqoqEW2HTs7N3hvTn3qdNFDcxDHVr5hMm5 +With npk 0c95ebc4b3830f53da77bb0b80a276a776cdcf6410932acc718dcdb3f788a00e +With ipk 039fd12a3674a880d3e917804129141e4170d419d1f9e28a3dcf979c1f2369cb72 +``` + +> [!Tip] +> Ignore the private account ID here and use the `npk` and `ipk` values to send to a foreign private account. + +```bash +wallet auth-transfer send \ + --from Public/Ev1JprP9BmhbFVQyBcbznU8bAXcwrzwRoPTetXdQPAWS \ + --to-npk 0c95ebc4b3830f53da77bb0b80a276a776cdcf6410932acc718dcdb3f788a00e \ + --to-ipk 039fd12a3674a880d3e917804129141e4170d419d1f9e28a3dcf979c1f2369cb72 \ + --amount 3 +``` + +> [!Warning] +> This command creates a privacy-preserving transaction, which may take a few minutes. The updated values are encrypted and included in the transaction. +> Once accepted, the recipient must run `wallet account sync-private` to scan the chain for their encrypted updates and refresh local state. + +> [!Note] +> You have seen transfers between two public accounts and from a public sender to a private recipient. Transfers from a private sender, whether to a public account or to another private account, follow the same pattern. 
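+
+For example, a transfer from the private account created in section 4 back to a public account reuses the same flags. This is only a sketch using the sample IDs from earlier steps; the amount and recipient are illustrative rather than taken from a recorded run, so substitute your own accounts:
+
+```bash
+wallet auth-transfer send \
+  --from Private/HacPU3hakLYzWtSqUPw6TUr8fqoMieVWovsUR6sJf7cL \
+  --to Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ \
+  --amount 5
+```
+
+Like any transfer that touches a private account, this runs locally, produces a proof, and may take a few minutes before the sequencer accepts it.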
diff --git a/LEZ testnet v0.1 tutorials/wallet-setup.md b/LEZ testnet v0.1 tutorials/wallet-setup.md
new file mode 100644
index 00000000..e5a43961
--- /dev/null
+++ b/LEZ testnet v0.1 tutorials/wallet-setup.md
@@ -0,0 +1,26 @@
+This repository includes a CLI for interacting with the Logos Blockchain. To install it, run the following command from the root of the repository:
+
+```bash
+cargo install --path wallet --force
+```
+
+To check that everything is working, run `wallet help`.
+
+## Available Wallet Commands
+
+| Command | Description |
+|------------------------|-------------------------------------------------------------|
+| `wallet auth-transfer` | Authenticated transfer (init, send) |
+| `wallet chain-info` | Chain info queries (current-block-id, block, transaction) |
+| `wallet account` | Account management (get, list, new, sync-private) |
+| `wallet pinata` | Piñata faucet (claim) |
+| `wallet token` | Token operations (new, send) |
+| `wallet amm` | AMM operations (new, swap, add-liquidity, remove-liquidity) |
+| `wallet check-health` | Check that the wallet can connect to the node |
+| `wallet config` | Config setup (get, set) |
+| `wallet restore-keys` | Restore keys from a given password at a given `depth` |
+| `wallet deploy-program`| Program deployment |
+| `wallet help` | Help |
+
+Some completion scripts exist; see the [completions](./completions/README.md) folder.
+
diff --git a/README.md b/README.md
index 8a85b028..70d06fab 100644
--- a/README.md
+++ b/README.md
@@ -1,70 +1,75 @@
-# Nescience
+# Logos Execution Zone (LEZ)
+
+Logos Execution Zone (LEZ) is a programmable blockchain that cleanly separates public and private state while keeping them fully interoperable. Developers can build apps that operate across transparent and privacy-preserving accounts without changing their logic. Privacy is enforced by the protocol itself through zero-knowledge proofs (ZKPs), so it is always available and automatic.
 
-Nescience State Separation Architecture (NSSA) is a programmable blockchain system that introduces a clean separation between public and private states, while keeping them fully interoperable. It lets developers build apps that can operate across both transparent and privacy-preserving accounts. Privacy is handled automatically by the protocol through zero-knowledge proofs (ZKPs). The result is a programmable blockchain where privacy comes built-in.
 
 ## Background
 
-Typically, public blockchains maintain a fully transparent state, where the mapping from account IDs to account values is entirely visible. In NSSA, we introduce a parallel *private state*, a new layer of accounts that coexists with the public one. The public and private states can be viewed as a partition of the account ID space: accounts with public IDs are openly visible, while private accounts are accessible only to holders of the corresponding viewing keys. Consistency across both states is enforced through zero-knowledge proofs (ZKPs).
+These features are provided by the Logos Execution Environment (LEE). Traditional public blockchains expose a fully transparent state: the mapping from account IDs to account values is entirely visible. LEE introduces a parallel *private state* that coexists with the public one. Together, public and private accounts form a partition of the account ID space: public IDs are visible on-chain, while private accounts are accessible only to holders of the corresponding viewing keys. Consistency across both states is enforced by ZKPs.
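+
+From a user’s point of view, the partition shows up directly in the account IDs handled by the wallet CLI in this repository. The commands below are taken from the testnet tutorials (the IDs are the tutorials’ sample values); the first query reads on-chain state, while the second is answered from local wallet storage because only a commitment to the private account lives on-chain:
+
+```bash
+# Public account: state is read from the chain
+wallet account get --account-id Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ
+
+# Private account: state is read from local wallet storage
+wallet account get --account-id Private/HacPU3hakLYzWtSqUPw6TUr8fqoMieVWovsUR6sJf7cL
+```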
+ +Public accounts are stored on-chain as a visible map from IDs to account states, and their values are updated in place. Private accounts are never stored on-chain in raw form. Each update produces a new commitment that binds the current value while keeping it hidden. Previous commitments remain on-chain, but a nullifier set marks old versions as spent, ensuring that only the most recent private state can be used in execution. -Public accounts are represented on-chain as a visible map from IDs to account states and are modified in-place when their values change. Private accounts, by contrast, are never stored in raw form on-chain. Each update creates a new commitment, which cryptographically binds the current value of the account while preserving privacy. Commitments of previous valid versions remain on-chain, but a nullifier set is maintained to mark old versions as spent, ensuring that only the most up-to-date version of each private account can be used in any execution. ### Programmability and selective privacy -Our goal is to enable full programmability within this hybrid model, matching the flexibility and composability of public blockchains. Developers write and deploy programs in NSSA just as they would on any other blockchain. Privacy, along with the ability to execute programs involving any combination of public and private accounts, is handled entirely at the protocol level and available out of the box for all programs. From the program’s perspective, all accounts are indistinguishable. This abstraction allows developers to focus purely on business logic, while the system transparently enforces privacy and consistency guarantees. +LEZ aims to deliver full programmability in a hybrid public/private model, with the same flexibility and composability as public blockchains. Developers write and deploy programs in LEZ just as they would elsewhere. The protocol automatically supports executions that involve any combination of public and private accounts. From the program’s perspective, all accounts look the same, and privacy is enforced transparently. This lets developers focus on business logic while the system guarantees privacy and correctness. -To the best of our knowledge, this approach is unique to Nescience. Other programmable blockchains with a focus on privacy typically adopt a developer-driven model for private execution, meaning that dApp logic must explicitly handle private inputs correctly. In contrast, Nescience handles privacy at the protocol level, so developers do not need to modify their programs—private and public accounts are treated uniformly, and privacy-preserving execution is available out of the box. +To our knowledge, this design is unique to LEZ. Other privacy-focused programmable blockchains often require developers to explicitly handle private inputs inside their app logic. In LEZ, privacy is protocol-level: programs do not change, accounts are treated uniformly, and private execution works out of the box. -### Example: creating and transferring tokens across states +--- + +## Example: Creating and transferring tokens across states + +1. Token creation (public execution) + - Alice submits a transaction that executes the token program `New` function on-chain. + - A new public token definition account is created. + - The minted tokens are recorded on-chain in Alice’s public account. -1. Token creation (public execution): - - Alice submits a transaction to execute the token program `New` function on-chain. 
- - A new public token account is created, representing the token. - - The minted tokens are recorded on-chain and fully visible on Alice's public account. 2. Transfer from public to private (local / privacy-preserving execution) - - Alice executes the token program `Transfer` function locally, specifying a Bob’s private account as recipient. + - Alice runs the token program `Transfer` function locally, sending to Bob’s private account. - A ZKP of correct execution is generated. - - The proof is submitted to the blockchain, and validator nodes verify it. - - Alice's public account balance is modified accordingly. - - Bob’s private account and balance remain hidden, while the transfer is provably valid. + - The proof is submitted to the blockchain and verified by validators. + - Alice’s public balance is updated on-chain. + - Bob’s private balance remains hidden, while the transfer is provably correct. + 3. Transferring private to public (local / privacy-preserving execution) - - Bob executes the token program `Transfer` function locally, specifying a Charlie’s public account as recipient. + - Bob executes the token program `Transfer` function locally, sending to Charlie’s public account. - A ZKP of correct execution is generated. - - Bob’s private account and balance still remain hidden. - - Charlie's public account is modified with the new tokens added. -4. Transferring public to public (public execution): - - Alice submits a transaction to execute the token program `Transfer` function on-chain, specifying Charlie's public account as recipient. - - The execution is handled on-chain without ZKPs involved. - - Alice's and Charlie's accounts are modified according to the transaction. + - Bob’s private balance stays hidden. + - Charlie’s public account is updated on-chain. + +4. Transfer from public to public (public execution) + - Alice submits an on-chain transaction to run `Transfer`, sending to Charlie’s public account. + - Execution is handled fully on-chain without ZKPs. + - Alice’s and Charlie’s public balances are updated. + -#### Key points: -- The same token program is used in all executions. -- The difference lies in execution mode: public executions update visible accounts on-chain, while private executions rely on ZKPs. -- Validators only need to verify proofs for privacy-preserving transactions, keeping processing efficient. +### Key points: +- The same token program is used in every execution. +- The only difference is execution mode: public execution updates visible state on-chain, while private execution relies on ZKPs. +- Validators verify proofs only for privacy-preserving transactions, keeping processing efficient. -### The account’s model +--- -To achieve both state separation and full programmability, NSSA adopts a stateless program model. Programs do not hold internal state. Instead, all persistent data resides in accounts explicitly passed to the program during execution. This design enables fine-grained control over access and visibility while maintaining composability across public and private states. +## The account’s model + +To achieve both state separation and full programmability, LEZ uses a stateless program model. Programs hold no internal state. All persistent data is stored in accounts passed explicitly into each execution. This enables precise access control and visibility while preserving composability across public and private states. 
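+
+As a concrete illustration, the AMM swap from the testnet tutorials in this repository passes every account the program reads or writes as an explicit argument; the program itself keeps nothing between calls. The IDs below are the tutorials’ sample values:
+
+```bash
+wallet amm swap \
+  --user-holding-a Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw \
+  --user-holding-b Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6 \
+  --amount-in 5 \
+  --min-amount-out 8 \
+  --token-definition 4X9kAcnCZ1Ukkbm3nywW9xfCNPK8XaMWCk3zfs1sP4J7
+```
+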
### Execution types -Execution is divided into two fundamentally distinct types based on how they are processed: public execution, which is executed transparently on-chain, and private execution, which occurs off-chain. For private execution, the blockchain relies on ZKPs to verify the correctness of execution and ensure that all system invariants are preserved. +LEZ supports two execution types: +- Public execution runs transparently on-chain. +- Private execution runs off-chain and is verified on-chain with ZKPs. -Both public and private executions of the same program are enforced to use the same Risc0 VM bytecode. For public transactions, programs are executed directly on-chain like any standard RISC-V VM execution, without generating or verifying proofs. For privacy-preserving transactions, users generate Risc0 ZKPs of correct execution, and validator nodes only verify these proofs rather than re-executing the program. This design ensures that from a validator’s perspective, public transactions are processed as quickly as any RISC-V–based VM, while verification of ZKPs keeps privacy-preserving transactions efficient as well. Additionally, the system naturally supports parallel execution similar to Solana, further increasing throughput. The main computational bottleneck for privacy-preserving transactions lies on the user side, in generating zk proofs. +Both public and private executions use the same Risc0 VM bytecode. Public transactions are executed directly on-chain like any standard RISC-V VM call, without proof generation. Private transactions are executed locally by users, who generate Risc0 proofs that validators verify instead of re-executing the program. -### Resources -- [IFT Research call](https://forum.vac.dev/t/ift-research-call-september-10th-2025-updates-on-the-development-of-nescience/566) -- [NSSA v0.2 specs](https://www.notion.so/NSSA-v0-2-specifications-2848f96fb65c800c9818e6f66d9be8f2) -- [Choice of VM/zkVM](https://www.notion.so/Conclusion-on-the-chosen-VM-and-zkVM-for-NSSA-2318f96fb65c806a810ed1300f56992d) -- [NSSA vs other privacy projects](https://www.notion.so/Privacy-projects-comparison-2688f96fb65c8096b694ecf7e4deca30) -- [NSSA state model](https://www.notion.so/Public-state-model-decision-2388f96fb65c80758b20c76de07b1fcc) -- [NSSA sequencer specs](https://www.notion.so/Sequencer-specs-2428f96fb65c802da2bfea7b0b214ecb) -- [NSSA sequencer code](https://www.notion.so/NSSA-sequencer-pseudocode-2508f96fb65c805e8859e047dffd6785) -- [NSSA Token program desing](https://www.notion.so/Token-program-design-2538f96fb65c80a1b4bdc4fd9dd162d7) -- [NSSA cross program calls](https://www.notion.so/NSSA-cross-program-calls-Tail-call-model-proposal-extended-version-2838f96fb65c8096b3a2d390444193b6) +This design keeps public transactions as fast as any RISC-V–based VM and makes private transactions efficient for validators. It also supports parallel execution similar to Solana, improving throughput. The main computational cost for privacy-preserving transactions is on the user side, where ZK proofs are generated. 
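+
+Concretely, the wallet tutorials in this repository issue the same `send` command in both modes; only the recipient’s ID changes. With a private recipient the wallet executes the program locally and produces a Risc0 proof before submitting. Both commands below are copied from the tutorials, with their sample IDs:
+
+```bash
+# Public recipient: the transaction is executed directly on-chain
+wallet auth-transfer send \
+  --from Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ \
+  --to Public/Ev1JprP9BmhbFVQyBcbznU8bAXcwrzwRoPTetXdQPAWS \
+  --amount 37
+
+# Private recipient: executed locally, proved with Risc0, verified by validators
+wallet auth-transfer send \
+  --from Public/Ev1JprP9BmhbFVQyBcbznU8bAXcwrzwRoPTetXdQPAWS \
+  --to Private/HacPU3hakLYzWtSqUPw6TUr8fqoMieVWovsUR6sJf7cL \
+  --amount 17
+```
+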
+--- +--- # Install dependencies -Install build dependencies +### Install build dependencies - On Linux Ubuntu / Debian @@ -72,7 +77,7 @@ Ubuntu / Debian apt install build-essential clang libclang-dev libssl-dev pkg-config ``` -Fedora +- On Fedora ```sh sudo dnf install clang clang-devel openssl-devel pkgconf ``` @@ -83,25 +88,26 @@ xcode-select --install brew install pkg-config openssl ``` -Install Rust +### Install Rust + ```sh curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh ``` -Install Risc0 +### Install Risc0 ```sh curl -L https://risczero.com/install | bash ``` -Then restart your shell and run +### Then restart your shell and run ```sh rzup install ``` # Run tests -The NSSA repository includes both unit and integration test suites. +The LEZ repository includes both unit and integration test suites. ### Unit tests @@ -119,635 +125,23 @@ cd integration_tests RUST_LOG=info RISC0_DEV_MODE=1 cargo run $(pwd)/configs/debug all ``` -# Run the sequencer +# Run the sequencer and node -The sequencer can be run locally: +The sequencer and node can be run locally: -```bash -cd sequencer_runner -RUST_LOG=info cargo run --release configs/debug -``` + 1. On one terminal go to the `logos-blockchain/logos-blockchain` repo and run a local logos blockchain node: + - `git checkout master; git pull` + - `cargo clean` + - `rm ~/.logos-blockchain-circuits` + - `./scripts/setup-logos-blockchain-circuits.sh` + - `cargo build --all-features` + - `./target/debug/logos-blockchain-node nodes/node/config-one-node.yaml` -If everything went well you should see an output similar to this: -```bash -[2025-11-13T19:50:29Z INFO sequencer_runner] Sequencer core set up -[2025-11-13T19:50:29Z INFO network] Starting http server at 0.0.0.0:3040 -[2025-11-13T19:50:29Z INFO actix_server::builder] starting 8 workers -[2025-11-13T19:50:29Z INFO sequencer_runner] HTTP server started -[2025-11-13T19:50:29Z INFO sequencer_runner] Starting main sequencer loop -[2025-11-13T19:50:29Z INFO actix_server::server] Tokio runtime found; starting in existing Tokio runtime -[2025-11-13T19:50:29Z INFO actix_server::server] starting service: "actix-web-service-0.0.0.0:3040", workers: 8, listening on: 0.0.0.0:3040 -[2025-11-13T19:50:39Z INFO sequencer_runner] Collecting transactions from mempool, block creation -[2025-11-13T19:50:39Z INFO sequencer_core] Created block with 0 transactions in 0 seconds -[2025-11-13T19:50:39Z INFO sequencer_runner] Block with id 2 created -[2025-11-13T19:50:39Z INFO sequencer_runner] Waiting for new transactions -``` + 2. On another terminal go to the `logos-blockchain/lssa` repo and run indexer service: + - `git checkout schouhy/full-bedrock-integration` + - `RUST_LOG=info cargo run --release -p indexer_service $(pwd)/integration_tests/configs/indexer/indexer_config.json` -# Try the Wallet CLI - -## Install - -This repository includes a CLI for interacting with the Nescience sequencer. To install it, run the following command from the root of the repository: - -```bash -cargo install --path wallet --force -``` - -Run `wallet help` to check everything went well. - -Some completion scripts exists, see the [completions](./completions/README.md) folder. - -## Tutorial - -This tutorial walks you through creating accounts and executing NSSA programs in both public and private contexts. - -> [!NOTE] -> The NSSA state is split into two separate but interconnected components: the public state and the private state. 
-> The public state is an on-chain, publicly visible record of accounts indexed by their Account IDs -> The private state mirrors this, but the actual account values are stored locally by each account owner. On-chain, only a hidden commitment to each private account state is recorded. This allows the chain to enforce freshness (i.e., prevent the reuse of stale private states) while preserving privacy and unlinkability across executions and private accounts. -> -> Every piece of state in NSSA is stored in an account (public or private). Accounts are either uninitialized or are owned by a program, and programs can only modify the accounts they own. -> -> In NSSA, accounts can only be modified through program execution. A program is the sole mechanism that can change an account’s value. -> Programs run publicly when all involved accounts are public, and privately when at least one private account participates. - -### Health-check - -Verify that the node is running and that the wallet can connect to it: - -```bash -wallet check-health -``` - -You should see `✅ All looks good!`. - -### The commands - -The wallet provides several commands to interact with the node and query state. To see the full list, run `wallet help`: - -```bash -Commands: - auth-transfer Authenticated transfer subcommand - chain-info Generic chain info subcommand - account Account view and sync subcommand - pinata Pinata program interaction subcommand - token Token program interaction subcommand - amm AMM program interaction subcommand - check-health Check the wallet can connect to the node and builtin local programs match the remote versions -``` - -### Accounts - -> [!NOTE] -> Accounts are the basic unit of state in NSSA. They essentially hold native tokens and arbitrary data managed by some program. - -The CLI provides commands to manage accounts. Run `wallet account` to see the options available: -```bash -Commands: - get Get account data - new Produce new public or private account - sync-private Sync private accounts - help Print this message or the help of the given subcommand(s) -``` - -#### Create a new public account - -You can create both public and private accounts through the CLI. For example: - -```bash -wallet account new public - -# Output: -Generated new account with account_id Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ -``` - -This id is required when executing any program that interacts with the account. - -> [!NOTE] -> Public accounts live on-chain and are identified by a 32-byte Account ID. -> Running `wallet account new public` generates a fresh keypair for the signature scheme used in NSSA. -> The account ID is derived from the public key. The private key is used to sign transactions and to authorize the account in program executions. - -#### Account initialization - -To query the account’s current status, run: - -```bash -# Replace the id with yours -wallet account get --account-id Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ - -# Output: -Account is Uninitialized -``` - -> [!NOTE] -> New accounts begin in an uninitialized state, meaning they are not yet owned by any program. A program may claim an uninitialized account; once claimed, the account becomes owned by that program. -> Owned accounts can only be modified through executions of the owning program. The only exception is native-token credits: any program may credit native tokens to any account. -> However, debiting native tokens from an account must always be performed by its owning program. 
- -In this example, we will initialize the account for the Authenticated transfer program, which securely manages native token transfers by requiring authentication for debits. - -Initialize the account by running: - -```bash -# This command submits a public transaction executing the `init` function of the -# Authenticated-transfer program. The wallet polls the sequencer until the -# transaction is included in a block, which may take several seconds. -wallet auth-transfer init --account-id Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ -``` - -After it completes, check the updated account status: - -```bash -wallet account get --account-id Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ - -# Output: -Account owned by authenticated transfer program -{"balance":0} -``` - -### Funding the account: executing the Piñata program - -Now that we have a public account initialized by the authenticated transfer program, we need to fund it. For that, the testnet provides the Piñata program. - -```bash -# Complete with your id -wallet pinata claim --to Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ -``` - -After the claim succeeds, the account will be funded with some tokens: - -```bash -wallet account get --account-id Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ - -# Output: -Account owned by authenticated transfer program -{"balance":150} -``` - -### Native token transfers: executing the Authenticated transfers program - -NSSA comes with a program for managing and transferring native tokens. Run `wallet auth-transfer` to see the options available: -```bash -Commands: - init Initialize account under authenticated transfer program - send Send native tokens from one account to another with variable privacy - help Print this message or the help of the given subcommand(s) -``` - -We have already used the `init` command. The `send` command is used to execute the `Transfer` function of the authenticated program. -Let's try it. For that we need to create another account for the recipient of the transfer. - -```bash -wallet account new public - -# Output: -Generated new account with account_id Public/Ev1JprP9BmhbFVQyBcbznU8bAXcwrzwRoPTetXdQPAWS -``` - - -> [!NOTE] -> The new account is uninitialized. The authenticated transfers program will claim any uninitialized account used in a transfer. So we don't need to manually initialize the recipient account. - -Let's send 37 tokens to the new account. - -```bash -wallet auth-transfer send \ - --from Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ \ - --to Public/Ev1JprP9BmhbFVQyBcbznU8bAXcwrzwRoPTetXdQPAWS \ - --amount 37 -``` - -Once that succeeds we can check the states. - -```bash -# Sender account -wallet account get --account-id Public/HrA8TVjBS8UVf9akV7LRhyh6k4c7F6PS7PvqgtPmKAT8 - -# Output: -Account owned by authenticated transfer program -{"balance":113} -``` - -```bash -# Recipient account -wallet account get --account-id Public/Ev1JprP9BmhbFVQyBcbznU8bAXcwrzwRoPTetXdQPAWS - -# Output: -Account owned by authenticated transfer program -{"balance":37} -``` - -#### Create a new private account - -> [!NOTE] -> Private accounts are structurally identical to public accounts; they differ only in how their state is stored off-chain and represented on-chain. -> The raw values of a private account are never stored on-chain. Instead, the chain only holds a 32-byte commitment (a hash-like binding to the actual values). Transactions include encrypted versions of the private values so that users can recover them from the blockchain. 
The decryption keys are known only to the user and are never shared. -> Private accounts are not managed through the usual signature mechanism used for public accounts. Instead, each private account is associated with two keypairs: -> - *Nullifier keys*, for using the corresponding private account in privacy preserving executions. -> - *Viewing keys*, used for encrypting and decrypting the values included in transactions. -> -> Private accounts also have a 32-byte identifier, derived from the nullifier public key. -> -> Just like public accounts, private accounts can only be initialized once. Any user can initialize them without knowing the owner's secret keys. However, modifying an initialized private account through an off-chain program execution requires knowledge of the owner’s secret keys. -> -> Transactions that modify the values of a private account include a commitment to the new values, which will be added to the on-chain commitment set. They also include a nullifier that marks the previous version as old. -> The nullifier is constructed so that it cannot be linked to any prior commitment, ensuring that updates to the same private account cannot be correlated. - -Now let’s switch to the private state and create a private account. - -```bash -wallet account new private - -# Output: -Generated new account with account_id Private/HacPU3hakLYzWtSqUPw6TUr8fqoMieVWovsUR6sJf7cL -With npk e6366f79d026c8bd64ae6b3d601f0506832ec682ab54897f205fffe64ec0d951 -With vpk 02ddc96d0eb56e00ce14994cfdaec5ae1f76244180a919545983156e3519940a17 -``` - -For now, focus only on the account id. Ignore the `npk` and `vpk` values. These are the Nullifier public key and the Viewing public key. They are stored locally in the wallet and are used internally to build privacy-preserving transactions. -Also, the account id for private accounts is derived from the `npk` value. But we won't need them now. - -Just like public accounts, new private accounts start out uninitialized: - -```bash -wallet account get --account-id Private/HacPU3hakLYzWtSqUPw6TUr8fqoMieVWovsUR6sJf7cL - -# Output: -Account is Uninitialized -``` -Unlike public accounts, private accounts are never visible to the network. They exist only in your local wallet storage. - -#### Sending tokens from the public account to the private account - -Sending tokens to an uninitialized private account causes the Authenticated-Transfers program to claim it. Just like with public accounts. -This happens because program execution logic does not depend on whether the involved accounts are public or private. - -Let’s send 17 tokens to the new private account. - -The syntax is identical to the public-to-public transfer; just set the private ID as the recipient. - -This command will run the Authenticated-Transfer program locally, generate a proof, and submit it to the sequencer. Depending on your machine, this can take from 30 seconds to 4 minutes. 
- -```bash -wallet auth-transfer send \ - --from Public/Ev1JprP9BmhbFVQyBcbznU8bAXcwrzwRoPTetXdQPAWS \ - --to Private/HacPU3hakLYzWtSqUPw6TUr8fqoMieVWovsUR6sJf7cL \ - --amount 17 -``` - -After it succeeds, check both accounts: - -```bash -# Public sender account -wallet account get --account-id Public/Ev1JprP9BmhbFVQyBcbznU8bAXcwrzwRoPTetXdQPAWS - -# Output: -Account owned by authenticated transfer program -{"balance":20} -``` - -```bash -# Private recipient account -wallet account get --account-id Private/HacPU3hakLYzWtSqUPw6TUr8fqoMieVWovsUR6sJf7cL - -# Output: -Account owned by authenticated transfer program -{"balance":17} -``` - -> [!NOTE] -> The last command does not query the network. -> It works even offline because private account data lives only in your wallet storage. Other users cannot read your private balances. - -#### Digression: modifying private accounts - -As a general rule, private accounts can only be modified through a program execution performed by their owner. That is, the person who holds the private key for that account. There is one exception: an uninitialized private account may be initialized by any user, without requiring the private key. After initialization, only the owner can modify it. - -This mechanism enables a common use case: transferring funds from any account (public or private) to a private account owned by someone else. For such transfers, the recipient’s private account must be uninitialized. - - -#### Sending tokens from the public account to a private account owned by someone else - -For this tutorial, we’ll simulate that scenario by creating a new private account that we own, but we’ll treat it as if it belonged to someone else. - -Let's create a new (uninitialized) private account like before: - -```bash -wallet account new private - -# Output: -Generated new account with account_id Private/AukXPRBmrYVqoqEW2HTs7N3hvTn3qdNFDcxDHVr5hMm5 -With npk 0c95ebc4b3830f53da77bb0b80a276a776cdcf6410932acc718dcdb3f788a00e -With vpk 039fd12a3674a880d3e917804129141e4170d419d1f9e28a3dcf979c1f2369cb72 -``` - -Now we'll ignore the private account ID and focus on the `npk` and `vpk` values. We'll need this to send tokens to a foreign private account. Syntax is very similar. - -```bash -wallet auth-transfer send \ - --from Public/Ev1JprP9BmhbFVQyBcbznU8bAXcwrzwRoPTetXdQPAWS \ - --to-npk 0c95ebc4b3830f53da77bb0b80a276a776cdcf6410932acc718dcdb3f788a00e \ - --to-vpk 039fd12a3674a880d3e917804129141e4170d419d1f9e28a3dcf979c1f2369cb72 \ - --amount 3 -``` - -The command above produces a privacy-preserving transaction, which may take a few minutes to complete. The updated values of the private account are encrypted and included in the transaction. - -Once the transaction is accepted, the recipient must run `wallet account sync-private`. This command scans the chain for encrypted values that belong to their private accounts and updates the local versions accordingly. - - -#### Transfers in other combinations of public and private accounts - -We’ve shown how to use the authenticated-transfers program for transfers between two public accounts, and for transfers from a public sender to a private recipient. Sending tokens from a private account (whether to a public account or to another private account) works in essentially the same way. - -### The token program - -So far, we’ve made transfers using the authenticated-transfers program, which handles native token transfers. The Token program, on the other hand, is used for creating and managing custom tokens. 
- -> [!NOTE] -> The token program is a single program responsible for creating and managing all tokens. There is no need to deploy new programs to introduce new tokens. All token-related operations are performed by invoking the appropriate functions of the token program. - -The CLI provides commands to execute the token program. To see the options available run `wallet token`: - -```bash -Commands: - new Produce a new token - send Send tokens from one account to another with variable privacy - help Print this message or the help of the given subcommand(s) -``` - - -> [!NOTE] -> The Token program manages its accounts in two categories. Meaning, all accounts owned by the Token program fall into one of these types. -> - Token definition accounts: these accounts store metadata about a token, such as its name, total supply, and other identifying properties. They act as the token’s unique identifier. -> - Token holding accounts: these accounts hold actual token balances. In addition to the balance, they also record which token definition they belong to. - -#### Creating a new token - -To create a new token, simply run `wallet token new`. This will create a transaction to execute the `New` function of the token program. -The command expects a name, the desired total supply, and two uninitialized accounts: -- One that will be initialized as the token definition account for the new token. -- Another that will be initialized as a token holding account and receive the token’s entire initial supply. - - -##### New token with both definition and supply accounts set as public - -For example, let's create two new (uninitialized) public accounts and then use them to create a new token. - -```bash -wallet account new public - -# Output: -Generated new account with account_id Public/4X9kAcnCZ1Ukkbm3nywW9xfCNPK8XaMWCk3zfs1sP4J7 -``` - -```bash -wallet account new public - -# Output: -Generated new account with account_id Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw -``` - -Now we use them to create a new token. Let's call it the "Token A" - -```bash -wallet token new \ - --name TOKENA \ - --total-supply 1337 \ - --definition-account-id Public/4X9kAcnCZ1Ukkbm3nywW9xfCNPK8XaMWCk3zfs1sP4J7 \ - --supply-account-id Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw -``` - -After it succeeds, we can inspect the two accounts to see how they were initialized. - -```bash -wallet account get --account-id Public/4X9kAcnCZ1Ukkbm3nywW9xfCNPK8XaMWCk3zfs1sP4J7 - -# Output: -Definition account owned by token program -{"account_type":"Token definition","name":"TOKENA","total_supply":1337} -``` - -```bash -wallet account get --account-id Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw - -# Output: -Holding account owned by token program -{"account_type":"Token holding","definition_id":"4X9kAcnCZ1Ukkbm3nywW9xfCNPK8XaMWCk3zfs1sP4J7","balance":1337} -``` - -##### New token with public account definition but private holding account for initial supply - -Let’s create a new token, but this time using a public definition account and a private holding account to store the entire supply. - -Since we can’t reuse the accounts from the previous example, we need to create fresh ones for this case. 
- -```bash -wallet account new public - -# Output: -Generated new account with account_id Public/GQ3C8rbprTtQUCvkuVBRu3v9wvUvjafCMFqoSPvTEVii -``` - -```bash -wallet account new private - - -# Output: -Generated new account with account_id Private/HMRHZdPw4pbyPVZHNGrV6K5AA95wACFsHTRST84fr3CF -With npk 6a2dfe433cf28e525aa0196d719be3c16146f7ee358ca39595323f94fde38f93 -With vpk 03d59abf4bee974cc12ddb44641c19f0b5441fef39191f047c988c29a77252a577 -``` - -And we use them to create the token. - -Now we use them to create a new token. Let's call it "Token B". - -```bash -wallet token new \ - --name TOKENB \ - --total-supply 7331 \ - --definition-account-id Public/GQ3C8rbprTtQUCvkuVBRu3v9wvUvjafCMFqoSPvTEVii \ - --supply-account-id Private/HMRHZdPw4pbyPVZHNGrV6K5AA95wACFsHTRST84fr3CF -``` - -After it succeeds, we can check their values - -```bash -wallet account get --account-id Public/GQ3C8rbprTtQUCvkuVBRu3v9wvUvjafCMFqoSPvTEVii - -# Output: -Definition account owned by token program -{"account_type":"Token definition","name":"TOKENB","total_supply":7331} -``` - -```bash -wallet account get --account-id Private/HMRHZdPw4pbyPVZHNGrV6K5AA95wACFsHTRST84fr3CF - -# Output: -Holding account owned by token program -{"account_type":"Token holding","definition_id":"GQ3C8rbprTtQUCvkuVBRu3v9wvUvjafCMFqoSPvTEVii","balance":7331} -``` - -Like any other private account owned by us, it cannot be seen by other users. - -#### Custom token transfers - -The Token program has a function to move funds from one token holding account to another one. If executed with an uninitialized account as the recipient, this will be automatically claimed by the token program. - -The transfer function can be executed with the `wallet token send` command. - -Let's create a new public account for the recipient. - -```bash -wallet account new public - -# Output: -Generated new account with account_id Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6 -``` - -Let's send 1000 B tokens to this new account. We'll debit this from the supply account used in the creation of the token. - -```bash -wallet token send \ - --from Private/HMRHZdPw4pbyPVZHNGrV6K5AA95wACFsHTRST84fr3CF \ - --to Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6 \ - --amount 1000 -``` - -Let's inspect the public account: - -```bash -wallet account get --account-id Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6 - -# Output: -Holding account owned by token program -{"account_type":"Token holding","definition_id":"GQ3C8rbprTtQUCvkuVBRu3v9wvUvjafCMFqoSPvTEVii","balance":1000} -``` - -### Chain information - -The wallet provides some commands to query information about the chain. These are under the `wallet chain-info` command. - -```bash -Commands: - current-block-id Get current block id from sequencer - block Get block at id from sequencer - transaction Get transaction at hash from sequencer -``` - -For example, run this to find the current block id. - -```bash -wallet chain-info current-block-id - -# Output: -Last block id is 65537 -``` - - -### Automated Market Maker (AMM) - -NSSA includes an AMM program that manages liquidity pools and enables swaps between custom tokens. To test this functionality, we first need to create a liquidity pool. - -#### Creating a liquidity pool for a token pair - -We start by creating a new pool for the tokens previously created. In return for providing liquidity, we will receive liquidity provider (LP) tokens, which represent our share of the pool and are required to withdraw liquidity later. 
- ->[!NOTE] -> The AMM program does not currently charge swap fees or distribute rewards to liquidity providers. LP tokens therefore only represent a proportional share of the pool reserves and do not provide additional value from swap activity. Fee support for liquidity providers will be added in future versions of the AMM program. - -To hold these LP tokens, we first create a new account: - -```bash -wallet account new public - -# Output: -Generated new account with account_id Public/FHgLW9jW4HXMV6egLWbwpTqVAGiCHw2vkg71KYSuimVf -``` - -Next, we initialize the liquidity pool by depositing tokens A and B and specifying the account that will receive the LP tokens: - -```bash -wallet amm new \ - --user-holding-a Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw \ - --user-holding-b Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6 \ - --user-holding-lp Public/FHgLW9jW4HXMV6egLWbwpTqVAGiCHw2vkg71KYSuimVf \ - --balance-a 100 \ - --balance-b 200 -``` - -The newly created account is owned by the token program, meaning that LP tokens are managed by the same token infrastructure as regular tokens. - -```bash -wallet account get --account-id Public/FHgLW9jW4HXMV6egLWbwpTqVAGiCHw2vkg71KYSuimVf - -# Output: -Holding account owned by token program -{"account_type":"Token holding","definition_id":"7BeDS3e28MA5Err7gBswmR1fUKdHXqmUpTefNPu3pJ9i","balance":100} -``` - -If you inspect the `user-holding-a` and `user-holding-b` accounts passed to the `wallet amm new` command, you will see that 100 and 200 tokens were deducted, respectively. These tokens now reside in the liquidity pool and are available for swaps by any user. - - -#### Swaping - -Token swaps can be performed using the wallet amm swap command: - -```bash -wallet amm swap \ - --user-holding-a Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw \ - --user-holding-b Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6 \ - # The amount of tokens to swap - --amount-in 5 \ - # The minimum number of tokens expected in return - --min-amount-out 8 \ - # The definition ID of the token being provided to the swap - # In this case, we are swapping from TOKENA to TOKENB, and so this is the definition ID of TOKENA - --token-definition 4X9kAcnCZ1Ukkbm3nywW9xfCNPK8XaMWCk3zfs1sP4J7 -``` - -Once executed, 5 tokens are deducted from the Token A holding account and the corresponding amount (determined by the pool’s pricing function) is credited to the Token B holding account. - - -#### Withdrawing liquidity from the pool - -Liquidity providers can withdraw assets from the pool by redeeming (burning) LP tokens. The amount of tokens received is proportional to the share of LP tokens being redeemed relative to the total LP supply. - -This operation is performed using the `wallet amm remove-liquidity` command: - -```bash -wallet amm remove-liquidity \ - --user-holding-a Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw \ - --user-holding-b Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6 \ - --user-holding-lp Public/FHgLW9jW4HXMV6egLWbwpTqVAGiCHw2vkg71KYSuimVf \ - --balance-lp 20 \ - --min-amount-a 1 \ - --min-amount-b 1 -``` - -This instruction burns `balance-lp` LP tokens from the user’s LP holding account. In exchange, the AMM transfers tokens A and B from the pool’s vault accounts to the user’s holding accounts, according to the current pool reserves. - -The `min-amount-a` and `min-amount-b` parameters specify the minimum acceptable amounts of tokens A and B to be received. 
If the computed outputs fall below either threshold, the instruction fails, protecting the user against unfavorable pool state changes. - -#### Adding liquidity to the pool - -Additional liquidity can be added to an existing pool by depositing tokens A and B in the ratio implied by the current pool reserves. In return, new LP tokens are minted to represent the user’s proportional share of the pool. - -This is done using the `wallet amm add-liquidity` command: - -```bash -wallet amm add-liquidity \ - --user-holding-a Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw \ - --user-holding-b Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6 \ - --user-holding-lp Public/FHgLW9jW4HXMV6egLWbwpTqVAGiCHw2vkg71KYSuimVf \ - --min-amount-lp 1 \ - --max-amount-a 10 \ - --max-amount-b 10 -``` - -In this instruction, `max-amount-a` and `max-amount-b` define upper bounds on the number of tokens A and B that may be withdrawn from the user’s accounts. The AMM computes the actual required amounts based on the pool’s reserve ratio. - -The `min-amount-lp` parameter specifies the minimum number of LP tokens that must be minted for the transaction to succeed. If the resulting LP token amount is below this threshold, the instruction fails. + 3. On another terminal go to the `logos-blockchain/lssa` repo and run the sequencer: + - `git checkout schouhy/full-bedrock-integration` + - `RUST_LOG=info RISC0_DEV_MODE=1 cargo run --release -p sequencer_runner sequencer_runner/configs/debug` diff --git a/bedrock_client/Cargo.toml b/bedrock_client/Cargo.toml index 50a54815..fec9f1c0 100644 --- a/bedrock_client/Cargo.toml +++ b/bedrock_client/Cargo.toml @@ -2,9 +2,15 @@ name = "bedrock_client" version = "0.1.0" edition = "2024" +license = { workspace = true } [dependencies] reqwest.workspace = true anyhow.workspace = true +tokio-retry.workspace = true +futures.workspace = true +log.workspace = true +serde.workspace = true logos-blockchain-common-http-client.workspace = true logos-blockchain-core.workspace = true +logos-blockchain-chain-broadcast-service.workspace = true diff --git a/bedrock_client/src/lib.rs b/bedrock_client/src/lib.rs index 530fdfc2..b34687c3 100644 --- a/bedrock_client/src/lib.rs +++ b/bedrock_client/src/lib.rs @@ -1,10 +1,24 @@ use anyhow::Result; +use futures::{Stream, TryFutureExt}; +use log::warn; +pub use logos_blockchain_chain_broadcast_service::BlockInfo; pub use logos_blockchain_common_http_client::{BasicAuthCredentials, CommonHttpClient, Error}; -use logos_blockchain_core::mantle::SignedMantleTx; +pub use logos_blockchain_core::{block::Block, header::HeaderId, mantle::SignedMantleTx}; use reqwest::{Client, Url}; +use serde::{Deserialize, Serialize}; +use tokio_retry::Retry; + +/// Fibonacci backoff retry strategy configuration +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct BackoffConfig { + pub start_delay_millis: u64, + pub max_retries: usize, +} // Simple wrapper // maybe extend in the future for our purposes +// `Clone` is cheap because `CommonHttpClient` is internally reference counted (`Arc`). 
+#[derive(Clone)] pub struct BedrockClient { http_client: CommonHttpClient, node_url: Url, @@ -29,4 +43,25 @@ impl BedrockClient { .post_transaction(self.node_url.clone(), tx) .await } + + pub async fn get_lib_stream(&self) -> Result, Error> { + self.http_client.get_lib_stream(self.node_url.clone()).await + } + + pub async fn get_block_by_id( + &self, + header_id: HeaderId, + backoff: &BackoffConfig, + ) -> Result>, Error> { + let strategy = + tokio_retry::strategy::FibonacciBackoff::from_millis(backoff.start_delay_millis) + .take(backoff.max_retries); + + Retry::spawn(strategy, || { + self.http_client + .get_block_by_id(self.node_url.clone(), header_id) + .inspect_err(|err| warn!("Block fetching failed with err: {err:#?}")) + }) + .await + } } diff --git a/common/Cargo.toml b/common/Cargo.toml index a6e26fad..09cb10fa 100644 --- a/common/Cargo.toml +++ b/common/Cargo.toml @@ -2,6 +2,7 @@ name = "common" version = "0.1.0" edition = "2024" +license = { workspace = true } [dependencies] nssa.workspace = true @@ -17,3 +18,5 @@ log.workspace = true hex.workspace = true borsh.workspace = true base64.workspace = true +url.workspace = true +logos-blockchain-common-http-client.workspace = true diff --git a/common/src/block.rs b/common/src/block.rs index 84b7a419..391bc57d 100644 --- a/common/src/block.rs +++ b/common/src/block.rs @@ -4,6 +4,7 @@ use sha2::{Digest, Sha256, digest::FixedOutput}; use crate::transaction::EncodedTransaction; pub type HashType = [u8; 32]; +pub type MantleMsgId = [u8; 32]; #[derive(Debug, Clone)] /// Our own hasher. @@ -49,6 +50,7 @@ pub struct Block { pub header: BlockHeader, pub body: BlockBody, pub bedrock_status: BedrockStatus, + pub bedrock_parent_id: MantleMsgId, } #[derive(Debug, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)] @@ -60,7 +62,11 @@ pub struct HashableBlockData { } impl HashableBlockData { - pub fn into_pending_block(self, signing_key: &nssa::PrivateKey) -> Block { + pub fn into_pending_block( + self, + signing_key: &nssa::PrivateKey, + bedrock_parent_id: MantleMsgId, + ) -> Block { let data_bytes = borsh::to_vec(&self).unwrap(); let signature = nssa::Signature::new(signing_key, &data_bytes); let hash = OwnHasher::hash(&data_bytes); @@ -76,8 +82,13 @@ impl HashableBlockData { transactions: self.transactions, }, bedrock_status: BedrockStatus::Pending, + bedrock_parent_id, } } + + pub fn block_hash(&self) -> BlockHash { + OwnHasher::hash(&borsh::to_vec(&self).unwrap()) + } } impl From for HashableBlockData { diff --git a/common/src/communication/indexer.rs b/common/src/communication/indexer.rs new file mode 100644 index 00000000..a0edc176 --- /dev/null +++ b/common/src/communication/indexer.rs @@ -0,0 +1,6 @@ +use serde::{Deserialize, Serialize}; + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub enum Message { + L2BlockFinalized { l2_block_height: u64 }, +} diff --git a/common/src/communication/mod.rs b/common/src/communication/mod.rs new file mode 100644 index 00000000..d99eb481 --- /dev/null +++ b/common/src/communication/mod.rs @@ -0,0 +1 @@ +pub mod indexer; diff --git a/common/src/lib.rs b/common/src/lib.rs index b64e6ef9..68902811 100644 --- a/common/src/lib.rs +++ b/common/src/lib.rs @@ -1,4 +1,5 @@ pub mod block; +pub mod communication; pub mod error; pub mod rpc_primitives; pub mod sequencer_client; diff --git a/common/src/rpc_primitives/requests.rs b/common/src/rpc_primitives/requests.rs index 71641936..6191df44 100644 --- a/common/src/rpc_primitives/requests.rs +++ b/common/src/rpc_primitives/requests.rs @@ -73,6 +73,11 @@ pub 
struct GetProofForCommitmentRequest { #[derive(Serialize, Deserialize, Debug)] pub struct GetProgramIdsRequest {} +#[derive(Serialize, Deserialize, Debug)] +pub struct PostIndexerMessageRequest { + pub message: crate::communication::indexer::Message, +} + parse_request!(HelloRequest); parse_request!(RegisterAccountRequest); parse_request!(SendTxRequest); @@ -87,6 +92,7 @@ parse_request!(GetAccountsNoncesRequest); parse_request!(GetProofForCommitmentRequest); parse_request!(GetAccountRequest); parse_request!(GetProgramIdsRequest); +parse_request!(PostIndexerMessageRequest); #[derive(Serialize, Deserialize, Debug)] pub struct HelloResponse { @@ -216,3 +222,8 @@ pub struct GetInitialTestnetAccountsResponse { pub account_id: String, pub balance: u64, } + +#[derive(Serialize, Deserialize, Debug)] +pub struct PostIndexerMessageResponse { + pub status: String, +} diff --git a/common/src/sequencer_client.rs b/common/src/sequencer_client.rs index 0cb03f6f..7a14d425 100644 --- a/common/src/sequencer_client.rs +++ b/common/src/sequencer_client.rs @@ -1,10 +1,12 @@ -use std::{collections::HashMap, ops::RangeInclusive}; +use std::{collections::HashMap, ops::RangeInclusive, str::FromStr}; use anyhow::Result; +use logos_blockchain_common_http_client::BasicAuthCredentials; use nssa_core::program::ProgramId; use reqwest::Client; -use serde::Deserialize; +use serde::{Deserialize, Serialize}; use serde_json::Value; +use url::Url; use super::rpc_primitives::requests::{ GetAccountBalanceRequest, GetAccountBalanceResponse, GetBlockDataRequest, GetBlockDataResponse, @@ -20,28 +22,75 @@ use crate::{ GetInitialTestnetAccountsResponse, GetLastBlockRequest, GetLastBlockResponse, GetProgramIdsRequest, GetProgramIdsResponse, GetProofForCommitmentRequest, GetProofForCommitmentResponse, GetTransactionByHashRequest, - GetTransactionByHashResponse, SendTxRequest, SendTxResponse, + GetTransactionByHashResponse, PostIndexerMessageRequest, PostIndexerMessageResponse, + SendTxRequest, SendTxResponse, }, }, transaction::{EncodedTransaction, NSSATransaction}, }; +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct BasicAuth { + pub username: String, + pub password: Option, +} + +impl std::fmt::Display for BasicAuth { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.username)?; + if let Some(password) = &self.password { + write!(f, ":{password}")?; + } + + Ok(()) + } +} + +impl FromStr for BasicAuth { + type Err = anyhow::Error; + + fn from_str(s: &str) -> Result { + let parse = || { + let mut parts = s.splitn(2, ':'); + let username = parts.next()?; + let password = parts.next().filter(|p| !p.is_empty()); + if parts.next().is_some() { + return None; + } + + Some((username, password)) + }; + + let (username, password) = parse().ok_or_else(|| { + anyhow::anyhow!("Invalid auth format. 
Expected 'user' or 'user:password'") + })?; + + Ok(Self { + username: username.to_string(), + password: password.map(|p| p.to_string()), + }) + } +} + +impl From for BasicAuthCredentials { + fn from(value: BasicAuth) -> Self { + BasicAuthCredentials::new(value.username, value.password) + } +} + #[derive(Clone)] pub struct SequencerClient { pub client: reqwest::Client, - pub sequencer_addr: String, - pub basic_auth: Option<(String, Option)>, + pub sequencer_addr: Url, + pub basic_auth: Option, } impl SequencerClient { - pub fn new(sequencer_addr: String) -> Result { + pub fn new(sequencer_addr: Url) -> Result { Self::new_with_auth(sequencer_addr, None) } - pub fn new_with_auth( - sequencer_addr: String, - basic_auth: Option<(String, Option)>, - ) -> Result { + pub fn new_with_auth(sequencer_addr: Url, basic_auth: Option) -> Result { Ok(Self { client: Client::builder() // Add more fields if needed @@ -66,9 +115,9 @@ impl SequencerClient { "Calling method {method} with payload {request:?} to sequencer at {}", self.sequencer_addr ); - let mut call_builder = self.client.post(&self.sequencer_addr); + let mut call_builder = self.client.post(self.sequencer_addr.clone()); - if let Some((username, password)) = &self.basic_auth { + if let Some(BasicAuth { username, password }) = &self.basic_auth { call_builder = call_builder.basic_auth(username, password.as_deref()); } @@ -347,4 +396,23 @@ impl SequencerClient { Ok(resp_deser) } + + /// Post indexer into sequencer + pub async fn post_indexer_message( + &self, + message: crate::communication::indexer::Message, + ) -> Result { + let last_req = PostIndexerMessageRequest { message }; + + let req = serde_json::to_value(last_req).unwrap(); + + let resp = self + .call_method_with_payload("post_indexer_message", req) + .await + .unwrap(); + + let resp_deser = serde_json::from_value(resp).unwrap(); + + Ok(resp_deser) + } } diff --git a/common/src/test_utils.rs b/common/src/test_utils.rs index 1125b86e..80703342 100644 --- a/common/src/test_utils.rs +++ b/common/src/test_utils.rs @@ -30,7 +30,7 @@ pub fn produce_dummy_block( transactions, }; - block_data.into_pending_block(&sequencer_sign_key_for_testing()) + block_data.into_pending_block(&sequencer_sign_key_for_testing(), [0; 32]) } pub fn produce_dummy_empty_transaction() -> EncodedTransaction { diff --git a/completions/zsh/_wallet b/completions/zsh/_wallet index fae1196d..9f40c4fb 100644 --- a/completions/zsh/_wallet +++ b/completions/zsh/_wallet @@ -150,10 +150,11 @@ _wallet_account() { subcommand) subcommands=( 'get:Get account data' - 'list:List all accounts' + 'list:List all accounts owned by the wallet' 'ls:List all accounts (alias for list)' 'new:Produce new public or private account' 'sync-private:Sync private accounts' + 'label:Set a label for an account' 'help:Print this message or the help of the given subcommand(s)' ) _describe -t subcommands 'account subcommands' subcommands @@ -184,6 +185,11 @@ _wallet_account() { ;; esac ;; + label) + _arguments \ + '(-a --account-id)'{-a,--account-id}'[Account ID to label]:account_id:_wallet_account_ids' \ + '(-l --label)'{-l,--label}'[The label to assign to the account]:label:' + ;; esac ;; esac diff --git a/examples/program_deployment/Cargo.toml b/examples/program_deployment/Cargo.toml index 6aff2d0f..2199fe21 100644 --- a/examples/program_deployment/Cargo.toml +++ b/examples/program_deployment/Cargo.toml @@ -2,6 +2,7 @@ name = "program_deployment" version = "0.1.0" edition = "2024" +license = { workspace = true } [dependencies] nssa.workspace = true diff 
--git a/examples/program_deployment/methods/Cargo.toml b/examples/program_deployment/methods/Cargo.toml index a25aecf2..95b10ea8 100644 --- a/examples/program_deployment/methods/Cargo.toml +++ b/examples/program_deployment/methods/Cargo.toml @@ -2,6 +2,7 @@ name = "example_program_deployment_methods" version = "0.1.0" edition = "2024" +license = { workspace = true } [build-dependencies] risc0-build.workspace = true diff --git a/examples/program_deployment/methods/guest/Cargo.toml b/examples/program_deployment/methods/guest/Cargo.toml index 245bc5db..1f4db355 100644 --- a/examples/program_deployment/methods/guest/Cargo.toml +++ b/examples/program_deployment/methods/guest/Cargo.toml @@ -2,6 +2,7 @@ name = "example_program_deployment_programs" version = "0.1.0" edition = "2024" +license = { workspace = true } [dependencies] nssa_core.workspace = true diff --git a/explorer_service/.gitignore b/explorer_service/.gitignore new file mode 100644 index 00000000..49015de2 --- /dev/null +++ b/explorer_service/.gitignore @@ -0,0 +1,11 @@ +# Leptos build outputs +/target +/pkg +/site + +# WASM artifacts +*.wasm + +# Environment +.env +.env.local diff --git a/explorer_service/Cargo.toml b/explorer_service/Cargo.toml new file mode 100644 index 00000000..49d1ddce --- /dev/null +++ b/explorer_service/Cargo.toml @@ -0,0 +1,73 @@ +[package] +name = "explorer_service" +version = "0.1.0" +edition = "2024" +license.workspace = true + +[lib] +crate-type = ["cdylib", "rlib"] + +[dependencies] +indexer_service_protocol.workspace = true + +# Leptos framework +leptos = "0.8.15" +leptos_meta = "0.8.5" +leptos_router = "0.8.11" + +# Serialization +serde.workspace = true + +# Logging +log.workspace = true +console_error_panic_hook = "0.1" +console_log = "1.0" + +# Date/Time +chrono.workspace = true + +# Hex encoding/decoding +hex.workspace = true + +# URL encoding +urlencoding = "2.1" + +# WASM-specific +wasm-bindgen = "0.2" +web-sys = { version = "0.3", features = [ + "Window", + "Document", + "Location", + "HtmlInputElement", +] } + +# Server-side dependencies (optional, enabled by features) +indexer_service_rpc = { workspace = true, features = [ + "client", +], optional = true } +jsonrpsee = { workspace = true, features = ["http-client"], optional = true } +tokio = { workspace = true, optional = true } +axum = { version = "0.8.8", optional = true } +leptos_axum = { version = "0.8.7", optional = true } +clap = { workspace = true, features = ["derive"], optional = true } +url = { workspace = true, optional = true } +env_logger = { workspace = true, optional = true } + +[features] +hydrate = ["leptos/hydrate"] +ssr = [ + "leptos/ssr", + "dep:indexer_service_rpc", + "dep:jsonrpsee", + "dep:tokio", + "dep:axum", + "dep:leptos_axum", + "dep:clap", + "dep:url", + "dep:env_logger", +] + +[package.metadata.leptos] +bin-features = ["ssr"] +lib-features = ["hydrate"] +assets-dir = "public" diff --git a/explorer_service/Dockerfile b/explorer_service/Dockerfile new file mode 100644 index 00000000..e10c5ebe --- /dev/null +++ b/explorer_service/Dockerfile @@ -0,0 +1,52 @@ +FROM rust:1.91.1-trixie AS builder + +# Install cargo-binstall, which makes it easier to install other +# cargo extensions like cargo-leptos +RUN wget https://github.com/cargo-bins/cargo-binstall/releases/latest/download/cargo-binstall-x86_64-unknown-linux-musl.tgz +RUN tar -xvf cargo-binstall-x86_64-unknown-linux-musl.tgz +RUN cp cargo-binstall /usr/local/cargo/bin + +# Install required tools +RUN apt-get update -y \ + && apt-get install -y 
--no-install-recommends clang + +# Install cargo-leptos +RUN cargo binstall cargo-leptos -y + +# Add the WASM target +RUN rustup target add wasm32-unknown-unknown + +# Make an /explorer_service dir, which everything will eventually live in +RUN mkdir -p /explorer_service +WORKDIR /explorer_service +COPY . . + +# Build the app +RUN cargo leptos build --release -vv + +FROM debian:trixie-slim AS runtime +WORKDIR /explorer_service +RUN apt-get update -y \ + && apt-get install -y --no-install-recommends openssl ca-certificates \ + && apt-get autoremove -y \ + && apt-get clean -y \ + && rm -rf /var/lib/apt/lists/* + +# Copy the server binary to the /explorer_service directory +COPY --from=builder /explorer_service/target/release/explorer_service /explorer_service/ + +# /target/site contains our JS/WASM/CSS, etc. +COPY --from=builder /explorer_service/target/site /explorer_service/site + +# Copy Cargo.toml as it’s needed at runtime +COPY --from=builder /explorer_service/Cargo.toml /explorer_service/ + +# Set any required env variables +ENV RUST_LOG="info" +ENV LEPTOS_SITE_ADDR="0.0.0.0:8080" +ENV LEPTOS_SITE_ROOT="site" +ENV INDEXER_RPC_URL="http://localhost:8779" +EXPOSE 8080 + +# Run the server +CMD ["/explorer_service/explorer_service"] diff --git a/explorer_service/README.md b/explorer_service/README.md new file mode 100644 index 00000000..6f118dbb --- /dev/null +++ b/explorer_service/README.md @@ -0,0 +1,71 @@ +# LEE Blockchain Explorer + +A web-based UI for exploring the blockchain state, built with Rust and Leptos framework. + +## Features + +- **Main Page**: Search for blocks, transactions, or accounts by hash/ID. View recent blocks. +- **Block Page**: View detailed block information and all transactions within a block. +- **Transaction Page**: View transaction details including type, accounts involved, and proofs. +- **Account Page**: View account state and transaction history. + +## Architecture + +- **Framework**: Leptos 0.8 with SSR (Server-Side Rendering) and hydration +- **Data Source**: Indexer Service JSON-RPC API +- **Components**: Reusable BlockPreview, TransactionPreview, and AccountPreview components +- **Styling**: Custom CSS with responsive design + +## Development + +### Prerequisites + +- Rust (stable or nightly) +- `cargo-leptos` tool: `cargo install cargo-leptos` +- Running indexer service at `http://localhost:8080/rpc` (or configure via `INDEXER_RPC_URL`) + +### Build and Run + +```bash +# Development mode (with hot-reload) +cargo leptos watch + +# Production build +cargo leptos build --release + +# Run production build +cargo leptos serve --release +``` + +The explorer will be available at `http://localhost:3000` by default. + +### Configuration + +Set the `INDEXER_RPC_URL` environment variable to point to your indexer service: + +```bash +export INDEXER_RPC_URL=http://localhost:8080/rpc +cargo leptos watch +``` + +## Features + +### Search + +The search bar supports: +- Block IDs (numeric) +- Block hashes (64-character hex) +- Transaction hashes (64-character hex) +- Account IDs (64-character hex) + +### Real-time Updates + +The main page loads recent blocks and can be extended to subscribe to new blocks via WebSocket. + +### Responsive Design + +The UI is mobile-friendly and adapts to different screen sizes. + +## License + +See LICENSE file in the repository root. 
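The README above covers the `cargo leptos` workflow; the Dockerfile shown earlier and the compose file that follows add a containerised path. A minimal sketch of building and running that image by hand, assuming the commands are issued from the repository root (the same build context the compose file uses) and that `host.docker.internal` is only a placeholder for whichever hostname reaches your running indexer service (the image defaults to `http://localhost:8779`):

```bash
# Build the explorer image; the workspace root is the build context,
# matching the context/dockerfile pair used by the compose file below.
docker build -f explorer_service/Dockerfile -t lssa/explorer_service .

# Run it on port 8080 (the port the image exposes), pointing the server-side
# RPC client at a reachable indexer endpoint. The hostname here is only an
# example; substitute the address of your indexer_service instance.
docker run --rm -p 8080:8080 \
  -e INDEXER_RPC_URL=http://host.docker.internal:8779 \
  lssa/explorer_service
```

The explorer is then served at `http://localhost:8080`; `LEPTOS_SITE_ADDR` and `LEPTOS_SITE_ROOT` are already set inside the image.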
diff --git a/explorer_service/docker-compose.yml b/explorer_service/docker-compose.yml new file mode 100644 index 00000000..28c4c9c7 --- /dev/null +++ b/explorer_service/docker-compose.yml @@ -0,0 +1,11 @@ +services: + explorer_service: + image: lssa/explorer_service + build: + context: .. + dockerfile: explorer_service/Dockerfile + container_name: explorer_service + environment: + INDEXER_RPC_URL: ${INDEXER_RPC_URL:-http://localhost:8779} + ports: + - "8080:8080" diff --git a/explorer_service/public/explorer.css b/explorer_service/public/explorer.css new file mode 100644 index 00000000..a6415ed5 --- /dev/null +++ b/explorer_service/public/explorer.css @@ -0,0 +1,516 @@ +/* Reset and base styles */ +* { + margin: 0; + padding: 0; + box-sizing: border-box; +} + +body { + font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, "Helvetica Neue", Arial, sans-serif; + background-color: #f5f7fa; + color: #2c3e50; + line-height: 1.6; +} + +/* App layout */ +.app { + display: flex; + flex-direction: column; + min-height: 100vh; +} + +.app-header { + background-color: #2c3e50; + color: white; + padding: 1rem 2rem; + box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1); +} + +.app-nav { + max-width: 1200px; + margin: 0 auto; +} + +.nav-logo { + color: white; + text-decoration: none; + font-size: 1.5rem; + font-weight: bold; +} + +.nav-logo:hover { + color: #3498db; +} + +.app-main { + flex: 1; + max-width: 1200px; + width: 100%; + margin: 0 auto; + padding: 2rem; +} + +.app-footer { + background-color: #34495e; + color: white; + text-align: center; + padding: 1.5rem; + margin-top: 2rem; +} + +/* Page headers */ +.page-header h1 { + font-size: 2rem; + margin-bottom: 1.5rem; + color: #2c3e50; +} + +/* Search section */ +.search-section { + margin-bottom: 3rem; +} + +.search-form { + display: flex; + gap: 0.5rem; + margin-bottom: 1.5rem; +} + +.search-input { + flex: 1; + padding: 0.75rem 1rem; + border: 2px solid #dde4ed; + border-radius: 8px; + font-size: 1rem; + transition: border-color 0.3s; +} + +.search-input:focus { + outline: none; + border-color: #3498db; +} + +.search-button { + padding: 0.75rem 2rem; + background-color: #3498db; + color: white; + border: none; + border-radius: 8px; + font-size: 1rem; + font-weight: 600; + cursor: pointer; + transition: background-color 0.3s; +} + +.search-button:hover { + background-color: #2980b9; +} + +/* Block preview */ +.block-preview { + background-color: white; + border-radius: 8px; + padding: 1.5rem; + margin-bottom: 1rem; + box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1); + transition: transform 0.2s, box-shadow 0.2s; +} + +.block-preview:hover { + transform: translateY(-2px); + box-shadow: 0 4px 8px rgba(0, 0, 0, 0.15); +} + +.block-preview-link { + text-decoration: none; + color: inherit; +} + +.block-preview-header { + display: flex; + justify-content: space-between; + align-items: center; + margin-bottom: 1rem; + padding-bottom: 1rem; + border-bottom: 1px solid #eee; +} + +.block-id .label { + color: #7f8c8d; + font-size: 0.9rem; +} + +.block-id .value { + font-size: 1.5rem; + font-weight: bold; + color: #2c3e50; +} + +.block-status { + padding: 0.5rem 1rem; + border-radius: 20px; + font-size: 0.85rem; + font-weight: 600; +} + +.status-pending { + background-color: #fff3cd; + color: #856404; +} + +.status-safe { + background-color: #d1ecf1; + color: #0c5460; +} + +.status-finalized { + background-color: #d4edda; + color: #155724; +} + +.block-preview-body { + display: grid; + gap: 0.5rem; +} + +.block-field { + display: flex; + gap: 0.5rem; +} + 
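For the compose file above, `INDEXER_RPC_URL` is the only setting that usually needs overriding. A short usage sketch, assuming it is run from the `explorer_service/` directory and that the URL shown is a placeholder for your own indexer endpoint:

```bash
# Falls back to http://localhost:8779 when the variable is unset,
# per the ${INDEXER_RPC_URL:-...} default in the compose file.
INDEXER_RPC_URL=http://indexer.internal:8779 docker compose up --build
```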
+.field-label { + color: #7f8c8d; + font-weight: 500; +} + +.field-value { + color: #2c3e50; +} + +.hash { + font-family: "Courier New", monospace; + font-size: 0.9rem; + word-break: break-all; +} + +/* Transaction preview */ +.transaction-preview { + background-color: white; + border-radius: 8px; + padding: 1.5rem; + margin-bottom: 1rem; + box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1); + transition: transform 0.2s, box-shadow 0.2s; +} + +.transaction-preview:hover { + transform: translateY(-2px); + box-shadow: 0 4px 8px rgba(0, 0, 0, 0.15); +} + +.transaction-preview-link { + text-decoration: none; + color: inherit; +} + +.transaction-preview-header { + display: flex; + justify-content: space-between; + align-items: center; + margin-bottom: 0.75rem; + padding-bottom: 0.75rem; + border-bottom: 1px solid #eee; +} + +.tx-type { + padding: 0.4rem 0.8rem; + border-radius: 16px; + font-size: 0.85rem; + font-weight: 600; + border: 2px solid; +} + +.tx-type-public { + background-color: #e3f2fd; + color: #0d47a1; + border-color: #1976d2; + border-style: solid; +} + +.tx-type-private { + background-color: #ffe0f0; + color: #880e4f; + border-color: #c2185b; + border-style: dashed; + font-style: italic; +} + +.tx-type-deployment { + background-color: #fff3e0; + color: #e65100; + border-color: #ff9800; + border-style: dotted; +} + +.tx-hash { + display: flex; + gap: 0.5rem; + align-items: center; +} + +.transaction-preview-body { + color: #7f8c8d; + font-size: 0.9rem; +} + +/* Account preview */ +.account-preview { + background-color: white; + border-radius: 8px; + padding: 1.5rem; + margin-bottom: 1rem; + box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1); + transition: transform 0.2s, box-shadow 0.2s; +} + +.account-preview:hover { + transform: translateY(-2px); + box-shadow: 0 4px 8px rgba(0, 0, 0, 0.15); +} + +.account-preview-link { + text-decoration: none; + color: inherit; +} + +.account-preview-header { + margin-bottom: 1rem; + padding-bottom: 1rem; + border-bottom: 1px solid #eee; +} + +.account-id .label { + color: #7f8c8d; + font-size: 0.9rem; +} + +.account-id .value { + font-size: 1.2rem; + font-weight: 600; + color: #2c3e50; +} + +.account-preview-body { + display: grid; + gap: 0.5rem; +} + +.account-field { + display: flex; + gap: 0.5rem; +} + +.account-not-found { + color: #e74c3c; + font-style: italic; +} + +/* Detail pages */ +.block-detail, +.transaction-detail, +.account-detail { + background-color: white; + border-radius: 8px; + padding: 2rem; + box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1); +} + +.block-info, +.transaction-info, +.account-info, +.transaction-details { + margin-bottom: 2rem; +} + +.block-info h2, +.transaction-info h2, +.account-info h2, +.transaction-details h2 { + font-size: 1.5rem; + margin-bottom: 1rem; + color: #2c3e50; +} + +.info-grid { + display: grid; + gap: 1rem; +} + +.info-row { + display: flex; + gap: 1rem; + padding: 0.75rem; + background-color: #f8f9fa; + border-radius: 4px; +} + +.info-label { + color: #7f8c8d; + font-weight: 600; + min-width: 150px; +} + +.info-value { + color: #2c3e50; + word-break: break-all; +} + +.signature { + font-size: 0.75rem; +} + +/* Transactions list */ +.block-transactions, +.account-transactions { + margin-top: 2rem; +} + +.block-transactions h2, +.account-transactions h2 { + font-size: 1.5rem; + margin-bottom: 1rem; + color: #2c3e50; +} + +.transactions-list { + display: grid; + gap: 1rem; +} + +.no-transactions { + padding: 2rem; + text-align: center; + color: #7f8c8d; + background-color: #f8f9fa; + border-radius: 8px; +} + +/* 
Accounts list */ +.accounts-list { + display: grid; + gap: 0.5rem; + margin-top: 1rem; +} + +.account-item { + padding: 0.75rem; + background-color: #f8f9fa; + border-radius: 4px; +} + +.account-item a { + color: #3498db; + text-decoration: none; +} + +.account-item a:hover { + text-decoration: underline; +} + +.nonce { + color: #7f8c8d; + font-size: 0.9rem; + margin-left: 0.5rem; +} + +/* Loading and error states */ +.loading, +.loading-more { + text-align: center; + padding: 2rem; + color: #7f8c8d; + font-style: italic; +} + +.error, +.error-page { + background-color: #f8d7da; + color: #721c24; + padding: 1rem; + border-radius: 8px; + margin: 1rem 0; +} + +.not-found, +.not-found-page { + text-align: center; + padding: 3rem; + color: #7f8c8d; +} + +.not-found-page h1 { + font-size: 4rem; + color: #e74c3c; + margin-bottom: 1rem; +} + +.not-found-page a { + color: #3498db; + text-decoration: none; + font-weight: 600; +} + +.not-found-page a:hover { + text-decoration: underline; +} + +/* Load more button */ +.load-more-button { + display: block; + width: 100%; + padding: 1rem; + margin-top: 1rem; + background-color: #3498db; + color: white; + border: none; + border-radius: 8px; + font-size: 1rem; + font-weight: 600; + cursor: pointer; + transition: background-color 0.3s; +} + +.load-more-button:hover { + background-color: #2980b9; +} + +/* Responsive design */ +@media (max-width: 768px) { + .app-main { + padding: 1rem; + } + + .search-form { + flex-direction: column; + } + + .search-button { + width: 100%; + } + + .block-preview-header, + .transaction-preview-header { + flex-direction: column; + align-items: flex-start; + gap: 0.5rem; + } + + .info-row { + flex-direction: column; + gap: 0.25rem; + } + + .info-label { + min-width: auto; + } +} diff --git a/explorer_service/src/api.rs b/explorer_service/src/api.rs new file mode 100644 index 00000000..fe84033f --- /dev/null +++ b/explorer_service/src/api.rs @@ -0,0 +1,158 @@ +use indexer_service_protocol::{Account, AccountId, Block, BlockId, Hash, Transaction}; +use leptos::prelude::*; +use serde::{Deserialize, Serialize}; + +/// Search results structure +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct SearchResults { + pub blocks: Vec, + pub transactions: Vec, + pub accounts: Vec<(AccountId, Option)>, +} + +/// RPC client type +#[cfg(feature = "ssr")] +pub type IndexerRpcClient = jsonrpsee::http_client::HttpClient; + +/// Get account information by ID +#[server] +pub async fn get_account(account_id: AccountId) -> Result { + use indexer_service_rpc::RpcClient as _; + let client = expect_context::(); + client + .get_account(account_id) + .await + .map_err(|e| ServerFnError::ServerError(format!("RPC error: {}", e))) +} + +/// Parse hex string to bytes +#[cfg(feature = "ssr")] +fn parse_hex(s: &str) -> Option> { + let s = s.trim().trim_start_matches("0x"); + hex::decode(s).ok() +} + +/// Search for a block, transaction, or account by query string +#[server] +pub async fn search(query: String) -> Result { + use indexer_service_rpc::RpcClient as _; + let client = expect_context::(); + + let mut blocks = Vec::new(); + let mut transactions = Vec::new(); + let mut accounts = Vec::new(); + + // Try to parse as hash (32 bytes) + if let Some(bytes) = parse_hex(&query) + && let Ok(hash_array) = <[u8; 32]>::try_from(bytes) + { + let hash = Hash(hash_array); + + // Try as block hash + if let Ok(block) = client.get_block_by_hash(hash).await { + blocks.push(block); + } + + // Try as transaction hash + if let Ok(tx) = 
client.get_transaction(hash).await { + transactions.push(tx); + } + + // Try as account ID + let account_id = AccountId { value: hash_array }; + match client.get_account(account_id).await { + Ok(account) => { + accounts.push((account_id, Some(account))); + } + Err(_) => { + // Account might not exist yet, still add it to results + accounts.push((account_id, None)); + } + } + } + + // Try as block ID + if let Ok(block_id) = query.parse::() + && let Ok(block) = client.get_block_by_id(block_id).await + { + blocks.push(block); + } + + Ok(SearchResults { + blocks, + transactions, + accounts, + }) +} + +/// Get block by ID +#[server] +pub async fn get_block_by_id(block_id: BlockId) -> Result { + use indexer_service_rpc::RpcClient as _; + let client = expect_context::(); + client + .get_block_by_id(block_id) + .await + .map_err(|e| ServerFnError::ServerError(format!("RPC error: {}", e))) +} + +/// Get block by hash +#[server] +pub async fn get_block_by_hash(block_hash: Hash) -> Result { + use indexer_service_rpc::RpcClient as _; + let client = expect_context::(); + client + .get_block_by_hash(block_hash) + .await + .map_err(|e| ServerFnError::ServerError(format!("RPC error: {}", e))) +} + +/// Get transaction by hash +#[server] +pub async fn get_transaction(tx_hash: Hash) -> Result { + use indexer_service_rpc::RpcClient as _; + let client = expect_context::(); + client + .get_transaction(tx_hash) + .await + .map_err(|e| ServerFnError::ServerError(format!("RPC error: {}", e))) +} + +/// Get blocks with pagination +#[server] +pub async fn get_blocks(offset: u32, limit: u32) -> Result, ServerFnError> { + use indexer_service_rpc::RpcClient as _; + let client = expect_context::(); + client + .get_blocks(offset, limit) + .await + .map_err(|e| ServerFnError::ServerError(format!("RPC error: {}", e))) +} + +/// Get transactions by account +#[server] +pub async fn get_transactions_by_account( + account_id: AccountId, + limit: u32, + offset: u32, +) -> Result, ServerFnError> { + use indexer_service_rpc::RpcClient as _; + let client = expect_context::(); + client + .get_transactions_by_account(account_id, limit, offset) + .await + .map_err(|e| ServerFnError::ServerError(format!("RPC error: {}", e))) +} + +/// Create the RPC client for the indexer service (server-side only) +#[cfg(feature = "ssr")] +pub fn create_indexer_rpc_client(url: &url::Url) -> Result { + use jsonrpsee::http_client::HttpClientBuilder; + use log::info; + + info!("Connecting to Indexer RPC on URL: {url}"); + + HttpClientBuilder::default() + .build(url.as_str()) + .map_err(|e| format!("Failed to create RPC client: {e}")) +} diff --git a/explorer_service/src/components/account_preview.rs b/explorer_service/src/components/account_preview.rs new file mode 100644 index 00000000..30bbae5b --- /dev/null +++ b/explorer_service/src/components/account_preview.rs @@ -0,0 +1,63 @@ +use indexer_service_protocol::{Account, AccountId}; +use leptos::prelude::*; +use leptos_router::components::A; + +use crate::format_utils; + +/// Account preview component +#[component] +pub fn AccountPreview(account_id: AccountId, account: Option) -> impl IntoView { + let account_id_str = format_utils::format_account_id(&account_id); + + view! 
{ + + } +} diff --git a/explorer_service/src/components/block_preview.rs b/explorer_service/src/components/block_preview.rs new file mode 100644 index 00000000..b577cceb --- /dev/null +++ b/explorer_service/src/components/block_preview.rs @@ -0,0 +1,77 @@ +use indexer_service_protocol::{BedrockStatus, Block, BlockBody, BlockHeader}; +use leptos::prelude::*; +use leptos_router::components::A; + +use crate::format_utils; + +/// Get CSS class for bedrock status +fn status_class(status: &BedrockStatus) -> &'static str { + match status { + BedrockStatus::Pending => "status-pending", + BedrockStatus::Safe => "status-safe", + BedrockStatus::Finalized => "status-finalized", + } +} + +/// Block preview component +#[component] +pub fn BlockPreview(block: Block) -> impl IntoView { + let Block { + header: + BlockHeader { + block_id, + prev_block_hash, + hash, + timestamp, + signature: _, + }, + body: BlockBody { transactions }, + bedrock_status, + bedrock_parent_id: _, + } = block; + + let tx_count = transactions.len(); + + let hash_str = hex::encode(hash.0); + let prev_hash_str = hex::encode(prev_block_hash.0); + let time_str = format_utils::format_timestamp(timestamp); + let status_str = match &bedrock_status { + BedrockStatus::Pending => "Pending", + BedrockStatus::Safe => "Safe", + BedrockStatus::Finalized => "Finalized", + }; + + view! { + + } +} diff --git a/explorer_service/src/components/mod.rs b/explorer_service/src/components/mod.rs new file mode 100644 index 00000000..a0032b10 --- /dev/null +++ b/explorer_service/src/components/mod.rs @@ -0,0 +1,7 @@ +pub mod account_preview; +pub mod block_preview; +pub mod transaction_preview; + +pub use account_preview::AccountPreview; +pub use block_preview::BlockPreview; +pub use transaction_preview::TransactionPreview; diff --git a/explorer_service/src/components/transaction_preview.rs b/explorer_service/src/components/transaction_preview.rs new file mode 100644 index 00000000..a08abb30 --- /dev/null +++ b/explorer_service/src/components/transaction_preview.rs @@ -0,0 +1,72 @@ +use indexer_service_protocol::Transaction; +use leptos::prelude::*; +use leptos_router::components::A; + +/// Get transaction type name and CSS class +fn transaction_type_info(tx: &Transaction) -> (&'static str, &'static str) { + match tx { + Transaction::Public(_) => ("Public", "tx-type-public"), + Transaction::PrivacyPreserving(_) => ("Privacy-Preserving", "tx-type-private"), + Transaction::ProgramDeployment(_) => ("Program Deployment", "tx-type-deployment"), + } +} + +/// Transaction preview component +#[component] +pub fn TransactionPreview(transaction: Transaction) -> impl IntoView { + let hash = transaction.hash(); + let hash_str = hex::encode(hash.0); + let (type_name, type_class) = transaction_type_info(&transaction); + + // Get additional metadata based on transaction type + let metadata = match &transaction { + Transaction::Public(tx) => { + let indexer_service_protocol::PublicTransaction { + hash: _, + message, + witness_set: _, + } = tx; + format!("{} accounts involved", message.account_ids.len()) + } + Transaction::PrivacyPreserving(tx) => { + let indexer_service_protocol::PrivacyPreservingTransaction { + hash: _, + message, + witness_set: _, + } = tx; + format!( + "{} public accounts, {} commitments", + message.public_account_ids.len(), + message.new_commitments.len() + ) + } + Transaction::ProgramDeployment(tx) => { + let indexer_service_protocol::ProgramDeploymentTransaction { hash: _, message } = tx; + format!("{} bytes", message.bytecode.len()) + } + }; + + 
view! { + + } +} diff --git a/explorer_service/src/format_utils.rs b/explorer_service/src/format_utils.rs new file mode 100644 index 00000000..6f5378de --- /dev/null +++ b/explorer_service/src/format_utils.rs @@ -0,0 +1,33 @@ +//! Formatting utilities for the explorer + +use indexer_service_protocol::{AccountId, ProgramId}; + +/// Format timestamp to human-readable string +pub fn format_timestamp(timestamp: u64) -> String { + let seconds = timestamp / 1000; + let datetime = chrono::DateTime::from_timestamp(seconds as i64, 0) + .unwrap_or_else(|| chrono::DateTime::from_timestamp(0, 0).unwrap()); + datetime.format("%Y-%m-%d %H:%M:%S UTC").to_string() +} + +/// Format hash (32 bytes) to hex string +pub fn format_hash(hash: &[u8; 32]) -> String { + hex::encode(hash) +} + +/// Format account ID to hex string +pub fn format_account_id(account_id: &AccountId) -> String { + hex::encode(account_id.value) +} + +/// Format program ID to hex string +pub fn format_program_id(program_id: &ProgramId) -> String { + let bytes: Vec = program_id.iter().flat_map(|n| n.to_be_bytes()).collect(); + hex::encode(bytes) +} + +/// Parse hex string to bytes +pub fn parse_hex(s: &str) -> Option> { + let s = s.trim().trim_start_matches("0x"); + hex::decode(s).ok() +} diff --git a/explorer_service/src/lib.rs b/explorer_service/src/lib.rs new file mode 100644 index 00000000..489636fd --- /dev/null +++ b/explorer_service/src/lib.rs @@ -0,0 +1,102 @@ +use leptos::prelude::*; +use leptos_meta::{Meta, Stylesheet, Title, provide_meta_context}; +use leptos_router::{ + ParamSegment, StaticSegment, + components::{Route, Router, Routes}, +}; +use pages::{AccountPage, BlockPage, MainPage, TransactionPage}; + +pub mod api; +mod components; +mod format_utils; +mod pages; + +/// Main application component with routing setup. +/// +/// # Routes +/// +/// - `/` - Main page with search and recent blocks +/// - `/block/:id` - Block detail page (`:id` is the numeric block ID) +/// - `/transaction/:hash` - Transaction detail page (`:hash` is the hex-encoded transaction hash) +/// - `/account/:id` - Account detail page (`:id` is the hex-encoded account ID) +/// +/// All other routes will show a 404 Not Found page. +#[component] +pub fn App() -> impl IntoView { + // Provides context that manages stylesheets, titles, meta tags, etc. + provide_meta_context(); + + view! { + + + <Meta name="description" content="Explore the blockchain - view blocks, transactions, and accounts" /> + + <Router> + <div class="app"> + <header class="app-header"> + <nav class="app-nav"> + <a href="/" class="nav-logo"> + "LEE Blockchain Explorer" + </a> + </nav> + </header> + + <main class="app-main"> + // Route definitions: + // - MainPage: Home with search and recent blocks + // - BlockPage: Detailed block view with all transactions + // - TransactionPage: Detailed transaction view + // - AccountPage: Account state and transaction history + <Routes fallback=|| view! { <NotFound /> }> + // Main page - search and recent blocks + <Route path=StaticSegment("") view=MainPage /> + + // Block detail page - /block/123 + <Route path=(StaticSegment("block"), ParamSegment("id")) view=BlockPage /> + + // Transaction detail page - /transaction/0abc123... + <Route + path=(StaticSegment("transaction"), ParamSegment("hash")) + view=TransactionPage + /> + + // Account detail page - /account/0def456... 
+ <Route + path=(StaticSegment("account"), ParamSegment("id")) + view=AccountPage + /> + </Routes> + </main> + + <footer class="app-footer"> + <p>"LEE Blockchain Explorer © 2026"</p> + </footer> + </div> + </Router> + } +} + +/// 404 Not Found page component. +/// +/// Displayed when a user navigates to a route that doesn't exist. +#[component] +fn NotFound() -> impl IntoView { + view! { + <div class="not-found-page"> + <h1>"404"</h1> + <p>"Page not found"</p> + <a href="/">"Go back to home"</a> + </div> + } +} + +#[cfg(feature = "hydrate")] +#[wasm_bindgen::prelude::wasm_bindgen] +pub fn hydrate() { + use leptos::mount::hydrate_body; + + console_error_panic_hook::set_once(); + console_log::init_with_level(log::Level::Debug).expect("error initializing logger"); + + hydrate_body(App); +} diff --git a/explorer_service/src/main.rs b/explorer_service/src/main.rs new file mode 100644 index 00000000..63d54d70 --- /dev/null +++ b/explorer_service/src/main.rs @@ -0,0 +1,79 @@ +#[cfg(feature = "ssr")] +#[tokio::main] +async fn main() { + use axum::Router; + use clap::Parser; + use explorer_service::App; + use leptos::prelude::*; + use leptos_axum::{LeptosRoutes, generate_route_list}; + use leptos_meta::MetaTags; + + env_logger::init(); + + /// LEE Blockchain Explorer Server CLI arguments. + #[derive(Parser, Debug)] + #[command(version, about, long_about = None)] + struct Args { + /// Indexer RPC URL + #[arg(long, env = "INDEXER_RPC_URL", default_value = "http://localhost:8779")] + indexer_rpc_url: url::Url, + } + + let args = Args::parse(); + + let conf = get_configuration(None).unwrap(); + let leptos_options = conf.leptos_options; + let addr = leptos_options.site_addr; + let routes = generate_route_list(App); + + // Create RPC client once + let rpc_client = explorer_service::api::create_indexer_rpc_client(&args.indexer_rpc_url) + .expect("Failed to create RPC client"); + + // Build our application with routes + let app = Router::new() + .leptos_routes_with_context( + &leptos_options, + routes, + { + let rpc_client = rpc_client.clone(); + move || provide_context(rpc_client.clone()) + }, + { + let leptos_options = leptos_options.clone(); + move || { + view! { + <!DOCTYPE html> + <html lang="en"> + <head> + <meta charset="utf-8" /> + <meta name="viewport" content="width=device-width, initial-scale=1" /> + <AutoReload options=leptos_options.clone() /> + <HydrationScripts options=leptos_options.clone() /> + <MetaTags /> + </head> + <body> + <App /> + </body> + </html> + } + } + }, + ) + .fallback(leptos_axum::file_and_error_handler(|_| { + view! 
{ "Page not found" } + })) + .with_state(leptos_options); + + // Run the server + let listener = tokio::net::TcpListener::bind(&addr).await.unwrap(); + println!("Listening on http://{}", &addr); + axum::serve(listener, app.into_make_service()) + .await + .unwrap(); +} + +#[cfg(not(feature = "ssr"))] +fn main() { + // Client-only main - no-op since hydration is done via wasm_bindgen +} diff --git a/explorer_service/src/pages/account_page.rs b/explorer_service/src/pages/account_page.rs new file mode 100644 index 00000000..efd9ae81 --- /dev/null +++ b/explorer_service/src/pages/account_page.rs @@ -0,0 +1,229 @@ +use indexer_service_protocol::{Account, AccountId}; +use leptos::prelude::*; +use leptos_router::hooks::use_params_map; + +use crate::{api, components::TransactionPreview, format_utils}; + +/// Account page component +#[component] +pub fn AccountPage() -> impl IntoView { + let params = use_params_map(); + let (tx_offset, set_tx_offset) = signal(0u32); + let (all_transactions, set_all_transactions) = signal(Vec::new()); + let (is_loading, set_is_loading) = signal(false); + let (has_more, set_has_more) = signal(true); + let tx_limit = 10u32; + + // Parse account ID from URL params + let account_id = move || { + let account_id_str = params.read().get("id").unwrap_or_default(); + format_utils::parse_hex(&account_id_str).and_then(|bytes| { + if bytes.len() == 32 { + let account_id_array: [u8; 32] = bytes.try_into().ok()?; + Some(AccountId { + value: account_id_array, + }) + } else { + None + } + }) + }; + + // Load account data + let account_resource = Resource::new(account_id, |acc_id_opt| async move { + match acc_id_opt { + Some(acc_id) => api::get_account(acc_id).await, + None => Err(leptos::prelude::ServerFnError::ServerError( + "Invalid account ID".to_string(), + )), + } + }); + + // Load initial transactions + let transactions_resource = Resource::new(account_id, move |acc_id_opt| async move { + match acc_id_opt { + Some(acc_id) => api::get_transactions_by_account(acc_id, tx_limit, 0).await, + None => Err(leptos::prelude::ServerFnError::ServerError( + "Invalid account ID".to_string(), + )), + } + }); + + // Update all_transactions when initial load completes + Effect::new(move || { + if let Some(Ok(txs)) = transactions_resource.get() { + set_all_transactions.set(txs.clone()); + set_has_more.set(txs.len() as u32 == tx_limit); + } + }); + + // Load more transactions handler + let load_more = move |_| { + let Some(acc_id) = account_id() else { + return; + }; + + set_is_loading.set(true); + let current_offset = tx_offset.get() + tx_limit; + set_tx_offset.set(current_offset); + + leptos::task::spawn_local(async move { + match api::get_transactions_by_account(acc_id, tx_limit, current_offset).await { + Ok(new_txs) => { + let txs_count = new_txs.len() as u32; + set_all_transactions.update(|txs| txs.extend(new_txs)); + set_has_more.set(txs_count == tx_limit); + } + Err(e) => { + log::error!("Failed to load more transactions: {}", e); + } + } + set_is_loading.set(false); + }); + }; + + view! { + <div class="account-page"> + <Suspense fallback=move || view! 
{ <div class="loading">"Loading account..."</div> }> + {move || { + account_resource + .get() + .map(|result| match result { + Ok(acc) => { + let Account { + program_owner, + balance, + data, + nonce, + } = acc; + + let acc_id = account_id().expect("Account ID should be set"); + let account_id_str = format_utils::format_account_id(&acc_id); + let program_id = format_utils::format_program_id(&program_owner); + let balance_str = balance.to_string(); + let nonce_str = nonce.to_string(); + let data_len = data.0.len(); + view! { + <div class="account-detail"> + <div class="page-header"> + <h1>"Account"</h1> + </div> + + <div class="account-info"> + <h2>"Account Information"</h2> + <div class="info-grid"> + <div class="info-row"> + <span class="info-label">"Account ID:"</span> + <span class="info-value hash">{account_id_str}</span> + </div> + <div class="info-row"> + <span class="info-label">"Balance:"</span> + <span class="info-value">{balance_str}</span> + </div> + <div class="info-row"> + <span class="info-label">"Program Owner:"</span> + <span class="info-value hash">{program_id}</span> + </div> + <div class="info-row"> + <span class="info-label">"Nonce:"</span> + <span class="info-value">{nonce_str}</span> + </div> + <div class="info-row"> + <span class="info-label">"Data:"</span> + <span class="info-value">{format!("{} bytes", data_len)}</span> + </div> + </div> + </div> + + <div class="account-transactions"> + <h2>"Transactions"</h2> + <Suspense fallback=move || { + view! { <div class="loading">"Loading transactions..."</div> } + }> + + {move || { + transactions_resource + .get() + .map(|result| match result { + Ok(_) => { + let txs = all_transactions.get(); + if txs.is_empty() { + view! { + <div class="no-transactions"> + "No transactions found" + </div> + } + .into_any() + } else { + view! { + <div> + <div class="transactions-list"> + {txs + .into_iter() + .map(|tx| { + view! { <TransactionPreview transaction=tx /> } + }) + .collect::<Vec<_>>()} + </div> + {move || { + if has_more.get() { + view! { + <button + class="load-more-button" + on:click=load_more + disabled=move || is_loading.get() + > + {move || { + if is_loading.get() { + "Loading..." + } else { + "Load More" + } + }} + + </button> + } + .into_any() + } else { + ().into_any() + } + }} + + </div> + } + .into_any() + } + } + Err(e) => { + view! { + <div class="error"> + {format!("Failed to load transactions: {}", e)} + </div> + } + .into_any() + } + }) + }} + + </Suspense> + </div> + </div> + } + .into_any() + } + Err(e) => { + view! 
{ + <div class="error-page"> + <h1>"Error"</h1> + <p>{format!("Failed to load account: {}", e)}</p> + </div> + } + .into_any() + } + }) + }} + + </Suspense> + </div> + } +} diff --git a/explorer_service/src/pages/block_page.rs b/explorer_service/src/pages/block_page.rs new file mode 100644 index 00000000..988bc5e3 --- /dev/null +++ b/explorer_service/src/pages/block_page.rs @@ -0,0 +1,159 @@ +use indexer_service_protocol::{BedrockStatus, Block, BlockBody, BlockHeader, BlockId, Hash}; +use leptos::prelude::*; +use leptos_router::{components::A, hooks::use_params_map}; + +use crate::{api, components::TransactionPreview, format_utils}; + +#[derive(Clone, PartialEq, Eq)] +enum BlockIdOrHash { + BlockId(BlockId), + Hash(Hash), +} + +/// Block page component +#[component] +pub fn BlockPage() -> impl IntoView { + let params = use_params_map(); + + let block_resource = Resource::new( + move || { + let id_str = params.read().get("id").unwrap_or_default(); + + // Try to parse as block ID (number) + if let Ok(block_id) = id_str.parse::<BlockId>() { + return Some(BlockIdOrHash::BlockId(block_id)); + } + + // Try to parse as block hash (hex string) + let id_str = id_str.trim().trim_start_matches("0x"); + if let Some(bytes) = format_utils::parse_hex(id_str) + && let Ok(hash_array) = <[u8; 32]>::try_from(bytes) + { + return Some(BlockIdOrHash::Hash(Hash(hash_array))); + } + + None + }, + |block_id_or_hash| async move { + match block_id_or_hash { + Some(BlockIdOrHash::BlockId(id)) => api::get_block_by_id(id).await, + Some(BlockIdOrHash::Hash(hash)) => api::get_block_by_hash(hash).await, + None => Err(leptos::prelude::ServerFnError::ServerError( + "Invalid block ID or hash".to_string(), + )), + } + }, + ); + + view! { + <div class="block-page"> + <Suspense fallback=move || view! { <div class="loading">"Loading block..."</div> }> + {move || { + block_resource + .get() + .map(|result| match result { + Ok(blk) => { + let Block { + header: BlockHeader { + block_id, + prev_block_hash, + hash, + timestamp, + signature, + }, + body: BlockBody { + transactions, + }, + bedrock_status, + bedrock_parent_id: _, + } = blk; + + let hash_str = format_utils::format_hash(&hash.0); + let prev_hash = format_utils::format_hash(&prev_block_hash.0); + let timestamp_str = format_utils::format_timestamp(timestamp); + let signature_str = hex::encode(signature.0); + let status = match &bedrock_status { + BedrockStatus::Pending => "Pending", + BedrockStatus::Safe => "Safe", + BedrockStatus::Finalized => "Finalized", + }; + view! 
{ + <div class="block-detail"> + <div class="page-header"> + <h1>"Block " {block_id.to_string()}</h1> + </div> + + <div class="block-info"> + <h2>"Block Information"</h2> + <div class="info-grid"> + <div class="info-row"> + <span class="info-label">"Block ID: "</span> + <span class="info-value">{block_id.to_string()}</span> + </div> + <div class="info-row"> + <span class="info-label">"Hash: "</span> + <span class="info-value hash">{hash_str}</span> + </div> + <div class="info-row"> + <span class="info-label">"Previous Block Hash: "</span> + <A href=format!("/block/{}", prev_hash) attr:class="info-value hash"> + {prev_hash} + </A> + </div> + <div class="info-row"> + <span class="info-label">"Timestamp: "</span> + <span class="info-value">{timestamp_str}</span> + </div> + <div class="info-row"> + <span class="info-label">"Status: "</span> + <span class="info-value">{status}</span> + </div> + <div class="info-row"> + <span class="info-label">"Signature: "</span> + <span class="info-value hash signature">{signature_str}</span> + </div> + <div class="info-row"> + <span class="info-label">"Transaction Count: "</span> + <span class="info-value">{transactions.len().to_string()}</span> + </div> + </div> + </div> + + <div class="block-transactions"> + <h2>"Transactions"</h2> + {if transactions.is_empty() { + view! { <div class="no-transactions">"No transactions"</div> } + .into_any() + } else { + view! { + <div class="transactions-list"> + {transactions + .into_iter() + .map(|tx| view! { <TransactionPreview transaction=tx /> }) + .collect::<Vec<_>>()} + </div> + } + .into_any() + }} + + </div> + </div> + } + .into_any() + } + Err(e) => { + view! { + <div class="error-page"> + <h1>"Error"</h1> + <p>{format!("Failed to load block: {}", e)}</p> + </div> + } + .into_any() + } + }) + }} + + </Suspense> + </div> + } +} diff --git a/explorer_service/src/pages/main_page.rs b/explorer_service/src/pages/main_page.rs new file mode 100644 index 00000000..ffd625c8 --- /dev/null +++ b/explorer_service/src/pages/main_page.rs @@ -0,0 +1,208 @@ +use leptos::prelude::*; +use leptos_router::hooks::{use_navigate, use_query_map}; +use web_sys::SubmitEvent; + +use crate::{ + api::{self, SearchResults}, + components::{AccountPreview, BlockPreview, TransactionPreview}, +}; + +/// Main page component +#[component] +pub fn MainPage() -> impl IntoView { + let query_map = use_query_map(); + let navigate = use_navigate(); + + // Read search query from URL parameter + let url_query = move || query_map.read().get("q").unwrap_or_default(); + + let (search_query, set_search_query) = signal(url_query()); + + // Sync search input with URL parameter + Effect::new(move || { + set_search_query.set(url_query()); + }); + + // Search results resource based on URL query parameter + let search_resource = Resource::new(url_query, |query| async move { + if query.is_empty() { + return None; + } + match api::search(query).await { + Ok(result) => Some(result), + Err(e) => { + log::error!("Search error: {}", e); + None + } + } + }); + + // Load recent blocks on mount + let recent_blocks_resource = Resource::new(|| (), |_| async { api::get_blocks(0, 10).await }); + + // Handle search - update URL parameter + let on_search = move |ev: SubmitEvent| { + ev.prevent_default(); + let query = search_query.get(); + if query.is_empty() { + navigate("?", Default::default()); + return; + } + + navigate( + &format!("?q={}", urlencoding::encode(&query)), + Default::default(), + ); + }; + + view! 
{ + <div class="main-page"> + <div class="page-header"> + <h1>"LEE Blockchain Explorer"</h1> + </div> + + <div class="search-section"> + <form on:submit=on_search class="search-form"> + <input + type="text" + class="search-input" + placeholder="Search by block ID, block hash, transaction hash, or account ID..." + prop:value=move || search_query.get() + on:input=move |ev| set_search_query.set(event_target_value(&ev)) + /> + <button type="submit" class="search-button"> + "Search" + </button> + </form> + + <Suspense fallback=move || view! { <div class="loading">"Searching..."</div> }> + {move || { + search_resource + .get() + .and_then(|opt_results| opt_results) + .map(|results| { + let SearchResults { + blocks, + transactions, + accounts, + } = results; + let has_results = !blocks.is_empty() + || !transactions.is_empty() + || !accounts.is_empty(); + view! { + <div class="search-results"> + <h2>"Search Results"</h2> + {if !has_results { + view! { <div class="not-found">"No results found"</div> } + .into_any() + } else { + view! { + <div class="results-container"> + {if !blocks.is_empty() { + view! { + <div class="results-section"> + <h3>"Blocks"</h3> + <div class="results-list"> + {blocks + .into_iter() + .map(|block| { + view! { <BlockPreview block=block /> } + }) + .collect::<Vec<_>>()} + </div> + </div> + } + .into_any() + } else { + ().into_any() + }} + + {if !transactions.is_empty() { + view! { + <div class="results-section"> + <h3>"Transactions"</h3> + <div class="results-list"> + {transactions + .into_iter() + .map(|tx| { + view! { <TransactionPreview transaction=tx /> } + }) + .collect::<Vec<_>>()} + </div> + </div> + } + .into_any() + } else { + ().into_any() + }} + + {if !accounts.is_empty() { + view! { + <div class="results-section"> + <h3>"Accounts"</h3> + <div class="results-list"> + {accounts + .into_iter() + .map(|(id, account)| { + view! { + <AccountPreview + account_id=id + account=account + /> + } + }) + .collect::<Vec<_>>()} + </div> + </div> + } + .into_any() + } else { + ().into_any() + }} + + </div> + } + .into_any() + }} + </div> + } + .into_any() + }) + }} + + </Suspense> + </div> + + <div class="blocks-section"> + <h2>"Recent Blocks"</h2> + <Suspense fallback=move || view! { <div class="loading">"Loading blocks..."</div> }> + {move || { + recent_blocks_resource + .get() + .map(|result| match result { + Ok(blocks) if !blocks.is_empty() => { + view! { + <div class="blocks-list"> + {blocks + .into_iter() + .map(|block| view! { <BlockPreview block=block /> }) + .collect::<Vec<_>>()} + </div> + } + .into_any() + } + Ok(_) => { + view! { <div class="no-blocks">"No blocks found"</div> }.into_any() + } + Err(e) => { + view! 
{ <div class="error">{format!("Error: {}", e)}</div> } + .into_any() + } + }) + }} + + </Suspense> + </div> + </div> + } +} diff --git a/explorer_service/src/pages/mod.rs b/explorer_service/src/pages/mod.rs new file mode 100644 index 00000000..f4220145 --- /dev/null +++ b/explorer_service/src/pages/mod.rs @@ -0,0 +1,9 @@ +pub mod account_page; +pub mod block_page; +pub mod main_page; +pub mod transaction_page; + +pub use account_page::AccountPage; +pub use block_page::BlockPage; +pub use main_page::MainPage; +pub use transaction_page::TransactionPage; diff --git a/explorer_service/src/pages/transaction_page.rs b/explorer_service/src/pages/transaction_page.rs new file mode 100644 index 00000000..a8571a7a --- /dev/null +++ b/explorer_service/src/pages/transaction_page.rs @@ -0,0 +1,262 @@ +use indexer_service_protocol::{ + Hash, PrivacyPreservingMessage, PrivacyPreservingTransaction, ProgramDeploymentMessage, + ProgramDeploymentTransaction, PublicMessage, PublicTransaction, Transaction, WitnessSet, +}; +use leptos::prelude::*; +use leptos_router::{components::A, hooks::use_params_map}; + +use crate::{api, format_utils}; + +/// Transaction page component +#[component] +pub fn TransactionPage() -> impl IntoView { + let params = use_params_map(); + + let transaction_resource = Resource::new( + move || { + let tx_hash_str = params.read().get("hash").unwrap_or_default(); + format_utils::parse_hex(&tx_hash_str).and_then(|bytes| { + if bytes.len() == 32 { + let hash_array: [u8; 32] = bytes.try_into().ok()?; + Some(Hash(hash_array)) + } else { + None + } + }) + }, + |hash_opt| async move { + match hash_opt { + Some(hash) => api::get_transaction(hash).await, + None => Err(leptos::prelude::ServerFnError::ServerError( + "Invalid transaction hash".to_string(), + )), + } + }, + ); + + view! { + <div class="transaction-page"> + <Suspense fallback=move || view! { <div class="loading">"Loading transaction..."</div> }> + {move || { + transaction_resource + .get() + .map(|result| match result { + Ok(tx) => { + let tx_hash = format_utils::format_hash(&tx.hash().0); + let tx_type = match &tx { + Transaction::Public(_) => "Public Transaction", + Transaction::PrivacyPreserving(_) => "Privacy-Preserving Transaction", + Transaction::ProgramDeployment(_) => "Program Deployment Transaction", + }; + view! { + <div class="transaction-detail"> + <div class="page-header"> + <h1>"Transaction"</h1> + </div> + + <div class="transaction-info"> + <h2>"Transaction Information"</h2> + <div class="info-grid"> + <div class="info-row"> + <span class="info-label">"Hash:"</span> + <span class="info-value hash">{tx_hash}</span> + </div> + <div class="info-row"> + <span class="info-label">"Type:"</span> + <span class="info-value">{tx_type}</span> + </div> + </div> + </div> + + {match tx { + Transaction::Public(ptx) => { + let PublicTransaction { + hash: _, + message, + witness_set, + } = ptx; + let PublicMessage { + program_id, + account_ids, + nonces, + instruction_data, + } = message; + let WitnessSet { + signatures_and_public_keys, + proof, + } = witness_set; + + let program_id_str = program_id + .iter() + .map(|n| format!("{:08x}", n)) + .collect::<String>(); + let proof_len = proof.0.len(); + let signatures_count = signatures_and_public_keys.len(); + + view! 
{ + <div class="transaction-details"> + <h2>"Public Transaction Details"</h2> + <div class="info-grid"> + <div class="info-row"> + <span class="info-label">"Program ID:"</span> + <span class="info-value hash">{program_id_str}</span> + </div> + <div class="info-row"> + <span class="info-label">"Instruction Data:"</span> + <span class="info-value"> + {format!("{} u32 values", instruction_data.len())} + </span> + </div> + <div class="info-row"> + <span class="info-label">"Proof Size:"</span> + <span class="info-value">{format!("{} bytes", proof_len)}</span> + </div> + <div class="info-row"> + <span class="info-label">"Signatures:"</span> + <span class="info-value">{signatures_count.to_string()}</span> + </div> + </div> + + <h3>"Accounts"</h3> + <div class="accounts-list"> + {account_ids + .into_iter() + .zip(nonces.into_iter()) + .map(|(account_id, nonce)| { + let account_id_str = format_utils::format_account_id(&account_id); + view! { + <div class="account-item"> + <A href=format!("/account/{}", account_id_str)> + <span class="hash">{account_id_str}</span> + </A> + <span class="nonce"> + " (nonce: " {nonce.to_string()} ")" + </span> + </div> + } + }) + .collect::<Vec<_>>()} + </div> + </div> + } + .into_any() + } + Transaction::PrivacyPreserving(pptx) => { + let PrivacyPreservingTransaction { + hash: _, + message, + witness_set, + } = pptx; + let PrivacyPreservingMessage { + public_account_ids, + nonces, + public_post_states: _, + encrypted_private_post_states, + new_commitments, + new_nullifiers, + } = message; + let WitnessSet { + signatures_and_public_keys: _, + proof, + } = witness_set; + + let proof_len = proof.0.len(); + view! { + <div class="transaction-details"> + <h2>"Privacy-Preserving Transaction Details"</h2> + <div class="info-grid"> + <div class="info-row"> + <span class="info-label">"Public Accounts:"</span> + <span class="info-value"> + {public_account_ids.len().to_string()} + </span> + </div> + <div class="info-row"> + <span class="info-label">"New Commitments:"</span> + <span class="info-value">{new_commitments.len().to_string()}</span> + </div> + <div class="info-row"> + <span class="info-label">"Nullifiers:"</span> + <span class="info-value">{new_nullifiers.len().to_string()}</span> + </div> + <div class="info-row"> + <span class="info-label">"Encrypted States:"</span> + <span class="info-value"> + {encrypted_private_post_states.len().to_string()} + </span> + </div> + <div class="info-row"> + <span class="info-label">"Proof Size:"</span> + <span class="info-value">{format!("{} bytes", proof_len)}</span> + </div> + </div> + + <h3>"Public Accounts"</h3> + <div class="accounts-list"> + {public_account_ids + .into_iter() + .zip(nonces.into_iter()) + .map(|(account_id, nonce)| { + let account_id_str = format_utils::format_account_id(&account_id); + view! { + <div class="account-item"> + <A href=format!("/account/{}", account_id_str)> + <span class="hash">{account_id_str}</span> + </A> + <span class="nonce"> + " (nonce: " {nonce.to_string()} ")" + </span> + </div> + } + }) + .collect::<Vec<_>>()} + </div> + </div> + } + .into_any() + } + Transaction::ProgramDeployment(pdtx) => { + let ProgramDeploymentTransaction { + hash: _, + message, + } = pdtx; + let ProgramDeploymentMessage { bytecode } = message; + + let bytecode_len = bytecode.len(); + view! 
{ + <div class="transaction-details"> + <h2>"Program Deployment Transaction Details"</h2> + <div class="info-grid"> + <div class="info-row"> + <span class="info-label">"Bytecode Size:"</span> + <span class="info-value"> + {format!("{} bytes", bytecode_len)} + </span> + </div> + </div> + </div> + } + .into_any() + } + }} + + </div> + } + .into_any() + } + Err(e) => { + view! { + <div class="error-page"> + <h1>"Error"</h1> + <p>{format!("Failed to load transaction: {}", e)}</p> + </div> + } + .into_any() + } + }) + }} + + </Suspense> + </div> + } +} diff --git a/flake.lock b/flake.lock new file mode 100644 index 00000000..a12fbc85 --- /dev/null +++ b/flake.lock @@ -0,0 +1,64 @@ +{ + "nodes": { + "crane": { + "locked": { + "lastModified": 1769737823, + "narHash": "sha256-DrBaNpZ+sJ4stXm+0nBX7zqZT9t9P22zbk6m5YhQxS4=", + "owner": "ipetkov", + "repo": "crane", + "rev": "b2f45c3830aa96b7456a4c4bc327d04d7a43e1ba", + "type": "github" + }, + "original": { + "owner": "ipetkov", + "repo": "crane", + "type": "github" + } + }, + "nixpkgs": { + "locked": { + "lastModified": 1770019141, + "narHash": "sha256-VKS4ZLNx4PNrABoB0L8KUpc1fE7CLpQXQs985tGfaCU=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "cb369ef2efd432b3cdf8622b0ffc0a97a02f3137", + "type": "github" + }, + "original": { + "owner": "NixOS", + "ref": "nixos-unstable", + "repo": "nixpkgs", + "type": "github" + } + }, + "root": { + "inputs": { + "crane": "crane", + "nixpkgs": "nixpkgs", + "rust-overlay": "rust-overlay" + } + }, + "rust-overlay": { + "inputs": { + "nixpkgs": [ + "nixpkgs" + ] + }, + "locked": { + "lastModified": 1770088046, + "narHash": "sha256-4hfYDnUTvL1qSSZEA4CEThxfz+KlwSFQ30Z9jgDguO0=", + "owner": "oxalica", + "repo": "rust-overlay", + "rev": "71f9daa4e05e49c434d08627e755495ae222bc34", + "type": "github" + }, + "original": { + "owner": "oxalica", + "repo": "rust-overlay", + "type": "github" + } + } + }, + "root": "root", + "version": 7 +} diff --git a/flake.nix b/flake.nix new file mode 100644 index 00000000..f09d4f77 --- /dev/null +++ b/flake.nix @@ -0,0 +1,96 @@ +{ + description = "Logos Execution Zone"; + + inputs = { + nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable"; + + rust-overlay = { + url = "github:oxalica/rust-overlay"; + inputs.nixpkgs.follows = "nixpkgs"; + }; + + crane.url = "github:ipetkov/crane"; + }; + + outputs = + { + self, + nixpkgs, + rust-overlay, + crane, + ... 
+ }: + let + systems = [ + "x86_64-linux" + "aarch64-linux" + "aarch64-darwin" + "x86_64-darwin" + ]; + + forAll = nixpkgs.lib.genAttrs systems; + + mkPkgs = + system: + import nixpkgs { + inherit system; + overlays = [ rust-overlay.overlays.default ]; + }; + in + { + packages = forAll ( + system: + let + pkgs = mkPkgs system; + rustToolchain = pkgs.rust-bin.stable.latest.default; + craneLib = (crane.mkLib pkgs).overrideToolchain rustToolchain; + src = ./.; + + commonArgs = { + inherit src; + buildInputs = [ pkgs.openssl ]; + nativeBuildInputs = [ + pkgs.pkg-config + pkgs.clang + pkgs.llvmPackages.libclang.lib + ]; + LIBCLANG_PATH = "${pkgs.llvmPackages.libclang.lib}/lib"; + }; + + walletFfiPackage = craneLib.buildPackage ( + commonArgs + // { + pname = "logos-execution-zone-wallet-ffi"; + version = "0.1.0"; + cargoExtraArgs = "-p wallet-ffi"; + postInstall = '' + mkdir -p $out/include + cp wallet-ffi/wallet_ffi.h $out/include/ + '' + + pkgs.lib.optionalString pkgs.stdenv.isDarwin '' + install_name_tool -id @rpath/libwallet_ffi.dylib $out/lib/libwallet_ffi.dylib + ''; + } + ); + in + { + wallet = walletFfiPackage; + default = walletFfiPackage; + } + ); + devShells = forAll ( + system: + let + pkgs = mkPkgs system; + walletFfiPackage = self.packages.${system}.wallet; + walletFfiShell = pkgs.mkShell { + inputsFrom = [ walletFfiPackage ]; + }; + in + { + wallet = walletFfiShell; + default = walletFfiShell; + } + ); + }; +} diff --git a/indexer_core/Cargo.toml b/indexer_core/Cargo.toml new file mode 100644 index 00000000..abe1e629 --- /dev/null +++ b/indexer_core/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "indexer_core" +version = "0.1.0" +edition = "2024" +license = { workspace = true } + +[dependencies] +common.workspace = true +bedrock_client.workspace = true + +anyhow.workspace = true +log.workspace = true +serde.workspace = true +tokio.workspace = true +borsh.workspace = true +futures.workspace = true +url.workspace = true +logos-blockchain-core.workspace = true +serde_json.workspace = true diff --git a/indexer_core/src/config.rs b/indexer_core/src/config.rs new file mode 100644 index 00000000..784f5840 --- /dev/null +++ b/indexer_core/src/config.rs @@ -0,0 +1,36 @@ +use std::{fs::File, io::BufReader, path::Path}; + +use anyhow::{Context, Result}; +use bedrock_client::BackoffConfig; +use common::sequencer_client::BasicAuth; +use logos_blockchain_core::mantle::ops::channel::ChannelId; +use serde::{Deserialize, Serialize}; +use url::Url; + +#[derive(Debug, Clone, Serialize, Deserialize)] +/// ToDo: Expand if necessary +pub struct ClientConfig { + pub addr: Url, + pub auth: Option<BasicAuth>, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +/// Note: For individual RPC requests we use a Fibonacci backoff retry strategy +pub struct IndexerConfig { + pub resubscribe_interval_millis: u64, + pub backoff: BackoffConfig, + pub bedrock_client_config: ClientConfig, + pub sequencer_client_config: ClientConfig, + pub channel_id: ChannelId, +} + +impl IndexerConfig { + pub fn from_path(config_home: &Path) -> Result<IndexerConfig> { + let file = File::open(config_home) + .with_context(|| format!("Failed to open indexer config at {config_home:?}"))?; + let reader = BufReader::new(file); + + serde_json::from_reader(reader) + .with_context(|| format!("Failed to parse indexer config at {config_home:?}")) + } +} diff --git a/indexer_core/src/lib.rs b/indexer_core/src/lib.rs new file mode 100644 index 00000000..ca9ec22f --- /dev/null +++ b/indexer_core/src/lib.rs @@ -0,0 +1,124 @@ +use 
std::sync::Arc; + +use anyhow::Result; +use bedrock_client::BedrockClient; +use common::{ + block::HashableBlockData, communication::indexer::Message, + rpc_primitives::requests::PostIndexerMessageResponse, sequencer_client::SequencerClient, +}; +use futures::StreamExt; +use log::info; +use logos_blockchain_core::mantle::{ + Op, SignedMantleTx, + ops::channel::{ChannelId, inscribe::InscriptionOp}, +}; +use tokio::sync::RwLock; + +use crate::{config::IndexerConfig, state::IndexerState}; + +pub mod config; +pub mod state; + +pub struct IndexerCore { + pub bedrock_client: BedrockClient, + pub sequencer_client: SequencerClient, + pub config: IndexerConfig, + pub state: IndexerState, +} + +impl IndexerCore { + pub fn new(config: IndexerConfig) -> Result<Self> { + Ok(Self { + bedrock_client: BedrockClient::new( + config.bedrock_client_config.auth.clone().map(Into::into), + config.bedrock_client_config.addr.clone(), + )?, + sequencer_client: SequencerClient::new_with_auth( + config.sequencer_client_config.addr.clone(), + config.sequencer_client_config.auth.clone(), + )?, + config, + // No state setup for now, future task. + state: IndexerState { + latest_seen_block: Arc::new(RwLock::new(0)), + }, + }) + } + + pub async fn subscribe_parse_block_stream(&self) -> Result<()> { + loop { + let mut stream_pinned = Box::pin(self.bedrock_client.get_lib_stream().await?); + + info!("Block stream joined"); + + while let Some(block_info) = stream_pinned.next().await { + let header_id = block_info.header_id; + + info!("Observed L1 block at height {}", block_info.height); + + if let Some(l1_block) = self + .bedrock_client + .get_block_by_id(header_id, &self.config.backoff) + .await? + { + info!("Extracted L1 block at height {}", block_info.height); + + let l2_blocks_parsed = parse_blocks( + l1_block.into_transactions().into_iter(), + &self.config.channel_id, + ); + + for l2_block in l2_blocks_parsed { + // State modification, will be updated in future + { + let mut guard = self.state.latest_seen_block.write().await; + if l2_block.block_id > *guard { + *guard = l2_block.block_id; + } + } + + // Sending data into sequencer, may need to be expanded. + let message = Message::L2BlockFinalized { + l2_block_height: l2_block.block_id, + }; + + let status = self.send_message_to_sequencer(message.clone()).await?; + + info!("Sent message {message:#?} to sequencer; status {status:#?}"); + } + } + } + + // Refetch stream after delay + tokio::time::sleep(std::time::Duration::from_millis( + self.config.resubscribe_interval_millis, + )) + .await; + } + } + + pub async fn send_message_to_sequencer( + &self, + message: Message, + ) -> Result<PostIndexerMessageResponse> { + Ok(self.sequencer_client.post_indexer_message(message).await?) + } +} + +fn parse_blocks( + block_txs: impl Iterator<Item = SignedMantleTx>, + decoded_channel_id: &ChannelId, +) -> impl Iterator<Item = HashableBlockData> { + block_txs.flat_map(|tx| { + tx.mantle_tx.ops.into_iter().filter_map(|op| match op { + Op::ChannelInscribe(InscriptionOp { + channel_id, + inscription, + .. + }) if channel_id == *decoded_channel_id => { + borsh::from_slice::<HashableBlockData>(&inscription).ok() + } + _ => None, + }) + }) +} diff --git a/indexer_core/src/state.rs b/indexer_core/src/state.rs new file mode 100644 index 00000000..bd05971f --- /dev/null +++ b/indexer_core/src/state.rs @@ -0,0 +1,9 @@ +use std::sync::Arc; + +use tokio::sync::RwLock; + +#[derive(Debug, Clone)] +pub struct IndexerState { + // Only one field for now, for testing. 
+ pub latest_seen_block: Arc<RwLock<u64>>, +} diff --git a/indexer_service/Cargo.toml b/indexer_service/Cargo.toml index d3f31de8..6148d544 100644 --- a/indexer_service/Cargo.toml +++ b/indexer_service/Cargo.toml @@ -2,6 +2,7 @@ name = "indexer_service" version = "0.1.0" edition = "2024" +license = { workspace = true } [dependencies] indexer_service_protocol.workspace = true @@ -9,9 +10,13 @@ indexer_service_rpc = { workspace = true, features = ["server"] } clap = { workspace = true, features = ["derive"] } anyhow.workspace = true -tokio.workspace = true +tokio = { workspace = true, features = ["macros", "rt-multi-thread", "signal"] } tokio-util.workspace = true env_logger.workspace = true log.workspace = true jsonrpsee.workspace = true async-trait = "0.1.89" + +[features] +# Return mock responses with generated data for testing purposes +mock-responses = [] diff --git a/indexer_service/protocol/Cargo.toml b/indexer_service/protocol/Cargo.toml index 08add00e..2646086c 100644 --- a/indexer_service/protocol/Cargo.toml +++ b/indexer_service/protocol/Cargo.toml @@ -2,6 +2,7 @@ name = "indexer_service_protocol" version = "0.1.0" edition = "2024" +license = { workspace = true } [dependencies] nssa_core = { workspace = true, optional = true, features = ["host"] } diff --git a/indexer_service/protocol/src/convert.rs b/indexer_service/protocol/src/convert.rs index 46c8811d..179a25ec 100644 --- a/indexer_service/protocol/src/convert.rs +++ b/indexer_service/protocol/src/convert.rs @@ -381,11 +381,17 @@ impl TryFrom<WitnessSet> for nssa::privacy_preserving_transaction::witness_set:: impl From<nssa::PublicTransaction> for PublicTransaction { fn from(value: nssa::PublicTransaction) -> Self { + let hash = Hash(value.hash()); + let nssa::PublicTransaction { + message, + witness_set, + } = value; + Self { - message: value.message().clone().into(), + hash, + message: message.into(), witness_set: WitnessSet { - signatures_and_public_keys: value - .witness_set() + signatures_and_public_keys: witness_set .signatures_and_public_keys() .iter() .map(|(sig, pk)| (sig.clone().into(), pk.clone().into())) @@ -401,6 +407,7 @@ impl TryFrom<PublicTransaction> for nssa::PublicTransaction { fn try_from(value: PublicTransaction) -> Result<Self, Self::Error> { let PublicTransaction { + hash: _, message, witness_set, } = value; @@ -408,6 +415,7 @@ impl TryFrom<PublicTransaction> for nssa::PublicTransaction { signatures_and_public_keys, proof: _, } = witness_set; + Ok(Self::new( message.into(), nssa::public_transaction::WitnessSet::from_raw_parts( @@ -422,9 +430,16 @@ impl TryFrom<PublicTransaction> for nssa::PublicTransaction { impl From<nssa::PrivacyPreservingTransaction> for PrivacyPreservingTransaction { fn from(value: nssa::PrivacyPreservingTransaction) -> Self { + let hash = Hash(value.hash()); + let nssa::PrivacyPreservingTransaction { + message, + witness_set, + } = value; + Self { - message: value.message().clone().into(), - witness_set: value.witness_set().clone().into(), + hash, + message: message.into(), + witness_set: witness_set.into(), } } } @@ -434,13 +449,17 @@ impl TryFrom<PrivacyPreservingTransaction> for nssa::PrivacyPreservingTransactio fn try_from(value: PrivacyPreservingTransaction) -> Result<Self, Self::Error> { let PrivacyPreservingTransaction { + hash: _, message, witness_set, } = value; + Ok(Self::new( - message.try_into().map_err(|_| { - nssa::error::NssaError::InvalidInput("Data too big error".to_string()) - })?, + message + .try_into() + .map_err(|err: nssa_core::account::data::DataTooBigError| 
{ + nssa::error::NssaError::InvalidInput(err.to_string()) + })?, witness_set.try_into()?, )) } @@ -448,15 +467,19 @@ impl TryFrom<PrivacyPreservingTransaction> for nssa::PrivacyPreservingTransactio impl From<nssa::ProgramDeploymentTransaction> for ProgramDeploymentTransaction { fn from(value: nssa::ProgramDeploymentTransaction) -> Self { + let hash = Hash(value.hash()); + let nssa::ProgramDeploymentTransaction { message } = value; + Self { - message: value.into_message().into(), + hash, + message: message.into(), } } } impl From<ProgramDeploymentTransaction> for nssa::ProgramDeploymentTransaction { fn from(value: ProgramDeploymentTransaction) -> Self { - let ProgramDeploymentTransaction { message } = value; + let ProgramDeploymentTransaction { hash: _, message } = value; Self::new(message.into()) } } @@ -599,12 +622,14 @@ impl TryFrom<common::block::Block> for Block { header, body, bedrock_status, + bedrock_parent_id, } = value; Ok(Self { header: header.into(), body: body.try_into()?, bedrock_status: bedrock_status.into(), + bedrock_parent_id: MantleMsgId(bedrock_parent_id), }) } } @@ -617,12 +642,14 @@ impl TryFrom<Block> for common::block::Block { header, body, bedrock_status, + bedrock_parent_id, } = value; Ok(Self { header: header.try_into()?, body: body.try_into()?, bedrock_status: bedrock_status.into(), + bedrock_parent_id: bedrock_parent_id.0, }) } } diff --git a/indexer_service/protocol/src/lib.rs b/indexer_service/protocol/src/lib.rs index 8189f7d8..c354aa59 100644 --- a/indexer_service/protocol/src/lib.rs +++ b/indexer_service/protocol/src/lib.rs @@ -36,6 +36,7 @@ pub struct Block { pub header: BlockHeader, pub body: BlockBody, pub bedrock_status: BedrockStatus, + pub bedrock_parent_id: MantleMsgId, } #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] @@ -66,14 +67,27 @@ pub enum Transaction { ProgramDeployment(ProgramDeploymentTransaction), } +impl Transaction { + /// Get the hash of the transaction + pub fn hash(&self) -> &self::Hash { + match self { + Transaction::Public(tx) => &tx.hash, + Transaction::PrivacyPreserving(tx) => &tx.hash, + Transaction::ProgramDeployment(tx) => &tx.hash, + } + } +} + #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] pub struct PublicTransaction { + pub hash: Hash, pub message: PublicMessage, pub witness_set: WitnessSet, } #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] pub struct PrivacyPreservingTransaction { + pub hash: Hash, pub message: PrivacyPreservingMessage, pub witness_set: WitnessSet, } @@ -120,6 +134,7 @@ pub struct EncryptedAccountData { #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] pub struct ProgramDeploymentTransaction { + pub hash: Hash, pub message: ProgramDeploymentMessage, } @@ -132,7 +147,7 @@ pub struct Ciphertext( pub Vec<u8>, ); -#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] pub struct PublicKey( #[serde(with = "base64::arr")] #[schemars(with = "String", description = "base64-encoded public key")] @@ -146,21 +161,21 @@ pub struct EphemeralPublicKey( pub Vec<u8>, ); -#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] pub struct Commitment( #[serde(with = "base64::arr")] #[schemars(with = "String", description = "base64-encoded commitment")] pub [u8; 32], ); 
-#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] pub struct Nullifier( #[serde(with = "base64::arr")] #[schemars(with = "String", description = "base64-encoded nullifier")] pub [u8; 32], ); -#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] pub struct CommitmentSetDigest( #[serde(with = "base64::arr")] #[schemars(with = "String", description = "base64-encoded commitment set digest")] @@ -181,13 +196,20 @@ pub struct Data( pub Vec<u8>, ); -#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] pub struct Hash( #[serde(with = "base64::arr")] #[schemars(with = "String", description = "base64-encoded hash")] pub [u8; 32], ); +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] +pub struct MantleMsgId( + #[serde(with = "base64::arr")] + #[schemars(with = "String", description = "base64-encoded Bedrock message id")] + pub [u8; 32], +); + #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] pub enum BedrockStatus { Pending, diff --git a/indexer_service/rpc/Cargo.toml b/indexer_service/rpc/Cargo.toml index f77c5abf..2bed63ae 100644 --- a/indexer_service/rpc/Cargo.toml +++ b/indexer_service/rpc/Cargo.toml @@ -2,6 +2,7 @@ name = "indexer_service_rpc" version = "0.1.0" edition = "2024" +license = { workspace = true } [dependencies] indexer_service_protocol = { workspace = true } diff --git a/indexer_service/rpc/src/lib.rs b/indexer_service/rpc/src/lib.rs index c1c4a560..def20ad5 100644 --- a/indexer_service/rpc/src/lib.rs +++ b/indexer_service/rpc/src/lib.rs @@ -1,11 +1,14 @@ use indexer_service_protocol::{Account, AccountId, Block, BlockId, Hash, Transaction}; -use jsonrpsee::{core::SubscriptionResult, proc_macros::rpc, types::ErrorObjectOwned}; +use jsonrpsee::proc_macros::rpc; +#[cfg(feature = "server")] +use jsonrpsee::{core::SubscriptionResult, types::ErrorObjectOwned}; #[cfg(all(not(feature = "server"), not(feature = "client")))] compile_error!("At least one of `server` or `client` features must be enabled."); -#[cfg_attr(feature = "server", rpc(server))] -#[cfg_attr(feature = "client", rpc(client))] +#[cfg_attr(all(feature = "server", not(feature = "client")), rpc(server))] +#[cfg_attr(all(feature = "client", not(feature = "server")), rpc(client))] +#[cfg_attr(all(feature = "server", feature = "client"), rpc(server, client))] pub trait Rpc { #[method(name = "get_schema")] fn get_schema(&self) -> Result<serde_json::Value, ErrorObjectOwned> { @@ -37,4 +40,15 @@ pub trait Rpc { #[method(name = "getTransaction")] async fn get_transaction(&self, tx_hash: Hash) -> Result<Transaction, ErrorObjectOwned>; + + #[method(name = "getBlocks")] + async fn get_blocks(&self, offset: u32, limit: u32) -> Result<Vec<Block>, ErrorObjectOwned>; + + #[method(name = "getTransactionsByAccount")] + async fn get_transactions_by_account( + &self, + account_id: AccountId, + limit: u32, + offset: u32, + ) -> Result<Vec<Transaction>, ErrorObjectOwned>; } diff --git a/indexer_service/src/lib.rs b/indexer_service/src/lib.rs index 1f278a4d..0c18410e 100644 --- a/indexer_service/src/lib.rs +++ b/indexer_service/src/lib.rs @@ -1 +1,4 @@ pub mod service; + +#[cfg(feature = "mock-responses")] +pub mod mock_service; diff --git 
a/indexer_service/src/main.rs b/indexer_service/src/main.rs index bfdd3259..1c6856ec 100644 --- a/indexer_service/src/main.rs +++ b/indexer_service/src/main.rs @@ -51,7 +51,13 @@ async fn run_server(port: u16) -> Result<jsonrpsee::server::ServerHandle> { info!("Starting Indexer Service RPC server on {addr}"); + #[cfg(not(feature = "mock-responses"))] let handle = server.start(indexer_service::service::IndexerService.into_rpc()); + #[cfg(feature = "mock-responses")] + let handle = server.start( + indexer_service::mock_service::MockIndexerService::new_with_mock_blocks().into_rpc(), + ); + Ok(handle) } diff --git a/indexer_service/src/mock_service.rs b/indexer_service/src/mock_service.rs new file mode 100644 index 00000000..907bdd0e --- /dev/null +++ b/indexer_service/src/mock_service.rs @@ -0,0 +1,271 @@ +use std::collections::HashMap; + +use indexer_service_protocol::{ + Account, AccountId, BedrockStatus, Block, BlockBody, BlockHeader, BlockId, Commitment, + CommitmentSetDigest, Data, EncryptedAccountData, Hash, MantleMsgId, PrivacyPreservingMessage, + PrivacyPreservingTransaction, ProgramDeploymentMessage, ProgramDeploymentTransaction, + PublicMessage, PublicTransaction, Signature, Transaction, WitnessSet, +}; +use jsonrpsee::{core::SubscriptionResult, types::ErrorObjectOwned}; + +/// A mock implementation of the IndexerService RPC for testing purposes. +pub struct MockIndexerService { + blocks: Vec<Block>, + accounts: HashMap<AccountId, Account>, + transactions: HashMap<Hash, (Transaction, BlockId)>, +} + +impl MockIndexerService { + pub fn new_with_mock_blocks() -> Self { + let mut blocks = Vec::new(); + let mut accounts = HashMap::new(); + let mut transactions = HashMap::new(); + + // Create some mock accounts + let account_ids: Vec<AccountId> = (0..5) + .map(|i| { + let mut value = [0u8; 32]; + value[0] = i; + AccountId { value } + }) + .collect(); + + for (i, account_id) in account_ids.iter().enumerate() { + accounts.insert( + *account_id, + Account { + program_owner: [i as u32; 8], + balance: 1000 * (i as u128 + 1), + data: Data(vec![0xaa, 0xbb, 0xcc]), + nonce: i as u128, + }, + ); + } + + // Create 10 blocks with transactions + let mut prev_hash = Hash([0u8; 32]); + + for block_id in 0..10 { + let block_hash = { + let mut hash = [0u8; 32]; + hash[0] = block_id as u8; + hash[1] = 0xff; + Hash(hash) + }; + + // Create 2-4 transactions per block (mix of Public, PrivacyPreserving, and + // ProgramDeployment) + let num_txs = 2 + (block_id % 3); + let mut block_transactions = Vec::new(); + + for tx_idx in 0..num_txs { + let tx_hash = { + let mut hash = [0u8; 32]; + hash[0] = block_id as u8; + hash[1] = tx_idx as u8; + Hash(hash) + }; + + // Vary transaction types: Public, PrivacyPreserving, or ProgramDeployment + let tx = match (block_id + tx_idx) % 5 { + // Public transactions (most common) + 0 | 1 => Transaction::Public(PublicTransaction { + hash: tx_hash, + message: PublicMessage { + program_id: [1u32; 8], + account_ids: vec![ + account_ids[tx_idx as usize % account_ids.len()], + account_ids[(tx_idx as usize + 1) % account_ids.len()], + ], + nonces: vec![block_id as u128, (block_id + 1) as u128], + instruction_data: vec![1, 2, 3, 4], + }, + witness_set: WitnessSet { + signatures_and_public_keys: vec![], + proof: indexer_service_protocol::Proof(vec![0; 32]), + }, + }), + // PrivacyPreserving transactions + 2 | 3 => Transaction::PrivacyPreserving(PrivacyPreservingTransaction { + hash: tx_hash, + message: PrivacyPreservingMessage { + public_account_ids: vec![ + account_ids[tx_idx as 
usize % account_ids.len()], + ], + nonces: vec![block_id as u128], + public_post_states: vec![Account { + program_owner: [1u32; 8], + balance: 500, + data: Data(vec![0xdd, 0xee]), + nonce: block_id as u128, + }], + encrypted_private_post_states: vec![EncryptedAccountData { + ciphertext: indexer_service_protocol::Ciphertext(vec![ + 0x01, 0x02, 0x03, 0x04, + ]), + epk: indexer_service_protocol::EphemeralPublicKey(vec![0xaa; 32]), + view_tag: 42, + }], + new_commitments: vec![Commitment([block_id as u8; 32])], + new_nullifiers: vec![( + indexer_service_protocol::Nullifier([tx_idx as u8; 32]), + CommitmentSetDigest([0xff; 32]), + )], + }, + witness_set: WitnessSet { + signatures_and_public_keys: vec![], + proof: indexer_service_protocol::Proof(vec![0; 32]), + }, + }), + // ProgramDeployment transactions (rare) + _ => Transaction::ProgramDeployment(ProgramDeploymentTransaction { + hash: tx_hash, + message: ProgramDeploymentMessage { + bytecode: vec![0x00, 0x61, 0x73, 0x6d, 0x01, 0x00, 0x00, 0x00], /* WASM magic number */ + }, + }), + }; + + transactions.insert(tx_hash, (tx.clone(), block_id)); + block_transactions.push(tx); + } + + let block = Block { + header: BlockHeader { + block_id, + prev_block_hash: prev_hash, + hash: block_hash, + timestamp: 1704067200000 + (block_id * 12000), // ~12 seconds per block + signature: Signature([0u8; 64]), + }, + body: BlockBody { + transactions: block_transactions, + }, + bedrock_status: match block_id { + 0..=5 => BedrockStatus::Finalized, + 6..=8 => BedrockStatus::Safe, + _ => BedrockStatus::Pending, + }, + bedrock_parent_id: MantleMsgId([0; 32]), + }; + + prev_hash = block_hash; + blocks.push(block); + } + + Self { + blocks, + accounts, + transactions, + } + } +} + +// `async_trait` is required by `jsonrpsee` +#[async_trait::async_trait] +impl indexer_service_rpc::RpcServer for MockIndexerService { + async fn subscribe_to_blocks( + &self, + _subscription_sink: jsonrpsee::PendingSubscriptionSink, + _from: BlockId, + ) -> SubscriptionResult { + // Subscription not implemented for mock service + Err("Subscriptions not supported in mock service".into()) + } + + async fn get_block_by_id(&self, block_id: BlockId) -> Result<Block, ErrorObjectOwned> { + self.blocks + .iter() + .find(|b| b.header.block_id == block_id) + .cloned() + .ok_or_else(|| { + ErrorObjectOwned::owned( + -32001, + format!("Block with ID {} not found", block_id), + None::<()>, + ) + }) + } + + async fn get_block_by_hash(&self, block_hash: Hash) -> Result<Block, ErrorObjectOwned> { + self.blocks + .iter() + .find(|b| b.header.hash == block_hash) + .cloned() + .ok_or_else(|| ErrorObjectOwned::owned(-32001, "Block with hash not found", None::<()>)) + } + + async fn get_last_block_id(&self) -> Result<BlockId, ErrorObjectOwned> { + self.blocks + .last() + .map(|b| b.header.block_id) + .ok_or_else(|| ErrorObjectOwned::owned(-32001, "No blocks available", None::<()>)) + } + + async fn get_account(&self, account_id: AccountId) -> Result<Account, ErrorObjectOwned> { + self.accounts + .get(&account_id) + .cloned() + .ok_or_else(|| ErrorObjectOwned::owned(-32001, "Account not found", None::<()>)) + } + + async fn get_transaction(&self, tx_hash: Hash) -> Result<Transaction, ErrorObjectOwned> { + self.transactions + .get(&tx_hash) + .map(|(tx, _)| tx.clone()) + .ok_or_else(|| ErrorObjectOwned::owned(-32001, "Transaction not found", None::<()>)) + } + + async fn get_blocks(&self, offset: u32, limit: u32) -> Result<Vec<Block>, ErrorObjectOwned> { + let offset = offset as usize; + let limit = limit as 
usize; + let total = self.blocks.len(); + + // Return blocks in reverse order (newest first), with pagination + let start = offset.min(total); + let end = (offset + limit).min(total); + + Ok(self + .blocks + .iter() + .rev() + .skip(start) + .take(end - start) + .cloned() + .collect()) + } + + async fn get_transactions_by_account( + &self, + account_id: AccountId, + limit: u32, + offset: u32, + ) -> Result<Vec<Transaction>, ErrorObjectOwned> { + let mut account_txs: Vec<_> = self + .transactions + .values() + .filter(|(tx, _)| match tx { + Transaction::Public(pub_tx) => pub_tx.message.account_ids.contains(&account_id), + Transaction::PrivacyPreserving(priv_tx) => { + priv_tx.message.public_account_ids.contains(&account_id) + } + Transaction::ProgramDeployment(_) => false, + }) + .collect(); + + // Sort by block ID descending (most recent first) + account_txs.sort_by(|a, b| b.1.cmp(&a.1)); + + let start = offset as usize; + if start >= account_txs.len() { + return Ok(Vec::new()); + } + + let end = (start + limit as usize).min(account_txs.len()); + + Ok(account_txs[start..end] + .iter() + .map(|(tx, _)| tx.clone()) + .collect()) + } +} diff --git a/indexer_service/src/service.rs b/indexer_service/src/service.rs index 46c5fb2d..432dcc24 100644 --- a/indexer_service/src/service.rs +++ b/indexer_service/src/service.rs @@ -33,4 +33,17 @@ impl indexer_service_rpc::RpcServer for IndexerService { async fn get_transaction(&self, _tx_hash: Hash) -> Result<Transaction, ErrorObjectOwned> { todo!() } + + async fn get_blocks(&self, _offset: u32, _limit: u32) -> Result<Vec<Block>, ErrorObjectOwned> { + todo!() + } + + async fn get_transactions_by_account( + &self, + _account_id: AccountId, + _limit: u32, + _offset: u32, + ) -> Result<Vec<Transaction>, ErrorObjectOwned> { + todo!() + } } diff --git a/integration_tests/Cargo.toml b/integration_tests/Cargo.toml index b888c177..142ad78d 100644 --- a/integration_tests/Cargo.toml +++ b/integration_tests/Cargo.toml @@ -2,6 +2,7 @@ name = "integration_tests" version = "0.1.0" edition = "2024" +license = { workspace = true } [dependencies] nssa_core = { workspace = true, features = ["host"] } @@ -11,7 +12,12 @@ sequencer_runner.workspace = true wallet.workspace = true common.workspace = true key_protocol.workspace = true +indexer_core.workspace = true +wallet-ffi.workspace = true +serde_json.workspace = true +token_core.workspace = true +url.workspace = true anyhow.workspace = true env_logger.workspace = true log.workspace = true diff --git a/integration_tests/configs/indexer/indexer_config.json b/integration_tests/configs/indexer/indexer_config.json new file mode 100644 index 00000000..fd5309b2 --- /dev/null +++ b/integration_tests/configs/indexer/indexer_config.json @@ -0,0 +1,17 @@ +{ + "bedrock_client_config": { + "addr": "http://127.0.0.1:8080", + "auth": { + "username": "user" + } + }, + "channel_id": "0101010101010101010101010101010101010101010101010101010101010101", + "backoff": { + "max_retries": 10, + "start_delay_millis": 100 + }, + "resubscribe_interval_millis": 1000, + "sequencer_client_config": { + "addr": "will_be_replaced_in_runtime" + } +} \ No newline at end of file diff --git a/integration_tests/configs/sequencer/bedrock_local_attached/sequencer_config.json b/integration_tests/configs/sequencer/bedrock_local_attached/sequencer_config.json new file mode 100644 index 00000000..3253115b --- /dev/null +++ b/integration_tests/configs/sequencer/bedrock_local_attached/sequencer_config.json @@ -0,0 +1,165 @@ +{ + "home": "", + "override_rust_log": null, 
+ "genesis_id": 1, + "is_genesis_random": true, + "max_num_tx_in_block": 20, + "mempool_max_size": 10000, + "block_create_timeout_millis": 10000, + "port": 0, + "initial_accounts": [ + { + "account_id": "BLgCRDXYdQPMMWVHYRFGQZbgeHx9frkipa8GtpG2Syqy", + "balance": 10000 + }, + { + "account_id": "Gj1mJy5W7J5pfmLRujmQaLfLMWidNxQ6uwnhb666ZwHw", + "balance": 20000 + } + ], + "initial_commitments": [ + { + "npk": [ + 63, + 202, + 178, + 231, + 183, + 82, + 237, + 212, + 216, + 221, + 215, + 255, + 153, + 101, + 177, + 161, + 254, + 210, + 128, + 122, + 54, + 190, + 230, + 151, + 183, + 64, + 225, + 229, + 113, + 1, + 228, + 97 + ], + "account": { + "program_owner": [ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0 + ], + "balance": 10000, + "data": [], + "nonce": 0 + } + }, + { + "npk": [ + 192, + 251, + 166, + 243, + 167, + 236, + 84, + 249, + 35, + 136, + 130, + 172, + 219, + 225, + 161, + 139, + 229, + 89, + 243, + 125, + 194, + 213, + 209, + 30, + 23, + 174, + 100, + 244, + 124, + 74, + 140, + 47 + ], + "account": { + "program_owner": [ + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0 + ], + "balance": 20000, + "data": [], + "nonce": 0 + } + } + ], + "signing_key": [ + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37, + 37 + ], + "bedrock_config": { + "channel_id": "0101010101010101010101010101010101010101010101010101010101010101", + "node_url": "http://127.0.0.1:8080", + "auth": { + "username": "user" + } + } +} diff --git a/integration_tests/configs/sequencer/sequencer_config.json b/integration_tests/configs/sequencer/detached/sequencer_config.json similarity index 90% rename from integration_tests/configs/sequencer/sequencer_config.json rename to integration_tests/configs/sequencer/detached/sequencer_config.json index ff01f2a5..27aa15b0 100644 --- a/integration_tests/configs/sequencer/sequencer_config.json +++ b/integration_tests/configs/sequencer/detached/sequencer_config.json @@ -6,14 +6,15 @@ "max_num_tx_in_block": 20, "mempool_max_size": 10000, "block_create_timeout_millis": 10000, + "retry_pending_blocks_timeout_millis": 240000, "port": 0, "initial_accounts": [ { - "account_id": "BLgCRDXYdQPMMWVHYRFGQZbgeHx9frkipa8GtpG2Syqy", + "account_id": "6iArKUXxhUJqS7kCaPNhwMWt3ro71PDyBj7jwAyE2VQV", "balance": 10000 }, { - "account_id": "Gj1mJy5W7J5pfmLRujmQaLfLMWidNxQ6uwnhb666ZwHw", + "account_id": "7wHg9sbJwc6h3NP1S9bekfAzB8CHifEcxKswCKUt3YQo", "balance": 20000 } ], diff --git a/integration_tests/configs/wallet/wallet_config.json b/integration_tests/configs/wallet/wallet_config.json index 9200810e..7ed0d47d 100644 --- a/integration_tests/configs/wallet/wallet_config.json +++ b/integration_tests/configs/wallet/wallet_config.json @@ -9,7 +9,7 @@ "initial_accounts": [ { "Public": { - "account_id": "BLgCRDXYdQPMMWVHYRFGQZbgeHx9frkipa8GtpG2Syqy", + "account_id": "6iArKUXxhUJqS7kCaPNhwMWt3ro71PDyBj7jwAyE2VQV", "pub_sign_key": [ 16, 162, @@ -48,7 +48,7 @@ }, { "Public": { - "account_id": "Gj1mJy5W7J5pfmLRujmQaLfLMWidNxQ6uwnhb666ZwHw", + "account_id": "7wHg9sbJwc6h3NP1S9bekfAzB8CHifEcxKswCKUt3YQo", "pub_sign_key": [ 113, 121, diff --git a/integration_tests/src/lib.rs b/integration_tests/src/lib.rs index bde435b7..108f317c 100644 --- a/integration_tests/src/lib.rs +++ b/integration_tests/src/lib.rs @@ -3,26 +3,28 @@ use std::{net::SocketAddr, path::PathBuf, sync::LazyLock}; use actix_web::dev::ServerHandle; -use anyhow::{Context as _, Result}; +use anyhow::{Context, Result}; use 
base64::{Engine, engine::general_purpose::STANDARD as BASE64}; use common::{ sequencer_client::SequencerClient, transaction::{EncodedTransaction, NSSATransaction}, }; use futures::FutureExt as _; +use indexer_core::{IndexerCore, config::IndexerConfig}; use log::debug; use nssa::PrivacyPreservingTransaction; use nssa_core::Commitment; use sequencer_core::config::SequencerConfig; use tempfile::TempDir; use tokio::task::JoinHandle; +use url::Url; use wallet::{WalletCore, config::WalletConfigOverrides}; // TODO: Remove this and control time from tests pub const TIME_TO_WAIT_FOR_BLOCK_SECONDS: u64 = 12; -pub const ACC_SENDER: &str = "BLgCRDXYdQPMMWVHYRFGQZbgeHx9frkipa8GtpG2Syqy"; -pub const ACC_RECEIVER: &str = "Gj1mJy5W7J5pfmLRujmQaLfLMWidNxQ6uwnhb666ZwHw"; +pub const ACC_SENDER: &str = "6iArKUXxhUJqS7kCaPNhwMWt3ro71PDyBj7jwAyE2VQV"; +pub const ACC_RECEIVER: &str = "7wHg9sbJwc6h3NP1S9bekfAzB8CHifEcxKswCKUt3YQo"; pub const ACC_SENDER_PRIVATE: &str = "2ECgkFTaXzwjJBXR7ZKmXYQtpHbvTTHK9Auma4NL9AUo"; pub const ACC_RECEIVER_PRIVATE: &str = "E8HwiTyQe4H9HK7icTvn95HQMnzx49mP9A2ddtMLpNaN"; @@ -38,40 +40,71 @@ static LOGGER: LazyLock<()> = LazyLock::new(env_logger::init); pub struct TestContext { sequencer_server_handle: ServerHandle, sequencer_loop_handle: JoinHandle<Result<()>>, + sequencer_retry_pending_blocks_handle: JoinHandle<Result<()>>, + indexer_loop_handle: Option<JoinHandle<Result<()>>>, sequencer_client: SequencerClient, wallet: WalletCore, + wallet_password: String, _temp_sequencer_dir: TempDir, _temp_wallet_dir: TempDir, } impl TestContext { - /// Create new test context. + /// Create new test context in detached mode. Default. pub async fn new() -> Result<Self> { let manifest_dir = env!("CARGO_MANIFEST_DIR"); let sequencer_config_path = - PathBuf::from(manifest_dir).join("configs/sequencer/sequencer_config.json"); + PathBuf::from(manifest_dir).join("configs/sequencer/detached/sequencer_config.json"); let sequencer_config = SequencerConfig::from_path(&sequencer_config_path) .context("Failed to create sequencer config from file")?; - Self::new_with_sequencer_config(sequencer_config).await + Self::new_with_sequencer_and_maybe_indexer_configs(sequencer_config, None).await } - /// Create new test context with custom sequencer config. + /// Create new test context in local bedrock node attached mode. + pub async fn new_bedrock_local_attached() -> Result<Self> { + let manifest_dir = env!("CARGO_MANIFEST_DIR"); + + let sequencer_config_path = PathBuf::from(manifest_dir) + .join("configs/sequencer/bedrock_local_attached/sequencer_config.json"); + + let sequencer_config = SequencerConfig::from_path(&sequencer_config_path) + .context("Failed to create sequencer config from file")?; + + let indexer_config_path = + PathBuf::from(manifest_dir).join("configs/indexer/indexer_config.json"); + + let indexer_config = IndexerConfig::from_path(&indexer_config_path) + .context("Failed to create indexer config from file")?; + + Self::new_with_sequencer_and_maybe_indexer_configs(sequencer_config, Some(indexer_config)) + .await + } + + /// Create new test context with custom sequencer config and maybe indexer config. /// /// `home` and `port` fields of the provided config will be overridden to meet tests parallelism /// requirements. 
- pub async fn new_with_sequencer_config(sequencer_config: SequencerConfig) -> Result<Self> { + pub async fn new_with_sequencer_and_maybe_indexer_configs( + sequencer_config: SequencerConfig, + indexer_config: Option<IndexerConfig>, + ) -> Result<Self> { // Ensure logger is initialized only once *LOGGER; debug!("Test context setup"); - let (sequencer_server_handle, sequencer_addr, sequencer_loop_handle, temp_sequencer_dir) = - Self::setup_sequencer(sequencer_config) - .await - .context("Failed to setup sequencer")?; + let ( + sequencer_server_handle, + sequencer_addr, + sequencer_loop_handle, + sequencer_retry_pending_blocks_handle, + temp_sequencer_dir, + ) = Self::setup_sequencer(sequencer_config) + .await + .context("Failed to setup sequencer")?; // Convert 0.0.0.0 to 127.0.0.1 for client connections // When binding to port 0, the server binds to 0.0.0.0:<random_port> @@ -82,26 +115,60 @@ impl TestContext { format!("http://{sequencer_addr}") }; - let (wallet, temp_wallet_dir) = Self::setup_wallet(sequencer_addr.clone()) + let (wallet, temp_wallet_dir, wallet_password) = Self::setup_wallet(sequencer_addr.clone()) .await .context("Failed to setup wallet")?; - let sequencer_client = - SequencerClient::new(sequencer_addr).context("Failed to create sequencer client")?; + let sequencer_client = SequencerClient::new( + Url::parse(&sequencer_addr).context("Failed to parse sequencer addr")?, + ) + .context("Failed to create sequencer client")?; - Ok(Self { - sequencer_server_handle, - sequencer_loop_handle, - sequencer_client, - wallet, - _temp_sequencer_dir: temp_sequencer_dir, - _temp_wallet_dir: temp_wallet_dir, - }) + if let Some(mut indexer_config) = indexer_config { + indexer_config.sequencer_client_config.addr = + Url::parse(&sequencer_addr).context("Failed to parse sequencer addr")?; + + let indexer_core = IndexerCore::new(indexer_config)?; + + let indexer_loop_handle = Some(tokio::spawn(async move { + indexer_core.subscribe_parse_block_stream().await + })); + + Ok(Self { + sequencer_server_handle, + sequencer_loop_handle, + sequencer_retry_pending_blocks_handle, + indexer_loop_handle, + sequencer_client, + wallet, + _temp_sequencer_dir: temp_sequencer_dir, + _temp_wallet_dir: temp_wallet_dir, + wallet_password, + }) + } else { + Ok(Self { + sequencer_server_handle, + sequencer_loop_handle, + sequencer_retry_pending_blocks_handle, + indexer_loop_handle: None, + sequencer_client, + wallet, + _temp_sequencer_dir: temp_sequencer_dir, + _temp_wallet_dir: temp_wallet_dir, + wallet_password, + }) + } } async fn setup_sequencer( mut config: SequencerConfig, - ) -> Result<(ServerHandle, SocketAddr, JoinHandle<Result<()>>, TempDir)> { + ) -> Result<( + ServerHandle, + SocketAddr, + JoinHandle<Result<()>>, + JoinHandle<Result<()>>, + TempDir, + )> { let temp_sequencer_dir = tempfile::tempdir().context("Failed to create temp dir for sequencer home")?; @@ -113,18 +180,23 @@ impl TestContext { // Setting port to 0 lets the OS choose a free port for us config.port = 0; - let (sequencer_server_handle, sequencer_addr, sequencer_loop_handle) = - sequencer_runner::startup_sequencer(config).await?; + let ( + sequencer_server_handle, + sequencer_addr, + sequencer_loop_handle, + sequencer_retry_pending_blocks_handle, + ) = sequencer_runner::startup_sequencer(config).await?; Ok(( sequencer_server_handle, sequencer_addr, sequencer_loop_handle, + sequencer_retry_pending_blocks_handle, temp_sequencer_dir, )) } - async fn setup_wallet(sequencer_addr: String) -> Result<(WalletCore, TempDir)> { + async fn 
setup_wallet(sequencer_addr: String) -> Result<(WalletCore, TempDir, String)> { let manifest_dir = env!("CARGO_MANIFEST_DIR"); let wallet_config_source_path = PathBuf::from(manifest_dir).join("configs/wallet/wallet_config.json"); @@ -142,11 +214,12 @@ impl TestContext { ..Default::default() }; + let wallet_password = "test_pass".to_owned(); let wallet = WalletCore::new_init_storage( config_path, storage_path, Some(config_overrides), - "test_pass".to_owned(), + wallet_password.clone(), ) .context("Failed to init wallet")?; wallet @@ -154,7 +227,7 @@ impl TestContext { .await .context("Failed to store wallet persistent data")?; - Ok((wallet, temp_wallet_dir)) + Ok((wallet, temp_wallet_dir, wallet_password)) } /// Get reference to the wallet. @@ -162,6 +235,10 @@ impl TestContext { &self.wallet } + pub fn wallet_password(&self) -> &str { + &self.wallet_password + } + /// Get mutable reference to the wallet. pub fn wallet_mut(&mut self) -> &mut WalletCore { &mut self.wallet @@ -180,19 +257,40 @@ impl Drop for TestContext { let Self { sequencer_server_handle, sequencer_loop_handle, + sequencer_retry_pending_blocks_handle, + indexer_loop_handle, sequencer_client: _, wallet: _, _temp_sequencer_dir, _temp_wallet_dir, + wallet_password: _, } = self; sequencer_loop_handle.abort(); + sequencer_retry_pending_blocks_handle.abort(); + if let Some(indexer_loop_handle) = indexer_loop_handle { + indexer_loop_handle.abort(); + } // Can't wait here as Drop can't be async, but anyway stop signal should be sent sequencer_server_handle.stop(true).now_or_never(); } } +/// A test context to be used in normal #[test] tests +pub struct BlockingTestContext { + pub ctx: TestContext, + pub runtime: tokio::runtime::Runtime, +} + +impl BlockingTestContext { + pub fn new() -> Result<Self> { + let runtime = tokio::runtime::Runtime::new().unwrap(); + let ctx = runtime.block_on(TestContext::new())?; + Ok(Self { ctx, runtime }) + } +} + pub fn format_public_account_id(account_id: &str) -> String { format!("Public/{account_id}") } diff --git a/integration_tests/tests/indexer.rs b/integration_tests/tests/indexer.rs new file mode 100644 index 00000000..b25c887b --- /dev/null +++ b/integration_tests/tests/indexer.rs @@ -0,0 +1,23 @@ +use anyhow::Result; +use integration_tests::TestContext; +use log::info; +use tokio::test; + +#[ignore = "needs complicated setup"] +#[test] +// To run this test properly, you need a nomos node running in the background. +// For instructions on building a nomos node, refer to [this](https://github.com/logos-blockchain/logos-blockchain?tab=readme-ov-file#running-a-logos-blockchain-node). +// +// It is recommended to run the node locally from a built binary. +async fn indexer_run_local_node() -> Result<()> { + let _ctx = TestContext::new_bedrock_local_attached().await?; + + info!("Let's observe behaviour"); + + tokio::time::sleep(std::time::Duration::from_secs(180)).await; + + // There is currently no way to check the indexer's state. + // Once the indexer becomes a service, this will be possible. 
+ + Ok(()) +} diff --git a/integration_tests/tests/token.rs b/integration_tests/tests/token.rs index 37ef7259..2dd1d90d 100644 --- a/integration_tests/tests/token.rs +++ b/integration_tests/tests/token.rs @@ -8,6 +8,7 @@ use integration_tests::{ use key_protocol::key_management::key_tree::chain_index::ChainIndex; use log::info; use nssa::program::Program; +use token_core::{TokenDefinition, TokenHolding}; use tokio::test; use wallet::cli::{ Command, SubcommandReturnValue, @@ -59,11 +60,13 @@ async fn create_and_transfer_public_token() -> Result<()> { }; // Create new token + let name = "A NAME".to_string(); + let total_supply = 37; let subcommand = TokenProgramAgnosticSubcommand::New { definition_account_id: format_public_account_id(&definition_account_id.to_string()), supply_account_id: format_public_account_id(&supply_account_id.to_string()), - name: "A NAME".to_string(), - total_supply: 37, + name: name.clone(), + total_supply, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; @@ -76,16 +79,16 @@ async fn create_and_transfer_public_token() -> Result<()> { .get_account(definition_account_id.to_string()) .await? .account; + let token_definition = TokenDefinition::try_from(&definition_acc.data)?; assert_eq!(definition_acc.program_owner, Program::token().id()); - // The data of a token definition account has the following layout: - // [ 0x00 || name (6 bytes) || total supply (little endian 16 bytes) || metadata id (32 bytes)] assert_eq!( - definition_acc.data.as_ref(), - &[ - 0, 65, 32, 78, 65, 77, 69, 37, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 - ] + token_definition, + TokenDefinition::Fungible { + name: name.clone(), + total_supply, + metadata_id: None + } ); // Check the status of the token holding account with the total supply @@ -97,24 +100,23 @@ async fn create_and_transfer_public_token() -> Result<()> { // The account must be owned by the token program assert_eq!(supply_acc.program_owner, Program::token().id()); - // The data of a token holding account has the following layout: - // [ 0x01 || corresponding_token_definition_id (32 bytes) || balance (little endian 16 bytes) ] - // First byte of the data equal to 1 means it's a token holding account - assert_eq!(supply_acc.data.as_ref()[0], 1); - // Bytes from 1 to 33 represent the id of the token this account is associated with + let token_holding = TokenHolding::try_from(&supply_acc.data)?; assert_eq!( - &supply_acc.data.as_ref()[1..33], - definition_account_id.to_bytes() + token_holding, + TokenHolding::Fungible { + definition_id: definition_account_id, + balance: total_supply + } ); - assert_eq!(u128::from_le_bytes(supply_acc.data[33..].try_into()?), 37); // Transfer 7 tokens from supply_acc to recipient_account_id + let transfer_amount = 7; let subcommand = TokenProgramAgnosticSubcommand::Send { from: format_public_account_id(&supply_account_id.to_string()), to: Some(format_public_account_id(&recipient_account_id.to_string())), to_npk: None, - to_vpk: None, - amount: 7, + to_ipk: None, + amount: transfer_amount, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; @@ -129,9 +131,14 @@ async fn create_and_transfer_public_token() -> Result<()> { .await? 
.account; assert_eq!(supply_acc.program_owner, Program::token().id()); - assert_eq!(supply_acc.data[0], 1); - assert_eq!(&supply_acc.data[1..33], definition_account_id.to_bytes()); - assert_eq!(u128::from_le_bytes(supply_acc.data[33..].try_into()?), 30); + let token_holding = TokenHolding::try_from(&supply_acc.data)?; + assert_eq!( + token_holding, + TokenHolding::Fungible { + definition_id: definition_account_id, + balance: total_supply - transfer_amount + } + ); // Check the status of the recipient account after transfer let recipient_acc = ctx @@ -140,15 +147,21 @@ async fn create_and_transfer_public_token() -> Result<()> { .await? .account; assert_eq!(recipient_acc.program_owner, Program::token().id()); - assert_eq!(recipient_acc.data[0], 1); - assert_eq!(&recipient_acc.data[1..33], definition_account_id.to_bytes()); - assert_eq!(u128::from_le_bytes(recipient_acc.data[33..].try_into()?), 7); + let token_holding = TokenHolding::try_from(&recipient_acc.data)?; + assert_eq!( + token_holding, + TokenHolding::Fungible { + definition_id: definition_account_id, + balance: transfer_amount + } + ); // Burn 3 tokens from recipient_acc + let burn_amount = 3; let subcommand = TokenProgramAgnosticSubcommand::Burn { definition: format_public_account_id(&definition_account_id.to_string()), holder: format_public_account_id(&recipient_account_id.to_string()), - amount: 3, + amount: burn_amount, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; @@ -162,13 +175,15 @@ async fn create_and_transfer_public_token() -> Result<()> { .get_account(definition_account_id.to_string()) .await? .account; + let token_definition = TokenDefinition::try_from(&definition_acc.data)?; assert_eq!( - definition_acc.data.as_ref(), - &[ - 0, 65, 32, 78, 65, 77, 69, 34, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 - ] + token_definition, + TokenDefinition::Fungible { + name: name.clone(), + total_supply: total_supply - burn_amount, + metadata_id: None + } ); // Check the status of the recipient account after burn @@ -177,16 +192,24 @@ async fn create_and_transfer_public_token() -> Result<()> { .get_account(recipient_account_id.to_string()) .await? .account; + let token_holding = TokenHolding::try_from(&recipient_acc.data)?; - assert_eq!(u128::from_le_bytes(recipient_acc.data[33..].try_into()?), 4); + assert_eq!( + token_holding, + TokenHolding::Fungible { + definition_id: definition_account_id, + balance: transfer_amount - burn_amount + } + ); // Mint 10 tokens at recipient_acc + let mint_amount = 10; let subcommand = TokenProgramAgnosticSubcommand::Mint { definition: format_public_account_id(&definition_account_id.to_string()), holder: Some(format_public_account_id(&recipient_account_id.to_string())), holder_npk: None, - holder_vpk: None, - amount: 10, + holder_ipk: None, + amount: mint_amount, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; @@ -200,13 +223,15 @@ async fn create_and_transfer_public_token() -> Result<()> { .get_account(definition_account_id.to_string()) .await? 
.account; + let token_definition = TokenDefinition::try_from(&definition_acc.data)?; assert_eq!( - definition_acc.data.as_ref(), - &[ - 0, 65, 32, 78, 65, 77, 69, 44, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 - ] + token_definition, + TokenDefinition::Fungible { + name, + total_supply: total_supply - burn_amount + mint_amount, + metadata_id: None + } ); // Check the status of the recipient account after mint @@ -215,10 +240,14 @@ async fn create_and_transfer_public_token() -> Result<()> { .get_account(recipient_account_id.to_string()) .await? .account; + let token_holding = TokenHolding::try_from(&recipient_acc.data)?; assert_eq!( - u128::from_le_bytes(recipient_acc.data[33..].try_into()?), - 14 + token_holding, + TokenHolding::Fungible { + definition_id: definition_account_id, + balance: transfer_amount - burn_amount + mint_amount + } ); info!("Successfully created and transferred public token"); @@ -270,11 +299,13 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> { }; // Create new token + let name = "A NAME".to_string(); + let total_supply = 37; let subcommand = TokenProgramAgnosticSubcommand::New { definition_account_id: format_public_account_id(&definition_account_id.to_string()), supply_account_id: format_private_account_id(&supply_account_id.to_string()), - name: "A NAME".to_string(), - total_supply: 37, + name: name.clone(), + total_supply, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; @@ -288,14 +319,16 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> { .get_account(definition_account_id.to_string()) .await? .account; + let token_definition = TokenDefinition::try_from(&definition_acc.data)?; assert_eq!(definition_acc.program_owner, Program::token().id()); assert_eq!( - definition_acc.data.as_ref(), - &[ - 0, 65, 32, 78, 65, 77, 69, 37, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 - ] + token_definition, + TokenDefinition::Fungible { + name: name.clone(), + total_supply, + metadata_id: None + } ); let new_commitment1 = ctx @@ -305,12 +338,13 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> { assert!(verify_commitment_is_in_state(new_commitment1, ctx.sequencer_client()).await); // Transfer 7 tokens from supply_acc to recipient_account_id + let transfer_amount = 7; let subcommand = TokenProgramAgnosticSubcommand::Send { from: format_private_account_id(&supply_account_id.to_string()), to: Some(format_private_account_id(&recipient_account_id.to_string())), to_npk: None, - to_vpk: None, - amount: 7, + to_ipk: None, + amount: transfer_amount, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; @@ -331,10 +365,11 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> { assert!(verify_commitment_is_in_state(new_commitment2, ctx.sequencer_client()).await); // Burn 3 tokens from recipient_acc + let burn_amount = 3; let subcommand = TokenProgramAgnosticSubcommand::Burn { definition: format_public_account_id(&definition_account_id.to_string()), holder: format_private_account_id(&recipient_account_id.to_string()), - amount: 3, + amount: burn_amount, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; @@ -348,13 +383,15 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> { 
.get_account(definition_account_id.to_string()) .await? .account; + let token_definition = TokenDefinition::try_from(&definition_acc.data)?; assert_eq!( - definition_acc.data.as_ref(), - &[ - 0, 65, 32, 78, 65, 77, 69, 34, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 - ] + token_definition, + TokenDefinition::Fungible { + name, + total_supply: total_supply - burn_amount, + metadata_id: None + } ); let new_commitment2 = ctx @@ -368,10 +405,14 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> { .wallet() .get_account_private(&recipient_account_id) .context("Failed to get recipient account")?; + let token_holding = TokenHolding::try_from(&recipient_acc.data)?; assert_eq!( - u128::from_le_bytes(recipient_acc.data[33..].try_into()?), - 4 // 7 - 3 + token_holding, + TokenHolding::Fungible { + definition_id: definition_account_id, + balance: transfer_amount - burn_amount + } ); info!("Successfully created and transferred token with private supply"); @@ -414,11 +455,13 @@ async fn create_token_with_private_definition() -> Result<()> { }; // Create token with private definition + let name = "A NAME".to_string(); + let total_supply = 37; let subcommand = TokenProgramAgnosticSubcommand::New { definition_account_id: format_private_account_id(&definition_account_id.to_string()), supply_account_id: format_public_account_id(&supply_account_id.to_string()), - name: "A NAME".to_string(), - total_supply: 37, + name: name.clone(), + total_supply, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; @@ -441,8 +484,14 @@ async fn create_token_with_private_definition() -> Result<()> { .account; assert_eq!(supply_acc.program_owner, Program::token().id()); - assert_eq!(supply_acc.data.as_ref()[0], 1); - assert_eq!(u128::from_le_bytes(supply_acc.data[33..].try_into()?), 37); + let token_holding = TokenHolding::try_from(&supply_acc.data)?; + assert_eq!( + token_holding, + TokenHolding::Fungible { + definition_id: definition_account_id, + balance: total_supply + } + ); // Create private recipient account let result = wallet::cli::execute_subcommand( @@ -471,14 +520,15 @@ async fn create_token_with_private_definition() -> Result<()> { }; // Mint to public account + let mint_amount_public = 10; let subcommand = TokenProgramAgnosticSubcommand::Mint { definition: format_private_account_id(&definition_account_id.to_string()), holder: Some(format_public_account_id( &recipient_account_id_public.to_string(), )), holder_npk: None, - holder_vpk: None, - amount: 10, + holder_ipk: None, + amount: mint_amount_public, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; @@ -491,10 +541,15 @@ async fn create_token_with_private_definition() -> Result<()> { .wallet() .get_account_private(&definition_account_id) .context("Failed to get definition account")?; + let token_definition = TokenDefinition::try_from(&definition_acc.data)?; assert_eq!( - u128::from_le_bytes(definition_acc.data[7..23].try_into()?), - 47 // 37 + 10 + token_definition, + TokenDefinition::Fungible { + name: name.clone(), + total_supply: total_supply + mint_amount_public, + metadata_id: None + } ); // Verify public recipient received tokens @@ -503,21 +558,26 @@ async fn create_token_with_private_definition() -> Result<()> { .get_account(recipient_account_id_public.to_string()) .await? 
.account; + let token_holding = TokenHolding::try_from(&recipient_acc.data)?; assert_eq!( - u128::from_le_bytes(recipient_acc.data[33..].try_into()?), - 10 + token_holding, + TokenHolding::Fungible { + definition_id: definition_account_id, + balance: mint_amount_public + } ); // Mint to private account + let mint_amount_private = 5; let subcommand = TokenProgramAgnosticSubcommand::Mint { definition: format_private_account_id(&definition_account_id.to_string()), holder: Some(format_private_account_id( &recipient_account_id_private.to_string(), )), holder_npk: None, - holder_vpk: None, - amount: 5, + holder_ipk: None, + amount: mint_amount_private, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; @@ -537,10 +597,14 @@ async fn create_token_with_private_definition() -> Result<()> { .wallet() .get_account_private(&recipient_account_id_private) .context("Failed to get private recipient account")?; + let token_holding = TokenHolding::try_from(&recipient_acc_private.data)?; assert_eq!( - u128::from_le_bytes(recipient_acc_private.data[33..].try_into()?), - 5 + token_holding, + TokenHolding::Fungible { + definition_id: definition_account_id, + balance: mint_amount_private + } ); info!("Successfully created token with private definition and minted to both account types"); @@ -579,11 +643,13 @@ async fn create_token_with_private_definition_and_supply() -> Result<()> { }; // Create token with both private definition and supply + let name = "A NAME".to_string(); + let total_supply = 37; let subcommand = TokenProgramAgnosticSubcommand::New { definition_account_id: format_private_account_id(&definition_account_id.to_string()), supply_account_id: format_private_account_id(&supply_account_id.to_string()), - name: "A NAME".to_string(), - total_supply: 37, + name, + total_supply, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; @@ -610,8 +676,15 @@ async fn create_token_with_private_definition_and_supply() -> Result<()> { .wallet() .get_account_private(&supply_account_id) .context("Failed to get supply account")?; + let token_holding = TokenHolding::try_from(&supply_acc.data)?; - assert_eq!(u128::from_le_bytes(supply_acc.data[33..].try_into()?), 37); + assert_eq!( + token_holding, + TokenHolding::Fungible { + definition_id: definition_account_id, + balance: total_supply + } + ); // Create recipient account let result = wallet::cli::execute_subcommand( @@ -627,12 +700,13 @@ async fn create_token_with_private_definition_and_supply() -> Result<()> { }; // Transfer tokens + let transfer_amount = 7; let subcommand = TokenProgramAgnosticSubcommand::Send { from: format_private_account_id(&supply_account_id.to_string()), to: Some(format_private_account_id(&recipient_account_id.to_string())), to_npk: None, - to_vpk: None, - amount: 7, + to_ipk: None, + amount: transfer_amount, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; @@ -658,13 +732,27 @@ async fn create_token_with_private_definition_and_supply() -> Result<()> { .wallet() .get_account_private(&supply_account_id) .context("Failed to get supply account")?; - assert_eq!(u128::from_le_bytes(supply_acc.data[33..].try_into()?), 30); + let token_holding = TokenHolding::try_from(&supply_acc.data)?; + assert_eq!( + token_holding, + TokenHolding::Fungible { + definition_id: definition_account_id, + balance: total_supply - transfer_amount + } + ); let recipient_acc = ctx .wallet() .get_account_private(&recipient_account_id) .context("Failed to get 
recipient account")?; - assert_eq!(u128::from_le_bytes(recipient_acc.data[33..].try_into()?), 7); + let token_holding = TokenHolding::try_from(&recipient_acc.data)?; + assert_eq!( + token_holding, + TokenHolding::Fungible { + definition_id: definition_account_id, + balance: transfer_amount + } + ); info!("Successfully created and transferred token with both private definition and supply"); @@ -715,11 +803,13 @@ async fn shielded_token_transfer() -> Result<()> { }; // Create token + let name = "A NAME".to_string(); + let total_supply = 37; let subcommand = TokenProgramAgnosticSubcommand::New { definition_account_id: format_public_account_id(&definition_account_id.to_string()), supply_account_id: format_public_account_id(&supply_account_id.to_string()), - name: "A NAME".to_string(), - total_supply: 37, + name, + total_supply, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; @@ -728,12 +818,13 @@ async fn shielded_token_transfer() -> Result<()> { tokio::time::sleep(Duration::from_secs(TIME_TO_WAIT_FOR_BLOCK_SECONDS)).await; // Perform shielded transfer: public supply -> private recipient + let transfer_amount = 7; let subcommand = TokenProgramAgnosticSubcommand::Send { from: format_public_account_id(&supply_account_id.to_string()), to: Some(format_private_account_id(&recipient_account_id.to_string())), to_npk: None, - to_vpk: None, - amount: 7, + to_ipk: None, + amount: transfer_amount, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; @@ -747,7 +838,14 @@ async fn shielded_token_transfer() -> Result<()> { .get_account(supply_account_id.to_string()) .await? .account; - assert_eq!(u128::from_le_bytes(supply_acc.data[33..].try_into()?), 30); + let token_holding = TokenHolding::try_from(&supply_acc.data)?; + assert_eq!( + token_holding, + TokenHolding::Fungible { + definition_id: definition_account_id, + balance: total_supply - transfer_amount + } + ); // Verify recipient commitment exists let new_commitment = ctx @@ -761,7 +859,14 @@ async fn shielded_token_transfer() -> Result<()> { .wallet() .get_account_private(&recipient_account_id) .context("Failed to get recipient account")?; - assert_eq!(u128::from_le_bytes(recipient_acc.data[33..].try_into()?), 7); + let token_holding = TokenHolding::try_from(&recipient_acc.data)?; + assert_eq!( + token_holding, + TokenHolding::Fungible { + definition_id: definition_account_id, + balance: transfer_amount + } + ); info!("Successfully performed shielded token transfer"); @@ -812,11 +917,13 @@ async fn deshielded_token_transfer() -> Result<()> { }; // Create token with private supply + let name = "A NAME".to_string(); + let total_supply = 37; let subcommand = TokenProgramAgnosticSubcommand::New { definition_account_id: format_public_account_id(&definition_account_id.to_string()), supply_account_id: format_private_account_id(&supply_account_id.to_string()), - name: "A NAME".to_string(), - total_supply: 37, + name, + total_supply, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; @@ -825,12 +932,13 @@ async fn deshielded_token_transfer() -> Result<()> { tokio::time::sleep(Duration::from_secs(TIME_TO_WAIT_FOR_BLOCK_SECONDS)).await; // Perform deshielded transfer: private supply -> public recipient + let transfer_amount = 7; let subcommand = TokenProgramAgnosticSubcommand::Send { from: format_private_account_id(&supply_account_id.to_string()), to: Some(format_public_account_id(&recipient_account_id.to_string())), to_npk: None, - to_vpk: None, - 
amount: 7, + to_ipk: None, + amount: transfer_amount, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; @@ -850,7 +958,14 @@ async fn deshielded_token_transfer() -> Result<()> { .wallet() .get_account_private(&supply_account_id) .context("Failed to get supply account")?; - assert_eq!(u128::from_le_bytes(supply_acc.data[33..].try_into()?), 30); + let token_holding = TokenHolding::try_from(&supply_acc.data)?; + assert_eq!( + token_holding, + TokenHolding::Fungible { + definition_id: definition_account_id, + balance: total_supply - transfer_amount + } + ); // Verify recipient balance let recipient_acc = ctx @@ -858,7 +973,14 @@ async fn deshielded_token_transfer() -> Result<()> { .get_account(recipient_account_id.to_string()) .await? .account; - assert_eq!(u128::from_le_bytes(recipient_acc.data[33..].try_into()?), 7); + let token_holding = TokenHolding::try_from(&recipient_acc.data)?; + assert_eq!( + token_holding, + TokenHolding::Fungible { + definition_id: definition_account_id, + balance: transfer_amount + } + ); info!("Successfully performed deshielded token transfer"); @@ -896,11 +1018,13 @@ async fn token_claiming_path_with_private_accounts() -> Result<()> { }; // Create token + let name = "A NAME".to_string(); + let total_supply = 37; let subcommand = TokenProgramAgnosticSubcommand::New { definition_account_id: format_private_account_id(&definition_account_id.to_string()), supply_account_id: format_private_account_id(&supply_account_id.to_string()), - name: "A NAME".to_string(), - total_supply: 37, + name, + total_supply, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; @@ -931,12 +1055,13 @@ async fn token_claiming_path_with_private_accounts() -> Result<()> { .context("Failed to get private account keys")?; // Mint using claiming path (foreign account) + let mint_amount = 9; let subcommand = TokenProgramAgnosticSubcommand::Mint { definition: format_private_account_id(&definition_account_id.to_string()), holder: None, holder_npk: Some(hex::encode(holder_keys.nullifer_public_key.0)), - holder_vpk: Some(hex::encode(holder_keys.viewing_public_key.0)), - amount: 9, + holder_ipk: Some(hex::encode(holder_keys.incoming_viewing_public_key.0)), + amount: mint_amount, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; @@ -960,7 +1085,14 @@ async fn token_claiming_path_with_private_accounts() -> Result<()> { .wallet() .get_account_private(&recipient_account_id) .context("Failed to get recipient account")?; - assert_eq!(u128::from_le_bytes(recipient_acc.data[33..].try_into()?), 9); + let token_holding = TokenHolding::try_from(&recipient_acc.data)?; + assert_eq!( + token_holding, + TokenHolding::Fungible { + definition_id: definition_account_id, + balance: mint_amount + } + ); info!("Successfully minted tokens using claiming path"); diff --git a/integration_tests/tests/tps.rs b/integration_tests/tests/tps.rs index b3b7e4b7..70b922bf 100644 --- a/integration_tests/tests/tps.rs +++ b/integration_tests/tests/tps.rs @@ -25,7 +25,11 @@ pub async fn tps_test() -> Result<()> { let target_tps = 12; let tps_test = TpsTestManager::new(target_tps, num_transactions); - let ctx = TestContext::new_with_sequencer_config(tps_test.generate_sequencer_config()).await?; + let ctx = TestContext::new_with_sequencer_and_maybe_indexer_configs( + tps_test.generate_sequencer_config(), + None, + ) + .await?; let target_time = tps_test.target_time(); info!( @@ -186,6 +190,7 @@ impl TpsTestManager { 
initial_commitments: vec![initial_commitment], signing_key: [37; 32], bedrock_config: None, + retry_pending_blocks_timeout_millis: 1000 * 60 * 4, } } } diff --git a/integration_tests/tests/wallet_ffi.rs b/integration_tests/tests/wallet_ffi.rs new file mode 100644 index 00000000..bb6b9805 --- /dev/null +++ b/integration_tests/tests/wallet_ffi.rs @@ -0,0 +1,618 @@ +use std::{ + collections::HashSet, + ffi::{CStr, CString, c_char}, + io::Write, + time::Duration, +}; + +use anyhow::Result; +use integration_tests::{ + ACC_RECEIVER, ACC_SENDER, ACC_SENDER_PRIVATE, BlockingTestContext, + TIME_TO_WAIT_FOR_BLOCK_SECONDS, +}; +use log::info; +use nssa::{Account, AccountId, PublicKey, program::Program}; +use nssa_core::program::DEFAULT_PROGRAM_ID; +use tempfile::tempdir; +use wallet::WalletCore; +use wallet_ffi::{ + FfiAccount, FfiAccountList, FfiBytes32, FfiPrivateAccountKeys, FfiPublicAccountKey, + FfiTransferResult, WalletHandle, error, +}; + +unsafe extern "C" { + fn wallet_ffi_create_new( + config_path: *const c_char, + storage_path: *const c_char, + password: *const c_char, + ) -> *mut WalletHandle; + + fn wallet_ffi_destroy(handle: *mut WalletHandle); + + fn wallet_ffi_create_account_public( + handle: *mut WalletHandle, + out_account_id: *mut FfiBytes32, + ) -> error::WalletFfiError; + + fn wallet_ffi_create_account_private( + handle: *mut WalletHandle, + out_account_id: *mut FfiBytes32, + ) -> error::WalletFfiError; + + fn wallet_ffi_list_accounts( + handle: *mut WalletHandle, + out_list: *mut FfiAccountList, + ) -> error::WalletFfiError; + + fn wallet_ffi_free_account_list(list: *mut FfiAccountList); + + fn wallet_ffi_get_balance( + handle: *mut WalletHandle, + account_id: *const FfiBytes32, + is_public: bool, + out_balance: *mut [u8; 16], + ) -> error::WalletFfiError; + + fn wallet_ffi_get_account_public( + handle: *mut WalletHandle, + account_id: *const FfiBytes32, + out_account: *mut FfiAccount, + ) -> error::WalletFfiError; + + fn wallet_ffi_free_account_data(account: *mut FfiAccount); + + fn wallet_ffi_get_public_account_key( + handle: *mut WalletHandle, + account_id: *const FfiBytes32, + out_public_key: *mut FfiPublicAccountKey, + ) -> error::WalletFfiError; + + fn wallet_ffi_get_private_account_keys( + handle: *mut WalletHandle, + account_id: *const FfiBytes32, + out_keys: *mut FfiPrivateAccountKeys, + ) -> error::WalletFfiError; + + fn wallet_ffi_free_private_account_keys(keys: *mut FfiPrivateAccountKeys); + + fn wallet_ffi_account_id_to_base58(account_id: *const FfiBytes32) -> *mut std::ffi::c_char; + + fn wallet_ffi_free_string(ptr: *mut c_char); + + fn wallet_ffi_account_id_from_base58( + base58_str: *const std::ffi::c_char, + out_account_id: *mut FfiBytes32, + ) -> error::WalletFfiError; + + fn wallet_ffi_transfer_public( + handle: *mut WalletHandle, + from: *const FfiBytes32, + to: *const FfiBytes32, + amount: *const [u8; 16], + out_result: *mut FfiTransferResult, + ) -> error::WalletFfiError; + + fn wallet_ffi_free_transfer_result(result: *mut FfiTransferResult); + + fn wallet_ffi_register_public_account( + handle: *mut WalletHandle, + account_id: *const FfiBytes32, + out_result: *mut FfiTransferResult, + ) -> error::WalletFfiError; +} + +fn new_wallet_ffi_with_test_context_config(ctx: &BlockingTestContext) -> *mut WalletHandle { + let tempdir = tempfile::tempdir().unwrap(); + let config_path = tempdir.path().join("wallet_config.json"); + let storage_path = tempdir.path().join("storage.json"); + let mut config = ctx.ctx.wallet().config().to_owned(); + if let 
Some(config_overrides) = ctx.ctx.wallet().config_overrides().clone() { + config.apply_overrides(config_overrides); + } + let mut file = std::fs::OpenOptions::new() + .write(true) + .create(true) + .truncate(true) + .open(&config_path) + .unwrap(); + + let config_with_overrides_serialized = serde_json::to_vec_pretty(&config).unwrap(); + + file.write_all(&config_with_overrides_serialized).unwrap(); + + let config_path = CString::new(config_path.to_str().unwrap()).unwrap(); + let storage_path = CString::new(storage_path.to_str().unwrap()).unwrap(); + let password = CString::new(ctx.ctx.wallet_password()).unwrap(); + + unsafe { + wallet_ffi_create_new( + config_path.as_ptr(), + storage_path.as_ptr(), + password.as_ptr(), + ) + } +} + +fn new_wallet_ffi_with_default_config(password: &str) -> *mut WalletHandle { + let tempdir = tempdir().unwrap(); + let config_path = tempdir.path().join("wallet_config.json"); + let storage_path = tempdir.path().join("storage.json"); + let config_path_c = CString::new(config_path.to_str().unwrap()).unwrap(); + let storage_path_c = CString::new(storage_path.to_str().unwrap()).unwrap(); + let password = CString::new(password).unwrap(); + + unsafe { + wallet_ffi_create_new( + config_path_c.as_ptr(), + storage_path_c.as_ptr(), + password.as_ptr(), + ) + } +} + +fn new_wallet_rust_with_default_config(password: &str) -> WalletCore { + let tempdir = tempdir().unwrap(); + let config_path = tempdir.path().join("wallet_config.json"); + let storage_path = tempdir.path().join("storage.json"); + + WalletCore::new_init_storage( + config_path.to_path_buf(), + storage_path.to_path_buf(), + None, + password.to_string(), + ) + .unwrap() +} + +#[test] +fn test_wallet_ffi_create_public_accounts() { + let password = "password_for_tests"; + let n_accounts = 10; + // First `n_accounts` public accounts created with Rust wallet + let new_public_account_ids_rust = { + let mut account_ids = Vec::new(); + + let mut wallet_rust = new_wallet_rust_with_default_config(password); + for _ in 0..n_accounts { + let account_id = wallet_rust.create_new_account_public(None).0; + account_ids.push(*account_id.value()); + } + account_ids + }; + + // First `n_accounts` public accounts created with wallet FFI + let new_public_account_ids_ffi = unsafe { + let mut account_ids = Vec::new(); + + let wallet_ffi_handle = new_wallet_ffi_with_default_config(password); + for _ in 0..n_accounts { + let mut out_account_id = FfiBytes32::from_bytes([0; 32]); + wallet_ffi_create_account_public( + wallet_ffi_handle, + (&mut out_account_id) as *mut FfiBytes32, + ); + account_ids.push(out_account_id.data); + } + wallet_ffi_destroy(wallet_ffi_handle); + account_ids + }; + + assert_eq!(new_public_account_ids_ffi, new_public_account_ids_rust); +} + +#[test] +fn test_wallet_ffi_create_private_accounts() { + let password = "password_for_tests"; + let n_accounts = 10; + // First `n_accounts` private accounts created with Rust wallet + let new_private_account_ids_rust = { + let mut account_ids = Vec::new(); + + let mut wallet_rust = new_wallet_rust_with_default_config(password); + for _ in 0..n_accounts { + let account_id = wallet_rust.create_new_account_private(None).0; + account_ids.push(*account_id.value()); + } + account_ids + }; + + // First `n_accounts` private accounts created with wallet FFI + let new_private_account_ids_ffi = unsafe { + let mut account_ids = Vec::new(); + + let wallet_ffi_handle = new_wallet_ffi_with_default_config(password); + for _ in 0..n_accounts { + let mut out_account_id = 
FfiBytes32::from_bytes([0; 32]); + wallet_ffi_create_account_private( + wallet_ffi_handle, + (&mut out_account_id) as *mut FfiBytes32, + ); + account_ids.push(out_account_id.data); + } + wallet_ffi_destroy(wallet_ffi_handle); + account_ids + }; + + assert_eq!(new_private_account_ids_ffi, new_private_account_ids_rust) +} + +#[test] +fn test_wallet_ffi_list_accounts() { + let password = "password_for_tests"; + + // Create the wallet FFI + let wallet_ffi_handle = unsafe { + let handle = new_wallet_ffi_with_default_config(password); + // Create 5 public accounts and 5 private accounts + for _ in 0..5 { + let mut out_account_id = FfiBytes32::from_bytes([0; 32]); + wallet_ffi_create_account_public(handle, (&mut out_account_id) as *mut FfiBytes32); + wallet_ffi_create_account_private(handle, (&mut out_account_id) as *mut FfiBytes32); + } + + handle + }; + + // Create the wallet Rust + let wallet_rust = { + let mut wallet = new_wallet_rust_with_default_config(password); + // Create 5 public accounts and 5 private accounts + for _ in 0..5 { + wallet.create_new_account_public(None); + wallet.create_new_account_private(None); + } + wallet + }; + + // Get the account list with FFI method + let mut wallet_ffi_account_list = unsafe { + let mut out_list = FfiAccountList::default(); + wallet_ffi_list_accounts(wallet_ffi_handle, (&mut out_list) as *mut FfiAccountList); + out_list + }; + + let wallet_rust_account_ids = wallet_rust + .storage() + .user_data + .account_ids() + .collect::<Vec<_>>(); + + // Assert same number of elements between Rust and FFI result + assert_eq!(wallet_rust_account_ids.len(), wallet_ffi_account_list.count); + + let wallet_ffi_account_list_slice = unsafe { + core::slice::from_raw_parts( + wallet_ffi_account_list.entries, + wallet_ffi_account_list.count, + ) + }; + + // Assert same account ids between Rust and FFI result + assert_eq!( + wallet_rust_account_ids + .iter() + .map(|id| id.value()) + .collect::<HashSet<_>>(), + wallet_ffi_account_list_slice + .iter() + .map(|entry| &entry.account_id.data) + .collect::<HashSet<_>>() + ); + + // Assert `is_pub` flag is correct in the FFI result + for entry in wallet_ffi_account_list_slice.iter() { + let account_id = AccountId::new(entry.account_id.data); + let is_pub_default_in_rust_wallet = wallet_rust + .storage() + .user_data + .default_pub_account_signing_keys + .contains_key(&account_id); + let is_pub_key_tree_wallet_rust = wallet_rust + .storage() + .user_data + .public_key_tree + .account_id_map + .contains_key(&account_id); + + let is_public_in_rust_wallet = is_pub_default_in_rust_wallet || is_pub_key_tree_wallet_rust; + + assert_eq!(entry.is_public, is_public_in_rust_wallet); + } + + unsafe { + wallet_ffi_free_account_list((&mut wallet_ffi_account_list) as *mut FfiAccountList); + wallet_ffi_destroy(wallet_ffi_handle); + } +} + +#[test] +fn test_wallet_ffi_get_balance_public() -> Result<()> { + let ctx = BlockingTestContext::new()?; + let account_id: AccountId = ACC_SENDER.parse().unwrap(); + let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx); + + let balance = unsafe { + let mut out_balance: [u8; 16] = [0; 16]; + let ffi_account_id = FfiBytes32::from(&account_id); + let _result = wallet_ffi_get_balance( + wallet_ffi_handle, + (&ffi_account_id) as *const FfiBytes32, + true, + (&mut out_balance) as *mut [u8; 16], + ); + u128::from_le_bytes(out_balance) + }; + assert_eq!(balance, 10000); + + info!("Successfully retrieved account balance"); + + unsafe { + wallet_ffi_destroy(wallet_ffi_handle); + } + + Ok(()) +} + 
+#[test] +fn test_wallet_ffi_get_account_public() -> Result<()> { + let ctx = BlockingTestContext::new()?; + let account_id: AccountId = ACC_SENDER.parse().unwrap(); + let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx); + let mut out_account = FfiAccount::default(); + + let account: Account = unsafe { + let ffi_account_id = FfiBytes32::from(&account_id); + let _result = wallet_ffi_get_account_public( + wallet_ffi_handle, + (&ffi_account_id) as *const FfiBytes32, + (&mut out_account) as *mut FfiAccount, + ); + (&out_account).try_into().unwrap() + }; + + assert_eq!( + account.program_owner, + Program::authenticated_transfer_program().id() + ); + assert_eq!(account.balance, 10000); + assert!(account.data.is_empty()); + assert_eq!(account.nonce, 0); + + unsafe { + wallet_ffi_free_account_data((&mut out_account) as *mut FfiAccount); + wallet_ffi_destroy(wallet_ffi_handle); + } + + info!("Successfully retrieved account with correct details"); + + Ok(()) +} + +#[test] +fn test_wallet_ffi_get_public_account_keys() -> Result<()> { + let ctx = BlockingTestContext::new()?; + let account_id: AccountId = ACC_SENDER.parse().unwrap(); + let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx); + let mut out_key = FfiPublicAccountKey::default(); + + let key: PublicKey = unsafe { + let ffi_account_id = FfiBytes32::from(&account_id); + let _result = wallet_ffi_get_public_account_key( + wallet_ffi_handle, + (&ffi_account_id) as *const FfiBytes32, + (&mut out_key) as *mut FfiPublicAccountKey, + ); + (&out_key).try_into().unwrap() + }; + + let expected_key = { + let private_key = ctx + .ctx + .wallet() + .get_account_public_signing_key(&account_id) + .unwrap(); + PublicKey::new_from_private_key(private_key) + }; + + assert_eq!(key, expected_key); + + info!("Successfully retrieved account key"); + + unsafe { + wallet_ffi_destroy(wallet_ffi_handle); + } + + Ok(()) +} + +#[test] +fn test_wallet_ffi_get_private_account_keys() -> Result<()> { + let ctx = BlockingTestContext::new()?; + let account_id: AccountId = ACC_SENDER_PRIVATE.parse().unwrap(); + let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx); + let mut keys = FfiPrivateAccountKeys::default(); + + unsafe { + let ffi_account_id = FfiBytes32::from(&account_id); + let _result = wallet_ffi_get_private_account_keys( + wallet_ffi_handle, + (&ffi_account_id) as *const FfiBytes32, + (&mut keys) as *mut FfiPrivateAccountKeys, + ); + }; + + let key_chain = &ctx + .ctx + .wallet() + .storage() + .user_data + .get_private_account(&account_id) + .unwrap() + .0; + + let expected_npk = &key_chain.nullifer_public_key; + let expected_ivk = &key_chain.incoming_viewing_public_key; + + assert_eq!(&keys.npk(), expected_npk); + assert_eq!(&keys.ivk().unwrap(), expected_ivk); + + unsafe { + wallet_ffi_free_private_account_keys((&mut keys) as *mut FfiPrivateAccountKeys); + wallet_ffi_destroy(wallet_ffi_handle); + } + + info!("Successfully retrieved account keys"); + + Ok(()) +} + +#[test] +fn test_wallet_ffi_account_id_to_base58() { + let account_id_str = ACC_SENDER; + let account_id: AccountId = account_id_str.parse().unwrap(); + let ffi_bytes: FfiBytes32 = (&account_id).into(); + let ptr = unsafe { wallet_ffi_account_id_to_base58((&ffi_bytes) as *const FfiBytes32) }; + + let ffi_result = unsafe { CStr::from_ptr(ptr).to_str().unwrap() }; + + assert_eq!(account_id_str, ffi_result); + + unsafe { + wallet_ffi_free_string(ptr); + } +} + +#[test] +fn test_wallet_ffi_base58_to_account_id() { + let account_id_str = ACC_SENDER; + let 
account_id_c_str = CString::new(account_id_str).unwrap(); + let account_id: AccountId = unsafe { + let mut out_account_id_bytes = FfiBytes32::default(); + wallet_ffi_account_id_from_base58( + account_id_c_str.as_ptr(), + (&mut out_account_id_bytes) as *mut FfiBytes32, + ); + out_account_id_bytes.into() + }; + + let expected_account_id = account_id_str.parse().unwrap(); + + assert_eq!(account_id, expected_account_id); +} + +#[test] +fn test_wallet_ffi_init_public_account_auth_transfer() -> Result<()> { + let ctx = BlockingTestContext::new().unwrap(); + let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx); + + // Create a new uninitialized public account + let mut out_account_id = FfiBytes32::from_bytes([0; 32]); + unsafe { + wallet_ffi_create_account_public( + wallet_ffi_handle, + (&mut out_account_id) as *mut FfiBytes32, + ); + } + + // Check its program owner is the default program id + let account: Account = unsafe { + let mut out_account = FfiAccount::default(); + let _result = wallet_ffi_get_account_public( + wallet_ffi_handle, + (&out_account_id) as *const FfiBytes32, + (&mut out_account) as *mut FfiAccount, + ); + (&out_account).try_into().unwrap() + }; + assert_eq!(account.program_owner, DEFAULT_PROGRAM_ID); + + // Call the init function + let mut transfer_result = FfiTransferResult::default(); + unsafe { + wallet_ffi_register_public_account( + wallet_ffi_handle, + (&out_account_id) as *const FfiBytes32, + (&mut transfer_result) as *mut FfiTransferResult, + ); + } + + info!("Waiting for next block creation"); + std::thread::sleep(Duration::from_secs(TIME_TO_WAIT_FOR_BLOCK_SECONDS)); + + // Check that the program owner is now the authenticated transfer program + let account: Account = unsafe { + let mut out_account = FfiAccount::default(); + let _result = wallet_ffi_get_account_public( + wallet_ffi_handle, + (&out_account_id) as *const FfiBytes32, + (&mut out_account) as *mut FfiAccount, + ); + (&out_account).try_into().unwrap() + }; + assert_eq!( + account.program_owner, + Program::authenticated_transfer_program().id() + ); + + unsafe { + wallet_ffi_free_transfer_result((&mut transfer_result) as *mut FfiTransferResult); + wallet_ffi_destroy(wallet_ffi_handle); + } + + Ok(()) +} + +#[test] +fn test_wallet_ffi_transfer_public() -> Result<()> { + let ctx = BlockingTestContext::new().unwrap(); + let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx); + let from: FfiBytes32 = (&ACC_SENDER.parse::<AccountId>().unwrap()).into(); + let to: FfiBytes32 = (&ACC_RECEIVER.parse::<AccountId>().unwrap()).into(); + let amount: [u8; 16] = 100u128.to_le_bytes(); + + let mut transfer_result = FfiTransferResult::default(); + unsafe { + wallet_ffi_transfer_public( + wallet_ffi_handle, + (&from) as *const FfiBytes32, + (&to) as *const FfiBytes32, + (&amount) as *const [u8; 16], + (&mut transfer_result) as *mut FfiTransferResult, + ); + } + + info!("Waiting for next block creation"); + std::thread::sleep(Duration::from_secs(TIME_TO_WAIT_FOR_BLOCK_SECONDS)); + + let from_balance = unsafe { + let mut out_balance: [u8; 16] = [0; 16]; + let _result = wallet_ffi_get_balance( + wallet_ffi_handle, + (&from) as *const FfiBytes32, + true, + (&mut out_balance) as *mut [u8; 16], + ); + u128::from_le_bytes(out_balance) + }; + + let to_balance = unsafe { + let mut out_balance: [u8; 16] = [0; 16]; + let _result = wallet_ffi_get_balance( + wallet_ffi_handle, + (&to) as *const FfiBytes32, + true, + (&mut out_balance) as *mut [u8; 16], + ); + u128::from_le_bytes(out_balance) + }; + +
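+ // The test context funds ACC_SENDER with 10000 (see test_wallet_ffi_get_balance_public above), so after the 100 transfer the sender should hold 9900 and the receiver should end up 100 higher, at 20100.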
assert_eq!(from_balance, 9900); + assert_eq!(to_balance, 20100); + + unsafe { + wallet_ffi_free_transfer_result((&mut transfer_result) as *mut FfiTransferResult); + wallet_ffi_destroy(wallet_ffi_handle); + } + + Ok(()) +} diff --git a/key_protocol/Cargo.toml b/key_protocol/Cargo.toml index 39c1028a..de0a3bf2 100644 --- a/key_protocol/Cargo.toml +++ b/key_protocol/Cargo.toml @@ -2,8 +2,11 @@ name = "key_protocol" version = "0.1.0" edition = "2024" +license = { workspace = true } [dependencies] +secp256k1 = "0.31.1" + nssa.workspace = true nssa_core.workspace = true common.workspace = true @@ -19,4 +22,4 @@ aes-gcm.workspace = true bip39.workspace = true hmac-sha512.workspace = true thiserror.workspace = true -itertools.workspace = true +itertools.workspace = true \ No newline at end of file diff --git a/key_protocol/src/key_management/key_tree/keys_public.rs b/key_protocol/src/key_management/key_tree/keys_public.rs index ddccdfa4..7c5d6e38 100644 --- a/key_protocol/src/key_management/key_tree/keys_public.rs +++ b/key_protocol/src/key_management/key_tree/keys_public.rs @@ -1,3 +1,4 @@ +use secp256k1::Scalar; use serde::{Deserialize, Serialize}; use crate::key_management::key_tree::traits::KeyNode; @@ -11,9 +12,32 @@ pub struct ChildKeysPublic { pub cci: Option<u32>, } +impl ChildKeysPublic { + fn compute_hash_value(&self, cci: u32) -> [u8; 64] { + let mut hash_input = vec![]; + + match ((2u32).pow(31)).cmp(&cci) { + // Non-harden + std::cmp::Ordering::Greater => { + hash_input.extend_from_slice(self.cpk.value()); + hash_input.extend_from_slice(&cci.to_le_bytes()); + + hmac_sha512::HMAC::mac(hash_input, self.ccc) + } + // Harden + _ => { + hash_input.extend_from_slice(self.csk.value()); + hash_input.extend_from_slice(&(cci).to_le_bytes()); + + hmac_sha512::HMAC::mac(hash_input, self.ccc) + } + } + } +} + impl KeyNode for ChildKeysPublic { fn root(seed: [u8; 64]) -> Self { - let hash_value = hmac_sha512::HMAC::mac(seed, "NSSA_master_pub"); + let hash_value = hmac_sha512::HMAC::mac(seed, "LEE_master_pub"); let csk = nssa::PrivateKey::try_new(*hash_value.first_chunk::<32>().unwrap()).unwrap(); let ccc = *hash_value.last_chunk::<32>().unwrap(); @@ -28,21 +52,30 @@ impl KeyNode for ChildKeysPublic { } fn nth_child(&self, cci: u32) -> Self { - let mut hash_input = vec![]; - hash_input.extend_from_slice(self.csk.value()); - hash_input.extend_from_slice(&cci.to_le_bytes()); + let hash_value = self.compute_hash_value(cci); - let hash_value = hmac_sha512::HMAC::mac(&hash_input, self.ccc); - - let csk = nssa::PrivateKey::try_new( + let csk = secp256k1::SecretKey::from_byte_array( *hash_value .first_chunk::<32>() .expect("hash_value is 64 bytes, must be safe to get first 32"), ) .unwrap(); + + let csk = nssa::PrivateKey::try_new( + csk.add_tweak(&Scalar::from_le_bytes(*self.csk.value()).unwrap()) + .expect("Expect a valid Scalar") + .secret_bytes(), + ) + .unwrap(); + + if secp256k1::constants::CURVE_ORDER < *csk.value() { + panic!("Secret key cannot exceed curve order"); + } + let ccc = *hash_value .last_chunk::<32>() .expect("hash_value is 64 bytes, must be safe to get last 32"); + let cpk = nssa::PublicKey::new_from_private_key(&csk); Self { @@ -74,59 +107,152 @@ impl<'a> From<&'a ChildKeysPublic> for &'a nssa::PrivateKey { #[cfg(test)] mod tests { + use nssa::{PrivateKey, PublicKey}; + use super::*; #[test] - fn test_keys_deterministic_generation() { - let root_keys = ChildKeysPublic::root([42; 64]); - let child_keys = root_keys.nth_child(5); + fn test_master_keys_generation() { + let seed = [ + 88, 
189, 37, 237, 199, 125, 151, 226, 69, 153, 165, 113, 191, 69, 188, 221, 9, 34, 173, + 134, 61, 109, 34, 103, 121, 39, 237, 14, 107, 194, 24, 194, 191, 14, 237, 185, 12, 87, + 22, 227, 38, 71, 17, 144, 251, 118, 217, 115, 33, 222, 201, 61, 203, 246, 121, 214, 6, + 187, 148, 92, 44, 253, 210, 37, + ]; + let keys = ChildKeysPublic::root(seed); - assert_eq!(root_keys.cci, None); - assert_eq!(child_keys.cci, Some(5)); + let expected_ccc = [ + 238, 94, 84, 154, 56, 224, 80, 218, 133, 249, 179, 222, 9, 24, 17, 252, 120, 127, 222, + 13, 146, 126, 232, 239, 113, 9, 194, 219, 190, 48, 187, 155, + ]; - assert_eq!( - root_keys.ccc, - [ - 61, 30, 91, 26, 133, 91, 236, 192, 231, 53, 186, 139, 11, 221, 202, 11, 178, 215, - 254, 103, 191, 60, 117, 112, 1, 226, 31, 156, 83, 104, 150, 224 - ] - ); - assert_eq!( - child_keys.ccc, - [ - 67, 26, 102, 68, 189, 155, 102, 80, 199, 188, 112, 142, 207, 157, 36, 210, 48, 224, - 35, 6, 112, 180, 11, 190, 135, 218, 9, 14, 84, 231, 58, 98 - ] + let expected_csk: PrivateKey = PrivateKey::try_new([ + 40, 35, 239, 19, 53, 178, 250, 55, 115, 12, 34, 3, 153, 153, 72, 170, 190, 36, 172, 36, + 202, 148, 181, 228, 35, 222, 58, 84, 156, 24, 146, 86, + ]) + .unwrap(); + let expected_cpk: PublicKey = PublicKey::try_new([ + 219, 141, 130, 105, 11, 203, 187, 124, 112, 75, 223, 22, 11, 164, 153, 127, 59, 247, + 244, 166, 75, 66, 242, 224, 35, 156, 161, 75, 41, 51, 76, 245, + ]) + .unwrap(); + + assert!(expected_ccc == keys.ccc); + assert!(expected_csk == keys.csk); + assert!(expected_cpk == keys.cpk); + } + + #[test] + fn test_harden_child_keys_generation() { + let seed = [ + 88, 189, 37, 237, 199, 125, 151, 226, 69, 153, 165, 113, 191, 69, 188, 221, 9, 34, 173, + 134, 61, 109, 34, 103, 121, 39, 237, 14, 107, 194, 24, 194, 191, 14, 237, 185, 12, 87, + 22, 227, 38, 71, 17, 144, 251, 118, 217, 115, 33, 222, 201, 61, 203, 246, 121, 214, 6, + 187, 148, 92, 44, 253, 210, 37, + ]; + let root_keys = ChildKeysPublic::root(seed); + let cci = (2u32).pow(31) + 13; + let child_keys = ChildKeysPublic::nth_child(&root_keys, cci); + + print!( + "{} {}", + child_keys.csk.value()[0], + child_keys.csk.value()[1] ); - assert_eq!( - root_keys.csk.value(), - &[ - 241, 82, 246, 237, 62, 130, 116, 47, 189, 112, 99, 67, 178, 40, 115, 245, 141, 193, - 77, 164, 243, 76, 222, 64, 50, 146, 23, 145, 91, 164, 92, 116 - ] - ); - assert_eq!( - child_keys.csk.value(), - &[ - 11, 151, 27, 212, 167, 26, 77, 234, 103, 145, 53, 191, 184, 25, 240, 191, 156, 25, - 60, 144, 65, 22, 193, 163, 246, 227, 212, 81, 49, 170, 33, 158 - ] + let expected_ccc = [ + 126, 175, 244, 41, 41, 173, 134, 103, 139, 140, 195, 86, 194, 147, 116, 48, 71, 107, + 253, 235, 114, 139, 60, 115, 226, 205, 215, 248, 240, 190, 196, 6, + ]; + + let expected_csk: PrivateKey = PrivateKey::try_new([ + 128, 148, 53, 165, 222, 155, 163, 108, 186, 182, 124, 67, 90, 86, 59, 123, 95, 224, + 171, 4, 51, 131, 254, 57, 241, 178, 82, 161, 204, 206, 79, 107, + ]) + .unwrap(); + + let expected_cpk: PublicKey = PublicKey::try_new([ + 149, 240, 55, 15, 178, 67, 245, 254, 44, 141, 95, 223, 238, 62, 85, 11, 248, 9, 11, 40, + 69, 211, 116, 13, 189, 35, 8, 95, 233, 154, 129, 58, + ]) + .unwrap(); + + assert!(expected_ccc == child_keys.ccc); + assert!(expected_csk == child_keys.csk); + assert!(expected_cpk == child_keys.cpk); + } + + #[test] + fn test_nonharden_child_keys_generation() { + let seed = [ + 88, 189, 37, 237, 199, 125, 151, 226, 69, 153, 165, 113, 191, 69, 188, 221, 9, 34, 173, + 134, 61, 109, 34, 103, 121, 39, 237, 14, 107, 194, 24, 194, 191, 14, 237, 185, 
12, 87, + 22, 227, 38, 71, 17, 144, 251, 118, 217, 115, 33, 222, 201, 61, 203, 246, 121, 214, 6, + 187, 148, 92, 44, 253, 210, 37, + ]; + let root_keys = ChildKeysPublic::root(seed); + let cci = 13; + let child_keys = ChildKeysPublic::nth_child(&root_keys, cci); + + print!( + "{} {}", + child_keys.csk.value()[0], + child_keys.csk.value()[1] ); - assert_eq!( - root_keys.cpk.value(), - &[ - 220, 170, 95, 177, 121, 37, 86, 166, 56, 238, 232, 72, 21, 106, 107, 217, 158, 74, - 133, 91, 143, 244, 155, 15, 2, 230, 223, 169, 13, 20, 163, 138 - ] - ); - assert_eq!( - child_keys.cpk.value(), - &[ - 152, 249, 236, 111, 132, 96, 184, 122, 21, 179, 240, 15, 234, 155, 164, 144, 108, - 110, 120, 74, 176, 147, 196, 168, 243, 186, 203, 79, 97, 17, 194, 52 - ] - ); + let expected_ccc = [ + 50, 29, 113, 102, 49, 130, 64, 0, 247, 95, 135, 187, 118, 162, 65, 65, 194, 53, 189, + 242, 66, 178, 168, 2, 51, 193, 155, 72, 209, 2, 207, 251, + ]; + + let expected_csk: PrivateKey = PrivateKey::try_new([ + 162, 32, 211, 190, 180, 74, 151, 246, 189, 93, 8, 57, 182, 239, 125, 245, 192, 255, 24, + 186, 251, 23, 194, 186, 252, 121, 190, 54, 147, 199, 1, 109, + ]) + .unwrap(); + + let expected_cpk: PublicKey = PublicKey::try_new([ + 183, 48, 207, 170, 221, 111, 118, 9, 40, 67, 123, 162, 159, 169, 34, 157, 23, 37, 232, + 102, 231, 187, 199, 191, 205, 146, 159, 22, 79, 100, 10, 223, + ]) + .unwrap(); + + assert!(expected_ccc == child_keys.ccc); + assert!(expected_csk == child_keys.csk); + assert!(expected_cpk == child_keys.cpk); + } + + #[test] + fn test_edge_case_child_keys_generation_2_power_31() { + let seed = [ + 88, 189, 37, 237, 199, 125, 151, 226, 69, 153, 165, 113, 191, 69, 188, 221, 9, 34, 173, + 134, 61, 109, 34, 103, 121, 39, 237, 14, 107, 194, 24, 194, 191, 14, 237, 185, 12, 87, + 22, 227, 38, 71, 17, 144, 251, 118, 217, 115, 33, 222, 201, 61, 203, 246, 121, 214, 6, + 187, 148, 92, 44, 253, 210, 37, + ]; + let root_keys = ChildKeysPublic::root(seed); + let cci = (2u32).pow(31); //equivant to 0, thus non-harden. 
+ let child_keys = ChildKeysPublic::nth_child(&root_keys, cci); + + let expected_ccc = [ + 101, 15, 69, 152, 144, 22, 105, 89, 175, 21, 13, 50, 160, 167, 93, 80, 94, 99, 192, + 252, 1, 126, 196, 217, 149, 164, 60, 75, 237, 90, 104, 83, + ]; + + let expected_csk: PrivateKey = PrivateKey::try_new([ + 46, 196, 131, 199, 190, 180, 250, 222, 41, 188, 221, 156, 255, 239, 251, 207, 239, 202, + 166, 216, 107, 236, 195, 48, 167, 69, 97, 13, 132, 117, 76, 89, + ]) + .unwrap(); + + let expected_cpk: PublicKey = PublicKey::try_new([ + 93, 151, 154, 238, 175, 198, 53, 146, 255, 43, 37, 52, 214, 165, 69, 161, 38, 20, 68, + 166, 143, 80, 149, 216, 124, 203, 240, 114, 168, 111, 33, 83, + ]) + .unwrap(); + + assert!(expected_ccc == child_keys.ccc); + assert!(expected_csk == child_keys.csk); + assert!(expected_cpk == child_keys.cpk); } } diff --git a/key_protocol/src/key_management/key_tree/mod.rs b/key_protocol/src/key_management/key_tree/mod.rs index 389580b6..a4121b04 100644 --- a/key_protocol/src/key_management/key_tree/mod.rs +++ b/key_protocol/src/key_management/key_tree/mod.rs @@ -345,8 +345,8 @@ mod tests { assert!(tree.key_map.contains_key(&ChainIndex::root())); assert!(tree.account_id_map.contains_key(&AccountId::new([ - 46, 223, 229, 177, 59, 18, 189, 219, 153, 31, 249, 90, 112, 230, 180, 164, 80, 25, 106, - 159, 14, 238, 1, 192, 91, 8, 210, 165, 199, 41, 60, 104, + 172, 82, 222, 249, 164, 16, 148, 184, 219, 56, 92, 145, 203, 220, 251, 89, 214, 178, + 38, 30, 108, 202, 251, 241, 148, 200, 125, 185, 93, 227, 189, 247 ]))); } diff --git a/mempool/Cargo.toml b/mempool/Cargo.toml index 46014389..ee7e884c 100644 --- a/mempool/Cargo.toml +++ b/mempool/Cargo.toml @@ -2,6 +2,7 @@ name = "mempool" version = "0.1.0" edition = "2024" +license = { workspace = true } [dependencies] tokio = { workspace = true, features = ["sync"] } diff --git a/nssa/Cargo.toml b/nssa/Cargo.toml index a508cc08..e6952eee 100644 --- a/nssa/Cargo.toml +++ b/nssa/Cargo.toml @@ -2,6 +2,7 @@ name = "nssa" version = "0.1.0" edition = "2024" +license = { workspace = true } [dependencies] nssa_core = { workspace = true, features = ["host"] } @@ -23,7 +24,9 @@ risc0-build = "3.0.3" risc0-binfmt = "3.0.2" [dev-dependencies] +token_core.workspace = true test_program_methods.workspace = true + env_logger.workspace = true hex-literal = "1.0.0" test-case = "3.3.1" diff --git a/nssa/core/Cargo.toml b/nssa/core/Cargo.toml index 473cde90..f00f2857 100644 --- a/nssa/core/Cargo.toml +++ b/nssa/core/Cargo.toml @@ -2,6 +2,7 @@ name = "nssa_core" version = "0.1.0" edition = "2024" +license = { workspace = true } [dependencies] risc0-zkvm.workspace = true diff --git a/nssa/core/src/commitment.rs b/nssa/core/src/commitment.rs index 52344177..b08e3005 100644 --- a/nssa/core/src/commitment.rs +++ b/nssa/core/src/commitment.rs @@ -5,7 +5,10 @@ use serde::{Deserialize, Serialize}; use crate::{NullifierPublicKey, account::Account}; #[derive(Serialize, Deserialize, BorshSerialize, BorshDeserialize)] -#[cfg_attr(any(feature = "host", test), derive(Debug, Clone, PartialEq, Eq, Hash))] +#[cfg_attr( + any(feature = "host", test), + derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord) +)] pub struct Commitment(pub(super) [u8; 32]); /// A commitment to all zero data. 
diff --git a/nssa/core/src/nullifier.rs b/nssa/core/src/nullifier.rs index a4fbbcf6..c019b185 100644 --- a/nssa/core/src/nullifier.rs +++ b/nssa/core/src/nullifier.rs @@ -43,7 +43,10 @@ impl From<&NullifierSecretKey> for NullifierPublicKey { pub type NullifierSecretKey = [u8; 32]; #[derive(Serialize, Deserialize, BorshSerialize, BorshDeserialize)] -#[cfg_attr(any(feature = "host", test), derive(Debug, Clone, PartialEq, Eq, Hash))] +#[cfg_attr( + any(feature = "host", test), + derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash) +)] pub struct Nullifier(pub(super) [u8; 32]); impl Nullifier { diff --git a/nssa/core/src/program.rs b/nssa/core/src/program.rs index 32b3e2c0..a6a04425 100644 --- a/nssa/core/src/program.rs +++ b/nssa/core/src/program.rs @@ -20,8 +20,7 @@ pub struct ProgramInput<T> { /// Each program can derive up to `2^256` unique account IDs by choosing different /// seeds. PDAs allow programs to control namespaced account identifiers without /// collisions between programs. -#[derive(Serialize, Deserialize, Clone, Eq, PartialEq)] -#[cfg_attr(any(feature = "host", test), derive(Debug))] +#[derive(Debug, Serialize, Deserialize, Clone, Eq, PartialEq)] pub struct PdaSeed([u8; 32]); impl PdaSeed { @@ -65,23 +64,44 @@ impl From<(&ProgramId, &PdaSeed)> for AccountId { } } -#[derive(Serialize, Deserialize, Clone, PartialEq, Eq)] -#[cfg_attr(any(feature = "host", test), derive(Debug,))] +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)] pub struct ChainedCall { /// The program ID of the program to execute pub program_id: ProgramId, + pub pre_states: Vec<AccountWithMetadata>, /// The instruction data to pass pub instruction_data: InstructionData, - pub pre_states: Vec<AccountWithMetadata>, pub pda_seeds: Vec<PdaSeed>, } +impl ChainedCall { + /// Creates a new chained call serializing the given instruction. + pub fn new<I: Serialize>( + program_id: ProgramId, + pre_states: Vec<AccountWithMetadata>, + instruction: &I, + ) -> Self { + Self { + program_id, + pre_states, + instruction_data: risc0_zkvm::serde::to_vec(instruction) + .expect("Serialization to Vec<u32> should not fail"), + pda_seeds: Vec::new(), + } + } + + pub fn with_pda_seeds(mut self, pda_seeds: Vec<PdaSeed>) -> Self { + self.pda_seeds = pda_seeds; + self + } +} + /// Represents the final state of an `Account` after a program execution. /// A post state may optionally request that the executing program /// becomes the owner of the account (a “claim”). This is used to signal /// that the program intends to take ownership of the account. 
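+/// For example, a program that creates or initializes an account it does not yet
+/// own can return that account's post state with `claim` set, requesting that it
+/// become the account's `program_owner`.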
-#[derive(Serialize, Deserialize, Clone)] -#[cfg_attr(any(feature = "host", test), derive(Debug, PartialEq, Eq))] +#[derive(Debug, Serialize, Deserialize, Clone)] +#[cfg_attr(any(feature = "host", test), derive(PartialEq, Eq))] pub struct AccountPostState { account: Account, claim: bool, diff --git a/nssa/src/lib.rs b/nssa/src/lib.rs index de4b65b2..47a0eadb 100644 --- a/nssa/src/lib.rs +++ b/nssa/src/lib.rs @@ -14,7 +14,7 @@ mod state; pub use nssa_core::{ SharedSecretKey, - account::{Account, AccountId}, + account::{Account, AccountId, Data}, encryption::EphemeralPublicKey, program::ProgramId, }; diff --git a/nssa/src/merkle_tree/mod.rs b/nssa/src/merkle_tree/mod.rs index c4501cf8..b3637b13 100644 --- a/nssa/src/merkle_tree/mod.rs +++ b/nssa/src/merkle_tree/mod.rs @@ -1,3 +1,4 @@ +use borsh::{BorshDeserialize, BorshSerialize}; use sha2::{Digest, Sha256}; mod default_values; @@ -20,6 +21,7 @@ fn hash_value(value: &Value) -> Node { } #[cfg_attr(test, derive(Debug, PartialEq, Eq))] +#[derive(BorshSerialize, BorshDeserialize)] pub struct MerkleTree { nodes: Vec<Node>, capacity: usize, diff --git a/nssa/src/privacy_preserving_transaction/transaction.rs b/nssa/src/privacy_preserving_transaction/transaction.rs index 34649d2d..8eb4236e 100644 --- a/nssa/src/privacy_preserving_transaction/transaction.rs +++ b/nssa/src/privacy_preserving_transaction/transaction.rs @@ -5,6 +5,7 @@ use nssa_core::{ Commitment, CommitmentSetDigest, Nullifier, PrivacyPreservingCircuitOutput, account::{Account, AccountWithMetadata}, }; +use sha2::{Digest as _, digest::FixedOutput as _}; use super::{message::Message, witness_set::WitnessSet}; use crate::{ @@ -131,6 +132,13 @@ impl PrivacyPreservingTransaction { &self.witness_set } + pub fn hash(&self) -> [u8; 32] { + let bytes = self.to_bytes(); + let mut hasher = sha2::Sha256::new(); + hasher.update(&bytes); + hasher.finalize_fixed().into() + } + pub(crate) fn signer_account_ids(&self) -> Vec<AccountId> { self.witness_set .signatures_and_public_keys() diff --git a/nssa/src/program.rs b/nssa/src/program.rs index 943b16ed..06c7ad29 100644 --- a/nssa/src/program.rs +++ b/nssa/src/program.rs @@ -1,3 +1,4 @@ +use borsh::{BorshDeserialize, BorshSerialize}; use nssa_core::{ account::AccountWithMetadata, program::{InstructionData, ProgramId, ProgramOutput}, @@ -14,7 +15,7 @@ use crate::{ /// TODO: Make this variable when fees are implemented const MAX_NUM_CYCLES_PUBLIC_EXECUTION: u64 = 1024 * 1024 * 32; // 32M cycles -#[derive(Clone, Debug, PartialEq, Eq)] +#[derive(Clone, Debug, PartialEq, Eq, BorshSerialize, BorshDeserialize)] pub struct Program { id: ProgramId, elf: Vec<u8>, diff --git a/nssa/src/program_deployment_transaction/transaction.rs b/nssa/src/program_deployment_transaction/transaction.rs index 6002aded..188b73ea 100644 --- a/nssa/src/program_deployment_transaction/transaction.rs +++ b/nssa/src/program_deployment_transaction/transaction.rs @@ -1,4 +1,5 @@ use borsh::{BorshDeserialize, BorshSerialize}; +use sha2::{Digest as _, digest::FixedOutput as _}; use crate::{ V02State, error::NssaError, program::Program, program_deployment_transaction::message::Message, @@ -6,7 +7,7 @@ use crate::{ #[derive(Debug, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)] pub struct ProgramDeploymentTransaction { - pub(crate) message: Message, + pub message: Message, } impl ProgramDeploymentTransaction { @@ -30,4 +31,11 @@ impl ProgramDeploymentTransaction { Ok(program) } } + + pub fn hash(&self) -> [u8; 32] { + let bytes = self.to_bytes(); + let mut hasher = 
sha2::Sha256::new(); + hasher.update(&bytes); + hasher.finalize_fixed().into() + } } diff --git a/nssa/src/public_transaction/transaction.rs b/nssa/src/public_transaction/transaction.rs index f5badb6a..3e82acc7 100644 --- a/nssa/src/public_transaction/transaction.rs +++ b/nssa/src/public_transaction/transaction.rs @@ -17,8 +17,8 @@ use crate::{ #[derive(Debug, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)] pub struct PublicTransaction { - message: Message, - witness_set: WitnessSet, + pub message: Message, + pub witness_set: WitnessSet, } impl PublicTransaction { @@ -288,12 +288,12 @@ pub mod tests { let tx = transaction_for_tests(); let expected_signer_account_ids = vec![ AccountId::new([ - 208, 122, 210, 232, 75, 39, 250, 0, 194, 98, 240, 161, 238, 160, 255, 53, 202, 9, - 115, 84, 126, 106, 16, 111, 114, 241, 147, 194, 220, 131, 139, 68, + 148, 179, 206, 253, 199, 51, 82, 86, 232, 2, 152, 122, 80, 243, 54, 207, 237, 112, + 83, 153, 44, 59, 204, 49, 128, 84, 160, 227, 216, 149, 97, 102, ]), AccountId::new([ - 231, 174, 119, 197, 239, 26, 5, 153, 147, 68, 175, 73, 159, 199, 138, 23, 5, 57, - 141, 98, 237, 6, 207, 46, 20, 121, 246, 222, 248, 154, 57, 188, + 30, 145, 107, 3, 207, 73, 192, 230, 160, 63, 238, 207, 18, 69, 54, 216, 103, 244, + 92, 94, 124, 248, 42, 16, 141, 19, 119, 18, 14, 226, 140, 204, ]), ]; let signer_account_ids = tx.signer_account_ids(); diff --git a/nssa/src/signature/public_key.rs b/nssa/src/signature/public_key.rs index 57cda71c..55e55b57 100644 --- a/nssa/src/signature/public_key.rs +++ b/nssa/src/signature/public_key.rs @@ -48,7 +48,8 @@ impl PublicKey { impl From<&PublicKey> for AccountId { fn from(key: &PublicKey) -> Self { - const PUBLIC_ACCOUNT_ID_PREFIX: &[u8; 32] = b"/NSSA/v0.2/AccountId/Public/\x00\x00\x00\x00"; + const PUBLIC_ACCOUNT_ID_PREFIX: &[u8; 32] = + b"/LEE/v0.3/AccountId/Public/\x00\x00\x00\x00\x00"; let mut hasher = Sha256::new(); hasher.update(PUBLIC_ACCOUNT_ID_PREFIX); diff --git a/nssa/src/state.rs b/nssa/src/state.rs index 8117f62e..3756aaea 100644 --- a/nssa/src/state.rs +++ b/nssa/src/state.rs @@ -1,5 +1,6 @@ -use std::collections::{HashMap, HashSet}; +use std::collections::{BTreeSet, HashMap, HashSet}; +use borsh::{BorshDeserialize, BorshSerialize}; use nssa_core::{ Commitment, CommitmentSetDigest, DUMMY_COMMITMENT, MembershipProof, Nullifier, account::{Account, AccountId}, @@ -15,6 +16,8 @@ use crate::{ pub const MAX_NUMBER_CHAINED_CALLS: usize = 10; +#[derive(BorshSerialize, BorshDeserialize)] +#[cfg_attr(test, derive(Debug, PartialEq, Eq))] pub(crate) struct CommitmentSet { merkle_tree: MerkleTree, commitments: HashMap<Commitment, usize>, @@ -60,8 +63,49 @@ impl CommitmentSet { } } -type NullifierSet = HashSet<Nullifier>; +#[cfg_attr(test, derive(Debug, PartialEq, Eq))] +struct NullifierSet(BTreeSet<Nullifier>); +impl NullifierSet { + fn new() -> Self { + Self(BTreeSet::new()) + } + + fn extend(&mut self, new_nullifiers: Vec<Nullifier>) { + self.0.extend(new_nullifiers); + } + + fn contains(&self, nullifier: &Nullifier) -> bool { + self.0.contains(nullifier) + } +} + +impl BorshSerialize for NullifierSet { + fn serialize<W: std::io::Write>(&self, writer: &mut W) -> std::io::Result<()> { + self.0.iter().collect::<Vec<_>>().serialize(writer) + } +} + +impl BorshDeserialize for NullifierSet { + fn deserialize_reader<R: std::io::Read>(reader: &mut R) -> std::io::Result<Self> { + let vec = Vec::<Nullifier>::deserialize_reader(reader)?; + + let mut set = BTreeSet::new(); + for n in vec { + if !set.insert(n) { + return 
Err(std::io::Error::new( + std::io::ErrorKind::InvalidData, + "duplicate nullifier in NullifierSet", + )); + } + } + + Ok(Self(set)) + } +} + +#[derive(BorshSerialize, BorshDeserialize)] +#[cfg_attr(test, derive(Debug, PartialEq, Eq))] pub struct V02State { public_state: HashMap<AccountId, Account>, private_state: (CommitmentSet, NullifierSet), @@ -273,6 +317,7 @@ pub mod tests { encryption::{EphemeralPublicKey, Scalar, ViewingPublicKey}, program::{PdaSeed, ProgramId}, }; + use token_core::{TokenDefinition, TokenHolding}; use crate::{ PublicKey, PublicTransaction, V02State, @@ -2284,53 +2329,6 @@ pub mod tests { )); } - // TODO: repeated code needs to be cleaned up - // from token.rs (also repeated in amm.rs) - const TOKEN_DEFINITION_DATA_SIZE: usize = 55; - - const TOKEN_HOLDING_DATA_SIZE: usize = 49; - - struct TokenDefinition { - account_type: u8, - name: [u8; 6], - total_supply: u128, - metadata_id: AccountId, - } - - struct TokenHolding { - account_type: u8, - definition_id: AccountId, - balance: u128, - } - impl TokenDefinition { - fn into_data(self) -> Data { - let mut bytes = Vec::<u8>::new(); - bytes.extend_from_slice(&[self.account_type]); - bytes.extend_from_slice(&self.name); - bytes.extend_from_slice(&self.total_supply.to_le_bytes()); - bytes.extend_from_slice(&self.metadata_id.to_bytes()); - - if bytes.len() != TOKEN_DEFINITION_DATA_SIZE { - panic!("Invalid Token Definition data"); - } - - Data::try_from(bytes).expect("Token definition data size must fit into data") - } - } - - impl TokenHolding { - fn into_data(self) -> Data { - let mut bytes = [0; TOKEN_HOLDING_DATA_SIZE]; - bytes[0] = self.account_type; - bytes[1..33].copy_from_slice(&self.definition_id.to_bytes()); - bytes[33..].copy_from_slice(&self.balance.to_le_bytes()); - bytes - .to_vec() - .try_into() - .expect("33 bytes should fit into Data") - } - } - // TODO repeated code should ultimately be removed; fn compute_pool_pda( amm_program_id: ProgramId, @@ -2703,8 +2701,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_a_definition_id(), balance: BalanceForTests::user_token_a_holding_init(), }), @@ -2716,8 +2713,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_b_definition_id(), balance: BalanceForTests::user_token_b_holding_init(), }), @@ -2749,11 +2745,10 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenDefinition::into_data(TokenDefinition { - account_type: 0u8, - name: [1u8; 6], + data: Data::from(&TokenDefinition::Fungible { + name: String::from("test"), total_supply: BalanceForTests::token_a_supply(), - metadata_id: AccountId::new([0; 32]), + metadata_id: None, }), nonce: 0, } @@ -2763,11 +2758,10 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenDefinition::into_data(TokenDefinition { - account_type: 0u8, - name: [1u8; 6], + data: Data::from(&TokenDefinition::Fungible { + name: String::from("test"), total_supply: BalanceForTests::token_b_supply(), - metadata_id: AccountId::new([0; 32]), + metadata_id: None, }), nonce: 0, } @@ -2777,11 +2771,10 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: 
TokenDefinition::into_data(TokenDefinition { - account_type: 0u8, - name: [1u8; 6], + data: Data::from(&TokenDefinition::Fungible { + name: String::from("LP Token"), total_supply: BalanceForTests::token_lp_supply(), - metadata_id: AccountId::new([0; 32]), + metadata_id: None, }), nonce: 0, } @@ -2791,8 +2784,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_a_definition_id(), balance: BalanceForTests::vault_a_balance_init(), }), @@ -2804,8 +2796,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_b_definition_id(), balance: BalanceForTests::vault_b_balance_init(), }), @@ -2817,8 +2808,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_lp_definition_id(), balance: BalanceForTests::user_token_lp_holding_init(), }), @@ -2830,8 +2820,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_a_definition_id(), balance: BalanceForTests::vault_a_balance_swap_1(), }), @@ -2843,8 +2832,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_b_definition_id(), balance: BalanceForTests::vault_b_balance_swap_1(), }), @@ -2876,8 +2864,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_a_definition_id(), balance: BalanceForTests::user_token_a_holding_swap_1(), }), @@ -2889,8 +2876,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_b_definition_id(), balance: BalanceForTests::user_token_b_holding_swap_1(), }), @@ -2902,8 +2888,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_a_definition_id(), balance: BalanceForTests::vault_a_balance_swap_2(), }), @@ -2915,8 +2900,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_b_definition_id(), balance: BalanceForTests::vault_b_balance_swap_2(), }), @@ -2948,8 +2932,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_a_definition_id(), balance: BalanceForTests::user_token_a_holding_swap_2(), }), @@ -2961,8 
+2944,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_b_definition_id(), balance: BalanceForTests::user_token_b_holding_swap_2(), }), @@ -2974,8 +2956,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_a_definition_id(), balance: BalanceForTests::vault_a_balance_add(), }), @@ -2987,8 +2968,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_b_definition_id(), balance: BalanceForTests::vault_b_balance_add(), }), @@ -3020,8 +3000,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_a_definition_id(), balance: BalanceForTests::user_token_a_holding_add(), }), @@ -3033,8 +3012,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_b_definition_id(), balance: BalanceForTests::user_token_b_holding_add(), }), @@ -3046,8 +3024,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_lp_definition_id(), balance: BalanceForTests::user_token_lp_holding_add(), }), @@ -3059,11 +3036,10 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenDefinition::into_data(TokenDefinition { - account_type: 0u8, - name: [1u8; 6], + data: Data::from(&TokenDefinition::Fungible { + name: String::from("LP Token"), total_supply: BalanceForTests::token_lp_supply_add(), - metadata_id: AccountId::new([0; 32]), + metadata_id: None, }), nonce: 0, } @@ -3073,8 +3049,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_a_definition_id(), balance: BalanceForTests::vault_a_balance_remove(), }), @@ -3086,8 +3061,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_b_definition_id(), balance: BalanceForTests::vault_b_balance_remove(), }), @@ -3119,8 +3093,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_a_definition_id(), balance: BalanceForTests::user_token_a_holding_remove(), }), @@ -3132,8 +3105,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: 
IdForTests::token_b_definition_id(), balance: BalanceForTests::user_token_b_holding_remove(), }), @@ -3145,8 +3117,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_lp_definition_id(), balance: BalanceForTests::user_token_lp_holding_remove(), }), @@ -3158,11 +3129,10 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenDefinition::into_data(TokenDefinition { - account_type: 0u8, - name: [1u8; 6], + data: Data::from(&TokenDefinition::Fungible { + name: String::from("LP Token"), total_supply: BalanceForTests::token_lp_supply_remove(), - metadata_id: AccountId::new([0; 32]), + metadata_id: None, }), nonce: 0, } @@ -3172,11 +3142,10 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenDefinition::into_data(TokenDefinition { - account_type: 0u8, - name: [1u8; 6], + data: Data::from(&TokenDefinition::Fungible { + name: String::from("LP Token"), total_supply: 0, - metadata_id: AccountId::new([0; 32]), + metadata_id: None, }), nonce: 0, } @@ -3186,8 +3155,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_a_definition_id(), balance: 0, }), @@ -3199,8 +3167,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_b_definition_id(), balance: 0, }), @@ -3232,8 +3199,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_a_definition_id(), balance: BalanceForTests::user_token_a_holding_new_definition(), }), @@ -3245,8 +3211,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_b_definition_id(), balance: BalanceForTests::user_token_b_holding_new_definition(), }), @@ -3258,8 +3223,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_lp_definition_id(), balance: BalanceForTests::user_token_a_holding_new_definition(), }), @@ -3271,11 +3235,10 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenDefinition::into_data(TokenDefinition { - account_type: 0u8, - name: [1u8; 6], + data: Data::from(&TokenDefinition::Fungible { + name: String::from("LP Token"), total_supply: BalanceForTests::vault_a_balance_init(), - metadata_id: AccountId::new([0; 32]), + metadata_id: None, }), nonce: 0, } @@ -3305,8 +3268,7 @@ pub mod tests { Account { program_owner: Program::token().id(), balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_lp_definition_id(), balance: 0, }), @@ -4071,13 +4033,13 @@ pub mod tests { let pinata_token_holding_id = 
AccountId::from((&pinata_token.id(), &PdaSeed::new([0; 32]))); let winner_token_holding_id = AccountId::new([3; 32]); - let mut expected_winner_account_data = [0; 49]; - expected_winner_account_data[0] = 1; - expected_winner_account_data[1..33].copy_from_slice(pinata_token_definition_id.value()); - expected_winner_account_data[33..].copy_from_slice(&150u128.to_le_bytes()); + let expected_winner_account_holding = token_core::TokenHolding::Fungible { + definition_id: pinata_token_definition_id, + balance: 150, + }; let expected_winner_token_holding_post = Account { program_owner: token.id(), - data: expected_winner_account_data.to_vec().try_into().unwrap(), + data: Data::from(&expected_winner_account_holding), ..Account::default() }; @@ -4087,10 +4049,10 @@ pub mod tests { // Execution of the token program to create new token for the pinata token // definition and supply accounts let total_supply: u128 = 10_000_000; - // instruction: [0x00 || total_supply (little-endian 16 bytes) || name (6 bytes)] - let mut instruction = vec![0; 23]; - instruction[1..17].copy_from_slice(&total_supply.to_le_bytes()); - instruction[17..].copy_from_slice(b"PINATA"); + let instruction = token_core::Instruction::NewFungibleDefinition { + name: String::from("PINATA"), + total_supply, + }; let message = public_transaction::Message::try_new( token.id(), vec![pinata_token_definition_id, pinata_token_holding_id], @@ -4102,9 +4064,8 @@ pub mod tests { let tx = PublicTransaction::new(message, witness_set); state.transition_from_public_transaction(&tx).unwrap(); - // Execution of the token program transfer just to initialize the winner token account - let mut instruction = vec![0; 23]; - instruction[0] = 2; + // Execution of winner's token holding account initialization + let instruction = token_core::Instruction::InitializeAccount; let message = public_transaction::Message::try_new( token.id(), vec![pinata_token_definition_id, winner_token_holding_id], @@ -4528,4 +4489,15 @@ pub mod tests { // Assert - should fail because the malicious program tries to manipulate is_authorized assert!(matches!(result, Err(NssaError::CircuitProvingError(_)))); } + + #[test] + fn test_state_serialization_roundtrip() { + let account_id_1 = AccountId::new([1; 32]); + let account_id_2 = AccountId::new([2; 32]); + let initial_data = [(account_id_1, 100u128), (account_id_2, 151u128)]; + let state = V02State::new_with_genesis_accounts(&initial_data, &[]).with_test_programs(); + let bytes = borsh::to_vec(&state).unwrap(); + let state_from_bytes: V02State = borsh::from_slice(&bytes).unwrap(); + assert_eq!(state, state_from_bytes); + } } diff --git a/program_methods/Cargo.toml b/program_methods/Cargo.toml index 5f0688a4..999c1522 100644 --- a/program_methods/Cargo.toml +++ b/program_methods/Cargo.toml @@ -2,6 +2,7 @@ name = "program_methods" version = "0.1.0" edition = "2024" +license = { workspace = true } [build-dependencies] risc0-build.workspace = true diff --git a/program_methods/guest/Cargo.toml b/program_methods/guest/Cargo.toml index 37c1a8d9..eda23348 100644 --- a/program_methods/guest/Cargo.toml +++ b/program_methods/guest/Cargo.toml @@ -2,9 +2,11 @@ name = "programs" version = "0.1.0" edition = "2024" +license = { workspace = true } [dependencies] nssa_core.workspace = true - +token_core.workspace = true +token_program.workspace = true risc0-zkvm.workspace = true serde = { workspace = true, default-features = false } diff --git a/program_methods/guest/src/bin/amm.rs b/program_methods/guest/src/bin/amm.rs index 9488db13..2c7a7f8f 100644 
--- a/program_methods/guest/src/bin/amm.rs +++ b/program_methods/guest/src/bin/amm.rs @@ -152,56 +152,6 @@ impl PoolDefinition { } } -// TODO: remove repeated code for Token_Definition and TokenHoldling - -const TOKEN_HOLDING_TYPE: u8 = 1; -const TOKEN_HOLDING_DATA_SIZE: usize = 49; - -struct TokenHolding { - #[cfg_attr(not(test), expect(dead_code, reason = "TODO: fix later"))] - account_type: u8, - definition_id: AccountId, - balance: u128, -} - -impl TokenHolding { - fn parse(data: &[u8]) -> Option<Self> { - if data.len() != TOKEN_HOLDING_DATA_SIZE || data[0] != TOKEN_HOLDING_TYPE { - None - } else { - let account_type = data[0]; - let definition_id = AccountId::new( - data[1..33] - .try_into() - .expect("Defintion ID must be 32 bytes long"), - ); - let balance = u128::from_le_bytes( - data[33..] - .try_into() - .expect("balance must be 16 bytes little-endian"), - ); - Some(Self { - definition_id, - balance, - account_type, - }) - } - } - - #[cfg(test)] - fn into_data(self) -> Data { - let mut bytes = [0; TOKEN_HOLDING_DATA_SIZE]; - bytes[0] = self.account_type; - bytes[1..33].copy_from_slice(&self.definition_id.to_bytes()); - bytes[33..].copy_from_slice(&self.balance.to_le_bytes()); - - bytes - .to_vec() - .try_into() - .expect("49 bytes should fit into Data") - } -} - type Instruction = Vec<u8>; fn main() { let ( @@ -412,32 +362,6 @@ fn compute_liquidity_token_pda_seed(pool_id: AccountId) -> PdaSeed { ) } -const TOKEN_PROGRAM_NEW: u8 = 0; -const TOKEN_PROGRAM_TRANSFER: u8 = 1; -const TOKEN_PROGRAM_MINT: u8 = 4; -const TOKEN_PROGRAM_BURN: u8 = 3; - -fn initialize_token_transfer_chained_call( - token_program_command: u8, - sender: AccountWithMetadata, - recipient: AccountWithMetadata, - amount_to_move: u128, - pda_seed: Vec<PdaSeed>, -) -> ChainedCall { - let mut instruction_data = vec![0u8; 23]; - instruction_data[0] = token_program_command; - instruction_data[1..17].copy_from_slice(&amount_to_move.to_le_bytes()); - let instruction_data = risc0_zkvm::serde::to_vec(&instruction_data) - .expect("AMM Program expects valid token transfer instruction data"); - - ChainedCall { - program_id: sender.account.program_owner, - instruction_data, - pre_states: vec![sender, recipient], - pda_seeds: pda_seed, - } -} - fn new_definition( pre_states: &[AccountWithMetadata], balance_in: &[u128], @@ -471,12 +395,12 @@ fn new_definition( } // Verify token_a and token_b are different - let definition_token_a_id = TokenHolding::parse(&user_holding_a.account.data) + let definition_token_a_id = token_core::TokenHolding::try_from(&user_holding_a.account.data) .expect("New definition: AMM Program expects valid Token Holding account for Token A") - .definition_id; - let definition_token_b_id = TokenHolding::parse(&user_holding_b.account.data) + .definition_id(); + let definition_token_b_id = token_core::TokenHolding::try_from(&user_holding_b.account.data) .expect("New definition: AMM Program expects valid Token Holding account for Token B") - .definition_id; + .definition_id(); // both instances of the same token program let token_program = user_holding_a.account.program_owner; @@ -543,57 +467,48 @@ fn new_definition( AccountPostState::new(pool_post.clone()) }; - let mut chained_calls = Vec::<ChainedCall>::new(); + let token_program_id = user_holding_a.account.program_owner; // Chain call for Token A (user_holding_a -> Vault_A) - let call_token_a = initialize_token_transfer_chained_call( - TOKEN_PROGRAM_TRANSFER, - user_holding_a.clone(), - vault_a.clone(), - amount_a, - Vec::<PdaSeed>::new(), + let call_token_a 
= ChainedCall::new( + token_program_id, + vec![user_holding_a.clone(), vault_a.clone()], + &token_core::Instruction::Transfer { + amount_to_transfer: amount_a, + }, ); // Chain call for Token B (user_holding_b -> Vault_B) - let call_token_b = initialize_token_transfer_chained_call( - TOKEN_PROGRAM_TRANSFER, - user_holding_b.clone(), - vault_b.clone(), - amount_b, - Vec::<PdaSeed>::new(), + let call_token_b = ChainedCall::new( + token_program_id, + vec![user_holding_b.clone(), vault_b.clone()], + &token_core::Instruction::Transfer { + amount_to_transfer: amount_b, + }, ); // Chain call for liquidity token (TokenLP definition -> User LP Holding) - let mut instruction_data = vec![0u8; 23]; - instruction_data[0] = if pool.account == Account::default() { - TOKEN_PROGRAM_NEW + let instruction = if pool.account == Account::default() { + token_core::Instruction::NewFungibleDefinition { + name: String::from("LP Token"), + total_supply: amount_a, + } } else { - TOKEN_PROGRAM_MINT - }; //new or mint - let nme = if pool.account == Account::default() { - [1u8; 6] - } else { - [0u8; 6] + token_core::Instruction::Mint { + amount_to_mint: amount_a, + } }; - instruction_data[1..17].copy_from_slice(&amount_a.to_le_bytes()); - instruction_data[17..].copy_from_slice(&nme); - let instruction_data = risc0_zkvm::serde::to_vec(&instruction_data) - .expect("New definition: AMM Program expects valid instruction_data"); - let mut pool_lp_auth = pool_lp.clone(); pool_lp_auth.is_authorized = true; - let token_program_id = user_holding_a.account.program_owner; - let call_token_lp = ChainedCall { - program_id: token_program_id, - instruction_data, - pre_states: vec![pool_lp_auth.clone(), user_holding_lp.clone()], - pda_seeds: vec![compute_liquidity_token_pda_seed(pool.account_id)], - }; + let call_token_lp = ChainedCall::new( + token_program_id, + vec![pool_lp_auth.clone(), user_holding_lp.clone()], + &instruction, + ) + .with_pda_seeds(vec![compute_liquidity_token_pda_seed(pool.account_id)]); - chained_calls.push(call_token_lp); - chained_calls.push(call_token_b); - chained_calls.push(call_token_a); + let chained_calls = vec![call_token_lp, call_token_b, call_token_a]; let post_states = vec![ pool_post.clone(), @@ -645,18 +560,30 @@ fn swap( // fetch pool reserves // validates reserves is at least the vaults' balances - if TokenHolding::parse(&vault_a.account.data) - .expect("Swap: AMM Program expects a valid Token Holding Account for Vault A") - .balance - < pool_def_data.reserve_a - { + let vault_a_token_holding = token_core::TokenHolding::try_from(&vault_a.account.data) + .expect("Swap: AMM Program expects a valid Token Holding Account for Vault A"); + let token_core::TokenHolding::Fungible { + definition_id: _, + balance: vault_a_balance, + } = vault_a_token_holding + else { + panic!("Swap: AMM Program expects a valid Fungible Token Holding Account for Vault A"); + }; + if vault_a_balance < pool_def_data.reserve_a { panic!("Reserve for Token A exceeds vault balance"); } - if TokenHolding::parse(&vault_b.account.data) - .expect("Swap: AMM Program expects a valid Token Holding Account for Vault B") - .balance - < pool_def_data.reserve_b - { + + let vault_b_token_holding = token_core::TokenHolding::try_from(&vault_b.account.data) + .expect("Swap: AMM Program expects a valid Token Holding Account for Vault B"); + let token_core::TokenHolding::Fungible { + definition_id: _, + balance: vault_b_balance, + } = vault_b_token_holding + else { + panic!("Swap: AMM Program expects a valid Fungible Token Holding Account for 
Vault B"); + }; + + if vault_b_balance < pool_def_data.reserve_b { panic!("Reserve for Token B exceeds vault balance"); } @@ -741,30 +668,37 @@ fn swap_logic( panic!("Withdraw amount should be nonzero"); } + let token_program_id = user_deposit.account.program_owner; + let mut chained_calls = Vec::new(); - chained_calls.push(initialize_token_transfer_chained_call( - TOKEN_PROGRAM_TRANSFER, - user_deposit.clone(), - vault_deposit.clone(), - deposit_amount, - Vec::<PdaSeed>::new(), + chained_calls.push(ChainedCall::new( + token_program_id, + vec![user_deposit, vault_deposit], + &token_core::Instruction::Transfer { + amount_to_transfer: deposit_amount, + }, )); let mut vault_withdraw = vault_withdraw.clone(); vault_withdraw.is_authorized = true; - chained_calls.push(initialize_token_transfer_chained_call( - TOKEN_PROGRAM_TRANSFER, - vault_withdraw.clone(), - user_withdraw.clone(), - withdraw_amount, - vec![compute_vault_pda_seed( - pool_id, - TokenHolding::parse(&vault_withdraw.account.data) - .expect("Swap Logic: AMM Program expects valid token data") - .definition_id, - )], - )); + let pda_seed = compute_vault_pda_seed( + pool_id, + token_core::TokenHolding::try_from(&vault_withdraw.account.data) + .expect("Swap Logic: AMM Program expects valid token data") + .definition_id(), + ); + + chained_calls.push( + ChainedCall::new( + token_program_id, + vec![vault_withdraw, user_withdraw], + &token_core::Instruction::Transfer { + amount_to_transfer: withdraw_amount, + }, + ) + .with_pda_seeds(vec![pda_seed]), + ); (chained_calls, deposit_amount, withdraw_amount) } @@ -816,12 +750,29 @@ fn add_liquidity( } // 2. Determine deposit amount - let vault_b_balance = TokenHolding::parse(&vault_b.account.data) - .expect("Add liquidity: AMM Program expects valid Token Holding Account for Vault B") - .balance; - let vault_a_balance = TokenHolding::parse(&vault_a.account.data) - .expect("Add liquidity: AMM Program expects valid Token Holding Account for Vault A") - .balance; + let vault_b_token_holding = token_core::TokenHolding::try_from(&vault_b.account.data) + .expect("Add liquidity: AMM Program expects valid Token Holding Account for Vault B"); + let token_core::TokenHolding::Fungible { + definition_id: _, + balance: vault_b_balance, + } = vault_b_token_holding + else { + panic!( + "Add liquidity: AMM Program expects valid Fungible Token Holding Account for Vault B" + ); + }; + + let vault_a_token_holding = token_core::TokenHolding::try_from(&vault_a.account.data) + .expect("Add liquidity: AMM Program expects valid Token Holding Account for Vault A"); + let token_core::TokenHolding::Fungible { + definition_id: _, + balance: vault_a_balance, + } = vault_a_token_holding + else { + panic!( + "Add liquidity: AMM Program expects valid Fungible Token Holding Account for Vault A" + ); + }; if pool_def_data.reserve_a == 0 || pool_def_data.reserve_b == 0 { panic!("Reserves must be nonzero"); @@ -879,38 +830,37 @@ fn add_liquidity( }; pool_post.data = pool_post_definition.into_data(); - let mut chained_call = Vec::new(); + let token_program_id = user_holding_a.account.program_owner; // Chain call for Token A (UserHoldingA -> Vault_A) - let call_token_a = initialize_token_transfer_chained_call( - TOKEN_PROGRAM_TRANSFER, - user_holding_a.clone(), - vault_a.clone(), - actual_amount_a, - Vec::<PdaSeed>::new(), + let call_token_a = ChainedCall::new( + token_program_id, + vec![user_holding_a.clone(), vault_a.clone()], + &token_core::Instruction::Transfer { + amount_to_transfer: actual_amount_a, + }, ); // Chain call for 
Token B (UserHoldingB -> Vault_B) - let call_token_b = initialize_token_transfer_chained_call( - TOKEN_PROGRAM_TRANSFER, - user_holding_b.clone(), - vault_b.clone(), - actual_amount_b, - Vec::<PdaSeed>::new(), + let call_token_b = ChainedCall::new( + token_program_id, + vec![user_holding_b.clone(), vault_b.clone()], + &token_core::Instruction::Transfer { + amount_to_transfer: actual_amount_b, + }, ); // Chain call for LP (mint new tokens for user_holding_lp) let mut pool_definition_lp_auth = pool_definition_lp.clone(); pool_definition_lp_auth.is_authorized = true; - let call_token_lp = initialize_token_transfer_chained_call( - TOKEN_PROGRAM_MINT, - pool_definition_lp_auth.clone(), - user_holding_lp.clone(), - delta_lp, - vec![compute_liquidity_token_pda_seed(pool.account_id)], - ); + let call_token_lp = ChainedCall::new( + token_program_id, + vec![pool_definition_lp_auth.clone(), user_holding_lp.clone()], + &token_core::Instruction::Mint { + amount_to_mint: delta_lp, + }, + ) + .with_pda_seeds(vec![compute_liquidity_token_pda_seed(pool.account_id)]); - chained_call.push(call_token_lp); - chained_call.push(call_token_b); - chained_call.push(call_token_a); + let chained_calls = vec![call_token_lp, call_token_b, call_token_a]; let post_states = vec![ AccountPostState::new(pool_post), @@ -922,7 +872,7 @@ fn add_liquidity( AccountPostState::new(pre_states[6].account.clone()), ]; - (post_states, chained_call) + (post_states, chained_calls) } fn remove_liquidity( @@ -986,11 +936,20 @@ fn remove_liquidity( } // 2. Compute withdrawal amounts - let user_holding_lp_data = TokenHolding::parse(&user_holding_lp.account.data) + let user_holding_lp_data = token_core::TokenHolding::try_from(&user_holding_lp.account.data) .expect("Remove liquidity: AMM Program expects a valid Token Account for liquidity token"); + let token_core::TokenHolding::Fungible { + definition_id: _, + balance: user_lp_balance, + } = user_holding_lp_data + else { + panic!( + "Remove liquidity: AMM Program expects a valid Fungible Token Holding Account for liquidity token" + ); + }; - if user_holding_lp_data.balance > pool_def_data.liquidity_pool_supply - || user_holding_lp_data.definition_id != pool_def_data.liquidity_pool_id + if user_lp_balance > pool_def_data.liquidity_pool_supply + || user_holding_lp_data.definition_id() != pool_def_data.liquidity_pool_id { panic!("Invalid liquidity account provided"); } @@ -1026,44 +985,45 @@ fn remove_liquidity( pool_post.data = pool_post_definition.into_data(); - let mut chained_calls = Vec::new(); + let token_program_id = user_holding_a.account.program_owner; // Chaincall for Token A withdraw - let call_token_a = initialize_token_transfer_chained_call( - TOKEN_PROGRAM_TRANSFER, - running_vault_a, - user_holding_a.clone(), - withdraw_amount_a, - vec![compute_vault_pda_seed( - pool.account_id, - pool_def_data.definition_token_a_id, - )], - ); + let call_token_a = ChainedCall::new( + token_program_id, + vec![running_vault_a, user_holding_a.clone()], + &token_core::Instruction::Transfer { + amount_to_transfer: withdraw_amount_a, + }, + ) + .with_pda_seeds(vec![compute_vault_pda_seed( + pool.account_id, + pool_def_data.definition_token_a_id, + )]); // Chaincall for Token B withdraw - let call_token_b = initialize_token_transfer_chained_call( - TOKEN_PROGRAM_TRANSFER, - running_vault_b, - user_holding_b.clone(), - withdraw_amount_b, - vec![compute_vault_pda_seed( - pool.account_id, - pool_def_data.definition_token_b_id, - )], - ); + let call_token_b = ChainedCall::new( + token_program_id, + 
vec![running_vault_b, user_holding_b.clone()], + &token_core::Instruction::Transfer { + amount_to_transfer: withdraw_amount_b, + }, + ) + .with_pda_seeds(vec![compute_vault_pda_seed( + pool.account_id, + pool_def_data.definition_token_b_id, + )]); // Chaincall for LP adjustment let mut pool_definition_lp_auth = pool_definition_lp.clone(); pool_definition_lp_auth.is_authorized = true; - let call_token_lp = initialize_token_transfer_chained_call( - TOKEN_PROGRAM_BURN, - pool_definition_lp_auth.clone(), - user_holding_lp.clone(), - delta_lp, - vec![compute_liquidity_token_pda_seed(pool.account_id)], - ); + let call_token_lp = ChainedCall::new( + token_program_id, + vec![pool_definition_lp_auth, user_holding_lp.clone()], + &token_core::Instruction::Burn { + amount_to_burn: delta_lp, + }, + ) + .with_pda_seeds(vec![compute_liquidity_token_pda_seed(pool.account_id)]); - chained_calls.push(call_token_lp); - chained_calls.push(call_token_b); - chained_calls.push(call_token_a); + let chained_calls = vec![call_token_lp, call_token_b, call_token_a]; let post_states = vec![ AccountPostState::new(pool_post.clone()), @@ -1082,41 +1042,18 @@ fn remove_liquidity( mod tests { use nssa_core::{ account::{Account, AccountId, AccountWithMetadata, Data}, - program::{ChainedCall, PdaSeed, ProgramId}, + program::{ChainedCall, ProgramId}, }; + use token_core::{TokenDefinition, TokenHolding}; use crate::{ - PoolDefinition, TokenHolding, add_liquidity, compute_liquidity_token_pda, + PoolDefinition, add_liquidity, compute_liquidity_token_pda, compute_liquidity_token_pda_seed, compute_pool_pda, compute_vault_pda, compute_vault_pda_seed, new_definition, remove_liquidity, swap, }; const TOKEN_PROGRAM_ID: ProgramId = [15; 8]; const AMM_PROGRAM_ID: ProgramId = [42; 8]; - const TOKEN_DEFINITION_DATA_SIZE: usize = 55; - - struct TokenDefinition { - account_type: u8, - name: [u8; 6], - total_supply: u128, - metadata_id: AccountId, - } - - impl TokenDefinition { - fn into_data(self) -> Data { - let mut bytes = Vec::<u8>::new(); - bytes.extend_from_slice(&[self.account_type]); - bytes.extend_from_slice(&self.name); - bytes.extend_from_slice(&self.total_supply.to_le_bytes()); - bytes.extend_from_slice(&self.metadata_id.to_bytes()); - - if bytes.len() != TOKEN_DEFINITION_DATA_SIZE { - panic!("Invalid Token Definition data"); - } - - Data::try_from(bytes).expect("Token definition data size must fit into data") - } - } struct BalanceForTests; @@ -1250,21 +1187,16 @@ mod tests { impl ChainedCallForTests { fn cc_swap_token_a_test_1() -> ChainedCall { - let mut instruction_data = vec![0; 23]; - instruction_data[0] = 1; - instruction_data[1..17] - .copy_from_slice(&BalanceForTests::add_max_amount_a().to_le_bytes()); - let instruction_data = risc0_zkvm::serde::to_vec(&instruction_data) - .expect("AMM Program expects valid transaction instruction data"); - ChainedCall { - program_id: TOKEN_PROGRAM_ID, - instruction_data, - pre_states: vec![ + ChainedCall::new( + TOKEN_PROGRAM_ID, + vec![ AccountForTests::user_holding_a(), AccountForTests::vault_a_init(), ], - pda_seeds: Vec::<PdaSeed>::new(), - } + &token_core::Instruction::Transfer { + amount_to_transfer: BalanceForTests::add_max_amount_a(), + }, + ) } fn cc_swap_token_b_test_1() -> ChainedCall { @@ -1273,20 +1205,17 @@ mod tests { let mut vault_b_auth = AccountForTests::vault_b_init(); vault_b_auth.is_authorized = true; - let mut instruction = vec![0; 23]; - instruction[0] = 1; - instruction[1..17].copy_from_slice(&swap_amount.to_le_bytes()); - let instruction_data = 
risc0_zkvm::serde::to_vec(&instruction) - .expect("AMM Program expects valid transaction instruction data"); - ChainedCall { - program_id: TOKEN_PROGRAM_ID, - instruction_data, - pre_states: vec![vault_b_auth, AccountForTests::user_holding_b()], - pda_seeds: vec![compute_vault_pda_seed( - IdForTests::pool_definition_id(), - IdForTests::token_b_definition_id(), - )], - } + ChainedCall::new( + TOKEN_PROGRAM_ID, + vec![vault_b_auth, AccountForTests::user_holding_b()], + &token_core::Instruction::Transfer { + amount_to_transfer: swap_amount, + }, + ) + .with_pda_seeds(vec![compute_vault_pda_seed( + IdForTests::pool_definition_id(), + IdForTests::token_b_definition_id(), + )]) } fn cc_swap_token_a_test_2() -> ChainedCall { @@ -1295,214 +1224,164 @@ mod tests { let mut vault_a_auth = AccountForTests::vault_a_init(); vault_a_auth.is_authorized = true; - let mut instruction_data = vec![0; 23]; - instruction_data[0] = 1; - instruction_data[1..17].copy_from_slice(&swap_amount.to_le_bytes()); - let instruction_data = risc0_zkvm::serde::to_vec(&instruction_data) - .expect("AMM Program expects valid transaction instruction data"); - ChainedCall { - program_id: TOKEN_PROGRAM_ID, - instruction_data, - pre_states: vec![vault_a_auth, AccountForTests::user_holding_a()], - pda_seeds: vec![compute_vault_pda_seed( - IdForTests::pool_definition_id(), - IdForTests::token_a_definition_id(), - )], - } + ChainedCall::new( + TOKEN_PROGRAM_ID, + vec![vault_a_auth, AccountForTests::user_holding_a()], + &token_core::Instruction::Transfer { + amount_to_transfer: swap_amount, + }, + ) + .with_pda_seeds(vec![compute_vault_pda_seed( + IdForTests::pool_definition_id(), + IdForTests::token_a_definition_id(), + )]) } fn cc_swap_token_b_test_2() -> ChainedCall { - let mut instruction = vec![0; 23]; - instruction[0] = 1; - instruction[1..17].copy_from_slice(&BalanceForTests::add_max_amount_b().to_le_bytes()); - let instruction_data = risc0_zkvm::serde::to_vec(&instruction) - .expect("AMM Program expects valid transaction instruction data"); - ChainedCall { - program_id: TOKEN_PROGRAM_ID, - instruction_data, - pre_states: vec![ + ChainedCall::new( + TOKEN_PROGRAM_ID, + vec![ AccountForTests::user_holding_b(), AccountForTests::vault_b_init(), ], - pda_seeds: Vec::<PdaSeed>::new(), - } + &token_core::Instruction::Transfer { + amount_to_transfer: BalanceForTests::add_max_amount_b(), + }, + ) } fn cc_add_token_a() -> ChainedCall { - let mut instruction = vec![0u8; 23]; - instruction[0] = 1; - instruction[1..17] - .copy_from_slice(&BalanceForTests::add_successful_amount_a().to_le_bytes()); - let instruction_data = risc0_zkvm::serde::to_vec(&instruction) - .expect("AMM Program expects valid transaction instruction data"); - ChainedCall { - program_id: TOKEN_PROGRAM_ID, - instruction_data, - pre_states: vec![ + ChainedCall::new( + TOKEN_PROGRAM_ID, + vec![ AccountForTests::user_holding_a(), AccountForTests::vault_a_init(), ], - pda_seeds: Vec::<PdaSeed>::new(), - } + &token_core::Instruction::Transfer { + amount_to_transfer: BalanceForTests::add_successful_amount_a(), + }, + ) } fn cc_add_token_b() -> ChainedCall { - let mut instruction = vec![0u8; 23]; - instruction[0] = 1; - instruction[1..17] - .copy_from_slice(&BalanceForTests::add_successful_amount_b().to_le_bytes()); - let instruction_data = risc0_zkvm::serde::to_vec(&instruction) - .expect("Swap Logic: AMM Program expects valid transaction instruction data"); - ChainedCall { - program_id: TOKEN_PROGRAM_ID, - instruction_data, - pre_states: vec![ + ChainedCall::new( + 
TOKEN_PROGRAM_ID, + vec![ AccountForTests::user_holding_b(), AccountForTests::vault_b_init(), ], - pda_seeds: Vec::<PdaSeed>::new(), - } + &token_core::Instruction::Transfer { + amount_to_transfer: BalanceForTests::add_successful_amount_b(), + }, + ) } fn cc_add_pool_lp() -> ChainedCall { let mut pool_lp_auth = AccountForTests::pool_lp_init(); pool_lp_auth.is_authorized = true; - let mut instruction = vec![0u8; 23]; - instruction[0] = 4; - instruction[1..17] - .copy_from_slice(&BalanceForTests::add_successful_amount_a().to_le_bytes()); - let instruction_data = risc0_zkvm::serde::to_vec(&instruction) - .expect("Swap Logic: AMM Program expects valid transaction instruction data"); - ChainedCall { - program_id: TOKEN_PROGRAM_ID, - instruction_data, - pre_states: vec![pool_lp_auth, AccountForTests::user_holding_lp_init()], - pda_seeds: vec![compute_liquidity_token_pda_seed( - IdForTests::pool_definition_id(), - )], - } + ChainedCall::new( + TOKEN_PROGRAM_ID, + vec![pool_lp_auth, AccountForTests::user_holding_lp_init()], + &token_core::Instruction::Mint { + amount_to_mint: BalanceForTests::add_successful_amount_a(), + }, + ) + .with_pda_seeds(vec![compute_liquidity_token_pda_seed( + IdForTests::pool_definition_id(), + )]) } fn cc_remove_token_a() -> ChainedCall { let mut vault_a_auth = AccountForTests::vault_a_init(); vault_a_auth.is_authorized = true; - let mut instruction = vec![0; 23]; - instruction[0] = 1; - instruction[1..17] - .copy_from_slice(&BalanceForTests::remove_actual_a_successful().to_le_bytes()); - let instruction_data = risc0_zkvm::serde::to_vec(&instruction) - .expect("AMM Program expects valid transaction instruction data"); - ChainedCall { - program_id: TOKEN_PROGRAM_ID, - instruction_data, - pre_states: vec![vault_a_auth, AccountForTests::user_holding_a()], - pda_seeds: vec![compute_vault_pda_seed( - IdForTests::pool_definition_id(), - IdForTests::token_a_definition_id(), - )], - } + ChainedCall::new( + TOKEN_PROGRAM_ID, + vec![vault_a_auth, AccountForTests::user_holding_a()], + &token_core::Instruction::Transfer { + amount_to_transfer: BalanceForTests::remove_actual_a_successful(), + }, + ) + .with_pda_seeds(vec![compute_vault_pda_seed( + IdForTests::pool_definition_id(), + IdForTests::token_a_definition_id(), + )]) } fn cc_remove_token_b() -> ChainedCall { let mut vault_b_auth = AccountForTests::vault_b_init(); vault_b_auth.is_authorized = true; - let mut instruction = vec![0; 23]; - instruction[0] = 1; - instruction[1..17] - .copy_from_slice(&BalanceForTests::remove_min_amount_b_low().to_le_bytes()); - let instruction_data = risc0_zkvm::serde::to_vec(&instruction) - .expect("AMM Program expects valid transaction instruction data"); - ChainedCall { - program_id: TOKEN_PROGRAM_ID, - instruction_data, - pre_states: vec![vault_b_auth, AccountForTests::user_holding_b()], - pda_seeds: vec![compute_vault_pda_seed( - IdForTests::pool_definition_id(), - IdForTests::token_b_definition_id(), - )], - } + ChainedCall::new( + TOKEN_PROGRAM_ID, + vec![vault_b_auth, AccountForTests::user_holding_b()], + &token_core::Instruction::Transfer { + amount_to_transfer: BalanceForTests::remove_min_amount_b_low(), + }, + ) + .with_pda_seeds(vec![compute_vault_pda_seed( + IdForTests::pool_definition_id(), + IdForTests::token_b_definition_id(), + )]) } fn cc_remove_pool_lp() -> ChainedCall { let mut pool_lp_auth = AccountForTests::pool_lp_init(); pool_lp_auth.is_authorized = true; - let mut instruction = vec![0; 23]; - instruction[0] = 3; - instruction[1..17] - 
.copy_from_slice(&BalanceForTests::remove_actual_a_successful().to_le_bytes()); - let instruction_data = risc0_zkvm::serde::to_vec(&instruction) - .expect("AMM Program expects valid transaction instruction data"); - ChainedCall { - program_id: TOKEN_PROGRAM_ID, - instruction_data, - pre_states: vec![ - AccountForTests::pool_lp_init(), - AccountForTests::user_holding_lp_init(), - ], - pda_seeds: vec![compute_liquidity_token_pda_seed( - IdForTests::pool_definition_id(), - )], - } + ChainedCall::new( + TOKEN_PROGRAM_ID, + vec![pool_lp_auth, AccountForTests::user_holding_lp_init()], + &token_core::Instruction::Burn { + amount_to_burn: BalanceForTests::remove_amount_lp(), + }, + ) + .with_pda_seeds(vec![compute_liquidity_token_pda_seed( + IdForTests::pool_definition_id(), + )]) } fn cc_new_definition_token_a() -> ChainedCall { - let mut instruction = vec![0; 23]; - instruction[0] = 1; - instruction[1..17] - .copy_from_slice(&BalanceForTests::add_successful_amount_a().to_le_bytes()); - let instruction_data = risc0_zkvm::serde::to_vec(&instruction) - .expect("AMM Program expects valid transaction instruction data"); - ChainedCall { - program_id: TOKEN_PROGRAM_ID, - instruction_data, - pre_states: vec![ + ChainedCall::new( + TOKEN_PROGRAM_ID, + vec![ AccountForTests::user_holding_a(), AccountForTests::vault_a_init(), ], - pda_seeds: Vec::<PdaSeed>::new(), - } + &token_core::Instruction::Transfer { + amount_to_transfer: BalanceForTests::add_successful_amount_a(), + }, + ) } fn cc_new_definition_token_b() -> ChainedCall { - let mut instruction = vec![0; 23]; - instruction[0] = 1; - instruction[1..17] - .copy_from_slice(&BalanceForTests::add_successful_amount_b().to_le_bytes()); - let instruction_data = risc0_zkvm::serde::to_vec(&instruction) - .expect("Swap Logic: AMM Program expects valid transaction instruction data"); - ChainedCall { - program_id: TOKEN_PROGRAM_ID, - instruction_data, - pre_states: vec![ + ChainedCall::new( + TOKEN_PROGRAM_ID, + vec![ AccountForTests::user_holding_b(), AccountForTests::vault_b_init(), ], - pda_seeds: Vec::<PdaSeed>::new(), - } + &token_core::Instruction::Transfer { + amount_to_transfer: BalanceForTests::add_successful_amount_b(), + }, + ) } fn cc_new_definition_token_lp() -> ChainedCall { - let mut instruction = vec![0; 23]; - instruction[0] = 1; - instruction[1..17] - .copy_from_slice(&BalanceForTests::add_successful_amount_a().to_le_bytes()); - let instruction_data = risc0_zkvm::serde::to_vec(&instruction) - .expect("AMM Program expects valid transaction instruction data"); - ChainedCall { - program_id: TOKEN_PROGRAM_ID, - instruction_data, - pre_states: vec![ + ChainedCall::new( + TOKEN_PROGRAM_ID, + vec![ AccountForTests::pool_lp_init(), AccountForTests::user_holding_lp_uninit(), ], - pda_seeds: vec![compute_liquidity_token_pda_seed( - IdForTests::pool_definition_id(), - )], - } + &token_core::Instruction::Mint { + amount_to_mint: BalanceForTests::add_successful_amount_a(), + }, + ) + .with_pda_seeds(vec![compute_liquidity_token_pda_seed( + IdForTests::pool_definition_id(), + )]) } } @@ -1566,8 +1445,7 @@ mod tests { account: Account { program_owner: TOKEN_PROGRAM_ID, balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_a_definition_id(), balance: BalanceForTests::user_token_a_balance(), }), @@ -1583,8 +1461,7 @@ mod tests { account: Account { program_owner: TOKEN_PROGRAM_ID, balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - 
account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_b_definition_id(), balance: BalanceForTests::user_token_b_balance(), }), @@ -1600,8 +1477,7 @@ mod tests { account: Account { program_owner: TOKEN_PROGRAM_ID, balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_a_definition_id(), balance: BalanceForTests::vault_a_reserve_init(), }), @@ -1617,8 +1493,7 @@ mod tests { account: Account { program_owner: TOKEN_PROGRAM_ID, balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_b_definition_id(), balance: BalanceForTests::vault_b_reserve_init(), }), @@ -1634,8 +1509,7 @@ mod tests { account: Account { program_owner: TOKEN_PROGRAM_ID, balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_a_definition_id(), balance: BalanceForTests::vault_a_reserve_high(), }), @@ -1651,8 +1525,7 @@ mod tests { account: Account { program_owner: TOKEN_PROGRAM_ID, balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_b_definition_id(), balance: BalanceForTests::vault_b_reserve_high(), }), @@ -1668,8 +1541,7 @@ mod tests { account: Account { program_owner: TOKEN_PROGRAM_ID, balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_a_definition_id(), balance: BalanceForTests::vault_a_reserve_low(), }), @@ -1685,8 +1557,7 @@ mod tests { account: Account { program_owner: TOKEN_PROGRAM_ID, balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_b_definition_id(), balance: BalanceForTests::vault_b_reserve_low(), }), @@ -1702,8 +1573,7 @@ mod tests { account: Account { program_owner: TOKEN_PROGRAM_ID, balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_a_definition_id(), balance: 0, }), @@ -1719,8 +1589,7 @@ mod tests { account: Account { program_owner: TOKEN_PROGRAM_ID, balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_b_definition_id(), balance: 0, }), @@ -1736,11 +1605,10 @@ mod tests { account: Account { program_owner: TOKEN_PROGRAM_ID, balance: 0u128, - data: TokenDefinition::into_data(TokenDefinition { - account_type: 0u8, - name: [1; 6], + data: Data::from(&TokenDefinition::Fungible { + name: String::from("test"), total_supply: BalanceForTests::vault_a_reserve_init(), - metadata_id: AccountId::new([0; 32]), + metadata_id: None, }), nonce: 0, }, @@ -1754,11 +1622,10 @@ mod tests { account: Account { program_owner: TOKEN_PROGRAM_ID, balance: 0u128, - data: TokenDefinition::into_data(TokenDefinition { - account_type: 0u8, - name: [1; 6], + data: Data::from(&TokenDefinition::Fungible { + name: String::from("test"), total_supply: BalanceForTests::vault_a_reserve_init(), - metadata_id: AccountId::new([0; 32]), + metadata_id: None, }), nonce: 0, }, @@ -1772,8 +1639,7 @@ mod tests { account: Account { program_owner: 
TOKEN_PROGRAM_ID, balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_lp_definition_id(), balance: 0, }), @@ -1789,8 +1655,7 @@ mod tests { account: Account { program_owner: TOKEN_PROGRAM_ID, balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_lp_definition_id(), balance: BalanceForTests::user_token_lp_balance(), }), @@ -2102,8 +1967,7 @@ mod tests { account: Account { program_owner: TOKEN_PROGRAM_ID, balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_a_definition_id(), balance: BalanceForTests::vault_a_reserve_init(), }), @@ -2119,8 +1983,7 @@ mod tests { account: Account { program_owner: TOKEN_PROGRAM_ID, balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: 1u8, + data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_b_definition_id(), balance: BalanceForTests::vault_b_reserve_init(), }), @@ -3552,8 +3415,14 @@ mod tests { let chained_call_a = chained_calls[0].clone(); let chained_call_b = chained_calls[1].clone(); - assert!(chained_call_a == ChainedCallForTests::cc_swap_token_a_test_1()); - assert!(chained_call_b == ChainedCallForTests::cc_swap_token_b_test_1()); + assert_eq!( + chained_call_a, + ChainedCallForTests::cc_swap_token_a_test_1() + ); + assert_eq!( + chained_call_b, + ChainedCallForTests::cc_swap_token_b_test_1() + ); } #[test] @@ -3581,7 +3450,13 @@ mod tests { let chained_call_a = chained_calls[1].clone(); let chained_call_b = chained_calls[0].clone(); - assert!(chained_call_a == ChainedCallForTests::cc_swap_token_a_test_2()); - assert!(chained_call_b == ChainedCallForTests::cc_swap_token_b_test_2()); + assert_eq!( + chained_call_a, + ChainedCallForTests::cc_swap_token_a_test_2() + ); + assert_eq!( + chained_call_b, + ChainedCallForTests::cc_swap_token_b_test_2() + ); } } diff --git a/program_methods/guest/src/bin/pinata_token.rs b/program_methods/guest/src/bin/pinata_token.rs index 04613791..188597cb 100644 --- a/program_methods/guest/src/bin/pinata_token.rs +++ b/program_methods/guest/src/bin/pinata_token.rs @@ -5,10 +5,7 @@ use nssa_core::{ write_nssa_outputs_with_chained_call, }, }; -use risc0_zkvm::{ - serde::to_vec, - sha::{Impl, Sha256}, -}; +use risc0_zkvm::sha::{Impl, Sha256}; const PRIZE: u128 = 150; @@ -82,23 +79,21 @@ fn main() { let winner_token_holding_post = winner_token_holding.account.clone(); pinata_definition_post.data = data.next_data(); - let mut instruction_data = vec![0; 23]; - instruction_data[0] = 1; - instruction_data[1..17].copy_from_slice(&PRIZE.to_le_bytes()); - // Flip authorization to true for chained call let mut pinata_token_holding_for_chain_call = pinata_token_holding.clone(); pinata_token_holding_for_chain_call.is_authorized = true; - let chained_calls = vec![ChainedCall { - program_id: pinata_token_holding_post.program_owner, - instruction_data: to_vec(&instruction_data).unwrap(), - pre_states: vec![ + let chained_call = ChainedCall::new( + pinata_token_holding_post.program_owner, + vec![ pinata_token_holding_for_chain_call, winner_token_holding.clone(), ], - pda_seeds: vec![PdaSeed::new([0; 32])], - }]; + &token_core::Instruction::Transfer { + amount_to_transfer: PRIZE, + }, + ) + .with_pda_seeds(vec![PdaSeed::new([0; 32])]); write_nssa_outputs_with_chained_call( 
instruction_words, @@ -112,6 +107,6 @@ fn main() { AccountPostState::new(pinata_token_holding_post), AccountPostState::new(winner_token_holding_post), ], - chained_calls, + vec![chained_call], ); } diff --git a/program_methods/guest/src/bin/token.rs b/program_methods/guest/src/bin/token.rs index 0f7b6287..0bc3d245 100644 --- a/program_methods/guest/src/bin/token.rs +++ b/program_methods/guest/src/bin/token.rs @@ -1,700 +1,13 @@ -use nssa_core::{ - account::{Account, AccountId, AccountWithMetadata, Data}, - program::{ - AccountPostState, DEFAULT_PROGRAM_ID, ProgramInput, read_nssa_inputs, write_nssa_outputs, - }, -}; - -// The token program has three functions: -// 1. New token definition. Arguments to this function are: -// * Two **default** accounts: [definition_account, holding_account]. The first default account -// will be initialized with the token definition account values. The second account will be -// initialized to a token holding account for the new token, holding the entire total supply. -// * An instruction data of 23-bytes, indicating the total supply and the token name, with the -// following layout: [0x00 || total_supply (little-endian 16 bytes) || name (6 bytes)] The -// name cannot be equal to [0x00, 0x00, 0x00, 0x00, 0x00, 0x00] -// 2. Token transfer Arguments to this function are: -// * Two accounts: [sender_account, recipient_account]. -// * An instruction data byte string of length 23, indicating the total supply with the -// following layout [0x01 || amount (little-endian 16 bytes) || 0x00 || 0x00 || 0x00 || 0x00 -// || 0x00 || 0x00]. -// 3. Initialize account with zero balance Arguments to this function are: -// * Two accounts: [definition_account, account_to_initialize]. -// * An dummy byte string of length 23, with the following layout [0x02 || 0x00 || 0x00 || 0x00 -// || ... || 0x00 || 0x00]. -// 4. Burn tokens from a Token Holding account (thus lowering total supply) Arguments to this -// function are: -// * Two accounts: [definition_account, holding_account]. -// * Authorization required: holding_account -// * An instruction data byte string of length 23, indicating the balance to burn with the -// folloiwng layout -// [0x03 || amount (little-endian 16 bytes) || 0x00 || 0x00 || 0x00 || 0x00 || 0x00 || 0x00]. -// 5. Mint additional supply of tokens tokens to a Token Holding account (thus increasing total -// supply) Arguments to this function are: -// * Two accounts: [definition_account, holding_account]. -// * Authorization required: definition_account -// * An instruction data byte string of length 23, indicating the balance to mint with the -// folloiwng layout -// [0x04 || amount (little-endian 16 bytes) || 0x00 || 0x00 || 0x00 || 0x00 || 0x00 || 0x00]. -// 6. New token definition with metadata. Arguments to this function are: -// * Three **default** accounts: [definition_account, metadata_account. holding_account]. The -// first default account will be initialized with the token definition account values. The -// second account will be initialized to a token metadata account for the new token -// definition. The third account will be initialized to a token holding account for the new -// token, holding the entire total supply. -// * An instruction data of 474-bytes, indicating the token name, total supply, token standard, -// metadata standard and metadata_values (uri and creators). 
the following layout: [0x05 || -// total_supply (little-endian 16 bytes) || name (6 bytes) || token_standard || -// metadata_standard || metadata_values] The name cannot be equal to [0x00, 0x00, 0x00, 0x00, -// 0x00, 0x00] -// 7. Print NFT copy from Master NFT Arguments to this function are: -// * Two accounts: [master_nft, printed_account (default)]. -// * Authorization required: master_nft -// * An dummy byte string of length 23, with the following layout [0x06 || 0x00 || 0x00 || 0x00 -// || ... || 0x00 || 0x00]. -const TOKEN_STANDARD_FUNGIBLE_TOKEN: u8 = 0; -const TOKEN_STANDARD_FUNGIBLE_ASSET: u8 = 1; -const TOKEN_STANDARD_NONFUNGIBLE: u8 = 2; -const TOKEN_STANDARD_NONFUNGIBLE_PRINTABLE: u8 = 3; - -const METADATA_TYPE_SIMPLE: u8 = 0; -const METADATA_TYPE_EXPANDED: u8 = 1; - -const TOKEN_DEFINITION_DATA_SIZE: usize = 55; - -const TOKEN_HOLDING_STANDARD: u8 = 1; -const TOKEN_HOLDING_NFT_MASTER: u8 = 2; -const TOKEN_HOLDING_NFT_PRINTED_COPY: u8 = 3; - -const TOKEN_HOLDING_DATA_SIZE: usize = 49; -const CURRENT_VERSION: u8 = 1; - -const TOKEN_METADATA_DATA_SIZE: usize = 463; - -fn is_token_standard_valid(standard: u8) -> bool { - matches!( - standard, - TOKEN_STANDARD_FUNGIBLE_TOKEN - | TOKEN_STANDARD_FUNGIBLE_ASSET - | TOKEN_STANDARD_NONFUNGIBLE - | TOKEN_STANDARD_NONFUNGIBLE_PRINTABLE - ) -} - -fn is_metadata_type_valid(standard: u8) -> bool { - matches!(standard, METADATA_TYPE_SIMPLE | METADATA_TYPE_EXPANDED) -} - -fn is_token_holding_type_valid(standard: u8) -> bool { - matches!(standard, |TOKEN_HOLDING_STANDARD| TOKEN_HOLDING_NFT_MASTER - | TOKEN_HOLDING_NFT_PRINTED_COPY) -} - -struct TokenDefinition { - account_type: u8, - name: [u8; 6], - total_supply: u128, - metadata_id: AccountId, -} - -impl TokenDefinition { - fn into_data(self) -> Data { - let mut bytes = Vec::<u8>::new(); - bytes.extend_from_slice(&[self.account_type]); - bytes.extend_from_slice(&self.name); - bytes.extend_from_slice(&self.total_supply.to_le_bytes()); - bytes.extend_from_slice(&self.metadata_id.to_bytes()); - - if bytes.len() != TOKEN_DEFINITION_DATA_SIZE { - panic!("Invalid Token Definition data"); - } - - Data::try_from(bytes).expect("Token definition data size must fit into data") - } - - fn parse(data: &Data) -> Option<Self> { - let data = Vec::<u8>::from(data.clone()); - - if data.len() != TOKEN_DEFINITION_DATA_SIZE { - None - } else { - let account_type = data[0]; - let name = data[1..7].try_into().expect("Name must be a 6 bytes"); - let total_supply = u128::from_le_bytes( - data[7..23] - .try_into() - .expect("Total supply must be 16 bytes little-endian"), - ); - let metadata_id = AccountId::new( - data[23..TOKEN_DEFINITION_DATA_SIZE] - .try_into() - .expect("Token Program expects valid Account Id for Metadata"), - ); - - let this = Some(Self { - account_type, - name, - total_supply, - metadata_id, - }); - - match account_type { - TOKEN_STANDARD_NONFUNGIBLE if total_supply != 1 => None, - TOKEN_STANDARD_FUNGIBLE_TOKEN if metadata_id != AccountId::new([0; 32]) => None, - _ => this, - } - } - } -} - -struct TokenHolding { - account_type: u8, - definition_id: AccountId, - balance: u128, -} - -impl TokenHolding { - fn new(definition_id: &AccountId) -> Self { - Self { - account_type: TOKEN_HOLDING_STANDARD, - definition_id: *definition_id, - balance: 0, - } - } - - fn parse(data: &Data) -> Option<Self> { - let data = Vec::<u8>::from(data.clone()); - - if data.len() != TOKEN_HOLDING_DATA_SIZE { - return None; - } - - // Check account_type - if !is_token_holding_type_valid(data[0]) { - return None; - } - - let 
account_type = data[0]; - let definition_id = AccountId::new( - data[1..33] - .try_into() - .expect("Defintion ID must be 32 bytes long"), - ); - let balance = u128::from_le_bytes( - data[33..] - .try_into() - .expect("balance must be 16 bytes little-endian"), - ); - - Some(Self { - definition_id, - balance, - account_type, - }) - } - - fn into_data(self) -> Data { - if !is_token_holding_type_valid(self.account_type) { - panic!("Invalid Token Holding type"); - } - - let mut bytes = Vec::<u8>::new(); - bytes.extend_from_slice(&[self.account_type]); - bytes.extend_from_slice(&self.definition_id.to_bytes()); - bytes.extend_from_slice(&self.balance.to_le_bytes()); - - if bytes.len() != TOKEN_HOLDING_DATA_SIZE { - panic!("Invalid Token Holding data"); - } - - Data::try_from(bytes).expect("Invalid data") - } -} - -struct TokenMetadata { - account_type: u8, - version: u8, - definition_id: AccountId, - uri: [u8; 200], - creators: [u8; 250], - /// Block id - primary_sale_date: u64, -} - -impl TokenMetadata { - fn into_data(self) -> Data { - if !is_metadata_type_valid(self.account_type) { - panic!("Invalid Metadata type"); - } - - let mut bytes = Vec::<u8>::new(); - bytes.extend_from_slice(&[self.account_type]); - bytes.extend_from_slice(&[self.version]); - bytes.extend_from_slice(&self.definition_id.to_bytes()); - bytes.extend_from_slice(&self.uri); - bytes.extend_from_slice(&self.creators); - bytes.extend_from_slice(&self.primary_sale_date.to_le_bytes()); - - if bytes.len() != TOKEN_METADATA_DATA_SIZE { - panic!("Invalid Token Definition data length"); - } - - Data::try_from(bytes).expect("Invalid data") - } -} - -fn transfer(pre_states: &[AccountWithMetadata], balance_to_move: u128) -> Vec<AccountPostState> { - if pre_states.len() != 2 { - panic!("Invalid number of input accounts"); - } - let sender = &pre_states[0]; - let recipient = &pre_states[1]; - - if !sender.is_authorized { - panic!("Sender authorization is missing"); - } - - let sender_holding = TokenHolding::parse(&sender.account.data).expect("Invalid sender data"); - - let recipient_holding = if recipient.account == Account::default() { - TokenHolding::new(&sender_holding.definition_id) - } else { - TokenHolding::parse(&recipient.account.data).expect("Invalid recipient data") - }; - - if sender_holding.definition_id != recipient_holding.definition_id { - panic!("Sender and recipient definition id mismatch"); - } - - let (sender_holding, recipient_holding) = - if sender_holding.account_type != TOKEN_HOLDING_NFT_MASTER { - standard_transfer(sender_holding, recipient_holding, balance_to_move) - } else { - nft_master_transfer(sender_holding, recipient_holding, balance_to_move) - }; - - let sender_post = { - let mut this = sender.account.clone(); - this.data = sender_holding.into_data(); - AccountPostState::new(this) - }; - - let recipient_post = { - let mut this = recipient.account.clone(); - this.data = recipient_holding.into_data(); - - // Claim the recipient account if it has default program owner - if this.program_owner == DEFAULT_PROGRAM_ID { - AccountPostState::new_claimed(this) - } else { - AccountPostState::new(this) - } - }; - - vec![sender_post, recipient_post] -} - -fn standard_transfer( - sender_holding: TokenHolding, - recipient_holding: TokenHolding, - balance_to_move: u128, -) -> (TokenHolding, TokenHolding) { - let mut sender_holding = sender_holding; - let mut recipient_holding = recipient_holding; - - if sender_holding.balance < balance_to_move { - panic!("Insufficient balance"); - } - - sender_holding.balance = 
sender_holding - .balance - .checked_sub(balance_to_move) - .expect("Checked above"); - recipient_holding.balance = recipient_holding - .balance - .checked_add(balance_to_move) - .expect("Recipient balance overflow"); - - recipient_holding.account_type = sender_holding.account_type; - - (sender_holding, recipient_holding) -} - -fn nft_master_transfer( - sender_holding: TokenHolding, - recipient_holding: TokenHolding, - balance_to_move: u128, -) -> (TokenHolding, TokenHolding) { - let mut sender_holding = sender_holding; - let mut recipient_holding = recipient_holding; - - if recipient_holding.balance != 0 { - panic!("Invalid balance in recipient account for NFT transfer"); - } - - if sender_holding.balance != balance_to_move { - panic!("Invalid balance for NFT Master transfer"); - } - - sender_holding.balance = 0; - recipient_holding.balance = balance_to_move; - recipient_holding.account_type = sender_holding.account_type; - - (sender_holding, recipient_holding) -} - -fn new_definition( - pre_states: &[AccountWithMetadata], - name: [u8; 6], - total_supply: u128, -) -> Vec<AccountPostState> { - if pre_states.len() != 2 { - panic!("Invalid number of input accounts"); - } - - let definition_target_account = &pre_states[0]; - let holding_target_account = &pre_states[1]; - - if definition_target_account.account != Account::default() { - panic!("Definition target account must have default values"); - } - - if holding_target_account.account != Account::default() { - panic!("Holding target account must have default values"); - } - - let token_definition = TokenDefinition { - account_type: TOKEN_STANDARD_FUNGIBLE_TOKEN, - name, - total_supply, - metadata_id: AccountId::new([0; 32]), - }; - - let token_holding = TokenHolding { - account_type: TOKEN_HOLDING_STANDARD, - definition_id: definition_target_account.account_id, - balance: total_supply, - }; - - let mut definition_target_account_post = definition_target_account.account.clone(); - definition_target_account_post.data = token_definition.into_data(); - - let mut holding_target_account_post = holding_target_account.account.clone(); - holding_target_account_post.data = token_holding.into_data(); - - vec![ - AccountPostState::new_claimed(definition_target_account_post), - AccountPostState::new_claimed(holding_target_account_post), - ] -} - -fn new_definition_with_metadata( - pre_states: &[AccountWithMetadata], - name: [u8; 6], - total_supply: u128, - token_standard: u8, - metadata_standard: u8, - metadata_values: &Data, -) -> Vec<AccountPostState> { - if pre_states.len() != 3 { - panic!("Invalid number of input accounts"); - } - - let definition_target_account = &pre_states[0]; - let metadata_target_account = &pre_states[1]; - let holding_target_account = &pre_states[2]; - - if definition_target_account.account != Account::default() { - panic!("Definition target account must have default values"); - } - - if metadata_target_account.account != Account::default() { - panic!("Metadata target account must have default values"); - } - - if holding_target_account.account != Account::default() { - panic!("Holding target account must have default values"); - } - - if !is_token_standard_valid(token_standard) { - panic!("Invalid Token Standard provided"); - } - - if !is_metadata_type_valid(metadata_standard) { - panic!("Invalid Metadata Standadard provided"); - } - - if !valid_total_supply_for_token_standard(total_supply, token_standard) { - panic!("Invalid total supply for the specified token supply"); - } - - let token_definition = TokenDefinition { - 
account_type: token_standard, - name, - total_supply, - metadata_id: metadata_target_account.account_id, - }; - - let token_holding = TokenHolding { - account_type: TOKEN_HOLDING_STANDARD, - definition_id: definition_target_account.account_id, - balance: total_supply, - }; - - if metadata_values.len() != 450 { - panic!("Metadata values data should be 450 bytes"); - } - - let uri: [u8; 200] = metadata_values[0..200] - .try_into() - .expect("Token program expects valid uri for Metadata"); - let creators: [u8; 250] = metadata_values[200..450] - .try_into() - .expect("Token program expects valid creators for Metadata"); - - let token_metadata = TokenMetadata { - account_type: metadata_standard, - version: CURRENT_VERSION, - definition_id: definition_target_account.account_id, - uri, - creators, - primary_sale_date: 0u64, // TODO #261: future works to implement this - }; - - let mut definition_target_account_post = definition_target_account.account.clone(); - definition_target_account_post.data = token_definition.into_data(); - - let mut holding_target_account_post = holding_target_account.account.clone(); - holding_target_account_post.data = token_holding.into_data(); - - let mut metadata_target_account_post = metadata_target_account.account.clone(); - metadata_target_account_post.data = token_metadata.into_data(); - - vec![ - AccountPostState::new_claimed(definition_target_account_post), - AccountPostState::new_claimed(holding_target_account_post), - AccountPostState::new_claimed(metadata_target_account_post), - ] -} - -fn valid_total_supply_for_token_standard(total_supply: u128, token_standard: u8) -> bool { - token_standard != TOKEN_STANDARD_NONFUNGIBLE || total_supply == 1 -} - -fn initialize_account(pre_states: &[AccountWithMetadata]) -> Vec<AccountPostState> { - if pre_states.len() != 2 { - panic!("Invalid number of accounts"); - } - - let definition = &pre_states[0]; - let account_to_initialize = &pre_states[1]; - - if account_to_initialize.account != Account::default() { - panic!("Only Uninitialized accounts can be initialized"); - } - - // TODO: #212 We should check that this is an account owned by the token program. 
- // This check can't be done here since the ID of the program is known only after compiling it - // - // Check definition account is valid - let _definition_values = - TokenDefinition::parse(&definition.account.data).expect("Definition account must be valid"); - let holding_values = TokenHolding::new(&definition.account_id); - - let definition_post = definition.account.clone(); - let mut account_to_initialize = account_to_initialize.account.clone(); - account_to_initialize.data = holding_values.into_data(); - - vec![ - AccountPostState::new(definition_post), - AccountPostState::new_claimed(account_to_initialize), - ] -} - -fn burn(pre_states: &[AccountWithMetadata], balance_to_burn: u128) -> Vec<AccountPostState> { - if pre_states.len() != 2 { - panic!("Invalid number of accounts"); - } - - let definition = &pre_states[0]; - let user_holding = &pre_states[1]; - - if !user_holding.is_authorized { - panic!("Authorization is missing"); - } - - let definition_values = TokenDefinition::parse(&definition.account.data) - .expect("Token Definition account must be valid"); - let user_values = TokenHolding::parse(&user_holding.account.data) - .expect("Token Holding account must be valid"); - - if definition.account_id != user_values.definition_id { - panic!("Mismatch Token Definition and Token Holding"); - } - - if user_values.balance < balance_to_burn { - panic!("Insufficient balance to burn"); - } - - let mut post_user_holding = user_holding.account.clone(); - let mut post_definition = definition.account.clone(); - - post_user_holding.data = TokenHolding::into_data(TokenHolding { - account_type: user_values.account_type, - definition_id: user_values.definition_id, - balance: user_values - .balance - .checked_sub(balance_to_burn) - .expect("Checked above"), - }); - - post_definition.data = TokenDefinition::into_data(TokenDefinition { - account_type: definition_values.account_type, - name: definition_values.name, - total_supply: definition_values - .total_supply - .checked_sub(balance_to_burn) - .expect("Total supply underflow"), - metadata_id: definition_values.metadata_id, - }); - - vec![ - AccountPostState::new(post_definition), - AccountPostState::new(post_user_holding), - ] -} - -fn is_mintable(account_type: u8) -> bool { - account_type != TOKEN_STANDARD_NONFUNGIBLE -} - -fn mint_additional_supply( - pre_states: &[AccountWithMetadata], - amount_to_mint: u128, -) -> Vec<AccountPostState> { - if pre_states.len() != 2 { - panic!("Invalid number of accounts"); - } - - let definition = &pre_states[0]; - let token_holding = &pre_states[1]; - - if !definition.is_authorized { - panic!("Definition authorization is missing"); - } - - let definition_values = - TokenDefinition::parse(&definition.account.data).expect("Definition account must be valid"); - - let token_holding_values: TokenHolding = if token_holding.account == Account::default() { - TokenHolding::new(&definition.account_id) - } else { - TokenHolding::parse(&token_holding.account.data).expect("Holding account must be valid") - }; - - if !is_mintable(definition_values.account_type) { - panic!("Token Definition's standard does not permit minting additional supply"); - } - - if definition.account_id != token_holding_values.definition_id { - panic!("Mismatch Token Definition and Token Holding"); - } - - let token_holding_post_data = TokenHolding { - account_type: token_holding_values.account_type, - definition_id: token_holding_values.definition_id, - balance: token_holding_values - .balance - .checked_add(amount_to_mint) - .expect("New balance 
overflow"), - }; - - let post_total_supply = definition_values - .total_supply - .checked_add(amount_to_mint) - .expect("Total supply overflow"); - - let post_definition_data = TokenDefinition { - account_type: definition_values.account_type, - name: definition_values.name, - total_supply: post_total_supply, - metadata_id: definition_values.metadata_id, - }; - - let post_definition = { - let mut this = definition.account.clone(); - this.data = post_definition_data.into_data(); - AccountPostState::new(this) - }; - - let token_holding_post = { - let mut this = token_holding.account.clone(); - this.data = token_holding_post_data.into_data(); - - // Claim the recipient account if it has default program owner - if this.program_owner == DEFAULT_PROGRAM_ID { - AccountPostState::new_claimed(this) - } else { - AccountPostState::new(this) - } - }; - vec![post_definition, token_holding_post] -} - -fn print_nft(pre_states: &[AccountWithMetadata]) -> Vec<AccountPostState> { - if pre_states.len() != 2 { - panic!("Invalid number of accounts"); - } - - let master_account = &pre_states[0]; - let printed_account = &pre_states[1]; - - if !master_account.is_authorized { - panic!("Master NFT Account must be authorized"); - } - - if printed_account.account != Account::default() { - panic!("Printed Account must be uninitialized"); - } - - let mut master_account_data = - TokenHolding::parse(&master_account.account.data).expect("Invalid Token Holding data"); - - if master_account_data.account_type != TOKEN_HOLDING_NFT_MASTER { - panic!("Invalid Token Holding provided as NFT Master Account"); - } - - if master_account_data.balance < 2 { - panic!("Insufficient balance to print another NFT copy"); - } - - let definition_id = master_account_data.definition_id; - - let post_master_account = { - let mut this = master_account.account.clone(); - master_account_data.balance -= 1; - this.data = master_account_data.into_data(); - AccountPostState::new(this) - }; - - let post_printed_account = { - let mut this = printed_account.account.clone(); - - let printed_data = TokenHolding { - account_type: TOKEN_HOLDING_NFT_PRINTED_COPY, - definition_id, - balance: 1, - }; - - this.data = TokenHolding::into_data(printed_data); - - AccountPostState::new_claimed(this) - }; - - vec![post_master_account, post_printed_account] -} - -type Instruction = Vec<u8>; +//! The Token Program. +//! +//! This program implements a simple token system supporting both fungible and non-fungible tokens +//! (NFTs). +//! +//! Token program accepts [`Instruction`] as input, refer to the corresponding documentation +//! for more details. + +use nssa_core::program::{ProgramInput, read_nssa_inputs, write_nssa_outputs}; +use token_program::core::Instruction; fn main() { let ( @@ -705,1622 +18,68 @@ fn main() { instruction_words, ) = read_nssa_inputs::<Instruction>(); - let post_states = match instruction[0] { - 0 => { - // Parse instruction - let total_supply = u128::from_le_bytes( - instruction[1..17] - .try_into() - .expect("Total supply must be 16 bytes little-endian"), - ); - let name: [u8; 6] = instruction[17..] 
+ let pre_states_clone = pre_states.clone(); + + let post_states = match instruction { + Instruction::Transfer { + amount_to_transfer: balance_to_move, + } => { + let [sender, recipient] = pre_states .try_into() - .expect("Name must be 6 bytes long"); - assert_ne!(name, [0; 6]); - - // Execute - new_definition(&pre_states, name, total_supply) + .expect("Transfer instruction requires exactly two accounts"); + token_program::transfer::transfer(sender, recipient, balance_to_move) } - 1 => { - // Parse instruction - let balance_to_move = u128::from_le_bytes( - instruction[1..17] - .try_into() - .expect("Balance to move must be 16 bytes little-endian"), - ); - let name: [u8; 6] = instruction[17..] + Instruction::NewFungibleDefinition { name, total_supply } => { + let [definition_account, holding_account] = pre_states .try_into() - .expect("Name must be 6 bytes long"); - assert_eq!(name, [0; 6]); - - // Execute - transfer(&pre_states, balance_to_move) - } - 2 => { - // Initialize account - if instruction[1..] != [0; 22] { - panic!("Invalid instruction for initialize account"); - } - initialize_account(&pre_states) - } - 3 => { - let balance_to_burn = u128::from_le_bytes( - instruction[1..17] - .try_into() - .expect("Balance to burn must be 16 bytes little-endian"), - ); - let name: [u8; 6] = instruction[17..] - .try_into() - .expect("Name must be 6 bytes long"); - assert_eq!(name, [0; 6]); - - // Execute - burn(&pre_states, balance_to_burn) - } - 4 => { - let balance_to_mint = u128::from_le_bytes( - instruction[1..17] - .try_into() - .expect("Balance to burn must be 16 bytes little-endian"), - ); - let name: [u8; 6] = instruction[17..] - .try_into() - .expect("Name must be 6 bytes long"); - assert_eq!(name, [0; 6]); - - // Execute - mint_additional_supply(&pre_states, balance_to_mint) - } - 5 => { - if instruction.len() != 474 { - panic!("Invalid instruction length") - } - - // Parse instruction - let total_supply = u128::from_le_bytes( - instruction[1..17] - .try_into() - .expect("Total supply must be 16 bytes little-endian"), - ); - let name = instruction[17..23] - .try_into() - .expect("Name must be 6 bytes long"); - assert_ne!(name, [0; 6]); - let token_standard = instruction[23]; - let metadata_standard = instruction[24]; - let metadata_values: Data = - Data::try_from(instruction[25..474].to_vec()).expect("Invalid metadata"); - - // Execute - new_definition_with_metadata( - &pre_states, + .expect("NewFungibleDefinition instruction requires exactly two accounts"); + token_program::new_definition::new_fungible_definition( + definition_account, + holding_account, name, total_supply, - token_standard, - metadata_standard, - &metadata_values, ) } - 6 => { - if instruction.len() != 23 { - panic!("Invalid instruction length"); - } - - // Initialize account - if instruction[1..] 
!= [0; 22] { - panic!("Invalid instruction for initialize account"); - } - - print_nft(&pre_states) + Instruction::NewDefinitionWithMetadata { + new_definition, + metadata, + } => { + let [definition_account, holding_account, metadata_account] = pre_states + .try_into() + .expect("NewDefinitionWithMetadata instruction requires exactly three accounts"); + token_program::new_definition::new_definition_with_metadata( + definition_account, + holding_account, + metadata_account, + new_definition, + *metadata, + ) + } + Instruction::InitializeAccount => { + let [definition_account, account_to_initialize] = pre_states + .try_into() + .expect("InitializeAccount instruction requires exactly two accounts"); + token_program::initialize::initialize_account(definition_account, account_to_initialize) + } + Instruction::Burn { amount_to_burn } => { + let [definition_account, user_holding_account] = pre_states + .try_into() + .expect("Burn instruction requires exactly two accounts"); + token_program::burn::burn(definition_account, user_holding_account, amount_to_burn) + } + Instruction::Mint { amount_to_mint } => { + let [definition_account, user_holding_account] = pre_states + .try_into() + .expect("Mint instruction requires exactly two accounts"); + token_program::mint::mint(definition_account, user_holding_account, amount_to_mint) + } + Instruction::PrintNft => { + let [master_account, printed_account] = pre_states + .try_into() + .expect("PrintNft instruction requires exactly two accounts"); + token_program::print_nft::print_nft(master_account, printed_account) } - _ => panic!("Invalid instruction"), }; - write_nssa_outputs(instruction_words, pre_states, post_states); -} - -#[cfg(test)] -mod tests { - use nssa_core::account::{Account, AccountId, AccountWithMetadata, Data}; - - use crate::{ - TOKEN_DEFINITION_DATA_SIZE, TOKEN_HOLDING_DATA_SIZE, TOKEN_HOLDING_NFT_MASTER, - TOKEN_HOLDING_NFT_PRINTED_COPY, TOKEN_HOLDING_STANDARD, TOKEN_STANDARD_FUNGIBLE_TOKEN, - TOKEN_STANDARD_NONFUNGIBLE, TokenDefinition, TokenHolding, burn, mint_additional_supply, - new_definition, new_definition_with_metadata, print_nft, transfer, - }; - - struct BalanceForTests; - struct IdForTests; - - struct AccountForTests; - - impl AccountForTests { - fn definition_account_auth() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [5u32; 8], - balance: 0u128, - data: TokenDefinition::into_data(TokenDefinition { - account_type: TOKEN_STANDARD_FUNGIBLE_TOKEN, - name: [2; 6], - total_supply: BalanceForTests::init_supply(), - metadata_id: AccountId::new([0; 32]), - }), - nonce: 0, - }, - is_authorized: true, - account_id: IdForTests::pool_definition_id(), - } - } - - fn definition_account_without_auth() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [5u32; 8], - balance: 0u128, - data: TokenDefinition::into_data(TokenDefinition { - account_type: TOKEN_STANDARD_FUNGIBLE_TOKEN, - name: [2; 6], - total_supply: BalanceForTests::init_supply(), - metadata_id: AccountId::new([0; 32]), - }), - nonce: 0, - }, - is_authorized: false, - account_id: IdForTests::pool_definition_id(), - } - } - - fn holding_different_definition() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [5u32; 8], - balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: TOKEN_HOLDING_STANDARD, - definition_id: IdForTests::pool_definition_id_diff(), - balance: BalanceForTests::holding_balance(), - }), - nonce: 0, - }, - is_authorized: true, 
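// A minimal, self-contained sketch of the account-arity check used in each new dispatch arm
// above: converting `Vec<T>` into a fixed-size array fails unless the length matches exactly.
// `DemoAccount` is a hypothetical stand-in for `nssa_core::account::AccountWithMetadata`.
#[derive(Debug)]
struct DemoAccount {
    id: u8,
}

fn require_two(pre_states: Vec<DemoAccount>) -> (DemoAccount, DemoAccount) {
    // `Vec<T>` -> `[T; 2]` returns the original Vec as the error when the length is wrong,
    // so the `expect` here mirrors the per-instruction arity panics in the dispatch above.
    let [sender, recipient]: [DemoAccount; 2] = pre_states
        .try_into()
        .expect("this instruction requires exactly two accounts");
    (sender, recipient)
}

fn main() {
    let (sender, recipient) = require_two(vec![DemoAccount { id: 1 }, DemoAccount { id: 2 }]);
    println!("sender {} -> recipient {}", sender.id, recipient.id);
}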
- account_id: IdForTests::holding_id(), - } - } - - fn holding_same_definition_with_authorization() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [5u32; 8], - balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: TOKEN_HOLDING_STANDARD, - definition_id: IdForTests::pool_definition_id(), - balance: BalanceForTests::holding_balance(), - }), - nonce: 0, - }, - is_authorized: true, - account_id: IdForTests::holding_id(), - } - } - - fn holding_same_definition_without_authorization() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [5u32; 8], - balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: TOKEN_HOLDING_STANDARD, - definition_id: IdForTests::pool_definition_id(), - balance: BalanceForTests::holding_balance(), - }), - nonce: 0, - }, - is_authorized: false, - account_id: IdForTests::holding_id(), - } - } - - fn holding_same_definition_without_authorization_overflow() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [5u32; 8], - balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: TOKEN_HOLDING_STANDARD, - definition_id: IdForTests::pool_definition_id(), - balance: BalanceForTests::init_supply(), - }), - nonce: 0, - }, - is_authorized: false, - account_id: IdForTests::holding_id(), - } - } - - fn definition_account_post_burn() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [5u32; 8], - balance: 0u128, - data: TokenDefinition::into_data(TokenDefinition { - account_type: TOKEN_STANDARD_FUNGIBLE_TOKEN, - name: [2; 6], - total_supply: BalanceForTests::init_supply_burned(), - metadata_id: AccountId::new([0; 32]), - }), - nonce: 0, - }, - is_authorized: true, - account_id: IdForTests::pool_definition_id(), - } - } - - fn holding_account_post_burn() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [5u32; 8], - balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: TOKEN_HOLDING_STANDARD, - definition_id: IdForTests::pool_definition_id(), - balance: BalanceForTests::holding_balance_burned(), - }), - nonce: 0, - }, - is_authorized: false, - account_id: IdForTests::holding_id(), - } - } - - fn holding_account_uninit() -> AccountWithMetadata { - AccountWithMetadata { - account: Account::default(), - is_authorized: false, - account_id: IdForTests::holding_id_2(), - } - } - - fn init_mint() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [0u32; 8], - balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: TOKEN_HOLDING_STANDARD, - definition_id: IdForTests::pool_definition_id(), - balance: BalanceForTests::mint_success(), - }), - nonce: 0, - }, - is_authorized: false, - account_id: IdForTests::holding_id(), - } - } - - fn holding_account_same_definition_mint() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [5u32; 8], - balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: TOKEN_HOLDING_STANDARD, - definition_id: IdForTests::pool_definition_id(), - balance: BalanceForTests::holding_balance_mint(), - }), - nonce: 0, - }, - is_authorized: true, - account_id: IdForTests::pool_definition_id(), - } - } - - fn definition_account_mint() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [5u32; 8], - balance: 0u128, - data: 
TokenDefinition::into_data(TokenDefinition { - account_type: TOKEN_STANDARD_FUNGIBLE_TOKEN, - name: [2; 6], - total_supply: BalanceForTests::init_supply_mint(), - metadata_id: AccountId::new([0; 32]), - }), - nonce: 0, - }, - is_authorized: true, - account_id: IdForTests::pool_definition_id(), - } - } - - fn holding_same_definition_with_authorization_and_large_balance() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [5u32; 8], - balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: TOKEN_HOLDING_STANDARD, - definition_id: IdForTests::pool_definition_id(), - balance: BalanceForTests::mint_overflow(), - }), - nonce: 0, - }, - is_authorized: true, - account_id: IdForTests::pool_definition_id(), - } - } - - fn definition_account_with_authorization_nonfungible() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [5u32; 8], - balance: 0u128, - data: TokenDefinition::into_data(TokenDefinition { - account_type: TOKEN_STANDARD_NONFUNGIBLE, - name: [2; 6], - total_supply: 1, - metadata_id: AccountId::new([0; 32]), - }), - nonce: 0, - }, - is_authorized: true, - account_id: IdForTests::pool_definition_id(), - } - } - - fn definition_account_uninit() -> AccountWithMetadata { - AccountWithMetadata { - account: Account::default(), - is_authorized: false, - account_id: IdForTests::pool_definition_id(), - } - } - - fn holding_account_init() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [5u32; 8], - balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: TOKEN_HOLDING_STANDARD, - definition_id: IdForTests::pool_definition_id(), - balance: BalanceForTests::init_supply(), - }), - nonce: 0, - }, - is_authorized: true, - account_id: IdForTests::holding_id(), - } - } - - fn definition_account_unclaimed() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [0u32; 8], - balance: 0u128, - data: TokenDefinition::into_data(TokenDefinition { - account_type: TOKEN_STANDARD_FUNGIBLE_TOKEN, - name: [2; 6], - total_supply: BalanceForTests::init_supply(), - metadata_id: AccountId::new([0; 32]), - }), - nonce: 0, - }, - is_authorized: true, - account_id: IdForTests::pool_definition_id(), - } - } - - fn holding_account_unclaimed() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [0u32; 8], - balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: TOKEN_HOLDING_STANDARD, - definition_id: IdForTests::pool_definition_id(), - balance: BalanceForTests::init_supply(), - }), - nonce: 0, - }, - is_authorized: true, - account_id: IdForTests::holding_id(), - } - } - - fn holding_account2_init() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [5u32; 8], - balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: TOKEN_HOLDING_STANDARD, - definition_id: IdForTests::pool_definition_id(), - balance: BalanceForTests::init_supply(), - }), - nonce: 0, - }, - is_authorized: true, - account_id: IdForTests::holding_id_2(), - } - } - - fn holding_account2_init_post_transfer() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [5u32; 8], - balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: TOKEN_HOLDING_STANDARD, - definition_id: IdForTests::pool_definition_id(), - balance: BalanceForTests::recipient_post_transfer(), - }), - nonce: 0, - }, - is_authorized: true, - 
account_id: IdForTests::holding_id_2(), - } - } - - fn holding_account_init_post_transfer() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [5u32; 8], - balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: TOKEN_HOLDING_STANDARD, - definition_id: IdForTests::pool_definition_id(), - balance: BalanceForTests::sender_post_transfer(), - }), - nonce: 0, - }, - is_authorized: true, - account_id: IdForTests::holding_id(), - } - } - - fn holding_account_master_nft() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [5u32; 8], - balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: TOKEN_HOLDING_NFT_MASTER, - definition_id: IdForTests::pool_definition_id(), - balance: BalanceForTests::printable_copies(), - }), - nonce: 0, - }, - is_authorized: true, - account_id: IdForTests::holding_id(), - } - } - - fn holding_account_master_nft_insufficient_balance() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [5u32; 8], - balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: TOKEN_HOLDING_NFT_MASTER, - definition_id: IdForTests::pool_definition_id(), - balance: 1, - }), - nonce: 0, - }, - is_authorized: true, - account_id: IdForTests::holding_id(), - } - } - - fn holding_account_master_nft_after_print() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [5u32; 8], - balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: TOKEN_HOLDING_NFT_MASTER, - definition_id: IdForTests::pool_definition_id(), - balance: BalanceForTests::printable_copies() - 1, - }), - nonce: 0, - }, - is_authorized: true, - account_id: IdForTests::holding_id(), - } - } - - fn holding_account_printed_nft() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [0u32; 8], - balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: TOKEN_HOLDING_NFT_PRINTED_COPY, - definition_id: IdForTests::pool_definition_id(), - balance: 1, - }), - nonce: 0, - }, - is_authorized: false, - account_id: IdForTests::holding_id(), - } - } - - fn holding_account_with_master_nft_transferred_to() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [0u32; 8], - balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: TOKEN_HOLDING_NFT_MASTER, - definition_id: IdForTests::pool_definition_id(), - balance: BalanceForTests::printable_copies(), - }), - nonce: 0, - }, - is_authorized: true, - account_id: IdForTests::holding_id_2(), - } - } - - fn holding_account_master_nft_post_transfer() -> AccountWithMetadata { - AccountWithMetadata { - account: Account { - program_owner: [5u32; 8], - balance: 0u128, - data: TokenHolding::into_data(TokenHolding { - account_type: TOKEN_HOLDING_NFT_MASTER, - definition_id: IdForTests::pool_definition_id(), - balance: 0, - }), - nonce: 0, - }, - is_authorized: true, - account_id: IdForTests::holding_id(), - } - } - } - - impl BalanceForTests { - fn init_supply() -> u128 { - 100_000 - } - - fn holding_balance() -> u128 { - 1_000 - } - - fn init_supply_burned() -> u128 { - 99_500 - } - - fn holding_balance_burned() -> u128 { - 500 - } - - fn burn_success() -> u128 { - 500 - } - - fn burn_insufficient() -> u128 { - 1_500 - } - - fn mint_success() -> u128 { - 50_000 - } - - fn holding_balance_mint() -> u128 { - 51_000 - } - - fn mint_overflow() -> u128 { - u128::MAX - 40_000 - } - - fn 
init_supply_mint() -> u128 { - 150_000 - } - - fn sender_post_transfer() -> u128 { - 95_000 - } - - fn recipient_post_transfer() -> u128 { - 105_000 - } - - fn transfer_amount() -> u128 { - 5_000 - } - - fn printable_copies() -> u128 { - 10 - } - } - - impl IdForTests { - fn pool_definition_id() -> AccountId { - AccountId::new([15; 32]) - } - - fn pool_definition_id_diff() -> AccountId { - AccountId::new([16; 32]) - } - - fn holding_id() -> AccountId { - AccountId::new([17; 32]) - } - - fn holding_id_2() -> AccountId { - AccountId::new([42; 32]) - } - } - - #[should_panic(expected = "Invalid number of input accounts")] - #[test] - fn test_call_new_definition_with_invalid_number_of_accounts_1() { - let pre_states = vec![AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([1; 32]), - }]; - let _post_states = new_definition(&pre_states, [0xca, 0xfe, 0xca, 0xfe, 0xca, 0xfe], 10); - } - - #[should_panic(expected = "Invalid number of input accounts")] - #[test] - fn test_call_new_definition_with_invalid_number_of_accounts_2() { - let pre_states = vec![ - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([1; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([2; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([3; 32]), - }, - ]; - let _post_states = new_definition(&pre_states, [0xca, 0xfe, 0xca, 0xfe, 0xca, 0xfe], 10); - } - - #[should_panic(expected = "Definition target account must have default values")] - #[test] - fn test_new_definition_non_default_first_account_should_fail() { - let pre_states = vec![ - AccountWithMetadata { - account: Account { - program_owner: [1, 2, 3, 4, 5, 6, 7, 8], - ..Account::default() - }, - is_authorized: true, - account_id: AccountId::new([1; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([2; 32]), - }, - ]; - let _post_states = new_definition(&pre_states, [0xca, 0xfe, 0xca, 0xfe, 0xca, 0xfe], 10); - } - - #[should_panic(expected = "Holding target account must have default values")] - #[test] - fn test_new_definition_non_default_second_account_should_fail() { - let pre_states = vec![ - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([1; 32]), - }, - AccountWithMetadata { - account: Account { - program_owner: [1, 2, 3, 4, 5, 6, 7, 8], - ..Account::default() - }, - is_authorized: true, - account_id: AccountId::new([2; 32]), - }, - ]; - let _post_states = new_definition(&pre_states, [0xca, 0xfe, 0xca, 0xfe, 0xca, 0xfe], 10); - } - - #[test] - fn test_new_definition_with_valid_inputs_succeeds() { - let pre_states = vec![ - AccountForTests::definition_account_uninit(), - AccountForTests::holding_account_uninit(), - ]; - - let post_states = new_definition(&pre_states, [2u8; 6], BalanceForTests::init_supply()); - - let [definition_account, holding_account] = post_states.try_into().ok().unwrap(); - assert!( - *definition_account.account() - == AccountForTests::definition_account_unclaimed().account - ); - - assert!(*holding_account.account() == AccountForTests::holding_account_unclaimed().account); - } - - #[should_panic(expected = "Invalid number of input accounts")] - #[test] - fn test_call_transfer_with_invalid_number_of_accounts_1() { - let pre_states = vec![AccountWithMetadata { - account: 
Account::default(), - is_authorized: true, - account_id: AccountId::new([1; 32]), - }]; - let _post_states = transfer(&pre_states, 10); - } - - #[should_panic(expected = "Invalid number of input accounts")] - #[test] - fn test_call_transfer_with_invalid_number_of_accounts_2() { - let pre_states = vec![ - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([1; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([2; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([3; 32]), - }, - ]; - let _post_states = transfer(&pre_states, 10); - } - - #[should_panic(expected = "Invalid sender data")] - #[test] - fn test_transfer_invalid_instruction_type_should_fail() { - let invalid_type = TOKEN_HOLDING_STANDARD ^ 1; - let pre_states = vec![ - AccountWithMetadata { - account: Account { - // First byte should be `TOKEN_HOLDING_STANDARD` for token holding accounts - data: Data::try_from(vec![invalid_type; TOKEN_HOLDING_DATA_SIZE]) - .expect("Invalid data"), - ..Account::default() - }, - is_authorized: true, - account_id: AccountId::new([1; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([2; 32]), - }, - ]; - let _post_states = transfer(&pre_states, 10); - } - - #[should_panic(expected = "Invalid sender data")] - #[test] - fn test_transfer_invalid_data_size_should_fail_1() { - let pre_states = vec![ - AccountWithMetadata { - account: Account { - // Data must be of exact length `TOKEN_HOLDING_DATA_SIZE` - data: Data::try_from(vec![1; TOKEN_HOLDING_DATA_SIZE - 1]).unwrap(), - ..Account::default() - }, - is_authorized: true, - account_id: AccountId::new([1; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([2; 32]), - }, - ]; - let _post_states = transfer(&pre_states, 10); - } - - #[should_panic(expected = "Invalid sender data")] - #[test] - fn test_transfer_invalid_data_size_should_fail_2() { - let pre_states = vec![ - AccountWithMetadata { - account: Account { - // Data must be of exact length `TOKEN_HOLDING_DATA_SIZE` - data: Data::try_from(vec![1; TOKEN_HOLDING_DATA_SIZE - 1]).unwrap(), - ..Account::default() - }, - is_authorized: true, - account_id: AccountId::new([1; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([2; 32]), - }, - ]; - let _post_states = transfer(&pre_states, 10); - } - - #[should_panic(expected = "Sender and recipient definition id mismatch")] - #[test] - fn test_transfer_with_different_definition_ids_should_fail() { - let pre_states = vec![ - AccountForTests::holding_same_definition_with_authorization(), - AccountForTests::holding_different_definition(), - ]; - let _post_states = transfer(&pre_states, 10); - } - - #[should_panic(expected = "Insufficient balance")] - #[test] - fn test_transfer_with_insufficient_balance_should_fail() { - let pre_states = vec![ - AccountForTests::holding_same_definition_with_authorization(), - AccountForTests::holding_account_same_definition_mint(), - ]; - // Attempt to transfer 38 tokens - let _post_states = transfer(&pre_states, BalanceForTests::burn_insufficient()); - } - - #[should_panic(expected = "Sender authorization is missing")] - #[test] - fn test_transfer_without_sender_authorization_should_fail() { - let mut def_data = Vec::<u8>::new(); - 
def_data.extend_from_slice(&[1; TOKEN_DEFINITION_DATA_SIZE - 16]); - def_data.extend_from_slice(&u128::to_le_bytes(37)); - - let pre_states = vec![ - AccountWithMetadata { - account: Account { - // Account with balance 37 - data: Data::try_from(def_data).unwrap(), - ..Account::default() - }, - is_authorized: false, - account_id: AccountId::new([1; 32]), - }, - AccountWithMetadata { - account: Account { - data: Data::try_from(vec![1; TOKEN_HOLDING_DATA_SIZE - 1]).unwrap(), - ..Account::default() - }, - is_authorized: true, - account_id: AccountId::new([2; 32]), - }, - ]; - let _post_states = transfer(&pre_states, 37); - } - - #[test] - fn test_transfer_with_valid_inputs_succeeds() { - let pre_states = vec![ - AccountForTests::holding_account_init(), - AccountForTests::holding_account2_init(), - ]; - let post_states = transfer(&pre_states, BalanceForTests::transfer_amount()); - let [sender_post, recipient_post] = post_states.try_into().ok().unwrap(); - - assert!( - *sender_post.account() == AccountForTests::holding_account_init_post_transfer().account - ); - assert!( - *recipient_post.account() - == AccountForTests::holding_account2_init_post_transfer().account - ); - } - - #[should_panic(expected = "Invalid balance for NFT Master transfer")] - #[test] - fn test_transfer_with_master_nft_invalid_balance() { - let pre_states = vec![ - AccountForTests::holding_account_master_nft(), - AccountForTests::holding_account_uninit(), - ]; - let _post_states = transfer(&pre_states, BalanceForTests::transfer_amount()); - } - - #[should_panic(expected = "Invalid balance in recipient account for NFT transfer")] - #[test] - fn test_transfer_with_master_nft_invalid_recipient_balance() { - let pre_states = vec![ - AccountForTests::holding_account_master_nft(), - AccountForTests::holding_account_with_master_nft_transferred_to(), - ]; - let _post_states = transfer(&pre_states, BalanceForTests::printable_copies()); - } - - #[test] - fn test_transfer_with_master_nft_success() { - let pre_states = vec![ - AccountForTests::holding_account_master_nft(), - AccountForTests::holding_account_uninit(), - ]; - let post_states = transfer(&pre_states, BalanceForTests::printable_copies()); - let [sender_post, recipient_post] = post_states.try_into().ok().unwrap(); - - assert!( - *sender_post.account() - == AccountForTests::holding_account_master_nft_post_transfer().account - ); - assert!( - *recipient_post.account() - == AccountForTests::holding_account_with_master_nft_transferred_to().account - ); - } - - #[test] - fn test_token_initialize_account_succeeds() { - let pre_states = vec![ - AccountForTests::holding_account_init(), - AccountForTests::holding_account2_init(), - ]; - let post_states = transfer(&pre_states, BalanceForTests::transfer_amount()); - let [sender_post, recipient_post] = post_states.try_into().ok().unwrap(); - - assert!( - *sender_post.account() == AccountForTests::holding_account_init_post_transfer().account - ); - assert!( - *recipient_post.account() - == AccountForTests::holding_account2_init_post_transfer().account - ); - } - - #[test] - #[should_panic(expected = "Invalid number of accounts")] - fn test_burn_invalid_number_of_accounts() { - let pre_states = vec![AccountForTests::definition_account_auth()]; - let _post_states = burn(&pre_states, BalanceForTests::burn_success()); - } - - #[test] - #[should_panic(expected = "Mismatch Token Definition and Token Holding")] - fn test_burn_mismatch_def() { - let pre_states = vec![ - AccountForTests::definition_account_auth(), - 
AccountForTests::holding_different_definition(), - ]; - let _post_states = burn(&pre_states, BalanceForTests::burn_success()); - } - - #[test] - #[should_panic(expected = "Authorization is missing")] - fn test_burn_missing_authorization() { - let pre_states = vec![ - AccountForTests::definition_account_auth(), - AccountForTests::holding_same_definition_without_authorization(), - ]; - let _post_states = burn(&pre_states, BalanceForTests::burn_success()); - } - - #[test] - #[should_panic(expected = "Insufficient balance to burn")] - fn test_burn_insufficient_balance() { - let pre_states = vec![ - AccountForTests::definition_account_auth(), - AccountForTests::holding_same_definition_with_authorization(), - ]; - let _post_states = burn(&pre_states, BalanceForTests::burn_insufficient()); - } - - #[test] - #[should_panic(expected = "Total supply underflow")] - fn test_burn_total_supply_underflow() { - let pre_states = vec![ - AccountForTests::definition_account_auth(), - AccountForTests::holding_same_definition_with_authorization_and_large_balance(), - ]; - let _post_states = burn(&pre_states, BalanceForTests::mint_overflow()); - } - - #[test] - fn test_burn_success() { - let pre_states = vec![ - AccountForTests::definition_account_auth(), - AccountForTests::holding_same_definition_with_authorization(), - ]; - let post_states = burn(&pre_states, BalanceForTests::burn_success()); - - let def_post = post_states[0].clone(); - let holding_post = post_states[1].clone(); - - assert!(*def_post.account() == AccountForTests::definition_account_post_burn().account); - assert!(*holding_post.account() == AccountForTests::holding_account_post_burn().account); - } - - #[test] - #[should_panic(expected = "Invalid number of accounts")] - fn test_mint_invalid_number_of_accounts_1() { - let pre_states = vec![AccountForTests::definition_account_auth()]; - let _post_states = mint_additional_supply(&pre_states, BalanceForTests::mint_success()); - } - - #[test] - #[should_panic(expected = "Invalid number of accounts")] - fn test_mint_invalid_number_of_accounts_2() { - let pre_states = vec![ - AccountForTests::definition_account_auth(), - AccountForTests::holding_account_same_definition_mint(), - AccountForTests::holding_same_definition_with_authorization(), - ]; - let _post_states = mint_additional_supply(&pre_states, BalanceForTests::mint_success()); - } - - #[test] - #[should_panic(expected = "Holding account must be valid")] - fn test_mint_not_valid_holding_account() { - let pre_states = vec![ - AccountForTests::definition_account_auth(), - AccountForTests::definition_account_without_auth(), - ]; - let _post_states = mint_additional_supply(&pre_states, BalanceForTests::mint_success()); - } - - #[test] - #[should_panic(expected = "Definition account must be valid")] - fn test_mint_not_valid_definition_account() { - let pre_states = vec![ - AccountForTests::holding_same_definition_with_authorization(), - AccountForTests::holding_same_definition_without_authorization(), - ]; - let _post_states = mint_additional_supply(&pre_states, BalanceForTests::mint_success()); - } - - #[test] - #[should_panic(expected = "Definition authorization is missing")] - fn test_mint_missing_authorization() { - let pre_states = vec![ - AccountForTests::definition_account_without_auth(), - AccountForTests::holding_same_definition_without_authorization(), - ]; - let _post_states = mint_additional_supply(&pre_states, BalanceForTests::mint_success()); - } - - #[test] - #[should_panic(expected = "Mismatch Token Definition and Token Holding")] - 
fn test_mint_mismatched_token_definition() { - let pre_states = vec![ - AccountForTests::definition_account_auth(), - AccountForTests::holding_different_definition(), - ]; - let _post_states = mint_additional_supply(&pre_states, BalanceForTests::mint_success()); - } - - #[test] - fn test_mint_success() { - let pre_states = vec![ - AccountForTests::definition_account_auth(), - AccountForTests::holding_same_definition_without_authorization(), - ]; - let post_states = mint_additional_supply(&pre_states, BalanceForTests::mint_success()); - - let def_post = post_states[0].clone(); - let holding_post = post_states[1].clone(); - - assert!(*def_post.account() == AccountForTests::definition_account_mint().account); - assert!( - *holding_post.account() - == AccountForTests::holding_account_same_definition_mint().account - ); - } - - #[test] - fn test_mint_uninit_holding_success() { - let pre_states = vec![ - AccountForTests::definition_account_auth(), - AccountForTests::holding_account_uninit(), - ]; - let post_states = mint_additional_supply(&pre_states, BalanceForTests::mint_success()); - - let def_post = post_states[0].clone(); - let holding_post = post_states[1].clone(); - - assert!(*def_post.account() == AccountForTests::definition_account_mint().account); - assert!(*holding_post.account() == AccountForTests::init_mint().account); - assert!(holding_post.requires_claim()); - } - - #[test] - #[should_panic(expected = "Total supply overflow")] - fn test_mint_total_supply_overflow() { - let pre_states = vec![ - AccountForTests::definition_account_auth(), - AccountForTests::holding_same_definition_without_authorization(), - ]; - let _post_states = mint_additional_supply(&pre_states, BalanceForTests::mint_overflow()); - } - - #[test] - #[should_panic(expected = "New balance overflow")] - fn test_mint_holding_account_overflow() { - let pre_states = vec![ - AccountForTests::definition_account_auth(), - AccountForTests::holding_same_definition_without_authorization_overflow(), - ]; - let _post_states = mint_additional_supply(&pre_states, BalanceForTests::mint_overflow()); - } - - #[test] - #[should_panic( - expected = "Token Definition's standard does not permit minting additional supply" - )] - fn test_mint_cannot_mint_unmintable_tokens() { - let pre_states = vec![ - AccountForTests::definition_account_with_authorization_nonfungible(), - AccountForTests::holding_same_definition_without_authorization(), - ]; - let _post_states = mint_additional_supply(&pre_states, BalanceForTests::mint_success()); - } - - #[should_panic(expected = "Invalid number of input accounts")] - #[test] - fn test_call_new_definition_metadata_with_invalid_number_of_accounts_1() { - let name = [0xca, 0xfe, 0xca, 0xfe, 0xca, 0xfe]; - let total_supply = 15u128; - let token_standard = 0u8; - let metadata_standard = 0u8; - let metadata_values: Data = Data::try_from([1u8; 450].to_vec()).unwrap(); - - let pre_states = vec![AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([1; 32]), - }]; - let _post_states = new_definition_with_metadata( - &pre_states, - name, - total_supply, - token_standard, - metadata_standard, - &metadata_values, - ); - } - - #[should_panic(expected = "Invalid number of input accounts")] - #[test] - fn test_call_new_definition_metadata_with_invalid_number_of_accounts_2() { - let name = [0xca, 0xfe, 0xca, 0xfe, 0xca, 0xfe]; - let total_supply = 15u128; - let token_standard = 0u8; - let metadata_standard = 0u8; - let metadata_values: Data = Data::try_from([1u8; 
450].to_vec()).unwrap(); - - let pre_states = vec![ - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([1; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([2; 32]), - }, - ]; - let _post_states = new_definition_with_metadata( - &pre_states, - name, - total_supply, - token_standard, - metadata_standard, - &metadata_values, - ); - } - - #[should_panic(expected = "Invalid number of input accounts")] - #[test] - fn test_call_new_definition_metadata_with_invalid_number_of_accounts_3() { - let name = [0xca, 0xfe, 0xca, 0xfe, 0xca, 0xfe]; - let total_supply = 15u128; - let token_standard = 0u8; - let metadata_standard = 0u8; - let metadata_values: Data = Data::try_from([1u8; 450].to_vec()).unwrap(); - - let pre_states = vec![ - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([1; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([2; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([3; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([4; 32]), - }, - ]; - let _post_states = new_definition_with_metadata( - &pre_states, - name, - total_supply, - token_standard, - metadata_standard, - &metadata_values, - ); - } - - #[should_panic(expected = "Definition target account must have default values")] - #[test] - fn test_call_new_definition_metadata_with_init_definition() { - let name = [0xca, 0xfe, 0xca, 0xfe, 0xca, 0xfe]; - let total_supply = 15u128; - let token_standard = 0u8; - let metadata_standard = 0u8; - let metadata_values: Data = Data::try_from([1u8; 450].to_vec()).unwrap(); - - let pre_states = vec![ - AccountForTests::definition_account_auth(), - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([2; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([3; 32]), - }, - ]; - let _post_states = new_definition_with_metadata( - &pre_states, - name, - total_supply, - token_standard, - metadata_standard, - &metadata_values, - ); - } - - #[should_panic(expected = "Metadata target account must have default values")] - #[test] - fn test_call_new_definition_metadata_with_init_metadata() { - let name = [0xca, 0xfe, 0xca, 0xfe, 0xca, 0xfe]; - let total_supply = 15u128; - let token_standard = 0u8; - let metadata_standard = 0u8; - let metadata_values: Data = Data::try_from([1u8; 450].to_vec()).unwrap(); - - let pre_states = vec![ - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([1; 32]), - }, - AccountForTests::holding_account_same_definition_mint(), - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([3; 32]), - }, - ]; - let _post_states = new_definition_with_metadata( - &pre_states, - name, - total_supply, - token_standard, - metadata_standard, - &metadata_values, - ); - } - - #[should_panic(expected = "Holding target account must have default values")] - #[test] - fn test_call_new_definition_metadata_with_init_holding() { - let name = [0xca, 0xfe, 0xca, 0xfe, 0xca, 0xfe]; - let total_supply = 15u128; - let token_standard = 0u8; - let metadata_standard = 0u8; - let metadata_values: Data = 
Data::try_from([1u8; 450].to_vec()).unwrap(); - - let pre_states = vec![ - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([1; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([2; 32]), - }, - AccountForTests::holding_account_same_definition_mint(), - ]; - let _post_states = new_definition_with_metadata( - &pre_states, - name, - total_supply, - token_standard, - metadata_standard, - &metadata_values, - ); - } - - #[should_panic(expected = "Metadata values data should be 450 bytes")] - #[test] - fn test_call_new_definition_metadata_with_too_short_metadata_length() { - let name = [0xca, 0xfe, 0xca, 0xfe, 0xca, 0xfe]; - let total_supply = 15u128; - let token_standard = 0u8; - let metadata_standard = 0u8; - let metadata_values: Data = Data::try_from([1u8; 449].to_vec()).unwrap(); - - let pre_states = vec![ - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([1; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([2; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([3; 32]), - }, - ]; - let _post_states = new_definition_with_metadata( - &pre_states, - name, - total_supply, - token_standard, - metadata_standard, - &metadata_values, - ); - } - - #[should_panic(expected = "Metadata values data should be 450 bytes")] - #[test] - fn test_call_new_definition_metadata_with_too_long_metadata_length() { - let name = [0xca, 0xfe, 0xca, 0xfe, 0xca, 0xfe]; - let total_supply = 15u128; - let token_standard = 0u8; - let metadata_standard = 0u8; - let metadata_values: Data = Data::try_from([1u8; 451].to_vec()).unwrap(); - - let pre_states = vec![ - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([1; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([2; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([3; 32]), - }, - ]; - let _post_states = new_definition_with_metadata( - &pre_states, - name, - total_supply, - token_standard, - metadata_standard, - &metadata_values, - ); - } - - #[should_panic(expected = "Invalid Token Standard provided")] - #[test] - fn test_call_new_definition_metadata_with_invalid_token_standard() { - let name = [0xca, 0xfe, 0xca, 0xfe, 0xca, 0xfe]; - let total_supply = 15u128; - let token_standard = 14u8; - let metadata_standard = 0u8; - let metadata_values: Data = Data::try_from([1u8; 450].to_vec()).unwrap(); - - let pre_states = vec![ - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([1; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([2; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([3; 32]), - }, - ]; - let _post_states = new_definition_with_metadata( - &pre_states, - name, - total_supply, - token_standard, - metadata_standard, - &metadata_values, - ); - } - - #[should_panic(expected = "Invalid Metadata Standadard provided")] - #[test] - fn test_call_new_definition_metadata_with_invalid_metadata_standard() { - let name = [0xca, 0xfe, 0xca, 0xfe, 0xca, 0xfe]; - let total_supply = 15u128; - 
let token_standard = 0u8; - let metadata_standard = 14u8; - let metadata_values: Data = Data::try_from([1u8; 450].to_vec()).unwrap(); - - let pre_states = vec![ - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([1; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([2; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([3; 32]), - }, - ]; - let _post_states = new_definition_with_metadata( - &pre_states, - name, - total_supply, - token_standard, - metadata_standard, - &metadata_values, - ); - } - - #[should_panic(expected = "Invalid total supply for the specified token supply")] - #[test] - fn test_call_new_definition_metadata_invalid_supply_for_nonfungible() { - let name = [0xca, 0xfe, 0xca, 0xfe, 0xca, 0xfe]; - let total_supply = 15u128; - let token_standard = TOKEN_STANDARD_NONFUNGIBLE; - let metadata_standard = 0u8; - let metadata_values: Data = Data::try_from([1u8; 450].to_vec()).unwrap(); - - let pre_states = vec![ - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([1; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([2; 32]), - }, - AccountWithMetadata { - account: Account::default(), - is_authorized: true, - account_id: AccountId::new([3; 32]), - }, - ]; - let _post_states = new_definition_with_metadata( - &pre_states, - name, - total_supply, - token_standard, - metadata_standard, - &metadata_values, - ); - } - - #[should_panic(expected = "Invalid number of accounts")] - #[test] - fn test_print_nft_invalid_number_of_accounts_1() { - let pre_states = vec![AccountForTests::holding_account_master_nft()]; - let _post_states = print_nft(&pre_states); - } - - #[should_panic(expected = "Invalid number of accounts")] - #[test] - fn test_print_nft_invalid_number_of_accounts_2() { - let pre_states = vec![ - AccountForTests::holding_account_master_nft(), - AccountForTests::definition_account_auth(), - AccountForTests::holding_account_uninit(), - ]; - let _post_states = print_nft(&pre_states); - } - - #[should_panic(expected = "Master NFT Account must be authorized")] - #[test] - fn test_print_nft_master_account_must_be_authorized() { - let pre_states = vec![ - AccountForTests::holding_account_uninit(), - AccountForTests::holding_account_uninit(), - ]; - let _post_states = print_nft(&pre_states); - } - - #[should_panic(expected = "Printed Account must be uninitialized")] - #[test] - fn test_print_nft_print_account_initialized() { - let pre_states = vec![ - AccountForTests::holding_account_master_nft(), - AccountForTests::holding_account_init(), - ]; - let _post_states = print_nft(&pre_states); - } - - #[should_panic(expected = "Invalid Token Holding data")] - #[test] - fn test_print_nft_master_nft_invalid_token_holding() { - let pre_states = vec![ - AccountForTests::definition_account_auth(), - AccountForTests::holding_account_uninit(), - ]; - let _post_states = print_nft(&pre_states); - } - - #[should_panic(expected = "Invalid Token Holding provided as NFT Master Account")] - #[test] - fn test_print_nft_master_nft_not_nft_master_account() { - let pre_states = vec![ - AccountForTests::holding_account_init(), - AccountForTests::holding_account_uninit(), - ]; - let _post_states = print_nft(&pre_states); - } - - #[should_panic(expected = "Insufficient balance to print another NFT copy")] 
- #[test] - fn test_print_nft_master_nft_insufficient_balance() { - let pre_states = vec![ - AccountForTests::holding_account_master_nft_insufficient_balance(), - AccountForTests::holding_account_uninit(), - ]; - let _post_states = print_nft(&pre_states); - } - - #[test] - fn test_print_nft_success() { - let pre_states = vec![ - AccountForTests::holding_account_master_nft(), - AccountForTests::holding_account_uninit(), - ]; - let post_states = print_nft(&pre_states); - - let post_master_nft = post_states[0].account(); - let post_printed = post_states[1].account(); - - assert!( - *post_master_nft == AccountForTests::holding_account_master_nft_after_print().account - ); - assert!(*post_printed == AccountForTests::holding_account_printed_nft().account); - } + write_nssa_outputs(instruction_words, pre_states_clone, post_states); } diff --git a/programs/token/Cargo.toml b/programs/token/Cargo.toml new file mode 100644 index 00000000..39beb96a --- /dev/null +++ b/programs/token/Cargo.toml @@ -0,0 +1,9 @@ +[package] +name = "token_program" +version = "0.1.0" +edition = "2024" +license = { workspace = true } + +[dependencies] +nssa_core.workspace = true +token_core.workspace = true diff --git a/programs/token/core/Cargo.toml b/programs/token/core/Cargo.toml new file mode 100644 index 00000000..cf61a35f --- /dev/null +++ b/programs/token/core/Cargo.toml @@ -0,0 +1,10 @@ +[package] +name = "token_core" +version = "0.1.0" +edition = "2024" +license = { workspace = true } + +[dependencies] +nssa_core.workspace = true +serde.workspace = true +borsh.workspace = true diff --git a/programs/token/core/src/lib.rs b/programs/token/core/src/lib.rs new file mode 100644 index 00000000..140ae38f --- /dev/null +++ b/programs/token/core/src/lib.rs @@ -0,0 +1,241 @@ +//! This crate contains core data structures and utilities for the Token Program. + +use borsh::{BorshDeserialize, BorshSerialize}; +use nssa_core::account::{AccountId, Data}; +use serde::{Deserialize, Serialize}; + +/// Token Program Instruction. +#[derive(Serialize, Deserialize)] +pub enum Instruction { + /// Transfer tokens from sender to recipient. + /// + /// Required accounts: + /// - Sender's Token Holding account (authorized), + /// - Recipient's Token Holding account. + Transfer { amount_to_transfer: u128 }, + + /// Create a new fungible token definition without metadata. + /// + /// Required accounts: + /// - Token Definition account (uninitialized), + /// - Token Holding account (uninitialized). + NewFungibleDefinition { name: String, total_supply: u128 }, + + /// Create a new fungible or non-fungible token definition with metadata. + /// + /// Required accounts: + /// - Token Definition account (uninitialized), + /// - Token Holding account (uninitialized), + /// - Token Metadata account (uninitialized). + NewDefinitionWithMetadata { + new_definition: NewTokenDefinition, + /// Boxed to avoid large enum variant size + metadata: Box<NewTokenMetadata>, + }, + + /// Initialize a token holding account for a given token definition. + /// + /// Required accounts: + /// - Token Definition account (initialized), + /// - Token Holding account (uninitialized), + InitializeAccount, + + /// Burn tokens from the holder's account. + /// + /// Required accounts: + /// - Token Definition account (initialized), + /// - Token Holding account (authorized). + Burn { amount_to_burn: u128 }, + + /// Mint new tokens to the holder's account. 
+ /// + /// Required accounts: + /// - Token Definition account (authorized), + /// - Token Holding account (uninitialized or initialized). + Mint { amount_to_mint: u128 }, + + /// Print a new NFT from the master copy. + /// + /// Required accounts: + /// - NFT Master Token Holding account (authorized), + /// - NFT Printed Copy Token Holding account (uninitialized). + PrintNft, +} + +#[derive(Serialize, Deserialize)] +pub enum NewTokenDefinition { + Fungible { + name: String, + total_supply: u128, + }, + NonFungible { + name: String, + printable_supply: u128, + }, +} + +#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, BorshSerialize, BorshDeserialize)] +pub enum TokenDefinition { + Fungible { + name: String, + total_supply: u128, + metadata_id: Option<AccountId>, + }, + NonFungible { + name: String, + printable_supply: u128, + metadata_id: AccountId, + }, +} + +impl TryFrom<&Data> for TokenDefinition { + type Error = std::io::Error; + + fn try_from(data: &Data) -> Result<Self, Self::Error> { + TokenDefinition::try_from_slice(data.as_ref()) + } +} + +impl From<&TokenDefinition> for Data { + fn from(definition: &TokenDefinition) -> Self { + // Using size_of_val as size hint for Vec allocation + let mut data = Vec::with_capacity(std::mem::size_of_val(definition)); + + BorshSerialize::serialize(definition, &mut data) + .expect("Serialization to Vec should not fail"); + + Data::try_from(data).expect("Token definition encoded data should fit into Data") + } +} + +#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, BorshSerialize, BorshDeserialize)] +pub enum TokenHolding { + Fungible { + definition_id: AccountId, + balance: u128, + }, + NftMaster { + definition_id: AccountId, + /// The amount of printed copies left - 1 (1 reserved for master copy itself). + print_balance: u128, + }, + NftPrintedCopy { + definition_id: AccountId, + /// Whether nft is owned by the holder. + owned: bool, + }, +} + +impl TokenHolding { + pub fn zeroized_clone_from(other: &Self) -> Self { + match other { + TokenHolding::Fungible { definition_id, .. } => TokenHolding::Fungible { + definition_id: *definition_id, + balance: 0, + }, + TokenHolding::NftMaster { definition_id, .. } => TokenHolding::NftMaster { + definition_id: *definition_id, + print_balance: 0, + }, + TokenHolding::NftPrintedCopy { definition_id, .. } => TokenHolding::NftPrintedCopy { + definition_id: *definition_id, + owned: false, + }, + } + } + + pub fn zeroized_from_definition( + definition_id: AccountId, + definition: &TokenDefinition, + ) -> Self { + match definition { + TokenDefinition::Fungible { .. } => TokenHolding::Fungible { + definition_id, + balance: 0, + }, + TokenDefinition::NonFungible { .. } => TokenHolding::NftPrintedCopy { + definition_id, + owned: false, + }, + } + } + + pub fn definition_id(&self) -> AccountId { + match self { + TokenHolding::Fungible { definition_id, .. } => *definition_id, + TokenHolding::NftMaster { definition_id, .. } => *definition_id, + TokenHolding::NftPrintedCopy { definition_id, .. 
} => *definition_id, + } + } +} + +impl TryFrom<&Data> for TokenHolding { + type Error = std::io::Error; + + fn try_from(data: &Data) -> Result<Self, Self::Error> { + TokenHolding::try_from_slice(data.as_ref()) + } +} + +impl From<&TokenHolding> for Data { + fn from(holding: &TokenHolding) -> Self { + // Using size_of_val as size hint for Vec allocation + let mut data = Vec::with_capacity(std::mem::size_of_val(holding)); + + BorshSerialize::serialize(holding, &mut data) + .expect("Serialization to Vec should not fail"); + + Data::try_from(data).expect("Token holding encoded data should fit into Data") + } +} + +#[derive(Serialize, Deserialize)] +pub struct NewTokenMetadata { + /// Metadata standard. + pub standard: MetadataStandard, + /// Pointer to off-chain metadata + pub uri: String, + /// Creators of the token. + pub creators: String, +} + +#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, BorshSerialize, BorshDeserialize)] +pub struct TokenMetadata { + /// Token Definition account id. + pub definition_id: AccountId, + /// Metadata standard . + pub standard: MetadataStandard, + /// Pointer to off-chain metadata. + pub uri: String, + /// Creators of the token. + pub creators: String, + /// Block id of primary sale. + pub primary_sale_date: u64, +} + +/// Metadata standard defining the expected format of JSON located off-chain. +#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, BorshSerialize, BorshDeserialize)] +pub enum MetadataStandard { + Simple, + Expanded, +} + +impl TryFrom<&Data> for TokenMetadata { + type Error = std::io::Error; + + fn try_from(data: &Data) -> Result<Self, Self::Error> { + TokenMetadata::try_from_slice(data.as_ref()) + } +} + +impl From<&TokenMetadata> for Data { + fn from(metadata: &TokenMetadata) -> Self { + // Using size_of_val as size hint for Vec allocation + let mut data = Vec::with_capacity(std::mem::size_of_val(metadata)); + + BorshSerialize::serialize(metadata, &mut data) + .expect("Serialization to Vec should not fail"); + + Data::try_from(data).expect("Token metadata encoded data should fit into Data") + } +} diff --git a/programs/token/src/burn.rs b/programs/token/src/burn.rs new file mode 100644 index 00000000..94637d92 --- /dev/null +++ b/programs/token/src/burn.rs @@ -0,0 +1,104 @@ +use nssa_core::{ + account::{AccountWithMetadata, Data}, + program::AccountPostState, +}; +use token_core::{TokenDefinition, TokenHolding}; + +pub fn burn( + definition_account: AccountWithMetadata, + user_holding_account: AccountWithMetadata, + amount_to_burn: u128, +) -> Vec<AccountPostState> { + assert!( + user_holding_account.is_authorized, + "Authorization is missing" + ); + + let mut definition = TokenDefinition::try_from(&definition_account.account.data) + .expect("Token Definition account must be valid"); + let mut holding = TokenHolding::try_from(&user_holding_account.account.data) + .expect("Token Holding account must be valid"); + + assert_eq!( + definition_account.account_id, + holding.definition_id(), + "Mismatch Token Definition and Token Holding" + ); + + match (&mut definition, &mut holding) { + ( + TokenDefinition::Fungible { + name: _, + metadata_id: _, + total_supply, + }, + TokenHolding::Fungible { + definition_id: _, + balance, + }, + ) => { + *balance = balance + .checked_sub(amount_to_burn) + .expect("Insufficient balance to burn"); + + *total_supply = total_supply + .checked_sub(amount_to_burn) + .expect("Total supply underflow"); + } + ( + TokenDefinition::NonFungible { + name: _, + printable_supply, + metadata_id: _, + }, + 
TokenHolding::NftMaster { + definition_id: _, + print_balance, + }, + ) => { + *printable_supply = printable_supply + .checked_sub(amount_to_burn) + .expect("Printable supply underflow"); + + *print_balance = print_balance + .checked_sub(amount_to_burn) + .expect("Insufficient balance to burn"); + } + ( + TokenDefinition::NonFungible { + name: _, + printable_supply, + metadata_id: _, + }, + TokenHolding::NftPrintedCopy { + definition_id: _, + owned, + }, + ) => { + assert_eq!( + amount_to_burn, 1, + "Invalid balance to burn for NFT Printed Copy" + ); + + assert!(*owned, "Cannot burn unowned NFT Printed Copy"); + + *printable_supply = printable_supply + .checked_sub(1) + .expect("Printable supply underflow"); + + *owned = false; + } + _ => panic!("Mismatched Token Definition and Token Holding types"), + } + + let mut definition_post = definition_account.account; + definition_post.data = Data::from(&definition); + + let mut holding_post = user_holding_account.account; + holding_post.data = Data::from(&holding); + + vec![ + AccountPostState::new(definition_post), + AccountPostState::new(holding_post), + ] +} diff --git a/programs/token/src/initialize.rs b/programs/token/src/initialize.rs new file mode 100644 index 00000000..744fdb64 --- /dev/null +++ b/programs/token/src/initialize.rs @@ -0,0 +1,34 @@ +use nssa_core::{ + account::{Account, AccountWithMetadata, Data}, + program::AccountPostState, +}; +use token_core::{TokenDefinition, TokenHolding}; + +pub fn initialize_account( + definition_account: AccountWithMetadata, + account_to_initialize: AccountWithMetadata, +) -> Vec<AccountPostState> { + assert_eq!( + account_to_initialize.account, + Account::default(), + "Only Uninitialized accounts can be initialized" + ); + + // TODO: #212 We should check that this is an account owned by the token program. + // This check can't be done here since the ID of the program is known only after compiling it + // + // Check definition account is valid + let definition = TokenDefinition::try_from(&definition_account.account.data) + .expect("Definition account must be valid"); + let holding = + TokenHolding::zeroized_from_definition(definition_account.account_id, &definition); + + let definition_post = definition_account.account; + let mut account_to_initialize = account_to_initialize.account; + account_to_initialize.data = Data::from(&holding); + + vec![ + AccountPostState::new(definition_post), + AccountPostState::new_claimed(account_to_initialize), + ] +} diff --git a/programs/token/src/lib.rs b/programs/token/src/lib.rs new file mode 100644 index 00000000..8b0698c5 --- /dev/null +++ b/programs/token/src/lib.rs @@ -0,0 +1,12 @@ +//! The Token Program implementation. 
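+//!
+//! Each submodule implements one or more `token_core::Instruction` variants as a function from
+//! the pre-state accounts (`AccountWithMetadata`) to the resulting `AccountPostState`s. A rough,
+//! illustrative sketch of a fungible transfer (placeholder account ids and program owner,
+//! mirroring the unit-test fixtures; not compiled as a doctest):
+//!
+//! ```ignore
+//! use nssa_core::account::{Account, AccountId, AccountWithMetadata, Data};
+//! use token_core::TokenHolding;
+//! use token_program::transfer::transfer;
+//!
+//! let definition_id = AccountId::new([15; 32]);
+//! let holding = |id: u8, balance: u128, is_authorized: bool| AccountWithMetadata {
+//!     account: Account {
+//!         program_owner: [5u32; 8], // placeholder for the token program id
+//!         balance: 0u128,
+//!         data: Data::from(&TokenHolding::Fungible { definition_id, balance }),
+//!         nonce: 0,
+//!     },
+//!     is_authorized,
+//!     account_id: AccountId::new([id; 32]),
+//! };
+//!
+//! // Move 250 tokens from an authorized sender holding to a recipient holding of the same token.
+//! let post_states = transfer(holding(17, 1_000, true), holding(42, 0, false), 250);
+//! assert_eq!(post_states.len(), 2); // [sender post-state, recipient post-state]
+//! ```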
+ +pub use token_core as core; + +pub mod burn; +pub mod initialize; +pub mod mint; +pub mod new_definition; +pub mod print_nft; +pub mod transfer; + +mod tests; diff --git a/programs/token/src/mint.rs b/programs/token/src/mint.rs new file mode 100644 index 00000000..2f17cc62 --- /dev/null +++ b/programs/token/src/mint.rs @@ -0,0 +1,71 @@ +use nssa_core::{ + account::{Account, AccountWithMetadata, Data}, + program::AccountPostState, +}; +use token_core::{TokenDefinition, TokenHolding}; + +pub fn mint( + definition_account: AccountWithMetadata, + user_holding_account: AccountWithMetadata, + amount_to_mint: u128, +) -> Vec<AccountPostState> { + assert!( + definition_account.is_authorized, + "Definition authorization is missing" + ); + + let mut definition = TokenDefinition::try_from(&definition_account.account.data) + .expect("Token Definition account must be valid"); + let mut holding = if user_holding_account.account == Account::default() { + TokenHolding::zeroized_from_definition(definition_account.account_id, &definition) + } else { + TokenHolding::try_from(&user_holding_account.account.data) + .expect("Token Holding account must be valid") + }; + + assert_eq!( + definition_account.account_id, + holding.definition_id(), + "Mismatch Token Definition and Token Holding" + ); + + match (&mut definition, &mut holding) { + ( + TokenDefinition::Fungible { + name: _, + metadata_id: _, + total_supply, + }, + TokenHolding::Fungible { + definition_id: _, + balance, + }, + ) => { + *balance = balance + .checked_add(amount_to_mint) + .expect("Balance overflow on minting"); + + *total_supply = total_supply + .checked_add(amount_to_mint) + .expect("Total supply overflow"); + } + ( + TokenDefinition::NonFungible { .. }, + TokenHolding::NftMaster { .. } | TokenHolding::NftPrintedCopy { .. 
}, + ) => { + panic!("Cannot mint additional supply for Non-Fungible Tokens"); + } + _ => panic!("Mismatched Token Definition and Token Holding types"), + } + + let mut definition_post = definition_account.account; + definition_post.data = Data::from(&definition); + + let mut holding_post = user_holding_account.account; + holding_post.data = Data::from(&holding); + + vec![ + AccountPostState::new(definition_post), + AccountPostState::new_claimed_if_default(holding_post), + ] +} diff --git a/programs/token/src/new_definition.rs b/programs/token/src/new_definition.rs new file mode 100644 index 00000000..b2a9ae9f --- /dev/null +++ b/programs/token/src/new_definition.rs @@ -0,0 +1,124 @@ +use nssa_core::{ + account::{Account, AccountWithMetadata, Data}, + program::AccountPostState, +}; +use token_core::{ + NewTokenDefinition, NewTokenMetadata, TokenDefinition, TokenHolding, TokenMetadata, +}; + +pub fn new_fungible_definition( + definition_target_account: AccountWithMetadata, + holding_target_account: AccountWithMetadata, + name: String, + total_supply: u128, +) -> Vec<AccountPostState> { + assert_eq!( + definition_target_account.account, + Account::default(), + "Definition target account must have default values" + ); + + assert_eq!( + holding_target_account.account, + Account::default(), + "Holding target account must have default values" + ); + + let token_definition = TokenDefinition::Fungible { + name, + total_supply, + metadata_id: None, + }; + let token_holding = TokenHolding::Fungible { + definition_id: definition_target_account.account_id, + balance: total_supply, + }; + + let mut definition_target_account_post = definition_target_account.account; + definition_target_account_post.data = Data::from(&token_definition); + + let mut holding_target_account_post = holding_target_account.account; + holding_target_account_post.data = Data::from(&token_holding); + + vec![ + AccountPostState::new_claimed(definition_target_account_post), + AccountPostState::new_claimed(holding_target_account_post), + ] +} + +pub fn new_definition_with_metadata( + definition_target_account: AccountWithMetadata, + holding_target_account: AccountWithMetadata, + metadata_target_account: AccountWithMetadata, + new_definition: NewTokenDefinition, + metadata: NewTokenMetadata, +) -> Vec<AccountPostState> { + assert_eq!( + definition_target_account.account, + Account::default(), + "Definition target account must have default values" + ); + + assert_eq!( + holding_target_account.account, + Account::default(), + "Holding target account must have default values" + ); + + assert_eq!( + metadata_target_account.account, + Account::default(), + "Metadata target account must have default values" + ); + + let (token_definition, token_holding) = match new_definition { + NewTokenDefinition::Fungible { name, total_supply } => ( + TokenDefinition::Fungible { + name, + total_supply, + metadata_id: Some(metadata_target_account.account_id), + }, + TokenHolding::Fungible { + definition_id: definition_target_account.account_id, + balance: total_supply, + }, + ), + NewTokenDefinition::NonFungible { + name, + printable_supply, + } => ( + TokenDefinition::NonFungible { + name, + printable_supply, + metadata_id: metadata_target_account.account_id, + }, + TokenHolding::NftMaster { + definition_id: definition_target_account.account_id, + print_balance: printable_supply, + }, + ), + }; + + let token_metadata = TokenMetadata { + definition_id: definition_target_account.account_id, + standard: metadata.standard, + uri: metadata.uri, + creators: 
metadata.creators,
+        primary_sale_date: 0u64, // TODO #261: to be implemented in future work
+    };
+
+    let mut definition_target_account_post = definition_target_account.account.clone();
+    definition_target_account_post.data = Data::from(&token_definition);
+
+    let mut holding_target_account_post = holding_target_account.account.clone();
+    holding_target_account_post.data = Data::from(&token_holding);
+
+    let mut metadata_target_account_post = metadata_target_account.account.clone();
+    metadata_target_account_post.data = Data::from(&token_metadata);
+
+    vec![
+        AccountPostState::new_claimed(definition_target_account_post),
+        AccountPostState::new_claimed(holding_target_account_post),
+        AccountPostState::new_claimed(metadata_target_account_post),
+    ]
+}
diff --git a/programs/token/src/print_nft.rs b/programs/token/src/print_nft.rs
new file mode 100644
index 00000000..d10533c1
--- /dev/null
+++ b/programs/token/src/print_nft.rs
@@ -0,0 +1,54 @@
+use nssa_core::{
+    account::{Account, AccountWithMetadata, Data},
+    program::AccountPostState,
+};
+use token_core::TokenHolding;
+
+pub fn print_nft(
+    master_account: AccountWithMetadata,
+    printed_account: AccountWithMetadata,
+) -> Vec<AccountPostState> {
+    assert!(
+        master_account.is_authorized,
+        "Master NFT Account must be authorized"
+    );
+
+    assert_eq!(
+        printed_account.account,
+        Account::default(),
+        "Printed Account must be uninitialized"
+    );
+
+    let mut master_account_data =
+        TokenHolding::try_from(&master_account.account.data).expect("Invalid Token Holding data");
+
+    let TokenHolding::NftMaster {
+        definition_id,
+        print_balance,
+    } = &mut master_account_data
+    else {
+        panic!("Invalid Token Holding provided as NFT Master Account");
+    };
+
+    let definition_id = *definition_id;
+
+    assert!(
+        *print_balance > 1,
+        "Insufficient balance to print another NFT copy"
+    );
+    *print_balance -= 1;
+
+    let mut master_account_post = master_account.account;
+    master_account_post.data = Data::from(&master_account_data);
+
+    let mut printed_account_post = printed_account.account;
+    printed_account_post.data = Data::from(&TokenHolding::NftPrintedCopy {
+        definition_id,
+        owned: true,
+    });
+
+    vec![
+        AccountPostState::new(master_account_post),
+        AccountPostState::new_claimed(printed_account_post),
+    ]
+}
diff --git a/programs/token/src/tests.rs b/programs/token/src/tests.rs
new file mode 100644
index 00000000..cf95c4d4
--- /dev/null
+++ b/programs/token/src/tests.rs
@@ -0,0 +1,1040 @@
+#![cfg(test)]
+
+use nssa_core::account::{Account, AccountId, AccountWithMetadata, Data};
+use token_core::{
+    MetadataStandard, NewTokenDefinition, NewTokenMetadata, TokenDefinition, TokenHolding,
+};
+
+use crate::{
+    burn::burn,
+    mint::mint,
+    new_definition::{new_definition_with_metadata, new_fungible_definition},
+    print_nft::print_nft,
+    transfer::transfer,
+};
+
+// TODO: Move these tests into the corresponding modules (burn, mint, transfer, etc.) so that
+// they read as proper unit tests.
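+// The `AccountForTests`, `BalanceForTests`, and `IdForTests` helpers below provide the shared
+// pre-/post-state account fixtures, balances, and account ids used throughout these tests.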
+ +struct BalanceForTests; +struct IdForTests; + +struct AccountForTests; + +impl AccountForTests { + fn definition_account_auth() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [5u32; 8], + balance: 0u128, + data: Data::from(&TokenDefinition::Fungible { + name: String::from("test"), + total_supply: BalanceForTests::init_supply(), + metadata_id: None, + }), + nonce: 0, + }, + is_authorized: true, + account_id: IdForTests::pool_definition_id(), + } + } + + fn definition_account_without_auth() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [5u32; 8], + balance: 0u128, + data: Data::from(&TokenDefinition::Fungible { + name: String::from("test"), + total_supply: BalanceForTests::init_supply(), + metadata_id: None, + }), + nonce: 0, + }, + is_authorized: false, + account_id: IdForTests::pool_definition_id(), + } + } + + fn holding_different_definition() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [5u32; 8], + balance: 0u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForTests::pool_definition_id_diff(), + balance: BalanceForTests::holding_balance(), + }), + nonce: 0, + }, + is_authorized: true, + account_id: IdForTests::holding_id(), + } + } + + fn holding_same_definition_with_authorization() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [5u32; 8], + balance: 0u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForTests::pool_definition_id(), + balance: BalanceForTests::holding_balance(), + }), + nonce: 0, + }, + is_authorized: true, + account_id: IdForTests::holding_id(), + } + } + + fn holding_same_definition_without_authorization() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [5u32; 8], + balance: 0u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForTests::pool_definition_id(), + balance: BalanceForTests::holding_balance(), + }), + nonce: 0, + }, + is_authorized: false, + account_id: IdForTests::holding_id(), + } + } + + fn holding_same_definition_without_authorization_overflow() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [5u32; 8], + balance: 0u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForTests::pool_definition_id(), + balance: BalanceForTests::init_supply(), + }), + nonce: 0, + }, + is_authorized: false, + account_id: IdForTests::holding_id(), + } + } + + fn definition_account_post_burn() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [5u32; 8], + balance: 0u128, + data: Data::from(&TokenDefinition::Fungible { + name: String::from("test"), + total_supply: BalanceForTests::init_supply_burned(), + metadata_id: None, + }), + nonce: 0, + }, + is_authorized: true, + account_id: IdForTests::pool_definition_id(), + } + } + + fn holding_account_post_burn() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [5u32; 8], + balance: 0u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForTests::pool_definition_id(), + balance: BalanceForTests::holding_balance_burned(), + }), + nonce: 0, + }, + is_authorized: false, + account_id: IdForTests::holding_id(), + } + } + + fn holding_account_uninit() -> AccountWithMetadata { + AccountWithMetadata { + account: Account::default(), + is_authorized: false, + account_id: IdForTests::holding_id_2(), + } + } + + fn init_mint() -> 
AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [0u32; 8], + balance: 0u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForTests::pool_definition_id(), + balance: BalanceForTests::mint_success(), + }), + nonce: 0, + }, + is_authorized: false, + account_id: IdForTests::holding_id(), + } + } + + fn holding_account_same_definition_mint() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [5u32; 8], + balance: 0u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForTests::pool_definition_id(), + balance: BalanceForTests::holding_balance_mint(), + }), + nonce: 0, + }, + is_authorized: true, + account_id: IdForTests::pool_definition_id(), + } + } + + fn definition_account_mint() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [5u32; 8], + balance: 0u128, + data: Data::from(&TokenDefinition::Fungible { + name: String::from("test"), + total_supply: BalanceForTests::init_supply_mint(), + metadata_id: None, + }), + nonce: 0, + }, + is_authorized: true, + account_id: IdForTests::pool_definition_id(), + } + } + + fn holding_same_definition_with_authorization_and_large_balance() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [5u32; 8], + balance: 0u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForTests::pool_definition_id(), + balance: BalanceForTests::mint_overflow(), + }), + nonce: 0, + }, + is_authorized: true, + account_id: IdForTests::pool_definition_id(), + } + } + + fn definition_account_with_authorization_nonfungible() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [5u32; 8], + balance: 0u128, + data: Data::from(&TokenDefinition::NonFungible { + name: String::from("test"), + printable_supply: BalanceForTests::printable_copies(), + metadata_id: AccountId::new([0; 32]), + }), + nonce: 0, + }, + is_authorized: true, + account_id: IdForTests::pool_definition_id(), + } + } + + fn definition_account_uninit() -> AccountWithMetadata { + AccountWithMetadata { + account: Account::default(), + is_authorized: false, + account_id: IdForTests::pool_definition_id(), + } + } + + fn holding_account_init() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [5u32; 8], + balance: 0u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForTests::pool_definition_id(), + balance: BalanceForTests::init_supply(), + }), + nonce: 0, + }, + is_authorized: true, + account_id: IdForTests::holding_id(), + } + } + + fn definition_account_unclaimed() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [0u32; 8], + balance: 0u128, + data: Data::from(&TokenDefinition::Fungible { + name: String::from("test"), + total_supply: BalanceForTests::init_supply(), + metadata_id: None, + }), + nonce: 0, + }, + is_authorized: true, + account_id: IdForTests::pool_definition_id(), + } + } + + fn holding_account_unclaimed() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [0u32; 8], + balance: 0u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForTests::pool_definition_id(), + balance: BalanceForTests::init_supply(), + }), + nonce: 0, + }, + is_authorized: true, + account_id: IdForTests::holding_id(), + } + } + + fn holding_account2_init() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [5u32; 8], + balance: 0u128, 
+ data: Data::from(&TokenHolding::Fungible { + definition_id: IdForTests::pool_definition_id(), + balance: BalanceForTests::init_supply(), + }), + nonce: 0, + }, + is_authorized: true, + account_id: IdForTests::holding_id_2(), + } + } + + fn holding_account2_init_post_transfer() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [5u32; 8], + balance: 0u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForTests::pool_definition_id(), + balance: BalanceForTests::recipient_post_transfer(), + }), + nonce: 0, + }, + is_authorized: true, + account_id: IdForTests::holding_id_2(), + } + } + + fn holding_account_init_post_transfer() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [5u32; 8], + balance: 0u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForTests::pool_definition_id(), + balance: BalanceForTests::sender_post_transfer(), + }), + nonce: 0, + }, + is_authorized: true, + account_id: IdForTests::holding_id(), + } + } + + fn holding_account_master_nft() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [5u32; 8], + balance: 0u128, + data: Data::from(&TokenHolding::NftMaster { + definition_id: IdForTests::pool_definition_id(), + print_balance: BalanceForTests::printable_copies(), + }), + nonce: 0, + }, + is_authorized: true, + account_id: IdForTests::holding_id(), + } + } + + fn holding_account_master_nft_insufficient_balance() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [5u32; 8], + balance: 0u128, + data: Data::from(&TokenHolding::NftMaster { + definition_id: IdForTests::pool_definition_id(), + print_balance: 1, + }), + nonce: 0, + }, + is_authorized: true, + account_id: IdForTests::holding_id(), + } + } + + fn holding_account_master_nft_after_print() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [5u32; 8], + balance: 0u128, + data: Data::from(&TokenHolding::NftMaster { + definition_id: IdForTests::pool_definition_id(), + print_balance: BalanceForTests::printable_copies() - 1, + }), + nonce: 0, + }, + is_authorized: true, + account_id: IdForTests::holding_id(), + } + } + + fn holding_account_printed_nft() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [0u32; 8], + balance: 0u128, + data: Data::from(&TokenHolding::NftPrintedCopy { + definition_id: IdForTests::pool_definition_id(), + owned: true, + }), + nonce: 0, + }, + is_authorized: false, + account_id: IdForTests::holding_id(), + } + } + + fn holding_account_with_master_nft_transferred_to() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [0u32; 8], + balance: 0u128, + data: Data::from(&TokenHolding::NftMaster { + definition_id: IdForTests::pool_definition_id(), + print_balance: BalanceForTests::printable_copies(), + }), + nonce: 0, + }, + is_authorized: true, + account_id: IdForTests::holding_id_2(), + } + } + + fn holding_account_master_nft_post_transfer() -> AccountWithMetadata { + AccountWithMetadata { + account: Account { + program_owner: [5u32; 8], + balance: 0u128, + data: Data::from(&TokenHolding::NftMaster { + definition_id: IdForTests::pool_definition_id(), + print_balance: 0, + }), + nonce: 0, + }, + is_authorized: true, + account_id: IdForTests::holding_id(), + } + } +} + +impl BalanceForTests { + fn init_supply() -> u128 { + 100_000 + } + + fn holding_balance() -> u128 { + 1_000 + } + + fn init_supply_burned() -> u128 
{ + 99_500 + } + + fn holding_balance_burned() -> u128 { + 500 + } + + fn burn_success() -> u128 { + 500 + } + + fn burn_insufficient() -> u128 { + 1_500 + } + + fn mint_success() -> u128 { + 50_000 + } + + fn holding_balance_mint() -> u128 { + 51_000 + } + + fn mint_overflow() -> u128 { + u128::MAX - 40_000 + } + + fn init_supply_mint() -> u128 { + 150_000 + } + + fn sender_post_transfer() -> u128 { + 95_000 + } + + fn recipient_post_transfer() -> u128 { + 105_000 + } + + fn transfer_amount() -> u128 { + 5_000 + } + + fn printable_copies() -> u128 { + 10 + } +} + +impl IdForTests { + fn pool_definition_id() -> AccountId { + AccountId::new([15; 32]) + } + + fn pool_definition_id_diff() -> AccountId { + AccountId::new([16; 32]) + } + + fn holding_id() -> AccountId { + AccountId::new([17; 32]) + } + + fn holding_id_2() -> AccountId { + AccountId::new([42; 32]) + } +} + +#[should_panic(expected = "Definition target account must have default values")] +#[test] +fn test_new_definition_non_default_first_account_should_fail() { + let definition_account = AccountWithMetadata { + account: Account { + program_owner: [1, 2, 3, 4, 5, 6, 7, 8], + ..Account::default() + }, + is_authorized: true, + account_id: AccountId::new([1; 32]), + }; + let holding_account = AccountWithMetadata { + account: Account::default(), + is_authorized: true, + account_id: AccountId::new([2; 32]), + }; + let _post_states = new_fungible_definition( + definition_account, + holding_account, + String::from("test"), + 10, + ); +} + +#[should_panic(expected = "Holding target account must have default values")] +#[test] +fn test_new_definition_non_default_second_account_should_fail() { + let definition_account = AccountWithMetadata { + account: Account::default(), + is_authorized: true, + account_id: AccountId::new([1; 32]), + }; + let holding_account = AccountWithMetadata { + account: Account { + program_owner: [1, 2, 3, 4, 5, 6, 7, 8], + ..Account::default() + }, + is_authorized: true, + account_id: AccountId::new([2; 32]), + }; + let _post_states = new_fungible_definition( + definition_account, + holding_account, + String::from("test"), + 10, + ); +} + +#[test] +fn test_new_definition_with_valid_inputs_succeeds() { + let definition_account = AccountForTests::definition_account_uninit(); + let holding_account = AccountForTests::holding_account_uninit(); + + let post_states = new_fungible_definition( + definition_account, + holding_account, + String::from("test"), + BalanceForTests::init_supply(), + ); + + let [definition_account, holding_account] = post_states.try_into().unwrap(); + assert_eq!( + *definition_account.account(), + AccountForTests::definition_account_unclaimed().account + ); + + assert_eq!( + *holding_account.account(), + AccountForTests::holding_account_unclaimed().account + ); +} + +#[should_panic(expected = "Sender and recipient definition id mismatch")] +#[test] +fn test_transfer_with_different_definition_ids_should_fail() { + let sender = AccountForTests::holding_same_definition_with_authorization(); + let recipient = AccountForTests::holding_different_definition(); + let _post_states = transfer(sender, recipient, 10); +} + +#[should_panic(expected = "Insufficient balance")] +#[test] +fn test_transfer_with_insufficient_balance_should_fail() { + let sender = AccountForTests::holding_same_definition_with_authorization(); + let recipient = AccountForTests::holding_account_same_definition_mint(); + // Attempt to transfer more than balance + let _post_states = transfer(sender, recipient, 
BalanceForTests::burn_insufficient());
+}
+
+#[should_panic(expected = "Sender authorization is missing")]
+#[test]
+fn test_transfer_without_sender_authorization_should_fail() {
+    let sender = AccountForTests::holding_same_definition_without_authorization();
+    let recipient = AccountForTests::holding_account_uninit();
+    let _post_states = transfer(sender, recipient, 37);
+}
+
+#[test]
+fn test_transfer_with_valid_inputs_succeeds() {
+    let sender = AccountForTests::holding_account_init();
+    let recipient = AccountForTests::holding_account2_init();
+    let post_states = transfer(sender, recipient, BalanceForTests::transfer_amount());
+    let [sender_post, recipient_post] = post_states.try_into().unwrap();
+
+    assert_eq!(
+        *sender_post.account(),
+        AccountForTests::holding_account_init_post_transfer().account
+    );
+    assert_eq!(
+        *recipient_post.account(),
+        AccountForTests::holding_account2_init_post_transfer().account
+    );
+}
+
+#[should_panic(expected = "Invalid balance for NFT Master transfer")]
+#[test]
+fn test_transfer_with_master_nft_invalid_balance() {
+    let sender = AccountForTests::holding_account_master_nft();
+    let recipient = AccountForTests::holding_account_uninit();
+    let _post_states = transfer(sender, recipient, BalanceForTests::transfer_amount());
+}
+
+#[should_panic(expected = "Invalid balance in recipient account for NFT transfer")]
+#[test]
+fn test_transfer_with_master_nft_invalid_recipient_balance() {
+    let sender = AccountForTests::holding_account_master_nft();
+    let recipient = AccountForTests::holding_account_with_master_nft_transferred_to();
+    let _post_states = transfer(sender, recipient, BalanceForTests::printable_copies());
+}
+
+#[test]
+fn test_transfer_with_master_nft_success() {
+    let sender = AccountForTests::holding_account_master_nft();
+    let recipient = AccountForTests::holding_account_uninit();
+    let post_states = transfer(sender, recipient, BalanceForTests::printable_copies());
+    let [sender_post, recipient_post] = post_states.try_into().unwrap();
+
+    assert_eq!(
+        *sender_post.account(),
+        AccountForTests::holding_account_master_nft_post_transfer().account
+    );
+    assert_eq!(
+        *recipient_post.account(),
+        AccountForTests::holding_account_with_master_nft_transferred_to().account
+    );
+}
+
+#[test]
+fn test_token_initialize_account_succeeds() {
+    let definition_account = AccountForTests::definition_account_auth();
+    let account_to_initialize = AccountForTests::holding_account_uninit();
+    let post_states =
+        crate::initialize::initialize_account(definition_account, account_to_initialize);
+    let [definition_post, holding_post] = post_states.try_into().unwrap();
+
+    assert_eq!(
+        *definition_post.account(),
+        AccountForTests::definition_account_auth().account
+    );
+    assert_eq!(
+        *holding_post.account(),
+        Account {
+            data: Data::from(&TokenHolding::Fungible {
+                definition_id: IdForTests::pool_definition_id(),
+                balance: 0,
+            }),
+            ..Account::default()
+        }
+    );
+    assert!(holding_post.requires_claim());
+}
+
+#[test]
+#[should_panic(expected = "Mismatch Token Definition and Token Holding")]
+fn test_burn_mismatch_def() {
+    let definition_account = AccountForTests::definition_account_auth();
+    let holding_account = AccountForTests::holding_different_definition();
+    let _post_states = burn(
+        definition_account,
+        holding_account,
+        BalanceForTests::burn_success(),
+    );
+}
+
+#[test]
+#[should_panic(expected = "Authorization is missing")]
+fn test_burn_missing_authorization() {
+    let definition_account = AccountForTests::definition_account_auth();
+    let holding_account = AccountForTests::holding_same_definition_without_authorization();
+    let _post_states = burn(
+        definition_account,
+        holding_account,
+
BalanceForTests::burn_success(), + ); +} + +#[test] +#[should_panic(expected = "Insufficient balance to burn")] +fn test_burn_insufficient_balance() { + let definition_account = AccountForTests::definition_account_auth(); + let holding_account = AccountForTests::holding_same_definition_with_authorization(); + let _post_states = burn( + definition_account, + holding_account, + BalanceForTests::burn_insufficient(), + ); +} + +#[test] +#[should_panic(expected = "Total supply underflow")] +fn test_burn_total_supply_underflow() { + let definition_account = AccountForTests::definition_account_auth(); + let holding_account = + AccountForTests::holding_same_definition_with_authorization_and_large_balance(); + let _post_states = burn( + definition_account, + holding_account, + BalanceForTests::mint_overflow(), + ); +} + +#[test] +fn test_burn_success() { + let definition_account = AccountForTests::definition_account_auth(); + let holding_account = AccountForTests::holding_same_definition_with_authorization(); + let post_states = burn( + definition_account, + holding_account, + BalanceForTests::burn_success(), + ); + + let [def_post, holding_post] = post_states.try_into().unwrap(); + + assert_eq!( + *def_post.account(), + AccountForTests::definition_account_post_burn().account + ); + assert_eq!( + *holding_post.account(), + AccountForTests::holding_account_post_burn().account + ); +} + +#[test] +#[should_panic(expected = "Holding account must be valid")] +fn test_mint_not_valid_holding_account() { + let definition_account = AccountForTests::definition_account_auth(); + let holding_account = AccountForTests::definition_account_without_auth(); + let _post_states = mint( + definition_account, + holding_account, + BalanceForTests::mint_success(), + ); +} + +#[test] +#[should_panic(expected = "Definition account must be valid")] +fn test_mint_not_valid_definition_account() { + let definition_account = AccountForTests::holding_same_definition_with_authorization(); + let holding_account = AccountForTests::holding_same_definition_without_authorization(); + let _post_states = mint( + definition_account, + holding_account, + BalanceForTests::mint_success(), + ); +} + +#[test] +#[should_panic(expected = "Definition authorization is missing")] +fn test_mint_missing_authorization() { + let definition_account = AccountForTests::definition_account_without_auth(); + let holding_account = AccountForTests::holding_same_definition_without_authorization(); + let _post_states = mint( + definition_account, + holding_account, + BalanceForTests::mint_success(), + ); +} + +#[test] +#[should_panic(expected = "Mismatch Token Definition and Token Holding")] +fn test_mint_mismatched_token_definition() { + let definition_account = AccountForTests::definition_account_auth(); + let holding_account = AccountForTests::holding_different_definition(); + let _post_states = mint( + definition_account, + holding_account, + BalanceForTests::mint_success(), + ); +} + +#[test] +fn test_mint_success() { + let definition_account = AccountForTests::definition_account_auth(); + let holding_account = AccountForTests::holding_same_definition_without_authorization(); + let post_states = mint( + definition_account, + holding_account, + BalanceForTests::mint_success(), + ); + + let [def_post, holding_post] = post_states.try_into().unwrap(); + + assert_eq!( + *def_post.account(), + AccountForTests::definition_account_mint().account + ); + assert_eq!( + *holding_post.account(), + AccountForTests::holding_account_same_definition_mint().account + ); +} 
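+// Minting into an uninitialized (default) holding account should create the holding for the
+// definition and mark the resulting post-state as requiring a claim.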
+ +#[test] +fn test_mint_uninit_holding_success() { + let definition_account = AccountForTests::definition_account_auth(); + let holding_account = AccountForTests::holding_account_uninit(); + let post_states = mint( + definition_account, + holding_account, + BalanceForTests::mint_success(), + ); + + let [def_post, holding_post] = post_states.try_into().unwrap(); + + assert_eq!( + *def_post.account(), + AccountForTests::definition_account_mint().account + ); + assert_eq!( + *holding_post.account(), + AccountForTests::init_mint().account + ); + assert!(holding_post.requires_claim()); +} + +#[test] +#[should_panic(expected = "Total supply overflow")] +fn test_mint_total_supply_overflow() { + let definition_account = AccountForTests::definition_account_auth(); + let holding_account = AccountForTests::holding_same_definition_without_authorization(); + let _post_states = mint( + definition_account, + holding_account, + BalanceForTests::mint_overflow(), + ); +} + +#[test] +#[should_panic(expected = "Balance overflow on minting")] +fn test_mint_holding_account_overflow() { + let definition_account = AccountForTests::definition_account_auth(); + let holding_account = AccountForTests::holding_same_definition_without_authorization_overflow(); + let _post_states = mint( + definition_account, + holding_account, + BalanceForTests::mint_overflow(), + ); +} + +#[test] +#[should_panic(expected = "Cannot mint additional supply for Non-Fungible Tokens")] +fn test_mint_cannot_mint_unmintable_tokens() { + let definition_account = AccountForTests::definition_account_with_authorization_nonfungible(); + let holding_account = AccountForTests::holding_account_master_nft(); + let _post_states = mint( + definition_account, + holding_account, + BalanceForTests::mint_success(), + ); +} + +#[should_panic(expected = "Definition target account must have default values")] +#[test] +fn test_call_new_definition_metadata_with_init_definition() { + let definition_account = AccountForTests::definition_account_auth(); + let metadata_account = AccountWithMetadata { + account: Account::default(), + is_authorized: true, + account_id: AccountId::new([2; 32]), + }; + let holding_account = AccountWithMetadata { + account: Account::default(), + is_authorized: true, + account_id: AccountId::new([3; 32]), + }; + let new_definition = NewTokenDefinition::Fungible { + name: String::from("test"), + total_supply: 15u128, + }; + let metadata = NewTokenMetadata { + standard: MetadataStandard::Simple, + uri: "test_uri".to_string(), + creators: "test_creators".to_string(), + }; + let _post_states = new_definition_with_metadata( + definition_account, + metadata_account, + holding_account, + new_definition, + metadata, + ); +} + +#[should_panic(expected = "Metadata target account must have default values")] +#[test] +fn test_call_new_definition_metadata_with_init_metadata() { + let definition_account = AccountWithMetadata { + account: Account::default(), + is_authorized: true, + account_id: AccountId::new([1; 32]), + }; + let holding_account = AccountWithMetadata { + account: Account::default(), + is_authorized: true, + account_id: AccountId::new([3; 32]), + }; + let metadata_account = AccountForTests::holding_account_same_definition_mint(); + let new_definition = NewTokenDefinition::Fungible { + name: String::from("test"), + total_supply: 15u128, + }; + let metadata = NewTokenMetadata { + standard: MetadataStandard::Simple, + uri: "test_uri".to_string(), + creators: "test_creators".to_string(), + }; + let _post_states = 
new_definition_with_metadata( + definition_account, + holding_account, + metadata_account, + new_definition, + metadata, + ); +} + +#[should_panic(expected = "Holding target account must have default values")] +#[test] +fn test_call_new_definition_metadata_with_init_holding() { + let definition_account = AccountWithMetadata { + account: Account::default(), + is_authorized: true, + account_id: AccountId::new([1; 32]), + }; + let metadata_account = AccountWithMetadata { + account: Account::default(), + is_authorized: true, + account_id: AccountId::new([2; 32]), + }; + let holding_account = AccountForTests::holding_account_same_definition_mint(); + let new_definition = NewTokenDefinition::Fungible { + name: String::from("test"), + total_supply: 15u128, + }; + let metadata = NewTokenMetadata { + standard: MetadataStandard::Simple, + uri: "test_uri".to_string(), + creators: "test_creators".to_string(), + }; + let _post_states = new_definition_with_metadata( + definition_account, + holding_account, + metadata_account, + new_definition, + metadata, + ); +} + +#[should_panic(expected = "Master NFT Account must be authorized")] +#[test] +fn test_print_nft_master_account_must_be_authorized() { + let master_account = AccountForTests::holding_account_uninit(); + let printed_account = AccountForTests::holding_account_uninit(); + let _post_states = print_nft(master_account, printed_account); +} + +#[should_panic(expected = "Printed Account must be uninitialized")] +#[test] +fn test_print_nft_print_account_initialized() { + let master_account = AccountForTests::holding_account_master_nft(); + let printed_account = AccountForTests::holding_account_init(); + let _post_states = print_nft(master_account, printed_account); +} + +#[should_panic(expected = "Invalid Token Holding data")] +#[test] +fn test_print_nft_master_nft_invalid_token_holding() { + let master_account = AccountForTests::definition_account_auth(); + let printed_account = AccountForTests::holding_account_uninit(); + let _post_states = print_nft(master_account, printed_account); +} + +#[should_panic(expected = "Invalid Token Holding provided as NFT Master Account")] +#[test] +fn test_print_nft_master_nft_not_nft_master_account() { + let master_account = AccountForTests::holding_account_init(); + let printed_account = AccountForTests::holding_account_uninit(); + let _post_states = print_nft(master_account, printed_account); +} + +#[should_panic(expected = "Insufficient balance to print another NFT copy")] +#[test] +fn test_print_nft_master_nft_insufficient_balance() { + let master_account = AccountForTests::holding_account_master_nft_insufficient_balance(); + let printed_account = AccountForTests::holding_account_uninit(); + let _post_states = print_nft(master_account, printed_account); +} + +#[test] +fn test_print_nft_success() { + let master_account = AccountForTests::holding_account_master_nft(); + let printed_account = AccountForTests::holding_account_uninit(); + let post_states = print_nft(master_account, printed_account); + + let [post_master_nft, post_printed] = post_states.try_into().unwrap(); + + assert_eq!( + *post_master_nft.account(), + AccountForTests::holding_account_master_nft_after_print().account + ); + assert_eq!( + *post_printed.account(), + AccountForTests::holding_account_printed_nft().account + ); +} diff --git a/programs/token/src/transfer.rs b/programs/token/src/transfer.rs new file mode 100644 index 00000000..a1087bb1 --- /dev/null +++ b/programs/token/src/transfer.rs @@ -0,0 +1,110 @@ +use nssa_core::{ + 
account::{Account, AccountWithMetadata, Data}, + program::AccountPostState, +}; +use token_core::TokenHolding; + +pub fn transfer( + sender: AccountWithMetadata, + recipient: AccountWithMetadata, + balance_to_move: u128, +) -> Vec<AccountPostState> { + assert!(sender.is_authorized, "Sender authorization is missing"); + + let mut sender_holding = + TokenHolding::try_from(&sender.account.data).expect("Invalid sender data"); + + let mut recipient_holding = if recipient.account == Account::default() { + TokenHolding::zeroized_clone_from(&sender_holding) + } else { + TokenHolding::try_from(&recipient.account.data).expect("Invalid recipient data") + }; + + assert_eq!( + sender_holding.definition_id(), + recipient_holding.definition_id(), + "Sender and recipient definition id mismatch" + ); + + match (&mut sender_holding, &mut recipient_holding) { + ( + TokenHolding::Fungible { + definition_id: _, + balance: sender_balance, + }, + TokenHolding::Fungible { + definition_id: _, + balance: recipient_balance, + }, + ) => { + *sender_balance = sender_balance + .checked_sub(balance_to_move) + .expect("Insufficient balance"); + + *recipient_balance = recipient_balance + .checked_add(balance_to_move) + .expect("Recipient balance overflow"); + } + ( + TokenHolding::NftMaster { + definition_id: _, + print_balance: sender_print_balance, + }, + TokenHolding::NftMaster { + definition_id: _, + print_balance: recipient_print_balance, + }, + ) => { + assert_eq!( + *recipient_print_balance, 0, + "Invalid balance in recipient account for NFT transfer" + ); + + assert_eq!( + *sender_print_balance, balance_to_move, + "Invalid balance for NFT Master transfer" + ); + + std::mem::swap(sender_print_balance, recipient_print_balance); + } + ( + TokenHolding::NftPrintedCopy { + definition_id: _, + owned: sender_owned, + }, + TokenHolding::NftPrintedCopy { + definition_id: _, + owned: recipient_owned, + }, + ) => { + assert_eq!( + balance_to_move, 1, + "Invalid balance for NFT Printed Copy transfer" + ); + + assert!(*sender_owned, "Sender does not own the NFT Printed Copy"); + + assert!( + !*recipient_owned, + "Recipient already owns the NFT Printed Copy" + ); + + *sender_owned = false; + *recipient_owned = true; + } + _ => { + panic!("Mismatched token holding types for transfer"); + } + }; + + let mut sender_post = sender.account; + sender_post.data = Data::from(&sender_holding); + + let mut recipient_post = recipient.account; + recipient_post.data = Data::from(&recipient_holding); + + vec![ + AccountPostState::new(sender_post), + AccountPostState::new_claimed_if_default(recipient_post), + ] +} diff --git a/sequencer_core/Cargo.toml b/sequencer_core/Cargo.toml index 8d2886ce..fb900252 100644 --- a/sequencer_core/Cargo.toml +++ b/sequencer_core/Cargo.toml @@ -2,6 +2,7 @@ name = "sequencer_core" version = "0.1.0" edition = "2024" +license = { workspace = true } [dependencies] nssa.workspace = true @@ -17,6 +18,7 @@ serde_json.workspace = true tempfile.workspace = true chrono.workspace = true log.workspace = true +tokio = { workspace = true, features = ["rt-multi-thread", "macros"] } bedrock_client.workspace = true logos-blockchain-key-management-system-service.workspace = true logos-blockchain-core.workspace = true @@ -29,5 +31,4 @@ default = [] testnet = [] [dev-dependencies] -tokio = { workspace = true, features = ["rt-multi-thread", "macros"] } futures.workspace = true diff --git a/sequencer_core/src/block_settlement_client.rs b/sequencer_core/src/block_settlement_client.rs index 0aa22420..f99a116e 100644 --- 
a/sequencer_core/src/block_settlement_client.rs +++ b/sequencer_core/src/block_settlement_client.rs @@ -1,8 +1,8 @@ -use std::{fs, path::Path}; +use std::{fs, path::Path, str::FromStr}; use anyhow::{Context, Result, anyhow}; use bedrock_client::BedrockClient; -use common::block::HashableBlockData; +use common::block::Block; use logos_blockchain_core::mantle::{ MantleTx, Op, OpProof, SignedMantleTx, Transaction, TxHash, ledger, ops::channel::{ChannelId, MsgId, inscribe::InscriptionOp}, @@ -10,43 +10,44 @@ use logos_blockchain_core::mantle::{ use logos_blockchain_key_management_system_service::keys::{ ED25519_SECRET_KEY_SIZE, Ed25519Key, Ed25519PublicKey, }; +use reqwest::Url; use crate::config::BedrockConfig; /// A component that posts block data to logos blockchain +#[derive(Clone)] pub struct BlockSettlementClient { bedrock_client: BedrockClient, bedrock_signing_key: Ed25519Key, bedrock_channel_id: ChannelId, - last_message_id: MsgId, } impl BlockSettlementClient { pub fn try_new(home: &Path, config: &BedrockConfig) -> Result<Self> { let bedrock_signing_key = load_or_create_signing_key(&home.join("bedrock_signing_key")) .context("Failed to load or create signing key")?; - let bedrock_channel_id = ChannelId::from(config.channel_id); - let bedrock_client = BedrockClient::new(None, config.node_url.clone()) - .context("Failed to initialize bedrock client")?; - let channel_genesis_msg = MsgId::from([0; 32]); + let bedrock_url = Url::from_str(config.node_url.as_ref()) + .context("Bedrock node address is not a valid url")?; + let bedrock_client = + BedrockClient::new(None, bedrock_url).context("Failed to initialize bedrock client")?; Ok(Self { bedrock_client, bedrock_signing_key, - bedrock_channel_id, - last_message_id: channel_genesis_msg, + bedrock_channel_id: config.channel_id, }) } /// Create and sign a transaction for inscribing data - pub fn create_inscribe_tx(&self, data: Vec<u8>) -> (SignedMantleTx, MsgId) { + pub fn create_inscribe_tx(&self, block: &Block) -> Result<(SignedMantleTx, MsgId)> { + let inscription_data = borsh::to_vec(block)?; let verifying_key_bytes = self.bedrock_signing_key.public_key().to_bytes(); let verifying_key = Ed25519PublicKey::from_bytes(&verifying_key_bytes).expect("valid ed25519 public key"); let inscribe_op = InscriptionOp { channel_id: self.bedrock_channel_id, - inscription: data, - parent: self.last_message_id, + inscription: inscription_data, + parent: block.bedrock_parent_id.into(), signer: verifying_key, }; let inscribe_op_id = inscribe_op.id(); @@ -76,20 +77,17 @@ impl BlockSettlementClient { ledger_tx_proof: empty_ledger_signature(&tx_hash), mantle_tx: inscribe_tx, }; - (signed_mantle_tx, inscribe_op_id) + Ok((signed_mantle_tx, inscribe_op_id)) } - /// Post a transaction to the node and wait for inclusion - pub async fn post_and_wait(&mut self, block_data: &HashableBlockData) -> Result<u64> { - let inscription_data = borsh::to_vec(&block_data)?; - let (tx, new_msg_id) = self.create_inscribe_tx(inscription_data); + /// Post a transaction to the node + pub async fn submit_block_to_bedrock(&self, block: &Block) -> Result<MsgId> { + let (tx, new_msg_id) = self.create_inscribe_tx(block)?; // Post the transaction self.bedrock_client.post_transaction(tx).await?; - self.last_message_id = new_msg_id; - - Ok(block_data.block_id) + Ok(new_msg_id) } } diff --git a/sequencer_core/src/block_store.rs b/sequencer_core/src/block_store.rs index cd9aa194..a0b07445 100644 --- a/sequencer_core/src/block_store.rs +++ b/sequencer_core/src/block_store.rs @@ -2,9 +2,10 @@ use 
std::{collections::HashMap, path::Path}; use anyhow::Result; use common::{HashType, block::Block, transaction::EncodedTransaction}; +use nssa::V02State; use storage::RocksDBIO; -pub struct SequencerBlockStore { +pub struct SequencerStore { dbio: RocksDBIO, // TODO: Consider adding the hashmap to the database for faster recovery. tx_hash_to_block_map: HashMap<HashType, u64>, @@ -12,7 +13,7 @@ pub struct SequencerBlockStore { signing_key: nssa::PrivateKey, } -impl SequencerBlockStore { +impl SequencerStore { /// Starting database at the start of new chain. /// Creates files if necessary. /// @@ -42,18 +43,15 @@ impl SequencerBlockStore { /// Reopening existing database pub fn open_db_restart(location: &Path, signing_key: nssa::PrivateKey) -> Result<Self> { - SequencerBlockStore::open_db_with_genesis(location, None, signing_key) + SequencerStore::open_db_with_genesis(location, None, signing_key) } pub fn get_block_at_id(&self, id: u64) -> Result<Block> { Ok(self.dbio.get_block(id)?) } - pub fn put_block_at_id(&mut self, block: Block) -> Result<()> { - let new_transactions_map = block_to_transactions_map(&block); - self.dbio.put_block(block, false)?; - self.tx_hash_to_block_map.extend(new_transactions_map); - Ok(()) + pub fn delete_block_at_id(&mut self, block_id: u64) -> Result<()> { + Ok(self.dbio.delete_block(block_id)?) } /// Returns the transaction corresponding to the given hash, if it exists in the blockchain. @@ -81,6 +79,21 @@ impl SequencerBlockStore { pub fn signing_key(&self) -> &nssa::PrivateKey { &self.signing_key } + + pub fn get_all_blocks(&self) -> impl Iterator<Item = Result<Block>> { + self.dbio.get_all_blocks().map(|res| Ok(res?)) + } + + pub(crate) fn update(&mut self, block: Block, state: &V02State) -> Result<()> { + let new_transactions_map = block_to_transactions_map(&block); + self.dbio.atomic_update(block, state)?; + self.tx_hash_to_block_map.extend(new_transactions_map); + Ok(()) + } + + pub fn get_nssa_state(&self) -> Option<V02State> { + self.dbio.get_nssa_state().ok() + } } pub(crate) fn block_to_transactions_map(block: &Block) -> HashMap<HashType, u64> { @@ -113,11 +126,10 @@ mod tests { transactions: vec![], }; - let genesis_block = genesis_block_hashable_data.into_pending_block(&signing_key); + let genesis_block = genesis_block_hashable_data.into_pending_block(&signing_key, [0; 32]); // Start an empty node store let mut node_store = - SequencerBlockStore::open_db_with_genesis(path, Some(genesis_block), signing_key) - .unwrap(); + SequencerStore::open_db_with_genesis(path, Some(genesis_block), signing_key).unwrap(); let tx = common::test_utils::produce_dummy_empty_transaction(); let block = common::test_utils::produce_dummy_block(1, None, vec![tx.clone()]); @@ -126,7 +138,8 @@ mod tests { let retrieved_tx = node_store.get_transaction_by_hash(tx.hash()); assert_eq!(None, retrieved_tx); // Add the block with the transaction - node_store.put_block_at_id(block).unwrap(); + let dummy_state = V02State::new_with_genesis_accounts(&[], &[]); + node_store.update(block, &dummy_state).unwrap(); // Try again let retrieved_tx = node_store.get_transaction_by_hash(tx.hash()); assert_eq!(Some(tx), retrieved_tx); diff --git a/sequencer_core/src/config.rs b/sequencer_core/src/config.rs index 5911cc52..3d69e8af 100644 --- a/sequencer_core/src/config.rs +++ b/sequencer_core/src/config.rs @@ -5,7 +5,8 @@ use std::{ }; use anyhow::Result; -use reqwest::Url; +use common::sequencer_client::BasicAuth; +use logos_blockchain_core::mantle::ops::channel::ChannelId; use serde::{Deserialize, 
Serialize}; #[derive(Debug, Serialize, Deserialize, Clone)] @@ -40,6 +41,8 @@ pub struct SequencerConfig { pub mempool_max_size: usize, /// Interval in which blocks produced pub block_create_timeout_millis: u64, + /// Interval in which pending blocks are retried + pub retry_pending_blocks_timeout_millis: u64, /// Port to listen pub port: u16, /// List of initial accounts data @@ -55,9 +58,11 @@ pub struct SequencerConfig { #[derive(Clone, Serialize, Deserialize)] pub struct BedrockConfig { /// Bedrock channel ID - pub channel_id: [u8; 32], + pub channel_id: ChannelId, /// Bedrock Url - pub node_url: Url, + pub node_url: String, + /// Bedrock auth + pub auth: Option<BasicAuth>, } impl SequencerConfig { diff --git a/sequencer_core/src/lib.rs b/sequencer_core/src/lib.rs index 89cafc4c..f0bb9dbf 100644 --- a/sequencer_core/src/lib.rs +++ b/sequencer_core/src/lib.rs @@ -5,15 +5,15 @@ use anyhow::Result; use common::PINATA_BASE58; use common::{ HashType, - block::HashableBlockData, + block::{BedrockStatus, Block, HashableBlockData, MantleMsgId}, transaction::{EncodedTransaction, NSSATransaction}, }; use config::SequencerConfig; -use log::warn; +use log::{info, warn}; use mempool::{MemPool, MemPoolHandle}; use serde::{Deserialize, Serialize}; -use crate::{block_settlement_client::BlockSettlementClient, block_store::SequencerBlockStore}; +use crate::{block_settlement_client::BlockSettlementClient, block_store::SequencerStore}; mod block_settlement_client; pub mod block_store; @@ -21,11 +21,12 @@ pub mod config; pub struct SequencerCore { state: nssa::V02State, - block_store: SequencerBlockStore, + store: SequencerStore, mempool: MemPool<EncodedTransaction>, sequencer_config: SequencerConfig, chain_height: u64, block_settlement_client: Option<BlockSettlementClient>, + last_bedrock_msg_id: MantleMsgId, } #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] @@ -43,7 +44,11 @@ impl Display for TransactionMalformationError { impl std::error::Error for TransactionMalformationError {} impl SequencerCore { - /// Start Sequencer from configuration and construct transaction sender + /// Starts the sequencer using the provided configuration. + /// If an existing database is found, the sequencer state is loaded from it and + /// assumed to represent the correct latest state consistent with Bedrock-finalized data. + /// If no database is found, the sequencer performs a fresh start from genesis, + /// initializing its state with the accounts defined in the configuration file. pub fn start_from_config(config: SequencerConfig) -> (Self, MemPoolHandle<EncodedTransaction>) { let hashable_data = HashableBlockData { block_id: config.genesis_id, @@ -53,37 +58,51 @@ impl SequencerCore { }; let signing_key = nssa::PrivateKey::try_new(config.signing_key).unwrap(); - let genesis_block = hashable_data.into_pending_block(&signing_key); + let channel_genesis_msg_id = [0; 32]; + let genesis_block = hashable_data.into_pending_block(&signing_key, channel_genesis_msg_id); // Sequencer should panic if unable to open db, // as fixing this issue may require actions non-native to program scope - let block_store = SequencerBlockStore::open_db_with_genesis( + let store = SequencerStore::open_db_with_genesis( &config.home.join("rocksdb"), Some(genesis_block), signing_key, ) .unwrap(); - let mut initial_commitments = vec![]; - for init_comm_data in config.initial_commitments.clone() { - let npk = init_comm_data.npk; + let mut state = match store.get_nssa_state() { + Some(state) => { + info!("Found local database. 
Loading state and pending blocks from it."); state } None => { info!( "No database found when starting the sequencer. Creating a fresh state from the initial data in the config" ); let initial_commitments: Vec<nssa_core::Commitment> = config + .initial_commitments + .iter() + .map(|init_comm_data| { + let npk = &init_comm_data.npk; - let mut acc = init_comm_data.account; + let mut acc = init_comm_data.account.clone(); - acc.program_owner = nssa::program::Program::authenticated_transfer_program().id(); + acc.program_owner = + nssa::program::Program::authenticated_transfer_program().id(); - let comm = nssa_core::Commitment::new(&npk, &acc); + nssa_core::Commitment::new(npk, &acc) + }) + .collect(); - initial_commitments.push(comm); - } + let init_accs: Vec<(nssa::AccountId, u128)> = config + .initial_accounts + .iter() + .map(|acc_data| (acc_data.account_id.parse().unwrap(), acc_data.balance)) + .collect(); - let init_accs: Vec<(nssa::AccountId, u128)> = config - .initial_accounts - .iter() - .map(|acc_data| (acc_data.account_id.parse().unwrap(), acc_data.balance)) - .collect(); - - let mut state = nssa::V02State::new_with_genesis_accounts(&init_accs, &initial_commitments); + nssa::V02State::new_with_genesis_accounts(&init_accs, &initial_commitments) + } + }; #[cfg(feature = "testnet")] state.add_pinata_program(PINATA_BASE58.parse().unwrap()); @@ -94,37 +113,17 @@ impl SequencerCore { .expect("Block settlement client should be constructible") }); - let mut this = Self { + let sequencer_core = Self { state, - block_store, + store, mempool, chain_height: config.genesis_id, sequencer_config: config, block_settlement_client, + last_bedrock_msg_id: channel_genesis_msg_id, }; - this.sync_state_with_stored_blocks(); - - (this, mempool_handle) - } - - /// If there are stored blocks ahead of the current height, this method will load and process - /// all transaction in them in the order they are stored. The NSSA state will be updated - /// accordingly. - fn sync_state_with_stored_blocks(&mut self) { - let mut next_block_id = self.sequencer_config.genesis_id + 1; - while let Ok(block) = self.block_store.get_block_at_id(next_block_id) { - for encoded_transaction in block.body.transactions { - let transaction = NSSATransaction::try_from(&encoded_transaction).unwrap(); - // Process transaction and update state - self.execute_check_transaction_on_state(transaction) - .unwrap(); - // Update the tx hash to block id map. - self.block_store.insert(&encoded_transaction, next_block_id); - } - self.chain_height = next_block_id; - next_block_id += 1; - } + (sequencer_core, mempool_handle) } fn execute_check_transaction_on_state( @@ -148,8 +147,11 @@ impl SequencerCore { pub async fn produce_new_block_and_post_to_settlement_layer(&mut self) -> Result<u64> { let block_data = self.produce_new_block_with_mempool_transactions()?; - if let Some(block_settlement) = self.block_settlement_client.as_mut() { - block_settlement.post_and_wait(&block_data).await?; + if let Some(client) = self.block_settlement_client.as_mut() { + let block = + block_data.into_pending_block(self.store.signing_key(), self.last_bedrock_msg_id); + let msg_id = client.submit_block_to_bedrock(&block).await?; + self.last_bedrock_msg_id = msg_id.into(); log::info!("Posted block data to Bedrock"); } @@ -179,11 +181,7 @@ impl SequencerCore { } } - let prev_block_hash = self - .block_store - .get_block_at_id(self.chain_height)? 
- .header - .hash; + let prev_block_hash = self.store.get_block_at_id(self.chain_height)?.header.hash; let curr_time = chrono::Utc::now().timestamp_millis() as u64; @@ -196,9 +194,9 @@ impl SequencerCore { let block = hashable_data .clone() - .into_pending_block(self.block_store.signing_key()); + .into_pending_block(self.store.signing_key(), self.last_bedrock_msg_id); - self.block_store.put_block_at_id(block)?; + self.store.update(block, &self.state)?; self.chain_height = new_block_height; @@ -224,8 +222,8 @@ impl SequencerCore { &self.state } - pub fn block_store(&self) -> &SequencerBlockStore { - &self.block_store + pub fn block_store(&self) -> &SequencerStore { + &self.store } pub fn chain_height(&self) -> u64 { @@ -235,6 +233,39 @@ impl SequencerCore { pub fn sequencer_config(&self) -> &SequencerConfig { &self.sequencer_config } + + /// Deletes finalized blocks from the sequencer's pending block list. + /// This method must be called when new blocks are finalized on Bedrock. + /// All pending blocks with an ID less than or equal to `last_finalized_block_id` + /// are removed from the database. + pub fn clean_finalized_blocks_from_db(&mut self, last_finalized_block_id: u64) -> Result<()> { + if let Some(first_pending_block_id) = self + .get_pending_blocks()? + .iter() + .map(|block| block.header.block_id) + .min() + { + (first_pending_block_id..=last_finalized_block_id) + .try_for_each(|id| self.store.delete_block_at_id(id)) + } else { + Ok(()) + } + } + + /// Returns the list of stored pending blocks. + pub fn get_pending_blocks(&self) -> Result<Vec<Block>> { + Ok(self + .store + .get_all_blocks() + .collect::<Result<Vec<Block>>>()? + .into_iter() + .filter(|block| matches!(block.bedrock_status, BedrockStatus::Pending)) + .collect()) + } + + pub fn block_settlement_client(&self) -> Option<BlockSettlementClient> { + self.block_settlement_client.clone() + } } // TODO: Introduce type-safe wrapper around checked transaction, e.g. 
AuthenticatedTransaction @@ -297,18 +328,19 @@ mod tests { initial_commitments: vec![], signing_key: *sequencer_sign_key_for_testing().value(), bedrock_config: None, + retry_pending_blocks_timeout_millis: 1000 * 60 * 4, } } fn setup_sequencer_config() -> SequencerConfig { let acc1_account_id: Vec<u8> = vec![ - 208, 122, 210, 232, 75, 39, 250, 0, 194, 98, 240, 161, 238, 160, 255, 53, 202, 9, 115, - 84, 126, 106, 16, 111, 114, 241, 147, 194, 220, 131, 139, 68, + 148, 179, 206, 253, 199, 51, 82, 86, 232, 2, 152, 122, 80, 243, 54, 207, 237, 112, 83, + 153, 44, 59, 204, 49, 128, 84, 160, 227, 216, 149, 97, 102, ]; let acc2_account_id: Vec<u8> = vec![ - 231, 174, 119, 197, 239, 26, 5, 153, 147, 68, 175, 73, 159, 199, 138, 23, 5, 57, 141, - 98, 237, 6, 207, 46, 20, 121, 246, 222, 248, 154, 57, 188, + 30, 145, 107, 3, 207, 73, 192, 230, 160, 63, 238, 207, 18, 69, 54, 216, 103, 244, 92, + 94, 124, 248, 42, 16, 141, 19, 119, 18, 14, 226, 140, 204, ]; let initial_acc1 = AccountInitialData { @@ -680,10 +712,7 @@ mod tests { .produce_new_block_with_mempool_transactions() .unwrap() .block_id; - let block = sequencer - .block_store - .get_block_at_id(current_height) - .unwrap(); + let block = sequencer.store.get_block_at_id(current_height).unwrap(); // Only one should be included in the block assert_eq!(block.body.transactions, vec![tx.clone()]); @@ -720,10 +749,7 @@ mod tests { .produce_new_block_with_mempool_transactions() .unwrap() .block_id; - let block = sequencer - .block_store - .get_block_at_id(current_height) - .unwrap(); + let block = sequencer.store.get_block_at_id(current_height).unwrap(); assert_eq!(block.body.transactions, vec![tx.clone()]); // Add same transaction should fail @@ -732,10 +758,7 @@ mod tests { .produce_new_block_with_mempool_transactions() .unwrap() .block_id; - let block = sequencer - .block_store - .get_block_at_id(current_height) - .unwrap(); + let block = sequencer.store.get_block_at_id(current_height).unwrap(); assert!(block.body.transactions.is_empty()); } @@ -768,10 +791,7 @@ mod tests { .produce_new_block_with_mempool_transactions() .unwrap() .block_id; - let block = sequencer - .block_store - .get_block_at_id(current_height) - .unwrap(); + let block = sequencer.store.get_block_at_id(current_height).unwrap(); assert_eq!(block.body.transactions, vec![tx.clone()]); } @@ -791,4 +811,42 @@ mod tests { config.initial_accounts[1].balance + balance_to_move ); } + + #[test] + fn test_get_pending_blocks() { + let config = setup_sequencer_config(); + let (mut sequencer, _mempool_handle) = SequencerCore::start_from_config(config); + sequencer + .produce_new_block_with_mempool_transactions() + .unwrap(); + sequencer + .produce_new_block_with_mempool_transactions() + .unwrap(); + sequencer + .produce_new_block_with_mempool_transactions() + .unwrap(); + assert_eq!(sequencer.get_pending_blocks().unwrap().len(), 4); + } + + #[test] + fn test_delete_blocks() { + let config = setup_sequencer_config(); + let (mut sequencer, _mempool_handle) = SequencerCore::start_from_config(config); + sequencer + .produce_new_block_with_mempool_transactions() + .unwrap(); + sequencer + .produce_new_block_with_mempool_transactions() + .unwrap(); + sequencer + .produce_new_block_with_mempool_transactions() + .unwrap(); + + let last_finalized_block = 3; + sequencer + .clean_finalized_blocks_from_db(last_finalized_block) + .unwrap(); + + assert_eq!(sequencer.get_pending_blocks().unwrap().len(), 1); + } } diff --git a/sequencer_rpc/Cargo.toml b/sequencer_rpc/Cargo.toml index 2abd5400..1a2b2a0a 100644 --- 
a/sequencer_rpc/Cargo.toml +++ b/sequencer_rpc/Cargo.toml @@ -2,6 +2,7 @@ name = "sequencer_rpc" version = "0.1.0" edition = "2024" +license = { workspace = true } [dependencies] nssa.workspace = true diff --git a/sequencer_rpc/src/process.rs b/sequencer_rpc/src/process.rs index b89993f9..4f63915a 100644 --- a/sequencer_rpc/src/process.rs +++ b/sequencer_rpc/src/process.rs @@ -18,8 +18,8 @@ use common::{ GetInitialTestnetAccountsRequest, GetLastBlockRequest, GetLastBlockResponse, GetProgramIdsRequest, GetProgramIdsResponse, GetProofForCommitmentRequest, GetProofForCommitmentResponse, GetTransactionByHashRequest, - GetTransactionByHashResponse, HelloRequest, HelloResponse, SendTxRequest, - SendTxResponse, + GetTransactionByHashResponse, HelloRequest, HelloResponse, PostIndexerMessageRequest, + PostIndexerMessageResponse, SendTxRequest, SendTxResponse, }, }, transaction::{EncodedTransaction, NSSATransaction}, @@ -44,6 +44,7 @@ pub const GET_ACCOUNTS_NONCES: &str = "get_accounts_nonces"; pub const GET_ACCOUNT: &str = "get_account"; pub const GET_PROOF_FOR_COMMITMENT: &str = "get_proof_for_commitment"; pub const GET_PROGRAM_IDS: &str = "get_program_ids"; +pub const POST_INDEXER_MESSAGE: &str = "post_indexer_message"; pub const HELLO_FROM_SEQUENCER: &str = "HELLO_FROM_SEQUENCER"; @@ -314,6 +315,18 @@ impl JsonHandler { respond(response) } + async fn process_indexer_message(&self, request: Request) -> Result<Value, RpcErr> { + let _indexer_post_req = PostIndexerMessageRequest::parse(Some(request.params))?; + + // ToDo: Add indexer messages handling + + let response = PostIndexerMessageResponse { + status: "Success".to_string(), + }; + + respond(response) + } + pub async fn process_request_internal(&self, request: Request) -> Result<Value, RpcErr> { match request.method.as_ref() { HELLO => self.process_temp_hello(request).await, @@ -329,6 +342,7 @@ impl JsonHandler { GET_TRANSACTION_BY_HASH => self.process_get_transaction_by_hash(request).await, GET_PROOF_FOR_COMMITMENT => self.process_get_proof_by_commitment(request).await, GET_PROGRAM_IDS => self.process_get_program_ids(request).await, + POST_INDEXER_MESSAGE => self.process_indexer_message(request).await, _ => Err(RpcErr(RpcError::method_not_found(request.method))), } } @@ -340,10 +354,13 @@ mod tests { use base58::ToBase58; use base64::{Engine, engine::general_purpose}; - use common::{test_utils::sequencer_sign_key_for_testing, transaction::EncodedTransaction}; + use common::{ + sequencer_client::BasicAuth, test_utils::sequencer_sign_key_for_testing, + transaction::EncodedTransaction, + }; use sequencer_core::{ SequencerCore, - config::{AccountInitialData, SequencerConfig}, + config::{AccountInitialData, BedrockConfig, SequencerConfig}, }; use serde_json::Value; use tempfile::tempdir; @@ -355,13 +372,13 @@ mod tests { let tempdir = tempdir().unwrap(); let home = tempdir.path().to_path_buf(); let acc1_id: Vec<u8> = vec![ - 208, 122, 210, 232, 75, 39, 250, 0, 194, 98, 240, 161, 238, 160, 255, 53, 202, 9, 115, - 84, 126, 106, 16, 111, 114, 241, 147, 194, 220, 131, 139, 68, + 148, 179, 206, 253, 199, 51, 82, 86, 232, 2, 152, 122, 80, 243, 54, 207, 237, 112, 83, + 153, 44, 59, 204, 49, 128, 84, 160, 227, 216, 149, 97, 102, ]; let acc2_id: Vec<u8> = vec![ - 231, 174, 119, 197, 239, 26, 5, 153, 147, 68, 175, 73, 159, 199, 138, 23, 5, 57, 141, - 98, 237, 6, 207, 46, 20, 121, 246, 222, 248, 154, 57, 188, + 30, 145, 107, 3, 207, 73, 192, 230, 160, 63, 238, 207, 18, 69, 54, 216, 103, 244, 92, + 94, 124, 248, 42, 16, 141, 19, 119, 18, 14, 226, 140, 204, 
]; let initial_acc1 = AccountInitialData { @@ -388,12 +405,21 @@ mod tests { initial_accounts, initial_commitments: vec![], signing_key: *sequencer_sign_key_for_testing().value(), - bedrock_config: None, + retry_pending_blocks_timeout_millis: 1000 * 60 * 4, + bedrock_config: Some(BedrockConfig { + channel_id: [42; 32].into(), + node_url: "http://localhost:8080".to_string(), + auth: Some(BasicAuth { + username: "user".to_string(), + password: None, + }), + }), } } async fn components_for_tests() -> (JsonHandler, Vec<AccountInitialData>, EncodedTransaction) { let config = sequencer_config_for_tests(); + let (mut sequencer_core, mempool_handle) = SequencerCore::start_from_config(config); let initial_accounts = sequencer_core.sequencer_config().initial_accounts.clone(); @@ -401,8 +427,8 @@ mod tests { let balance_to_move = 10; let tx = common::test_utils::create_transaction_native_token_transfer( [ - 208, 122, 210, 232, 75, 39, 250, 0, 194, 98, 240, 161, 238, 160, 255, 53, 202, 9, - 115, 84, 126, 106, 16, 111, 114, 241, 147, 194, 220, 131, 139, 68, + 148, 179, 206, 253, 199, 51, 82, 86, 232, 2, 152, 122, 80, 243, 54, 207, 237, 112, + 83, 153, 44, 59, 204, 49, 128, 84, 160, 227, 216, 149, 97, 102, ], 0, [2; 32], diff --git a/sequencer_runner/Cargo.toml b/sequencer_runner/Cargo.toml index 55f56dec..346c57b2 100644 --- a/sequencer_runner/Cargo.toml +++ b/sequencer_runner/Cargo.toml @@ -2,6 +2,7 @@ name = "sequencer_runner" version = "0.1.0" edition = "2024" +license = { workspace = true } [dependencies] common.workspace = true diff --git a/sequencer_runner/configs/debug/sequencer_config.json b/sequencer_runner/configs/debug/sequencer_config.json index ad43ba65..80bfe0a4 100644 --- a/sequencer_runner/configs/debug/sequencer_config.json +++ b/sequencer_runner/configs/debug/sequencer_config.json @@ -5,7 +5,8 @@ "is_genesis_random": true, "max_num_tx_in_block": 20, "mempool_max_size": 1000, - "block_create_timeout_millis": 10000, + "block_create_timeout_millis": 5000, + "retry_pending_blocks_timeout_millis": 7000, "port": 3040, "initial_accounts": [ { @@ -156,7 +157,10 @@ 37 ], "bedrock_config": { - "channel_id": [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1], - "node_url": "http://localhost:8080" + "channel_id": "0101010101010101010101010101010101010101010101010101010101010101", + "node_url": "http://localhost:8080", + "auth": { + "username": "user" + } } } diff --git a/sequencer_runner/configs/docker/sequencer_config.json b/sequencer_runner/configs/docker/sequencer_config.json index 56101f46..8ac66d48 100644 --- a/sequencer_runner/configs/docker/sequencer_config.json +++ b/sequencer_runner/configs/docker/sequencer_config.json @@ -9,11 +9,11 @@ "port": 3040, "initial_accounts": [ { - "account_id": "BLgCRDXYdQPMMWVHYRFGQZbgeHx9frkipa8GtpG2Syqy", + "account_id": "6iArKUXxhUJqS7kCaPNhwMWt3ro71PDyBj7jwAyE2VQV", "balance": 10000 }, { - "account_id": "Gj1mJy5W7J5pfmLRujmQaLfLMWidNxQ6uwnhb666ZwHw", + "account_id": "7wHg9sbJwc6h3NP1S9bekfAzB8CHifEcxKswCKUt3YQo", "balance": 20000 } ], diff --git a/sequencer_runner/src/lib.rs b/sequencer_runner/src/lib.rs index fd4a6c08..8dbea525 100644 --- a/sequencer_runner/src/lib.rs +++ b/sequencer_runner/src/lib.rs @@ -4,7 +4,7 @@ use actix_web::dev::ServerHandle; use anyhow::Result; use clap::Parser; use common::rpc_primitives::RpcConfig; -use log::info; +use log::{info, warn}; use sequencer_core::{SequencerCore, config::SequencerConfig}; use sequencer_rpc::new_http_server; use tokio::{sync::Mutex, task::JoinHandle}; @@ -20,8 +20,14 @@ struct Args { 
pub async fn startup_sequencer( app_config: SequencerConfig, -) -> Result<(ServerHandle, SocketAddr, JoinHandle<Result<()>>)> { +) -> Result<( + ServerHandle, + SocketAddr, + JoinHandle<Result<()>>, + JoinHandle<Result<()>>, +)> { let block_timeout = app_config.block_create_timeout_millis; + let retry_pending_blocks_timeout = app_config.retry_pending_blocks_timeout_millis; let port = app_config.port; let (sequencer_core, mempool_handle) = SequencerCore::start_from_config(app_config); @@ -39,8 +45,41 @@ pub async fn startup_sequencer( let http_server_handle = http_server.handle(); tokio::spawn(http_server); - info!("Starting main sequencer loop"); + info!("Starting pending block retry loop"); + let seq_core_wrapped_for_block_retry = seq_core_wrapped.clone(); + let retry_pending_blocks_handle = tokio::spawn(async move { + loop { + tokio::time::sleep(std::time::Duration::from_millis( + retry_pending_blocks_timeout, + )) + .await; + let (pending_blocks, block_settlement_client) = { + let sequencer_core = seq_core_wrapped_for_block_retry.lock().await; + let client = sequencer_core.block_settlement_client(); + let pending_blocks = sequencer_core + .get_pending_blocks() + .expect("Sequencer should be able to retrieve pending blocks"); + (pending_blocks, client) + }; + + let Some(client) = block_settlement_client else { + continue; + }; + + info!("Resubmitting {} pending blocks", pending_blocks.len()); + for block in &pending_blocks { + if let Err(e) = client.submit_block_to_bedrock(block).await { + warn!( + "Failed to resubmit block with id {} with error {}", + block.header.block_id, e + ); + } + } + } + }); + + info!("Starting main sequencer loop"); let main_loop_handle = tokio::spawn(async move { loop { tokio::time::sleep(std::time::Duration::from_millis(block_timeout)).await; @@ -61,7 +100,12 @@ pub async fn startup_sequencer( } }); - Ok((http_server_handle, addr, main_loop_handle)) + Ok(( + http_server_handle, + addr, + main_loop_handle, + retry_pending_blocks_handle, + )) } pub async fn main_runner() -> Result<()> { @@ -81,9 +125,26 @@ pub async fn main_runner() -> Result<()> { } // ToDo: Add restart on failures - let (_, _, main_loop_handle) = startup_sequencer(app_config).await?; + let (_, _, main_loop_handle, retry_loop_handle) = startup_sequencer(app_config).await?; - main_loop_handle.await??; + info!("Sequencer running. Monitoring concurrent tasks..."); + + tokio::select! 
{ + res = main_loop_handle => { + match res { + Ok(inner_res) => warn!("Main loop exited unexpectedly: {:?}", inner_res), + Err(e) => warn!("Main loop task panicked: {:?}", e), + } + } + res = retry_loop_handle => { + match res { + Ok(inner_res) => warn!("Retry loop exited unexpectedly: {:?}", inner_res), + Err(e) => warn!("Retry loop task panicked: {:?}", e), + } + } + } + + info!("Shutting down sequencer..."); Ok(()) } diff --git a/storage/Cargo.toml b/storage/Cargo.toml index 4678560e..8da47de3 100644 --- a/storage/Cargo.toml +++ b/storage/Cargo.toml @@ -2,6 +2,7 @@ name = "storage" version = "0.1.0" edition = "2024" +license = { workspace = true } [dependencies] common.workspace = true @@ -9,3 +10,4 @@ common.workspace = true thiserror.workspace = true borsh.workspace = true rocksdb.workspace = true +nssa.workspace = true diff --git a/storage/src/lib.rs b/storage/src/lib.rs index 883684c2..b96e0d61 100644 --- a/storage/src/lib.rs +++ b/storage/src/lib.rs @@ -2,8 +2,9 @@ use std::{path::Path, sync::Arc}; use common::block::Block; use error::DbError; +use nssa::V02State; use rocksdb::{ - BoundColumnFamily, ColumnFamilyDescriptor, DBWithThreadMode, MultiThreaded, Options, + BoundColumnFamily, ColumnFamilyDescriptor, DBWithThreadMode, MultiThreaded, Options, WriteBatch, }; pub mod error; @@ -29,15 +30,15 @@ pub const DB_META_FIRST_BLOCK_SET_KEY: &str = "first_block_set"; /// Key base for storing metainformation about the last finalized block on Bedrock pub const DB_META_LAST_FINALIZED_BLOCK_ID: &str = "last_finalized_block_id"; -/// Key base for storing snapshot which describe block id -pub const DB_SNAPSHOT_BLOCK_ID_KEY: &str = "block_id"; +/// Key base for storing the NSSA state +pub const DB_NSSA_STATE_KEY: &str = "nssa_state"; /// Name of block column family pub const CF_BLOCK_NAME: &str = "cf_block"; /// Name of meta column family pub const CF_META_NAME: &str = "cf_meta"; -/// Name of snapshot column family -pub const CF_SNAPSHOT_NAME: &str = "cf_snapshot"; +/// Name of state column family +pub const CF_NSSA_STATE_NAME: &str = "cf_nssa_state"; pub type DbResult<T> = Result<T, DbError>; @@ -52,7 +53,7 @@ impl RocksDBIO { // ToDo: Add more column families for different data let cfb = ColumnFamilyDescriptor::new(CF_BLOCK_NAME, cf_opts.clone()); let cfmeta = ColumnFamilyDescriptor::new(CF_META_NAME, cf_opts.clone()); - let cfsnapshot = ColumnFamilyDescriptor::new(CF_SNAPSHOT_NAME, cf_opts.clone()); + let cfstate = ColumnFamilyDescriptor::new(CF_NSSA_STATE_NAME, cf_opts.clone()); let mut db_opts = Options::default(); db_opts.create_missing_column_families(true); @@ -60,7 +61,7 @@ impl RocksDBIO { let db = DBWithThreadMode::<MultiThreaded>::open_cf_descriptors( &db_opts, path, - vec![cfb, cfmeta, cfsnapshot], + vec![cfb, cfmeta, cfstate], ); let dbio = Self { @@ -92,7 +93,7 @@ impl RocksDBIO { // ToDo: Add more column families for different data let _cfb = ColumnFamilyDescriptor::new(CF_BLOCK_NAME, cf_opts.clone()); let _cfmeta = ColumnFamilyDescriptor::new(CF_META_NAME, cf_opts.clone()); - let _cfsnapshot = ColumnFamilyDescriptor::new(CF_SNAPSHOT_NAME, cf_opts.clone()); + let _cfstate = ColumnFamilyDescriptor::new(CF_NSSA_STATE_NAME, cf_opts.clone()); let mut db_opts = Options::default(); db_opts.create_missing_column_families(true); @@ -109,8 +110,8 @@ impl RocksDBIO { self.db.cf_handle(CF_BLOCK_NAME).unwrap() } - pub fn snapshot_column(&self) -> Arc<BoundColumnFamily<'_>> { - self.db.cf_handle(CF_SNAPSHOT_NAME).unwrap() + pub fn nssa_state_column(&self) -> Arc<BoundColumnFamily<'_>> { + 
self.db.cf_handle(CF_NSSA_STATE_NAME).unwrap() } pub fn get_meta_first_block_in_db(&self) -> DbResult<u64> { @@ -189,6 +190,24 @@ Ok(res.is_some()) } + pub fn put_nssa_state_in_db(&self, state: &V02State, batch: &mut WriteBatch) -> DbResult<()> { + let cf_nssa_state = self.nssa_state_column(); + batch.put_cf( + &cf_nssa_state, + borsh::to_vec(&DB_NSSA_STATE_KEY).map_err(|err| { + DbError::borsh_cast_message( + err, + Some("Failed to serialize DB_NSSA_STATE_KEY".to_string()), + ) + })?, + borsh::to_vec(state).map_err(|err| { + DbError::borsh_cast_message(err, Some("Failed to serialize NSSA state".to_string())) + })?, + ); + + Ok(()) + } + pub fn put_meta_first_block_in_db(&self, block: Block) -> DbResult<()> { let cf_meta = self.meta_column(); self.db @@ -209,7 +228,15 @@ ) .map_err(|rerr| DbError::rocksdb_cast_message(rerr, None))?; - self.put_block(block, true)?; + let mut batch = WriteBatch::default(); + self.put_block(block, true, &mut batch)?; + self.db.write(batch).map_err(|rerr| { + DbError::rocksdb_cast_message( + rerr, + Some("Failed to write first block in db".to_string()), + ) + })?; + Ok(()) } @@ -274,7 +301,7 @@ Ok(()) } - pub fn put_block(&self, block: Block, first: bool) -> DbResult<()> { + pub fn put_block(&self, block: Block, first: bool, batch: &mut WriteBatch) -> DbResult<()> { let cf_block = self.block_column(); if !first { @@ -285,23 +312,15 @@ } } - self.db - .put_cf( - &cf_block, - borsh::to_vec(&block.header.block_id).map_err(|err| { - DbError::borsh_cast_message( - err, - Some("Failed to serialize block id".to_string()), - ) - })?, - borsh::to_vec(&block).map_err(|err| { - DbError::borsh_cast_message( - err, - Some("Failed to serialize block data".to_string()), - ) - })?, - ) - .map_err(|rerr| DbError::rocksdb_cast_message(rerr, None))?; + batch.put_cf( + &cf_block, + borsh::to_vec(&block.header.block_id).map_err(|err| { + DbError::borsh_cast_message(err, Some("Failed to serialize block id".to_string())) + })?, + borsh::to_vec(&block).map_err(|err| { + DbError::borsh_cast_message(err, Some("Failed to serialize block data".to_string())) + })?, + ); Ok(()) } @@ -334,32 +353,90 @@ } } - pub fn get_snapshot_block_id(&self) -> DbResult<u64> { - let cf_snapshot = self.snapshot_column(); + pub fn get_nssa_state(&self) -> DbResult<V02State> { + let cf_nssa_state = self.nssa_state_column(); let res = self .db .get_cf( - &cf_snapshot, - borsh::to_vec(&DB_SNAPSHOT_BLOCK_ID_KEY).map_err(|err| { + &cf_nssa_state, + borsh::to_vec(&DB_NSSA_STATE_KEY).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize DB_SNAPSHOT_BLOCK_ID_KEY".to_string()), + Some("Failed to serialize DB_NSSA_STATE_KEY".to_string()), ) })?, ) .map_err(|rerr| DbError::rocksdb_cast_message(rerr, None))?; if let Some(data) = res { - Ok(borsh::from_slice::<u64>(&data).map_err(|err| { + Ok(borsh::from_slice::<V02State>(&data).map_err(|serr| { DbError::borsh_cast_message( - err, - Some("Failed to deserialize last block".to_string()), + serr, + Some("Failed to deserialize NSSA state".to_string()), ) })?) 
} else { Err(DbError::db_interaction_error( - "Snapshot block ID not found".to_string(), + "NSSA state not found".to_string(), )) } } + + pub fn delete_block(&self, block_id: u64) -> DbResult<()> { + let cf_block = self.block_column(); + let key = borsh::to_vec(&block_id).map_err(|err| { + DbError::borsh_cast_message(err, Some("Failed to serialize block id".to_string())) + })?; + + if self + .db + .get_cf(&cf_block, &key) + .map_err(|rerr| DbError::rocksdb_cast_message(rerr, None))? + .is_none() + { + return Err(DbError::db_interaction_error( + "Block with this id not found".to_string(), + )); + } + + self.db + .delete_cf(&cf_block, key) + .map_err(|rerr| DbError::rocksdb_cast_message(rerr, None))?; + + Ok(()) + } + + pub fn get_all_blocks(&self) -> impl Iterator<Item = DbResult<Block>> { + let cf_block = self.block_column(); + self.db + .iterator_cf(&cf_block, rocksdb::IteratorMode::Start) + .map(|res| { + let (_key, value) = res.map_err(|rerr| { + DbError::rocksdb_cast_message( + rerr, + Some("Failed to get key value pair".to_string()), + ) + })?; + + borsh::from_slice::<Block>(&value).map_err(|err| { + DbError::borsh_cast_message( + err, + Some("Failed to deserialize block data".to_string()), + ) + }) + }) + } + + pub fn atomic_update(&self, block: Block, state: &V02State) -> DbResult<()> { + let block_id = block.header.block_id; + let mut batch = WriteBatch::default(); + self.put_block(block, false, &mut batch)?; + self.put_nssa_state_in_db(state, &mut batch)?; + self.db.write(batch).map_err(|rerr| { + DbError::rocksdb_cast_message( + rerr, + Some(format!("Failed to update db with block {block_id}")), + ) + }) + } } diff --git a/test_program_methods/Cargo.toml b/test_program_methods/Cargo.toml index 345c479f..1c3368c7 100644 --- a/test_program_methods/Cargo.toml +++ b/test_program_methods/Cargo.toml @@ -2,6 +2,7 @@ name = "test_program_methods" version = "0.1.0" edition = "2024" +license = { workspace = true } [build-dependencies] risc0-build.workspace = true diff --git a/test_program_methods/guest/Cargo.toml b/test_program_methods/guest/Cargo.toml index 17613351..21c4fdc7 100644 --- a/test_program_methods/guest/Cargo.toml +++ b/test_program_methods/guest/Cargo.toml @@ -2,6 +2,7 @@ name = "test_programs" version = "0.1.0" edition = "2024" +license = { workspace = true } [dependencies] nssa_core.workspace = true diff --git a/wallet-ffi/Cargo.toml b/wallet-ffi/Cargo.toml new file mode 100644 index 00000000..4305226b --- /dev/null +++ b/wallet-ffi/Cargo.toml @@ -0,0 +1,21 @@ +[package] +name = "wallet-ffi" +version = "0.1.0" +edition = "2021" +license = { workspace = true } + +[lib] +crate-type = ["rlib", "cdylib", "staticlib"] + +[dependencies] +wallet.workspace = true +nssa.workspace = true +common.workspace = true +nssa_core.workspace = true +tokio.workspace = true + +[build-dependencies] +cbindgen = "0.29" + +[dev-dependencies] +tempfile = "3" diff --git a/wallet-ffi/build.rs b/wallet-ffi/build.rs new file mode 100644 index 00000000..63ee0d9e --- /dev/null +++ b/wallet-ffi/build.rs @@ -0,0 +1,13 @@ +fn main() { + let crate_dir = std::env::var("CARGO_MANIFEST_DIR").unwrap(); + + let config = + cbindgen::Config::from_file("cbindgen.toml").expect("Unable to read cbindgen.toml"); + + cbindgen::Builder::new() + .with_crate(crate_dir) + .with_config(config) + .generate() + .expect("Unable to generate bindings") + .write_to_file("wallet_ffi.h"); +} diff --git a/wallet-ffi/cbindgen.toml b/wallet-ffi/cbindgen.toml new file mode 100644 index 00000000..42c46543 --- /dev/null +++ 
b/wallet-ffi/cbindgen.toml @@ -0,0 +1,40 @@ +language = "C" +header = """ +/** + * NSSA Wallet FFI Bindings + * + * Thread Safety: All functions are thread-safe. The wallet handle can be + * shared across threads, but operations are serialized internally. + * + * Memory Management: + * - Functions returning pointers allocate memory that must be freed + * - Use the corresponding wallet_ffi_free_* function to free memory + * - Never free memory returned by FFI using standard C free() + * + * Error Handling: + * - Functions return WalletFfiError codes + * - On error, call wallet_ffi_get_last_error() for detailed message + * - The error string must be freed with wallet_ffi_free_error_string() + * + * Initialization: + * 1. Call wallet_ffi_init_runtime() before any other function + * 2. Create wallet with wallet_ffi_create_new() or wallet_ffi_open() + * 3. Destroy wallet with wallet_ffi_destroy() when done + */ +""" + +include_guard = "WALLET_FFI_H" +include_version = true +no_includes = false + +[export] +include = ["Ffi.*", "WalletFfiError", "WalletHandle"] + +[enum] +rename_variants = "ScreamingSnakeCase" + +[fn] +rename_args = "None" + +[struct] +rename_fields = "None" diff --git a/wallet-ffi/src/account.rs b/wallet-ffi/src/account.rs new file mode 100644 index 00000000..cf237276 --- /dev/null +++ b/wallet-ffi/src/account.rs @@ -0,0 +1,375 @@ +//! Account management functions. + +use std::ptr; + +use nssa::AccountId; + +use crate::{ + block_on, + error::{print_error, WalletFfiError}, + types::{FfiAccount, FfiAccountList, FfiAccountListEntry, FfiBytes32, WalletHandle}, + wallet::get_wallet, +}; + +/// Create a new public account. +/// +/// Public accounts use standard transaction signing and are suitable for +/// non-private operations. +/// +/// # Parameters +/// - `handle`: Valid wallet handle +/// - `out_account_id`: Output pointer for the new account ID (32 bytes) +/// +/// # Returns +/// - `Success` on successful creation +/// - Error code on failure +/// +/// # Safety +/// - `handle` must be a valid wallet handle from `wallet_ffi_create_new` or `wallet_ffi_open` +/// - `out_account_id` must be a valid pointer to a `FfiBytes32` struct +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_create_account_public( + handle: *mut WalletHandle, + out_account_id: *mut FfiBytes32, +) -> WalletFfiError { + let wrapper = match get_wallet(handle) { + Ok(w) => w, + Err(e) => return e, + }; + + if out_account_id.is_null() { + print_error("Null output pointer for account_id"); + return WalletFfiError::NullPointer; + } + + let mut wallet = match wrapper.core.lock() { + Ok(w) => w, + Err(e) => { + print_error(format!("Failed to lock wallet: {}", e)); + return WalletFfiError::InternalError; + } + }; + + let (account_id, _chain_index) = wallet.create_new_account_public(None); + + unsafe { + (*out_account_id).data = *account_id.value(); + } + + WalletFfiError::Success +} + +/// Create a new private account. +/// +/// Private accounts use privacy-preserving transactions with nullifiers +/// and commitments. 
+/// +/// # Parameters +/// - `handle`: Valid wallet handle +/// - `out_account_id`: Output pointer for the new account ID (32 bytes) +/// +/// # Returns +/// - `Success` on successful creation +/// - Error code on failure +/// +/// # Safety +/// - `handle` must be a valid wallet handle from `wallet_ffi_create_new` or `wallet_ffi_open` +/// - `out_account_id` must be a valid pointer to a `FfiBytes32` struct +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_create_account_private( + handle: *mut WalletHandle, + out_account_id: *mut FfiBytes32, +) -> WalletFfiError { + let wrapper = match get_wallet(handle) { + Ok(w) => w, + Err(e) => return e, + }; + + if out_account_id.is_null() { + print_error("Null output pointer for account_id"); + return WalletFfiError::NullPointer; + } + + let mut wallet = match wrapper.core.lock() { + Ok(w) => w, + Err(e) => { + print_error(format!("Failed to lock wallet: {}", e)); + return WalletFfiError::InternalError; + } + }; + + let (account_id, _chain_index) = wallet.create_new_account_private(None); + + unsafe { + (*out_account_id).data = *account_id.value(); + } + + WalletFfiError::Success +} + +/// List all accounts in the wallet. +/// +/// Returns both public and private accounts managed by this wallet. +/// +/// # Parameters +/// - `handle`: Valid wallet handle +/// - `out_list`: Output pointer for the account list +/// +/// # Returns +/// - `Success` on successful listing +/// - Error code on failure +/// +/// # Memory +/// The returned list must be freed with `wallet_ffi_free_account_list()`. +/// +/// # Safety +/// - `handle` must be a valid wallet handle from `wallet_ffi_create_new` or `wallet_ffi_open` +/// - `out_list` must be a valid pointer to a `FfiAccountList` struct +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_list_accounts( + handle: *mut WalletHandle, + out_list: *mut FfiAccountList, +) -> WalletFfiError { + let wrapper = match get_wallet(handle) { + Ok(w) => w, + Err(e) => return e, + }; + + if out_list.is_null() { + print_error("Null output pointer for account list"); + return WalletFfiError::NullPointer; + } + + let wallet = match wrapper.core.lock() { + Ok(w) => w, + Err(e) => { + print_error(format!("Failed to lock wallet: {}", e)); + return WalletFfiError::InternalError; + } + }; + + let user_data = &wallet.storage().user_data; + let mut entries = Vec::new(); + + // Public accounts from default signing keys (preconfigured) + for account_id in user_data.default_pub_account_signing_keys.keys() { + entries.push(FfiAccountListEntry { + account_id: FfiBytes32::from_account_id(account_id), + is_public: true, + }); + } + + // Public accounts from key tree (generated) + for account_id in user_data.public_key_tree.account_id_map.keys() { + entries.push(FfiAccountListEntry { + account_id: FfiBytes32::from_account_id(account_id), + is_public: true, + }); + } + + // Private accounts from default accounts (preconfigured) + for account_id in user_data.default_user_private_accounts.keys() { + entries.push(FfiAccountListEntry { + account_id: FfiBytes32::from_account_id(account_id), + is_public: false, + }); + } + + // Private accounts from key tree (generated) + for account_id in user_data.private_key_tree.account_id_map.keys() { + entries.push(FfiAccountListEntry { + account_id: FfiBytes32::from_account_id(account_id), + is_public: false, + }); + } + + let count = entries.len(); + + if count == 0 { + unsafe { + (*out_list).entries = ptr::null_mut(); + (*out_list).count = 0; + } + } else { + let entries_boxed = entries.into_boxed_slice(); + let 
entries_ptr = Box::into_raw(entries_boxed) as *mut FfiAccountListEntry; + + unsafe { + (*out_list).entries = entries_ptr; + (*out_list).count = count; + } + } + + WalletFfiError::Success +} + +/// Free an account list returned by `wallet_ffi_list_accounts`. +/// +/// # Safety +/// The list must be either null or a valid list returned by `wallet_ffi_list_accounts`. +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_free_account_list(list: *mut FfiAccountList) { + if list.is_null() { + return; + } + + unsafe { + let list = &*list; + if !list.entries.is_null() && list.count > 0 { + let slice = std::slice::from_raw_parts_mut(list.entries, list.count); + drop(Box::from_raw(slice as *mut [FfiAccountListEntry])); + } + } +} + +/// Get account balance. +/// +/// For public accounts, this fetches the balance from the network. +/// For private accounts, this returns the locally cached balance. +/// +/// # Parameters +/// - `handle`: Valid wallet handle +/// - `account_id`: The account ID (32 bytes) +/// - `is_public`: Whether this is a public account +/// - `out_balance`: Output for balance as little-endian [u8; 16] +/// +/// # Returns +/// - `Success` on successful query +/// - Error code on failure +/// +/// # Safety +/// - `handle` must be a valid wallet handle from `wallet_ffi_create_new` or `wallet_ffi_open` +/// - `account_id` must be a valid pointer to a `FfiBytes32` struct +/// - `out_balance` must be a valid pointer to a `[u8; 16]` array +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_get_balance( + handle: *mut WalletHandle, + account_id: *const FfiBytes32, + is_public: bool, + out_balance: *mut [u8; 16], +) -> WalletFfiError { + let wrapper = match get_wallet(handle) { + Ok(w) => w, + Err(e) => return e, + }; + + if account_id.is_null() || out_balance.is_null() { + print_error("Null pointer argument"); + return WalletFfiError::NullPointer; + } + + let wallet = match wrapper.core.lock() { + Ok(w) => w, + Err(e) => { + print_error(format!("Failed to lock wallet: {}", e)); + return WalletFfiError::InternalError; + } + }; + + let account_id = AccountId::new(unsafe { (*account_id).data }); + + let balance = if is_public { + match block_on(wallet.get_account_balance(account_id)) { + Ok(Ok(b)) => b, + Ok(Err(e)) => { + print_error(format!("Failed to get balance: {}", e)); + return WalletFfiError::NetworkError; + } + Err(e) => return e, + } + } else { + match wallet.get_account_private(&account_id) { + Some(account) => account.balance, + None => { + print_error("Private account not found"); + return WalletFfiError::AccountNotFound; + } + } + }; + + unsafe { + *out_balance = balance.to_le_bytes(); + } + + WalletFfiError::Success +} + +/// Get full public account data from the network. +/// +/// # Parameters +/// - `handle`: Valid wallet handle +/// - `account_id`: The account ID (32 bytes) +/// - `out_account`: Output pointer for account data +/// +/// # Returns +/// - `Success` on successful query +/// - Error code on failure +/// +/// # Memory +/// The account data must be freed with `wallet_ffi_free_account_data()`. 
+/// +/// # Safety +/// - `handle` must be a valid wallet handle from `wallet_ffi_create_new` or `wallet_ffi_open` +/// - `account_id` must be a valid pointer to a `FfiBytes32` struct +/// - `out_account` must be a valid pointer to a `FfiAccount` struct +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_get_account_public( + handle: *mut WalletHandle, + account_id: *const FfiBytes32, + out_account: *mut FfiAccount, +) -> WalletFfiError { + let wrapper = match get_wallet(handle) { + Ok(w) => w, + Err(e) => return e, + }; + + if account_id.is_null() || out_account.is_null() { + print_error("Null pointer argument"); + return WalletFfiError::NullPointer; + } + + let wallet = match wrapper.core.lock() { + Ok(w) => w, + Err(e) => { + print_error(format!("Failed to lock wallet: {}", e)); + return WalletFfiError::InternalError; + } + }; + + let account_id = AccountId::new(unsafe { (*account_id).data }); + + let account = match block_on(wallet.get_account_public(account_id)) { + Ok(Ok(a)) => a, + Ok(Err(e)) => { + print_error(format!("Failed to get account: {}", e)); + return WalletFfiError::NetworkError; + } + Err(e) => return e, + }; + + unsafe { + *out_account = account.into(); + } + + WalletFfiError::Success +} + +/// Free account data returned by `wallet_ffi_get_account_public`. +/// +/// # Safety +/// The account must be either null or a valid account returned by +/// `wallet_ffi_get_account_public`. +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_free_account_data(account: *mut FfiAccount) { + if account.is_null() { + return; + } + + unsafe { + let account = &*account; + if !account.data.is_null() && account.data_len > 0 { + let slice = std::slice::from_raw_parts_mut(account.data as *mut u8, account.data_len); + drop(Box::from_raw(slice as *mut [u8])); + } + } +} diff --git a/wallet-ffi/src/error.rs b/wallet-ffi/src/error.rs new file mode 100644 index 00000000..ab9ce6dd --- /dev/null +++ b/wallet-ffi/src/error.rs @@ -0,0 +1,50 @@ +//! Error handling for the FFI layer. +//! +//! Uses numeric error codes with error messages printed to stderr. + +/// Error codes returned by FFI functions. +#[repr(C)] +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum WalletFfiError { + /// Operation completed successfully + Success = 0, + /// A null pointer was passed where a valid pointer was expected + NullPointer = 1, + /// Invalid UTF-8 string + InvalidUtf8 = 2, + /// Wallet handle is not initialized + WalletNotInitialized = 3, + /// Configuration error + ConfigError = 4, + /// Storage/persistence error + StorageError = 5, + /// Network/RPC error + NetworkError = 6, + /// Account not found + AccountNotFound = 7, + /// Key not found for account + KeyNotFound = 8, + /// Insufficient funds for operation + InsufficientFunds = 9, + /// Invalid account ID format + InvalidAccountId = 10, + /// Tokio runtime error + RuntimeError = 11, + /// Password required but not provided + PasswordRequired = 12, + /// Block synchronization error + SyncError = 13, + /// Serialization/deserialization error + SerializationError = 14, + /// Invalid conversion from FFI types to NSSA types + InvalidTypeConversion = 15, + /// Invalid Key value + InvalidKeyValue = 16, + /// Internal error (catch-all) + InternalError = 99, +} + +/// Log an error message to stderr. +pub fn print_error(msg: impl Into<String>) { + eprintln!("[wallet-ffi] {}", msg.into()); +} diff --git a/wallet-ffi/src/keys.rs b/wallet-ffi/src/keys.rs new file mode 100644 index 00000000..08661a50 --- /dev/null +++ b/wallet-ffi/src/keys.rs @@ -0,0 +1,253 @@ +//! 
Key retrieval functions. + +use std::ptr; + +use nssa::{AccountId, PublicKey}; + +use crate::{ + error::{print_error, WalletFfiError}, + types::{FfiBytes32, FfiPrivateAccountKeys, FfiPublicAccountKey, WalletHandle}, + wallet::get_wallet, +}; + +/// Get the public key for a public account. +/// +/// This returns the public key derived from the account's signing key. +/// +/// # Parameters +/// - `handle`: Valid wallet handle +/// - `account_id`: The account ID (32 bytes) +/// - `out_public_key`: Output pointer for the public key +/// +/// # Returns +/// - `Success` on successful retrieval +/// - `KeyNotFound` if the account's key is not in this wallet +/// - Error code on other failures +/// +/// # Safety +/// - `handle` must be a valid wallet handle from `wallet_ffi_create_new` or `wallet_ffi_open` +/// - `account_id` must be a valid pointer to a `FfiBytes32` struct +/// - `out_public_key` must be a valid pointer to a `FfiPublicAccountKey` struct +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_get_public_account_key( + handle: *mut WalletHandle, + account_id: *const FfiBytes32, + out_public_key: *mut FfiPublicAccountKey, +) -> WalletFfiError { + let wrapper = match get_wallet(handle) { + Ok(w) => w, + Err(e) => return e, + }; + + if account_id.is_null() || out_public_key.is_null() { + print_error("Null pointer argument"); + return WalletFfiError::NullPointer; + } + + let wallet = match wrapper.core.lock() { + Ok(w) => w, + Err(e) => { + print_error(format!("Failed to lock wallet: {}", e)); + return WalletFfiError::InternalError; + } + }; + + let account_id = AccountId::new(unsafe { (*account_id).data }); + + let private_key = match wallet.get_account_public_signing_key(&account_id) { + Some(k) => k, + None => { + print_error("Public account key not found in wallet"); + return WalletFfiError::KeyNotFound; + } + }; + + let public_key = PublicKey::new_from_private_key(private_key); + + unsafe { + *out_public_key = public_key.into(); + } + + WalletFfiError::Success +} + +/// Get keys for a private account. +/// +/// Returns the nullifier public key (NPK) and incoming viewing public key (IPK) +/// for the specified private account. These keys are safe to share publicly. +/// +/// # Parameters +/// - `handle`: Valid wallet handle +/// - `account_id`: The account ID (32 bytes) +/// - `out_keys`: Output pointer for the key data +/// +/// # Returns +/// - `Success` on successful retrieval +/// - `AccountNotFound` if the private account is not in this wallet +/// - Error code on other failures +/// +/// # Memory +/// The keys structure must be freed with `wallet_ffi_free_private_account_keys()`. 
+/// +/// # Safety +/// - `handle` must be a valid wallet handle from `wallet_ffi_create_new` or `wallet_ffi_open` +/// - `account_id` must be a valid pointer to a `FfiBytes32` struct +/// - `out_keys` must be a valid pointer to a `FfiPrivateAccountKeys` struct +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_get_private_account_keys( + handle: *mut WalletHandle, + account_id: *const FfiBytes32, + out_keys: *mut FfiPrivateAccountKeys, +) -> WalletFfiError { + let wrapper = match get_wallet(handle) { + Ok(w) => w, + Err(e) => return e, + }; + + if account_id.is_null() || out_keys.is_null() { + print_error("Null pointer argument"); + return WalletFfiError::NullPointer; + } + + let wallet = match wrapper.core.lock() { + Ok(w) => w, + Err(e) => { + print_error(format!("Failed to lock wallet: {}", e)); + return WalletFfiError::InternalError; + } + }; + + let account_id = AccountId::new(unsafe { (*account_id).data }); + + let (key_chain, _account) = match wallet.storage().user_data.get_private_account(&account_id) { + Some(k) => k, + None => { + print_error("Private account not found in wallet"); + return WalletFfiError::AccountNotFound; + } + }; + + // NPK is a 32-byte array + let npk_bytes = key_chain.nullifer_public_key.0; + + // IPK is a compressed secp256k1 point (33 bytes) + let ipk_bytes = key_chain.incoming_viewing_public_key.to_bytes(); + let ipk_len = ipk_bytes.len(); + let ipk_vec = ipk_bytes.to_vec(); + let ipk_boxed = ipk_vec.into_boxed_slice(); + let ipk_ptr = Box::into_raw(ipk_boxed) as *const u8; + + unsafe { + (*out_keys).nullifier_public_key.data = npk_bytes; + (*out_keys).incoming_viewing_public_key = ipk_ptr; + (*out_keys).incoming_viewing_public_key_len = ipk_len; + } + + WalletFfiError::Success +} + +/// Free private account keys returned by `wallet_ffi_get_private_account_keys`. +/// +/// # Safety +/// The keys must be either null or valid keys returned by +/// `wallet_ffi_get_private_account_keys`. +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_free_private_account_keys(keys: *mut FfiPrivateAccountKeys) { + if keys.is_null() { + return; + } + + unsafe { + let keys = &*keys; + if !keys.incoming_viewing_public_key.is_null() && keys.incoming_viewing_public_key_len > 0 { + let slice = std::slice::from_raw_parts_mut( + keys.incoming_viewing_public_key as *mut u8, + keys.incoming_viewing_public_key_len, + ); + drop(Box::from_raw(slice as *mut [u8])); + } + } +} + +/// Convert an account ID to a Base58 string. +/// +/// # Parameters +/// - `account_id`: The account ID (32 bytes) +/// +/// # Returns +/// - Pointer to null-terminated Base58 string on success +/// - Null pointer on error +/// +/// # Memory +/// The returned string must be freed with `wallet_ffi_free_string()`. +/// +/// # Safety +/// - `account_id` must be a valid pointer to a `FfiBytes32` struct +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_account_id_to_base58( + account_id: *const FfiBytes32, +) -> *mut std::ffi::c_char { + if account_id.is_null() { + print_error("Null account_id pointer"); + return ptr::null_mut(); + } + + let account_id = AccountId::new(unsafe { (*account_id).data }); + let base58_str = account_id.to_string(); + + match std::ffi::CString::new(base58_str) { + Ok(s) => s.into_raw(), + Err(e) => { + print_error(format!("Failed to create C string: {}", e)); + ptr::null_mut() + } + } +} + +/// Parse a Base58 string into an account ID. 
+/// +/// # Parameters +/// - `base58_str`: Null-terminated Base58 string +/// - `out_account_id`: Output pointer for the account ID (32 bytes) +/// +/// # Returns +/// - `Success` on successful parsing +/// - `InvalidAccountId` if the string is not valid Base58 +/// - Error code on other failures +/// +/// # Safety +/// - `base58_str` must be a valid pointer to a null-terminated C string +/// - `out_account_id` must be a valid pointer to a `FfiBytes32` struct +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_account_id_from_base58( + base58_str: *const std::ffi::c_char, + out_account_id: *mut FfiBytes32, +) -> WalletFfiError { + if base58_str.is_null() || out_account_id.is_null() { + print_error("Null pointer argument"); + return WalletFfiError::NullPointer; + } + + let c_str = unsafe { std::ffi::CStr::from_ptr(base58_str) }; + let str_slice = match c_str.to_str() { + Ok(s) => s, + Err(e) => { + print_error(format!("Invalid UTF-8: {}", e)); + return WalletFfiError::InvalidUtf8; + } + }; + + let account_id: AccountId = match str_slice.parse() { + Ok(id) => id, + Err(e) => { + print_error(format!("Invalid Base58 account ID: {}", e)); + return WalletFfiError::InvalidAccountId; + } + }; + + unsafe { + (*out_account_id).data = *account_id.value(); + } + + WalletFfiError::Success +} diff --git a/wallet-ffi/src/lib.rs b/wallet-ffi/src/lib.rs new file mode 100644 index 00000000..e7f2ce98 --- /dev/null +++ b/wallet-ffi/src/lib.rs @@ -0,0 +1,63 @@ +//! NSSA Wallet FFI Library +//! +//! This crate provides C-compatible bindings for the NSSA wallet functionality. +//! +//! # Usage +//! +//! 1. Initialize the runtime with `wallet_ffi_init_runtime()` +//! 2. Create or open a wallet with `wallet_ffi_create_new()` or `wallet_ffi_open()` +//! 3. Use the wallet functions to manage accounts and transfers +//! 4. Destroy the wallet with `wallet_ffi_destroy()` when done +//! +//! # Thread Safety +//! +//! All functions are thread-safe. The wallet handle uses internal locking +//! to ensure safe concurrent access. +//! +//! # Memory Management +//! +//! - Functions returning pointers allocate memory that must be freed +//! - Use the corresponding `wallet_ffi_free_*` function to free memory +//! - Never free memory returned by FFI using standard C `free()` + +pub mod account; +pub mod error; +pub mod keys; +pub mod sync; +pub mod transfer; +pub mod types; +pub mod wallet; + +use std::sync::OnceLock; + +// Re-export public types for cbindgen +pub use error::WalletFfiError as FfiError; +use tokio::runtime::Handle; +pub use types::*; + +use crate::error::{print_error, WalletFfiError}; + +static TOKIO_RUNTIME: OnceLock<tokio::runtime::Runtime> = OnceLock::new(); + +/// Get a reference to the global runtime. +pub(crate) fn get_runtime() -> Result<&'static Handle, WalletFfiError> { + let runtime = TOKIO_RUNTIME.get_or_init(|| { + match tokio::runtime::Builder::new_multi_thread() + .enable_all() + .build() + { + Ok(runtime) => runtime, + Err(e) => { + print_error(format!("{e}")); + panic!("Error initializing tokio runtime"); + } + } + }); + Ok(runtime.handle()) +} + +/// Run an async future on the global runtime, blocking until completion. +pub(crate) fn block_on<F: std::future::Future>(future: F) -> Result<F::Output, WalletFfiError> { + let runtime = get_runtime()?; + Ok(runtime.block_on(future)) +} diff --git a/wallet-ffi/src/sync.rs b/wallet-ffi/src/sync.rs new file mode 100644 index 00000000..3979f935 --- /dev/null +++ b/wallet-ffi/src/sync.rs @@ -0,0 +1,151 @@ +//! Block synchronization functions. 
+ +use crate::{ + block_on, + error::{print_error, WalletFfiError}, + types::WalletHandle, + wallet::get_wallet, +}; + +/// Synchronize private accounts to a specific block. +/// +/// This scans the blockchain from the last synced block to the specified block, +/// updating private account balances based on any relevant transactions. +/// +/// # Parameters +/// - `handle`: Valid wallet handle +/// - `block_id`: Target block number to sync to +/// +/// # Returns +/// - `Success` if synchronization completed +/// - `SyncError` if synchronization failed +/// - Error code on other failures +/// +/// # Note +/// This operation can take a while for large block ranges. The wallet +/// internally uses a progress bar which may output to stdout. +/// +/// # Safety +/// - `handle` must be a valid wallet handle from `wallet_ffi_create_new` or `wallet_ffi_open` +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_sync_to_block( + handle: *mut WalletHandle, + block_id: u64, +) -> WalletFfiError { + let wrapper = match get_wallet(handle) { + Ok(w) => w, + Err(e) => return e, + }; + + let mut wallet = match wrapper.core.lock() { + Ok(w) => w, + Err(e) => { + print_error(format!("Failed to lock wallet: {}", e)); + return WalletFfiError::InternalError; + } + }; + + match block_on(wallet.sync_to_block(block_id)) { + Ok(Ok(())) => WalletFfiError::Success, + Ok(Err(e)) => { + print_error(format!("Sync failed: {}", e)); + WalletFfiError::SyncError + } + Err(e) => e, + } +} + +/// Get the last synced block number. +/// +/// # Parameters +/// - `handle`: Valid wallet handle +/// - `out_block_id`: Output pointer for the block number +/// +/// # Returns +/// - `Success` on success +/// - Error code on failure +/// +/// # Safety +/// - `handle` must be a valid wallet handle from `wallet_ffi_create_new` or `wallet_ffi_open` +/// - `out_block_id` must be a valid pointer to a `u64` +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_get_last_synced_block( + handle: *mut WalletHandle, + out_block_id: *mut u64, +) -> WalletFfiError { + let wrapper = match get_wallet(handle) { + Ok(w) => w, + Err(e) => return e, + }; + + if out_block_id.is_null() { + print_error("Null output pointer"); + return WalletFfiError::NullPointer; + } + + let wallet = match wrapper.core.lock() { + Ok(w) => w, + Err(e) => { + print_error(format!("Failed to lock wallet: {}", e)); + return WalletFfiError::InternalError; + } + }; + + unsafe { + *out_block_id = wallet.last_synced_block; + } + + WalletFfiError::Success +} + +/// Get the current block height from the sequencer. 
+/// +/// # Parameters +/// - `handle`: Valid wallet handle +/// - `out_block_height`: Output pointer for the current block height +/// +/// # Returns +/// - `Success` on success +/// - `NetworkError` if the sequencer is unreachable +/// - Error code on other failures +/// +/// # Safety +/// - `handle` must be a valid wallet handle from `wallet_ffi_create_new` or `wallet_ffi_open` +/// - `out_block_height` must be a valid pointer to a `u64` +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_get_current_block_height( + handle: *mut WalletHandle, + out_block_height: *mut u64, +) -> WalletFfiError { + let wrapper = match get_wallet(handle) { + Ok(w) => w, + Err(e) => return e, + }; + + if out_block_height.is_null() { + print_error("Null output pointer"); + return WalletFfiError::NullPointer; + } + + let wallet = match wrapper.core.lock() { + Ok(w) => w, + Err(e) => { + print_error(format!("Failed to lock wallet: {}", e)); + return WalletFfiError::InternalError; + } + }; + + match block_on(wallet.sequencer_client.get_last_block()) { + Ok(Ok(response)) => { + unsafe { + *out_block_height = response.last_block; + } + WalletFfiError::Success + } + Ok(Err(e)) => { + print_error(format!("Failed to get block height: {:?}", e)); + WalletFfiError::NetworkError + } + Err(e) => e, + } +} diff --git a/wallet-ffi/src/transfer.rs b/wallet-ffi/src/transfer.rs new file mode 100644 index 00000000..055f0c32 --- /dev/null +++ b/wallet-ffi/src/transfer.rs @@ -0,0 +1,199 @@ +//! Token transfer functions. + +use std::{ffi::CString, ptr}; + +use common::error::ExecutionFailureKind; +use nssa::AccountId; +use wallet::program_facades::native_token_transfer::NativeTokenTransfer; + +use crate::{ + block_on, + error::{print_error, WalletFfiError}, + types::{FfiBytes32, FfiTransferResult, WalletHandle}, + wallet::get_wallet, +}; + +/// Send a public token transfer. +/// +/// Transfers tokens from one public account to another on the network. +/// +/// # Parameters +/// - `handle`: Valid wallet handle +/// - `from`: Source account ID (must be owned by this wallet) +/// - `to`: Destination account ID +/// - `amount`: Amount to transfer as little-endian [u8; 16] +/// - `out_result`: Output pointer for transfer result +/// +/// # Returns +/// - `Success` if the transfer was submitted successfully +/// - `InsufficientFunds` if the source account doesn't have enough balance +/// - `KeyNotFound` if the source account's signing key is not in this wallet +/// - Error code on other failures +/// +/// # Memory +/// The result must be freed with `wallet_ffi_free_transfer_result()`. 
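// Illustrative sketch, not part of the patch: the intended sync flow from a caller's
// point of view, combining the three functions above. `handle` is assumed to be a
// valid handle from `wallet_ffi_create_new`/`wallet_ffi_open` (defined in wallet.rs
// further down).
use crate::{
    error::WalletFfiError,
    sync::{
        wallet_ffi_get_current_block_height, wallet_ffi_get_last_synced_block,
        wallet_ffi_sync_to_block,
    },
    types::WalletHandle,
};

/// Hypothetical helper: catch the wallet up to the sequencer's current tip.
unsafe fn sync_to_tip(handle: *mut WalletHandle) -> WalletFfiError {
    // Ask the sequencer how far the chain has advanced.
    let mut tip: u64 = 0;
    let status = unsafe { wallet_ffi_get_current_block_height(handle, &mut tip) };
    if !matches!(status, WalletFfiError::Success) {
        return status;
    }

    // Scan forward to the tip; this can take a while for large ranges (see the note
    // on `wallet_ffi_sync_to_block`).
    let status = unsafe { wallet_ffi_sync_to_block(handle, tip) };
    if !matches!(status, WalletFfiError::Success) {
        return status;
    }

    // Confirm where the wallet ended up.
    let mut last: u64 = 0;
    unsafe { wallet_ffi_get_last_synced_block(handle, &mut last) }
}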
+/// +/// # Safety +/// - `handle` must be a valid wallet handle from `wallet_ffi_create_new` or `wallet_ffi_open` +/// - `from` must be a valid pointer to a `FfiBytes32` struct +/// - `to` must be a valid pointer to a `FfiBytes32` struct +/// - `amount` must be a valid pointer to a `[u8; 16]` array +/// - `out_result` must be a valid pointer to a `FfiTransferResult` struct +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_transfer_public( + handle: *mut WalletHandle, + from: *const FfiBytes32, + to: *const FfiBytes32, + amount: *const [u8; 16], + out_result: *mut FfiTransferResult, +) -> WalletFfiError { + let wrapper = match get_wallet(handle) { + Ok(w) => w, + Err(e) => return e, + }; + + if from.is_null() || to.is_null() || amount.is_null() || out_result.is_null() { + print_error("Null pointer argument"); + return WalletFfiError::NullPointer; + } + + let wallet = match wrapper.core.lock() { + Ok(w) => w, + Err(e) => { + print_error(format!("Failed to lock wallet: {}", e)); + return WalletFfiError::InternalError; + } + }; + + let from_id = AccountId::new(unsafe { (*from).data }); + let to_id = AccountId::new(unsafe { (*to).data }); + let amount = u128::from_le_bytes(unsafe { *amount }); + + let transfer = NativeTokenTransfer(&wallet); + + match block_on(transfer.send_public_transfer(from_id, to_id, amount)) { + Ok(Ok(response)) => { + let tx_hash = CString::new(response.tx_hash) + .map(|s| s.into_raw()) + .unwrap_or(ptr::null_mut()); + + unsafe { + (*out_result).tx_hash = tx_hash; + (*out_result).success = true; + } + WalletFfiError::Success + } + Ok(Err(e)) => { + print_error(format!("Transfer failed: {:?}", e)); + unsafe { + (*out_result).tx_hash = ptr::null_mut(); + (*out_result).success = false; + } + match e { + ExecutionFailureKind::InsufficientFundsError => WalletFfiError::InsufficientFunds, + ExecutionFailureKind::KeyNotFoundError => WalletFfiError::KeyNotFound, + ExecutionFailureKind::SequencerError => WalletFfiError::NetworkError, + ExecutionFailureKind::SequencerClientError(_) => WalletFfiError::NetworkError, + _ => WalletFfiError::InternalError, + } + } + Err(e) => e, + } +} + +/// Register a public account on the network. +/// +/// This initializes a public account on the blockchain. The account must be +/// owned by this wallet. +/// +/// # Parameters +/// - `handle`: Valid wallet handle +/// - `account_id`: Account ID to register +/// - `out_result`: Output pointer for registration result +/// +/// # Returns +/// - `Success` if the registration was submitted successfully +/// - Error code on failure +/// +/// # Memory +/// The result must be freed with `wallet_ffi_free_transfer_result()`. 
+/// +/// # Safety +/// - `handle` must be a valid wallet handle from `wallet_ffi_create_new` or `wallet_ffi_open` +/// - `account_id` must be a valid pointer to a `FfiBytes32` struct +/// - `out_result` must be a valid pointer to a `FfiTransferResult` struct +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_register_public_account( + handle: *mut WalletHandle, + account_id: *const FfiBytes32, + out_result: *mut FfiTransferResult, +) -> WalletFfiError { + let wrapper = match get_wallet(handle) { + Ok(w) => w, + Err(e) => return e, + }; + + if account_id.is_null() || out_result.is_null() { + print_error("Null pointer argument"); + return WalletFfiError::NullPointer; + } + + let wallet = match wrapper.core.lock() { + Ok(w) => w, + Err(e) => { + print_error(format!("Failed to lock wallet: {}", e)); + return WalletFfiError::InternalError; + } + }; + + let account_id = AccountId::new(unsafe { (*account_id).data }); + + let transfer = NativeTokenTransfer(&wallet); + + match block_on(transfer.register_account(account_id)) { + Ok(Ok(response)) => { + let tx_hash = CString::new(response.tx_hash) + .map(|s| s.into_raw()) + .unwrap_or(ptr::null_mut()); + + unsafe { + (*out_result).tx_hash = tx_hash; + (*out_result).success = true; + } + WalletFfiError::Success + } + Ok(Err(e)) => { + print_error(format!("Registration failed: {:?}", e)); + unsafe { + (*out_result).tx_hash = ptr::null_mut(); + (*out_result).success = false; + } + match e { + ExecutionFailureKind::KeyNotFoundError => WalletFfiError::KeyNotFound, + ExecutionFailureKind::SequencerError => WalletFfiError::NetworkError, + ExecutionFailureKind::SequencerClientError(_) => WalletFfiError::NetworkError, + _ => WalletFfiError::InternalError, + } + } + Err(e) => e, + } +} + +/// Free a transfer result returned by `wallet_ffi_transfer_public` or +/// `wallet_ffi_register_public_account`. +/// +/// # Safety +/// The result must be either null or a valid result from a transfer function. +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_free_transfer_result(result: *mut FfiTransferResult) { + if result.is_null() { + return; + } + + unsafe { + let result = &*result; + if !result.tx_hash.is_null() { + drop(CString::from_raw(result.tx_hash)); + } + } +} diff --git a/wallet-ffi/src/types.rs b/wallet-ffi/src/types.rs new file mode 100644 index 00000000..a1d70181 --- /dev/null +++ b/wallet-ffi/src/types.rs @@ -0,0 +1,262 @@ +//! C-compatible type definitions for the FFI layer. + +use core::slice; +use std::{ffi::c_char, ptr}; + +use nssa::{Account, Data}; +use nssa_core::encryption::shared_key_derivation::Secp256k1Point; + +use crate::error::WalletFfiError; + +/// Opaque pointer to the Wallet instance. +/// +/// This type is never instantiated directly - it's used as an opaque handle +/// to hide the internal wallet structure from C code. +#[repr(C)] +pub struct WalletHandle { + _private: [u8; 0], +} + +/// 32-byte array type for AccountId, keys, hashes, etc. +#[repr(C)] +#[derive(Clone, Copy, Default)] +pub struct FfiBytes32 { + pub data: [u8; 32], +} + +/// Program ID - 8 u32 values (32 bytes total). +#[repr(C)] +#[derive(Clone, Copy, Default)] +pub struct FfiProgramId { + pub data: [u32; 8], +} + +/// U128 - 16 bytes little endian +#[repr(C)] +#[derive(Clone, Copy, Default)] +pub struct FfiU128 { + pub data: [u8; 16], +} + +/// Account data structure - C-compatible version of nssa Account. +/// +/// Note: `balance` and `nonce` are u128 values represented as little-endian +/// byte arrays since C doesn't have native u128 support. 
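// Illustrative sketch, not part of the patch: a complete public-transfer round-trip,
// including releasing the result as required by the "# Memory" notes above. The
// handle and account IDs are assumed to come from the caller; the amount is a
// placeholder.
use std::ffi::CStr;

use crate::{
    error::WalletFfiError,
    transfer::{wallet_ffi_free_transfer_result, wallet_ffi_transfer_public},
    types::{FfiBytes32, FfiTransferResult, WalletHandle},
};

unsafe fn send_one_token(handle: *mut WalletHandle, from: FfiBytes32, to: FfiBytes32) {
    let amount = 1u128.to_le_bytes(); // amount as little-endian [u8; 16], as documented
    let mut result = FfiTransferResult::default();

    let status =
        unsafe { wallet_ffi_transfer_public(handle, &from, &to, &amount, &mut result) };

    if matches!(status, WalletFfiError::Success) && !result.tx_hash.is_null() {
        // The hash is a NUL-terminated string owned by the FFI layer.
        let hash = unsafe { CStr::from_ptr(result.tx_hash) }.to_string_lossy();
        println!("submitted tx: {hash}");
    }

    // Release the result with the matching free function, never with C's `free()`.
    unsafe { wallet_ffi_free_transfer_result(&mut result) };
}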
+#[repr(C)] +pub struct FfiAccount { + pub program_owner: FfiProgramId, + /// Balance as little-endian [u8; 16] + pub balance: FfiU128, + /// Pointer to account data bytes + pub data: *const u8, + /// Length of account data + pub data_len: usize, + /// Nonce as little-endian [u8; 16] + pub nonce: FfiU128, +} + +impl Default for FfiAccount { + fn default() -> Self { + Self { + program_owner: FfiProgramId::default(), + balance: FfiU128::default(), + data: std::ptr::null(), + data_len: 0, + nonce: FfiU128::default(), + } + } +} + +/// Public keys for a private account (safe to expose). +#[repr(C)] +pub struct FfiPrivateAccountKeys { + /// Nullifier public key (32 bytes) + pub nullifier_public_key: FfiBytes32, + /// Incoming viewing public key (compressed secp256k1 point) + pub incoming_viewing_public_key: *const u8, + /// Length of incoming viewing public key (typically 33 bytes) + pub incoming_viewing_public_key_len: usize, +} + +impl Default for FfiPrivateAccountKeys { + fn default() -> Self { + Self { + nullifier_public_key: FfiBytes32::default(), + incoming_viewing_public_key: std::ptr::null(), + incoming_viewing_public_key_len: 0, + } + } +} + +/// Public key info for a public account. +#[repr(C)] +#[derive(Clone, Copy, Default)] +pub struct FfiPublicAccountKey { + pub public_key: FfiBytes32, +} + +/// Single entry in the account list. +#[repr(C)] +#[derive(Clone, Copy)] +pub struct FfiAccountListEntry { + pub account_id: FfiBytes32, + pub is_public: bool, +} + +/// List of accounts returned by wallet_ffi_list_accounts. +#[repr(C)] +pub struct FfiAccountList { + pub entries: *mut FfiAccountListEntry, + pub count: usize, +} + +impl Default for FfiAccountList { + fn default() -> Self { + Self { + entries: std::ptr::null_mut(), + count: 0, + } + } +} + +/// Result of a transfer operation. +#[repr(C)] +pub struct FfiTransferResult { + /// Transaction hash (null-terminated string, or null on failure) + pub tx_hash: *mut c_char, + /// Whether the transfer succeeded + pub success: bool, +} + +impl Default for FfiTransferResult { + fn default() -> Self { + Self { + tx_hash: std::ptr::null_mut(), + success: false, + } + } +} + +// Helper functions to convert between Rust and FFI types + +impl FfiBytes32 { + /// Create from a 32-byte array. + pub fn from_bytes(bytes: [u8; 32]) -> Self { + Self { data: bytes } + } + + /// Create from an AccountId. 
+ pub fn from_account_id(id: &nssa::AccountId) -> Self { + Self { data: *id.value() } + } +} + +impl FfiPrivateAccountKeys { + pub fn npk(&self) -> nssa_core::NullifierPublicKey { + nssa_core::NullifierPublicKey(self.nullifier_public_key.data) + } + + pub fn ivk(&self) -> Result<nssa_core::encryption::IncomingViewingPublicKey, WalletFfiError> { + if self.incoming_viewing_public_key_len == 33 { + let slice = unsafe { + slice::from_raw_parts( + self.incoming_viewing_public_key, + self.incoming_viewing_public_key_len, + ) + }; + Ok(Secp256k1Point(slice.to_vec())) + } else { + Err(WalletFfiError::InvalidKeyValue) + } + } +} + +impl From<u128> for FfiU128 { + fn from(value: u128) -> Self { + Self { + data: value.to_le_bytes(), + } + } +} + +impl From<FfiU128> for u128 { + fn from(value: FfiU128) -> Self { + u128::from_le_bytes(value.data) + } +} + +impl From<&nssa::AccountId> for FfiBytes32 { + fn from(id: &nssa::AccountId) -> Self { + Self::from_account_id(id) + } +} + +impl From<FfiBytes32> for nssa::AccountId { + fn from(bytes: FfiBytes32) -> Self { + nssa::AccountId::new(bytes.data) + } +} + +impl From<nssa::Account> for FfiAccount { + fn from(value: nssa::Account) -> Self { + // Convert account data to FFI type + let data_vec: Vec<u8> = value.data.into(); + let data_len = data_vec.len(); + let data = if data_len > 0 { + let data_boxed = data_vec.into_boxed_slice(); + Box::into_raw(data_boxed) as *const u8 + } else { + ptr::null() + }; + + let program_owner = FfiProgramId { + data: value.program_owner, + }; + FfiAccount { + program_owner, + balance: value.balance.into(), + data, + data_len, + nonce: value.nonce.into(), + } + } +} + +impl TryFrom<&FfiAccount> for nssa::Account { + type Error = WalletFfiError; + + fn try_from(value: &FfiAccount) -> Result<Self, Self::Error> { + let data = if value.data_len > 0 { + unsafe { + let slice = slice::from_raw_parts(value.data, value.data_len); + Data::try_from(slice.to_vec()).map_err(|_| WalletFfiError::InvalidTypeConversion)? + } + } else { + Data::default() + }; + Ok(Account { + program_owner: value.program_owner.data, + balance: value.balance.into(), + data, + nonce: value.nonce.into(), + }) + } +} + +impl From<nssa::PublicKey> for FfiPublicAccountKey { + fn from(value: nssa::PublicKey) -> Self { + Self { + public_key: FfiBytes32::from_bytes(*value.value()), + } + } +} + +impl TryFrom<&FfiPublicAccountKey> for nssa::PublicKey { + type Error = WalletFfiError; + + fn try_from(value: &FfiPublicAccountKey) -> Result<Self, Self::Error> { + let public_key = nssa::PublicKey::try_new(value.public_key.data) + .map_err(|_| WalletFfiError::InvalidTypeConversion)?; + Ok(public_key) + } +} diff --git a/wallet-ffi/src/wallet.rs b/wallet-ffi/src/wallet.rs new file mode 100644 index 00000000..6f817f8e --- /dev/null +++ b/wallet-ffi/src/wallet.rs @@ -0,0 +1,279 @@ +//! Wallet lifecycle management functions. + +use std::{ + ffi::{c_char, CStr}, + path::PathBuf, + ptr, + sync::Mutex, +}; + +use wallet::WalletCore; + +use crate::{ + block_on, + error::{print_error, WalletFfiError}, + types::WalletHandle, +}; + +/// Internal wrapper around WalletCore with mutex for thread safety. +pub(crate) struct WalletWrapper { + pub core: Mutex<WalletCore>, +} + +/// Helper to get the wallet wrapper from an opaque handle. 
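// Illustrative sketch, not part of the patch: the conversion impls above are meant
// to round-trip cleanly between FFI and wallet types, e.g.:
#[cfg(test)]
mod conversion_sketch {
    use crate::types::{FfiBytes32, FfiU128};

    #[test]
    fn u128_round_trip() {
        let ffi: FfiU128 = 123_456_789u128.into();
        // Both directions go through the same little-endian byte layout.
        assert_eq!(u128::from(ffi), 123_456_789u128);
    }

    #[test]
    fn account_id_round_trip() {
        let id = nssa::AccountId::new([7u8; 32]);
        let ffi: FfiBytes32 = (&id).into();
        // Converting back yields an ID wrapping the same 32 bytes.
        assert!(*nssa::AccountId::from(ffi).value() == [7u8; 32]);
    }
}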
+pub(crate) fn get_wallet( + handle: *mut WalletHandle, +) -> Result<&'static WalletWrapper, WalletFfiError> { + if handle.is_null() { + print_error("Null wallet handle"); + return Err(WalletFfiError::NullPointer); + } + Ok(unsafe { &*(handle as *mut WalletWrapper) }) +} + +/// Helper to get a mutable reference to the wallet wrapper. +#[allow(dead_code)] +pub(crate) fn get_wallet_mut( + handle: *mut WalletHandle, +) -> Result<&'static mut WalletWrapper, WalletFfiError> { + if handle.is_null() { + print_error("Null wallet handle"); + return Err(WalletFfiError::NullPointer); + } + Ok(unsafe { &mut *(handle as *mut WalletWrapper) }) +} + +/// Helper to convert a C string to a Rust PathBuf. +fn c_str_to_path(ptr: *const c_char, name: &str) -> Result<PathBuf, WalletFfiError> { + if ptr.is_null() { + print_error(format!("Null pointer for {}", name)); + return Err(WalletFfiError::NullPointer); + } + + let c_str = unsafe { CStr::from_ptr(ptr) }; + match c_str.to_str() { + Ok(s) => Ok(PathBuf::from(s)), + Err(e) => { + print_error(format!("Invalid UTF-8 in {}: {}", name, e)); + Err(WalletFfiError::InvalidUtf8) + } + } +} + +/// Helper to convert a C string to a Rust String. +fn c_str_to_string(ptr: *const c_char, name: &str) -> Result<String, WalletFfiError> { + if ptr.is_null() { + print_error(format!("Null pointer for {}", name)); + return Err(WalletFfiError::NullPointer); + } + + let c_str = unsafe { CStr::from_ptr(ptr) }; + match c_str.to_str() { + Ok(s) => Ok(s.to_string()), + Err(e) => { + print_error(format!("Invalid UTF-8 in {}: {}", name, e)); + Err(WalletFfiError::InvalidUtf8) + } + } +} + +/// Create a new wallet with fresh storage. +/// +/// This initializes a new wallet with a new seed derived from the password. +/// Use this for first-time wallet creation. +/// +/// # Parameters +/// - `config_path`: Path to the wallet configuration file (JSON) +/// - `storage_path`: Path where wallet data will be stored +/// - `password`: Password for encrypting the wallet seed +/// +/// # Returns +/// - Opaque wallet handle on success +/// - Null pointer on error (call `wallet_ffi_get_last_error()` for details) +/// +/// # Safety +/// All string parameters must be valid null-terminated UTF-8 strings. +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_create_new( + config_path: *const c_char, + storage_path: *const c_char, + password: *const c_char, +) -> *mut WalletHandle { + let config_path = match c_str_to_path(config_path, "config_path") { + Ok(p) => p, + Err(_) => return ptr::null_mut(), + }; + + let storage_path = match c_str_to_path(storage_path, "storage_path") { + Ok(p) => p, + Err(_) => return ptr::null_mut(), + }; + + let password = match c_str_to_string(password, "password") { + Ok(s) => s, + Err(_) => return ptr::null_mut(), + }; + + match WalletCore::new_init_storage(config_path, storage_path, None, password) { + Ok(core) => { + let wrapper = Box::new(WalletWrapper { + core: Mutex::new(core), + }); + Box::into_raw(wrapper) as *mut WalletHandle + } + Err(e) => { + print_error(format!("Failed to create wallet: {}", e)); + ptr::null_mut() + } + } +} + +/// Open an existing wallet from storage. +/// +/// This loads a wallet that was previously created with `wallet_ffi_create_new()`. 
+/// +/// # Parameters +/// - `config_path`: Path to the wallet configuration file (JSON) +/// - `storage_path`: Path where wallet data is stored +/// +/// # Returns +/// - Opaque wallet handle on success +/// - Null pointer on error (call `wallet_ffi_get_last_error()` for details) +/// +/// # Safety +/// All string parameters must be valid null-terminated UTF-8 strings. +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_open( + config_path: *const c_char, + storage_path: *const c_char, +) -> *mut WalletHandle { + let config_path = match c_str_to_path(config_path, "config_path") { + Ok(p) => p, + Err(_) => return ptr::null_mut(), + }; + + let storage_path = match c_str_to_path(storage_path, "storage_path") { + Ok(p) => p, + Err(_) => return ptr::null_mut(), + }; + + match WalletCore::new_update_chain(config_path, storage_path, None) { + Ok(core) => { + let wrapper = Box::new(WalletWrapper { + core: Mutex::new(core), + }); + Box::into_raw(wrapper) as *mut WalletHandle + } + Err(e) => { + print_error(format!("Failed to open wallet: {}", e)); + ptr::null_mut() + } + } +} + +/// Destroy a wallet handle and free its resources. +/// +/// After calling this function, the handle is invalid and must not be used. +/// +/// # Safety +/// - The handle must be either null or a valid handle from `wallet_ffi_create_new()` or +/// `wallet_ffi_open()`. +/// - The handle must not be used after this call. +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_destroy(handle: *mut WalletHandle) { + if !handle.is_null() { + unsafe { + drop(Box::from_raw(handle as *mut WalletWrapper)); + } + } +} + +/// Save wallet state to persistent storage. +/// +/// This should be called periodically or after important operations to ensure +/// wallet data is persisted to disk. +/// +/// # Parameters +/// - `handle`: Valid wallet handle +/// +/// # Returns +/// - `Success` on successful save +/// - Error code on failure +/// +/// # Safety +/// - `handle` must be a valid wallet handle from `wallet_ffi_create_new` or `wallet_ffi_open` +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_save(handle: *mut WalletHandle) -> WalletFfiError { + let wrapper = match get_wallet(handle) { + Ok(w) => w, + Err(e) => return e, + }; + + let wallet = match wrapper.core.lock() { + Ok(w) => w, + Err(e) => { + print_error(format!("Failed to lock wallet: {}", e)); + return WalletFfiError::InternalError; + } + }; + + match block_on(wallet.store_persistent_data()) { + Ok(Ok(())) => WalletFfiError::Success, + Ok(Err(e)) => { + print_error(format!("Failed to save wallet: {}", e)); + WalletFfiError::StorageError + } + Err(e) => e, + } +} + +/// Get the sequencer address from the wallet configuration. 
+/// +/// # Parameters +/// - `handle`: Valid wallet handle +/// +/// # Returns +/// - Pointer to null-terminated string on success (caller must free with +/// `wallet_ffi_free_string()`) +/// - Null pointer on error +/// +/// # Safety +/// - `handle` must be a valid wallet handle from `wallet_ffi_create_new` or `wallet_ffi_open` +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_get_sequencer_addr(handle: *mut WalletHandle) -> *mut c_char { + let wrapper = match get_wallet(handle) { + Ok(w) => w, + Err(_) => return ptr::null_mut(), + }; + + let wallet = match wrapper.core.lock() { + Ok(w) => w, + Err(e) => { + print_error(format!("Failed to lock wallet: {}", e)); + return ptr::null_mut(); + } + }; + + let addr = wallet.config().sequencer_addr.clone(); + + match std::ffi::CString::new(addr) { + Ok(s) => s.into_raw(), + Err(e) => { + print_error(format!("Invalid sequencer address: {}", e)); + ptr::null_mut() + } + } +} + +/// Free a string returned by wallet FFI functions. +/// +/// # Safety +/// The pointer must be either null or a valid string returned by an FFI function. +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_free_string(ptr: *mut c_char) { + if !ptr.is_null() { + unsafe { + drop(std::ffi::CString::from_raw(ptr)); + } + } +} diff --git a/wallet/Cargo.toml b/wallet/Cargo.toml index bef25007..ca8548bc 100644 --- a/wallet/Cargo.toml +++ b/wallet/Cargo.toml @@ -2,12 +2,14 @@ name = "wallet" version = "0.1.0" edition = "2024" +license = { workspace = true } [dependencies] nssa_core.workspace = true nssa.workspace = true common.workspace = true key_protocol.workspace = true +token_core.workspace = true anyhow.workspace = true serde_json.workspace = true @@ -29,3 +31,4 @@ risc0-zkvm.workspace = true async-stream = "0.3.6" indicatif = { version = "0.18.3", features = ["improved_unicode"] } optfield = "0.4.0" +url.workspace = true diff --git a/wallet/configs/debug/wallet_config.json b/wallet/configs/debug/wallet_config.json index f6e90b05..512a9a16 100644 --- a/wallet/configs/debug/wallet_config.json +++ b/wallet/configs/debug/wallet_config.json @@ -1,14 +1,14 @@ { "override_rust_log": null, "sequencer_addr": "http://127.0.0.1:3040", - "seq_poll_timeout_millis": 12000, - "seq_tx_poll_max_blocks": 5, - "seq_poll_max_retries": 5, + "seq_poll_timeout_millis": 30000, + "seq_tx_poll_max_blocks": 15, + "seq_poll_max_retries": 10, "seq_block_poll_max_amount": 100, "initial_accounts": [ { "Public": { - "account_id": "BLgCRDXYdQPMMWVHYRFGQZbgeHx9frkipa8GtpG2Syqy", + "account_id": "6iArKUXxhUJqS7kCaPNhwMWt3ro71PDyBj7jwAyE2VQV", "pub_sign_key": [ 16, 162, @@ -47,7 +47,7 @@ }, { "Public": { - "account_id": "Gj1mJy5W7J5pfmLRujmQaLfLMWidNxQ6uwnhb666ZwHw", + "account_id": "7wHg9sbJwc6h3NP1S9bekfAzB8CHifEcxKswCKUt3YQo", "pub_sign_key": [ 113, 121, diff --git a/wallet/src/chain_storage.rs b/wallet/src/chain_storage.rs index 7d8ed505..3e967e9c 100644 --- a/wallet/src/chain_storage.rs +++ b/wallet/src/chain_storage.rs @@ -11,17 +11,19 @@ use key_protocol::{ use log::debug; use nssa::program::Program; -use crate::config::{InitialAccountData, PersistentAccountData, WalletConfig}; +use crate::config::{InitialAccountData, Label, PersistentAccountData, WalletConfig}; pub struct WalletChainStore { pub user_data: NSSAUserData, pub wallet_config: WalletConfig, + pub labels: HashMap<String, Label>, } impl WalletChainStore { pub fn new( config: WalletConfig, persistent_accounts: Vec<PersistentAccountData>, + labels: HashMap<String, Label>, ) -> Result<Self> { if persistent_accounts.is_empty() { 
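// Illustrative sketch, not part of the patch: the full wallet lifecycle through the
// FFI layer in wallet.rs above. The paths and password are placeholders, and error
// handling is reduced to early returns.
use std::ffi::{CStr, CString};

use crate::wallet::{
    wallet_ffi_create_new, wallet_ffi_destroy, wallet_ffi_free_string,
    wallet_ffi_get_sequencer_addr, wallet_ffi_save,
};

fn wallet_lifecycle_sketch() {
    let config = CString::new("/tmp/wallet_config.json").expect("no interior NUL");
    let storage = CString::new("/tmp/wallet_storage.json").expect("no interior NUL");
    let password = CString::new("correct horse battery staple").expect("no interior NUL");

    // SAFETY: all three strings are valid NUL-terminated UTF-8.
    let handle =
        unsafe { wallet_ffi_create_new(config.as_ptr(), storage.as_ptr(), password.as_ptr()) };
    if handle.is_null() {
        return; // creation failed; see `wallet_ffi_get_last_error()` for details
    }

    unsafe {
        // Persist state, then read back the configured sequencer address.
        let _ = wallet_ffi_save(handle);
        let addr = wallet_ffi_get_sequencer_addr(handle);
        if !addr.is_null() {
            println!("sequencer: {}", CStr::from_ptr(addr).to_string_lossy());
            wallet_ffi_free_string(addr); // free with the matching FFI function
        }

        // Invalidate the handle once finished.
        wallet_ffi_destroy(handle);
    }
}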
anyhow::bail!("Roots not found; please run setup beforehand"); @@ -85,6 +87,7 @@ impl WalletChainStore { private_tree, )?, wallet_config: config, + labels, }) } @@ -120,6 +123,7 @@ impl WalletChainStore { private_tree, )?, wallet_config: config, + labels: HashMap::new(), }) } @@ -167,7 +171,7 @@ mod tests { let initial_acc1 = serde_json::from_str( r#"{ "Public": { - "account_id": "BLgCRDXYdQPMMWVHYRFGQZbgeHx9frkipa8GtpG2Syqy", + "account_id": "6iArKUXxhUJqS7kCaPNhwMWt3ro71PDyBj7jwAyE2VQV", "pub_sign_key": [ 16, 162, @@ -210,7 +214,7 @@ mod tests { let initial_acc2 = serde_json::from_str( r#"{ "Public": { - "account_id": "Gj1mJy5W7J5pfmLRujmQaLfLMWidNxQ6uwnhb666ZwHw", + "account_id": "7wHg9sbJwc6h3NP1S9bekfAzB8CHifEcxKswCKUt3YQo", "pub_sign_key": [ 113, 121, @@ -291,6 +295,6 @@ mod tests { let config = create_sample_wallet_config(); let accs = create_sample_persistent_accounts(); - let _ = WalletChainStore::new(config.clone(), accs).unwrap(); + let _ = WalletChainStore::new(config.clone(), accs, HashMap::new()).unwrap(); } } diff --git a/wallet/src/cli/account.rs b/wallet/src/cli/account.rs index 8cba6503..42b2cdd1 100644 --- a/wallet/src/cli/account.rs +++ b/wallet/src/cli/account.rs @@ -4,11 +4,12 @@ use clap::Subcommand; use itertools::Itertools as _; use key_protocol::key_management::key_tree::chain_index::ChainIndex; use nssa::{Account, PublicKey, program::Program}; -use serde::Serialize; +use token_core::{TokenDefinition, TokenHolding}; use crate::{ - TokenDefinition, TokenHolding, WalletCore, + WalletCore, cli::{SubcommandReturnValue, WalletSubcommand}, + config::Label, helperfunctions::{AccountPrivacyKind, HumanReadableAccount, parse_addr_with_privacy_prefix}, }; @@ -39,6 +40,15 @@ pub enum AccountSubcommand { #[arg(short, long)] long: bool, }, + /// Set a label for an account + Label { + /// Valid 32 byte base58 string with privacy prefix + #[arg(short, long)] + account_id: String, + /// The label to assign to the account + #[arg(short, long)] + label: String, + }, } /// Represents generic register CLI subcommand @@ -111,83 +121,26 @@ impl WalletSubcommand for NewSubcommand { } } -#[derive(Debug, Serialize)] -pub struct AuthenticatedTransferAccountView { - pub balance: u128, -} - -impl From<nssa::Account> for AuthenticatedTransferAccountView { - fn from(value: nssa::Account) -> Self { - Self { - balance: value.balance, - } - } -} - -#[derive(Debug, Serialize)] -pub struct TokedDefinitionAccountView { - pub account_type: String, - pub name: String, - pub total_supply: u128, -} - -impl From<TokenDefinition> for TokedDefinitionAccountView { - fn from(value: TokenDefinition) -> Self { - Self { - account_type: "Token definition".to_string(), - name: { - // Assuming, that name does not have UTF-8 NULL and all zeroes are padding. 
- let name_trimmed: Vec<_> = - value.name.into_iter().take_while(|ch| *ch != 0).collect(); - String::from_utf8(name_trimmed).unwrap_or(hex::encode(value.name)) - }, - total_supply: value.total_supply, - } - } -} - -#[derive(Debug, Serialize)] -pub struct TokedHoldingAccountView { - pub account_type: String, - pub definition_id: String, - pub balance: u128, -} - -impl From<TokenHolding> for TokedHoldingAccountView { - fn from(value: TokenHolding) -> Self { - Self { - account_type: "Token holding".to_string(), - definition_id: value.definition_id.to_string(), - balance: value.balance, - } - } -} - /// Formats account details for display, returning (description, json_view) fn format_account_details(account: &Account) -> (String, String) { let auth_tr_prog_id = Program::authenticated_transfer_program().id(); let token_prog_id = Program::token().id(); match &account.program_owner { - _ if account.program_owner == auth_tr_prog_id => { - let acc_view: AuthenticatedTransferAccountView = account.clone().into(); - ( - "Account owned by authenticated transfer program".to_string(), - serde_json::to_string(&acc_view).unwrap(), - ) - } - _ if account.program_owner == token_prog_id => { - if let Some(token_def) = TokenDefinition::parse(&account.data) { - let acc_view: TokedDefinitionAccountView = token_def.into(); + o if *o == auth_tr_prog_id => ( + "Account owned by authenticated transfer program".to_string(), + serde_json::to_string(&account).unwrap(), + ), + o if *o == token_prog_id => { + if let Ok(token_def) = TokenDefinition::try_from(&account.data) { ( "Definition account owned by token program".to_string(), - serde_json::to_string(&acc_view).unwrap(), + serde_json::to_string(&token_def).unwrap(), ) - } else if let Some(token_hold) = TokenHolding::parse(&account.data) { - let acc_view: TokedHoldingAccountView = token_hold.into(); + } else if let Ok(token_hold) = TokenHolding::try_from(&account.data) { ( "Holding account owned by token program".to_string(), - serde_json::to_string(&acc_view).unwrap(), + serde_json::to_string(&token_hold).unwrap(), ) } else { let account_hr: HumanReadableAccount = account.clone().into(); @@ -218,9 +171,13 @@ impl WalletSubcommand for AccountSubcommand { keys, account_id, } => { - let (account_id, addr_kind) = parse_addr_with_privacy_prefix(&account_id)?; + let (account_id_str, addr_kind) = parse_addr_with_privacy_prefix(&account_id)?; - let account_id = account_id.parse()?; + let account_id: nssa::AccountId = account_id_str.parse()?; + + if let Some(label) = wallet_core.storage.labels.get(&account_id_str) { + println!("Label: {label}"); + } let account = match addr_kind { AccountPrivacyKind::Public => { @@ -313,32 +270,35 @@ impl WalletSubcommand for AccountSubcommand { } AccountSubcommand::List { long } => { let user_data = &wallet_core.storage.user_data; + let labels = &wallet_core.storage.labels; + + let format_with_label = |prefix: &str, id: &nssa::AccountId| { + let id_str = id.to_string(); + if let Some(label) = labels.get(&id_str) { + format!("{prefix} [{label}]") + } else { + prefix.to_string() + } + }; if !long { let accounts = user_data .default_pub_account_signing_keys .keys() - .map(|id| format!("Preconfigured Public/{id}")) - .chain( - user_data - .default_user_private_accounts - .keys() - .map(|id| format!("Preconfigured Private/{id}")), - ) - .chain( - user_data - .public_key_tree - .account_id_map - .iter() - .map(|(id, chain_index)| format!("{chain_index} Public/{id}")), - ) - .chain( - user_data - .private_key_tree - .account_id_map - .iter() - 
.map(|(id, chain_index)| format!("{chain_index} Private/{id}")), - ) + .map(|id| format_with_label(&format!("Preconfigured Public/{id}"), id)) + .chain(user_data.default_user_private_accounts.keys().map(|id| { + format_with_label(&format!("Preconfigured Private/{id}"), id) + })) + .chain(user_data.public_key_tree.account_id_map.iter().map( + |(id, chain_index)| { + format_with_label(&format!("{chain_index} Public/{id}"), id) + }, + )) + .chain(user_data.private_key_tree.account_id_map.iter().map( + |(id, chain_index)| { + format_with_label(&format!("{chain_index} Private/{id}"), id) + }, + )) .format("\n"); println!("{accounts}"); @@ -348,7 +308,10 @@ impl WalletSubcommand for AccountSubcommand { // Detailed listing with --long flag // Preconfigured public accounts for id in user_data.default_pub_account_signing_keys.keys() { - println!("Preconfigured Public/{id}"); + println!( + "{}", + format_with_label(&format!("Preconfigured Public/{id}"), id) + ); match wallet_core.get_account_public(*id).await { Ok(account) if account != Account::default() => { let (description, json_view) = format_account_details(&account); @@ -362,7 +325,10 @@ impl WalletSubcommand for AccountSubcommand { // Preconfigured private accounts for id in user_data.default_user_private_accounts.keys() { - println!("Preconfigured Private/{id}"); + println!( + "{}", + format_with_label(&format!("Preconfigured Private/{id}"), id) + ); match wallet_core.get_account_private(id) { Some(account) if account != Account::default() => { let (description, json_view) = format_account_details(&account); @@ -376,7 +342,10 @@ impl WalletSubcommand for AccountSubcommand { // Public key tree accounts for (id, chain_index) in user_data.public_key_tree.account_id_map.iter() { - println!("{chain_index} Public/{id}"); + println!( + "{}", + format_with_label(&format!("{chain_index} Public/{id}"), id) + ); match wallet_core.get_account_public(*id).await { Ok(account) if account != Account::default() => { let (description, json_view) = format_account_details(&account); @@ -390,7 +359,10 @@ impl WalletSubcommand for AccountSubcommand { // Private key tree accounts for (id, chain_index) in user_data.private_key_tree.account_id_map.iter() { - println!("{chain_index} Private/{id}"); + println!( + "{}", + format_with_label(&format!("{chain_index} Private/{id}"), id) + ); match wallet_core.get_account_private(id) { Some(account) if account != Account::default() => { let (description, json_view) = format_account_details(&account); @@ -402,57 +374,25 @@ impl WalletSubcommand for AccountSubcommand { } } + Ok(SubcommandReturnValue::Empty) + } + AccountSubcommand::Label { account_id, label } => { + let (account_id_str, _) = parse_addr_with_privacy_prefix(&account_id)?; + + let old_label = wallet_core + .storage + .labels + .insert(account_id_str.clone(), Label::new(label.clone())); + + wallet_core.store_persistent_data().await?; + + if let Some(old) = old_label { + eprintln!("Warning: overriding existing label '{old}'"); + } + println!("Label '{label}' set for account {account_id_str}"); + Ok(SubcommandReturnValue::Empty) } } } } - -#[cfg(test)] -mod tests { - use nssa::AccountId; - - use crate::cli::account::{TokedDefinitionAccountView, TokenDefinition}; - - #[test] - fn test_invalid_utf_8_name_of_token() { - let token_def = TokenDefinition { - account_type: 1, - name: [137, 12, 14, 3, 5, 4], - total_supply: 100, - metadata_id: AccountId::new([0; 32]), - }; - - let token_def_view: TokedDefinitionAccountView = token_def.into(); - - 
assert_eq!(token_def_view.name, "890c0e030504"); - } - - #[test] - fn test_valid_utf_8_name_of_token_all_bytes() { - let token_def = TokenDefinition { - account_type: 1, - name: [240, 159, 146, 150, 66, 66], - total_supply: 100, - metadata_id: AccountId::new([0; 32]), - }; - - let token_def_view: TokedDefinitionAccountView = token_def.into(); - - assert_eq!(token_def_view.name, "💖BB"); - } - - #[test] - fn test_valid_utf_8_name_of_token_less_bytes() { - let token_def = TokenDefinition { - account_type: 1, - name: [78, 65, 77, 69, 0, 0], - total_supply: 100, - metadata_id: AccountId::new([0; 32]), - }; - - let token_def_view: TokedDefinitionAccountView = token_def.into(); - - assert_eq!(token_def_view.name, "NAME"); - } -} diff --git a/wallet/src/cli/programs/token.rs b/wallet/src/cli/programs/token.rs index 5bf23a8a..d0809798 100644 --- a/wallet/src/cli/programs/token.rs +++ b/wallet/src/cli/programs/token.rs @@ -1258,14 +1258,6 @@ impl WalletSubcommand for CreateNewTokenProgramSubcommand { name, total_supply, } => { - let name = name.as_bytes(); - if name.len() > 6 { - // TODO: return error - panic!("Name length mismatch"); - } - let mut name_bytes = [0; 6]; - name_bytes[..name.len()].copy_from_slice(name); - let definition_account_id: AccountId = definition_account_id.parse().unwrap(); let supply_account_id: AccountId = supply_account_id.parse().unwrap(); @@ -1273,7 +1265,7 @@ impl WalletSubcommand for CreateNewTokenProgramSubcommand { .send_new_definition_private_owned_definiton_and_supply( definition_account_id, supply_account_id, - name_bytes, + name, total_supply, ) .await?; @@ -1307,14 +1299,6 @@ impl WalletSubcommand for CreateNewTokenProgramSubcommand { name, total_supply, } => { - let name = name.as_bytes(); - if name.len() > 6 { - // TODO: return error - panic!("Name length mismatch"); - } - let mut name_bytes = [0; 6]; - name_bytes[..name.len()].copy_from_slice(name); - let definition_account_id: AccountId = definition_account_id.parse().unwrap(); let supply_account_id: AccountId = supply_account_id.parse().unwrap(); @@ -1322,7 +1306,7 @@ impl WalletSubcommand for CreateNewTokenProgramSubcommand { .send_new_definition_private_owned_definiton( definition_account_id, supply_account_id, - name_bytes, + name, total_supply, ) .await?; @@ -1353,14 +1337,6 @@ impl WalletSubcommand for CreateNewTokenProgramSubcommand { name, total_supply, } => { - let name = name.as_bytes(); - if name.len() > 6 { - // TODO: return error - panic!("Name length mismatch"); - } - let mut name_bytes = [0; 6]; - name_bytes[..name.len()].copy_from_slice(name); - let definition_account_id: AccountId = definition_account_id.parse().unwrap(); let supply_account_id: AccountId = supply_account_id.parse().unwrap(); @@ -1368,7 +1344,7 @@ impl WalletSubcommand for CreateNewTokenProgramSubcommand { .send_new_definition_private_owned_supply( definition_account_id, supply_account_id, - name_bytes, + name, total_supply, ) .await?; @@ -1399,18 +1375,11 @@ impl WalletSubcommand for CreateNewTokenProgramSubcommand { name, total_supply, } => { - let name = name.as_bytes(); - if name.len() > 6 { - // TODO: return error - panic!(); - } - let mut name_bytes = [0; 6]; - name_bytes[..name.len()].copy_from_slice(name); Token(wallet_core) .send_new_definition( definition_account_id.parse().unwrap(), supply_account_id.parse().unwrap(), - name_bytes, + name, total_supply, ) .await?; diff --git a/wallet/src/config.rs b/wallet/src/config.rs index 802e513f..c8b4ec97 100644 --- a/wallet/src/config.rs +++ b/wallet/src/config.rs @@ -1,10 
+1,11 @@ use std::{ + collections::HashMap, io::{BufReader, Write as _}, path::Path, - str::FromStr, }; use anyhow::{Context as _, Result}; +use common::sequencer_client::BasicAuth; use key_protocol::key_management::{ KeyChain, key_tree::{ @@ -14,49 +15,6 @@ use key_protocol::key_management::{ use log::warn; use serde::{Deserialize, Serialize}; -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct BasicAuth { - pub username: String, - pub password: Option<String>, -} - -impl std::fmt::Display for BasicAuth { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{}", self.username)?; - if let Some(password) = &self.password { - write!(f, ":{password}")?; - } - - Ok(()) - } -} - -impl FromStr for BasicAuth { - type Err = anyhow::Error; - - fn from_str(s: &str) -> Result<Self, Self::Err> { - let parse = || { - let mut parts = s.splitn(2, ':'); - let username = parts.next()?; - let password = parts.next().filter(|p| !p.is_empty()); - if parts.next().is_some() { - return None; - } - - Some((username, password)) - }; - - let (username, password) = parse().ok_or_else(|| { - anyhow::anyhow!("Invalid auth format. Expected 'user' or 'user:password'") - })?; - - Ok(Self { - username: username.to_string(), - password: password.map(|p| p.to_string()), - }) - } -} - #[derive(Debug, Clone, Serialize, Deserialize)] pub struct InitialAccountDataPublic { pub account_id: String, @@ -105,10 +63,30 @@ pub enum PersistentAccountData { Preconfigured(InitialAccountData), } +/// A human-readable label for an account. +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)] +pub struct Label(String); + +impl Label { + pub fn new(label: String) -> Self { + Self(label) + } +} + +impl std::fmt::Display for Label { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.0) + } +} + #[derive(Debug, Clone, Serialize, Deserialize)] pub struct PersistentStorage { pub accounts: Vec<PersistentAccountData>, pub last_synced_block: u64, + /// Account labels keyed by account ID string (e.g., + /// "2rnKprXqWGWJTkDZKsQbFXa4ctKRbapsdoTKQFnaVGG8") + #[serde(default)] + pub labels: HashMap<String, Label>, } impl PersistentStorage { @@ -197,7 +175,7 @@ pub struct GasConfig { pub gas_limit_runtime: u64, } -#[optfield::optfield(pub WalletConfigOverrides, rewrap, attrs = (derive(Debug, Default)))] +#[optfield::optfield(pub WalletConfigOverrides, rewrap, attrs = (derive(Debug, Default, Clone)))] #[derive(Debug, Clone, Serialize, Deserialize)] pub struct WalletConfig { /// Override rust log (env var logging level) @@ -235,7 +213,7 @@ impl Default for WalletConfig { [ { "Public": { - "account_id": "BLgCRDXYdQPMMWVHYRFGQZbgeHx9frkipa8GtpG2Syqy", + "account_id": "6iArKUXxhUJqS7kCaPNhwMWt3ro71PDyBj7jwAyE2VQV", "pub_sign_key": [ 16, 162, @@ -274,7 +252,7 @@ impl Default for WalletConfig { }, { "Public": { - "account_id": "Gj1mJy5W7J5pfmLRujmQaLfLMWidNxQ6uwnhb666ZwHw", + "account_id": "7wHg9sbJwc6h3NP1S9bekfAzB8CHifEcxKswCKUt3YQo", "pub_sign_key": [ 113, 121, diff --git a/wallet/src/helperfunctions.rs b/wallet/src/helperfunctions.rs index 23bf4bb8..0162ef18 100644 --- a/wallet/src/helperfunctions.rs +++ b/wallet/src/helperfunctions.rs @@ -1,4 +1,4 @@ -use std::{path::PathBuf, str::FromStr}; +use std::{collections::HashMap, path::PathBuf, str::FromStr}; use anyhow::Result; use base64::{Engine, engine::general_purpose::STANDARD as BASE64}; @@ -11,7 +11,7 @@ use serde::Serialize; use crate::{ HOME_DIR_ENV_VAR, config::{ - InitialAccountData, 
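// Illustrative sketch, not part of the patch: because `labels` carries
// `#[serde(default)]`, persistent storage files written before this change (with no
// "labels" key) still deserialize; the map simply comes back empty. Assumes
// `PersistentStorage` and `Label` as defined in config.rs above.
#[cfg(test)]
mod labels_compat_sketch {
    use crate::config::{Label, PersistentStorage};

    #[test]
    fn old_storage_without_labels_still_loads() {
        let old_json = r#"{ "accounts": [], "last_synced_block": 42 }"#;
        let storage: PersistentStorage = serde_json::from_str(old_json).unwrap();
        assert!(storage.labels.is_empty());
        assert_eq!(storage.last_synced_block, 42);
    }

    #[test]
    fn labels_survive_a_round_trip() {
        let mut storage: PersistentStorage =
            serde_json::from_str(r#"{ "accounts": [], "last_synced_block": 0 }"#).unwrap();
        storage
            .labels
            .insert("some-account-id".to_string(), Label::new("savings".to_string()));

        let json = serde_json::to_string(&storage).unwrap();
        let back: PersistentStorage = serde_json::from_str(&json).unwrap();
        assert!(back.labels.get("some-account-id") == Some(&Label::new("savings".to_string())));
    }
}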
InitialAccountDataPrivate, InitialAccountDataPublic, + InitialAccountData, InitialAccountDataPrivate, InitialAccountDataPublic, Label, PersistentAccountDataPrivate, PersistentAccountDataPublic, PersistentStorage, }, }; @@ -57,6 +57,7 @@ pub fn fetch_persistent_storage_path() -> Result<PathBuf> { pub fn produce_data_for_storage( user_data: &NSSAUserData, last_synced_block: u64, + labels: HashMap<String, Label>, ) -> PersistentStorage { let mut vec_for_storage = vec![]; @@ -110,6 +111,7 @@ pub fn produce_data_for_storage( PersistentStorage { accounts: vec_for_storage, last_synced_block, + labels, } } diff --git a/wallet/src/lib.rs b/wallet/src/lib.rs index 7c93b614..50adaecf 100644 --- a/wallet/src/lib.rs +++ b/wallet/src/lib.rs @@ -18,11 +18,10 @@ use nssa::{ circuit::ProgramWithDependencies, message::EncryptedAccountData, }, }; -use nssa_core::{ - Commitment, MembershipProof, SharedSecretKey, account::Data, program::InstructionData, -}; +use nssa_core::{Commitment, MembershipProof, SharedSecretKey, program::InstructionData}; pub use privacy_preserving_tx::PrivacyPreservingAccount; use tokio::io::AsyncWriteExt; +use url::Url; use crate::{ config::{PersistentStorage, WalletConfigOverrides}, @@ -45,84 +44,9 @@ pub enum AccDecodeData { Decode(nssa_core::SharedSecretKey, AccountId), } -const TOKEN_DEFINITION_DATA_SIZE: usize = 55; - -const TOKEN_HOLDING_TYPE: u8 = 1; -const TOKEN_HOLDING_DATA_SIZE: usize = 49; -const TOKEN_STANDARD_FUNGIBLE_TOKEN: u8 = 0; -const TOKEN_STANDARD_NONFUNGIBLE: u8 = 2; - -struct TokenDefinition { - #[allow(unused)] - account_type: u8, - name: [u8; 6], - total_supply: u128, - #[allow(unused)] - metadata_id: AccountId, -} - -struct TokenHolding { - #[allow(unused)] - account_type: u8, - definition_id: AccountId, - balance: u128, -} - -impl TokenDefinition { - fn parse(data: &Data) -> Option<Self> { - let data = Vec::<u8>::from(data.clone()); - - if data.len() != TOKEN_DEFINITION_DATA_SIZE { - None - } else { - let account_type = data[0]; - let name = data[1..7].try_into().expect("Name must be a 6 bytes"); - let total_supply = u128::from_le_bytes( - data[7..23] - .try_into() - .expect("Total supply must be 16 bytes little-endian"), - ); - let metadata_id = AccountId::new( - data[23..TOKEN_DEFINITION_DATA_SIZE] - .try_into() - .expect("Token Program expects valid Account Id for Metadata"), - ); - - let this = Some(Self { - account_type, - name, - total_supply, - metadata_id, - }); - - match account_type { - TOKEN_STANDARD_NONFUNGIBLE if total_supply != 1 => None, - TOKEN_STANDARD_FUNGIBLE_TOKEN if metadata_id != AccountId::new([0; 32]) => None, - _ => this, - } - } - } -} - -impl TokenHolding { - fn parse(data: &[u8]) -> Option<Self> { - if data.len() != TOKEN_HOLDING_DATA_SIZE || data[0] != TOKEN_HOLDING_TYPE { - None - } else { - let account_type = data[0]; - let definition_id = AccountId::new(data[1..33].try_into().unwrap()); - let balance = u128::from_le_bytes(data[33..].try_into().unwrap()); - Some(Self { - definition_id, - balance, - account_type, - }) - } - } -} - pub struct WalletCore { config_path: PathBuf, + config_overrides: Option<WalletConfigOverrides>, storage: WalletChainStore, storage_path: PathBuf, poller: TxPoller, @@ -148,6 +72,7 @@ impl WalletCore { let PersistentStorage { accounts: persistent_accounts, last_synced_block, + labels, } = PersistentStorage::from_path(&storage_path) .with_context(|| format!("Failed to read persistent storage at {storage_path:#?}"))?; @@ -155,7 +80,7 @@ impl WalletCore { config_path, storage_path, config_overrides, - 
|config| WalletChainStore::new(config, persistent_accounts), + |config| WalletChainStore::new(config, persistent_accounts, labels), last_synced_block, ) } @@ -184,17 +109,13 @@ impl WalletCore { ) -> Result<Self> { let mut config = WalletConfig::from_path_or_initialize_default(&config_path) .with_context(|| format!("Failed to deserialize wallet config at {config_path:#?}"))?; - if let Some(config_overrides) = config_overrides { + if let Some(config_overrides) = config_overrides.clone() { config.apply_overrides(config_overrides); } - let basic_auth = config - .basic_auth - .as_ref() - .map(|auth| (auth.username.clone(), auth.password.clone())); let sequencer_client = Arc::new(SequencerClient::new_with_auth( - config.sequencer_addr.clone(), - basic_auth, + Url::parse(&config.sequencer_addr)?, + config.basic_auth.clone(), )?); let tx_poller = TxPoller::new(config.clone(), Arc::clone(&sequencer_client)); @@ -207,6 +128,7 @@ impl WalletCore { poller: tx_poller, sequencer_client, last_synced_block, + config_overrides, }) } @@ -228,7 +150,11 @@ impl WalletCore { /// Store persistent data at home pub async fn store_persistent_data(&self) -> Result<()> { - let data = produce_data_for_storage(&self.storage.user_data, self.last_synced_block); + let data = produce_data_for_storage( + &self.storage.user_data, + self.last_synced_block, + self.storage.labels.clone(), + ); let storage = serde_json::to_vec_pretty(&data)?; let mut storage_file = tokio::fs::File::create(&self.storage_path).await?; @@ -541,4 +467,16 @@ impl WalletCore { .insert_private_account_data(affected_account_id, new_acc); } } + + pub fn config_path(&self) -> &PathBuf { + &self.config_path + } + + pub fn storage_path(&self) -> &PathBuf { + &self.storage_path + } + + pub fn config_overrides(&self) -> &Option<WalletConfigOverrides> { + &self.config_overrides + } } diff --git a/wallet/src/program_facades/amm.rs b/wallet/src/program_facades/amm.rs index 3beb92cb..7039a937 100644 --- a/wallet/src/program_facades/amm.rs +++ b/wallet/src/program_facades/amm.rs @@ -1,8 +1,9 @@ use common::{error::ExecutionFailureKind, rpc_primitives::requests::SendTxResponse}; use nssa::{AccountId, ProgramId, program::Program}; use nssa_core::program::PdaSeed; +use token_core::TokenHolding; -use crate::{TokenHolding, WalletCore}; +use crate::WalletCore; fn compute_pool_pda( amm_program_id: ProgramId, @@ -123,12 +124,12 @@ impl Amm<'_> { .await .map_err(|_| ExecutionFailureKind::SequencerError)?; - let definition_token_a_id = TokenHolding::parse(&user_a_acc.data) - .ok_or(ExecutionFailureKind::AccountDataError(user_holding_a))? - .definition_id; - let definition_token_b_id = TokenHolding::parse(&user_b_acc.data) - .ok_or(ExecutionFailureKind::AccountDataError(user_holding_a))? - .definition_id; + let definition_token_a_id = TokenHolding::try_from(&user_a_acc.data) + .map_err(|_| ExecutionFailureKind::AccountDataError(user_holding_a))? + .definition_id(); + let definition_token_b_id = TokenHolding::try_from(&user_b_acc.data) + .map_err(|_| ExecutionFailureKind::AccountDataError(user_holding_b))? + .definition_id(); let amm_pool = compute_pool_pda(amm_program_id, definition_token_a_id, definition_token_b_id); @@ -208,12 +209,12 @@ impl Amm<'_> { .await .map_err(|_| ExecutionFailureKind::SequencerError)?; - let definition_token_a_id = TokenHolding::parse(&user_a_acc.data) - .ok_or(ExecutionFailureKind::AccountDataError(user_holding_a))? 
- .definition_id; - let definition_token_b_id = TokenHolding::parse(&user_b_acc.data) - .ok_or(ExecutionFailureKind::AccountDataError(user_holding_b))? - .definition_id; + let definition_token_a_id = TokenHolding::try_from(&user_a_acc.data) + .map_err(|_| ExecutionFailureKind::AccountDataError(user_holding_a))? + .definition_id(); + let definition_token_b_id = TokenHolding::try_from(&user_b_acc.data) + .map_err(|_| ExecutionFailureKind::AccountDataError(user_holding_b))? + .definition_id(); let amm_pool = compute_pool_pda(amm_program_id, definition_token_a_id, definition_token_b_id); @@ -242,14 +243,14 @@ impl Amm<'_> { .await .map_err(|_| ExecutionFailureKind::SequencerError)?; - let token_holder_a = TokenHolding::parse(&token_holder_acc_a.data) - .ok_or(ExecutionFailureKind::AccountDataError(user_holding_a))?; - let token_holder_b = TokenHolding::parse(&token_holder_acc_b.data) - .ok_or(ExecutionFailureKind::AccountDataError(user_holding_b))?; + let token_holder_a = TokenHolding::try_from(&token_holder_acc_a.data) + .map_err(|_| ExecutionFailureKind::AccountDataError(user_holding_a))?; + let token_holder_b = TokenHolding::try_from(&token_holder_acc_b.data) + .map_err(|_| ExecutionFailureKind::AccountDataError(user_holding_b))?; - if token_holder_a.definition_id == token_definition_id { + if token_holder_a.definition_id() == token_definition_id { account_id_auth = user_holding_a; - } else if token_holder_b.definition_id == token_definition_id { + } else if token_holder_b.definition_id() == token_definition_id { account_id_auth = user_holding_b; } else { return Err(ExecutionFailureKind::AccountDataError(token_definition_id)); @@ -309,12 +310,12 @@ impl Amm<'_> { .await .map_err(|_| ExecutionFailureKind::SequencerError)?; - let definition_token_a_id = TokenHolding::parse(&user_a_acc.data) - .ok_or(ExecutionFailureKind::AccountDataError(user_holding_a))? - .definition_id; - let definition_token_b_id = TokenHolding::parse(&user_b_acc.data) - .ok_or(ExecutionFailureKind::AccountDataError(user_holding_a))? - .definition_id; + let definition_token_a_id = TokenHolding::try_from(&user_a_acc.data) + .map_err(|_| ExecutionFailureKind::AccountDataError(user_holding_a))? + .definition_id(); + let definition_token_b_id = TokenHolding::try_from(&user_b_acc.data) + .map_err(|_| ExecutionFailureKind::AccountDataError(user_holding_b))? + .definition_id(); let amm_pool = compute_pool_pda(amm_program_id, definition_token_a_id, definition_token_b_id); @@ -395,12 +396,12 @@ impl Amm<'_> { .await .map_err(|_| ExecutionFailureKind::SequencerError)?; - let definition_token_a_id = TokenHolding::parse(&user_a_acc.data) - .ok_or(ExecutionFailureKind::AccountDataError(user_holding_a))? - .definition_id; - let definition_token_b_id = TokenHolding::parse(&user_b_acc.data) - .ok_or(ExecutionFailureKind::AccountDataError(user_holding_a))? - .definition_id; + let definition_token_a_id = TokenHolding::try_from(&user_a_acc.data) + .map_err(|_| ExecutionFailureKind::AccountDataError(user_holding_a))? + .definition_id(); + let definition_token_b_id = TokenHolding::try_from(&user_b_acc.data) + .map_err(|_| ExecutionFailureKind::AccountDataError(user_holding_b))? 
+            .definition_id();
 
        let amm_pool = compute_pool_pda(amm_program_id, definition_token_a_id, definition_token_b_id);
diff --git a/wallet/src/program_facades/token.rs b/wallet/src/program_facades/token.rs
index ed02ff82..7d0ab020 100644
--- a/wallet/src/program_facades/token.rs
+++ b/wallet/src/program_facades/token.rs
@@ -1,6 +1,7 @@
 use common::{error::ExecutionFailureKind, rpc_primitives::requests::SendTxResponse};
 use nssa::{AccountId, program::Program};
-use nssa_core::{NullifierPublicKey, SharedSecretKey, encryption::ViewingPublicKey};
+use nssa_core::{NullifierPublicKey, SharedSecretKey, encryption::IncomingViewingPublicKey};
+use token_core::Instruction;
 
 use crate::{PrivacyPreservingAccount, WalletCore};
 
@@ -11,15 +12,12 @@ impl Token<'_> {
         &self,
         definition_account_id: AccountId,
         supply_account_id: AccountId,
-        name: [u8; 6],
+        name: String,
         total_supply: u128,
     ) -> Result<SendTxResponse, ExecutionFailureKind> {
         let account_ids = vec![definition_account_id, supply_account_id];
         let program_id = nssa::program::Program::token().id();
-        // Instruction must be: [0x00 || total_supply (little-endian 16 bytes) || name (6 bytes)]
-        let mut instruction = vec![0u8; 23];
-        instruction[1..17].copy_from_slice(&total_supply.to_le_bytes());
-        instruction[17..].copy_from_slice(&name);
+        let instruction = Instruction::NewFungibleDefinition { name, total_supply };
         let message = nssa::public_transaction::Message::try_new(
             program_id,
             account_ids,
@@ -39,10 +37,10 @@ impl Token<'_> {
         &self,
         definition_account_id: AccountId,
         supply_account_id: AccountId,
-        name: [u8; 6],
+        name: String,
         total_supply: u128,
     ) -> Result<(SendTxResponse, SharedSecretKey), ExecutionFailureKind> {
-        let instruction = token_program_preparation_definition(name, total_supply);
+        let instruction = Instruction::NewFungibleDefinition { name, total_supply };
         let instruction_data =
             Program::serialize_instruction(instruction).expect("Instruction should serialize");
 
@@ -69,10 +67,10 @@ impl Token<'_> {
         &self,
         definition_account_id: AccountId,
         supply_account_id: AccountId,
-        name: [u8; 6],
+        name: String,
         total_supply: u128,
     ) -> Result<(SendTxResponse, SharedSecretKey), ExecutionFailureKind> {
-        let instruction = token_program_preparation_definition(name, total_supply);
+        let instruction = Instruction::NewFungibleDefinition { name, total_supply };
         let instruction_data =
             Program::serialize_instruction(instruction).expect("Instruction should serialize");
 
@@ -99,10 +97,10 @@ impl Token<'_> {
         &self,
         definition_account_id: AccountId,
         supply_account_id: AccountId,
-        name: [u8; 6],
+        name: String,
         total_supply: u128,
     ) -> Result<(SendTxResponse, [SharedSecretKey; 2]), ExecutionFailureKind> {
-        let instruction = token_program_preparation_definition(name, total_supply);
+        let instruction = Instruction::NewFungibleDefinition { name, total_supply };
         let instruction_data =
             Program::serialize_instruction(instruction).expect("Instruction should serialize");
 
@@ -132,11 +130,9 @@ impl Token<'_> {
     ) -> Result<SendTxResponse, ExecutionFailureKind> {
         let account_ids = vec![sender_account_id, recipient_account_id];
         let program_id = nssa::program::Program::token().id();
-        // Instruction must be: [0x01 || amount (little-endian 16 bytes) || 0x00 || 0x00 || 0x00 ||
-        // 0x00 || 0x00 || 0x00].
-        let mut instruction = vec![0u8; 23];
-        instruction[0] = 0x01;
-        instruction[1..17].copy_from_slice(&amount.to_le_bytes());
+        let instruction = Instruction::Transfer {
+            amount_to_transfer: amount,
+        };
         let Ok(nonces) = self.0.get_accounts_nonces(vec![sender_account_id]).await else {
             return Err(ExecutionFailureKind::SequencerError);
         };
@@ -170,7 +166,9 @@ impl Token<'_> {
         recipient_account_id: AccountId,
         amount: u128,
     ) -> Result<(SendTxResponse, [SharedSecretKey; 2]), ExecutionFailureKind> {
-        let instruction = token_program_preparation_transfer(amount);
+        let instruction = Instruction::Transfer {
+            amount_to_transfer: amount,
+        };
         let instruction_data =
             Program::serialize_instruction(instruction).expect("Instruction should serialize");
 
@@ -199,7 +197,9 @@ impl Token<'_> {
         recipient_vpk: ViewingPublicKey,
         amount: u128,
     ) -> Result<(SendTxResponse, [SharedSecretKey; 2]), ExecutionFailureKind> {
-        let instruction = token_program_preparation_transfer(amount);
+        let instruction = Instruction::Transfer {
+            amount_to_transfer: amount,
+        };
         let instruction_data =
             Program::serialize_instruction(instruction).expect("Instruction should serialize");
 
@@ -230,7 +230,9 @@ impl Token<'_> {
         recipient_account_id: AccountId,
         amount: u128,
     ) -> Result<(SendTxResponse, SharedSecretKey), ExecutionFailureKind> {
-        let instruction = token_program_preparation_transfer(amount);
+        let instruction = Instruction::Transfer {
+            amount_to_transfer: amount,
+        };
         let instruction_data =
             Program::serialize_instruction(instruction).expect("Instruction should serialize");
 
@@ -259,7 +261,9 @@ impl Token<'_> {
         recipient_account_id: AccountId,
         amount: u128,
     ) -> Result<(SendTxResponse, SharedSecretKey), ExecutionFailureKind> {
-        let instruction = token_program_preparation_transfer(amount);
+        let instruction = Instruction::Transfer {
+            amount_to_transfer: amount,
+        };
         let instruction_data =
             Program::serialize_instruction(instruction).expect("Instruction should serialize");
 
@@ -289,7 +293,9 @@ impl Token<'_> {
         recipient_vpk: ViewingPublicKey,
         amount: u128,
     ) -> Result<(SendTxResponse, SharedSecretKey), ExecutionFailureKind> {
-        let instruction = token_program_preparation_transfer(amount);
+        let instruction = Instruction::Transfer {
+            amount_to_transfer: amount,
+        };
         let instruction_data =
             Program::serialize_instruction(instruction).expect("Instruction should serialize");
 
@@ -322,7 +328,9 @@ impl Token<'_> {
         amount: u128,
     ) -> Result<SendTxResponse, ExecutionFailureKind> {
         let account_ids = vec![definition_account_id, holder_account_id];
-        let instruction = token_program_preparation_burn(amount);
+        let instruction = Instruction::Burn {
+            amount_to_burn: amount,
+        };
 
         let Ok(nonces) = self.0.get_accounts_nonces(vec![holder_account_id]).await else {
             return Err(ExecutionFailureKind::SequencerError);
@@ -355,7 +363,9 @@ impl Token<'_> {
         holder_account_id: AccountId,
         amount: u128,
     ) -> Result<(SendTxResponse, [SharedSecretKey; 2]), ExecutionFailureKind> {
-        let instruction = token_program_preparation_burn(amount);
+        let instruction = Instruction::Burn {
+            amount_to_burn: amount,
+        };
         let instruction_data =
             Program::serialize_instruction(instruction).expect("Instruction should serialize");
 
@@ -383,7 +393,9 @@ impl Token<'_> {
         holder_account_id: AccountId,
         amount: u128,
     ) -> Result<(SendTxResponse, SharedSecretKey), ExecutionFailureKind> {
-        let instruction = token_program_preparation_burn(amount);
+        let instruction = Instruction::Burn {
+            amount_to_burn: amount,
+        };
         let instruction_data =
             Program::serialize_instruction(instruction).expect("Instruction should serialize");
 
@@ -412,7 +424,9 @@ impl Token<'_> {
         holder_account_id: AccountId,
         amount: u128,
     ) -> Result<(SendTxResponse, SharedSecretKey), ExecutionFailureKind> {
-        let instruction = token_program_preparation_burn(amount);
+        let instruction = Instruction::Burn {
+            amount_to_burn: amount,
+        };
         let instruction_data =
             Program::serialize_instruction(instruction).expect("Instruction should serialize");
 
@@ -442,7 +456,9 @@ impl Token<'_> {
         amount: u128,
     ) -> Result<SendTxResponse, ExecutionFailureKind> {
         let account_ids = vec![definition_account_id, holder_account_id];
-        let instruction = token_program_preparation_mint(amount);
+        let instruction = Instruction::Mint {
+            amount_to_mint: amount,
+        };
 
         let Ok(nonces) = self
             .0
@@ -481,7 +497,9 @@ impl Token<'_> {
         holder_account_id: AccountId,
         amount: u128,
     ) -> Result<(SendTxResponse, [SharedSecretKey; 2]), ExecutionFailureKind> {
-        let instruction = token_program_preparation_mint(amount);
+        let instruction = Instruction::Mint {
+            amount_to_mint: amount,
+        };
         let instruction_data =
             Program::serialize_instruction(instruction).expect("Instruction should serialize");
 
@@ -510,7 +528,9 @@ impl Token<'_> {
         holder_vpk: ViewingPublicKey,
         amount: u128,
     ) -> Result<(SendTxResponse, [SharedSecretKey; 2]), ExecutionFailureKind> {
-        let instruction = token_program_preparation_mint(amount);
+        let instruction = Instruction::Mint {
+            amount_to_mint: amount,
+        };
         let instruction_data =
             Program::serialize_instruction(instruction).expect("Instruction should serialize");
 
@@ -541,7 +561,9 @@ impl Token<'_> {
         holder_account_id: AccountId,
         amount: u128,
     ) -> Result<(SendTxResponse, SharedSecretKey), ExecutionFailureKind> {
-        let instruction = token_program_preparation_mint(amount);
+        let instruction = Instruction::Mint {
+            amount_to_mint: amount,
+        };
         let instruction_data =
             Program::serialize_instruction(instruction).expect("Instruction should serialize");
 
@@ -570,7 +592,9 @@ impl Token<'_> {
         holder_account_id: AccountId,
         amount: u128,
     ) -> Result<(SendTxResponse, SharedSecretKey), ExecutionFailureKind> {
-        let instruction = token_program_preparation_mint(amount);
+        let instruction = Instruction::Mint {
+            amount_to_mint: amount,
+        };
         let instruction_data =
             Program::serialize_instruction(instruction).expect("Instruction should serialize");
 
@@ -600,7 +624,9 @@ impl Token<'_> {
         holder_vpk: ViewingPublicKey,
         amount: u128,
     ) -> Result<(SendTxResponse, SharedSecretKey), ExecutionFailureKind> {
-        let instruction = token_program_preparation_mint(amount);
+        let instruction = Instruction::Mint {
+            amount_to_mint: amount,
+        };
         let instruction_data =
             Program::serialize_instruction(instruction).expect("Instruction should serialize");
 
@@ -626,42 +652,3 @@ impl Token<'_> {
         })
     }
 }
-
-fn token_program_preparation_transfer(amount: u128) -> Vec<u8> {
-    // Instruction must be: [0x01 || amount (little-endian 16 bytes) || 0x00 || 0x00 || 0x00 ||
-    // 0x00 || 0x00 || 0x00].
-    let mut instruction = vec![0u8; 23];
-    instruction[0] = 0x01;
-    instruction[1..17].copy_from_slice(&amount.to_le_bytes());
-
-    instruction
-}
-
-fn token_program_preparation_definition(name: [u8; 6], total_supply: u128) -> Vec<u8> {
-    // Instruction must be: [0x00 || total_supply (little-endian 16 bytes) || name (6 bytes)]
-    let mut instruction = vec![0u8; 23];
-    instruction[1..17].copy_from_slice(&total_supply.to_le_bytes());
-    instruction[17..].copy_from_slice(&name);
-
-    instruction
-}
-
-fn token_program_preparation_burn(amount: u128) -> Vec<u8> {
-    // Instruction must be: [0x03 || amount (little-endian 16 bytes) || 0x00 || 0x00 || 0x00 ||
-    // 0x00 || 0x00 || 0x00].
-    let mut instruction = vec![0; 23];
-    instruction[0] = 0x03;
-    instruction[1..17].copy_from_slice(&amount.to_le_bytes());
-
-    instruction
-}
-
-fn token_program_preparation_mint(amount: u128) -> Vec<u8> {
-    // Instruction must be: [0x04 || amount (little-endian 16 bytes) || 0x00 || 0x00 || 0x00 ||
-    // 0x00 || 0x00 || 0x00].
-    let mut instruction = vec![0; 23];
-    instruction[0] = 0x04;
-    instruction[1..17].copy_from_slice(&amount.to_le_bytes());
-
-    instruction
-}
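Reviewer note: the wallet now builds token instructions through the typed `token_core::Instruction` enum and serializes them with `Program::serialize_instruction`, instead of hand-packing the 23-byte buffers produced by the removed `token_program_preparation_*` helpers (tag byte 0x00 for definitions, 0x01 for transfers, 0x03 for burns, 0x04 for mints, followed by a little-endian u128 and zero padding). The sketch below only illustrates the enum shape implied by the call sites in this diff; the actual definition in `token_core` (additional variants, derives, and the wire encoding chosen by `Program::serialize_instruction`) may differ.

    // Sketch only: variant and field names are inferred from the call sites above;
    // the real `token_core::Instruction` may have more variants, derive macros, or a
    // different serialized layout.
    pub enum Instruction {
        NewFungibleDefinition { name: String, total_supply: u128 },
        Transfer { amount_to_transfer: u128 },
        Mint { amount_to_mint: u128 },
        Burn { amount_to_burn: u128 },
    }

    // Hypothetical helper mirroring the pattern now used throughout token.rs:
    // construct the typed instruction, then hand it to Program::serialize_instruction.
    fn transfer_instruction(amount: u128) -> Instruction {
        Instruction::Transfer {
            amount_to_transfer: amount,
        }
    }

One visible consequence of the switch is that token names are no longer limited to a fixed 6-byte array; they are passed as `String`, and the byte-level layout details stay next to the program that parses them rather than being duplicated in the wallet.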