diff --git a/.deny.toml b/.deny.toml index e65cdd34..fb1ce3cf 100644 --- a/.deny.toml +++ b/.deny.toml @@ -14,6 +14,8 @@ ignore = [ { id = "RUSTSEC-2025-0141", reason = "`bincode` is unmaintained but continuing to use it." }, { id = "RUSTSEC-2023-0089", reason = "atomic-polyfill is pulled transitively via risc0-zkvm; waiting on upstream fix (see https://github.com/risc0/risc0/issues/3453)" }, { id = "RUSTSEC-2026-0097", reason = "`rand` v0.8.5 is present transitively from logos crates, modification may break integration" }, + { id = "RUSTSEC-2026-0118", reason = "`hickory-proto` v0.25.0-alpha.5 is present transitively from logos crates, modification may break integration" }, + { id = "RUSTSEC-2026-0119", reason = "`hickory-proto` v0.25.0-alpha.5 is present transitively from logos crates, modification may break integration" }, ] yanked = "deny" unused-ignored-advisory = "deny" diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 02381dfc..b7ed4f34 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -134,7 +134,7 @@ jobs: integration-tests: runs-on: ubuntu-latest - timeout-minutes: 60 + timeout-minutes: 90 # TODO: Apply CI cache to speed this up steps: - uses: actions/checkout@v5 with: @@ -158,39 +158,11 @@ jobs: env: RISC0_DEV_MODE: "1" RUST_LOG: "info" - run: cargo nextest run -p integration_tests -- --skip tps_test --skip indexer - - integration-tests-indexer: - runs-on: ubuntu-latest - timeout-minutes: 60 - steps: - - uses: actions/checkout@v5 - with: - ref: ${{ github.event.pull_request.head.sha || github.head_ref }} - - - uses: ./.github/actions/install-system-deps - - - uses: ./.github/actions/install-risc0 - - - uses: ./.github/actions/install-logos-blockchain-circuits - with: - github-token: ${{ secrets.GITHUB_TOKEN }} - - - name: Install active toolchain - run: rustup install - - - name: Install nextest - run: cargo install --locked cargo-nextest - - - name: Run tests - env: - RISC0_DEV_MODE: "1" - RUST_LOG: "info" - 
run: cargo nextest run -p integration_tests indexer -- --skip tps_test + run: cargo nextest run -p integration_tests -- --skip tps_test valid-proof-test: runs-on: ubuntu-latest - timeout-minutes: 60 + timeout-minutes: 90 steps: - uses: actions/checkout@v5 with: @@ -225,7 +197,7 @@ jobs: - uses: ./.github/actions/install-risc0 - name: Install just - run: cargo install just + run: cargo install --locked just - name: Build artifacts run: just build-artifacts diff --git a/Cargo.lock b/Cargo.lock index 73fbb12a..ec587885 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -627,6 +627,51 @@ version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" +[[package]] +name = "asn1-rs" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56624a96882bb8c26d61312ae18cb45868e5a9992ea73c58e45c3101e56a1e60" +dependencies = [ + "asn1-rs-derive", + "asn1-rs-impl", + "displaydoc", + "nom 7.1.3", + "num-traits", + "rusticata-macros", + "thiserror 2.0.18", + "time", +] + +[[package]] +name = "asn1-rs-derive" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3109e49b1e4909e9db6515a30c633684d68cdeaa252f215214cb4fa1a5bfee2c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", + "synstructure", +] + +[[package]] +name = "asn1-rs-impl" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b18050c2cd6fe86c3a76584ef5e0baf286d038cda203eb6223df2cc413565f7" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "asn1_der" +version = "0.7.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4858a9d740c5007a9069007c3b4e91152d0506f13c1b31dd49051fd537656156" + [[package]] name = "astral-tokio-tar" version = "0.6.1" @@ -677,6 +722,36 @@ dependencies = [ "serde", ] +[[package]] +name = "async-channel" 
+version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "924ed96dd52d1b75e9c1a3e6275715fd320f5f9439fb5a4a11fa51f4221158d2" +dependencies = [ + "concurrent-queue", + "event-listener-strategy", + "futures-core", + "pin-project-lite", +] + +[[package]] +name = "async-io" +version = "2.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "456b8a8feb6f42d237746d4b3e9a178494627745c3c56c6ea55d92ba50d026fc" +dependencies = [ + "autocfg", + "cfg-if", + "concurrent-queue", + "futures-io", + "futures-lite", + "parking", + "polling", + "rustix", + "slab", + "windows-sys 0.61.2", +] + [[package]] name = "async-lock" version = "3.4.2" @@ -694,6 +769,17 @@ version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4288f83726785267c6f2ef073a3d83dc3f9b81464e9f99898240cced85fce35a" +[[package]] +name = "async-recursion" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3b43422f69d8ff38f95f1b2bb76517c91589a924d1559a0e935d7c8ce0274c11" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + [[package]] name = "async-stream" version = "0.3.6" @@ -727,6 +813,19 @@ dependencies = [ "syn 2.0.117", ] +[[package]] +name = "asynchronous-codec" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a860072022177f903e59730004fb5dc13db9275b79bb2aef7ba8ce831956c233" +dependencies = [ + "bytes", + "futures-sink", + "futures-util", + "memchr", + "pin-project-lite", +] + [[package]] name = "ata_core" version = "0.1.0" @@ -760,6 +859,29 @@ version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" +[[package]] +name = "attohttpc" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d9a9bf8b79a749ee0b911b91b671cc2b6c670bdbc7e3dfd537576ddc94bb2a2" 
+dependencies = [ + "http 0.2.12", + "log", + "url", +] + +[[package]] +name = "attohttpc" +version = "0.30.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16e2cdb6d5ed835199484bb92bb8b3edd526effe995c61732580439c1a67e2e9" +dependencies = [ + "base64 0.22.1", + "http 1.4.0", + "log", + "url", +] + [[package]] name = "attribute-derive" version = "0.10.5" @@ -790,6 +912,13 @@ dependencies = [ "syn 2.0.117", ] +[[package]] +name = "authenticated_transfer_core" +version = "0.1.0" +dependencies = [ + "serde", +] + [[package]] name = "autocfg" version = "1.5.0" @@ -806,7 +935,7 @@ dependencies = [ "axum-core 0.4.5", "bytes", "futures-util", - "http", + "http 1.4.0", "http-body", "http-body-util", "hyper", @@ -840,7 +969,7 @@ dependencies = [ "bytes", "form_urlencoded", "futures-util", - "http", + "http 1.4.0", "http-body", "http-body-util", "hyper", @@ -875,7 +1004,7 @@ dependencies = [ "async-trait", "bytes", "futures-util", - "http", + "http 1.4.0", "http-body", "http-body-util", "mime", @@ -894,7 +1023,7 @@ checksum = "08c78f31d7b1291f7ee735c1c6780ccde7785daae9a9206026862dab7d8792d1" dependencies = [ "bytes", "futures-core", - "http", + "http 1.4.0", "http-body", "http-body-util", "mime", @@ -905,6 +1034,17 @@ dependencies = [ "tracing", ] +[[package]] +name = "backon" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cffb0e931875b666fc4fcb20fee52e9bbd1ef836fd9e9e04ec21555f9f85f7ef" +dependencies = [ + "fastrand", + "gloo-timers 0.3.0", + "tokio", +] + [[package]] name = "base-x" version = "0.2.11" @@ -957,24 +1097,6 @@ version = "1.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2af50177e190e07a26ab74f8b1efbfe2ef87da2116221318cb1c2e82baf7de06" -[[package]] -name = "bedrock_client" -version = "0.1.0" -dependencies = [ - "anyhow", - "common", - "futures", - "humantime-serde", - "log", - "logos-blockchain-chain-broadcast-service", - 
"logos-blockchain-chain-service", - "logos-blockchain-common-http-client", - "logos-blockchain-core", - "reqwest", - "serde", - "tokio-retry", -] - [[package]] name = "bincode" version = "1.3.3" @@ -1101,7 +1223,7 @@ dependencies = [ "futures-util", "hex", "home", - "http", + "http 1.4.0", "http-body-util", "hyper", "hyper-named-pipe", @@ -1354,17 +1476,6 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" -[[package]] -name = "cfg_eval" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45565fc9416b9896014f5732ac776f810ee53a66730c17e4020c3ec064a8f88f" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.117", -] - [[package]] name = "chacha20" version = "0.10.0" @@ -1374,6 +1485,7 @@ dependencies = [ "cfg-if", "cipher 0.5.1", "cpufeatures 0.3.0", + "rand_core 0.10.1", ] [[package]] @@ -1517,6 +1629,7 @@ name = "common" version = "0.1.0" dependencies = [ "anyhow", + "authenticated_transfer_core", "base64 0.22.1", "borsh", "clock_core", @@ -1660,6 +1773,15 @@ dependencies = [ "unicode-segmentation", ] +[[package]] +name = "convert_case" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "633458d4ef8c78b72454de2d54fd6ab2e60f9e02be22f3c6104cdc8a4e0fceb9" +dependencies = [ + "unicode-segmentation", +] + [[package]] name = "convert_case" version = "0.11.0" @@ -1973,6 +2095,20 @@ dependencies = [ "zeroize", ] +[[package]] +name = "der-parser" +version = "10.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07da5016415d5a3c4dd39b11ed26f915f52fc4e0dc197d87908bc916e51bc1a6" +dependencies = [ + "asn1-rs", + "displaydoc", + "nom 7.1.3", + "num-bigint 0.4.6", + "num-traits", + "rusticata-macros", +] + [[package]] name = "deranged" version = "0.5.8" @@ -2062,6 +2198,7 @@ version = "2.1.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "799a97264921d8623a957f6c3b9011f3b5492f557bbb7a5a19b7fa6d06ba8dcb" dependencies = [ + "convert_case 0.10.0", "proc-macro2", "quote", "rustc_version", @@ -2123,10 +2260,21 @@ dependencies = [ ] [[package]] -name = "docker-compose-types" -version = "0.22.0" +name = "dlopen2" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7edb75a85449fd9c34d9fb3376c6208ec4115d2ca43b965175a52d71349ecab8" +checksum = "09b4f5f101177ff01b8ec4ecc81eead416a8aa42819a2869311b3420fa114ffa" +dependencies = [ + "libc", + "once_cell", + "winapi", +] + +[[package]] +name = "docker-compose-types" +version = "0.23.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ea51e75cfa9371c4d760270c3da13516d7206121d668c1fbdd6fd83d1782b0f" dependencies = [ "derive_builder", "indexmap 2.13.0", @@ -2177,6 +2325,12 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "669a445ee724c5c69b1b06fe0b63e70a1c84bc9bb7d9696cd4f4e3ec45050408" +[[package]] +name = "dtoa" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4c3cf4824e2d5f025c7b531afcb2325364084a16806f6d47fbc1f5fbd9960590" + [[package]] name = "duplicate" version = "2.0.1" @@ -2215,6 +2369,7 @@ version = "2.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "115531babc129696a58c64a4fef0a8bf9e9698629fb97e9e40767d235cfbcd53" dependencies = [ + "pkcs8", "serde", "signature", ] @@ -2316,6 +2471,18 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "enum-as-inner" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1e6a265c649f3f5979b601d26f1d05ada116434c87741c9493cb56218f76cbc" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn 2.0.117", +] + [[package]] name = "enum-map" version = "2.7.3" @@ -2499,14 +2666,22 @@ version = "2.3.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" +[[package]] +name = "faucet_core" +version = "0.1.0" +dependencies = [ + "nssa_core", + "serde", +] + [[package]] name = "ferroid" -version = "0.8.9" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb330bbd4cb7a5b9f559427f06f98a4f853a137c8298f3bd3f8ca57663e21986" +checksum = "ee93edf3c501f0035bbeffeccfed0b79e14c311f12195ec0e661e114a0f60da4" dependencies = [ "portable-atomic", - "rand 0.9.3", + "rand 0.10.1", "web-time", ] @@ -2583,15 +2758,6 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" -[[package]] -name = "foreign-types" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" -dependencies = [ - "foreign-types-shared 0.1.1", -] - [[package]] name = "foreign-types" version = "0.5.0" @@ -2599,7 +2765,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d737d9aa519fb7b749cbc3b962edcf310a8dd1f4b67c91c4f83975dbdd17d965" dependencies = [ "foreign-types-macros", - "foreign-types-shared 0.3.1", + "foreign-types-shared", ] [[package]] @@ -2613,12 +2779,6 @@ dependencies = [ "syn 2.0.117", ] -[[package]] -name = "foreign-types-shared" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" - [[package]] name = "foreign-types-shared" version = "0.3.1" @@ -2655,6 +2815,16 @@ dependencies = [ "futures-util", ] +[[package]] +name = "futures-bounded" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "91f328e7fb845fc832912fb6a34f40cf6d1888c92f974d1893a54e97b5ff542e" +dependencies = [ + 
"futures-timer", + "futures-util", +] + [[package]] name = "futures-channel" version = "0.3.32" @@ -2688,6 +2858,16 @@ version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cecba35d7ad927e23624b22ad55235f2239cfa44fd10428eecbeba6d6a717718" +[[package]] +name = "futures-lite" +version = "2.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f78e10609fe0e0b3f4157ffab1876319b5b0db102a2c60dc4626306dc46b44ad" +dependencies = [ + "futures-core", + "pin-project-lite", +] + [[package]] name = "futures-macro" version = "0.3.32" @@ -2699,6 +2879,17 @@ dependencies = [ "syn 2.0.117", ] +[[package]] +name = "futures-rustls" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a8f2f12607f92c69b12ed746fabf9ca4f5c482cba46679c1a75b874ed7c26adb" +dependencies = [ + "futures-io", + "rustls", + "rustls-pki-types", +] + [[package]] name = "futures-sink" version = "0.3.32" @@ -2717,7 +2908,7 @@ version = "3.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24" dependencies = [ - "gloo-timers", + "gloo-timers 0.2.6", "send_wrapper 0.4.0", ] @@ -2821,6 +3012,7 @@ dependencies = [ "js-sys", "libc", "r-efi 6.0.0", + "rand_core 0.10.1", "wasip2", "wasip3", "wasm-bindgen", @@ -2863,7 +3055,7 @@ dependencies = [ "futures-core", "futures-sink", "gloo-utils", - "http", + "http 1.4.0", "js-sys", "pin-project", "serde", @@ -2886,6 +3078,18 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "gloo-timers" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbb143cf96099802033e0d4f4963b19fd2e0b728bcf076cd9cf7f6634f092994" +dependencies = [ + "futures-channel", + "futures-core", + "js-sys", + "wasm-bindgen", +] + [[package]] name = "gloo-utils" version = "0.2.0" @@ -2927,7 +3131,7 @@ dependencies = [ "fnv", "futures-core", "futures-sink", 
- "http", + "http 1.4.0", "indexmap 2.13.0", "slab", "tokio", @@ -2975,6 +3179,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" dependencies = [ "allocator-api2", + "equivalent", "foldhash", ] @@ -2984,6 +3189,15 @@ version = "0.16.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100" +[[package]] +name = "hashlink" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6ba4ff7128dee98c7dc9794b6a411377e1404dba1c97deb8d1a55297bd25d8af" +dependencies = [ + "hashbrown 0.14.5", +] + [[package]] name = "hashlink" version = "0.10.0" @@ -3013,6 +3227,12 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" +[[package]] +name = "hermit-abi" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fc0fef456e4baa96da950455cd02c081ca953b141298e41db3fc7e36b1da849c" + [[package]] name = "hex" version = "0.4.3" @@ -3040,6 +3260,59 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e712f64ec3850b98572bffac52e2c6f282b29fe6c5fa6d42334b30be438d95c1" +[[package]] +name = "hex_fmt" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b07f60793ff0a4d9cef0f18e63b5357e06209987153a64648c972c1e5aff336f" + +[[package]] +name = "hickory-proto" +version = "0.25.0-alpha.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d00147af6310f4392a31680db52a3ed45a2e0f68eb18e8c3fe5537ecc96d9e2" +dependencies = [ + "async-recursion", + "async-trait", + "cfg-if", + "data-encoding", + "enum-as-inner", + "futures-channel", + "futures-io", + "futures-util", + "idna", + "ipnet", + "once_cell", + "rand 0.9.3", + 
"socket2 0.5.10", + "thiserror 2.0.18", + "tinyvec", + "tokio", + "tracing", + "url", +] + +[[package]] +name = "hickory-resolver" +version = "0.25.0-alpha.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5762f69ebdbd4ddb2e975cd24690bf21fe6b2604039189c26acddbc427f12887" +dependencies = [ + "cfg-if", + "futures-util", + "hickory-proto", + "ipconfig", + "moka", + "once_cell", + "parking_lot", + "rand 0.9.3", + "resolv-conf", + "smallvec", + "thiserror 2.0.18", + "tokio", + "tracing", +] + [[package]] name = "hkdf" version = "0.12.4" @@ -3093,6 +3366,17 @@ dependencies = [ "utf8-width", ] +[[package]] +name = "http" +version = "0.2.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" +dependencies = [ + "bytes", + "fnv", + "itoa", +] + [[package]] name = "http" version = "1.4.0" @@ -3110,7 +3394,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" dependencies = [ "bytes", - "http", + "http 1.4.0", ] [[package]] @@ -3121,7 +3405,7 @@ checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a" dependencies = [ "bytes", "futures-core", - "http", + "http 1.4.0", "http-body", "pin-project-lite", ] @@ -3196,7 +3480,7 @@ dependencies = [ "futures-channel", "futures-core", "h2", - "http", + "http 1.4.0", "http-body", "httparse", "httpdate", @@ -3229,7 +3513,7 @@ version = "0.27.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58" dependencies = [ - "http", + "http 1.4.0", "hyper", "hyper-util", "log", @@ -3254,22 +3538,6 @@ dependencies = [ "tower-service", ] -[[package]] -name = "hyper-tls" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" -dependencies = [ - "bytes", - "http-body-util", - "hyper", - "hyper-util", - "native-tls", - "tokio", - "tokio-native-tls", - "tower-service", -] - [[package]] name = "hyper-util" version = "0.1.20" @@ -3280,19 +3548,17 @@ dependencies = [ "bytes", "futures-channel", "futures-util", - "http", + "http 1.4.0", "http-body", "hyper", "ipnet", "libc", "percent-encoding", "pin-project-lite", - "socket2", - "system-configuration", + "socket2 0.6.3", "tokio", "tower-service", "tracing", - "windows-registry", ] [[package]] @@ -3448,6 +3714,81 @@ dependencies = [ "icu_properties", ] +[[package]] +name = "if-addrs" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0a05c691e1fae256cf7013d99dad472dc52d5543322761f83ec8d47eab40d2b" +dependencies = [ + "libc", + "windows-sys 0.61.2", +] + +[[package]] +name = "if-watch" +version = "3.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "71c02a5161c313f0cbdbadc511611893584a10a7b6153cb554bdf83ddce99ec2" +dependencies = [ + "async-io", + "core-foundation 0.9.4", + "fnv", + "futures", + "if-addrs", + "ipnet", + "log", + "netlink-packet-core 0.8.1", + "netlink-packet-route 0.28.0", + "netlink-proto", + "netlink-sys", + "rtnetlink", + "system-configuration 0.7.0", + "tokio", + "windows", +] + +[[package]] +name = "igd-next" +version = "0.15.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76b0d7d4541def58a37bf8efc559683f21edce7c82f0d866c93ac21f7e098f93" +dependencies = [ + "async-trait", + "attohttpc 0.24.1", + "bytes", + "futures", + "http 1.4.0", + "http-body-util", + "hyper", + "hyper-util", + "log", + "rand 0.8.5", + "tokio", + "url", + "xmltree", +] + +[[package]] +name = "igd-next" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "516893339c97f6011282d5825ac94fc1c7aad5cad26bdc2d0cee068c0bf97f97" 
+dependencies = [ + "async-trait", + "attohttpc 0.30.1", + "bytes", + "futures", + "http 1.4.0", + "http-body-util", + "hyper", + "hyper-util", + "log", + "rand 0.9.3", + "tokio", + "url", + "xmltree", +] + [[package]] name = "include_bytes_aligned" version = "0.1.4" @@ -3460,13 +3801,14 @@ version = "0.1.0" dependencies = [ "anyhow", "async-stream", - "bedrock_client", + "authenticated_transfer_core", "borsh", "common", "futures", "humantime-serde", "log", "logos-blockchain-core", + "logos-blockchain-zone-sdk", "nssa", "nssa_core", "serde", @@ -3482,10 +3824,16 @@ dependencies = [ name = "indexer_ffi" version = "0.1.0" dependencies = [ + "anyhow", "cbindgen", "indexer_service", + "indexer_service_protocol", + "indexer_service_rpc", + "jsonrpsee", "log", + "nssa", "tokio", + "url", ] [[package]] @@ -3603,14 +3951,18 @@ version = "0.1.0" dependencies = [ "anyhow", "ata_core", + "authenticated_transfer_core", "bytesize", "common", "env_logger", + "faucet_core", "futures", "hex", "indexer_ffi", "indexer_service", + "indexer_service_protocol", "indexer_service_rpc", + "jsonrpsee", "key_protocol", "log", "nssa", @@ -3621,10 +3973,10 @@ dependencies = [ "serde_json", "tempfile", "testcontainers", - "testnet_initial_state", "token_core", "tokio", "url", + "vault_core", "wallet", "wallet-ffi", ] @@ -3644,6 +3996,19 @@ dependencies = [ "rustversion", ] +[[package]] +name = "ipconfig" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d40460c0ce33d6ce4b0630ad68ff63d6661961c48b6dba35e5a4d81cfb48222" +dependencies = [ + "socket2 0.6.3", + "widestring", + "windows-registry", + "windows-result", + "windows-sys 0.61.2", +] + [[package]] name = "ipnet" version = "2.12.0" @@ -3827,7 +4192,7 @@ dependencies = [ "futures-channel", "futures-util", "gloo-net", - "http", + "http 1.4.0", "jsonrpsee-core", "pin-project", "rustls", @@ -3852,7 +4217,7 @@ dependencies = [ "bytes", "futures-timer", "futures-util", - "http", + "http 1.4.0", 
"http-body", "http-body-util", "jsonrpsee-types", @@ -3913,7 +4278,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4c51b7c290bb68ce3af2d029648148403863b982f138484a73f02a9dd52dbd7f" dependencies = [ "futures-util", - "http", + "http 1.4.0", "http-body", "http-body-util", "hyper", @@ -3939,7 +4304,7 @@ version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bc88ff4688e43cc3fa9883a8a95c6fa27aa2e76c96e610b737b6554d650d7fd5" dependencies = [ - "http", + "http 1.4.0", "serde", "serde_json", "thiserror 2.0.18", @@ -3963,7 +4328,7 @@ version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b6fceceeb05301cc4c065ab3bd2fa990d41ff4eb44e4ca1b30fa99c057c3e79" dependencies = [ - "http", + "http 1.4.0", "jsonrpsee-client-transport", "jsonrpsee-core", "jsonrpsee-types", @@ -4002,6 +4367,7 @@ dependencies = [ "aes-gcm", "anyhow", "base58", + "bincode", "bip39", "common", "hex", @@ -4023,6 +4389,8 @@ dependencies = [ "log", "nssa", "pyo3", + "serde", + "serde_json", ] [[package]] @@ -4327,18 +4695,401 @@ version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6d2cec3eae94f9f509c767b45932f1ada8350c4bdb85af2fcab4a3c14807981" +[[package]] +name = "libp2p" +version = "0.55.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b72dc443ddd0254cb49a794ed6b6728400ee446a0f7ab4a07d0209ee98de20e9" +dependencies = [ + "bytes", + "either", + "futures", + "futures-timer", + "getrandom 0.2.17", + "libp2p-allow-block-list", + "libp2p-autonat", + "libp2p-connection-limits", + "libp2p-core", + "libp2p-dns", + "libp2p-gossipsub", + "libp2p-identify", + "libp2p-identity", + "libp2p-kad", + "libp2p-mdns", + "libp2p-metrics", + "libp2p-quic", + "libp2p-swarm", + "libp2p-tcp", + "libp2p-upnp", + "multiaddr", + "pin-project", + "rw-stream-sink", + "thiserror 2.0.18", +] + +[[package]] +name = "libp2p-allow-block-list" +version 
= "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "38944b7cb981cc93f2f0fb411ff82d0e983bd226fbcc8d559639a3a73236568b" +dependencies = [ + "libp2p-core", + "libp2p-identity", + "libp2p-swarm", +] + +[[package]] +name = "libp2p-autonat" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e297bfc6cabb70c6180707f8fa05661b77ecb9cb67e8e8e1c469301358fa21d0" +dependencies = [ + "async-trait", + "asynchronous-codec", + "either", + "futures", + "futures-bounded", + "futures-timer", + "libp2p-core", + "libp2p-identity", + "libp2p-request-response", + "libp2p-swarm", + "quick-protobuf", + "quick-protobuf-codec", + "rand 0.8.5", + "rand_core 0.6.4", + "thiserror 2.0.18", + "tracing", + "web-time", +] + +[[package]] +name = "libp2p-connection-limits" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "efe9323175a17caa8a2ed4feaf8a548eeef5e0b72d03840a0eab4bcb0210ce1c" +dependencies = [ + "libp2p-core", + "libp2p-identity", + "libp2p-swarm", +] + +[[package]] +name = "libp2p-core" +version = "0.43.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "249128cd37a2199aff30a7675dffa51caf073b51aa612d2f544b19932b9aebca" +dependencies = [ + "either", + "fnv", + "futures", + "futures-timer", + "libp2p-identity", + "multiaddr", + "multihash", + "multistream-select", + "parking_lot", + "pin-project", + "quick-protobuf", + "rand 0.8.5", + "rw-stream-sink", + "thiserror 2.0.18", + "tracing", + "unsigned-varint 0.8.0", + "web-time", +] + +[[package]] +name = "libp2p-dns" +version = "0.43.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b780a1150214155b0ed1cdf09fbd2e1b0442604f9146a431d1b21d23eef7bd7" +dependencies = [ + "async-trait", + "futures", + "hickory-resolver", + "libp2p-core", + "libp2p-identity", + "parking_lot", + "smallvec", + "tracing", +] + +[[package]] +name = "libp2p-gossipsub" +version = 
"0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d558548fa3b5a8e9b66392f785921e363c57c05dcadfda4db0d41ae82d313e4a" +dependencies = [ + "async-channel", + "asynchronous-codec", + "base64 0.22.1", + "byteorder", + "bytes", + "either", + "fnv", + "futures", + "futures-timer", + "getrandom 0.2.17", + "hashlink 0.9.1", + "hex_fmt", + "libp2p-core", + "libp2p-identity", + "libp2p-swarm", + "prometheus-client", + "quick-protobuf", + "quick-protobuf-codec", + "rand 0.8.5", + "regex", + "serde", + "sha2", + "tracing", + "web-time", +] + +[[package]] +name = "libp2p-identify" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8c06862544f02d05d62780ff590cc25a75f5c2b9df38ec7a370dcae8bb873cf" +dependencies = [ + "asynchronous-codec", + "either", + "futures", + "futures-bounded", + "futures-timer", + "libp2p-core", + "libp2p-identity", + "libp2p-swarm", + "quick-protobuf", + "quick-protobuf-codec", + "smallvec", + "thiserror 2.0.18", + "tracing", +] + [[package]] name = "libp2p-identity" version = "0.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f0c7892c221730ba55f7196e98b0b8ba5e04b4155651736036628e9f73ed6fc3" dependencies = [ + "asn1_der", "bs58", + "ed25519-dalek", "hkdf", + "k256", "multihash", + "quick-protobuf", + "rand 0.8.5", + "serde", "sha2", "thiserror 2.0.18", "tracing", + "zeroize", +] + +[[package]] +name = "libp2p-kad" +version = "0.47.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2bab0466a27ebe955bcbc27328fae5429c5b48c915fd6174931414149802ec23" +dependencies = [ + "asynchronous-codec", + "bytes", + "either", + "fnv", + "futures", + "futures-bounded", + "futures-timer", + "libp2p-core", + "libp2p-identity", + "libp2p-swarm", + "quick-protobuf", + "quick-protobuf-codec", + "rand 0.8.5", + "serde", + "sha2", + "smallvec", + "thiserror 2.0.18", + "tracing", + "uint", + "web-time", +] + +[[package]] +name = 
"libp2p-mdns" +version = "0.47.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11d0ba095e1175d797540e16b62e7576846b883cb5046d4159086837b36846cc" +dependencies = [ + "futures", + "hickory-proto", + "if-watch", + "libp2p-core", + "libp2p-identity", + "libp2p-swarm", + "rand 0.8.5", + "smallvec", + "socket2 0.5.10", + "tokio", + "tracing", +] + +[[package]] +name = "libp2p-metrics" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2ce58c64292e87af624fcb86465e7dd8342e46a388d71e8fec0ab37ee789630a" +dependencies = [ + "futures", + "libp2p-core", + "libp2p-gossipsub", + "libp2p-identify", + "libp2p-identity", + "libp2p-kad", + "libp2p-swarm", + "pin-project", + "prometheus-client", + "web-time", +] + +[[package]] +name = "libp2p-quic" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41432a159b00424a0abaa2c80d786cddff81055ac24aa127e0cf375f7858d880" +dependencies = [ + "futures", + "futures-timer", + "if-watch", + "libp2p-core", + "libp2p-identity", + "libp2p-tls", + "quinn", + "rand 0.8.5", + "ring", + "rustls", + "socket2 0.5.10", + "thiserror 2.0.18", + "tokio", + "tracing", +] + +[[package]] +name = "libp2p-request-response" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "548fe44a80ff275d400f1b26b090d441d83ef73efabbeb6415f4ce37e5aed865" +dependencies = [ + "async-trait", + "futures", + "futures-bounded", + "libp2p-core", + "libp2p-identity", + "libp2p-swarm", + "rand 0.8.5", + "smallvec", + "tracing", +] + +[[package]] +name = "libp2p-stream" +version = "0.3.0-alpha" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "826716f1ee125895f1fb44911413cba023485b552ff96c7a2159bd037ac619bb" +dependencies = [ + "futures", + "libp2p-core", + "libp2p-identity", + "libp2p-swarm", + "rand 0.8.5", + "tracing", +] + +[[package]] +name = "libp2p-swarm" +version = "0.46.0" 
+source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "803399b4b6f68adb85e63ab573ac568154b193e9a640f03e0f2890eabbcb37f8" +dependencies = [ + "either", + "fnv", + "futures", + "futures-timer", + "libp2p-core", + "libp2p-identity", + "libp2p-swarm-derive", + "lru", + "multistream-select", + "once_cell", + "rand 0.8.5", + "smallvec", + "tokio", + "tracing", + "web-time", +] + +[[package]] +name = "libp2p-swarm-derive" +version = "0.35.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "206e0aa0ebe004d778d79fb0966aa0de996c19894e2c0605ba2f8524dd4443d8" +dependencies = [ + "heck", + "proc-macro2", + "quote", + "syn 2.0.117", +] + +[[package]] +name = "libp2p-tcp" +version = "0.43.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "65346fb4d36035b23fec4e7be4c320436ba53537ce9b6be1d1db1f70c905cad0" +dependencies = [ + "futures", + "futures-timer", + "if-watch", + "libc", + "libp2p-core", + "socket2 0.5.10", + "tokio", + "tracing", +] + +[[package]] +name = "libp2p-tls" +version = "0.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96ff65a82e35375cbc31ebb99cacbbf28cb6c4fefe26bf13756ddcf708d40080" +dependencies = [ + "futures", + "futures-rustls", + "libp2p-core", + "libp2p-identity", + "rcgen", + "ring", + "rustls", + "rustls-webpki", + "thiserror 2.0.18", + "x509-parser", + "yasna", +] + +[[package]] +name = "libp2p-upnp" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d457b9ecceb66e7199f049926fad447f1f17f040e8d29d690c086b4cab8ed14a" +dependencies = [ + "futures", + "futures-timer", + "igd-next 0.15.1", + "libp2p-core", + "libp2p-swarm", + "tokio", + "tracing", ] [[package]] @@ -4418,8 +5169,8 @@ checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" [[package]] name = "logos-blockchain-blend-crypto" -version = "0.2.1" -source = 
"git+https://github.com/logos-blockchain/logos-blockchain.git?rev=1da154c74b911318fb853d37261f8a05ffe513b4#1da154c74b911318fb853d37261f8a05ffe513b4" +version = "0.1.2" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=ee281a447d95a951752461ee0a6e88eb4a0f17cf#ee281a447d95a951752461ee0a6e88eb4a0f17cf" dependencies = [ "blake2", "logos-blockchain-groth16", @@ -4432,8 +5183,8 @@ dependencies = [ [[package]] name = "logos-blockchain-blend-message" -version = "0.2.1" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=1da154c74b911318fb853d37261f8a05ffe513b4#1da154c74b911318fb853d37261f8a05ffe513b4" +version = "0.1.2" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=ee281a447d95a951752461ee0a6e88eb4a0f17cf#ee281a447d95a951752461ee0a6e88eb4a0f17cf" dependencies = [ "blake2", "derivative", @@ -4455,8 +5206,8 @@ dependencies = [ [[package]] name = "logos-blockchain-blend-proofs" -version = "0.2.1" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=1da154c74b911318fb853d37261f8a05ffe513b4#1da154c74b911318fb853d37261f8a05ffe513b4" +version = "0.1.2" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=ee281a447d95a951752461ee0a6e88eb4a0f17cf#ee281a447d95a951752461ee0a6e88eb4a0f17cf" dependencies = [ "ed25519-dalek", "generic-array 1.3.5", @@ -4474,8 +5225,8 @@ dependencies = [ [[package]] name = "logos-blockchain-chain-broadcast-service" -version = "0.2.1" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=1da154c74b911318fb853d37261f8a05ffe513b4#1da154c74b911318fb853d37261f8a05ffe513b4" +version = "0.1.2" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=ee281a447d95a951752461ee0a6e88eb4a0f17cf#ee281a447d95a951752461ee0a6e88eb4a0f17cf" dependencies = [ "async-trait", "derivative", @@ -4490,11 +5241,12 @@ dependencies = [ [[package]] name = "logos-blockchain-chain-service" -version = "0.2.1" -source = 
"git+https://github.com/logos-blockchain/logos-blockchain.git?rev=1da154c74b911318fb853d37261f8a05ffe513b4#1da154c74b911318fb853d37261f8a05ffe513b4" +version = "0.1.2" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=ee281a447d95a951752461ee0a6e88eb4a0f17cf#ee281a447d95a951752461ee0a6e88eb4a0f17cf" dependencies = [ "async-trait", "bytes", + "derivative", "futures", "logos-blockchain-chain-broadcast-service", "logos-blockchain-core", @@ -4508,7 +5260,6 @@ dependencies = [ "logos-blockchain-time-service", "logos-blockchain-tracing", "logos-blockchain-utils", - "num-bigint 0.4.6", "overwatch", "serde", "serde_with", @@ -4521,8 +5272,8 @@ dependencies = [ [[package]] name = "logos-blockchain-circuits-prover" -version = "0.2.1" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=1da154c74b911318fb853d37261f8a05ffe513b4#1da154c74b911318fb853d37261f8a05ffe513b4" +version = "0.1.2" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=ee281a447d95a951752461ee0a6e88eb4a0f17cf#ee281a447d95a951752461ee0a6e88eb4a0f17cf" dependencies = [ "logos-blockchain-circuits-utils", "tempfile", @@ -4530,16 +5281,16 @@ dependencies = [ [[package]] name = "logos-blockchain-circuits-utils" -version = "0.2.1" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=1da154c74b911318fb853d37261f8a05ffe513b4#1da154c74b911318fb853d37261f8a05ffe513b4" +version = "0.1.2" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=ee281a447d95a951752461ee0a6e88eb4a0f17cf#ee281a447d95a951752461ee0a6e88eb4a0f17cf" dependencies = [ "dirs", ] [[package]] name = "logos-blockchain-common-http-client" -version = "0.2.1" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=1da154c74b911318fb853d37261f8a05ffe513b4#1da154c74b911318fb853d37261f8a05ffe513b4" +version = "0.1.2" +source = 
"git+https://github.com/logos-blockchain/logos-blockchain.git?rev=ee281a447d95a951752461ee0a6e88eb4a0f17cf#ee281a447d95a951752461ee0a6e88eb4a0f17cf" dependencies = [ "futures", "hex", @@ -4558,8 +5309,8 @@ dependencies = [ [[package]] name = "logos-blockchain-core" -version = "0.2.1" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=1da154c74b911318fb853d37261f8a05ffe513b4#1da154c74b911318fb853d37261f8a05ffe513b4" +version = "0.1.2" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=ee281a447d95a951752461ee0a6e88eb4a0f17cf#ee281a447d95a951752461ee0a6e88eb4a0f17cf" dependencies = [ "ark-ff 0.4.2", "bincode", @@ -4572,6 +5323,7 @@ dependencies = [ "logos-blockchain-cryptarchia-engine", "logos-blockchain-groth16", "logos-blockchain-key-management-system-keys", + "logos-blockchain-mmr", "logos-blockchain-poc", "logos-blockchain-pol", "logos-blockchain-poseidon2", @@ -4580,6 +5332,7 @@ dependencies = [ "multiaddr", "nom 8.0.0", "num-bigint 0.4.6", + "rpds", "serde", "strum", "thiserror 1.0.69", @@ -4588,10 +5341,9 @@ dependencies = [ [[package]] name = "logos-blockchain-cryptarchia-engine" -version = "0.2.1" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=1da154c74b911318fb853d37261f8a05ffe513b4#1da154c74b911318fb853d37261f8a05ffe513b4" +version = "0.1.2" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=ee281a447d95a951752461ee0a6e88eb4a0f17cf#ee281a447d95a951752461ee0a6e88eb4a0f17cf" dependencies = [ - "cfg_eval", "logos-blockchain-pol", "logos-blockchain-utils", "serde", @@ -4604,11 +5356,13 @@ dependencies = [ [[package]] name = "logos-blockchain-cryptarchia-sync" -version = "0.2.1" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=1da154c74b911318fb853d37261f8a05ffe513b4#1da154c74b911318fb853d37261f8a05ffe513b4" +version = "0.1.2" +source = 
"git+https://github.com/logos-blockchain/logos-blockchain.git?rev=ee281a447d95a951752461ee0a6e88eb4a0f17cf#ee281a447d95a951752461ee0a6e88eb4a0f17cf" dependencies = [ "bytes", "futures", + "libp2p", + "libp2p-stream", "logos-blockchain-core", "logos-blockchain-cryptarchia-engine", "rand 0.8.5", @@ -4621,8 +5375,8 @@ dependencies = [ [[package]] name = "logos-blockchain-groth16" -version = "0.2.1" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=1da154c74b911318fb853d37261f8a05ffe513b4#1da154c74b911318fb853d37261f8a05ffe513b4" +version = "0.1.2" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=ee281a447d95a951752461ee0a6e88eb4a0f17cf#ee281a447d95a951752461ee0a6e88eb4a0f17cf" dependencies = [ "ark-bn254 0.4.0", "ark-ec 0.4.2", @@ -4639,8 +5393,8 @@ dependencies = [ [[package]] name = "logos-blockchain-http-api-common" -version = "0.2.1" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=1da154c74b911318fb853d37261f8a05ffe513b4#1da154c74b911318fb853d37261f8a05ffe513b4" +version = "0.1.2" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=ee281a447d95a951752461ee0a6e88eb4a0f17cf#ee281a447d95a951752461ee0a6e88eb4a0f17cf" dependencies = [ "axum 0.7.9", "logos-blockchain-core", @@ -4654,8 +5408,8 @@ dependencies = [ [[package]] name = "logos-blockchain-key-management-system-keys" -version = "0.2.1" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=1da154c74b911318fb853d37261f8a05ffe513b4#1da154c74b911318fb853d37261f8a05ffe513b4" +version = "0.1.2" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=ee281a447d95a951752461ee0a6e88eb4a0f17cf#ee281a447d95a951752461ee0a6e88eb4a0f17cf" dependencies = [ "async-trait", "bytes", @@ -4680,8 +5434,8 @@ dependencies = [ [[package]] name = "logos-blockchain-key-management-system-macros" -version = "0.2.1" -source = 
"git+https://github.com/logos-blockchain/logos-blockchain.git?rev=1da154c74b911318fb853d37261f8a05ffe513b4#1da154c74b911318fb853d37261f8a05ffe513b4" +version = "0.1.2" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=ee281a447d95a951752461ee0a6e88eb4a0f17cf#ee281a447d95a951752461ee0a6e88eb4a0f17cf" dependencies = [ "proc-macro2", "quote", @@ -4690,8 +5444,8 @@ dependencies = [ [[package]] name = "logos-blockchain-key-management-system-operators" -version = "0.2.1" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=1da154c74b911318fb853d37261f8a05ffe513b4#1da154c74b911318fb853d37261f8a05ffe513b4" +version = "0.1.2" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=ee281a447d95a951752461ee0a6e88eb4a0f17cf#ee281a447d95a951752461ee0a6e88eb4a0f17cf" dependencies = [ "async-trait", "logos-blockchain-blend-proofs", @@ -4706,8 +5460,8 @@ dependencies = [ [[package]] name = "logos-blockchain-key-management-system-service" -version = "0.2.1" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=1da154c74b911318fb853d37261f8a05ffe513b4#1da154c74b911318fb853d37261f8a05ffe513b4" +version = "0.1.2" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=ee281a447d95a951752461ee0a6e88eb4a0f17cf#ee281a447d95a951752461ee0a6e88eb4a0f17cf" dependencies = [ "async-trait", "log", @@ -4723,8 +5477,8 @@ dependencies = [ [[package]] name = "logos-blockchain-ledger" -version = "0.2.1" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=1da154c74b911318fb853d37261f8a05ffe513b4#1da154c74b911318fb853d37261f8a05ffe513b4" +version = "0.1.2" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=ee281a447d95a951752461ee0a6e88eb4a0f17cf#ee281a447d95a951752461ee0a6e88eb4a0f17cf" dependencies = [ "derivative", "logos-blockchain-blend-crypto", @@ -4734,6 +5488,7 @@ dependencies = [ "logos-blockchain-cryptarchia-engine", 
"logos-blockchain-groth16", "logos-blockchain-key-management-system-keys", + "logos-blockchain-mmr", "logos-blockchain-pol", "logos-blockchain-utils", "logos-blockchain-utxotree", @@ -4746,17 +5501,61 @@ dependencies = [ "tracing", ] +[[package]] +name = "logos-blockchain-libp2p" +version = "0.1.2" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=ee281a447d95a951752461ee0a6e88eb4a0f17cf#ee281a447d95a951752461ee0a6e88eb4a0f17cf" +dependencies = [ + "async-trait", + "backon", + "blake2", + "either", + "futures", + "hex", + "igd-next 0.16.2", + "libp2p", + "logos-blockchain-cryptarchia-sync", + "logos-blockchain-utils", + "multiaddr", + "natpmp", + "netdev", + "num_enum", + "rand 0.8.5", + "serde", + "serde_with", + "thiserror 1.0.69", + "tokio", + "tracing", + "zerocopy", +] + +[[package]] +name = "logos-blockchain-mmr" +version = "0.1.2" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=ee281a447d95a951752461ee0a6e88eb4a0f17cf#ee281a447d95a951752461ee0a6e88eb4a0f17cf" +dependencies = [ + "ark-ff 0.4.2", + "logos-blockchain-groth16", + "logos-blockchain-poseidon2", + "rpds", + "serde", + "thiserror 2.0.18", +] + [[package]] name = "logos-blockchain-network-service" -version = "0.2.1" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=1da154c74b911318fb853d37261f8a05ffe513b4#1da154c74b911318fb853d37261f8a05ffe513b4" +version = "0.1.2" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=ee281a447d95a951752461ee0a6e88eb4a0f17cf#ee281a447d95a951752461ee0a6e88eb4a0f17cf" dependencies = [ "async-trait", "futures", "logos-blockchain-core", "logos-blockchain-cryptarchia-sync", + "logos-blockchain-libp2p", "logos-blockchain-tracing", "overwatch", + "rand 0.8.5", + "rand_chacha 0.3.1", "serde", "tokio", "tokio-stream", @@ -4765,8 +5564,8 @@ dependencies = [ [[package]] name = "logos-blockchain-poc" -version = "0.2.1" -source = 
"git+https://github.com/logos-blockchain/logos-blockchain.git?rev=1da154c74b911318fb853d37261f8a05ffe513b4#1da154c74b911318fb853d37261f8a05ffe513b4" +version = "0.1.2" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=ee281a447d95a951752461ee0a6e88eb4a0f17cf#ee281a447d95a951752461ee0a6e88eb4a0f17cf" dependencies = [ "logos-blockchain-circuits-prover", "logos-blockchain-circuits-utils", @@ -4781,8 +5580,8 @@ dependencies = [ [[package]] name = "logos-blockchain-pol" -version = "0.2.1" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=1da154c74b911318fb853d37261f8a05ffe513b4#1da154c74b911318fb853d37261f8a05ffe513b4" +version = "0.1.2" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=ee281a447d95a951752461ee0a6e88eb4a0f17cf#ee281a447d95a951752461ee0a6e88eb4a0f17cf" dependencies = [ "astro-float", "logos-blockchain-circuits-prover", @@ -4800,8 +5599,8 @@ dependencies = [ [[package]] name = "logos-blockchain-poq" -version = "0.2.1" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=1da154c74b911318fb853d37261f8a05ffe513b4#1da154c74b911318fb853d37261f8a05ffe513b4" +version = "0.1.2" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=ee281a447d95a951752461ee0a6e88eb4a0f17cf#ee281a447d95a951752461ee0a6e88eb4a0f17cf" dependencies = [ "logos-blockchain-circuits-prover", "logos-blockchain-circuits-utils", @@ -4817,8 +5616,8 @@ dependencies = [ [[package]] name = "logos-blockchain-poseidon2" -version = "0.2.1" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=1da154c74b911318fb853d37261f8a05ffe513b4#1da154c74b911318fb853d37261f8a05ffe513b4" +version = "0.1.2" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=ee281a447d95a951752461ee0a6e88eb4a0f17cf#ee281a447d95a951752461ee0a6e88eb4a0f17cf" dependencies = [ "ark-bn254 0.4.0", "ark-ff 0.4.2", @@ -4828,8 +5627,8 @@ dependencies = [ [[package]] name = 
"logos-blockchain-services-utils" -version = "0.2.1" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=1da154c74b911318fb853d37261f8a05ffe513b4#1da154c74b911318fb853d37261f8a05ffe513b4" +version = "0.1.2" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=ee281a447d95a951752461ee0a6e88eb4a0f17cf#ee281a447d95a951752461ee0a6e88eb4a0f17cf" dependencies = [ "async-trait", "futures", @@ -4843,8 +5642,8 @@ dependencies = [ [[package]] name = "logos-blockchain-storage-service" -version = "0.2.1" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=1da154c74b911318fb853d37261f8a05ffe513b4#1da154c74b911318fb853d37261f8a05ffe513b4" +version = "0.1.2" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=ee281a447d95a951752461ee0a6e88eb4a0f17cf#ee281a447d95a951752461ee0a6e88eb4a0f17cf" dependencies = [ "async-trait", "bytes", @@ -4861,15 +5660,18 @@ dependencies = [ [[package]] name = "logos-blockchain-time-service" -version = "0.2.1" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=1da154c74b911318fb853d37261f8a05ffe513b4#1da154c74b911318fb853d37261f8a05ffe513b4" +version = "0.1.2" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=ee281a447d95a951752461ee0a6e88eb4a0f17cf#ee281a447d95a951752461ee0a6e88eb4a0f17cf" dependencies = [ "async-trait", "futures", "log", "logos-blockchain-cryptarchia-engine", "logos-blockchain-tracing", + "logos-blockchain-utils", "overwatch", + "serde", + "serde_with", "sntpc", "thiserror 2.0.18", "time", @@ -4880,8 +5682,8 @@ dependencies = [ [[package]] name = "logos-blockchain-tracing" -version = "0.2.1" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=1da154c74b911318fb853d37261f8a05ffe513b4#1da154c74b911318fb853d37261f8a05ffe513b4" +version = "0.1.2" +source = 
"git+https://github.com/logos-blockchain/logos-blockchain.git?rev=ee281a447d95a951752461ee0a6e88eb4a0f17cf#ee281a447d95a951752461ee0a6e88eb4a0f17cf" dependencies = [ "opentelemetry", "opentelemetry-appender-tracing", @@ -4892,6 +5694,7 @@ dependencies = [ "rand 0.8.5", "serde", "tokio", + "tonic", "tracing", "tracing-appender", "tracing-gelf", @@ -4903,8 +5706,8 @@ dependencies = [ [[package]] name = "logos-blockchain-utils" -version = "0.2.1" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=1da154c74b911318fb853d37261f8a05ffe513b4#1da154c74b911318fb853d37261f8a05ffe513b4" +version = "0.1.2" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=ee281a447d95a951752461ee0a6e88eb4a0f17cf#ee281a447d95a951752461ee0a6e88eb4a0f17cf" dependencies = [ "async-trait", "blake2", @@ -4920,8 +5723,8 @@ dependencies = [ [[package]] name = "logos-blockchain-utxotree" -version = "0.2.1" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=1da154c74b911318fb853d37261f8a05ffe513b4#1da154c74b911318fb853d37261f8a05ffe513b4" +version = "0.1.2" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=ee281a447d95a951752461ee0a6e88eb4a0f17cf#ee281a447d95a951752461ee0a6e88eb4a0f17cf" dependencies = [ "ark-ff 0.4.2", "logos-blockchain-groth16", @@ -4934,16 +5737,16 @@ dependencies = [ [[package]] name = "logos-blockchain-witness-generator" -version = "0.2.1" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=1da154c74b911318fb853d37261f8a05ffe513b4#1da154c74b911318fb853d37261f8a05ffe513b4" +version = "0.1.2" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=ee281a447d95a951752461ee0a6e88eb4a0f17cf#ee281a447d95a951752461ee0a6e88eb4a0f17cf" dependencies = [ "tempfile", ] [[package]] name = "logos-blockchain-zksign" -version = "0.2.1" -source = 
"git+https://github.com/logos-blockchain/logos-blockchain.git?rev=1da154c74b911318fb853d37261f8a05ffe513b4#1da154c74b911318fb853d37261f8a05ffe513b4" +version = "0.1.2" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=ee281a447d95a951752461ee0a6e88eb4a0f17cf#ee281a447d95a951752461ee0a6e88eb4a0f17cf" dependencies = [ "logos-blockchain-circuits-prover", "logos-blockchain-circuits-utils", @@ -4957,6 +5760,26 @@ dependencies = [ "tracing", ] +[[package]] +name = "logos-blockchain-zone-sdk" +version = "0.1.2" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git?rev=ee281a447d95a951752461ee0a6e88eb4a0f17cf#ee281a447d95a951752461ee0a6e88eb4a0f17cf" +dependencies = [ + "async-trait", + "futures", + "logos-blockchain-common-http-client", + "logos-blockchain-core", + "logos-blockchain-groth16", + "logos-blockchain-key-management-system-service", + "rand 0.8.5", + "reqwest", + "rpds", + "serde", + "thiserror 2.0.18", + "tokio", + "tracing", +] + [[package]] name = "loki-api" version = "0.1.3" @@ -4967,6 +5790,15 @@ dependencies = [ "prost-types 0.13.5", ] +[[package]] +name = "lru" +version = "0.12.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "234cf4f4a04dc1f57e24b96cc0cd600cf2af460d4161ac5ecdd0af8e1f3b2a38" +dependencies = [ + "hashbrown 0.15.5", +] + [[package]] name = "lru-slab" version = "0.1.2" @@ -5180,7 +6012,7 @@ dependencies = [ "bitflags 2.11.0", "block", "core-graphics-types", - "foreign-types 0.5.0", + "foreign-types", "log", "objc", "paste", @@ -5229,6 +6061,23 @@ dependencies = [ "windows-sys 0.61.2", ] +[[package]] +name = "moka" +version = "0.12.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "957228ad12042ee839f93c8f257b62b4c0ab5eaae1d4fa60de53b27c9d7c5046" +dependencies = [ + "crossbeam-channel", + "crossbeam-epoch", + "crossbeam-utils", + "equivalent", + "parking_lot", + "portable-atomic", + "smallvec", + "tagptr", + "uuid", +] + [[package]] 
name = "multer" version = "3.1.0" @@ -5238,7 +6087,7 @@ dependencies = [ "bytes", "encoding_rs", "futures-util", - "http", + "http 1.4.0", "httparse", "memchr", "mime", @@ -5261,7 +6110,8 @@ dependencies = [ "percent-encoding", "serde", "static_assertions", - "unsigned-varint", + "unsigned-varint 0.8.0", + "url", ] [[package]] @@ -5283,24 +6133,34 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "89ace881e3f514092ce9efbcb8f413d0ad9763860b828981c2de51ddc666936c" dependencies = [ "no_std_io2", - "unsigned-varint", + "serde", + "unsigned-varint 0.8.0", ] [[package]] -name = "native-tls" -version = "0.2.18" +name = "multistream-select" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "465500e14ea162429d264d44189adc38b199b62b1c21eea9f69e4b73cb03bbf2" +checksum = "ea0df8e5eec2298a62b326ee4f0d7fe1a6b90a09dfcf9df37b38f947a8c42f19" dependencies = [ - "libc", + "bytes", + "futures", "log", - "openssl", - "openssl-probe", - "openssl-sys", - "schannel", - "security-framework", - "security-framework-sys", - "tempfile", + "pin-project", + "smallvec", + "unsigned-varint 0.7.2", +] + +[[package]] +name = "natpmp" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77366fa8ce34e2e1322dd97da65f11a62f451bd3daae8be6993c00800f61dd07" +dependencies = [ + "async-trait", + "cc", + "netdev", + "tokio", ] [[package]] @@ -5319,6 +6179,108 @@ dependencies = [ "rayon", ] +[[package]] +name = "netdev" +version = "0.31.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f901362e84cd407be6f8cd9d3a46bccf09136b095792785401ea7d283c79b91d" +dependencies = [ + "dlopen2", + "ipnet", + "libc", + "netlink-packet-core 0.7.0", + "netlink-packet-route 0.17.1", + "netlink-sys", + "once_cell", + "system-configuration 0.6.1", + "windows-sys 0.52.0", +] + +[[package]] +name = "netlink-packet-core" +version = "0.7.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "72724faf704479d67b388da142b186f916188505e7e0b26719019c525882eda4" +dependencies = [ + "anyhow", + "byteorder", + "netlink-packet-utils", +] + +[[package]] +name = "netlink-packet-core" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3463cbb78394cb0141e2c926b93fc2197e473394b761986eca3b9da2c63ae0f4" +dependencies = [ + "paste", +] + +[[package]] +name = "netlink-packet-route" +version = "0.17.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "053998cea5a306971f88580d0829e90f270f940befd7cf928da179d4187a5a66" +dependencies = [ + "anyhow", + "bitflags 1.3.2", + "byteorder", + "libc", + "netlink-packet-core 0.7.0", + "netlink-packet-utils", +] + +[[package]] +name = "netlink-packet-route" +version = "0.28.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ce3636fa715e988114552619582b530481fd5ef176a1e5c1bf024077c2c9445" +dependencies = [ + "bitflags 2.11.0", + "libc", + "log", + "netlink-packet-core 0.8.1", +] + +[[package]] +name = "netlink-packet-utils" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ede8a08c71ad5a95cdd0e4e52facd37190977039a4704eb82a283f713747d34" +dependencies = [ + "anyhow", + "byteorder", + "paste", + "thiserror 1.0.69", +] + +[[package]] +name = "netlink-proto" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b65d130ee111430e47eed7896ea43ca693c387f097dd97376bffafbf25812128" +dependencies = [ + "bytes", + "futures", + "log", + "netlink-packet-core 0.8.1", + "netlink-sys", + "thiserror 2.0.18", +] + +[[package]] +name = "netlink-sys" +version = "0.8.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd6c30ed10fa69cc491d491b85cc971f6bdeb8e7367b7cde2ee6cc878d583fae" +dependencies = [ + "bytes", + "futures-util", + "libc", + "log", + "tokio", +] 
+ [[package]] name = "next_tuple" version = "0.1.0" @@ -5343,6 +6305,18 @@ dependencies = [ "zeroize", ] +[[package]] +name = "nix" +version = "0.30.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "74523f3a35e05aba87a1d978330aef40f67b0304ac79c1c00b294c9830543db6" +dependencies = [ + "bitflags 2.11.0", + "cfg-if", + "cfg_aliases", + "libc", +] + [[package]] name = "no_std_io2" version = "0.8.1" @@ -5382,9 +6356,11 @@ name = "nssa" version = "0.1.0" dependencies = [ "anyhow", + "authenticated_transfer_core", "borsh", "clock_core", "env_logger", + "faucet_core", "hex", "hex-literal 1.1.0", "k256", @@ -5607,6 +6583,15 @@ dependencies = [ "thiserror 2.0.18", ] +[[package]] +name = "oid-registry" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "12f40cff3dde1b6087cc5d5f5d4d65712f34016a03ed60e9c08dcc392736b5b7" +dependencies = [ + "asn1-rs", +] + [[package]] name = "once_cell" version = "1.21.3" @@ -5625,50 +6610,12 @@ version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" -[[package]] -name = "openssl" -version = "0.10.75" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08838db121398ad17ab8531ce9de97b244589089e290a384c900cb9ff7434328" -dependencies = [ - "bitflags 2.11.0", - "cfg-if", - "foreign-types 0.3.2", - "libc", - "once_cell", - "openssl-macros", - "openssl-sys", -] - -[[package]] -name = "openssl-macros" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.117", -] - [[package]] name = "openssl-probe" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7c87def4c32ab89d880effc9e097653c8da5d6ef28e6b539d313baaacfbafcbe" -[[package]] -name = 
"openssl-sys" -version = "0.9.111" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82cab2d520aa75e3c58898289429321eb788c3106963d0dc886ec7a5f4adc321" -dependencies = [ - "cc", - "libc", - "pkg-config", - "vcpkg", -] - [[package]] name = "opentelemetry" version = "0.31.0" @@ -5702,7 +6649,7 @@ checksum = "d7a6d09a73194e6b66df7c8f1b680f156d916a1a942abf2de06823dd02b7855d" dependencies = [ "async-trait", "bytes", - "http", + "http 1.4.0", "opentelemetry", "reqwest", ] @@ -5713,7 +6660,7 @@ version = "0.31.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1f69cd6acbb9af919df949cd1ec9e5e7fdc2ef15d234b6b795aaa525cc02f71f" dependencies = [ - "http", + "http 1.4.0", "opentelemetry", "opentelemetry-http", "opentelemetry-proto", @@ -5877,6 +6824,16 @@ version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "df94ce210e5bc13cb6651479fa48d14f601d9858cfe0467f43ae157023b938d3" +[[package]] +name = "pem" +version = "3.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1d30c53c26bc5b31a98cd02d20f25a7c8567146caf63ed593a9d87b2775291be" +dependencies = [ + "base64 0.22.1", + "serde_core", +] + [[package]] name = "pem-rfc7468" version = "0.7.0" @@ -5957,6 +6914,20 @@ version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4596b6d070b27117e987119b4dac604f3c58cfb0b191112e24771b2faeac1a6" +[[package]] +name = "polling" +version = "3.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d0e4f59085d47d8241c88ead0f274e8a0cb551f3625263c05eb8dd897c34218" +dependencies = [ + "cfg-if", + "concurrent-queue", + "hermit-abi", + "pin-project-lite", + "rustix", + "windows-sys 0.61.2", +] + [[package]] name = "polyval" version = "0.6.2" @@ -6123,12 +7094,38 @@ dependencies = [ "amm_program", "ata_core", "ata_program", + "authenticated_transfer_core", "clock_core", + "faucet_core", "nssa_core", 
"risc0-zkvm", "serde", "token_core", "token_program", + "vault_core", +] + +[[package]] +name = "prometheus-client" +version = "0.22.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "504ee9ff529add891127c4827eb481bd69dc0ebc72e9a682e187db4caa60c3ca" +dependencies = [ + "dtoa", + "itoa", + "parking_lot", + "prometheus-client-derive-encode", +] + +[[package]] +name = "prometheus-client-derive-encode" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "440f724eba9f6996b75d63681b0a92b06947f1457076d503a4d2e2c8f56442b8" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", ] [[package]] @@ -6286,6 +7283,28 @@ dependencies = [ "syn 2.0.117", ] +[[package]] +name = "quick-protobuf" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d6da84cc204722a989e01ba2f6e1e276e190f22263d0cb6ce8526fcdb0d2e1f" +dependencies = [ + "byteorder", +] + +[[package]] +name = "quick-protobuf-codec" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "15a0580ab32b169745d7a39db2ba969226ca16738931be152a3209b409de2474" +dependencies = [ + "asynchronous-codec", + "bytes", + "quick-protobuf", + "thiserror 1.0.69", + "unsigned-varint 0.8.0", +] + [[package]] name = "quinn" version = "0.11.9" @@ -6294,12 +7313,13 @@ checksum = "b9e20a958963c291dc322d98411f541009df2ced7b5a4f2bd52337638cfccf20" dependencies = [ "bytes", "cfg_aliases", + "futures-io", "pin-project-lite", "quinn-proto", "quinn-udp", "rustc-hash", "rustls", - "socket2", + "socket2 0.6.3", "thiserror 2.0.18", "tokio", "tracing", @@ -6336,7 +7356,7 @@ dependencies = [ "cfg_aliases", "libc", "once_cell", - "socket2", + "socket2 0.6.3", "tracing", "windows-sys 0.59.0", ] @@ -6411,6 +7431,17 @@ dependencies = [ "rand_core 0.9.5", ] +[[package]] +name = "rand" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"d2e8e8bcc7961af1fdac401278c6a831614941f6164ee3bf4ce61b7edb162207" +dependencies = [ + "chacha20", + "getrandom 0.4.2", + "rand_core 0.10.1", +] + [[package]] name = "rand_chacha" version = "0.3.1" @@ -6449,6 +7480,12 @@ dependencies = [ "getrandom 0.3.4", ] +[[package]] +name = "rand_core" +version = "0.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "63b8176103e19a2643978565ca18b50549f6101881c443590420e4dc998a3c69" + [[package]] name = "rand_xorshift" version = "0.4.0" @@ -6484,6 +7521,19 @@ dependencies = [ "crossbeam-utils", ] +[[package]] +name = "rcgen" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75e669e5202259b5314d1ea5397316ad400819437857b90861765f24c4cf80a2" +dependencies = [ + "pem", + "ring", + "rustls-pki-types", + "time", + "yasna", +] + [[package]] name = "reactive_graph" version = "0.2.13" @@ -6624,22 +7674,18 @@ checksum = "eddd3ca559203180a307f12d114c268abf583f59b03cb906fd0b3ff8646c1147" dependencies = [ "base64 0.22.1", "bytes", - "encoding_rs", "futures-channel", "futures-core", "futures-util", "h2", - "http", + "http 1.4.0", "http-body", "http-body-util", "hyper", "hyper-rustls", - "hyper-tls", "hyper-util", "js-sys", "log", - "mime", - "native-tls", "percent-encoding", "pin-project-lite", "quinn", @@ -6650,7 +7696,6 @@ dependencies = [ "serde_urlencoded", "sync_wrapper", "tokio", - "tokio-native-tls", "tokio-rustls", "tokio-util", "tower", @@ -6664,6 +7709,12 @@ dependencies = [ "webpki-roots", ] +[[package]] +name = "resolv-conf" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e061d1b48cb8d38042de4ae0a7a6401009d6143dc80d2e2d6f31f0bdd6470c7" + [[package]] name = "rfc6979" version = "0.4.0" @@ -7133,6 +8184,24 @@ dependencies = [ "thiserror 2.0.18", ] +[[package]] +name = "rtnetlink" +version = "0.20.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"4b960d5d873a75b5be9761b1e73b146f52dddcd27bac75263f40fba686d4d7b5" +dependencies = [ + "futures-channel", + "futures-util", + "log", + "netlink-packet-core 0.8.1", + "netlink-packet-route 0.28.0", + "netlink-proto", + "netlink-sys", + "nix", + "thiserror 1.0.69", + "tokio", +] + [[package]] name = "rtoolbox" version = "0.0.5" @@ -7186,6 +8255,15 @@ dependencies = [ "semver", ] +[[package]] +name = "rusticata-macros" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "faf0c4a6ece9950b9abdb62b1cfcf2a68b3b67a10ba445b3bb85be2a293d0632" +dependencies = [ + "nom 7.1.3", +] + [[package]] name = "rustix" version = "1.1.4" @@ -7289,6 +8367,17 @@ dependencies = [ "twox-hash", ] +[[package]] +name = "rw-stream-sink" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d8c9026ff5d2f23da5e45bbc283f156383001bfb09c4e44256d02c1a685fe9a1" +dependencies = [ + "futures", + "pin-project", + "static_assertions", +] + [[package]] name = "ryu" version = "1.0.23" @@ -7452,17 +8541,17 @@ name = "sequencer_core" version = "0.1.0" dependencies = [ "anyhow", - "bedrock_client", "borsh", "bytesize", "chrono", "common", + "faucet_core", "futures", "humantime-serde", - "jsonrpsee", "log", "logos-blockchain-core", "logos-blockchain-key-management-system-service", + "logos-blockchain-zone-sdk", "mempool", "nssa", "nssa_core", @@ -7475,6 +8564,7 @@ dependencies = [ "testnet_initial_state", "tokio", "url", + "vault_core", ] [[package]] @@ -7488,7 +8578,6 @@ dependencies = [ "common", "env_logger", "futures", - "indexer_service_rpc", "jsonrpsee", "log", "mempool", @@ -7719,7 +8808,7 @@ dependencies = [ "const_format", "futures", "gloo-net", - "http", + "http 1.4.0", "http-body-util", "hyper", "inventory", @@ -7871,6 +8960,16 @@ dependencies = [ "tokio", ] +[[package]] +name = "socket2" +version = "0.5.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"e22376abed350d73dd1cd119b57ffccad95b4e585a7cda43e286245ce23c0678" +dependencies = [ + "libc", + "windows-sys 0.52.0", +] + [[package]] name = "socket2" version = "0.6.3" @@ -7890,7 +8989,7 @@ dependencies = [ "base64 0.22.1", "bytes", "futures", - "http", + "http 1.4.0", "httparse", "log", "rand 0.8.5", @@ -8060,6 +9159,17 @@ dependencies = [ "syn 2.0.117", ] +[[package]] +name = "system-configuration" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b" +dependencies = [ + "bitflags 2.11.0", + "core-foundation 0.9.4", + "system-configuration-sys", +] + [[package]] name = "system-configuration" version = "0.7.0" @@ -8113,6 +9223,12 @@ dependencies = [ "web-sys", ] +[[package]] +name = "tagptr" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b2093cf4c8eb1e67749a6762251bc9cd836b6fc171623bd0a9d324d37af2417" + [[package]] name = "tap" version = "1.0.1" @@ -8182,6 +9298,7 @@ dependencies = [ name = "test_programs" version = "0.1.0" dependencies = [ + "authenticated_transfer_core", "clock_core", "nssa_core", "risc0-zkvm", @@ -8190,9 +9307,9 @@ dependencies = [ [[package]] name = "testcontainers" -version = "0.27.2" +version = "0.27.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bd36b06a2a6c0c3c81a83be1ab05fe86460d054d4d51bf513bc56b3e15bdc22" +checksum = "bfd5785b5483672915ed5fe3cddf9f546802779fc1eceff0a6fb7321fac81c1e" dependencies = [ "astral-tokio-tar", "async-trait", @@ -8204,7 +9321,7 @@ dependencies = [ "etcetera", "ferroid", "futures", - "http", + "http 1.4.0", "itertools 0.14.0", "log", "memchr", @@ -8384,7 +9501,7 @@ dependencies = [ "parking_lot", "pin-project-lite", "signal-hook-registry", - "socket2", + "socket2 0.6.3", "tokio-macros", "windows-sys 0.61.2", ] @@ -8400,27 +9517,6 @@ dependencies = [ "syn 2.0.117", ] -[[package]] -name = "tokio-native-tls" -version = 
"0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" -dependencies = [ - "native-tls", - "tokio", -] - -[[package]] -name = "tokio-retry" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f57eb36ecbe0fc510036adff84824dd3c24bb781e21bfa67b69d556aa85214f" -dependencies = [ - "pin-project", - "rand 0.8.5", - "tokio", -] - [[package]] name = "tokio-rustls" version = "0.26.4" @@ -8581,7 +9677,7 @@ dependencies = [ "base64 0.22.1", "bytes", "h2", - "http", + "http 1.4.0", "http-body", "http-body-util", "hyper", @@ -8589,7 +9685,7 @@ dependencies = [ "hyper-util", "percent-encoding", "pin-project", - "socket2", + "socket2 0.6.3", "sync_wrapper", "tokio", "tokio-stream", @@ -8639,7 +9735,7 @@ dependencies = [ "bytes", "futures-core", "futures-util", - "http", + "http 1.4.0", "http-body", "http-body-util", "http-range-header", @@ -8847,7 +9943,7 @@ checksum = "8628dcc84e5a09eb3d8423d6cb682965dea9133204e8fb3efee74c2a0c259442" dependencies = [ "bytes", "data-encoding", - "http", + "http 1.4.0", "httparse", "log", "rand 0.9.3", @@ -8928,6 +10024,18 @@ dependencies = [ "syn 2.0.117", ] +[[package]] +name = "uint" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "909988d098b2f738727b161a106cfc7cab00c539c2687a8836f8e565976fb53e" +dependencies = [ + "byteorder", + "crunchy", + "hex", + "static_assertions", +] + [[package]] name = "unarray" version = "0.1.4" @@ -9001,6 +10109,12 @@ version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861" +[[package]] +name = "unsigned-varint" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6889a77d49f1f013504cec6bf97a2c730394adedaeb1deb5ea08949a50541105" + [[package]] name = "unsigned-varint" version = 
"0.8.0" @@ -9035,7 +10149,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d81f9efa9df032be5934a46a068815a10a042b494b6a58cb0a1a97bb5467ed6f" dependencies = [ "base64 0.22.1", - "http", + "http 1.4.0", "httparse", "log", ] @@ -9100,6 +10214,15 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" +[[package]] +name = "vault_core" +version = "0.1.0" +dependencies = [ + "nssa_core", + "risc0-zkvm", + "serde", +] + [[package]] name = "vcpkg" version = "0.2.15" @@ -9130,10 +10253,13 @@ dependencies = [ "anyhow", "async-stream", "ata_core", + "authenticated_transfer_core", "base58", + "bincode", "bip39", "clap", "common", + "derive_more", "env_logger", "futures", "hex", @@ -9154,6 +10280,7 @@ dependencies = [ "serde", "serde_json", "sha2", + "tempfile", "testnet_initial_state", "thiserror 2.0.18", "token_core", @@ -9167,9 +10294,11 @@ name = "wallet-ffi" version = "0.1.0" dependencies = [ "cbindgen", + "key_protocol", "nssa", "nssa_core", "sequencer_service_rpc", + "serde_json", "tempfile", "tokio", "wallet", @@ -9406,6 +10535,12 @@ dependencies = [ "safe_arch", ] +[[package]] +name = "widestring" +version = "1.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72069c3113ab32ab29e5584db3c6ec55d416895e60715417b5b883a357c3e471" + [[package]] name = "winapi" version = "0.3.9" @@ -9437,6 +10572,27 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" +[[package]] +name = "windows" +version = "0.62.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "527fadee13e0c05939a6a05d5bd6eec6cd2e3dbd648b9f8e447c6518133d8580" +dependencies = [ + "windows-collections", + "windows-core", + "windows-future", + "windows-numerics", +] + +[[package]] +name = "windows-collections" 
+version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23b2d95af1a8a14a3c7367e1ed4fc9c20e0a26e79551b1454d72583c97cc6610" +dependencies = [ + "windows-core", +] + [[package]] name = "windows-core" version = "0.62.2" @@ -9450,6 +10606,17 @@ dependencies = [ "windows-strings", ] +[[package]] +name = "windows-future" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1d6f90251fe18a279739e78025bd6ddc52a7e22f921070ccdc67dde84c605cb" +dependencies = [ + "windows-core", + "windows-link", + "windows-threading", +] + [[package]] name = "windows-implement" version = "0.60.2" @@ -9478,6 +10645,16 @@ version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" +[[package]] +name = "windows-numerics" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e2e40844ac143cdb44aead537bbf727de9b044e107a0f1220392177d15b0f26" +dependencies = [ + "windows-core", + "windows-link", +] + [[package]] name = "windows-registry" version = "0.6.1" @@ -9574,6 +10751,15 @@ dependencies = [ "windows_x86_64_msvc 0.52.6", ] +[[package]] +name = "windows-threading" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3949bd5b99cafdf1c7ca86b43ca564028dfe27d66958f2470940f73d86d75b37" +dependencies = [ + "windows-link", +] + [[package]] name = "windows_aarch64_gnullvm" version = "0.42.2" @@ -9788,6 +10974,23 @@ dependencies = [ "zeroize", ] +[[package]] +name = "x509-parser" +version = "0.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4569f339c0c402346d4a75a9e39cf8dad310e287eef1ff56d4c68e5067f53460" +dependencies = [ + "asn1-rs", + "data-encoding", + "der-parser", + "lazy_static", + "nom 7.1.3", + "oid-registry", + "rusticata-macros", + "thiserror 2.0.18", + "time", +] + [[package]] name = 
"xattr" version = "1.6.1" @@ -9798,6 +11001,21 @@ dependencies = [ "rustix", ] +[[package]] +name = "xml-rs" +version = "0.8.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ae8337f8a065cfc972643663ea4279e04e7256de865aa66fe25cec5fb912d3f" + +[[package]] +name = "xmltree" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7d8a75eaf6557bb84a65ace8609883db44a29951042ada9b393151532e41fcb" +dependencies = [ + "xml-rs", +] + [[package]] name = "xxhash-rust" version = "0.8.15" @@ -9812,7 +11030,7 @@ checksum = "2462ea039c445496d8793d052e13787f2b90e750b833afee748e601c17621ed9" dependencies = [ "arraydeque", "encoding_rs", - "hashlink", + "hashlink 0.10.0", ] [[package]] @@ -9821,6 +11039,15 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cfe53a6657fd280eaa890a3bc59152892ffa3e30101319d168b781ed6529b049" +[[package]] +name = "yasna" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e17bb3549cc1321ae1296b9cdc2698e2b6cb1992adfa19a8c72e5b7a738f44cd" +dependencies = [ + "time", +] + [[package]] name = "yoke" version = "0.8.1" diff --git a/Cargo.toml b/Cargo.toml index 551c1f98..b3090355 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -20,6 +20,9 @@ members = [ "programs/token", "programs/associated_token_account/core", "programs/associated_token_account", + "programs/authenticated_transfer/core", + "programs/faucet/core", + "programs/vault/core", "sequencer/core", "sequencer/service", "sequencer/service/protocol", @@ -36,10 +39,9 @@ members = [ "examples/program_deployment", "examples/program_deployment/methods", "examples/program_deployment/methods/guest", - "bedrock_client", "testnet_initial_state", + "indexer/ffi", "keycard_wallet", - "indexer_ffi", ] [workspace.dependencies] @@ -57,9 +59,9 @@ indexer_core = { path = "indexer/core" } indexer_service = { path = "indexer/service" } 
indexer_service_protocol = { path = "indexer/service/protocol" } indexer_service_rpc = { path = "indexer/service/rpc" } -indexer_ffi = { path = "indexer_ffi" } wallet = { path = "wallet" } wallet-ffi = { path = "wallet-ffi", default-features = false } +indexer_ffi = { path = "indexer/ffi" } clock_core = { path = "programs/clock/core" } token_core = { path = "programs/token/core" } token_program = { path = "programs/token" } @@ -67,8 +69,10 @@ amm_core = { path = "programs/amm/core" } amm_program = { path = "programs/amm" } ata_core = { path = "programs/associated_token_account/core" } ata_program = { path = "programs/associated_token_account" } +authenticated_transfer_core = { path = "programs/authenticated_transfer/core" } +faucet_core = { path = "programs/faucet/core" } +vault_core = { path = "programs/vault/core" } test_program_methods = { path = "test_program_methods" } -bedrock_client = { path = "bedrock_client" } testnet_initial_state = { path = "testnet_initial_state" } keycard_wallet = { path = "keycard_wallet" } @@ -82,6 +86,7 @@ tokio-util = "0.7.18" risc0-zkvm = { version = "3.0.5", features = ['std'] } risc0-build = "3.0.5" anyhow = "1.0.98" +derive_more = "2.1.1" num_cpus = "1.13.1" openssl = { version = "0.10", features = ["vendored"] } openssl-probe = { version = "0.1.2" } @@ -123,13 +128,13 @@ url = { version = "2.5.4", features = ["serde"] } tokio-retry = "0.3.0" schemars = "1.2" async-stream = "0.3.6" -pyo3 = { version = "0.24", features = ["auto-initialize"] } -logos-blockchain-common-http-client = { git = "https://github.com/logos-blockchain/logos-blockchain.git", rev = "1da154c74b911318fb853d37261f8a05ffe513b4" } -logos-blockchain-key-management-system-service = { git = "https://github.com/logos-blockchain/logos-blockchain.git", rev = "1da154c74b911318fb853d37261f8a05ffe513b4" } -logos-blockchain-core = { git = "https://github.com/logos-blockchain/logos-blockchain.git", rev = "1da154c74b911318fb853d37261f8a05ffe513b4" } 
-logos-blockchain-chain-broadcast-service = { git = "https://github.com/logos-blockchain/logos-blockchain.git", rev = "1da154c74b911318fb853d37261f8a05ffe513b4" } -logos-blockchain-chain-service = { git = "https://github.com/logos-blockchain/logos-blockchain.git", rev = "1da154c74b911318fb853d37261f8a05ffe513b4" } +logos-blockchain-common-http-client = { git = "https://github.com/logos-blockchain/logos-blockchain.git", rev = "ee281a447d95a951752461ee0a6e88eb4a0f17cf" } +logos-blockchain-key-management-system-service = { git = "https://github.com/logos-blockchain/logos-blockchain.git", rev = "ee281a447d95a951752461ee0a6e88eb4a0f17cf" } +logos-blockchain-core = { git = "https://github.com/logos-blockchain/logos-blockchain.git", rev = "ee281a447d95a951752461ee0a6e88eb4a0f17cf" } +logos-blockchain-chain-broadcast-service = { git = "https://github.com/logos-blockchain/logos-blockchain.git", rev = "ee281a447d95a951752461ee0a6e88eb4a0f17cf" } +logos-blockchain-chain-service = { git = "https://github.com/logos-blockchain/logos-blockchain.git", rev = "ee281a447d95a951752461ee0a6e88eb4a0f17cf" } +logos-blockchain-zone-sdk = { git = "https://github.com/logos-blockchain/logos-blockchain.git", rev = "ee281a447d95a951752461ee0a6e88eb4a0f17cf" } rocksdb = { version = "0.24.0", default-features = false, features = [ "snappy", @@ -149,6 +154,7 @@ actix-web = { version = "4.13.0", default-features = false, features = [ ] } clap = { version = "4.5.42", features = ["derive", "env"] } reqwest = { version = "0.12", features = ["json", "rustls-tls", "stream"] } +pyo3 = { version = "0.24", features = ["auto-initialize"] } # Profile for leptos WASM release builds [profile.wasm-release] diff --git a/artifacts/program_methods/amm.bin b/artifacts/program_methods/amm.bin index 36caad85..14e92d63 100644 Binary files a/artifacts/program_methods/amm.bin and b/artifacts/program_methods/amm.bin differ diff --git a/artifacts/program_methods/associated_token_account.bin 
b/artifacts/program_methods/associated_token_account.bin index 5439d1af..87eb0b7c 100644 Binary files a/artifacts/program_methods/associated_token_account.bin and b/artifacts/program_methods/associated_token_account.bin differ diff --git a/artifacts/program_methods/authenticated_transfer.bin b/artifacts/program_methods/authenticated_transfer.bin index bdbcef61..d050cd62 100644 Binary files a/artifacts/program_methods/authenticated_transfer.bin and b/artifacts/program_methods/authenticated_transfer.bin differ diff --git a/artifacts/program_methods/clock.bin b/artifacts/program_methods/clock.bin index d3ca0dab..f225da54 100644 Binary files a/artifacts/program_methods/clock.bin and b/artifacts/program_methods/clock.bin differ diff --git a/artifacts/program_methods/faucet.bin b/artifacts/program_methods/faucet.bin new file mode 100644 index 00000000..00a81dc9 Binary files /dev/null and b/artifacts/program_methods/faucet.bin differ diff --git a/artifacts/program_methods/genesis_supply_account.bin b/artifacts/program_methods/genesis_supply_account.bin new file mode 100644 index 00000000..c377a1e6 Binary files /dev/null and b/artifacts/program_methods/genesis_supply_account.bin differ diff --git a/artifacts/program_methods/genesis_supply_private_account.bin b/artifacts/program_methods/genesis_supply_private_account.bin new file mode 100644 index 00000000..9d6aa313 Binary files /dev/null and b/artifacts/program_methods/genesis_supply_private_account.bin differ diff --git a/artifacts/program_methods/pinata.bin b/artifacts/program_methods/pinata.bin index 5e6a011b..5c9233bd 100644 Binary files a/artifacts/program_methods/pinata.bin and b/artifacts/program_methods/pinata.bin differ diff --git a/artifacts/program_methods/pinata_token.bin b/artifacts/program_methods/pinata_token.bin index 57a201c4..fd3adddb 100644 Binary files a/artifacts/program_methods/pinata_token.bin and b/artifacts/program_methods/pinata_token.bin differ diff --git 
a/artifacts/program_methods/privacy_preserving_circuit.bin b/artifacts/program_methods/privacy_preserving_circuit.bin index dd613143..d9f6f94d 100644 Binary files a/artifacts/program_methods/privacy_preserving_circuit.bin and b/artifacts/program_methods/privacy_preserving_circuit.bin differ diff --git a/artifacts/program_methods/token.bin b/artifacts/program_methods/token.bin index 6366eba6..e451632a 100644 Binary files a/artifacts/program_methods/token.bin and b/artifacts/program_methods/token.bin differ diff --git a/artifacts/program_methods/vault.bin b/artifacts/program_methods/vault.bin new file mode 100644 index 00000000..58417603 Binary files /dev/null and b/artifacts/program_methods/vault.bin differ diff --git a/artifacts/test_program_methods/auth_asserting_noop.bin b/artifacts/test_program_methods/auth_asserting_noop.bin index f9e4d1d4..3293d845 100644 Binary files a/artifacts/test_program_methods/auth_asserting_noop.bin and b/artifacts/test_program_methods/auth_asserting_noop.bin differ diff --git a/artifacts/test_program_methods/auth_transfer_proxy.bin b/artifacts/test_program_methods/auth_transfer_proxy.bin new file mode 100644 index 00000000..a3122134 Binary files /dev/null and b/artifacts/test_program_methods/auth_transfer_proxy.bin differ diff --git a/artifacts/test_program_methods/burner.bin b/artifacts/test_program_methods/burner.bin index 94a90236..3873ab01 100644 Binary files a/artifacts/test_program_methods/burner.bin and b/artifacts/test_program_methods/burner.bin differ diff --git a/artifacts/test_program_methods/chain_caller.bin b/artifacts/test_program_methods/chain_caller.bin index 58331d6c..a6fe86e0 100644 Binary files a/artifacts/test_program_methods/chain_caller.bin and b/artifacts/test_program_methods/chain_caller.bin differ diff --git a/artifacts/test_program_methods/changer_claimer.bin b/artifacts/test_program_methods/changer_claimer.bin index 2760b7a3..d285e4c8 100644 Binary files a/artifacts/test_program_methods/changer_claimer.bin 
and b/artifacts/test_program_methods/changer_claimer.bin differ diff --git a/artifacts/test_program_methods/claimer.bin b/artifacts/test_program_methods/claimer.bin index ff504da1..21a5c887 100644 Binary files a/artifacts/test_program_methods/claimer.bin and b/artifacts/test_program_methods/claimer.bin differ diff --git a/artifacts/test_program_methods/clock_chain_caller.bin b/artifacts/test_program_methods/clock_chain_caller.bin index 37c9a004..10cd6910 100644 Binary files a/artifacts/test_program_methods/clock_chain_caller.bin and b/artifacts/test_program_methods/clock_chain_caller.bin differ diff --git a/artifacts/test_program_methods/data_changer.bin b/artifacts/test_program_methods/data_changer.bin index 3d69b8cb..1a4dc474 100644 Binary files a/artifacts/test_program_methods/data_changer.bin and b/artifacts/test_program_methods/data_changer.bin differ diff --git a/artifacts/test_program_methods/extra_output.bin b/artifacts/test_program_methods/extra_output.bin index 873ce66a..2152e32f 100644 Binary files a/artifacts/test_program_methods/extra_output.bin and b/artifacts/test_program_methods/extra_output.bin differ diff --git a/artifacts/test_program_methods/flash_swap_callback.bin b/artifacts/test_program_methods/flash_swap_callback.bin index 0846f255..4a725f3e 100644 Binary files a/artifacts/test_program_methods/flash_swap_callback.bin and b/artifacts/test_program_methods/flash_swap_callback.bin differ diff --git a/artifacts/test_program_methods/flash_swap_initiator.bin b/artifacts/test_program_methods/flash_swap_initiator.bin index 1e285245..e4b303ee 100644 Binary files a/artifacts/test_program_methods/flash_swap_initiator.bin and b/artifacts/test_program_methods/flash_swap_initiator.bin differ diff --git a/artifacts/test_program_methods/malicious_authorization_changer.bin b/artifacts/test_program_methods/malicious_authorization_changer.bin index cc757683..2b41c197 100644 Binary files a/artifacts/test_program_methods/malicious_authorization_changer.bin and 
b/artifacts/test_program_methods/malicious_authorization_changer.bin differ diff --git a/artifacts/test_program_methods/malicious_caller_program_id.bin b/artifacts/test_program_methods/malicious_caller_program_id.bin index f152051d..20c03dfa 100644 Binary files a/artifacts/test_program_methods/malicious_caller_program_id.bin and b/artifacts/test_program_methods/malicious_caller_program_id.bin differ diff --git a/artifacts/test_program_methods/malicious_self_program_id.bin b/artifacts/test_program_methods/malicious_self_program_id.bin index 6d83b95b..0aef2bd7 100644 Binary files a/artifacts/test_program_methods/malicious_self_program_id.bin and b/artifacts/test_program_methods/malicious_self_program_id.bin differ diff --git a/artifacts/test_program_methods/minter.bin b/artifacts/test_program_methods/minter.bin index 29bcd715..d6ca6b99 100644 Binary files a/artifacts/test_program_methods/minter.bin and b/artifacts/test_program_methods/minter.bin differ diff --git a/artifacts/test_program_methods/missing_output.bin b/artifacts/test_program_methods/missing_output.bin index c7cc1571..1c9f6914 100644 Binary files a/artifacts/test_program_methods/missing_output.bin and b/artifacts/test_program_methods/missing_output.bin differ diff --git a/artifacts/test_program_methods/modified_transfer.bin b/artifacts/test_program_methods/modified_transfer.bin index 8f2b1e39..a8a87da8 100644 Binary files a/artifacts/test_program_methods/modified_transfer.bin and b/artifacts/test_program_methods/modified_transfer.bin differ diff --git a/artifacts/test_program_methods/nonce_changer.bin b/artifacts/test_program_methods/nonce_changer.bin index 993c1451..e5659b80 100644 Binary files a/artifacts/test_program_methods/nonce_changer.bin and b/artifacts/test_program_methods/nonce_changer.bin differ diff --git a/artifacts/test_program_methods/noop.bin b/artifacts/test_program_methods/noop.bin index 579db977..023f2e21 100644 Binary files a/artifacts/test_program_methods/noop.bin and 
b/artifacts/test_program_methods/noop.bin differ diff --git a/artifacts/test_program_methods/pda_claimer.bin b/artifacts/test_program_methods/pda_claimer.bin index 1a541384..e1bea8f8 100644 Binary files a/artifacts/test_program_methods/pda_claimer.bin and b/artifacts/test_program_methods/pda_claimer.bin differ diff --git a/artifacts/test_program_methods/pda_fund_spend_proxy.bin b/artifacts/test_program_methods/pda_fund_spend_proxy.bin new file mode 100644 index 00000000..7cd0839a Binary files /dev/null and b/artifacts/test_program_methods/pda_fund_spend_proxy.bin differ diff --git a/artifacts/test_program_methods/pinata_cooldown.bin b/artifacts/test_program_methods/pinata_cooldown.bin index 2b0d979a..8e3b97c5 100644 Binary files a/artifacts/test_program_methods/pinata_cooldown.bin and b/artifacts/test_program_methods/pinata_cooldown.bin differ diff --git a/artifacts/test_program_methods/private_pda_delegator.bin b/artifacts/test_program_methods/private_pda_delegator.bin index 4b55e871..44d566ec 100644 Binary files a/artifacts/test_program_methods/private_pda_delegator.bin and b/artifacts/test_program_methods/private_pda_delegator.bin differ diff --git a/artifacts/test_program_methods/private_pda_spender.bin b/artifacts/test_program_methods/private_pda_spender.bin new file mode 100644 index 00000000..70e4c5a0 Binary files /dev/null and b/artifacts/test_program_methods/private_pda_spender.bin differ diff --git a/artifacts/test_program_methods/program_owner_changer.bin b/artifacts/test_program_methods/program_owner_changer.bin index 3bdabade..4a47211b 100644 Binary files a/artifacts/test_program_methods/program_owner_changer.bin and b/artifacts/test_program_methods/program_owner_changer.bin differ diff --git a/artifacts/test_program_methods/simple_balance_transfer.bin b/artifacts/test_program_methods/simple_balance_transfer.bin index 0aaf1a23..647f86fa 100644 Binary files a/artifacts/test_program_methods/simple_balance_transfer.bin and 
b/artifacts/test_program_methods/simple_balance_transfer.bin differ diff --git a/artifacts/test_program_methods/time_locked_transfer.bin b/artifacts/test_program_methods/time_locked_transfer.bin index 5700322e..8f423fa7 100644 Binary files a/artifacts/test_program_methods/time_locked_transfer.bin and b/artifacts/test_program_methods/time_locked_transfer.bin differ diff --git a/artifacts/test_program_methods/two_pda_claimer.bin b/artifacts/test_program_methods/two_pda_claimer.bin index 600b819d..9d5001f9 100644 Binary files a/artifacts/test_program_methods/two_pda_claimer.bin and b/artifacts/test_program_methods/two_pda_claimer.bin differ diff --git a/artifacts/test_program_methods/validity_window.bin b/artifacts/test_program_methods/validity_window.bin index 02ccc149..a0a578e1 100644 Binary files a/artifacts/test_program_methods/validity_window.bin and b/artifacts/test_program_methods/validity_window.bin differ diff --git a/artifacts/test_program_methods/validity_window_chain_caller.bin b/artifacts/test_program_methods/validity_window_chain_caller.bin index d239c750..326ed7a1 100644 Binary files a/artifacts/test_program_methods/validity_window_chain_caller.bin and b/artifacts/test_program_methods/validity_window_chain_caller.bin differ diff --git a/bedrock/deployment-settings.yaml b/bedrock/deployment-settings.yaml index d0c05e24..7ef63f03 100644 --- a/bedrock/deployment-settings.yaml +++ b/bedrock/deployment-settings.yaml @@ -39,42 +39,42 @@ cryptarchia: threshold: 1 timestamp: 0 gossipsub_protocol: /integration/logos-blockchain/cryptarchia/proto/1.0.0 - genesis_state: - mantle_tx: - ops: + genesis_block: + header: + version: Bedrock + parent_block: '0000000000000000000000000000000000000000000000000000000000000000' + slot: 0 + block_root: b5f8787ac23674822414c70eea15d842da38f2e806ede1a73cf7b5cf0277da07 + proof_of_leadership: + proof: 
'0000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000' + entropy_contribution: '0000000000000000000000000000000000000000000000000000000000000000' + leader_key: '0000000000000000000000000000000000000000000000000000000000000000' + voucher_cm: '0000000000000000000000000000000000000000000000000000000000000000' + signature: '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000' + transactions: + - mantle_tx: + ops: - opcode: 0 payload: - inputs: [ ] + inputs: [] outputs: - - value: 1 - pk: d204000000000000000000000000000000000000000000000000000000000000 - - value: 100 - pk: 2e03b2eff5a45478e7e79668d2a146cf2c5c7925bce927f2b1c67f2ab4fc0d26 + - value: 1 + pk: d204000000000000000000000000000000000000000000000000000000000000 + - value: 100 + pk: '2e03b2eff5a45478e7e79668d2a146cf2c5c7925bce927f2b1c67f2ab4fc0d26' + - value: 1 + pk: ed266e6e887b9b97059dc1aa1b7b2e19b934291753c6336a163fe4ebaa28e717 - opcode: 17 payload: - channel_id: "0000000000000000000000000000000000000000000000000000000000000000" - inscription: [ 103, 101, 110, 101, 115, 105, 115 ] # "genesis" in bytes - parent: "0000000000000000000000000000000000000000000000000000000000000000" - signer: "0000000000000000000000000000000000000000000000000000000000000000" - execution_gas_price: 0 - storage_gas_price: 0 - ops_proofs: - - !ZkSig - pi_a: [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 - ] - pi_b: [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 - ] - pi_c: [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0 - ] - - NoProof + channel_id: '0000000000000000000000000000000000000000000000000000000000000000' + inscription: '67656e65736973' + parent: '0000000000000000000000000000000000000000000000000000000000000000' + signer: '0000000000000000000000000000000000000000000000000000000000000000' + execution_gas_price: 0 + storage_gas_price: 0 + ops_proofs: + - !Ed25519Sig '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000' + - !Ed25519Sig '00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000' time: slot_duration: '1.0' chain_start_time: PLACEHOLDER_CHAIN_START_TIME diff --git a/bedrock/docker-compose.yml b/bedrock/docker-compose.yml index 73795666..e16e505b 100644 --- a/bedrock/docker-compose.yml +++ b/bedrock/docker-compose.yml @@ -1,7 +1,7 @@ services: logos-blockchain-node-0: - image: ghcr.io/logos-blockchain/logos-blockchain@sha256:c5243681b353278cabb562a176f0a5cfbefc2056f18cebc47fe0e3720c29fb12 + image: ghcr.io/logos-blockchain/logos-blockchain@sha256:9f1829dea335c56f6ff68ae37ea872ed5313b96b69e8ffe143c02b7217de85fc ports: - "${PORT:-8080}:18080/tcp" volumes: diff --git a/common/Cargo.toml b/common/Cargo.toml index dbf5ec0c..5d8e278c 100644 --- a/common/Cargo.toml +++ b/common/Cargo.toml @@ -10,6 +10,7 @@ workspace = true [dependencies] nssa.workspace = true nssa_core.workspace = true +authenticated_transfer_core.workspace = true clock_core.workspace = true anyhow.workspace = true diff --git a/common/src/block.rs b/common/src/block.rs index 3f354c2d..fbc4c9a6 100644 --- a/common/src/block.rs +++ b/common/src/block.rs @@ -114,11 +114,6 @@ impl HashableBlockData { bedrock_parent_id, } } - - #[must_use] - pub fn block_hash(&self) -> BlockHash { - OwnHasher::hash(&borsh::to_vec(&self).unwrap()) - } } impl From for HashableBlockData { diff --git a/common/src/test_utils.rs b/common/src/test_utils.rs index 
267d10ce..806048e1 100644 --- a/common/src/test_utils.rs +++ b/common/src/test_utils.rs @@ -47,12 +47,11 @@ pub fn produce_dummy_empty_transaction() -> NSSATransaction { let program_id = nssa::program::Program::authenticated_transfer_program().id(); let account_ids = vec![]; let nonces = vec![]; - let instruction_data: u128 = 0; let message = nssa::public_transaction::Message::try_new( program_id, account_ids, nonces, - instruction_data, + authenticated_transfer_core::Instruction::Initialize, ) .unwrap(); let private_key = nssa::PrivateKey::try_new([1; 32]).unwrap(); @@ -78,7 +77,9 @@ pub fn create_transaction_native_token_transfer( program_id, account_ids, nonces, - balance_to_move, + authenticated_transfer_core::Instruction::Transfer { + amount: balance_to_move, + }, ) .unwrap(); let witness_set = nssa::public_transaction::WitnessSet::for_message(&message, &[signing_key]); diff --git a/common/src/transaction.rs b/common/src/transaction.rs index 7ce0e76f..6175f1a1 100644 --- a/common/src/transaction.rs +++ b/common/src/transaction.rs @@ -67,13 +67,26 @@ impl NSSATransaction { } /// Validates the transaction against the current state and returns the resulting diff - /// without applying it. Rejects transactions that modify clock system accounts. + /// without applying it. Rejects transactions that modify clock system accounts and + /// rejects unsafe modifications of the system faucet account. Also rejects direct + /// invocation of the faucet program for user-submitted transactions. + /// + /// This check is required for all user transactions. Only sequencer transaction may bypass this + /// check. 
pub fn validate_on_state( &self, state: &V03State, block_id: BlockId, timestamp: Timestamp, ) -> Result { + if let Self::Public(tx) = self + && tx.message().program_id == nssa::program::Program::faucet().id() + { + return Err(nssa::error::NssaError::InvalidInput( + "Transaction invokes restricted faucet program".into(), + )); + } + let diff = match self { Self::Public(tx) => { ValidatedStateDiff::from_public_transaction(tx, state, block_id, timestamp) diff --git a/configs/docker-all-in-one/indexer_config.json b/configs/docker-all-in-one/indexer_config.json index c2b07e3e..f2005ff5 100644 --- a/configs/docker-all-in-one/indexer_config.json +++ b/configs/docker-all-in-one/indexer_config.json @@ -1,160 +1,8 @@ { "home": "./indexer/service", "consensus_info_polling_interval": "1s", - "bedrock_client_config": { - "addr": "http://logos-blockchain-node-0:18080", - "backoff": { - "start_delay": "100ms", - "max_retries": 5 - } + "bedrock_config": { + "addr": "http://logos-blockchain-node-0:18080" }, - "channel_id": "0101010101010101010101010101010101010101010101010101010101010101", - "initial_accounts": [ - { - "account_id": "6iArKUXxhUJqS7kCaPNhwMWt3ro71PDyBj7jwAyE2VQV", - "balance": 10000 - }, - { - "account_id": "7wHg9sbJwc6h3NP1S9bekfAzB8CHifEcxKswCKUt3YQo", - "balance": 20000 - } - ], - "initial_commitments": [ - { - "npk":[ - 177, - 64, - 1, - 11, - 87, - 38, - 254, - 159, - 231, - 165, - 1, - 94, - 64, - 137, - 243, - 76, - 249, - 101, - 251, - 129, - 33, - 101, - 189, - 30, - 42, - 11, - 191, - 34, - 103, - 186, - 227, - 230 - ] , - "account": { - "program_owner": [ - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0 - ], - "balance": 10000, - "data": [], - "nonce": 0 - } - }, - { - "npk": [ - 32, - 67, - 72, - 164, - 106, - 53, - 66, - 239, - 141, - 15, - 52, - 230, - 136, - 177, - 2, - 236, - 207, - 243, - 134, - 135, - 210, - 143, - 87, - 232, - 215, - 128, - 194, - 120, - 113, - 224, - 4, - 165 - ], - "account": { - "program_owner": [ - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0 - 
], - "balance": 20000, - "data": [], - "nonce": 0 - } - } - ], - "signing_key": [ - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37 - ] + "channel_id": "0101010101010101010101010101010101010101010101010101010101010101" } diff --git a/configs/docker-all-in-one/sequencer_config.json b/configs/docker-all-in-one/sequencer_config.json index d7fd3490..207f2e79 100644 --- a/configs/docker-all-in-one/sequencer_config.json +++ b/configs/docker-all-in-one/sequencer_config.json @@ -1,7 +1,5 @@ { "home": "/var/lib/sequencer_service", - "genesis_id": 1, - "is_genesis_random": true, "max_num_tx_in_block": 20, "max_block_size": "1 MiB", "mempool_max_size": 10000, @@ -16,117 +14,29 @@ "node_url": "http://logos-blockchain-node-0:18080" }, "indexer_rpc_url": "ws://indexer_service:8779", - "initial_accounts": [ + "genesis": [ { - "account_id": "6iArKUXxhUJqS7kCaPNhwMWt3ro71PDyBj7jwAyE2VQV", - "balance": 10000 - }, - { - "account_id": "7wHg9sbJwc6h3NP1S9bekfAzB8CHifEcxKswCKUt3YQo", - "balance": 20000 - } - ], - "initial_commitments": [ - { - "npk":[ - 177, - 64, - 1, - 11, - 87, - 38, - 254, - 159, - 231, - 165, - 1, - 94, - 64, - 137, - 243, - 76, - 249, - 101, - 251, - 129, - 33, - 101, - 189, - 30, - 42, - 11, - 191, - 34, - 103, - 186, - 227, - 230 - ] , - "account": { - "program_owner": [ - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0 - ], - "balance": 10000, - "data": [], - "nonce": 0 + "supply_account": { + "account_id": "6iArKUXxhUJqS7kCaPNhwMWt3ro71PDyBj7jwAyE2VQV", + "balance": 10000 } }, { - "npk": [ - 32, - 67, - 72, - 164, - 106, - 53, - 66, - 239, - 141, - 15, - 52, - 230, - 136, - 177, - 2, - 236, - 207, - 243, - 134, - 135, - 210, - 143, - 87, - 232, - 215, - 128, - 194, - 120, - 113, - 224, - 4, - 165 - ], - "account": { - "program_owner": [ - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0 - ], - "balance": 20000, - "data": [], - "nonce": 0 + 
"supply_account": { + "account_id": "7wHg9sbJwc6h3NP1S9bekfAzB8CHifEcxKswCKUt3YQo", + "balance": 20000 + } + }, + { + "supply_account": { + "account_id": "61EsoYN6gvTLkveh1YSTMG3yJkncpHy5EGmxhSK4ew29", + "balance": 10000 + } + }, + { + "supply_account": { + "account_id": "3m6HQmCgmAvsxZtxAHPqqEqoBG4335fCG8TzxigyW7rE", + "balance": 20000 } } ], diff --git a/docs/LEZ testnet v0.1 tutorials/keycard.md b/docs/LEZ testnet v0.1 tutorials/keycard.md index 460b70f2..72f168d4 100644 --- a/docs/LEZ testnet v0.1 tutorials/keycard.md +++ b/docs/LEZ testnet v0.1 tutorials/keycard.md @@ -6,12 +6,17 @@ This tutorial walks you through using Keycard with Wallet CLI. Keycard is option ### Required hardware - Keycard (Blank) - a Keycard, directly, from Keycard.tech cannot (currently) be updated to support LEE. - Smartcard reader +- Applets (`math.cap` and `LEE_keycard.cap`). Eventually, both of these applets will be available in separate repos. + - `math.cap` is an applet to speed up computations on Keycard; developed by Bitgamma (Keycard-tech team). + - `LEE_keycard.cap` is an applet that contains LEE keycard protocol; developed by Bitgamma (Keycard-tech team) ### Firmware installation Installation: 1. Install math applet on your keycard; this process only needs to be done once. In the root of repo: ``` + sudo apt-get install -y default-jdk + wget https://github.com/martinpaljak/GlobalPlatformPro/releases/download/v25.10.20/gp.jar -P keycard_wallet/keycard_applets cd keycard_wallet/keycard_applets java -jar gp.jar --key c212e073ff8b4bbfaff4de8ab655221f --load math.cap ``` @@ -19,6 +24,7 @@ Installation: - Keycard Desktop is used to install the LEE key protocol to a blank keycard. - Select (Re)Install Applet and upload the key binary (`keycard_wallet/keycard_applets/LEE_keycard.cap`). 
![keycard-desktop.png](keycard-desktop.png) + - **Important:** keycard can only connect with one application at a time; if Keycard-Desktop is using keycard then Wallet CLI cannot access the same keycard, and vice-versa. ## Wallet with Keycard Keycard functionality is available to Wallet CLI by setting up the following Python virtual environment: @@ -40,15 +46,32 @@ pip install -e keycard_wallet/python/keycard-py source venv/bin/activate ``` +## PIN entry + +Each Keycard command prompts for a PIN interactively. To avoid re-entering it across multiple commands, export it as an environment variable: + +```bash +export KEYCARD_PIN=123456 +``` + +Unset it when done: + +```bash +unset KEYCARD_PIN +``` + ## Keycard Commands ### Keycard -| Command | Key-path options | Description | -|-----------------------------------|------------------|--------------------------------------------------------------------------| -| `wallet keycard available` | — | Checks whether a Keycard reader and card are accessible | -| `wallet keycard load` | — | Loads a mnemonic phrase onto the Keycard | -| `wallet keycard get-private-keys` | `--key-path` | Retrieves private account keys (nsk, vsk) for the given BIP32 path | +| Command | Description | +|-----------------------------------|--------------------------------------------------------------------------| +| `wallet keycard available` | Checks whether a Keycard reader and card are accessible | +| `wallet keycard init` | Initializes a blank Keycard with a PIN and a generated PUK | +| `wallet keycard connect` | Establishes and saves a pairing with the Keycard | +| `wallet keycard disconnect` | Unpairs the Keycard and clears the saved pairing | +| `wallet keycard load` | Loads a mnemonic phrase onto the Keycard | +| `wallet keycard get-private-keys` | Retrieves private account keys (nsk, vsk) for a given BIP32 path | 1. Check keycard availability ```bash @@ -58,16 +81,40 @@ wallet keycard available ✅ Keycard is available. ``` -2. 
Load a mnemonic phrase +2. Initialize a blank Keycard ```bash -wallet keycard load --mnemonic "fashion degree mountain wool question damp current pond grow dolphin chronic then" +wallet keycard init + +# Output: +Keycard PIN: +Keycard PUK: 847302916485 +Record this PUK and store it somewhere safe. It cannot be recovered. +✅ Keycard initialized successfully. +``` + +3. Connect (pair and save pairing for subsequent commands) +```bash +wallet keycard connect + +# Output: +Keycard PIN: +✅ Keycard paired and ready. +``` + +4. Load a mnemonic phrase +```bash +# Supply mnemonic via environment variable to avoid interactive prompt +export KEYCARD_MNEMONIC="fashion degree mountain wool question damp current pond grow dolphin chronic then" +wallet keycard load +unset KEYCARD_MNEMONIC # Output: Keycard PIN: ✅ Keycard is now connected to wallet. +✅ Mnemonic phrase loaded successfully. ``` -3. Get private keys for a path +5. Get private keys for a path ```bash wallet keycard get-private-keys --key-path "m/44'/60'/0'/0/0" @@ -77,17 +124,31 @@ nsk: 55e505bf925e536c843a12ebc08c41ca5f4761eeeb7fa33725f0b44e6f1ac2e4 vsk: 30f798893977a7b7263d1f77abf58e11e014428c92030d6a02fe363cceb41ffa ``` +6. Disconnect (unpair and clear saved pairing) +```bash +wallet keycard disconnect + +# Output: +Keycard PIN: +✅ Keycard unpaired and pairing cleared. +``` + ### Pinata (testnet) -| Command | Key-path options | Description | -|-----------------------|-------------------------------|--------------------------------------------------------------------------| -| `wallet pinata claim` | `--key-path` | Claims a testnet pinata reward to a public or private recipient account | +| Command | Description | +|-----------------------|--------------------------------------------------------------------------| +| `wallet pinata claim` | Claims a testnet pinata reward to a public or private recipient account | Note: The recipient account must be initialized with `wallet auth-transfer init` before claiming. 
+`--to` accepts any of: +- A BIP32 key path — uses Keycard (e.g. `m/44'/60'/0'/0/0`) +- An account ID with privacy prefix (e.g. `Public/9bKm...`) +- An account label (e.g. `my-account`) + 1. Claim to a Keycard public account ```bash -wallet pinata claim --key-path "m/44'/60'/0'/0/0" +wallet pinata claim --to "m/44'/60'/0'/0/0" # Output: Keycard PIN: @@ -98,7 +159,7 @@ Transaction hash is fd320c01f5469e62d2486afa1d9d5be39afcca0cd01d1575905b7acd95cf 2. Claim to a local wallet account by label ```bash -wallet pinata claim --to-label my-account +wallet pinata claim --to my-account # Output: Transaction hash is 2c8a4f1e903d5b76e80214c5b82e1d46a105e28930ad71bcce48f2d07b49a16f @@ -106,16 +167,21 @@ Transaction hash is 2c8a4f1e903d5b76e80214c5b82e1d46a105e28930ad71bcce48f2d07b49 ### Authenticated-transfer program -| Command | Key-path options | Description | -|-----------------------------|--------------------------------------|------------------------------------------------------------------------------------| -| `wallet auth-transfer init` | `--key-path` | Registers a public or private account with the auth-transfer program | -| `wallet auth-transfer send` | `--from-key-path`, `--to-key-path` | Sends native tokens; either or both endpoints can be Keycard public accounts | +| Command | Description | +|-----------------------------|-------------------------------------------------------------------------------| +| `wallet auth-transfer init` | Registers an account with the auth-transfer program | +| `wallet auth-transfer send` | Sends native tokens between accounts | -For `send`, `--from-key-path` and `--to-key-path` can be used together (both Keycard) or individually (one Keycard, one local/label). Shielded sends to foreign private accounts use `--to-npk`/`--to-vpk` instead of `--to-key-path`. +`--account` (for `init`) and `--from`/`--to` (for `send`) each accept any of: +- A BIP32 key path — uses Keycard (e.g. 
`m/44'/60'/0'/0/0`) +- An account ID with privacy prefix (e.g. `Public/9bKm...`) +- An account label (e.g. `my-account`) + +For `send`, foreign recipient accounts (not in the local wallet and not a Keycard path) do not need to sign — pass their account ID directly via `--to`. Shielded sends to foreign private accounts use `--to-npk`/`--to-vpk`. 1. Initialize a Keycard public account ```bash -wallet auth-transfer init --key-path "m/44'/60'/0'/0/0" +wallet auth-transfer init --account "m/44'/60'/0'/0/0" # Output: Keycard PIN: @@ -125,8 +191,8 @@ Transaction hash is 49c16940493e1618c393645c1211b5c793d405838221c29ac6562a8a4b11 2. Send native tokens between two Keycard accounts ```bash wallet auth-transfer send \ - --from-key-path "m/44'/60'/0'/0/0" \ - --to-key-path "m/44'/60'/0'/0/1" \ + --from "m/44'/60'/0'/0/0" \ + --to "m/44'/60'/0'/0/1" \ --amount 40 # Output: @@ -134,15 +200,26 @@ Keycard PIN: Transaction hash is 1a9764ab20763dcc1ffb51c6e9badd5a6316a773759032ca48e0eee59caaf488 ``` -3. Send native tokens from Keycard to a local wallet account +3. Send native tokens from a Keycard account to a foreign account ```bash -# Note: non-keycard account ID below — replace with actual account ID or use --to-label wallet auth-transfer send \ - --from-key-path "m/44'/60'/0'/0/0" \ - --to "Public/9bKmZ4n7PqVRxEtY3dWsQjA2cHrFT5LpDoGXM8wJuNv6" \ + --from "m/44'/60'/0'/0/0" \ + --to "Public/9bKmZ4n7PqVRxEtY3dWsQjA2cHrFT5LpDoGXM8wJuNv6" \ --amount 20 # Output: Keycard PIN: Transaction hash is 3e7b2a91cf804d56fe19084b3c8b25d07e8f243829bc50addf6e2c78b4b09d34 +``` + +4. 
Send native tokens from a Keycard account to a local wallet account by label +```bash +wallet auth-transfer send \ + --from "m/44'/60'/0'/0/0" \ + --to my-account \ + --amount 20 + +# Output: +Keycard PIN: +Transaction hash is 7d4c1b8e2f903a56fd19084b3c8b25d07e8f243829bc50addf6e2c78b4b09e45 ``` \ No newline at end of file diff --git a/examples/program_deployment/src/bin/run_hello_world_with_authorization.rs b/examples/program_deployment/src/bin/run_hello_world_with_authorization.rs index a9750bce..18b4ba80 100644 --- a/examples/program_deployment/src/bin/run_hello_world_with_authorization.rs +++ b/examples/program_deployment/src/bin/run_hello_world_with_authorization.rs @@ -50,8 +50,8 @@ async fn main() { // Load signing keys to provide authorization let signing_key = wallet_core .storage() - .user_data - .get_pub_account_signing_key(account_id) + .key_chain() + .pub_account_signing_key(account_id) .expect("Input account should be a self owned public account"); // Define the desired greeting in ASCII diff --git a/explorer_service/src/api.rs b/explorer_service/src/api.rs index 8c2a0e36..5984a636 100644 --- a/explorer_service/src/api.rs +++ b/explorer_service/src/api.rs @@ -86,7 +86,7 @@ pub async fn get_block_by_id(block_id: BlockId) -> Result /// Get latest block ID #[server] -pub async fn get_latest_block_id() -> Result { +pub async fn get_latest_block_id() -> Result, ServerFnError> { use indexer_service_rpc::RpcClient as _; let client = expect_context::(); client diff --git a/flake.nix b/flake.nix index 0b6ff35f..b4adfe71 100644 --- a/flake.nix +++ b/flake.nix @@ -2,7 +2,9 @@ description = "Logos Execution Zone"; inputs = { - nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable"; + logos-liblogos.url = "github:logos-co/logos-liblogos"; + + nixpkgs.follows = "logos-liblogos/nixpkgs"; rust-overlay = { url = "github:oxalica/rust-overlay"; @@ -139,7 +141,7 @@ cargoExtraArgs = "-p indexer_ffi"; postInstall = '' mkdir -p $out/include - cp indexer_ffi/indexer_ffi.h 
$out/include/ + cp indexer/ffi/indexer_ffi.h $out/include/ '' + pkgs.lib.optionalString pkgs.stdenv.isDarwin '' install_name_tool -id @rpath/libindexer_ffi.dylib $out/lib/libindexer_ffi.dylib diff --git a/indexer/core/Cargo.toml b/indexer/core/Cargo.toml index 33fe2d9d..6c7ad01f 100644 --- a/indexer/core/Cargo.toml +++ b/indexer/core/Cargo.toml @@ -9,7 +9,7 @@ workspace = true [dependencies] common.workspace = true -bedrock_client.workspace = true +logos-blockchain-zone-sdk.workspace = true nssa.workspace = true nssa_core.workspace = true storage.workspace = true @@ -19,13 +19,14 @@ anyhow.workspace = true log.workspace = true serde.workspace = true humantime-serde.workspace = true -tokio.workspace = true borsh.workspace = true futures.workspace = true url.workspace = true logos-blockchain-core.workspace = true serde_json.workspace = true async-stream.workspace = true +tokio.workspace = true [dev-dependencies] tempfile.workspace = true +authenticated_transfer_core.workspace = true diff --git a/indexer/core/src/block_store.rs b/indexer/core/src/block_store.rs index cff07b0f..b66b778f 100644 --- a/indexer/core/src/block_store.rs +++ b/indexer/core/src/block_store.rs @@ -1,11 +1,13 @@ use std::{path::Path, sync::Arc}; -use anyhow::Result; -use bedrock_client::HeaderId; +use anyhow::{Context as _, Result}; use common::{ block::{BedrockStatus, Block}, transaction::{NSSATransaction, clock_invocation}, }; +use log::info; +use logos_blockchain_core::{header::HeaderId, mantle::ops::channel::MsgId}; +use logos_blockchain_zone_sdk::Slot; use nssa::{Account, AccountId, V03State}; use nssa_core::BlockId; use storage::indexer::RocksDBIO; @@ -20,14 +22,10 @@ pub struct IndexerStore { impl IndexerStore { /// Starting database at the start of new chain. /// Creates files if necessary. - /// - /// ATTENTION: Will overwrite genesis block. 
- pub fn open_db_with_genesis( - location: &Path, - genesis_block: &Block, - initial_state: &V03State, - ) -> Result { - let dbio = RocksDBIO::open_or_create(location, genesis_block, initial_state)?; + pub fn open_db(location: &Path) -> Result { + let initial_state = testnet_initial_state::initial_state(); + let dbio = RocksDBIO::open_or_create(location, &initial_state)?; + let current_state = dbio.final_state()?; Ok(Self { @@ -43,8 +41,8 @@ impl IndexerStore { .map(HeaderId::from)) } - pub fn get_last_block_id(&self) -> Result { - Ok(self.dbio.get_meta_last_block_in_db()?) + pub fn get_last_block_id(&self) -> Result> { + self.dbio.get_meta_last_block_id_in_db().map_err(Into::into) } pub fn get_block_at_id(&self, id: u64) -> Result> { @@ -85,24 +83,36 @@ impl IndexerStore { Ok(self.dbio.get_acc_transactions(acc_id, offset, limit)?) } - #[must_use] - pub fn genesis_id(&self) -> u64 { + pub fn genesis_id(&self) -> Result> { self.dbio - .get_meta_first_block_in_db() - .expect("Must be set at the DB startup") + .get_meta_first_block_id_in_db() + .map_err(Into::into) } - #[must_use] - pub fn last_block(&self) -> u64 { - self.dbio - .get_meta_last_block_in_db() - .expect("Must be set at the DB startup") + pub fn last_block(&self) -> Result> { + self.dbio.get_meta_last_block_id_in_db().map_err(Into::into) } pub fn get_state_at_block(&self, block_id: u64) -> Result { Ok(self.dbio.calculate_state_for_id(block_id)?) } + pub fn get_zone_cursor(&self) -> Result> { + let Some(bytes) = self.dbio.get_zone_sdk_indexer_cursor_bytes()? 
else { + return Ok(None); + }; + let cursor: (MsgId, Slot) = serde_json::from_slice(&bytes) + .context("Failed to deserialize stored zone-sdk indexer cursor")?; + Ok(Some(cursor)) + } + + pub fn set_zone_cursor(&self, cursor: &(MsgId, Slot)) -> Result<()> { + let bytes = + serde_json::to_vec(cursor).context("Failed to serialize zone-sdk indexer cursor")?; + self.dbio.put_zone_sdk_indexer_cursor_bytes(&bytes)?; + Ok(()) + } + /// Recalculation of final state directly from DB. /// /// Used for indexer healthcheck. @@ -118,7 +128,14 @@ impl IndexerStore { .get_account_by_id(*account_id)) } + pub fn account_state_at_block(&self, account_id: &AccountId, block_id: u64) -> Result { + Ok(self + .get_state_at_block(block_id)? + .get_account_by_id(*account_id)) + } + pub async fn put_block(&self, mut block: Block, l1_header: HeaderId) -> Result<()> { + info!("Applying block {}", block.header.block_id); { let mut state_guard = self.current_state.write().await; @@ -133,15 +150,33 @@ impl IndexerStore { "Last transaction in block must be the clock invocation for the block timestamp" ); + let is_genesis = block.header.block_id == 1; for transaction in user_txs { - transaction - .clone() - .transaction_stateless_check()? - .execute_check_on_state( - &mut state_guard, - block.header.block_id, - block.header.timestamp, - )?; + if is_genesis { + let genesis_tx = match transaction { + NSSATransaction::Public(public_tx) => public_tx, + NSSATransaction::PrivacyPreserving(_) + | NSSATransaction::ProgramDeployment(_) => { + anyhow::bail!("Genesis block should contain only public transactions") + } + }; + state_guard + .transition_from_public_transaction( + genesis_tx, + block.header.block_id, + block.header.timestamp, + ) + .context("Failed to execute genesis public transaction")?; + } else { + transaction + .clone() + .transaction_stateless_check()? 
+ .execute_check_on_state( + &mut state_guard, + block.header.block_id, + block.header.timestamp, + )?; + } } // Apply the clock invocation directly (it is expected to modify clock accounts). @@ -160,104 +195,131 @@ impl IndexerStore { // to represent correct block finality block.bedrock_status = BedrockStatus::Finalized; + info!("Putting block {} into DB", block.header.block_id); Ok(self.dbio.put_block(&block, l1_header.into())?) } } #[cfg(test)] mod tests { - use nssa::{AccountId, PublicKey}; + use common::{HashType, block::HashableBlockData}; use tempfile::tempdir; + use testnet_initial_state::initial_pub_accounts_private_keys; use super::*; - fn genesis_block() -> Block { - common::test_utils::produce_dummy_block(1, None, vec![]) - } - - fn acc1_sign_key() -> nssa::PrivateKey { - nssa::PrivateKey::try_new([1; 32]).unwrap() - } - - fn acc2_sign_key() -> nssa::PrivateKey { - nssa::PrivateKey::try_new([2; 32]).unwrap() - } - - fn acc1() -> AccountId { - AccountId::from(&PublicKey::new_from_private_key(&acc1_sign_key())) - } - - fn acc2() -> AccountId { - AccountId::from(&PublicKey::new_from_private_key(&acc2_sign_key())) - } - #[test] fn correct_startup() { let home = tempdir().unwrap(); - let storage = IndexerStore::open_db_with_genesis( - home.as_ref(), - &genesis_block(), - &nssa::V03State::new_with_genesis_accounts( - &[(acc1(), 10000), (acc2(), 20000)], - vec![], - 0, - ), - ) - .unwrap(); + let storage = IndexerStore::open_db(home.as_ref()).unwrap(); - let block = storage.get_block_at_id(1).unwrap().unwrap(); let final_id = storage.get_last_block_id().unwrap(); - assert_eq!(block.header.hash, genesis_block().header.hash); - assert_eq!(final_id, 1); + assert_eq!(final_id, None); } #[tokio::test] async fn state_transition() { let home = tempdir().unwrap(); - let storage = IndexerStore::open_db_with_genesis( - home.as_ref(), - &genesis_block(), - &nssa::V03State::new_with_genesis_accounts( - &[(acc1(), 10000), (acc2(), 20000)], - vec![], - 0, - ), - ) - 
.unwrap(); + let storage = IndexerStore::open_db(home.as_ref()).unwrap(); - let mut prev_hash = genesis_block().header.hash; + let initial_accounts = initial_pub_accounts_private_keys(); + let from = initial_accounts[0].account_id; + let to = initial_accounts[1].account_id; + let sign_key = initial_accounts[0].pub_sign_key.clone(); - let from = acc1(); - let to = acc2(); - let sign_key = acc1_sign_key(); + // Submit genesis block + let clock_tx = NSSATransaction::Public(clock_invocation(0)); + let genesis_block_data = HashableBlockData { + block_id: 1, + prev_block_hash: HashType::default(), + timestamp: 0, + transactions: vec![clock_tx], + }; + let genesis_block = genesis_block_data.into_pending_block( + &common::test_utils::sequencer_sign_key_for_testing(), + [0; 32], + ); + let mut prev_hash = Some(genesis_block.header.hash); + storage + .put_block(genesis_block, HeaderId::from([0_u8; 32])) + .await + .unwrap(); - for i in 2..10 { + for i in 0..10 { let tx = common::test_utils::create_transaction_native_token_transfer( - from, - i - 2, - to, - 10, - &sign_key, + from, i, to, 10, &sign_key, ); - let block_id = u64::try_from(i).unwrap(); + let block_id = u64::try_from(i + 1).unwrap(); - let next_block = - common::test_utils::produce_dummy_block(block_id, Some(prev_hash), vec![tx]); - prev_hash = next_block.header.hash; + let next_block = common::test_utils::produce_dummy_block(block_id, prev_hash, vec![tx]); + prev_hash = Some(next_block.header.hash); storage - .put_block(next_block, HeaderId::from([u8::try_from(i).unwrap(); 32])) + .put_block( + next_block, + HeaderId::from([u8::try_from(i + 1).unwrap(); 32]), + ) .await .unwrap(); } - let acc1_val = storage.account_current_state(&acc1()).await.unwrap(); - let acc2_val = storage.account_current_state(&acc2()).await.unwrap(); + let acc1_val = storage.account_current_state(&from).await.unwrap(); + let acc2_val = storage.account_current_state(&to).await.unwrap(); - assert_eq!(acc1_val.balance, 9920); - 
assert_eq!(acc2_val.balance, 20080);
+        assert_eq!(acc1_val.balance, 9900);
+        assert_eq!(acc2_val.balance, 20100);
+    }
+
+    #[tokio::test]
+    async fn account_state_at_block() {
+        let home = tempdir().unwrap();
+
+        let storage = IndexerStore::open_db(home.as_ref()).unwrap();
+
+        let mut prev_hash = None;
+
+        let initial_accounts = initial_pub_accounts_private_keys();
+        let from = initial_accounts[0].account_id;
+        let to = initial_accounts[1].account_id;
+        let sign_key = initial_accounts[0].pub_sign_key.clone();
+
+        for i in 0..10 {
+            let tx = common::test_utils::create_transaction_native_token_transfer(
+                from, i, to, 10, &sign_key,
+            );
+            let block_id = u64::try_from(i + 1).unwrap();
+
+            let next_block = common::test_utils::produce_dummy_block(block_id, prev_hash, vec![tx]);
+            prev_hash = Some(next_block.header.hash);
+
+            storage
+                .put_block(
+                    next_block,
+                    HeaderId::from([u8::try_from(i + 1).unwrap(); 32]),
+                )
+                .await
+                .unwrap();
+        }
+
+        // After block 1: one transfer of 10 applied (each of blocks 1..=10 carries one transfer).
+        let acc1_at_1 = storage.account_state_at_block(&from, 1).unwrap();
+        let acc2_at_1 = storage.account_state_at_block(&to, 1).unwrap();
+        assert_eq!(acc1_at_1.balance, 9990);
+        assert_eq!(acc2_at_1.balance, 20010);
+
+        // After block 5: 5 transfers of 10 applied (one each in blocks 1..=5).
+        let acc1_at_5 = storage.account_state_at_block(&from, 5).unwrap();
+        let acc2_at_5 = storage.account_state_at_block(&to, 5).unwrap();
+        assert_eq!(acc1_at_5.balance, 9950);
+        assert_eq!(acc2_at_5.balance, 20050);
+
+        // After block 9: 9 transfers applied; the final block is 10, which brings current state to 9900/20100.
+ let acc1_at_9 = storage.account_state_at_block(&from, 9).unwrap(); + let acc2_at_9 = storage.account_state_at_block(&to, 9).unwrap(); + assert_eq!(acc1_at_9.balance, 9910); + assert_eq!(acc2_at_9.balance, 20090); } } diff --git a/indexer/core/src/config.rs b/indexer/core/src/config.rs index 291e54f5..6a019828 100644 --- a/indexer/core/src/config.rs +++ b/indexer/core/src/config.rs @@ -6,18 +6,14 @@ use std::{ }; use anyhow::{Context as _, Result}; -pub use bedrock_client::BackoffConfig; use common::config::BasicAuth; use humantime_serde; pub use logos_blockchain_core::mantle::ops::channel::ChannelId; use serde::{Deserialize, Serialize}; -use testnet_initial_state::{PrivateAccountPublicInitialData, PublicAccountPublicInitialData}; use url::Url; #[derive(Debug, Clone, Serialize, Deserialize)] pub struct ClientConfig { - /// For individual RPC requests we use Fibonacci backoff retry strategy. - pub backoff: BackoffConfig, pub addr: Url, #[serde(default, skip_serializing_if = "Option::is_none")] pub auth: Option, @@ -25,18 +21,12 @@ pub struct ClientConfig { #[derive(Debug, Clone, Serialize, Deserialize)] pub struct IndexerConfig { - /// Home dir of sequencer storage. + /// Home dir of indexer storage. pub home: PathBuf, - /// Sequencers signing key. 
- pub signing_key: [u8; 32], #[serde(with = "humantime_serde")] pub consensus_info_polling_interval: Duration, - pub bedrock_client_config: ClientConfig, + pub bedrock_config: ClientConfig, pub channel_id: ChannelId, - #[serde(skip_serializing_if = "Option::is_none")] - pub initial_public_accounts: Option>, - #[serde(skip_serializing_if = "Option::is_none")] - pub initial_private_accounts: Option>, } impl IndexerConfig { diff --git a/indexer/core/src/lib.rs b/indexer/core/src/lib.rs index 44f0dc19..400d0a9d 100644 --- a/indexer/core/src/lib.rs +++ b/indexer/core/src/lib.rs @@ -1,18 +1,14 @@ -use std::collections::VecDeque; +use std::sync::Arc; use anyhow::Result; -use bedrock_client::{BedrockClient, HeaderId}; -use common::{ - HashType, PINATA_BASE58, - block::{Block, HashableBlockData}, +use common::block::Block; +// ToDo: Remove after testnet +use futures::StreamExt as _; +use log::{error, info, warn}; +use logos_blockchain_core::header::HeaderId; +use logos_blockchain_zone_sdk::{ + CommonHttpClient, ZoneMessage, adapter::NodeHttpClient, indexer::ZoneIndexer, }; -use log::{debug, error, info}; -use logos_blockchain_core::mantle::{ - Op, SignedMantleTx, - ops::channel::{ChannelId, inscribe::InscriptionOp}, -}; -use nssa::V03State; -use testnet_initial_state::initial_state_testnet; use crate::{block_store::IndexerStore, config::IndexerConfig}; @@ -21,365 +17,97 @@ pub mod config; #[derive(Clone)] pub struct IndexerCore { - pub bedrock_client: BedrockClient, + pub zone_indexer: Arc>, pub config: IndexerConfig, pub store: IndexerStore, } -#[derive(Clone)] -/// This struct represents one L1 block data fetched from backfilling. -pub struct BackfillBlockData { - l2_blocks: Vec, - l1_header: HeaderId, -} - -#[derive(Clone)] -/// This struct represents data fetched fom backfilling in one iteration. 
-pub struct BackfillData { - block_data: VecDeque, - curr_fin_l1_lib_header: HeaderId, -} - impl IndexerCore { pub fn new(config: IndexerConfig) -> Result { - let hashable_data = HashableBlockData { - block_id: 1, - transactions: vec![], - prev_block_hash: HashType([0; 32]), - timestamp: 0, - }; - - // Genesis creation is fine as it is, - // because it will be overwritten by sequencer. - // Therefore: - // ToDo: remove key from indexer config, use some default. - let signing_key = nssa::PrivateKey::try_new(config.signing_key).unwrap(); - let channel_genesis_msg_id = [0; 32]; - let genesis_block = hashable_data.into_pending_block(&signing_key, channel_genesis_msg_id); - - let initial_private_accounts: Option> = - config.initial_private_accounts.as_ref().map(|accounts| { - accounts - .iter() - .map(|init_comm_data| { - let npk = &init_comm_data.npk; - let account_id = nssa::AccountId::from((npk, 0)); - - let mut acc = init_comm_data.account.clone(); - - acc.program_owner = - nssa::program::Program::authenticated_transfer_program().id(); - - ( - nssa_core::Commitment::new(&account_id, &acc), - nssa_core::Nullifier::for_account_initialization(&account_id), - ) - }) - .collect() - }); - - let init_accs: Option> = config - .initial_public_accounts - .as_ref() - .map(|initial_accounts| { - initial_accounts - .iter() - .map(|acc_data| (acc_data.account_id, acc_data.balance)) - .collect() - }); - - // If initial commitments or accounts are present in config, need to construct state from - // them - let state = if initial_private_accounts.is_some() || init_accs.is_some() { - let mut state = V03State::new_with_genesis_accounts( - &init_accs.unwrap_or_default(), - initial_private_accounts.unwrap_or_default(), - genesis_block.header.timestamp, - ); - - // ToDo: Remove after testnet - state.add_pinata_program(PINATA_BASE58.parse().unwrap()); - - state - } else { - initial_state_testnet() - }; - let home = config.home.join("rocksdb"); + let basic_auth = 
config.bedrock_config.auth.clone().map(Into::into); + let node = NodeHttpClient::new( + CommonHttpClient::new(basic_auth), + config.bedrock_config.addr.clone(), + ); + let zone_indexer = ZoneIndexer::new(config.channel_id, node); + Ok(Self { - bedrock_client: BedrockClient::new( - config.bedrock_client_config.backoff, - config.bedrock_client_config.addr.clone(), - config.bedrock_client_config.auth.clone(), - )?, + zone_indexer: Arc::new(zone_indexer), config, - store: IndexerStore::open_db_with_genesis(&home, &genesis_block, &state)?, + store: IndexerStore::open_db(&home)?, }) } - pub fn subscribe_parse_block_stream(&self) -> impl futures::Stream> { + pub fn subscribe_parse_block_stream(&self) -> impl futures::Stream> + '_ { + let poll_interval = self.config.consensus_info_polling_interval; + let initial_cursor = self + .store + .get_zone_cursor() + .expect("Failed to load zone-sdk indexer cursor"); + async_stream::stream! { - info!("Searching for initial header"); + let mut cursor = initial_cursor; - let last_stored_l1_lib_header = self.store.last_observed_l1_lib_header()?; - - let mut prev_last_l1_lib_header = if let Some(last_l1_lib_header) = last_stored_l1_lib_header { - info!("Last l1 lib header found: {last_l1_lib_header}"); - last_l1_lib_header + if cursor.is_some() { + info!("Resuming indexer from cursor {cursor:?}"); } else { - info!("Last l1 lib header not found in DB"); - info!("Searching for the start of a channel"); - - let BackfillData { - block_data: start_buff, - curr_fin_l1_lib_header: last_l1_lib_header, - } = self.search_for_channel_start().await?; - - for BackfillBlockData { - l2_blocks: l2_block_vec, - l1_header, - } in start_buff { - let mut l2_blocks_parsed_ids: Vec<_> = l2_block_vec.iter().map(|block| block.header.block_id).collect(); - l2_blocks_parsed_ids.sort_unstable(); - info!("Parsed {} L2 blocks with ids {:?}", l2_block_vec.len(), l2_blocks_parsed_ids); - - for l2_block in l2_block_vec { - // TODO: proper fix is to make the 
sequencer's genesis include a - // trailing `clock_invocation(0)` (and have the indexer's - // `open_db_with_genesis` not pre-apply state transitions) so the - // inscribed genesis can flow through `put_block` like any other - // block. For now we skip re-applying it. - // - // The channel-start (block_id == 1) is the sequencer's genesis - // inscription that we re-discover during initial search. The - // indexer already has its own locally-constructed genesis in - // the store from `open_db_with_genesis`, so re-applying the - // inscribed copy is both redundant and would fail the strict - // block validation in `put_block` (the inscribed genesis lacks - // the trailing clock invocation). - if l2_block.header.block_id != 1 { - self - .store - .put_block(l2_block.clone(), l1_header) - .await - .inspect_err(|err| error!("Failed to put block with err {err:?}"))?; - } - - yield Ok(l2_block); - } - } - - last_l1_lib_header - }; - - info!("Searching for initial header finished"); - - info!("Starting backfilling from {prev_last_l1_lib_header}"); + info!("Starting indexer from beginning of channel"); + } loop { - let BackfillData { - block_data: buff, - curr_fin_l1_lib_header, - } = self - .backfill_to_last_l1_lib_header_id(prev_last_l1_lib_header, &self.config.channel_id) - .await - .inspect_err(|err| error!("Failed to backfill to last l1 lib header id with err {err:#?}"))?; - - prev_last_l1_lib_header = curr_fin_l1_lib_header; - - for BackfillBlockData { - l2_blocks: l2_block_vec, - l1_header: header, - } in buff { - let mut l2_blocks_parsed_ids: Vec<_> = l2_block_vec.iter().map(|block| block.header.block_id).collect(); - l2_blocks_parsed_ids.sort_unstable(); - info!("Parsed {} L2 blocks with ids {:?}", l2_block_vec.len(), l2_blocks_parsed_ids); - - for l2_block in l2_block_vec { - self.store.put_block(l2_block.clone(), header).await?; - - yield Ok(l2_block); + let stream = match self.zone_indexer.next_messages(cursor).await { + Ok(s) => s, + Err(err) => { + 
error!("Failed to start zone-sdk next_messages stream: {err}"); + tokio::time::sleep(poll_interval).await; + continue; } - } - } - } - } - - async fn get_lib(&self) -> Result { - Ok(self.bedrock_client.get_consensus_info().await?.lib) - } - - async fn get_next_lib(&self, prev_lib: HeaderId) -> Result { - loop { - let next_lib = self.get_lib().await?; - if next_lib == prev_lib { - info!( - "Wait {:?} to not spam the node", - self.config.consensus_info_polling_interval - ); - tokio::time::sleep(self.config.consensus_info_polling_interval).await; - } else { - break Ok(next_lib); - } - } - } - - /// WARNING: depending on channel state, - /// may take indefinite amount of time. - pub async fn search_for_channel_start(&self) -> Result { - let mut curr_last_l1_lib_header = self.get_lib().await?; - let mut backfill_start = curr_last_l1_lib_header; - // ToDo: How to get root? - let mut backfill_limit = HeaderId::from([0; 32]); - // ToDo: Not scalable, initial buffer should be stored in DB to not run out of memory - // Don't want to complicate DB even more right now. - let mut block_buffer = VecDeque::new(); - - 'outer: loop { - let mut cycle_header = curr_last_l1_lib_header; - - loop { - let Some(cycle_block) = self.bedrock_client.get_block_by_id(cycle_header).await? - else { - // First run can reach root easily - // so here we are optimistic about L1 - // failing to get parent. - break; }; + let mut stream = std::pin::pin!(stream); - // It would be better to have id, but block does not have it, so slot will do. 
- info!( - "INITIAL SEARCH: Observed L1 block at slot {}", - cycle_block.header().slot().into_inner() - ); - debug!( - "INITIAL SEARCH: This block header is {}", - cycle_block.header().id() - ); - debug!( - "INITIAL SEARCH: This block parent is {}", - cycle_block.header().parent() - ); + while let Some((msg, slot)) = stream.next().await { + let zone_block = match msg { + ZoneMessage::Block(b) => b, + // Non-block messages don't carry a cursor position; the + // next ZoneBlock advances past them implicitly. + ZoneMessage::Deposit(_) | ZoneMessage::Withdraw(_) => continue, + }; - let (l2_block_vec, l1_header) = - parse_block_owned(&cycle_block, &self.config.channel_id); + let block: Block = match borsh::from_slice(&zone_block.data) { + Ok(b) => b, + Err(e) => { + error!("Failed to deserialize L2 block from zone-sdk: {e}"); + // Advance past the broken inscription so we don't + // re-process it on restart. + cursor = Some((zone_block.id, slot)); + if let Err(err) = self.store.set_zone_cursor(&(zone_block.id, slot)) { + warn!("Failed to persist indexer cursor: {err:#}"); + } + continue; + } + }; - info!("Parsed {} L2 blocks", l2_block_vec.len()); + info!("Indexed L2 block {}", block.header.block_id); - if !l2_block_vec.is_empty() { - block_buffer.push_front(BackfillBlockData { - l2_blocks: l2_block_vec.clone(), - l1_header, - }); - } - - if let Some(first_l2_block) = l2_block_vec.first() - && first_l2_block.header.block_id == 1 - { - info!("INITIAL_SEARCH: Found channel start"); - break 'outer; - } - - // Step back to parent - let parent = cycle_block.header().parent(); - - if parent == backfill_limit { - break; - } - - cycle_header = parent; - } - - info!("INITIAL_SEARCH: Reached backfill limit, refetching last l1 lib header"); - - block_buffer.clear(); - backfill_limit = backfill_start; - curr_last_l1_lib_header = self.get_next_lib(curr_last_l1_lib_header).await?; - backfill_start = curr_last_l1_lib_header; - } - - Ok(BackfillData { - block_data: block_buffer, - 
curr_fin_l1_lib_header: curr_last_l1_lib_header, - }) - } - - pub async fn backfill_to_last_l1_lib_header_id( - &self, - last_fin_l1_lib_header: HeaderId, - channel_id: &ChannelId, - ) -> Result { - let curr_fin_l1_lib_header = self.get_next_lib(last_fin_l1_lib_header).await?; - // ToDo: Not scalable, buffer should be stored in DB to not run out of memory - // Don't want to complicate DB even more right now. - let mut block_buffer = VecDeque::new(); - - let mut cycle_header = curr_fin_l1_lib_header; - loop { - let Some(cycle_block) = self.bedrock_client.get_block_by_id(cycle_header).await? else { - return Err(anyhow::anyhow!("Parent not found")); - }; - - if cycle_block.header().id() == last_fin_l1_lib_header { - break; - } - // Step back to parent - cycle_header = cycle_block.header().parent(); - - // It would be better to have id, but block does not have it, so slot will do. - info!( - "Observed L1 block at slot {}", - cycle_block.header().slot().into_inner() - ); - - let (l2_block_vec, l1_header) = parse_block_owned(&cycle_block, channel_id); - - info!("Parsed {} L2 blocks", l2_block_vec.len()); - - if !l2_block_vec.is_empty() { - block_buffer.push_front(BackfillBlockData { - l2_blocks: l2_block_vec, - l1_header, - }); - } - } - - Ok(BackfillData { - block_data: block_buffer, - curr_fin_l1_lib_header, - }) - } -} - -fn parse_block_owned( - l1_block: &bedrock_client::Block, - decoded_channel_id: &ChannelId, -) -> (Vec, HeaderId) { - ( - #[expect( - clippy::wildcard_enum_match_arm, - reason = "We are only interested in channel inscription ops, so it's fine to ignore the rest" - )] - l1_block - .transactions() - .flat_map(|tx| { - tx.mantle_tx.ops.iter().filter_map(|op| match op { - Op::ChannelInscribe(InscriptionOp { - channel_id, - inscription, - .. 
- }) if channel_id == decoded_channel_id => { - borsh::from_slice::(inscription) - .inspect_err(|err| { - error!("Failed to deserialize our inscription with err: {err:#?}"); - }) - .ok() + // TODO: Remove l1_header placeholder once storage layer + // no longer requires it. Zone-sdk handles L1 tracking internally. + let placeholder_l1_header = HeaderId::from([0_u8; 32]); + if let Err(err) = self.store.put_block(block.clone(), placeholder_l1_header).await { + error!("Failed to store block {}: {err:#}", block.header.block_id); } - _ => None, - }) - }) - .collect(), - l1_block.header().id(), - ) + + cursor = Some((zone_block.id, slot)); + if let Err(err) = self.store.set_zone_cursor(&(zone_block.id, slot)) { + warn!("Failed to persist indexer cursor: {err:#}"); + } + yield Ok(block); + } + + // Stream ended (caught up to LIB). Sleep then poll again. + tokio::time::sleep(poll_interval).await; + } + } + } } diff --git a/indexer/ffi/Cargo.toml b/indexer/ffi/Cargo.toml new file mode 100644 index 00000000..1e6b1468 --- /dev/null +++ b/indexer/ffi/Cargo.toml @@ -0,0 +1,32 @@ +[package] +edition = "2024" +license = { workspace = true } +name = "indexer_ffi" +version = "0.1.0" + +[dependencies] +nssa.workspace = true +indexer_service.workspace = true +indexer_service_rpc = { workspace = true, features = ["client"] } +indexer_service_protocol.workspace = true + +url.workspace = true +log = { workspace = true } +tokio = { features = ["rt-multi-thread"], workspace = true } +jsonrpsee.workspace = true +anyhow.workspace = true + +[build-dependencies] +cbindgen = "0.29" + +[lib] +crate-type = ["rlib", "cdylib", "staticlib"] +name = "indexer_ffi" + +[lints] +workspace = true + +[package.metadata.cargo-machete] +ignored = [ + "cbindgen", +] # machete does not recognize this for build dep and complains. 
diff --git a/indexer/ffi/build.rs b/indexer/ffi/build.rs new file mode 100644 index 00000000..92c95407 --- /dev/null +++ b/indexer/ffi/build.rs @@ -0,0 +1,12 @@ +use std::env; + +fn main() { + let crate_dir = env::var("CARGO_MANIFEST_DIR").unwrap(); + println!("cargo:rerun-if-changed=src/"); + cbindgen::Builder::new() + .with_crate(crate_dir) + .with_language(cbindgen::Language::C) + .generate() + .expect("Unable to generate bindings") + .write_to_file("indexer_ffi.h"); +} diff --git a/indexer/ffi/cbindgen.toml b/indexer/ffi/cbindgen.toml new file mode 100644 index 00000000..79f622b7 --- /dev/null +++ b/indexer/ffi/cbindgen.toml @@ -0,0 +1,2 @@ +language = "C" # For increased compatibility +no_includes = true diff --git a/indexer/ffi/indexer_ffi.h b/indexer/ffi/indexer_ffi.h new file mode 100644 index 00000000..b2ba41bf --- /dev/null +++ b/indexer/ffi/indexer_ffi.h @@ -0,0 +1,752 @@ +#include +#include +#include +#include + +typedef enum OperationStatus { + Ok = 0, + NullPointer = 1, + InitializationError = 2, + ClientError = 3, +} OperationStatus; + +typedef enum FfiTransactionKind { + Public = 0, + Private, + ProgramDeploy, +} FfiTransactionKind; + +typedef enum FfiBedrockStatus { + Pending = 0, + Safe, + Finalized, +} FfiBedrockStatus; + +typedef struct Option_u64 Option_u64; + +typedef struct IndexerServiceFFI { + void *indexer_handle; + void *indexer_client; +} IndexerServiceFFI; + +/** + * Simple wrapper around a pointer to a value or an error. + * + * Pointer is not guaranteed. You should check the error field before + * dereferencing the pointer. 
+ */ +typedef struct PointerResult_IndexerServiceFFI__OperationStatus { + struct IndexerServiceFFI *value; + enum OperationStatus error; +} PointerResult_IndexerServiceFFI__OperationStatus; + +typedef struct PointerResult_IndexerServiceFFI__OperationStatus InitializedIndexerServiceFFIResult; + +typedef enum PointerKind_Tag { + Owned, + Borrowed, + Null, +} PointerKind_Tag; + +typedef struct PointerKind { + PointerKind_Tag tag; + union { + struct { + void *owned; + }; + struct { + const void *borrowed; + }; + }; +} PointerKind; + +typedef struct Pointer_Runtime { + struct PointerKind kind; +} Pointer_Runtime; + +/** + * Wrapper around [`tokio::runtime::Runtime`] that can be safely passed across the FFI boundary. + */ +typedef struct Runtime { + struct Pointer_Runtime inner; +} Runtime; + +/** + * Simple wrapper around a pointer to a value or an error. + * + * Pointer is not guaranteed. You should check the error field before + * dereferencing the pointer. + */ +typedef struct PointerResult_Runtime__OperationStatus { + struct Runtime *value; + enum OperationStatus error; +} PointerResult_Runtime__OperationStatus; + +/** + * Simple wrapper around a pointer to a value or an error. + * + * Pointer is not guaranteed. You should check the error field before + * dereferencing the pointer. + */ +typedef struct PointerResult_Option_u64_____OperationStatus { + struct Option_u64 *value; + enum OperationStatus error; +} PointerResult_Option_u64_____OperationStatus; + +typedef uint64_t FfiBlockId; + +/** + * 32-byte array type for `AccountId`, keys, hashes, etc. + */ +typedef struct FfiBytes32 { + uint8_t data[32]; +} FfiBytes32; + +typedef struct FfiBytes32 FfiHashType; + +typedef uint64_t FfiTimestamp; + +/** + * 64-byte array type for signatures, etc. 
+ */ +typedef struct FfiBytes64 { + uint8_t data[64]; +} FfiBytes64; + +typedef struct FfiBytes64 FfiSignature; + +typedef struct FfiBlockHeader { + FfiBlockId block_id; + FfiHashType prev_block_hash; + FfiHashType hash; + FfiTimestamp timestamp; + FfiSignature signature; +} FfiBlockHeader; + +/** + * Program ID - 8 u32 values (32 bytes total). + */ +typedef struct FfiProgramId { + uint32_t data[8]; +} FfiProgramId; + +typedef struct FfiBytes32 FfiAccountId; + +typedef struct FfiVec_FfiAccountId { + FfiAccountId *entries; + uintptr_t len; + uintptr_t capacity; +} FfiVec_FfiAccountId; + +typedef struct FfiVec_FfiAccountId FfiAccountIdList; + +/** + * U128 - 16 bytes little endian. + */ +typedef struct FfiU128 { + uint8_t data[16]; +} FfiU128; + +typedef struct FfiU128 FfiNonce; + +typedef struct FfiVec_FfiNonce { + FfiNonce *entries; + uintptr_t len; + uintptr_t capacity; +} FfiVec_FfiNonce; + +typedef struct FfiVec_FfiNonce FfiNonceList; + +typedef struct FfiVec_u32 { + uint32_t *entries; + uintptr_t len; + uintptr_t capacity; +} FfiVec_u32; + +typedef struct FfiVec_u32 FfiInstructionDataList; + +typedef struct FfiPublicMessage { + struct FfiProgramId program_id; + FfiAccountIdList account_ids; + FfiNonceList nonces; + FfiInstructionDataList instruction_data; +} FfiPublicMessage; + +typedef struct FfiBytes32 FfiPublicKey; + +typedef struct FfiSignaturePubKeyEntry { + FfiSignature signature; + FfiPublicKey public_key; +} FfiSignaturePubKeyEntry; + +typedef struct FfiVec_FfiSignaturePubKeyEntry { + struct FfiSignaturePubKeyEntry *entries; + uintptr_t len; + uintptr_t capacity; +} FfiVec_FfiSignaturePubKeyEntry; + +typedef struct FfiVec_FfiSignaturePubKeyEntry FfiSignaturePubKeyList; + +typedef struct FfiPublicTransactionBody { + FfiHashType hash; + struct FfiPublicMessage message; + FfiSignaturePubKeyList witness_set; +} FfiPublicTransactionBody; + +/** + * Account data structure - C-compatible version of nssa Account. 
+ * + * Note: `balance` and `nonce` are u128 values represented as little-endian + * byte arrays since C doesn't have native u128 support. + */ +typedef struct FfiAccount { + struct FfiProgramId program_owner; + /** + * Balance as little-endian [u8; 16]. + */ + struct FfiU128 balance; + /** + * Pointer to account data bytes. + */ + uint8_t *data; + /** + * Length of account data. + */ + uintptr_t data_len; + /** + * Capacity of account data. + */ + uintptr_t data_cap; + /** + * Nonce as little-endian [u8; 16]. + */ + struct FfiU128 nonce; +} FfiAccount; + +typedef struct FfiVec_FfiAccount { + struct FfiAccount *entries; + uintptr_t len; + uintptr_t capacity; +} FfiVec_FfiAccount; + +typedef struct FfiVec_FfiAccount FfiAccountList; + +typedef struct FfiVec_u8 { + uint8_t *entries; + uintptr_t len; + uintptr_t capacity; +} FfiVec_u8; + +typedef struct FfiVec_u8 FfiVecU8; + +typedef struct FfiEncryptedAccountData { + FfiVecU8 ciphertext; + FfiVecU8 epk; + uint8_t view_tag; +} FfiEncryptedAccountData; + +typedef struct FfiVec_FfiEncryptedAccountData { + struct FfiEncryptedAccountData *entries; + uintptr_t len; + uintptr_t capacity; +} FfiVec_FfiEncryptedAccountData; + +typedef struct FfiVec_FfiEncryptedAccountData FfiEncryptedAccountDataList; + +typedef struct FfiVec_FfiBytes32 { + struct FfiBytes32 *entries; + uintptr_t len; + uintptr_t capacity; +} FfiVec_FfiBytes32; + +typedef struct FfiVec_FfiBytes32 FfiVecBytes32; + +typedef struct FfiNullifierCommitmentSet { + struct FfiBytes32 nullifier; + struct FfiBytes32 commitment_set_digest; +} FfiNullifierCommitmentSet; + +typedef struct FfiVec_FfiNullifierCommitmentSet { + struct FfiNullifierCommitmentSet *entries; + uintptr_t len; + uintptr_t capacity; +} FfiVec_FfiNullifierCommitmentSet; + +typedef struct FfiVec_FfiNullifierCommitmentSet FfiNullifierCommitmentSetList; + +typedef struct FfiPrivacyPreservingMessage { + FfiAccountIdList public_account_ids; + FfiNonceList nonces; + FfiAccountList public_post_states; + 
FfiEncryptedAccountDataList encrypted_private_post_states; + FfiVecBytes32 new_commitments; + FfiNullifierCommitmentSetList new_nullifiers; + uint64_t block_validity_window[2]; + uint64_t timestamp_validity_window[2]; +} FfiPrivacyPreservingMessage; + +typedef FfiVecU8 FfiProof; + +typedef struct FfiPrivateTransactionBody { + FfiHashType hash; + struct FfiPrivacyPreservingMessage message; + FfiSignaturePubKeyList witness_set; + FfiProof proof; +} FfiPrivateTransactionBody; + +typedef FfiVecU8 FfiProgramDeploymentMessage; + +typedef struct FfiProgramDeploymentTransactionBody { + FfiHashType hash; + FfiProgramDeploymentMessage message; +} FfiProgramDeploymentTransactionBody; + +typedef struct FfiTransactionBody { + struct FfiPublicTransactionBody *public_body; + struct FfiPrivateTransactionBody *private_body; + struct FfiProgramDeploymentTransactionBody *program_deployment_body; +} FfiTransactionBody; + +typedef struct FfiTransaction { + struct FfiTransactionBody body; + enum FfiTransactionKind kind; +} FfiTransaction; + +typedef struct FfiVec_FfiTransaction { + struct FfiTransaction *entries; + uintptr_t len; + uintptr_t capacity; +} FfiVec_FfiTransaction; + +typedef struct FfiVec_FfiTransaction FfiBlockBody; + +typedef struct FfiBytes32 FfiMsgId; + +typedef struct FfiBlock { + struct FfiBlockHeader header; + FfiBlockBody body; + enum FfiBedrockStatus bedrock_status; + FfiMsgId bedrock_parent_id; +} FfiBlock; + +typedef struct FfiOption_FfiBlock { + struct FfiBlock *value; + bool is_some; +} FfiOption_FfiBlock; + +typedef struct FfiOption_FfiBlock FfiBlockOpt; + +/** + * Simple wrapper around a pointer to a value or an error. + * + * Pointer is not guaranteed. You should check the error field before + * dereferencing the pointer. + */ +typedef struct PointerResult_FfiBlockOpt__OperationStatus { + FfiBlockOpt *value; + enum OperationStatus error; +} PointerResult_FfiBlockOpt__OperationStatus; + +/** + * Simple wrapper around a pointer to a value or an error. 
+ * + * Pointer is not guaranteed. You should check the error field before + * dereferencing the pointer. + */ +typedef struct PointerResult_FfiAccount__OperationStatus { + struct FfiAccount *value; + enum OperationStatus error; +} PointerResult_FfiAccount__OperationStatus; + +typedef struct FfiOption_FfiTransaction { + struct FfiTransaction *value; + bool is_some; +} FfiOption_FfiTransaction; + +/** + * Simple wrapper around a pointer to a value or an error. + * + * Pointer is not guaranteed. You should check the error field before + * dereferencing the pointer. + */ +typedef struct PointerResult_FfiOption_FfiTransaction_____OperationStatus { + struct FfiOption_FfiTransaction *value; + enum OperationStatus error; +} PointerResult_FfiOption_FfiTransaction_____OperationStatus; + +typedef struct FfiVec_FfiBlock { + struct FfiBlock *entries; + uintptr_t len; + uintptr_t capacity; +} FfiVec_FfiBlock; + +/** + * Simple wrapper around a pointer to a value or an error. + * + * Pointer is not guaranteed. You should check the error field before + * dereferencing the pointer. + */ +typedef struct PointerResult_FfiVec_FfiBlock_____OperationStatus { + struct FfiVec_FfiBlock *value; + enum OperationStatus error; +} PointerResult_FfiVec_FfiBlock_____OperationStatus; + +typedef struct FfiOption_u64 { + uint64_t *value; + bool is_some; +} FfiOption_u64; + +/** + * Simple wrapper around a pointer to a value or an error. + * + * Pointer is not guaranteed. You should check the error field before + * dereferencing the pointer. + */ +typedef struct PointerResult_FfiVec_FfiTransaction_____OperationStatus { + struct FfiVec_FfiTransaction *value; + enum OperationStatus error; +} PointerResult_FfiVec_FfiTransaction_____OperationStatus; + +/** + * Creates and starts an indexer based on the provided + * configuration file path. + * + * # Arguments + * + * - `config_path`: A pointer to a string representing the path to the configuration file. 
+ * - `port`: Number representing a port, on which indexers RPC will start. + * + * # Returns + * + * An `InitializedIndexerServiceFFIResult` containing either a pointer to the + * initialized `IndexerServiceFFI` or an error code. + * + * # Safety + * The caller must ensure that: + * - `runtime` is a valid pointer to a `tokio::runtime::Runtime` instance. + * - `config_path` is a valid pointer to a null-terminated C string. + */ +InitializedIndexerServiceFFIResult start_indexer(const struct Runtime *runtime, + const char *config_path, + uint16_t port); + +/** + * Creates a new [`tokio::runtime::Runtime`]. + */ +struct PointerResult_Runtime__OperationStatus new_runtime(void); + +/** + * Stops and frees the resources associated with the given indexer service. + * + * # Arguments + * + * - `indexer`: A pointer to the `IndexerServiceFFI` instance to be stopped. + * + * # Returns + * + * An `OperationStatus` indicating success or failure. + * + * # Safety + * + * The caller must ensure that: + * - `indexer` is a valid pointer to a `IndexerServiceFFI` instance + * - The `IndexerServiceFFI` instance was created by this library + * - The pointer will not be used after this function returns + */ +enum OperationStatus stop_indexer(struct IndexerServiceFFI *indexer); + +/** + * # Safety + * It's up to the caller to pass a proper pointer, if somehow from c/c++ side + * this is called with a type which doesn't come from a returned `CString` it + * will cause a segfault. + */ +void free_cstring(char *block); + +/** + * Query the last block id from indexer. + * + * # Arguments + * + * - `indexer`: A pointer to the [`IndexerServiceFFI`] instance to be queried. + * + * # Returns + * + * A `PointerResult, OperationStatus>` indicating success or failure. + * + * # Safety + * + * The caller must ensure that: + * - `runtime` is a valid pointer to a [`Runtime`] instance. + * - `indexer` is a valid pointer to a [`IndexerServiceFFI`] instance. 
+ */ +struct PointerResult_Option_u64_____OperationStatus query_last_block(const struct Runtime *runtime, + const struct IndexerServiceFFI *indexer); + +/** + * Query the block by id from indexer. + * + * # Arguments + * + * - `indexer`: A pointer to the [`IndexerServiceFFI`] instance to be queried. + * - `block_id`: `u64` number of block id + * + * # Returns + * + * A `PointerResult` indicating success or failure. + * + * # Safety + * + * The caller must ensure that: + * - `runtime` is a valid pointer to a [`Runtime`] instance. + * - `indexer` is a valid pointer to a [`IndexerServiceFFI`] instance. + */ +struct PointerResult_FfiBlockOpt__OperationStatus query_block(const struct Runtime *runtime, + const struct IndexerServiceFFI *indexer, + FfiBlockId block_id); + +/** + * Query the block by id from indexer. + * + * # Arguments + * + * - `indexer`: A pointer to the [`IndexerServiceFFI`] instance to be queried. + * - `hash`: `FfiHashType` - hash of block + * + * # Returns + * + * A `PointerResult` indicating success or failure. + * + * # Safety + * + * The caller must ensure that: + * - `runtime` is a valid pointer to a [`Runtime`] instance. + * - `indexer` is a valid pointer to a [`IndexerServiceFFI`] instance. + */ +struct PointerResult_FfiBlockOpt__OperationStatus query_block_by_hash(const struct Runtime *runtime, + const struct IndexerServiceFFI *indexer, + FfiHashType hash); + +/** + * Query the account by id from indexer. + * + * # Arguments + * + * - `indexer`: A pointer to the [`IndexerServiceFFI`] instance to be queried. + * - `account_id`: `FfiAccountId` - id of queried account + * + * # Returns + * + * A `PointerResult` indicating success or failure. + * + * # Safety + * + * The caller must ensure that: + * - `runtime` is a valid pointer to a [`Runtime`] instance. + * - `indexer` is a valid pointer to a [`IndexerServiceFFI`] instance. 
+ */ +struct PointerResult_FfiAccount__OperationStatus query_account(const struct Runtime *runtime, + const struct IndexerServiceFFI *indexer, + FfiAccountId account_id); + +/** + * Query the trasnaction by hash from indexer. + * + * # Arguments + * + * - `indexer`: A pointer to the [`IndexerServiceFFI`] instance to be queried. + * - `hash`: `FfiHashType` - hash of transaction + * + * # Returns + * + * A `PointerResult, OperationStatus>` indicating success or failure. + * + * # Safety + * + * The caller must ensure that: + * - `indexer` is a valid pointer to a [`IndexerServiceFFI`] instance. + * - `runtime` is a valid pointer to a [`Runtime`] instance. + */ +struct PointerResult_FfiOption_FfiTransaction_____OperationStatus query_transaction(const struct Runtime *runtime, + const struct IndexerServiceFFI *indexer, + FfiHashType hash); + +/** + * Query the blocks by block range from indexer. + * + * # Arguments + * + * - `indexer`: A pointer to the [`IndexerServiceFFI`] instance to be queried. + * - `before`: `FfiOption` - end block of query + * - `limit`: `u64` - number of blocks to query before `before` + * + * # Returns + * + * A `PointerResult, OperationStatus>` indicating success or failure. + * + * # Safety + * + * The caller must ensure that: + * - `indexer` is a valid pointer to a [`IndexerServiceFFI`] instance. + * - `runtime` is a valid pointer to a [`Runtime`] instance. + */ +struct PointerResult_FfiVec_FfiBlock_____OperationStatus query_block_vec(const struct Runtime *runtime, + const struct IndexerServiceFFI *indexer, + struct FfiOption_u64 before, + uint64_t limit); + +/** + * Query the transactions range by account id from indexer. + * + * # Arguments + * + * - `indexer`: A pointer to the [`IndexerServiceFFI`] instance to be queried. 
+ * - `account_id`: `FfiAccountId` - id of queried account + * - `offset`: `u64` - first tx id of query + * - `limit`: `u64` - number of tx ids to query after `offset` + * + * # Returns + * + * A `PointerResult, OperationStatus>` indicating success or failure. + * + * # Safety + * + * The caller must ensure that: + * - `indexer` is a valid pointer to a [`IndexerServiceFFI`] instance. + * - `runtime` is a valid pointer to a [`Runtime`] instance. + */ +struct PointerResult_FfiVec_FfiTransaction_____OperationStatus query_transactions_by_account(const struct Runtime *runtime, + const struct IndexerServiceFFI *indexer, + FfiAccountId account_id, + uint64_t offset, + uint64_t limit); + +/** + * Frees the resources associated with the given ffi account. + * + * # Arguments + * + * - `val`: An instance of `FfiAccount`. + * + * # Returns + * + * void. + * + * # Safety + * + * The caller must ensure that: + * - `val` is a valid instance of `FfiAccount`. + */ +void free_ffi_account(struct FfiAccount val); + +/** + * Frees the resources associated with the given ffi block. + * + * # Arguments + * + * - `val`: An instance of `FfiBlock`. + * + * # Returns + * + * void. + * + * # Safety + * + * The caller must ensure that: + * - `val` is a valid instance of `FfiBlock`. + */ +void free_ffi_block(struct FfiBlock val); + +/** + * Frees the resources associated with the given ffi block option. + * + * # Arguments + * + * - `val`: An instance of `FfiBlockOpt`. + * + * # Returns + * + * void. + * + * # Safety + * + * The caller must ensure that: + * - `val` is a valid instance of `FfiBlockOpt`. + */ +void free_ffi_block_opt(FfiBlockOpt val); + +/** + * Frees the resources associated with the given ffi block vector. + * + * # Arguments + * + * - `val`: An instance of `FfiVec`. + * + * # Returns + * + * void. + * + * # Safety + * + * The caller must ensure that: + * - `val` is a valid instance of `FfiVec`. 
+ */ +void free_ffi_block_vec(struct FfiVec_FfiBlock val); + +/** + * Frees the resources associated with the given ffi transaction. + * + * # Arguments + * + * - `val`: An instance of `FfiTransaction`. + * + * # Returns + * + * void. + * + * # Safety + * + * The caller must ensure that: + * - `val` is a valid instance of `FfiTransaction`. + */ +void free_ffi_transaction(struct FfiTransaction val); + +/** + * Frees the resources associated with the given ffi transaction option. + * + * # Arguments + * + * - `val`: An instance of `FfiOption`. + * + * # Returns + * + * void. + * + * # Safety + * + * The caller must ensure that: + * - `val` is a valid instance of `FfiOption`. + */ +void free_ffi_transaction_opt(struct FfiOption_FfiTransaction val); + +/** + * Frees the resources associated with the given vector of ffi transactions. + * + * # Arguments + * + * - `val`: An instance of `FfiVec`. + * + * # Returns + * + * void. + * + * # Safety + * + * The caller must ensure that: + * - `val` is a valid instance of `FfiVec`. 
+ */ +void free_ffi_transaction_vec(struct FfiVec_FfiTransaction val); + +bool is_ok(const enum OperationStatus *self); + +bool is_error(const enum OperationStatus *self); diff --git a/indexer/ffi/src/api/client.rs b/indexer/ffi/src/api/client.rs new file mode 100644 index 00000000..825a57de --- /dev/null +++ b/indexer/ffi/src/api/client.rs @@ -0,0 +1,36 @@ +use std::net::SocketAddr; + +use url::Url; + +use crate::OperationStatus; + +#[derive(Debug, Clone, Copy)] +pub enum UrlProtocol { + Http, + Ws, +} + +impl std::fmt::Display for UrlProtocol { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Http => write!(f, "http"), + Self::Ws => write!(f, "ws"), + } + } +} + +pub(crate) fn addr_to_url(protocol: UrlProtocol, addr: SocketAddr) -> Result { + // Convert 0.0.0.0 to 127.0.0.1 for client connections + // When binding to port 0, the server binds to 0.0.0.0: + // but clients need to connect to 127.0.0.1: to work reliably + let url_string = if addr.ip().is_unspecified() { + format!("{protocol}://127.0.0.1:{}", addr.port()) + } else { + format!("{protocol}://{addr}") + }; + + url_string.parse().map_err(|e| { + log::error!("Could not parse indexer url: {e}"); + OperationStatus::InitializationError + }) +} diff --git a/indexer/ffi/src/api/lifecycle.rs b/indexer/ffi/src/api/lifecycle.rs new file mode 100644 index 00000000..d124901f --- /dev/null +++ b/indexer/ffi/src/api/lifecycle.rs @@ -0,0 +1,138 @@ +use std::{ffi::c_char, path::PathBuf}; + +use crate::{ + IndexerServiceFFI, Runtime, + api::{ + PointerResult, + client::{UrlProtocol, addr_to_url}, + }, + client::{IndexerClient, IndexerClientTrait as _}, + errors::OperationStatus, +}; + +pub type InitializedIndexerServiceFFIResult = PointerResult; + +/// Creates and starts an indexer based on the provided +/// configuration file path. +/// +/// # Arguments +/// +/// - `config_path`: A pointer to a string representing the path to the configuration file. 
+/// - `port`: Number representing a port, on which indexers RPC will start. +/// +/// # Returns +/// +/// An `InitializedIndexerServiceFFIResult` containing either a pointer to the +/// initialized `IndexerServiceFFI` or an error code. +/// +/// # Safety +/// The caller must ensure that: +/// - `runtime` is a valid pointer to a `tokio::runtime::Runtime` instance. +/// - `config_path` is a valid pointer to a null-terminated C string. +#[unsafe(no_mangle)] +pub unsafe extern "C" fn start_indexer( + runtime: *const Runtime, + config_path: *const c_char, + port: u16, +) -> InitializedIndexerServiceFFIResult { + // SAFETY: The caller must ensure the validness of the `runtime` and `config_path` pointers. + unsafe { setup_indexer(runtime, config_path, port) }.map_or_else( + InitializedIndexerServiceFFIResult::from_error, + InitializedIndexerServiceFFIResult::from_value, + ) +} + +/// Creates a new [`tokio::runtime::Runtime`]. +#[unsafe(no_mangle)] +pub extern "C" fn new_runtime() -> PointerResult { + Runtime::new().map_or_else( + |_e| PointerResult::from_error(OperationStatus::InitializationError), + PointerResult::from_value, + ) +} + +/// Initializes and starts an indexer based on the provided +/// configuration file path. +/// +/// # Arguments +/// +/// - `config_path`: A pointer to a string representing the path to the configuration file. +/// - `port`: Number representing a port, on which indexers RPC will start. +/// +/// # Returns +/// +/// A `Result` containing either the initialized `IndexerServiceFFI` or an +/// error code. +/// +/// # Safety +/// The caller must ensure that: +/// - `runtime` is a valid pointer to a `tokio::runtime::Runtime` instance. +/// - `config_path` is a valid pointer to a null-terminated C string. 
+unsafe fn setup_indexer( + runtime: *const Runtime, + config_path: *const c_char, + port: u16, +) -> Result { + let user_config_path = PathBuf::from( + unsafe { std::ffi::CStr::from_ptr(config_path) } + .to_str() + .map_err(|e| { + log::error!("Could not convert the config path to string: {e}"); + OperationStatus::InitializationError + })?, + ); + let config = indexer_service::IndexerConfig::from_path(&user_config_path).map_err(|e| { + log::error!("Failed to read config: {e}"); + OperationStatus::InitializationError + })?; + + // SAFETY: The caller must ensure that `runtime` is a valid pointer to a + // `tokio::runtime::Runtime` instance. + let runtime = unsafe { &*runtime }; + + let indexer_handle = runtime + .block_on(indexer_service::run_server(config, port)) + .map_err(|e| { + log::error!("Could not start indexer service: {e}"); + OperationStatus::InitializationError + })?; + + let indexer_url = addr_to_url(UrlProtocol::Ws, indexer_handle.addr())?; + let indexer_client = runtime + .block_on(IndexerClient::new(&indexer_url)) + .map_err(|e| { + log::error!("Could not start indexer client: {e}"); + OperationStatus::InitializationError + })?; + + Ok(IndexerServiceFFI::new(indexer_handle, indexer_client)) +} + +/// Stops and frees the resources associated with the given indexer service. +/// +/// # Arguments +/// +/// - `indexer`: A pointer to the `IndexerServiceFFI` instance to be stopped. +/// +/// # Returns +/// +/// An `OperationStatus` indicating success or failure. +/// +/// # Safety +/// +/// The caller must ensure that: +/// - `indexer` is a valid pointer to a `IndexerServiceFFI` instance +/// - The `IndexerServiceFFI` instance was created by this library +/// - The pointer will not be used after this function returns +#[unsafe(no_mangle)] +pub unsafe extern "C" fn stop_indexer(indexer: *mut IndexerServiceFFI) -> OperationStatus { + if indexer.is_null() { + log::error!("Attempted to stop a null indexer pointer. This is a bug. 
Aborting."); + return OperationStatus::NullPointer; + } + + let indexer = unsafe { Box::from_raw(indexer) }; + drop(indexer); + + OperationStatus::Ok +} diff --git a/indexer/ffi/src/api/memory.rs b/indexer/ffi/src/api/memory.rs new file mode 100644 index 00000000..f266d309 --- /dev/null +++ b/indexer/ffi/src/api/memory.rs @@ -0,0 +1,14 @@ +use std::ffi::{CString, c_char}; + +/// # Safety +/// It's up to the caller to pass a proper pointer, if somehow from c/c++ side +/// this is called with a type which doesn't come from a returned `CString` it +/// will cause a segfault. +#[unsafe(no_mangle)] +pub unsafe extern "C" fn free_cstring(block: *mut c_char) { + if block.is_null() { + log::error!("Trying to free a null pointer. Exiting"); + return; + } + drop(unsafe { CString::from_raw(block) }); +} diff --git a/indexer/ffi/src/api/mod.rs b/indexer/ffi/src/api/mod.rs new file mode 100644 index 00000000..ea2b91d7 --- /dev/null +++ b/indexer/ffi/src/api/mod.rs @@ -0,0 +1,8 @@ +pub use result::PointerResult; + +pub mod client; +pub mod lifecycle; +pub mod memory; +pub mod query; +pub mod result; +pub mod types; diff --git a/indexer/ffi/src/api/query.rs b/indexer/ffi/src/api/query.rs new file mode 100644 index 00000000..44951014 --- /dev/null +++ b/indexer/ffi/src/api/query.rs @@ -0,0 +1,348 @@ +use indexer_service_protocol::{AccountId, HashType}; +use indexer_service_rpc::RpcClient as _; + +use crate::{ + IndexerServiceFFI, Runtime, + api::{ + PointerResult, + types::{ + FfiAccountId, FfiBlockId, FfiHashType, FfiOption, FfiVec, + account::FfiAccount, + block::{FfiBlock, FfiBlockOpt}, + transaction::FfiTransaction, + }, + }, + errors::OperationStatus, +}; + +/// Query the last block id from indexer. +/// +/// # Arguments +/// +/// - `indexer`: A pointer to the [`IndexerServiceFFI`] instance to be queried. +/// +/// # Returns +/// +/// A `PointerResult, OperationStatus>` indicating success or failure. 
+/// +/// # Safety +/// +/// The caller must ensure that: +/// - `runtime` is a valid pointer to a [`Runtime`] instance. +/// - `indexer` is a valid pointer to a [`IndexerServiceFFI`] instance. +#[unsafe(no_mangle)] +pub unsafe extern "C" fn query_last_block( + runtime: *const Runtime, + indexer: *const IndexerServiceFFI, +) -> PointerResult, OperationStatus> { + if indexer.is_null() { + log::error!("Attempted to query a null indexer pointer. This is a bug. Aborting."); + return PointerResult::from_error(OperationStatus::NullPointer); + } + + let indexer = unsafe { &*indexer }; + + let client = indexer.client(); + let runtime = unsafe { &*runtime }; + + runtime + .block_on(client.get_last_finalized_block_id()) + .map_or_else( + |_| PointerResult::from_error(OperationStatus::ClientError), + PointerResult::from_value, + ) +} + +/// Query the block by id from indexer. +/// +/// # Arguments +/// +/// - `indexer`: A pointer to the [`IndexerServiceFFI`] instance to be queried. +/// - `block_id`: `u64` number of block id +/// +/// # Returns +/// +/// A `PointerResult` indicating success or failure. +/// +/// # Safety +/// +/// The caller must ensure that: +/// - `runtime` is a valid pointer to a [`Runtime`] instance. +/// - `indexer` is a valid pointer to a [`IndexerServiceFFI`] instance. +#[unsafe(no_mangle)] +pub unsafe extern "C" fn query_block( + runtime: *const Runtime, + indexer: *const IndexerServiceFFI, + block_id: FfiBlockId, +) -> PointerResult { + if indexer.is_null() { + log::error!("Attempted to query a null indexer pointer. This is a bug. 
Aborting.");
+        return PointerResult::from_error(OperationStatus::NullPointer);
+    }
+
+    let indexer = unsafe { &*indexer };
+
+    let client = indexer.client();
+    let runtime = unsafe { &*runtime };
+
+    runtime
+        .block_on(client.get_block_by_id(block_id))
+        .map_or_else(
+            |_| PointerResult::from_error(OperationStatus::ClientError),
+            |block_opt| {
+                let block_ffi = block_opt.map_or_else(FfiBlockOpt::from_none, |block| {
+                    FfiBlockOpt::from_value(block.into())
+                });
+
+                PointerResult::from_value(block_ffi)
+            },
+        )
+}
+
+/// Query the block by hash from indexer.
+///
+/// # Arguments
+///
+/// - `indexer`: A pointer to the [`IndexerServiceFFI`] instance to be queried.
+/// - `hash`: `FfiHashType` - hash of block
+///
+/// # Returns
+///
+/// A `PointerResult` indicating success or failure.
+///
+/// # Safety
+///
+/// The caller must ensure that:
+/// - `runtime` is a valid pointer to a [`Runtime`] instance.
+/// - `indexer` is a valid pointer to a [`IndexerServiceFFI`] instance.
+#[unsafe(no_mangle)]
+pub unsafe extern "C" fn query_block_by_hash(
+    runtime: *const Runtime,
+    indexer: *const IndexerServiceFFI,
+    hash: FfiHashType,
+) -> PointerResult {
+    if indexer.is_null() {
+        log::error!("Attempted to query a null indexer pointer. This is a bug. Aborting.");
+        return PointerResult::from_error(OperationStatus::NullPointer);
+    }
+
+    let indexer = unsafe { &*indexer };
+
+    let client = indexer.client();
+    let runtime = unsafe { &*runtime };
+
+    runtime
+        .block_on(client.get_block_by_hash(HashType(hash.data)))
+        .map_or_else(
+            |_| PointerResult::from_error(OperationStatus::ClientError),
+            |block_opt| {
+                let block_ffi = block_opt.map_or_else(FfiBlockOpt::from_none, |block| {
+                    FfiBlockOpt::from_value(block.into())
+                });
+
+                PointerResult::from_value(block_ffi)
+            },
+        )
+}
+
+/// Query the account by id from indexer.
+///
+/// # Arguments
+///
+/// - `indexer`: A pointer to the [`IndexerServiceFFI`] instance to be queried. 
+/// - `account_id`: `FfiAccountId` - id of queried account
+///
+/// # Returns
+///
+/// A `PointerResult` indicating success or failure.
+///
+/// # Safety
+///
+/// The caller must ensure that:
+/// - `runtime` is a valid pointer to a [`Runtime`] instance.
+/// - `indexer` is a valid pointer to a [`IndexerServiceFFI`] instance.
+#[unsafe(no_mangle)]
+pub unsafe extern "C" fn query_account(
+    runtime: *const Runtime,
+    indexer: *const IndexerServiceFFI,
+    account_id: FfiAccountId,
+) -> PointerResult {
+    if indexer.is_null() {
+        log::error!("Attempted to query a null indexer pointer. This is a bug. Aborting.");
+        return PointerResult::from_error(OperationStatus::NullPointer);
+    }
+
+    let indexer = unsafe { &*indexer };
+
+    let client = indexer.client();
+    let runtime = unsafe { &*runtime };
+
+    runtime
+        .block_on(client.get_account(AccountId {
+            value: account_id.data,
+        }))
+        .map_or_else(
+            |_| PointerResult::from_error(OperationStatus::ClientError),
+            |acc| {
+                let acc_nssa: nssa::Account =
+                    acc.try_into().expect("Source is in blocks, must fit");
+                PointerResult::from_value(acc_nssa.into())
+            },
+        )
+}
+
+/// Query the transaction by hash from indexer.
+///
+/// # Arguments
+///
+/// - `indexer`: A pointer to the [`IndexerServiceFFI`] instance to be queried.
+/// - `hash`: `FfiHashType` - hash of transaction
+///
+/// # Returns
+///
+/// A `PointerResult, OperationStatus>` indicating success or failure.
+///
+/// # Safety
+///
+/// The caller must ensure that:
+/// - `indexer` is a valid pointer to a [`IndexerServiceFFI`] instance.
+/// - `runtime` is a valid pointer to a [`Runtime`] instance.
+#[unsafe(no_mangle)]
+pub unsafe extern "C" fn query_transaction(
+    runtime: *const Runtime,
+    indexer: *const IndexerServiceFFI,
+    hash: FfiHashType,
+) -> PointerResult, OperationStatus> {
+    if indexer.is_null() {
+        log::error!("Attempted to query a null indexer pointer. This is a bug. 
Aborting."); + return PointerResult::from_error(OperationStatus::NullPointer); + } + + let indexer = unsafe { &*indexer }; + + let client = indexer.client(); + let runtime = unsafe { &*runtime }; + + runtime + .block_on(client.get_transaction(HashType(hash.data))) + .map_or_else( + |_| PointerResult::from_error(OperationStatus::ClientError), + |tx_opt| { + let tx_ffi = tx_opt.map_or_else(FfiOption::::from_none, |tx| { + FfiOption::::from_value(tx.into()) + }); + + PointerResult::from_value(tx_ffi) + }, + ) +} + +/// Query the blocks by block range from indexer. +/// +/// # Arguments +/// +/// - `indexer`: A pointer to the [`IndexerServiceFFI`] instance to be queried. +/// - `before`: `FfiOption` - end block of query +/// - `limit`: `u64` - number of blocks to query before `before` +/// +/// # Returns +/// +/// A `PointerResult, OperationStatus>` indicating success or failure. +/// +/// # Safety +/// +/// The caller must ensure that: +/// - `indexer` is a valid pointer to a [`IndexerServiceFFI`] instance. +/// - `runtime` is a valid pointer to a [`Runtime`] instance. +#[unsafe(no_mangle)] +pub unsafe extern "C" fn query_block_vec( + runtime: *const Runtime, + indexer: *const IndexerServiceFFI, + before: FfiOption, + limit: u64, +) -> PointerResult, OperationStatus> { + if indexer.is_null() { + log::error!("Attempted to query a null indexer pointer. This is a bug. Aborting."); + return PointerResult::from_error(OperationStatus::NullPointer); + } + + let indexer = unsafe { &*indexer }; + + let client = indexer.client(); + let runtime = unsafe { &*runtime }; + + let before_std = before.is_some.then(|| unsafe { *before.value }); + + runtime + .block_on(client.get_blocks(before_std, limit)) + .map_or_else( + |_| PointerResult::from_error(OperationStatus::ClientError), + |block_vec| { + PointerResult::from_value( + block_vec + .into_iter() + .map(Into::into) + .collect::>() + .into(), + ) + }, + ) +} + +/// Query the transactions range by account id from indexer. 
+/// +/// # Arguments +/// +/// - `indexer`: A pointer to the [`IndexerServiceFFI`] instance to be queried. +/// - `account_id`: `FfiAccountId` - id of queried account +/// - `offset`: `u64` - first tx id of query +/// - `limit`: `u64` - number of tx ids to query after `offset` +/// +/// # Returns +/// +/// A `PointerResult, OperationStatus>` indicating success or failure. +/// +/// # Safety +/// +/// The caller must ensure that: +/// - `indexer` is a valid pointer to a [`IndexerServiceFFI`] instance. +/// - `runtime` is a valid pointer to a [`Runtime`] instance. +#[unsafe(no_mangle)] +pub unsafe extern "C" fn query_transactions_by_account( + runtime: *const Runtime, + indexer: *const IndexerServiceFFI, + account_id: FfiAccountId, + offset: u64, + limit: u64, +) -> PointerResult, OperationStatus> { + if indexer.is_null() { + log::error!("Attempted to query a null indexer pointer. This is a bug. Aborting."); + return PointerResult::from_error(OperationStatus::NullPointer); + } + + let indexer = unsafe { &*indexer }; + + let client = indexer.client(); + let runtime = unsafe { &*runtime }; + + runtime + .block_on(client.get_transactions_by_account( + AccountId { + value: account_id.data, + }, + offset, + limit, + )) + .map_or_else( + |_| PointerResult::from_error(OperationStatus::ClientError), + |tx_vec| { + PointerResult::from_value( + tx_vec + .into_iter() + .map(Into::into) + .collect::>() + .into(), + ) + }, + ) +} diff --git a/indexer/ffi/src/api/result.rs b/indexer/ffi/src/api/result.rs new file mode 100644 index 00000000..96cbcdd8 --- /dev/null +++ b/indexer/ffi/src/api/result.rs @@ -0,0 +1,29 @@ +/// Simple wrapper around a pointer to a value or an error. +/// +/// Pointer is not guaranteed. You should check the error field before +/// dereferencing the pointer. 
+#[repr(C)] +pub struct PointerResult { + pub value: *mut Type, + pub error: Error, +} + +impl PointerResult { + pub fn from_pointer(pointer: *mut Type) -> Self { + Self { + value: pointer, + error: Error::default(), + } + } + + pub fn from_value(value: Type) -> Self { + Self::from_pointer(Box::into_raw(Box::new(value))) + } + + pub const fn from_error(error: Error) -> Self { + Self { + value: std::ptr::null_mut(), + error, + } + } +} diff --git a/indexer/ffi/src/api/types/account.rs b/indexer/ffi/src/api/types/account.rs new file mode 100644 index 00000000..6c35347f --- /dev/null +++ b/indexer/ffi/src/api/types/account.rs @@ -0,0 +1,119 @@ +use indexer_service_protocol::ProgramId; + +use crate::api::types::{FfiBytes32, FfiProgramId, FfiU128}; + +/// Account data structure - C-compatible version of nssa Account. +/// +/// Note: `balance` and `nonce` are u128 values represented as little-endian +/// byte arrays since C doesn't have native u128 support. +#[repr(C)] +pub struct FfiAccount { + pub program_owner: FfiProgramId, + /// Balance as little-endian [u8; 16]. + pub balance: FfiU128, + /// Pointer to account data bytes. + pub data: *mut u8, + /// Length of account data. + pub data_len: usize, + /// Capacity of account data. + pub data_cap: usize, + /// Nonce as little-endian [u8; 16]. 
+ pub nonce: FfiU128, +} + +// Helper functions to convert between Rust and FFI types + +impl From<&nssa::AccountId> for FfiBytes32 { + fn from(id: &nssa::AccountId) -> Self { + Self::from_account_id(id) + } +} + +impl From for FfiAccount { + fn from(value: nssa::Account) -> Self { + let nssa::Account { + program_owner, + balance, + data, + nonce, + } = value; + + let (data, data_len, data_cap) = data.into_inner().into_raw_parts(); + + let program_owner = FfiProgramId { + data: program_owner, + }; + Self { + program_owner, + balance: balance.into(), + data, + data_len, + data_cap, + nonce: nonce.0.into(), + } + } +} + +impl From for indexer_service_protocol::Account { + fn from(value: FfiAccount) -> Self { + let FfiAccount { + program_owner, + balance, + data, + data_cap, + data_len, + nonce, + } = value; + + Self { + program_owner: ProgramId(program_owner.data), + balance: balance.into(), + data: indexer_service_protocol::Data(unsafe { + Vec::from_raw_parts(data, data_len, data_cap) + }), + nonce: nonce.into(), + } + } +} + +impl From<&FfiAccount> for indexer_service_protocol::Account { + fn from(value: &FfiAccount) -> Self { + let &FfiAccount { + program_owner, + balance, + data, + data_cap, + data_len, + nonce, + } = value; + + Self { + program_owner: ProgramId(program_owner.data), + balance: balance.into(), + data: indexer_service_protocol::Data(unsafe { + Vec::from_raw_parts(data, data_len, data_cap) + }), + nonce: nonce.into(), + } + } +} + +/// Frees the resources associated with the given ffi account. +/// +/// # Arguments +/// +/// - `val`: An instance of `FfiAccount`. +/// +/// # Returns +/// +/// void. +/// +/// # Safety +/// +/// The caller must ensure that: +/// - `val` is a valid instance of `FfiAccount`. 
+#[unsafe(no_mangle)] +pub unsafe extern "C" fn free_ffi_account(val: FfiAccount) { + let orig_val: indexer_service_protocol::Account = val.into(); + drop(orig_val); +} diff --git a/indexer/ffi/src/api/types/block.rs b/indexer/ffi/src/api/types/block.rs new file mode 100644 index 00000000..bca2fdb5 --- /dev/null +++ b/indexer/ffi/src/api/types/block.rs @@ -0,0 +1,199 @@ +use indexer_service_protocol::{ + BedrockStatus, Block, BlockHeader, HashType, MantleMsgId, Signature, +}; + +use crate::api::types::{ + FfiBlockId, FfiHashType, FfiMsgId, FfiOption, FfiSignature, FfiTimestamp, FfiVec, + transaction::free_ffi_transaction_vec, vectors::FfiBlockBody, +}; + +#[repr(C)] +pub struct FfiBlock { + pub header: FfiBlockHeader, + pub body: FfiBlockBody, + pub bedrock_status: FfiBedrockStatus, + pub bedrock_parent_id: FfiMsgId, +} + +impl From for FfiBlock { + fn from(value: Block) -> Self { + let Block { + header, + body, + bedrock_status, + bedrock_parent_id, + } = value; + + Self { + header: header.into(), + body: body + .transactions + .into_iter() + .map(Into::into) + .collect::>() + .into(), + bedrock_status: bedrock_status.into(), + bedrock_parent_id: bedrock_parent_id.into(), + } + } +} + +pub type FfiBlockOpt = FfiOption; + +#[repr(C)] +pub struct FfiBlockHeader { + pub block_id: FfiBlockId, + pub prev_block_hash: FfiHashType, + pub hash: FfiHashType, + pub timestamp: FfiTimestamp, + pub signature: FfiSignature, +} + +impl From for FfiBlockHeader { + fn from(value: BlockHeader) -> Self { + let BlockHeader { + block_id, + prev_block_hash, + hash, + timestamp, + signature, + } = value; + + Self { + block_id, + prev_block_hash: prev_block_hash.into(), + hash: hash.into(), + timestamp, + signature: signature.into(), + } + } +} + +#[repr(C)] +pub enum FfiBedrockStatus { + Pending = 0x0, + Safe, + Finalized, +} + +impl From for FfiBedrockStatus { + fn from(value: BedrockStatus) -> Self { + match value { + BedrockStatus::Finalized => Self::Finalized, + 
BedrockStatus::Pending => Self::Pending, + BedrockStatus::Safe => Self::Safe, + } + } +} + +impl From for BedrockStatus { + fn from(value: FfiBedrockStatus) -> Self { + match value { + FfiBedrockStatus::Finalized => Self::Finalized, + FfiBedrockStatus::Pending => Self::Pending, + FfiBedrockStatus::Safe => Self::Safe, + } + } +} + +/// Frees the resources associated with the given ffi block. +/// +/// # Arguments +/// +/// - `val`: An instance of `FfiBlock`. +/// +/// # Returns +/// +/// void. +/// +/// # Safety +/// +/// The caller must ensure that: +/// - `val` is a valid instance of `FfiBlock`. +#[unsafe(no_mangle)] +pub unsafe extern "C" fn free_ffi_block(val: FfiBlock) { + // We don't really need all the casts, but just in case + // All except `ffi_tx_ffi_vec` is Copy types, so no need for Drop + let _ = BlockHeader { + block_id: val.header.block_id, + prev_block_hash: HashType(val.header.prev_block_hash.data), + hash: HashType(val.header.hash.data), + timestamp: val.header.timestamp, + signature: Signature(val.header.signature.data), + }; + let ffi_tx_ffi_vec = val.body; + + #[expect(clippy::let_underscore_must_use, reason = "No use for this Copy type")] + let _: BedrockStatus = val.bedrock_status.into(); + + let _ = MantleMsgId(val.bedrock_parent_id.data); + + unsafe { + free_ffi_transaction_vec(ffi_tx_ffi_vec); + }; +} + +/// Frees the resources associated with the given ffi block option. +/// +/// # Arguments +/// +/// - `val`: An instance of `FfiBlockOpt`. +/// +/// # Returns +/// +/// void. +/// +/// # Safety +/// +/// The caller must ensure that: +/// - `val` is a valid instance of `FfiBlockOpt`. 
+#[unsafe(no_mangle)] +pub unsafe extern "C" fn free_ffi_block_opt(val: FfiBlockOpt) { + if val.is_some { + let value = unsafe { Box::from_raw(val.value) }; + + // We don't really need all the casts, but just in case + // All except `ffi_tx_ffi_vec` is Copy types, so no need for Drop + let _ = BlockHeader { + block_id: value.header.block_id, + prev_block_hash: HashType(value.header.prev_block_hash.data), + hash: HashType(value.header.hash.data), + timestamp: value.header.timestamp, + signature: Signature(value.header.signature.data), + }; + let ffi_tx_ffi_vec = value.body; + + #[expect(clippy::let_underscore_must_use, reason = "No use for this Copy type")] + let _: BedrockStatus = value.bedrock_status.into(); + + let _ = MantleMsgId(value.bedrock_parent_id.data); + + unsafe { + free_ffi_transaction_vec(ffi_tx_ffi_vec); + }; + } +} + +/// Frees the resources associated with the given ffi block vector. +/// +/// # Arguments +/// +/// - `val`: An instance of `FfiVec`. +/// +/// # Returns +/// +/// void. +/// +/// # Safety +/// +/// The caller must ensure that: +/// - `val` is a valid instance of `FfiVec`. +#[unsafe(no_mangle)] +pub unsafe extern "C" fn free_ffi_block_vec(val: FfiVec) { + let ffi_block_std_vec: Vec<_> = val.into(); + for block in ffi_block_std_vec { + unsafe { + free_ffi_block(block); + } + } +} diff --git a/indexer/ffi/src/api/types/mod.rs b/indexer/ffi/src/api/types/mod.rs new file mode 100644 index 00000000..2e7a77ad --- /dev/null +++ b/indexer/ffi/src/api/types/mod.rs @@ -0,0 +1,165 @@ +use indexer_service_protocol::{AccountId, HashType, MantleMsgId, ProgramId, PublicKey, Signature}; + +pub mod account; +pub mod block; +pub mod transaction; +pub mod vectors; + +/// 32-byte array type for `AccountId`, keys, hashes, etc. +#[repr(C)] +#[derive(Clone, Copy, Default)] +pub struct FfiBytes32 { + pub data: [u8; 32], +} + +/// 64-byte array type for signatures, etc. 
+#[repr(C)] +#[derive(Clone, Copy)] +pub struct FfiBytes64 { + pub data: [u8; 64], +} + +/// Program ID - 8 u32 values (32 bytes total). +#[repr(C)] +#[derive(Clone, Copy, Default)] +pub struct FfiProgramId { + pub data: [u32; 8], +} + +impl From for FfiProgramId { + fn from(value: ProgramId) -> Self { + Self { data: value.0 } + } +} + +/// U128 - 16 bytes little endian. +#[repr(C)] +#[derive(Clone, Copy, Default)] +pub struct FfiU128 { + pub data: [u8; 16], +} + +impl FfiBytes32 { + /// Create from a 32-byte array. + #[must_use] + pub const fn from_bytes(bytes: [u8; 32]) -> Self { + Self { data: bytes } + } + + /// Create from an `AccountId`. + #[must_use] + pub const fn from_account_id(id: &nssa::AccountId) -> Self { + Self { data: *id.value() } + } +} + +impl From for FfiU128 { + fn from(value: u128) -> Self { + Self { + data: value.to_le_bytes(), + } + } +} + +impl From for u128 { + fn from(value: FfiU128) -> Self { + Self::from_le_bytes(value.data) + } +} + +pub type FfiHashType = FfiBytes32; +pub type FfiMsgId = FfiBytes32; +pub type FfiBlockId = u64; +pub type FfiTimestamp = u64; +pub type FfiSignature = FfiBytes64; +pub type FfiAccountId = FfiBytes32; +pub type FfiNonce = FfiU128; +pub type FfiPublicKey = FfiBytes32; + +impl From for FfiHashType { + fn from(value: HashType) -> Self { + Self { data: value.0 } + } +} + +impl From for FfiMsgId { + fn from(value: MantleMsgId) -> Self { + Self { data: value.0 } + } +} + +impl From for FfiSignature { + fn from(value: Signature) -> Self { + Self { data: value.0 } + } +} + +impl From for FfiAccountId { + fn from(value: AccountId) -> Self { + Self { data: value.value } + } +} + +impl From for FfiPublicKey { + fn from(value: PublicKey) -> Self { + Self { data: value.0 } + } +} + +#[repr(C)] +pub struct FfiVec { + pub entries: *mut T, + pub len: usize, + pub capacity: usize, +} + +impl From> for FfiVec { + fn from(value: Vec) -> Self { + let (entries, len, capacity) = value.into_raw_parts(); + Self { + entries, + len, 
+ capacity, + } + } +} + +impl From> for Vec { + fn from(value: FfiVec) -> Self { + unsafe { Self::from_raw_parts(value.entries, value.len, value.capacity) } + } +} + +impl FfiVec { + /// # Safety + /// + /// `index` must be lesser than `self.len`. + #[must_use] + pub unsafe fn get(&self, index: usize) -> &T { + let ptr = unsafe { self.entries.add(index) }; + unsafe { &*ptr } + } +} + +#[repr(C)] +pub struct FfiOption { + pub value: *mut T, + pub is_some: bool, +} + +impl FfiOption { + pub fn from_value(val: T) -> Self { + Self { + value: Box::into_raw(Box::new(val)), + is_some: true, + } + } + + #[must_use] + pub const fn from_none() -> Self { + Self { + value: std::ptr::null_mut(), + is_some: false, + } + } +} diff --git a/indexer/ffi/src/api/types/transaction.rs b/indexer/ffi/src/api/types/transaction.rs new file mode 100644 index 00000000..ee3bd01b --- /dev/null +++ b/indexer/ffi/src/api/types/transaction.rs @@ -0,0 +1,548 @@ +use indexer_service_protocol::{ + AccountId, Ciphertext, Commitment, CommitmentSetDigest, EncryptedAccountData, + EphemeralPublicKey, HashType, Nullifier, PrivacyPreservingMessage, + PrivacyPreservingTransaction, ProgramDeploymentMessage, ProgramDeploymentTransaction, + ProgramId, Proof, PublicKey, PublicMessage, PublicTransaction, Signature, Transaction, + ValidityWindow, WitnessSet, +}; + +use crate::api::types::{ + FfiBytes32, FfiHashType, FfiOption, FfiProgramId, FfiPublicKey, FfiSignature, FfiVec, + vectors::{ + FfiAccountIdList, FfiAccountList, FfiEncryptedAccountDataList, FfiInstructionDataList, + FfiNonceList, FfiNullifierCommitmentSetList, FfiProgramDeploymentMessage, FfiProof, + FfiSignaturePubKeyList, FfiVecBytes32, FfiVecU8, + }, +}; + +#[repr(C)] +pub struct FfiPublicTransactionBody { + pub hash: FfiHashType, + pub message: FfiPublicMessage, + pub witness_set: FfiSignaturePubKeyList, +} + +impl From for FfiPublicTransactionBody { + fn from(value: PublicTransaction) -> Self { + let PublicTransaction { + hash, + message, + 
witness_set, + } = value; + + Self { + hash: hash.into(), + message: message.into(), + witness_set: witness_set + .signatures_and_public_keys + .into_iter() + .map(Into::into) + .collect::>() + .into(), + } + } +} + +impl From> for PublicTransaction { + fn from(value: Box) -> Self { + Self { + hash: HashType(value.hash.data), + message: PublicMessage { + program_id: ProgramId(value.message.program_id.data), + account_ids: { + let std_vec: Vec<_> = value.message.account_ids.into(); + std_vec + .into_iter() + .map(|ffi_val| AccountId { + value: ffi_val.data, + }) + .collect() + }, + nonces: { + let std_vec: Vec<_> = value.message.nonces.into(); + std_vec.into_iter().map(Into::into).collect() + }, + instruction_data: value.message.instruction_data.into(), + }, + witness_set: WitnessSet { + signatures_and_public_keys: { + let std_vec: Vec<_> = value.witness_set.into(); + std_vec + .into_iter() + .map(|ffi_val| { + ( + Signature(ffi_val.signature.data), + PublicKey(ffi_val.public_key.data), + ) + }) + .collect() + }, + proof: None, + }, + } + } +} + +#[repr(C)] +pub struct FfiPublicMessage { + pub program_id: FfiProgramId, + pub account_ids: FfiAccountIdList, + pub nonces: FfiNonceList, + pub instruction_data: FfiInstructionDataList, +} + +impl From for FfiPublicMessage { + fn from(value: PublicMessage) -> Self { + let PublicMessage { + program_id, + account_ids, + nonces, + instruction_data, + } = value; + + Self { + program_id: program_id.into(), + account_ids: account_ids + .into_iter() + .map(Into::into) + .collect::>() + .into(), + nonces: nonces + .into_iter() + .map(Into::into) + .collect::>() + .into(), + instruction_data: instruction_data.into(), + } + } +} + +#[repr(C)] +pub struct FfiPrivateTransactionBody { + pub hash: FfiHashType, + pub message: FfiPrivacyPreservingMessage, + pub witness_set: FfiSignaturePubKeyList, + pub proof: FfiProof, +} + +impl From for FfiPrivateTransactionBody { + fn from(value: PrivacyPreservingTransaction) -> Self { + let 
PrivacyPreservingTransaction { + hash, + message, + witness_set, + } = value; + + Self { + hash: hash.into(), + message: message.into(), + witness_set: witness_set + .signatures_and_public_keys + .into_iter() + .map(Into::into) + .collect::>() + .into(), + proof: witness_set + .proof + .expect("Private execution: proof must be present") + .0 + .into(), + } + } +} + +impl From> for PrivacyPreservingTransaction { + fn from(value: Box) -> Self { + Self { + hash: HashType(value.hash.data), + message: PrivacyPreservingMessage { + public_account_ids: { + let std_vec: Vec<_> = value.message.public_account_ids.into(); + std_vec + .into_iter() + .map(|ffi_val| AccountId { + value: ffi_val.data, + }) + .collect() + }, + nonces: { + let std_vec: Vec<_> = value.message.nonces.into(); + std_vec.into_iter().map(Into::into).collect() + }, + public_post_states: { + let std_vec: Vec<_> = value.message.public_post_states.into(); + std_vec.into_iter().map(Into::into).collect() + }, + encrypted_private_post_states: { + let std_vec: Vec<_> = value.message.encrypted_private_post_states.into(); + std_vec + .into_iter() + .map(|ffi_val| EncryptedAccountData { + ciphertext: Ciphertext(ffi_val.ciphertext.into()), + epk: EphemeralPublicKey(ffi_val.epk.into()), + view_tag: ffi_val.view_tag, + }) + .collect() + }, + new_commitments: { + let std_vec: Vec<_> = value.message.new_commitments.into(); + std_vec + .into_iter() + .map(|ffi_val| Commitment(ffi_val.data)) + .collect() + }, + new_nullifiers: { + let std_vec: Vec<_> = value.message.new_nullifiers.into(); + std_vec + .into_iter() + .map(|ffi_val| { + ( + Nullifier(ffi_val.nullifier.data), + CommitmentSetDigest(ffi_val.commitment_set_digest.data), + ) + }) + .collect() + }, + block_validity_window: cast_ffi_validity_window( + value.message.block_validity_window, + ), + timestamp_validity_window: cast_ffi_validity_window( + value.message.timestamp_validity_window, + ), + }, + witness_set: WitnessSet { + signatures_and_public_keys: { + let 
std_vec: Vec<_> = value.witness_set.into(); + std_vec + .into_iter() + .map(|ffi_val| { + ( + Signature(ffi_val.signature.data), + PublicKey(ffi_val.public_key.data), + ) + }) + .collect() + }, + proof: Some(Proof(value.proof.into())), + }, + } + } +} + +#[repr(C)] +pub struct FfiPrivacyPreservingMessage { + pub public_account_ids: FfiAccountIdList, + pub nonces: FfiNonceList, + pub public_post_states: FfiAccountList, + pub encrypted_private_post_states: FfiEncryptedAccountDataList, + pub new_commitments: FfiVecBytes32, + pub new_nullifiers: FfiNullifierCommitmentSetList, + pub block_validity_window: [u64; 2], + pub timestamp_validity_window: [u64; 2], +} + +impl From for FfiPrivacyPreservingMessage { + fn from(value: PrivacyPreservingMessage) -> Self { + let PrivacyPreservingMessage { + public_account_ids, + nonces, + public_post_states, + encrypted_private_post_states, + new_commitments, + new_nullifiers, + block_validity_window, + timestamp_validity_window, + } = value; + + Self { + public_account_ids: public_account_ids + .into_iter() + .map(Into::into) + .collect::>() + .into(), + nonces: nonces + .into_iter() + .map(Into::into) + .collect::>() + .into(), + public_post_states: public_post_states + .into_iter() + .map(|acc_ind| -> nssa::Account { + acc_ind.try_into().expect("Source is in blocks, must fit") + }) + .map(Into::into) + .collect::>() + .into(), + encrypted_private_post_states: encrypted_private_post_states + .into_iter() + .map(Into::into) + .collect::>() + .into(), + new_commitments: new_commitments + .into_iter() + .map(|comm| FfiBytes32 { data: comm.0 }) + .collect::>() + .into(), + new_nullifiers: new_nullifiers + .into_iter() + .map(Into::into) + .collect::>() + .into(), + block_validity_window: cast_validity_window(block_validity_window), + timestamp_validity_window: cast_validity_window(timestamp_validity_window), + } + } +} + +#[repr(C)] +pub struct FfiNullifierCommitmentSet { + pub nullifier: FfiBytes32, + pub commitment_set_digest: 
FfiBytes32, +} + +impl From<(Nullifier, CommitmentSetDigest)> for FfiNullifierCommitmentSet { + fn from(value: (Nullifier, CommitmentSetDigest)) -> Self { + Self { + nullifier: FfiBytes32 { data: value.0.0 }, + commitment_set_digest: FfiBytes32 { data: value.1.0 }, + } + } +} + +#[repr(C)] +pub struct FfiEncryptedAccountData { + pub ciphertext: FfiVecU8, + pub epk: FfiVecU8, + pub view_tag: u8, +} + +impl From for FfiEncryptedAccountData { + fn from(value: EncryptedAccountData) -> Self { + let EncryptedAccountData { + ciphertext, + epk, + view_tag, + } = value; + + Self { + ciphertext: ciphertext.0.into(), + epk: epk.0.into(), + view_tag, + } + } +} + +#[repr(C)] +pub struct FfiSignaturePubKeyEntry { + pub signature: FfiSignature, + pub public_key: FfiPublicKey, +} + +impl From<(Signature, PublicKey)> for FfiSignaturePubKeyEntry { + fn from(value: (Signature, PublicKey)) -> Self { + Self { + signature: value.0.into(), + public_key: value.1.into(), + } + } +} + +#[repr(C)] +pub struct FfiProgramDeploymentTransactionBody { + pub hash: FfiHashType, + pub message: FfiProgramDeploymentMessage, +} + +impl From> for ProgramDeploymentTransaction { + fn from(value: Box) -> Self { + Self { + hash: HashType(value.hash.data), + message: ProgramDeploymentMessage { + bytecode: value.message.into(), + }, + } + } +} + +impl From for FfiProgramDeploymentTransactionBody { + fn from(value: ProgramDeploymentTransaction) -> Self { + let ProgramDeploymentTransaction { hash, message } = value; + + Self { + hash: hash.into(), + message: message.bytecode.into(), + } + } +} + +#[repr(C)] +pub struct FfiTransactionBody { + pub public_body: *mut FfiPublicTransactionBody, + pub private_body: *mut FfiPrivateTransactionBody, + pub program_deployment_body: *mut FfiProgramDeploymentTransactionBody, +} + +#[repr(C)] +pub struct FfiTransaction { + pub body: FfiTransactionBody, + pub kind: FfiTransactionKind, +} + +impl From for FfiTransaction { + fn from(value: Transaction) -> Self { + match value { 
+ Transaction::Public(pub_tx) => Self { + body: FfiTransactionBody { + public_body: Box::into_raw(Box::new(pub_tx.into())), + private_body: std::ptr::null_mut(), + program_deployment_body: std::ptr::null_mut(), + }, + kind: FfiTransactionKind::Public, + }, + Transaction::PrivacyPreserving(priv_tx) => Self { + body: FfiTransactionBody { + public_body: std::ptr::null_mut(), + private_body: Box::into_raw(Box::new(priv_tx.into())), + program_deployment_body: std::ptr::null_mut(), + }, + kind: FfiTransactionKind::Private, + }, + Transaction::ProgramDeployment(pr_dep_tx) => Self { + body: FfiTransactionBody { + public_body: std::ptr::null_mut(), + private_body: std::ptr::null_mut(), + program_deployment_body: Box::into_raw(Box::new(pr_dep_tx.into())), + }, + kind: FfiTransactionKind::ProgramDeploy, + }, + } + } +} + +#[repr(C)] +pub enum FfiTransactionKind { + Public = 0x0, + Private, + ProgramDeploy, +} + +/// Frees the resources associated with the given ffi transaction. +/// +/// # Arguments +/// +/// - `val`: An instance of `FfiTransaction`. +/// +/// # Returns +/// +/// void. +/// +/// # Safety +/// +/// The caller must ensure that: +/// - `val` is a valid instance of `FfiTransaction`. +#[unsafe(no_mangle)] +pub unsafe extern "C" fn free_ffi_transaction(val: FfiTransaction) { + match val.kind { + FfiTransactionKind::Public => { + let body = unsafe { Box::from_raw(val.body.public_body) }; + let std_body: PublicTransaction = body.into(); + drop(std_body); + } + FfiTransactionKind::Private => { + let body = unsafe { Box::from_raw(val.body.private_body) }; + let std_body: PrivacyPreservingTransaction = body.into(); + drop(std_body); + } + FfiTransactionKind::ProgramDeploy => { + let body = unsafe { Box::from_raw(val.body.program_deployment_body) }; + let std_body: ProgramDeploymentTransaction = body.into(); + drop(std_body); + } + } +} + +/// Frees the resources associated with the given ffi transaction option. 
+/// +/// # Arguments +/// +/// - `val`: An instance of `FfiOption`. +/// +/// # Returns +/// +/// void. +/// +/// # Safety +/// +/// The caller must ensure that: +/// - `val` is a valid instance of `FfiOption`. +#[unsafe(no_mangle)] +pub unsafe extern "C" fn free_ffi_transaction_opt(val: FfiOption) { + if val.is_some { + let value = unsafe { Box::from_raw(val.value) }; + + match value.kind { + FfiTransactionKind::Public => { + let body = unsafe { Box::from_raw(value.body.public_body) }; + let std_body: PublicTransaction = body.into(); + drop(std_body); + } + FfiTransactionKind::Private => { + let body = unsafe { Box::from_raw(value.body.private_body) }; + let std_body: PrivacyPreservingTransaction = body.into(); + drop(std_body); + } + FfiTransactionKind::ProgramDeploy => { + let body = unsafe { Box::from_raw(value.body.program_deployment_body) }; + let std_body: ProgramDeploymentTransaction = body.into(); + drop(std_body); + } + } + } +} + +/// Frees the resources associated with the given vector of ffi transactions. +/// +/// # Arguments +/// +/// - `val`: An instance of `FfiVec`. +/// +/// # Returns +/// +/// void. +/// +/// # Safety +/// +/// The caller must ensure that: +/// - `val` is a valid instance of `FfiVec`. 
+#[unsafe(no_mangle)] +pub unsafe extern "C" fn free_ffi_transaction_vec(val: FfiVec) { + let ffi_tx_std_vec: Vec<_> = val.into(); + for tx in ffi_tx_std_vec { + unsafe { + free_ffi_transaction(tx); + } + } +} + +fn cast_validity_window(window: ValidityWindow) -> [u64; 2] { + [ + window.0.0.unwrap_or_default(), + window.0.1.unwrap_or(u64::MAX), + ] +} + +const fn cast_ffi_validity_window(ffi_window: [u64; 2]) -> ValidityWindow { + let left = if ffi_window[0] == 0 { + None + } else { + Some(ffi_window[0]) + }; + + let right = if ffi_window[1] == u64::MAX { + None + } else { + Some(ffi_window[1]) + }; + + ValidityWindow((left, right)) +} diff --git a/indexer/ffi/src/api/types/vectors.rs b/indexer/ffi/src/api/types/vectors.rs new file mode 100644 index 00000000..46f08737 --- /dev/null +++ b/indexer/ffi/src/api/types/vectors.rs @@ -0,0 +1,31 @@ +use crate::api::types::{ + FfiAccountId, FfiBytes32, FfiNonce, FfiVec, + account::FfiAccount, + transaction::{ + FfiEncryptedAccountData, FfiNullifierCommitmentSet, FfiSignaturePubKeyEntry, FfiTransaction, + }, +}; + +pub type FfiVecU8 = FfiVec; + +pub type FfiAccountList = FfiVec; + +pub type FfiAccountIdList = FfiVec; + +pub type FfiVecBytes32 = FfiVec; + +pub type FfiBlockBody = FfiVec; + +pub type FfiNonceList = FfiVec; + +pub type FfiInstructionDataList = FfiVec; + +pub type FfiSignaturePubKeyList = FfiVec; + +pub type FfiProof = FfiVecU8; + +pub type FfiProgramDeploymentMessage = FfiVecU8; + +pub type FfiEncryptedAccountDataList = FfiVec; + +pub type FfiNullifierCommitmentSetList = FfiVec; diff --git a/sequencer/core/src/indexer_client.rs b/indexer/ffi/src/client.rs similarity index 91% rename from sequencer/core/src/indexer_client.rs rename to indexer/ffi/src/client.rs index 960b77a4..f05b350e 100644 --- a/sequencer/core/src/indexer_client.rs +++ b/indexer/ffi/src/client.rs @@ -4,7 +4,6 @@ use anyhow::{Context as _, Result}; use log::info; pub use url::Url; -#[expect(async_fn_in_trait, reason = "We don't care about 
Send/Sync here")] pub trait IndexerClientTrait: Clone { async fn new(indexer_url: &Url) -> Result; } diff --git a/indexer/ffi/src/errors.rs b/indexer/ffi/src/errors.rs new file mode 100644 index 00000000..4572474c --- /dev/null +++ b/indexer/ffi/src/errors.rs @@ -0,0 +1,23 @@ +#[derive(Debug, Default, PartialEq, Eq)] +#[repr(C)] +pub enum OperationStatus { + #[default] + Ok = 0x0, + NullPointer = 0x1, + InitializationError = 0x2, + ClientError = 0x3, +} + +impl OperationStatus { + #[must_use] + #[unsafe(no_mangle)] + pub extern "C" fn is_ok(&self) -> bool { + *self == Self::Ok + } + + #[must_use] + #[unsafe(no_mangle)] + pub extern "C" fn is_error(&self) -> bool { + !self.is_ok() + } +} diff --git a/indexer/ffi/src/indexer.rs b/indexer/ffi/src/indexer.rs new file mode 100644 index 00000000..e8707697 --- /dev/null +++ b/indexer/ffi/src/indexer.rs @@ -0,0 +1,95 @@ +use std::{ffi::c_void, net::SocketAddr}; + +use indexer_service::IndexerHandle; + +use crate::client::IndexerClient; + +#[repr(C)] +pub struct IndexerServiceFFI { + indexer_handle: *mut c_void, + indexer_client: *mut c_void, +} + +impl IndexerServiceFFI { + #[must_use] + pub fn new( + indexer_handle: indexer_service::IndexerHandle, + indexer_client: IndexerClient, + ) -> Self { + Self { + // Box the complex types and convert to opaque pointers + indexer_handle: Box::into_raw(Box::new(indexer_handle)).cast::(), + indexer_client: Box::into_raw(Box::new(indexer_client)).cast::(), + } + } + + /// Helper to take ownership back. 
+ #[must_use] + pub fn into_parts(mut self) -> (Box, Box) { + let Self { + indexer_handle, + indexer_client, + } = &mut self; + + let indexer_handle_boxed = unsafe { Box::from_raw(indexer_handle.cast::()) }; + let indexer_client_boxed = unsafe { Box::from_raw(indexer_client.cast::()) }; + + // Assigning nulls to prevent double free on drop, since ownership is transferred to caller + *indexer_handle = std::ptr::null_mut(); + *indexer_client = std::ptr::null_mut(); + + (indexer_handle_boxed, indexer_client_boxed) + } + + /// Helper to get indexer handle addr. + #[must_use] + pub const fn addr(&self) -> SocketAddr { + let indexer_handle = unsafe { + self.indexer_handle + .cast::() + .as_ref() + .expect("Indexer Handle must be non-null pointer") + }; + + indexer_handle.addr() + } + + /// Helper to get indexer handle ref. + #[must_use] + pub const fn handle(&self) -> &IndexerHandle { + unsafe { + self.indexer_handle + .cast::() + .as_ref() + .expect("Indexer Handle must be non-null pointer") + } + } + + /// Helper to get indexer client ref. 
+ #[must_use] + pub const fn client(&self) -> &IndexerClient { + unsafe { + self.indexer_client + .cast::() + .as_ref() + .expect("Indexer Client must be non-null pointer") + } + } +} + +// Implement Drop to prevent memory leaks +impl Drop for IndexerServiceFFI { + fn drop(&mut self) { + let Self { + indexer_handle, + indexer_client, + } = self; + + if !indexer_handle.is_null() { + drop(unsafe { Box::from_raw(indexer_handle.cast::()) }); + } + if !indexer_client.is_null() { + drop(unsafe { Box::from_raw(indexer_client.cast::()) }); + } + } +} diff --git a/indexer/ffi/src/lib.rs b/indexer/ffi/src/lib.rs new file mode 100644 index 00000000..9e34b111 --- /dev/null +++ b/indexer/ffi/src/lib.rs @@ -0,0 +1,11 @@ +#![allow(clippy::undocumented_unsafe_blocks, reason = "It is an FFI")] + +pub use errors::OperationStatus; +pub use indexer::IndexerServiceFFI; +pub use runtime::Runtime; + +pub mod api; +mod client; +mod errors; +mod indexer; +mod runtime; diff --git a/indexer/ffi/src/runtime.rs b/indexer/ffi/src/runtime.rs new file mode 100644 index 00000000..ba361fd8 --- /dev/null +++ b/indexer/ffi/src/runtime.rs @@ -0,0 +1,129 @@ +use std::ffi::c_void; + +/// Wrapper around [`tokio::runtime::Runtime`] that can be safely passed across the FFI boundary. +#[repr(C)] +pub struct Runtime { + inner: Pointer, +} + +impl Runtime { + /// Creates a new owned [`Runtime`] instance. + pub fn new() -> Result> { + let inner = tokio::runtime::Runtime::new()?; + Ok(Self { + inner: Pointer::owned(inner), + }) + } + + /// Creates a new owned [`Runtime`] instance from an existing [`tokio::runtime::Runtime`]. + pub fn from_owned(inner: tokio::runtime::Runtime) -> Self { + Self { + inner: Pointer::owned(inner), + } + } + + /// Creates a new borrowed [`Runtime`] instance from a reference to an existing + /// `tokio::runtime::Runtime`. + /// + /// # Safety + /// The caller must ensure that the provided reference remains valid for the lifetime of the + /// returned [`Runtime`]. 
+ pub const unsafe fn from_borrowed(inner: &tokio::runtime::Runtime) -> Self { + Self { + // SAFETY: The caller must ensure the validness of the `inner` reference. + inner: unsafe { Pointer::borrowed(inner) }, + } + } +} + +impl AsRef for Runtime { + fn as_ref(&self) -> &tokio::runtime::Runtime { + self.inner + .as_ref() + .expect("Runtime pointer should not be null") + } +} + +impl std::ops::Deref for Runtime { + type Target = tokio::runtime::Runtime; + + fn deref(&self) -> &Self::Target { + self.as_ref() + } +} + +#[repr(C)] +struct Pointer { + kind: PointerKind, + _marker: std::marker::PhantomData, +} + +#[repr(C)] +enum PointerKind { + Owned(*mut c_void), + Borrowed(*const c_void), + Null, +} + +impl Pointer { + /// Creates a new owned pointer from a value. + pub fn owned(value: T) -> Self { + let boxed = Box::new(value); + let kind = PointerKind::Owned(Box::into_raw(boxed).cast::()); + Self { + kind, + _marker: std::marker::PhantomData, + } + } + + /// Creates a new borrowed pointer from a reference to an existing value. + /// + /// # Safety + /// The caller must ensure that the provided reference remains valid for the lifetime of the + /// returned pointer. + pub const unsafe fn borrowed(value: &T) -> Self { + let kind = PointerKind::Borrowed(std::ptr::from_ref(value).cast::()); + Self { + kind, + _marker: std::marker::PhantomData, + } + } + + /// Returns a reference to the value if the pointer is owned or borrowed, or [`None`] if it is + /// null. + pub const fn as_ref(&self) -> Option<&T> { + match self.kind { + PointerKind::Owned(ptr) => unsafe { (ptr.cast::()).as_ref() }, + PointerKind::Borrowed(ptr) => unsafe { (ptr.cast::()).as_ref() }, + PointerKind::Null => None, + } + } + + /// Takes ownership of the pointer if it is owned, returning the raw pointer and leaving a null + /// pointer in its place. + /// If the pointer is borrowed or null, returns [`None`]. 
+ #[expect(dead_code, reason = "May be useful in future")] + pub fn take(&mut self) -> Option { + match std::mem::replace(&mut self.kind, PointerKind::Null) { + PointerKind::Owned(ptr) => { + // SAFETY: We ensure that the pointer is valid and was allocated by us. + let boxed = unsafe { Box::from_raw(ptr.cast::()) }; + Some(*boxed) + } + PointerKind::Borrowed(_) | PointerKind::Null => None, + } + } +} + +impl Drop for Pointer { + fn drop(&mut self) { + let Self { kind, _marker } = self; + + if let PointerKind::Owned(ptr) = *kind { + // SAFETY: We ensure that the pointer is valid and was allocated by us. + unsafe { + drop(Box::from_raw(ptr.cast::())); + } + } + } +} diff --git a/indexer/service/configs/indexer_config.json b/indexer/service/configs/indexer_config.json index e4dd8f93..f6a0e07c 100644 --- a/indexer/service/configs/indexer_config.json +++ b/indexer/service/configs/indexer_config.json @@ -1,160 +1,8 @@ { "home": ".", "consensus_info_polling_interval": "1s", - "bedrock_client_config": { - "addr": "http://localhost:8080", - "backoff": { - "start_delay": "100ms", - "max_retries": 5 - } + "bedrock_config": { + "addr": "http://localhost:8080" }, - "channel_id": "0101010101010101010101010101010101010101010101010101010101010101", - "initial_accounts": [ - { - "account_id": "CbgR6tj5kWx5oziiFptM7jMvrQeYY3Mzaao6ciuhSr2r", - "balance": 10000 - }, - { - "account_id": "2RHZhw9h534Zr3eq2RGhQete2Hh667foECzXPmSkGni2", - "balance": 20000 - } - ], - "initial_commitments": [ - { - "npk": [ - 139, - 19, - 158, - 11, - 155, - 231, - 85, - 206, - 132, - 228, - 220, - 114, - 145, - 89, - 113, - 156, - 238, - 142, - 242, - 74, - 182, - 91, - 43, - 100, - 6, - 190, - 31, - 15, - 31, - 88, - 96, - 204 - ], - "account": { - "program_owner": [ - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0 - ], - "balance": 10000, - "data": [], - "nonce": 0 - } - }, - { - "npk": [ - 173, - 134, - 33, - 223, - 54, - 226, - 10, - 71, - 215, - 254, - 143, - 172, - 24, - 244, - 243, - 208, - 65, - 112, - 118, 
- 70, - 217, - 240, - 69, - 100, - 129, - 3, - 121, - 25, - 213, - 132, - 42, - 45 - ], - "account": { - "program_owner": [ - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0 - ], - "balance": 20000, - "data": [], - "nonce": 0 - } - } - ], - "signing_key": [ - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37, - 37 - ] + "channel_id": "0101010101010101010101010101010101010101010101010101010101010101" } \ No newline at end of file diff --git a/indexer/service/rpc/src/lib.rs b/indexer/service/rpc/src/lib.rs index 217c60d4..5763fe82 100644 --- a/indexer/service/rpc/src/lib.rs +++ b/indexer/service/rpc/src/lib.rs @@ -27,7 +27,7 @@ pub trait Rpc { async fn subscribe_to_finalized_blocks(&self) -> SubscriptionResult; #[method(name = "getLastFinalizedBlockId")] - async fn get_last_finalized_block_id(&self) -> Result; + async fn get_last_finalized_block_id(&self) -> Result, ErrorObjectOwned>; #[method(name = "getBlockById")] async fn get_block_by_id(&self, block_id: BlockId) -> Result, ErrorObjectOwned>; @@ -41,6 +41,13 @@ pub trait Rpc { #[method(name = "getAccount")] async fn get_account(&self, account_id: AccountId) -> Result; + #[method(name = "getAccountAtBlock")] + async fn get_account_at_block( + &self, + account_id: AccountId, + block_id: BlockId, + ) -> Result; + #[method(name = "getTransaction")] async fn get_transaction( &self, diff --git a/indexer/service/src/lib.rs b/indexer/service/src/lib.rs index 10f1cade..b0a6e516 100644 --- a/indexer/service/src/lib.rs +++ b/indexer/service/src/lib.rs @@ -16,6 +16,7 @@ pub struct IndexerHandle { /// Option because of `Drop` which forbids to simply move out of `self` in `stopped()`. 
server_handle: Option, } + impl IndexerHandle { const fn new(addr: SocketAddr, server_handle: ServerHandle) -> Self { Self { diff --git a/indexer/service/src/mock_service.rs b/indexer/service/src/mock_service.rs index c4a099b8..a83e9ccc 100644 --- a/indexer/service/src/mock_service.rs +++ b/indexer/service/src/mock_service.rs @@ -190,18 +190,16 @@ impl indexer_service_rpc::RpcServer for MockIndexerService { Ok(()) } - async fn get_last_finalized_block_id(&self) -> Result { - self.state + async fn get_last_finalized_block_id(&self) -> Result, ErrorObjectOwned> { + Ok(self + .state .read() .await .blocks .iter() .rev() .find(|block| block.bedrock_status == BedrockStatus::Finalized) - .map(|block| block.header.block_id) - .ok_or_else(|| { - ErrorObjectOwned::owned(-32001, "Last block not found".to_owned(), None::<()>) - }) + .map(|block| block.header.block_id)) } async fn get_block_by_id(&self, block_id: BlockId) -> Result, ErrorObjectOwned> { @@ -239,6 +237,22 @@ impl indexer_service_rpc::RpcServer for MockIndexerService { .ok_or_else(|| ErrorObjectOwned::owned(-32001, "Account not found", None::<()>)) } + async fn get_account_at_block( + &self, + account_id: AccountId, + _block_id: BlockId, + ) -> Result { + // Mock service does not track historical state; returns current state regardless of + // block_id. 
+ self.state + .read() + .await + .accounts + .get(&account_id) + .cloned() + .ok_or_else(|| ErrorObjectOwned::owned(-32001, "Account not found", None::<()>)) + } + async fn get_transaction( &self, tx_hash: HashType, diff --git a/indexer/service/src/service.rs b/indexer/service/src/service.rs index e2f8a321..a959b80c 100644 --- a/indexer/service/src/service.rs +++ b/indexer/service/src/service.rs @@ -48,7 +48,7 @@ impl indexer_service_rpc::RpcServer for IndexerService { Ok(()) } - async fn get_last_finalized_block_id(&self) -> Result { + async fn get_last_finalized_block_id(&self) -> Result, ErrorObjectOwned> { self.indexer.store.get_last_block_id().map_err(db_error) } @@ -83,6 +83,19 @@ impl indexer_service_rpc::RpcServer for IndexerService { .into()) } + async fn get_account_at_block( + &self, + account_id: AccountId, + block_id: BlockId, + ) -> Result { + Ok(self + .indexer + .store + .account_state_at_block(&account_id.into(), block_id) + .map_err(db_error)? + .into()) + } + async fn get_transaction( &self, tx_hash: HashType, @@ -201,43 +214,49 @@ impl SubscriptionService { tokio::sync::mpsc::unbounded_channel::>(); let handle = tokio::spawn(async move { - let mut subscribers = Vec::new(); + let run_loop = async { + let mut subscribers = Vec::new(); - let mut block_stream = pin!(indexer.subscribe_parse_block_stream()); + let mut block_stream = pin!(indexer.subscribe_parse_block_stream()); - #[expect( - clippy::integer_division_remainder_used, - reason = "Generated by select! macro, can't be easily rewritten to avoid this lint" - )] - loop { - tokio::select! 
{ - sub = sub_receiver.recv() => { - let Some(subscription) = sub else { - bail!("Subscription receiver closed unexpectedly"); - }; - info!("Added new subscription with ID {:?}", subscription.sink.subscription_id()); - subscribers.push(subscription); - } - block_opt = block_stream.next() => { - debug!("Got new block from block stream"); - let Some(block) = block_opt else { - bail!("Block stream ended unexpectedly"); - }; - let block = block.context("Failed to get L2 block data")?; - let block: indexer_service_protocol::Block = block.into(); + #[expect( + clippy::integer_division_remainder_used, + reason = "Generated by select! macro, can't be easily rewritten to avoid this lint" + )] + loop { + tokio::select! { + sub = sub_receiver.recv() => { + let Some(subscription) = sub else { + bail!("Subscription receiver closed unexpectedly"); + }; + info!("Added new subscription with ID {:?}", subscription.sink.subscription_id()); + subscribers.push(subscription); + } + block_opt = block_stream.next() => { + debug!("Got new block from block stream"); + let Some(block) = block_opt else { + bail!("Block stream ended unexpectedly"); + }; + let block = block.context("Failed to get L2 block data")?; + let block: indexer_service_protocol::Block = block.into(); - for sub in &mut subscribers { - if let Err(err) = sub.try_send(&block.header.block_id) { - warn!( - "Failed to send block ID {:?} to subscription ID {:?} with error: {err:#?}", - block.header.block_id, - sub.sink.subscription_id(), - ); + for sub in &mut subscribers { + if let Err(err) = sub.try_send(&block.header.block_id) { + warn!( + "Failed to send block ID {:?} to subscription ID {:?} with error: {err:#?}", + block.header.block_id, + sub.sink.subscription_id(), + ); + } } } } } - } + }; + let res: anyhow::Result = run_loop.await; + let Err(err) = res; + error!("Subscription service loop has unexpectedly finished with error: {err:#?}"); + Err(err) }); SubscriptionLoopParts { handle, diff --git 
a/integration_tests/Cargo.toml b/integration_tests/Cargo.toml index 53f0ee98..536f30bc 100644 --- a/integration_tests/Cargo.toml +++ b/integration_tests/Cargo.toml @@ -10,6 +10,7 @@ workspace = true [dependencies] nssa_core = { workspace = true, features = ["host"] } nssa.workspace = true +authenticated_transfer_core.workspace = true sequencer_core = { workspace = true, features = ["default", "testnet"] } sequencer_service.workspace = true wallet.workspace = true @@ -19,14 +20,16 @@ indexer_service.workspace = true serde_json.workspace = true token_core.workspace = true ata_core.workspace = true -indexer_service_rpc.workspace = true +vault_core.workspace = true +faucet_core.workspace = true +indexer_service_rpc = { workspace = true, features = ["client"] } sequencer_service_rpc = { workspace = true, features = ["client"] } +jsonrpsee = { workspace = true, features = ["ws-client"] } wallet-ffi.workspace = true indexer_ffi.workspace = true -testnet_initial_state.workspace = true +indexer_service_protocol.workspace = true url.workspace = true - anyhow.workspace = true env_logger.workspace = true log.workspace = true @@ -35,4 +38,4 @@ hex.workspace = true tempfile.workspace = true bytesize.workspace = true futures.workspace = true -testcontainers = { version = "0.27.0", features = ["docker-compose"] } +testcontainers = { version = "0.27.3", features = ["docker-compose"] } diff --git a/integration_tests/src/config.rs b/integration_tests/src/config.rs index faff1e79..00bdc74a 100644 --- a/integration_tests/src/config.rs +++ b/integration_tests/src/config.rs @@ -2,17 +2,30 @@ use std::{net::SocketAddr, path::PathBuf, time::Duration}; use anyhow::{Context as _, Result}; use bytesize::ByteSize; -use indexer_service::{BackoffConfig, ChannelId, ClientConfig, IndexerConfig}; +use indexer_service::{ChannelId, ClientConfig, IndexerConfig}; use key_protocol::key_management::KeyChain; -use nssa::{Account, AccountId, PrivateKey, PublicKey}; -use nssa_core::{account::Data, 
program::DEFAULT_PROGRAM_ID}; -use sequencer_core::config::{BedrockConfig, SequencerConfig}; -use testnet_initial_state::{ - PrivateAccountPrivateInitialData, PrivateAccountPublicInitialData, - PublicAccountPrivateInitialData, PublicAccountPublicInitialData, -}; +use nssa::{AccountId, PrivateKey, PublicKey}; +use nssa_core::Identifier; +use sequencer_core::config::{BedrockConfig, GenesisAction, SequencerConfig}; use url::Url; -use wallet::config::{InitialAccountData, WalletConfig}; +use wallet::config::WalletConfig; + +pub const INITIAL_PUBLIC_BALANCES_FOR_WALLET: [u128; 2] = [10_000, 20_000]; +pub const INITIAL_PRIVATE_BALANCES_FOR_WALLET: [u128; 2] = [10_000, 20_000]; + +#[derive(Clone)] +pub struct InitialPrivateAccountForWallet { + pub key_chain: KeyChain, + pub identifier: Identifier, + pub balance: u128, +} + +impl InitialPrivateAccountForWallet { + #[must_use] + pub fn account_id(&self) -> AccountId { + AccountId::from((&self.key_chain.nullifier_public_key, self.identifier)) + } +} /// Sequencer config options available for custom changes in integration tests. 
#[derive(Debug, Clone, Copy)] @@ -34,121 +47,6 @@ impl Default for SequencerPartialConfig { } } -pub struct InitialData { - pub public_accounts: Vec<(PrivateKey, u128)>, - pub private_accounts: Vec<(KeyChain, Account)>, -} - -impl InitialData { - #[must_use] - pub fn with_two_public_and_two_private_initialized_accounts() -> Self { - let mut public_alice_private_key = PrivateKey::new_os_random(); - let mut public_alice_public_key = - PublicKey::new_from_private_key(&public_alice_private_key); - let mut public_alice_account_id = AccountId::from(&public_alice_public_key); - - let mut public_bob_private_key = PrivateKey::new_os_random(); - let mut public_bob_public_key = PublicKey::new_from_private_key(&public_bob_private_key); - let mut public_bob_account_id = AccountId::from(&public_bob_public_key); - - // Ensure consistent ordering - if public_alice_account_id > public_bob_account_id { - std::mem::swap(&mut public_alice_private_key, &mut public_bob_private_key); - std::mem::swap(&mut public_alice_public_key, &mut public_bob_public_key); - std::mem::swap(&mut public_alice_account_id, &mut public_bob_account_id); - } - - let mut private_charlie_key_chain = KeyChain::new_os_random(); - let mut private_charlie_account_id = - AccountId::from((&private_charlie_key_chain.nullifier_public_key, 0)); - - let mut private_david_key_chain = KeyChain::new_os_random(); - let mut private_david_account_id = - AccountId::from((&private_david_key_chain.nullifier_public_key, 0)); - - // Ensure consistent ordering - if private_charlie_account_id > private_david_account_id { - std::mem::swap(&mut private_charlie_key_chain, &mut private_david_key_chain); - std::mem::swap( - &mut private_charlie_account_id, - &mut private_david_account_id, - ); - } - - Self { - public_accounts: vec![ - (public_alice_private_key, 10_000), - (public_bob_private_key, 20_000), - ], - private_accounts: vec![ - ( - private_charlie_key_chain, - Account { - balance: 10_000, - data: Data::default(), - 
program_owner: DEFAULT_PROGRAM_ID, - nonce: 0_u128.into(), - }, - ), - ( - private_david_key_chain, - Account { - balance: 20_000, - data: Data::default(), - program_owner: DEFAULT_PROGRAM_ID, - nonce: 0_u128.into(), - }, - ), - ], - } - } - - fn sequencer_initial_public_accounts(&self) -> Vec { - self.public_accounts - .iter() - .map(|(priv_key, balance)| { - let pub_key = PublicKey::new_from_private_key(priv_key); - let account_id = AccountId::from(&pub_key); - PublicAccountPublicInitialData { - account_id, - balance: *balance, - } - }) - .collect() - } - - fn sequencer_initial_private_accounts(&self) -> Vec { - self.private_accounts - .iter() - .map(|(key_chain, account)| PrivateAccountPublicInitialData { - npk: key_chain.nullifier_public_key, - account: account.clone(), - }) - .collect() - } - - fn wallet_initial_accounts(&self) -> Vec { - self.public_accounts - .iter() - .map(|(priv_key, _)| { - let pub_key = PublicKey::new_from_private_key(priv_key); - let account_id = AccountId::from(&pub_key); - InitialAccountData::Public(PublicAccountPrivateInitialData { - account_id, - pub_sign_key: priv_key.clone(), - }) - }) - .chain(self.private_accounts.iter().map(|(key_chain, account)| { - InitialAccountData::Private(Box::new(PrivateAccountPrivateInitialData { - account: account.clone(), - key_chain: key_chain.clone(), - identifier: 0, - })) - })) - .collect() - } -} - #[derive(Debug, Clone, Copy)] pub enum UrlProtocol { Http, @@ -164,36 +62,11 @@ impl std::fmt::Display for UrlProtocol { } } -pub fn indexer_config( - bedrock_addr: SocketAddr, - home: PathBuf, - initial_data: &InitialData, -) -> Result { - Ok(IndexerConfig { - home, - consensus_info_polling_interval: Duration::from_secs(1), - bedrock_client_config: ClientConfig { - addr: addr_to_url(UrlProtocol::Http, bedrock_addr) - .context("Failed to convert bedrock addr to URL")?, - auth: None, - backoff: BackoffConfig { - start_delay: Duration::from_millis(100), - max_retries: 10, - }, - }, - 
initial_public_accounts: Some(initial_data.sequencer_initial_public_accounts()), - initial_private_accounts: Some(initial_data.sequencer_initial_private_accounts()), - signing_key: [37; 32], - channel_id: bedrock_channel_id(), - }) -} - pub fn sequencer_config( partial: SequencerPartialConfig, home: PathBuf, bedrock_addr: SocketAddr, - indexer_addr: SocketAddr, - initial_data: &InitialData, + genesis_transactions: Vec, ) -> Result { let SequencerPartialConfig { max_num_tx_in_block, @@ -204,35 +77,76 @@ pub fn sequencer_config( Ok(SequencerConfig { home, - genesis_id: 1, - is_genesis_random: true, max_num_tx_in_block, max_block_size, mempool_max_size, block_create_timeout, retry_pending_blocks_timeout: Duration::from_secs(5), - initial_public_accounts: Some(initial_data.sequencer_initial_public_accounts()), - initial_private_accounts: Some(initial_data.sequencer_initial_private_accounts()), + genesis: genesis_transactions, signing_key: [37; 32], bedrock_config: BedrockConfig { - backoff: BackoffConfig { - start_delay: Duration::from_millis(100), - max_retries: 5, - }, channel_id: bedrock_channel_id(), node_url: addr_to_url(UrlProtocol::Http, bedrock_addr) .context("Failed to convert bedrock addr to URL")?, auth: None, }, - indexer_rpc_url: addr_to_url(UrlProtocol::Ws, indexer_addr) - .context("Failed to convert indexer addr to URL")?, }) } -pub fn wallet_config( - sequencer_addr: SocketAddr, - initial_data: &InitialData, -) -> Result { +#[must_use] +pub fn default_public_accounts_for_wallet() -> Vec<(PrivateKey, u128)> { + let mut private_keys = vec![PrivateKey::new_os_random(), PrivateKey::new_os_random()]; + private_keys.sort_unstable_by_key(|private_key| { + AccountId::from(&PublicKey::new_from_private_key(private_key)) + }); + + private_keys + .into_iter() + .zip(INITIAL_PUBLIC_BALANCES_FOR_WALLET) + .collect() +} + +#[must_use] +pub fn default_private_accounts_for_wallet() -> Vec { + let mut key_chains = vec![KeyChain::new_os_random(), 
KeyChain::new_os_random()]; + key_chains.sort_unstable(); + + key_chains + .into_iter() + .zip(INITIAL_PRIVATE_BALANCES_FOR_WALLET) + .map(|(key_chain, balance)| InitialPrivateAccountForWallet { + key_chain, + identifier: 0, + balance, + }) + .collect() +} + +#[must_use] +pub fn genesis_from_accounts( + public_accounts: &[(PrivateKey, u128)], + private_accounts: &[InitialPrivateAccountForWallet], +) -> Vec { + let public_genesis = public_accounts.iter().map(|(private_key, balance)| { + let public_key = PublicKey::new_from_private_key(private_key); + let account_id = AccountId::from(&public_key); + GenesisAction::SupplyAccount { + account_id, + balance: *balance, + } + }); + + let private_genesis = private_accounts + .iter() + .map(|account| GenesisAction::SupplyAccount { + account_id: account.account_id(), + balance: account.balance, + }); + + public_genesis.chain(private_genesis).collect() +} + +pub fn wallet_config(sequencer_addr: SocketAddr) -> Result { Ok(WalletConfig { sequencer_addr: addr_to_url(UrlProtocol::Http, sequencer_addr) .context("Failed to convert sequencer addr to URL")?, @@ -240,11 +154,23 @@ pub fn wallet_config( seq_tx_poll_max_blocks: 15, seq_poll_max_retries: 10, seq_block_poll_max_amount: 100, - initial_accounts: Some(initial_data.wallet_initial_accounts()), basic_auth: None, }) } +pub fn indexer_config(bedrock_addr: SocketAddr, home: PathBuf) -> Result { + Ok(IndexerConfig { + home, + consensus_info_polling_interval: Duration::from_secs(1), + bedrock_config: ClientConfig { + addr: addr_to_url(UrlProtocol::Http, bedrock_addr) + .context("Failed to convert bedrock addr to URL")?, + auth: None, + }, + channel_id: bedrock_channel_id(), + }) +} + pub fn addr_to_url(protocol: UrlProtocol, addr: SocketAddr) -> Result { // Convert 0.0.0.0 to 127.0.0.1 for client connections // When binding to port 0, the server binds to 0.0.0.0: diff --git a/integration_tests/src/indexer_client.rs b/integration_tests/src/indexer_client.rs new file mode 100644 index 
00000000..5641d824 --- /dev/null +++ b/integration_tests/src/indexer_client.rs @@ -0,0 +1,34 @@ +//! Thin client wrapper for querying the indexer's JSON-RPC API in tests. +//! +//! The sequencer doesn't depend on the indexer at runtime — finalization comes +//! from zone-sdk events. This wrapper exists purely for test ergonomics so +//! integration tests can construct a single connection and call +//! `indexer_service_rpc::RpcClient` methods directly via `Deref`. + +use std::ops::Deref; + +use anyhow::{Context as _, Result}; +use jsonrpsee::ws_client::{WsClient, WsClientBuilder}; +use log::info; +use url::Url; + +pub struct IndexerClient(WsClient); + +impl IndexerClient { + pub async fn new(indexer_url: &Url) -> Result { + info!("Connecting to Indexer at {indexer_url}"); + let client = WsClientBuilder::default() + .build(indexer_url) + .await + .context("Failed to create websocket client")?; + Ok(Self(client)) + } +} + +impl Deref for IndexerClient { + type Target = WsClient; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} diff --git a/integration_tests/src/lib.rs b/integration_tests/src/lib.rs index fcae2c71..3662e006 100644 --- a/integration_tests/src/lib.rs +++ b/integration_tests/src/lib.rs @@ -1,6 +1,6 @@ //! This library contains common code for integration tests. 
-use std::sync::LazyLock; +use std::{net::SocketAddr, sync::LazyLock}; use anyhow::{Context as _, Result}; use common::{HashType, transaction::NSSATransaction}; @@ -9,29 +9,56 @@ use indexer_service::IndexerHandle; use log::{debug, error}; use nssa::{AccountId, PrivacyPreservingTransaction}; use nssa_core::Commitment; -use sequencer_core::indexer_client::{IndexerClient, IndexerClientTrait as _}; +use sequencer_core::config::GenesisAction; use sequencer_service::SequencerHandle; use sequencer_service_rpc::{RpcClient as _, SequencerClient, SequencerClientBuilder}; use tempfile::TempDir; use testcontainers::compose::DockerCompose; -use wallet::WalletCore; +use wallet::{WalletCore, account::AccountIdWithPrivacy, cli::CliAccountMention}; -use crate::setup::{setup_bedrock_node, setup_indexer, setup_sequencer, setup_wallet}; +use crate::{ + indexer_client::IndexerClient, + setup::{ + setup_bedrock_node, setup_indexer, setup_private_accounts_with_initial_supply, + setup_public_accounts_with_initial_supply, setup_sequencer, setup_wallet, + }, +}; pub mod config; +pub mod indexer_client; pub mod setup; -pub mod test_context_ffi; // TODO: Remove this and control time from tests pub const TIME_TO_WAIT_FOR_BLOCK_SECONDS: u64 = 12; pub const NSSA_PROGRAM_FOR_TEST_DATA_CHANGER: &str = "data_changer.bin"; pub const NSSA_PROGRAM_FOR_TEST_NOOP: &str = "noop.bin"; +pub const NSSA_PROGRAM_FOR_TEST_PDA_FUND_SPEND_PROXY: &str = "pda_fund_spend_proxy.bin"; const BEDROCK_SERVICE_WITH_OPEN_PORT: &str = "logos-blockchain-node-0"; const BEDROCK_SERVICE_PORT: u16 = 18080; static LOGGER: LazyLock<()> = LazyLock::new(env_logger::init); +struct IndexerComponents { + indexer_handle: IndexerHandle, + indexer_client: IndexerClient, + _temp_dir: TempDir, +} + +impl Drop for IndexerComponents { + fn drop(&mut self) { + let Self { + indexer_handle, + indexer_client: _, + _temp_dir: _, + } = self; + + if !indexer_handle.is_healthy() { + error!("Indexer handle has unexpectedly stopped before 
IndexerComponents drop"); + } + } +} + /// Test context which sets up a sequencer and a wallet for integration tests. /// /// It's memory and logically safe to create multiple instances of this struct in parallel tests, @@ -39,14 +66,13 @@ static LOGGER: LazyLock<()> = LazyLock::new(env_logger::init); // NOTE: Order of fields is important for proper drop order. pub struct TestContext { sequencer_client: SequencerClient, - indexer_client: IndexerClient, wallet: WalletCore, wallet_password: String, /// Optional to move out value in Drop. sequencer_handle: Option, - indexer_handle: IndexerHandle, + indexer_components: Option, bedrock_compose: DockerCompose, - _temp_indexer_dir: TempDir, + bedrock_addr: SocketAddr, _temp_sequencer_dir: TempDir, _temp_wallet_dir: TempDir, } @@ -57,65 +83,12 @@ impl TestContext { Self::builder().build().await } + /// Get a builder for the test context to customize its configuration. #[must_use] pub const fn builder() -> TestContextBuilder { TestContextBuilder::new() } - async fn new_configured( - sequencer_partial_config: config::SequencerPartialConfig, - initial_data: config::InitialData, - ) -> Result { - // Ensure logger is initialized only once - *LOGGER; - - debug!("Test context setup"); - - let (bedrock_compose, bedrock_addr) = setup_bedrock_node().await?; - - let (indexer_handle, temp_indexer_dir) = setup_indexer(bedrock_addr, &initial_data) - .await - .context("Failed to setup Indexer")?; - - let (sequencer_handle, temp_sequencer_dir) = setup_sequencer( - sequencer_partial_config, - bedrock_addr, - indexer_handle.addr(), - &initial_data, - ) - .await - .context("Failed to setup Sequencer")?; - - let (wallet, temp_wallet_dir, wallet_password) = - setup_wallet(sequencer_handle.addr(), &initial_data) - .await - .context("Failed to setup wallet")?; - - let sequencer_url = config::addr_to_url(config::UrlProtocol::Http, sequencer_handle.addr()) - .context("Failed to convert sequencer addr to URL")?; - let indexer_url = 
config::addr_to_url(config::UrlProtocol::Ws, indexer_handle.addr()) - .context("Failed to convert indexer addr to URL")?; - let sequencer_client = SequencerClientBuilder::default() - .build(sequencer_url) - .context("Failed to create sequencer client")?; - let indexer_client = IndexerClient::new(&indexer_url) - .await - .context("Failed to create indexer client")?; - - Ok(Self { - sequencer_client, - indexer_client, - wallet, - wallet_password, - bedrock_compose, - sequencer_handle: Some(sequencer_handle), - indexer_handle, - _temp_indexer_dir: temp_indexer_dir, - _temp_sequencer_dir: temp_sequencer_dir, - _temp_wallet_dir: temp_wallet_dir, - }) - } - /// Get reference to the wallet. #[must_use] pub const fn wallet(&self) -> &WalletCore { @@ -138,10 +111,38 @@ impl TestContext { &self.sequencer_client } - /// Get reference to the indexer client. + /// Get the Bedrock Node address. #[must_use] - pub const fn indexer_client(&self) -> &IndexerClient { - &self.indexer_client + pub const fn bedrock_addr(&self) -> SocketAddr { + self.bedrock_addr + } + + /// Get reference to the indexer. + /// + /// # Panics + /// + /// Panics if the indexer is not enabled in the test context. See + /// [`TestContextBuilder::disable_indexer()`]. + #[must_use] + pub fn indexer(&self) -> &IndexerHandle { + self.indexer_components + .as_ref() + .map(|components| &components.indexer_handle) + .expect("Called `TestContext::indexer()` on context with disabled indexer") + } + + /// Get reference to the indexer client. + /// + /// # Panics + /// + /// Panics if the indexer is not enabled in the test context. See + /// [`TestContextBuilder::disable_indexer()`]. + #[must_use] + pub fn indexer_client(&self) -> &IndexerClient { + self.indexer_components + .as_ref() + .map(|components| &components.indexer_client) + .expect("Called `TestContext::indexer_client()` on context with disabled indexer") } /// Get existing public account IDs in the wallet. 
@@ -149,8 +150,9 @@ impl TestContext { pub fn existing_public_accounts(&self) -> Vec { self.wallet .storage() - .user_data + .key_chain() .public_account_ids() + .map(|(account_id, _idx)| account_id) .collect() } @@ -159,8 +161,9 @@ impl TestContext { pub fn existing_private_accounts(&self) -> Vec { self.wallet .storage() - .user_data + .key_chain() .private_account_ids() + .map(|(account_id, _idx)| account_id) .collect() } } @@ -169,15 +172,14 @@ impl Drop for TestContext { fn drop(&mut self) { let Self { sequencer_handle, - indexer_handle, bedrock_compose, - _temp_indexer_dir: _, - _temp_sequencer_dir: _, - _temp_wallet_dir: _, + bedrock_addr: _, + indexer_components: _, sequencer_client: _, - indexer_client: _, wallet: _, wallet_password: _, + _temp_sequencer_dir: _, + _temp_wallet_dir: _, } = self; let sequencer_handle = sequencer_handle @@ -193,10 +195,6 @@ impl Drop for TestContext { ); } - if !indexer_handle.is_healthy() { - error!("Indexer handle has unexpectedly stopped before TestContext drop"); - } - let container = bedrock_compose .service(BEDROCK_SERVICE_WITH_OPEN_PORT) .unwrap_or_else(|| { @@ -217,43 +215,24 @@ impl Drop for TestContext { } } -/// A test context to be used in normal #[test] tests. 
-pub struct BlockingTestContext { - ctx: Option, - runtime: tokio::runtime::Runtime, -} - -impl BlockingTestContext { - pub fn new() -> Result { - let runtime = tokio::runtime::Runtime::new().unwrap(); - let ctx = runtime.block_on(TestContext::new())?; - Ok(Self { - ctx: Some(ctx), - runtime, - }) - } - - pub const fn ctx(&self) -> &TestContext { - self.ctx.as_ref().expect("TestContext is set") - } -} - pub struct TestContextBuilder { - initial_data: Option, + genesis_transactions: Option>, sequencer_partial_config: Option, + enable_indexer: bool, } impl TestContextBuilder { const fn new() -> Self { Self { - initial_data: None, + genesis_transactions: None, sequencer_partial_config: None, + enable_indexer: true, } } #[must_use] - pub fn with_initial_data(mut self, initial_data: config::InitialData) -> Self { - self.initial_data = Some(initial_data); + pub fn with_genesis(mut self, genesis_transactions: Vec) -> Self { + self.genesis_transactions = Some(genesis_transactions); self } @@ -266,14 +245,145 @@ impl TestContextBuilder { self } + /// Exclude Indexer from test context. + /// Indexer is enabled by default. + /// + /// Methods like [`TestContext::indexer()`] and [`TestContext::indexer_client()`] will panic if + /// called when indexer is disabled. 
+ #[must_use] + pub const fn disable_indexer(mut self) -> Self { + self.enable_indexer = false; + self + } + pub async fn build(self) -> Result { - TestContext::new_configured( - self.sequencer_partial_config.unwrap_or_default(), - self.initial_data.unwrap_or_else(|| { - config::InitialData::with_two_public_and_two_private_initialized_accounts() + let Self { + genesis_transactions, + sequencer_partial_config, + enable_indexer, + } = self; + + // Ensure logger is initialized only once + *LOGGER; + + debug!("Test context setup"); + + let (bedrock_compose, bedrock_addr) = setup_bedrock_node() + .await + .context("Failed to setup Bedrock node")?; + + let indexer_components = if enable_indexer { + let (indexer_handle, temp_indexer_dir) = setup_indexer(bedrock_addr) + .await + .context("Failed to setup Indexer")?; + let indexer_url = config::addr_to_url(config::UrlProtocol::Ws, indexer_handle.addr()) + .context("Failed to convert indexer addr to URL")?; + let indexer_client = IndexerClient::new(&indexer_url) + .await + .context("Failed to create indexer client")?; + Some(IndexerComponents { + indexer_handle, + indexer_client, + _temp_dir: temp_indexer_dir, + }) + } else { + None + }; + + let initial_public_accounts = config::default_public_accounts_for_wallet(); + let initial_private_accounts = config::default_private_accounts_for_wallet(); + let (sequencer_handle, temp_sequencer_dir) = setup_sequencer( + sequencer_partial_config.unwrap_or_default(), + bedrock_addr, + genesis_transactions.unwrap_or_else(|| { + config::genesis_from_accounts(&initial_public_accounts, &initial_private_accounts) }), ) .await + .context("Failed to setup Sequencer")?; + + let (mut wallet, temp_wallet_dir, wallet_password) = setup_wallet( + sequencer_handle.addr(), + &initial_public_accounts, + &initial_private_accounts, + ) + .context("Failed to setup wallet")?; + + setup_public_accounts_with_initial_supply(&wallet, &initial_public_accounts) + .await + .context("Failed to initialize public 
accounts in wallet")?; + + setup_private_accounts_with_initial_supply(&mut wallet, &initial_private_accounts) + .await + .context("Failed to initialize private accounts in wallet")?; + + let sequencer_url = config::addr_to_url(config::UrlProtocol::Http, sequencer_handle.addr()) + .context("Failed to convert sequencer addr to URL")?; + let sequencer_client = SequencerClientBuilder::default() + .build(sequencer_url) + .context("Failed to create sequencer client")?; + + Ok(TestContext { + sequencer_client, + wallet, + wallet_password, + bedrock_compose, + bedrock_addr, + sequencer_handle: Some(sequencer_handle), + indexer_components, + _temp_sequencer_dir: temp_sequencer_dir, + _temp_wallet_dir: temp_wallet_dir, + }) + } + + pub fn build_blocking(self) -> Result { + let runtime = tokio::runtime::Runtime::new().context("Failed to create Tokio runtime")?; + + let ctx = runtime.block_on(self.build())?; + + Ok(BlockingTestContext { + ctx: Some(ctx), + runtime, + }) + } +} +/// A test context to be used in normal #[test] tests. 
+pub struct BlockingTestContext { + ctx: Option, + runtime: tokio::runtime::Runtime, +} + +impl BlockingTestContext { + pub fn new() -> Result { + TestContext::builder().build_blocking() + } + + pub const fn ctx(&self) -> &TestContext { + self.ctx.as_ref().expect("TestContext is set") + } + + pub const fn runtime(&self) -> &tokio::runtime::Runtime { + &self.runtime + } + + pub fn block_on<'ctx, F>(&'ctx self, f: impl FnOnce(&'ctx TestContext) -> F) -> F::Output + where + F: std::future::Future + 'ctx, + { + let future = f(self.ctx()); + self.runtime.block_on(future) + } + + pub fn block_on_mut<'ctx, F>( + &'ctx mut self, + f: impl FnOnce(&'ctx mut TestContext) -> F, + ) -> F::Output + where + F: std::future::Future + 'ctx, + { + let ctx_mut = self.ctx.as_mut().expect("TestContext is set"); + let future = f(ctx_mut); + self.runtime.block_on(future) } } @@ -291,13 +401,13 @@ impl Drop for BlockingTestContext { } #[must_use] -pub fn format_public_account_id(account_id: AccountId) -> String { - format!("Public/{account_id}") +pub const fn public_mention(account_id: AccountId) -> CliAccountMention { + CliAccountMention::Id(AccountIdWithPrivacy::Public(account_id)) } #[must_use] -pub fn format_private_account_id(account_id: AccountId) -> String { - format!("Private/{account_id}") +pub const fn private_mention(account_id: AccountId) -> CliAccountMention { + CliAccountMention::Id(AccountIdWithPrivacy::Private(account_id)) } #[expect( diff --git a/integration_tests/src/setup.rs b/integration_tests/src/setup.rs index 58b33c60..c43590d0 100644 --- a/integration_tests/src/setup.rs +++ b/integration_tests/src/setup.rs @@ -1,27 +1,24 @@ -use std::{ - ffi::{CString, c_char}, - fs::File, - io::Write as _, - net::SocketAddr, - path::PathBuf, -}; +use std::{collections::HashMap, net::SocketAddr, path::PathBuf}; use anyhow::{Context as _, Result, bail}; -use indexer_ffi::{IndexerServiceFFI, api::lifecycle::InitializedIndexerServiceFFIResult}; +use 
common::transaction::NSSATransaction; use indexer_service::IndexerHandle; use log::{debug, warn}; -use sequencer_service::SequencerHandle; +use nssa::{AccountId, PrivateKey, PublicKey, PublicTransaction, program::Program}; +use sequencer_service::{GenesisAction, SequencerHandle}; +use sequencer_service_rpc::RpcClient as _; use tempfile::TempDir; use testcontainers::compose::DockerCompose; -use wallet::{WalletCore, config::WalletConfigOverrides}; +use wallet::{ + AccDecodeData::Decode, PrivacyPreservingAccount, WalletCore, config::WalletConfigOverrides, +}; -use crate::{BEDROCK_SERVICE_PORT, BEDROCK_SERVICE_WITH_OPEN_PORT, config}; +use crate::{ + BEDROCK_SERVICE_PORT, BEDROCK_SERVICE_WITH_OPEN_PORT, + config::{self, InitialPrivateAccountForWallet}, +}; -unsafe extern "C" { - fn start_indexer(config_path: *const c_char, port: u16) -> InitializedIndexerServiceFFIResult; -} - -pub(crate) async fn setup_bedrock_node() -> Result<(DockerCompose, SocketAddr)> { +pub async fn setup_bedrock_node() -> Result<(DockerCompose, SocketAddr)> { let manifest_dir = env!("CARGO_MANIFEST_DIR"); let bedrock_compose_path = PathBuf::from(manifest_dir).join("../bedrock/docker-compose.yml"); @@ -91,10 +88,7 @@ pub(crate) async fn setup_bedrock_node() -> Result<(DockerCompose, SocketAddr)> Ok((compose, addr)) } -pub(crate) async fn setup_indexer( - bedrock_addr: SocketAddr, - initial_data: &config::InitialData, -) -> Result<(IndexerHandle, TempDir)> { +pub async fn setup_indexer(bedrock_addr: SocketAddr) -> Result<(IndexerHandle, TempDir)> { let temp_indexer_dir = tempfile::tempdir().context("Failed to create temp dir for indexer home")?; @@ -103,12 +97,8 @@ pub(crate) async fn setup_indexer( temp_indexer_dir.path().display() ); - let indexer_config = config::indexer_config( - bedrock_addr, - temp_indexer_dir.path().to_owned(), - initial_data, - ) - .context("Failed to create Indexer config")?; + let indexer_config = config::indexer_config(bedrock_addr, temp_indexer_dir.path().to_owned()) + 
.context("Failed to create Indexer config")?; indexer_service::run_server(indexer_config, 0) .await @@ -116,11 +106,10 @@ pub(crate) async fn setup_indexer( .map(|handle| (handle, temp_indexer_dir)) } -pub(crate) async fn setup_sequencer( +pub async fn setup_sequencer( partial: config::SequencerPartialConfig, bedrock_addr: SocketAddr, - indexer_addr: SocketAddr, - initial_data: &config::InitialData, + genesis_transactions: Vec, ) -> Result<(SequencerHandle, TempDir)> { let temp_sequencer_dir = tempfile::tempdir().context("Failed to create temp dir for sequencer home")?; @@ -134,8 +123,7 @@ pub(crate) async fn setup_sequencer( partial, temp_sequencer_dir.path().to_owned(), bedrock_addr, - indexer_addr, - initial_data, + genesis_transactions, ) .context("Failed to create Sequencer config")?; @@ -144,12 +132,12 @@ pub(crate) async fn setup_sequencer( Ok((sequencer_handle, temp_sequencer_dir)) } -pub(crate) async fn setup_wallet( +pub fn setup_wallet( sequencer_addr: SocketAddr, - initial_data: &config::InitialData, + initial_public_accounts: &[(PrivateKey, u128)], + initial_private_accounts: &[InitialPrivateAccountForWallet], ) -> Result<(WalletCore, TempDir, String)> { - let config = config::wallet_config(sequencer_addr, initial_data) - .context("Failed to create Wallet config")?; + let config = config::wallet_config(sequencer_addr).context("Failed to create Wallet config")?; let config_serialized = serde_json::to_string_pretty(&config).context("Failed to serialize Wallet config")?; @@ -164,57 +152,176 @@ pub(crate) async fn setup_wallet( let config_overrides = WalletConfigOverrides::default(); let wallet_password = "test_pass".to_owned(); - let (wallet, _mnemonic) = WalletCore::new_init_storage( + let (mut wallet, _mnemonic) = WalletCore::new_init_storage( config_path, storage_path, Some(config_overrides), &wallet_password, ) .context("Failed to init wallet")?; + + for (private_key, _balance) in initial_public_accounts { + wallet + .storage_mut() + .key_chain_mut() 
+ .add_imported_public_account(private_key.clone()); + } + + for private_account in initial_private_accounts { + wallet + .storage_mut() + .key_chain_mut() + .add_imported_private_account( + private_account.key_chain.clone(), + None, + private_account.identifier, + nssa::Account::default(), + ); + } + wallet .store_persistent_data() - .await .context("Failed to store wallet persistent data")?; Ok((wallet, temp_wallet_dir, wallet_password)) } -pub(crate) fn setup_indexer_ffi( - bedrock_addr: SocketAddr, - initial_data: &config::InitialData, -) -> Result<(IndexerServiceFFI, TempDir)> { - let temp_indexer_dir = - tempfile::tempdir().context("Failed to create temp dir for indexer home")?; - - debug!( - "Using temp indexer home at {}", - temp_indexer_dir.path().display() - ); - - let indexer_config = config::indexer_config( - bedrock_addr, - temp_indexer_dir.path().to_owned(), - initial_data, - ) - .context("Failed to create Indexer config")?; - - let config_json = serde_json::to_vec(&indexer_config)?; - let config_path = temp_indexer_dir.path().join("indexer_config.json"); - let mut file = File::create(config_path.as_path())?; - file.write_all(&config_json)?; - file.flush()?; - - let res = - // SAFETY: lib function ensures validity of value. - unsafe { start_indexer(CString::new(config_path.to_str().unwrap())?.as_ptr(), 0) }; - - if res.error.is_error() { - anyhow::bail!("Indexer FFI error {:?}", res.error); +pub async fn setup_public_accounts_with_initial_supply( + wallet: &WalletCore, + initial_public_accounts: &[(PrivateKey, u128)], +) -> Result<()> { + for (private_key, amount) in initial_public_accounts { + claim_funds_from_vault( + wallet, + AccountId::from(&PublicKey::new_from_private_key(private_key)), + *amount, + ) + .await + .context("Failed to claim funds from vault into public account")?; } - Ok(( - // SAFETY: lib function ensures validity of value. 
- unsafe { std::ptr::read(res.value) }, - temp_indexer_dir, - )) + Ok(()) +} + +pub async fn setup_private_accounts_with_initial_supply( + wallet: &mut WalletCore, + initial_private_accounts: &[InitialPrivateAccountForWallet], +) -> Result<()> { + for private_account in initial_private_accounts { + claim_funds_from_vault_to_private( + wallet, + private_account.account_id(), + private_account.balance, + ) + .await + .context("Failed to claim funds from vault into private account")?; + } + + Ok(()) +} + +async fn claim_funds_from_vault( + wallet: &WalletCore, + owner_id: AccountId, + amount: u128, +) -> Result<()> { + let vault_program_id = Program::vault().id(); + let owner_vault_id = vault_core::compute_vault_account_id(vault_program_id, owner_id); + + let nonces = wallet + .get_accounts_nonces(vec![owner_id]) + .await + .context("Failed to fetch owner nonce")?; + + let signing_key = wallet + .storage() + .key_chain() + .pub_account_signing_key(owner_id) + .with_context(|| format!("Missing signing key for public account {owner_id}"))?; + + let message = nssa::public_transaction::Message::try_new( + vault_program_id, + vec![owner_id, owner_vault_id], + nonces, + vault_core::Instruction::Claim { amount }, + ) + .context("Failed to build vault claim message")?; + + let witness_set = nssa::public_transaction::WitnessSet::for_message(&message, &[signing_key]); + let tx = PublicTransaction::new(message, witness_set); + + let tx_hash = wallet + .sequencer_client + .send_transaction(NSSATransaction::Public(tx)) + .await + .context("Failed to submit vault claim transaction")?; + + wallet + .poll_native_token_transfer(tx_hash) + .await + .context("Failed to confirm vault claim transaction")?; + + Ok(()) +} + +async fn claim_funds_from_vault_to_private( + wallet: &mut WalletCore, + owner_id: AccountId, + amount: u128, +) -> Result<()> { + let Some(_) = wallet.storage().key_chain().private_account(owner_id) else { + bail!("Missing private account in wallet key chain for 
account {owner_id}"); + }; + + let vault_program = Program::vault(); + let vault_program_id = vault_program.id(); + let owner_vault_id = vault_core::compute_vault_account_id(vault_program_id, owner_id); + + let instruction_data = + Program::serialize_instruction(vault_core::Instruction::Claim { amount }) + .context("Failed to serialize vault private claim instruction")?; + + let program_with_dependencies = + nssa::privacy_preserving_transaction::circuit::ProgramWithDependencies::new( + vault_program, + HashMap::from([( + Program::authenticated_transfer_program().id(), + Program::authenticated_transfer_program(), + )]), + ); + + let (tx_hash, mut secrets) = wallet + .send_privacy_preserving_tx( + vec![ + PrivacyPreservingAccount::PrivateOwned(owner_id), + PrivacyPreservingAccount::Public(owner_vault_id), + ], + instruction_data, + &program_with_dependencies, + ) + .await + .context("Failed to submit private vault claim transaction")?; + + let secret = secrets + .pop() + .context("Expected one private output secret for vault claim")?; + + let transfer_tx = wallet + .poll_native_token_transfer(tx_hash) + .await + .context("Failed to confirm private vault claim transaction")?; + + let NSSATransaction::PrivacyPreserving(tx) = transfer_tx else { + bail!("Expected privacy preserving transaction result for private vault claim"); + }; + + wallet + .decode_insert_privacy_preserving_transaction_results(&tx, &[Decode(secret, owner_id)]) + .context("Failed to decode private vault claim transaction")?; + + wallet + .store_persistent_data() + .context("Failed to store wallet data after private vault claim")?; + + Ok(()) } diff --git a/integration_tests/src/test_context_ffi.rs b/integration_tests/src/test_context_ffi.rs deleted file mode 100644 index 7d21aa28..00000000 --- a/integration_tests/src/test_context_ffi.rs +++ /dev/null @@ -1,296 +0,0 @@ -use std::sync::Arc; - -use anyhow::{Context as _, Result}; -use futures::FutureExt as _; -use indexer_ffi::IndexerServiceFFI; -use 
indexer_service_rpc::RpcClient as _; -use log::{debug, error}; -use nssa::AccountId; -use sequencer_core::indexer_client::{IndexerClient, IndexerClientTrait as _}; -use sequencer_service::SequencerHandle; -use sequencer_service_rpc::{RpcClient as _, SequencerClient, SequencerClientBuilder}; -use tempfile::TempDir; -use testcontainers::compose::DockerCompose; -use wallet::WalletCore; - -use crate::{ - BEDROCK_SERVICE_WITH_OPEN_PORT, LOGGER, TestContextBuilder, config, - setup::{setup_bedrock_node, setup_indexer_ffi, setup_sequencer, setup_wallet}, -}; - -/// Test context which sets up a sequencer, indexer through ffi and a wallet for integration tests. -/// -/// It's memory and logically safe to create multiple instances of this struct in parallel tests, -/// as each instance uses its own temporary directories for sequencer and wallet data. -// NOTE: Order of fields is important for proper drop order. -pub struct TestContextFFI { - sequencer_client: SequencerClient, - indexer_client: IndexerClient, - wallet: WalletCore, - wallet_password: String, - /// Optional to move out value in Drop. - sequencer_handle: Option, - bedrock_compose: DockerCompose, - _temp_indexer_dir: TempDir, - _temp_sequencer_dir: TempDir, - _temp_wallet_dir: TempDir, -} - -#[expect( - clippy::multiple_inherent_impl, - reason = "It is more natural to have this implementation here" -)] -impl TestContextBuilder { - pub fn build_ffi( - self, - runtime: &Arc, - ) -> Result<(TestContextFFI, IndexerServiceFFI)> { - TestContextFFI::new_configured( - self.sequencer_partial_config.unwrap_or_default(), - &self.initial_data.unwrap_or_else(|| { - config::InitialData::with_two_public_and_two_private_initialized_accounts() - }), - runtime, - ) - } -} - -impl TestContextFFI { - /// Create new test context. 
- pub fn new(runtime: &Arc) -> Result<(Self, IndexerServiceFFI)> { - Self::builder().build_ffi(runtime) - } - - #[must_use] - pub const fn builder() -> TestContextBuilder { - TestContextBuilder::new() - } - - fn new_configured( - sequencer_partial_config: config::SequencerPartialConfig, - initial_data: &config::InitialData, - runtime: &Arc, - ) -> Result<(Self, IndexerServiceFFI)> { - // Ensure logger is initialized only once - *LOGGER; - - debug!("Test context setup"); - - let (bedrock_compose, bedrock_addr) = runtime.block_on(setup_bedrock_node())?; - - let (indexer_ffi, temp_indexer_dir) = - setup_indexer_ffi(bedrock_addr, initial_data).context("Failed to setup Indexer")?; - - let (sequencer_handle, temp_sequencer_dir) = runtime - .block_on(setup_sequencer( - sequencer_partial_config, - bedrock_addr, - // SAFETY: addr is valid if indexer_ffi is valid. - unsafe { indexer_ffi.addr() }, - initial_data, - )) - .context("Failed to setup Sequencer")?; - - let (wallet, temp_wallet_dir, wallet_password) = runtime - .block_on(setup_wallet(sequencer_handle.addr(), initial_data)) - .context("Failed to setup wallet")?; - - let sequencer_url = config::addr_to_url(config::UrlProtocol::Http, sequencer_handle.addr()) - .context("Failed to convert sequencer addr to URL")?; - let indexer_url = config::addr_to_url( - config::UrlProtocol::Ws, - // SAFETY: addr is valid if indexer_ffi is valid. 
- unsafe { indexer_ffi.addr() }, - ) - .context("Failed to convert indexer addr to URL")?; - let sequencer_client = SequencerClientBuilder::default() - .build(sequencer_url) - .context("Failed to create sequencer client")?; - let indexer_client = runtime - .block_on(IndexerClient::new(&indexer_url)) - .context("Failed to create indexer client")?; - - Ok(( - Self { - sequencer_client, - indexer_client, - wallet, - wallet_password, - bedrock_compose, - sequencer_handle: Some(sequencer_handle), - _temp_indexer_dir: temp_indexer_dir, - _temp_sequencer_dir: temp_sequencer_dir, - _temp_wallet_dir: temp_wallet_dir, - }, - indexer_ffi, - )) - } - - /// Get reference to the wallet. - #[must_use] - pub const fn wallet(&self) -> &WalletCore { - &self.wallet - } - - #[must_use] - pub fn wallet_password(&self) -> &str { - &self.wallet_password - } - - /// Get mutable reference to the wallet. - pub const fn wallet_mut(&mut self) -> &mut WalletCore { - &mut self.wallet - } - - /// Get reference to the sequencer client. - #[must_use] - pub const fn sequencer_client(&self) -> &SequencerClient { - &self.sequencer_client - } - - /// Get reference to the indexer client. - #[must_use] - pub const fn indexer_client(&self) -> &IndexerClient { - &self.indexer_client - } - - /// Get existing public account IDs in the wallet. - #[must_use] - pub fn existing_public_accounts(&self) -> Vec { - self.wallet - .storage() - .user_data - .public_account_ids() - .collect() - } - - /// Get existing private account IDs in the wallet. - #[must_use] - pub fn existing_private_accounts(&self) -> Vec { - self.wallet - .storage() - .user_data - .private_account_ids() - .collect() - } - - pub fn get_last_block_sequencer(&self, runtime: &Arc) -> Result { - Ok(runtime.block_on(self.sequencer_client.get_last_block_id())?) - } - - pub fn get_last_block_indexer(&self, runtime: &Arc) -> Result { - Ok(runtime.block_on(self.indexer_client.get_last_finalized_block_id())?) 
- } -} - -impl Drop for TestContextFFI { - fn drop(&mut self) { - let Self { - sequencer_handle, - bedrock_compose, - _temp_indexer_dir: _, - _temp_sequencer_dir: _, - _temp_wallet_dir: _, - sequencer_client: _, - indexer_client: _, - wallet: _, - wallet_password: _, - } = self; - - let sequencer_handle = sequencer_handle - .take() - .expect("Sequencer handle should be present in TestContext drop"); - if !sequencer_handle.is_healthy() { - let Err(err) = sequencer_handle - .failed() - .now_or_never() - .expect("Sequencer handle should not be running"); - error!( - "Sequencer handle has unexpectedly stopped before TestContext drop with error: {err:#}" - ); - } - - let container = bedrock_compose - .service(BEDROCK_SERVICE_WITH_OPEN_PORT) - .unwrap_or_else(|| { - panic!("Failed to get Bedrock service container `{BEDROCK_SERVICE_WITH_OPEN_PORT}`") - }); - let output = std::process::Command::new("docker") - .args(["inspect", "-f", "{{.State.Running}}", container.id()]) - .output() - .expect("Failed to execute docker inspect command to check if Bedrock container is still running"); - let stdout = String::from_utf8(output.stdout) - .expect("Failed to parse docker inspect output as String"); - if stdout.trim() != "true" { - error!( - "Bedrock container `{}` is not running during TestContext drop, docker inspect output: {stdout}", - container.id() - ); - } - } -} - -/// A test context with ffi to be used in normal #[test] tests. 
-pub struct BlockingTestContextFFI { - ctx: Option, - runtime: Arc, - indexer_ffi: IndexerServiceFFI, -} - -impl BlockingTestContextFFI { - pub fn new() -> Result { - let runtime = tokio::runtime::Runtime::new().unwrap(); - let runtime_wrapped = Arc::new(runtime); - let (ctx, indexer_ffi) = TestContextFFI::new(&runtime_wrapped)?; - Ok(Self { - ctx: Some(ctx), - runtime: runtime_wrapped, - indexer_ffi, - }) - } - - #[must_use] - pub const fn ctx(&self) -> &TestContextFFI { - self.ctx.as_ref().expect("TestContext is set") - } - - #[must_use] - pub const fn ctx_mut(&mut self) -> &mut TestContextFFI { - self.ctx.as_mut().expect("TestContext is set") - } - - #[must_use] - pub const fn runtime(&self) -> &Arc { - &self.runtime - } - - #[must_use] - pub fn runtime_clone(&self) -> Arc { - Arc::::clone(&self.runtime) - } -} - -impl Drop for BlockingTestContextFFI { - fn drop(&mut self) { - let Self { - ctx, - runtime, - indexer_ffi, - } = self; - - // Ensure async cleanup of TestContext by blocking on its drop in the runtime. - runtime.block_on(async { - if let Some(ctx) = ctx.take() { - drop(ctx); - } - }); - - let indexer_handle = - // SAFETY: lib function ensures validity of value. 
- unsafe { indexer_ffi.handle() }; - - if !indexer_handle.is_healthy() { - error!("Indexer handle has unexpectedly stopped before TestContext drop"); - } - } -} diff --git a/integration_tests/tests/account.rs b/integration_tests/tests/account.rs index 47fda69f..f779695d 100644 --- a/integration_tests/tests/account.rs +++ b/integration_tests/tests/account.rs @@ -3,16 +3,21 @@ reason = "We don't care about these in tests" )] -use anyhow::Result; -use integration_tests::{TestContext, format_private_account_id}; +use anyhow::{Context as _, Result}; +use integration_tests::{TestContext, private_mention}; +use key_protocol::key_management::KeyChain; use log::info; -use nssa::program::Program; +use nssa::{Data, program::Program}; +use nssa_core::account::Nonce; use sequencer_service_rpc::RpcClient as _; use tokio::test; -use wallet::cli::{ - Command, - account::{AccountSubcommand, NewSubcommand}, - execute_subcommand, +use wallet::{ + account::{AccountIdWithPrivacy, HumanReadableAccount, Label}, + cli::{ + Command, SubcommandReturnValue, + account::{AccountSubcommand, ImportSubcommand, NewSubcommand}, + execute_subcommand, + }, }; #[test] @@ -30,7 +35,7 @@ async fn get_existing_account() -> Result<()> { ); assert_eq!(account.balance, 10000); assert!(account.data.is_empty()); - assert_eq!(account.nonce.0, 0); + assert_eq!(account.nonce.0, 1); info!("Successfully retrieved account with correct details"); @@ -41,7 +46,7 @@ async fn get_existing_account() -> Result<()> { async fn new_public_account_with_label() -> Result<()> { let mut ctx = TestContext::new().await?; - let label = "my-test-public-account".to_owned(); + let label = Label::new("my-test-public-account"); let command = Command::Account(AccountSubcommand::New(NewSubcommand::Public { cci: None, label: Some(label.clone()), @@ -55,14 +60,9 @@ async fn new_public_account_with_label() -> Result<()> { }; // Verify the label was stored - let stored_label = ctx - .wallet() - .storage() - .labels - 
.get(&account_id.to_string()) - .expect("Label should be stored for the new account"); + let resolved = ctx.wallet().storage().resolve_label(&label); - assert_eq!(stored_label.to_string(), label); + assert_eq!(resolved, Some(AccountIdWithPrivacy::Public(account_id))); info!("Successfully created public account with label"); @@ -74,23 +74,17 @@ async fn add_label_to_existing_account() -> Result<()> { let mut ctx = TestContext::new().await?; let account_id = ctx.existing_private_accounts()[0]; - let label = "my-test-private-account".to_owned(); + let label = Label::new("my-test-private-account"); let command = Command::Account(AccountSubcommand::Label { - account_id: Some(format_private_account_id(account_id)), - account_label: None, + account_id: private_mention(account_id), label: label.clone(), }); execute_subcommand(ctx.wallet_mut(), command).await?; - let stored_label = ctx - .wallet() - .storage() - .labels - .get(&account_id.to_string()) - .expect("Label should be stored for the account"); + let resolved = ctx.wallet().storage().resolve_label(&label); - assert_eq!(stored_label.to_string(), label); + assert_eq!(resolved, Some(AccountIdWithPrivacy::Private(account_id))); info!("Successfully set label on existing private account"); @@ -114,12 +108,13 @@ async fn new_public_account_without_label() -> Result<()> { panic!("Expected RegisterAccount return value") }; - // Verify no label was stored + // Verify no label was stored for the account id assert!( - !ctx.wallet() + ctx.wallet() .storage() - .labels - .contains_key(&account_id.to_string()), + .labels_for_account(AccountIdWithPrivacy::Public(account_id)) + .next() + .is_none(), "No label should be stored when not provided" ); @@ -127,3 +122,150 @@ async fn new_public_account_without_label() -> Result<()> { Ok(()) } + +#[test] +async fn import_public_account() -> Result<()> { + let mut ctx = TestContext::new().await?; + + let private_key = nssa::PrivateKey::new_os_random(); + let account_id = 
nssa::AccountId::from(&nssa::PublicKey::new_from_private_key(&private_key)); + + let command = Command::Account(AccountSubcommand::Import(ImportSubcommand::Public { + private_key, + })); + let sub_ret = wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; + let SubcommandReturnValue::Empty = sub_ret else { + anyhow::bail!("Expected Empty return value"); + }; + + let imported_key = ctx + .wallet() + .storage() + .key_chain() + .pub_account_signing_key(account_id); + assert!( + imported_key.is_some(), + "Imported public account should be present" + ); + + Ok(()) +} + +#[test] +async fn import_private_account() -> Result<()> { + let mut ctx = TestContext::new().await?; + + let key_chain = KeyChain::new_os_random(); + let account_id = nssa::AccountId::from((&key_chain.nullifier_public_key, 0)); + let account = nssa::Account { + program_owner: Program::authenticated_transfer_program().id(), + balance: 777, + data: Data::default(), + nonce: Nonce::default(), + }; + + let key_chain_json = serde_json::to_string(&key_chain) + .context("Failed to serialize key chain for private import")?; + let account_state = HumanReadableAccount::from(account.clone()); + + let command = Command::Account(AccountSubcommand::Import(ImportSubcommand::Private { + key_chain_json, + account_state, + chain_index: None, + identifier: 0, + })); + let sub_ret = wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; + let SubcommandReturnValue::Empty = sub_ret else { + anyhow::bail!("Expected Empty return value"); + }; + + let imported_acc = ctx + .wallet() + .storage() + .key_chain() + .private_account(account_id) + .context("Imported private account should be present")?; + + assert_eq!( + imported_acc.key_chain.secret_spending_key, + key_chain.secret_spending_key + ); + assert_eq!( + imported_acc.key_chain.nullifier_public_key, + key_chain.nullifier_public_key + ); + assert_eq!( + imported_acc.key_chain.viewing_public_key, + key_chain.viewing_public_key + ); + + 
assert_eq!(imported_acc.chain_index, None); + + assert_eq!(imported_acc.kind.identifier(), 0); + + assert_eq!(imported_acc.account, &account); + + Ok(()) +} + +#[test] +async fn import_private_account_second_time_overrides_account_data() -> Result<()> { + let mut ctx = TestContext::new().await?; + + let key_chain = KeyChain::new_os_random(); + let account_id = nssa::AccountId::from((&key_chain.nullifier_public_key, 0)); + let key_chain_json = + serde_json::to_string(&key_chain).context("Failed to serialize key chain")?; + + let initial_account = nssa::Account { + program_owner: Program::authenticated_transfer_program().id(), + balance: 100, + data: Data::default(), + nonce: Nonce::default(), + }; + + // First import + wallet::cli::execute_subcommand( + ctx.wallet_mut(), + Command::Account(AccountSubcommand::Import(ImportSubcommand::Private { + key_chain_json: key_chain_json.clone(), + account_state: HumanReadableAccount::from(initial_account), + chain_index: None, + identifier: 0, + })), + ) + .await?; + + let updated_account = nssa::Account { + program_owner: Program::authenticated_transfer_program().id(), + balance: 999, + data: Data::default(), + nonce: Nonce::default(), + }; + + // Second import with different account data (same key chain) + wallet::cli::execute_subcommand( + ctx.wallet_mut(), + Command::Account(AccountSubcommand::Import(ImportSubcommand::Private { + key_chain_json, + account_state: HumanReadableAccount::from(updated_account.clone()), + chain_index: None, + identifier: 0, + })), + ) + .await?; + + let imported = ctx + .wallet() + .storage() + .key_chain() + .private_account(account_id) + .context("Imported private account should be present")?; + + assert_eq!( + imported.account, &updated_account, + "Second import should override account data" + ); + + Ok(()) +} diff --git a/integration_tests/tests/amm.rs b/integration_tests/tests/amm.rs index d1e4f8ee..b7a747f1 100644 --- a/integration_tests/tests/amm.rs +++ b/integration_tests/tests/amm.rs @@ 
-7,14 +7,17 @@ use std::time::Duration; use anyhow::Result; -use integration_tests::{TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, format_public_account_id}; +use integration_tests::{TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, public_mention}; use log::info; use sequencer_service_rpc::RpcClient as _; use tokio::test; -use wallet::cli::{ - Command, SubcommandReturnValue, - account::{AccountSubcommand, NewSubcommand}, - programs::{amm::AmmProgramAgnosticSubcommand, token::TokenProgramAgnosticSubcommand}, +use wallet::{ + account::Label, + cli::{ + Command, SubcommandReturnValue, + account::{AccountSubcommand, NewSubcommand}, + programs::{amm::AmmProgramAgnosticSubcommand, token::TokenProgramAgnosticSubcommand}, + }, }; #[test] @@ -113,10 +116,8 @@ async fn amm_public() -> Result<()> { // Create new token let subcommand = TokenProgramAgnosticSubcommand::New { - definition_account_id: Some(format_public_account_id(definition_account_id_1)), - definition_account_label: None, - supply_account_id: Some(format_public_account_id(supply_account_id_1)), - supply_account_label: None, + definition_account_id: public_mention(definition_account_id_1), + supply_account_id: public_mention(supply_account_id_1), name: "A NAM1".to_owned(), total_supply: 37, @@ -127,15 +128,12 @@ async fn amm_public() -> Result<()> { // Transfer 7 tokens from `supply_acc` to the account at account_id `recipient_account_id_1` let subcommand = TokenProgramAgnosticSubcommand::Send { - from: Some(format_public_account_id(supply_account_id_1)), - from_label: None, - to: Some(format_public_account_id(recipient_account_id_1)), - to_label: None, + from: public_mention(supply_account_id_1), + to: Some(public_mention(recipient_account_id_1)), to_npk: None, to_vpk: None, to_identifier: Some(0), amount: 7, - from_key_path: None, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; @@ -144,10 +142,8 @@ async fn amm_public() -> Result<()> { // Create new token let subcommand = 
TokenProgramAgnosticSubcommand::New { - definition_account_id: Some(format_public_account_id(definition_account_id_2)), - definition_account_label: None, - supply_account_id: Some(format_public_account_id(supply_account_id_2)), - supply_account_label: None, + definition_account_id: public_mention(definition_account_id_2), + supply_account_id: public_mention(supply_account_id_2), name: "A NAM2".to_owned(), total_supply: 37, @@ -158,15 +154,12 @@ async fn amm_public() -> Result<()> { // Transfer 7 tokens from `supply_acc` to the account at account_id `recipient_account_id_2` let subcommand = TokenProgramAgnosticSubcommand::Send { - from: Some(format_public_account_id(supply_account_id_2)), - from_label: None, - to: Some(format_public_account_id(recipient_account_id_2)), - to_label: None, + from: public_mention(supply_account_id_2), + to: Some(public_mention(recipient_account_id_2)), to_npk: None, to_vpk: None, to_identifier: Some(0), amount: 7, - from_key_path: None, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; @@ -195,12 +188,9 @@ async fn amm_public() -> Result<()> { // Send creation tx let subcommand = AmmProgramAgnosticSubcommand::New { - user_holding_a: Some(format_public_account_id(recipient_account_id_1)), - user_holding_a_label: None, - user_holding_b: Some(format_public_account_id(recipient_account_id_2)), - user_holding_b_label: None, - user_holding_lp: Some(format_public_account_id(user_holding_lp)), - user_holding_lp_label: None, + user_holding_a: public_mention(recipient_account_id_1), + user_holding_b: public_mention(recipient_account_id_2), + user_holding_lp: public_mention(user_holding_lp), balance_a: 3, balance_b: 3, }; @@ -241,13 +231,11 @@ async fn amm_public() -> Result<()> { // Make swap let subcommand = AmmProgramAgnosticSubcommand::SwapExactInput { - user_holding_a: Some(format_public_account_id(recipient_account_id_1)), - user_holding_a_label: None, - user_holding_b: 
Some(format_public_account_id(recipient_account_id_2)), - user_holding_b_label: None, + user_holding_a: public_mention(recipient_account_id_1), + user_holding_b: public_mention(recipient_account_id_2), amount_in: 2, min_amount_out: 1, - token_definition: definition_account_id_1.to_string(), + token_definition: definition_account_id_1, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::AMM(subcommand)).await?; @@ -286,13 +274,11 @@ async fn amm_public() -> Result<()> { // Make swap let subcommand = AmmProgramAgnosticSubcommand::SwapExactInput { - user_holding_a: Some(format_public_account_id(recipient_account_id_1)), - user_holding_a_label: None, - user_holding_b: Some(format_public_account_id(recipient_account_id_2)), - user_holding_b_label: None, + user_holding_a: public_mention(recipient_account_id_1), + user_holding_b: public_mention(recipient_account_id_2), amount_in: 2, min_amount_out: 1, - token_definition: definition_account_id_2.to_string(), + token_definition: definition_account_id_2, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::AMM(subcommand)).await?; @@ -331,12 +317,9 @@ async fn amm_public() -> Result<()> { // Add liquidity let subcommand = AmmProgramAgnosticSubcommand::AddLiquidity { - user_holding_a: Some(format_public_account_id(recipient_account_id_1)), - user_holding_a_label: None, - user_holding_b: Some(format_public_account_id(recipient_account_id_2)), - user_holding_b_label: None, - user_holding_lp: Some(format_public_account_id(user_holding_lp)), - user_holding_lp_label: None, + user_holding_a: public_mention(recipient_account_id_1), + user_holding_b: public_mention(recipient_account_id_2), + user_holding_lp: public_mention(user_holding_lp), min_amount_lp: 1, max_amount_a: 2, max_amount_b: 2, @@ -378,12 +361,9 @@ async fn amm_public() -> Result<()> { // Remove liquidity let subcommand = AmmProgramAgnosticSubcommand::RemoveLiquidity { - user_holding_a: Some(format_public_account_id(recipient_account_id_1)), - 
user_holding_a_label: None, - user_holding_b: Some(format_public_account_id(recipient_account_id_2)), - user_holding_b_label: None, - user_holding_lp: Some(format_public_account_id(user_holding_lp)), - user_holding_lp_label: None, + user_holding_a: public_mention(recipient_account_id_1), + user_holding_b: public_mention(recipient_account_id_2), + user_holding_lp: public_mention(user_holding_lp), balance_lp: 2, min_amount_a: 1, min_amount_b: 1, @@ -459,14 +439,14 @@ async fn amm_new_pool_using_labels() -> Result<()> { }; // Create holding_a with a label - let holding_a_label = "amm-holding-a-label".to_owned(); + let holding_a_label = Label::new("amm-holding-a-label"); let SubcommandReturnValue::RegisterAccount { account_id: holding_a_id, } = wallet::cli::execute_subcommand( ctx.wallet_mut(), Command::Account(AccountSubcommand::New(NewSubcommand::Public { cci: None, - label: Some(holding_a_label.clone()), + label: Some(Label::new(holding_a_label.clone())), })), ) .await? @@ -504,14 +484,14 @@ async fn amm_new_pool_using_labels() -> Result<()> { }; // Create holding_b with a label - let holding_b_label = "amm-holding-b-label".to_owned(); + let holding_b_label = Label::new("amm-holding-b-label"); let SubcommandReturnValue::RegisterAccount { account_id: holding_b_id, } = wallet::cli::execute_subcommand( ctx.wallet_mut(), Command::Account(AccountSubcommand::New(NewSubcommand::Public { cci: None, - label: Some(holding_b_label.clone()), + label: Some(Label::new(holding_b_label.clone())), })), ) .await? 
@@ -520,14 +500,14 @@ async fn amm_new_pool_using_labels() -> Result<()> { }; // Create holding_lp with a label - let holding_lp_label = "amm-holding-lp-label".to_owned(); + let holding_lp_label = Label::new("amm-holding-lp-label"); let SubcommandReturnValue::RegisterAccount { account_id: holding_lp_id, } = wallet::cli::execute_subcommand( ctx.wallet_mut(), Command::Account(AccountSubcommand::New(NewSubcommand::Public { cci: None, - label: Some(holding_lp_label.clone()), + label: Some(Label::new(holding_lp_label.clone())), })), ) .await? @@ -537,10 +517,8 @@ async fn amm_new_pool_using_labels() -> Result<()> { // Create token 1 and distribute to holding_a let subcommand = TokenProgramAgnosticSubcommand::New { - definition_account_id: Some(format_public_account_id(definition_account_id_1)), - definition_account_label: None, - supply_account_id: Some(format_public_account_id(supply_account_id_1)), - supply_account_label: None, + definition_account_id: public_mention(definition_account_id_1), + supply_account_id: public_mention(supply_account_id_1), name: "TOKEN1".to_owned(), total_supply: 10, }; @@ -548,25 +526,20 @@ async fn amm_new_pool_using_labels() -> Result<()> { tokio::time::sleep(Duration::from_secs(TIME_TO_WAIT_FOR_BLOCK_SECONDS)).await; let subcommand = TokenProgramAgnosticSubcommand::Send { - from: Some(format_public_account_id(supply_account_id_1)), - from_label: None, - to: Some(format_public_account_id(holding_a_id)), - to_label: None, + from: public_mention(supply_account_id_1), + to: Some(public_mention(holding_a_id)), to_npk: None, to_vpk: None, to_identifier: Some(0), amount: 5, - from_key_path: None, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; tokio::time::sleep(Duration::from_secs(TIME_TO_WAIT_FOR_BLOCK_SECONDS)).await; // Create token 2 and distribute to holding_b let subcommand = TokenProgramAgnosticSubcommand::New { - definition_account_id: Some(format_public_account_id(definition_account_id_2)), - 
definition_account_label: None, - supply_account_id: Some(format_public_account_id(supply_account_id_2)), - supply_account_label: None, + definition_account_id: public_mention(definition_account_id_2), + supply_account_id: public_mention(supply_account_id_2), name: "TOKEN2".to_owned(), total_supply: 10, }; @@ -574,27 +547,21 @@ async fn amm_new_pool_using_labels() -> Result<()> { tokio::time::sleep(Duration::from_secs(TIME_TO_WAIT_FOR_BLOCK_SECONDS)).await; let subcommand = TokenProgramAgnosticSubcommand::Send { - from: Some(format_public_account_id(supply_account_id_2)), - from_label: None, - to: Some(format_public_account_id(holding_b_id)), - to_label: None, + from: public_mention(supply_account_id_2), + to: Some(public_mention(holding_b_id)), to_npk: None, to_vpk: None, to_identifier: Some(0), amount: 5, - from_key_path: None, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; tokio::time::sleep(Duration::from_secs(TIME_TO_WAIT_FOR_BLOCK_SECONDS)).await; // Create AMM pool using account labels instead of IDs let subcommand = AmmProgramAgnosticSubcommand::New { - user_holding_a: None, - user_holding_a_label: Some(holding_a_label), - user_holding_b: None, - user_holding_b_label: Some(holding_b_label), - user_holding_lp: None, - user_holding_lp_label: Some(holding_lp_label), + user_holding_a: holding_a_label.into(), + user_holding_b: holding_b_label.into(), + user_holding_lp: holding_lp_label.into(), balance_a: 3, balance_b: 3, }; diff --git a/integration_tests/tests/ata.rs b/integration_tests/tests/ata.rs index 057b7817..d0eddeae 100644 --- a/integration_tests/tests/ata.rs +++ b/integration_tests/tests/ata.rs @@ -9,8 +9,8 @@ use std::time::Duration; use anyhow::{Context as _, Result}; use ata_core::{compute_ata_seed, get_associated_token_account_id}; use integration_tests::{ - TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, format_private_account_id, - format_public_account_id, verify_commitment_is_in_state, + 
TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, private_mention, public_mention, + verify_commitment_is_in_state, }; use log::info; use nssa::program::Program; @@ -68,10 +68,8 @@ async fn create_ata_initializes_holding_account() -> Result<()> { wallet::cli::execute_subcommand( ctx.wallet_mut(), Command::Token(TokenProgramAgnosticSubcommand::New { - definition_account_id: Some(format_public_account_id(definition_account_id)), - definition_account_label: None, - supply_account_id: Some(format_public_account_id(supply_account_id)), - supply_account_label: None, + definition_account_id: public_mention(definition_account_id), + supply_account_id: public_mention(supply_account_id), name: "TEST".to_owned(), total_supply, }), @@ -85,8 +83,8 @@ async fn create_ata_initializes_holding_account() -> Result<()> { wallet::cli::execute_subcommand( ctx.wallet_mut(), Command::Ata(AtaSubcommand::Create { - owner: format_public_account_id(owner_account_id), - token_definition: definition_account_id.to_string(), + owner: public_mention(owner_account_id), + token_definition: definition_account_id, }), ) .await?; @@ -132,10 +130,8 @@ async fn create_ata_is_idempotent() -> Result<()> { wallet::cli::execute_subcommand( ctx.wallet_mut(), Command::Token(TokenProgramAgnosticSubcommand::New { - definition_account_id: Some(format_public_account_id(definition_account_id)), - definition_account_label: None, - supply_account_id: Some(format_public_account_id(supply_account_id)), - supply_account_label: None, + definition_account_id: public_mention(definition_account_id), + supply_account_id: public_mention(supply_account_id), name: "TEST".to_owned(), total_supply: 100, }), @@ -149,8 +145,8 @@ async fn create_ata_is_idempotent() -> Result<()> { wallet::cli::execute_subcommand( ctx.wallet_mut(), Command::Ata(AtaSubcommand::Create { - owner: format_public_account_id(owner_account_id), - token_definition: definition_account_id.to_string(), + owner: public_mention(owner_account_id), + token_definition: 
definition_account_id, }), ) .await?; @@ -162,8 +158,8 @@ async fn create_ata_is_idempotent() -> Result<()> { wallet::cli::execute_subcommand( ctx.wallet_mut(), Command::Ata(AtaSubcommand::Create { - owner: format_public_account_id(owner_account_id), - token_definition: definition_account_id.to_string(), + owner: public_mention(owner_account_id), + token_definition: definition_account_id, }), ) .await?; @@ -212,10 +208,8 @@ async fn transfer_and_burn_via_ata() -> Result<()> { wallet::cli::execute_subcommand( ctx.wallet_mut(), Command::Token(TokenProgramAgnosticSubcommand::New { - definition_account_id: Some(format_public_account_id(definition_account_id)), - definition_account_label: None, - supply_account_id: Some(format_public_account_id(supply_account_id)), - supply_account_label: None, + definition_account_id: public_mention(definition_account_id), + supply_account_id: public_mention(supply_account_id), name: "TEST".to_owned(), total_supply, }), @@ -240,16 +234,16 @@ async fn transfer_and_burn_via_ata() -> Result<()> { wallet::cli::execute_subcommand( ctx.wallet_mut(), Command::Ata(AtaSubcommand::Create { - owner: format_public_account_id(sender_account_id), - token_definition: definition_account_id.to_string(), + owner: public_mention(sender_account_id), + token_definition: definition_account_id, }), ) .await?; wallet::cli::execute_subcommand( ctx.wallet_mut(), Command::Ata(AtaSubcommand::Create { - owner: format_public_account_id(recipient_account_id), - token_definition: definition_account_id.to_string(), + owner: public_mention(recipient_account_id), + token_definition: definition_account_id, }), ) .await?; @@ -262,15 +256,12 @@ async fn transfer_and_burn_via_ata() -> Result<()> { wallet::cli::execute_subcommand( ctx.wallet_mut(), Command::Token(TokenProgramAgnosticSubcommand::Send { - from: Some(format_public_account_id(supply_account_id)), - from_label: None, - to: Some(format_public_account_id(sender_ata_id)), - to_label: None, + from: 
public_mention(supply_account_id), + to: Some(public_mention(sender_ata_id)), to_npk: None, to_vpk: None, to_identifier: Some(0), amount: fund_amount, - from_key_path: None, }), ) .await?; @@ -283,9 +274,9 @@ async fn transfer_and_burn_via_ata() -> Result<()> { wallet::cli::execute_subcommand( ctx.wallet_mut(), Command::Ata(AtaSubcommand::Send { - from: format_public_account_id(sender_account_id), - token_definition: definition_account_id.to_string(), - to: recipient_ata_id.to_string(), + from: public_mention(sender_account_id), + token_definition: definition_account_id, + to: recipient_ata_id, amount: transfer_amount, }), ) @@ -321,8 +312,8 @@ async fn transfer_and_burn_via_ata() -> Result<()> { wallet::cli::execute_subcommand( ctx.wallet_mut(), Command::Ata(AtaSubcommand::Burn { - holder: format_public_account_id(sender_account_id), - token_definition: definition_account_id.to_string(), + holder: public_mention(sender_account_id), + token_definition: definition_account_id, amount: burn_amount, }), ) @@ -372,10 +363,8 @@ async fn create_ata_with_private_owner() -> Result<()> { wallet::cli::execute_subcommand( ctx.wallet_mut(), Command::Token(TokenProgramAgnosticSubcommand::New { - definition_account_id: Some(format_public_account_id(definition_account_id)), - definition_account_label: None, - supply_account_id: Some(format_public_account_id(supply_account_id)), - supply_account_label: None, + definition_account_id: public_mention(definition_account_id), + supply_account_id: public_mention(supply_account_id), name: "TEST".to_owned(), total_supply: 100, }), @@ -389,8 +378,8 @@ async fn create_ata_with_private_owner() -> Result<()> { wallet::cli::execute_subcommand( ctx.wallet_mut(), Command::Ata(AtaSubcommand::Create { - owner: format_private_account_id(owner_account_id), - token_definition: definition_account_id.to_string(), + owner: private_mention(owner_account_id), + token_definition: definition_account_id, }), ) .await?; @@ -446,10 +435,8 @@ async fn 
transfer_via_ata_private_owner() -> Result<()> { wallet::cli::execute_subcommand( ctx.wallet_mut(), Command::Token(TokenProgramAgnosticSubcommand::New { - definition_account_id: Some(format_public_account_id(definition_account_id)), - definition_account_label: None, - supply_account_id: Some(format_public_account_id(supply_account_id)), - supply_account_label: None, + definition_account_id: public_mention(definition_account_id), + supply_account_id: public_mention(supply_account_id), name: "TEST".to_owned(), total_supply, }), @@ -474,16 +461,16 @@ async fn transfer_via_ata_private_owner() -> Result<()> { wallet::cli::execute_subcommand( ctx.wallet_mut(), Command::Ata(AtaSubcommand::Create { - owner: format_private_account_id(sender_account_id), - token_definition: definition_account_id.to_string(), + owner: private_mention(sender_account_id), + token_definition: definition_account_id, }), ) .await?; wallet::cli::execute_subcommand( ctx.wallet_mut(), Command::Ata(AtaSubcommand::Create { - owner: format_public_account_id(recipient_account_id), - token_definition: definition_account_id.to_string(), + owner: public_mention(recipient_account_id), + token_definition: definition_account_id, }), ) .await?; @@ -496,15 +483,12 @@ async fn transfer_via_ata_private_owner() -> Result<()> { wallet::cli::execute_subcommand( ctx.wallet_mut(), Command::Token(TokenProgramAgnosticSubcommand::Send { - from: Some(format_public_account_id(supply_account_id)), - from_label: None, - to: Some(format_public_account_id(sender_ata_id)), - to_label: None, + from: public_mention(supply_account_id), + to: Some(public_mention(sender_ata_id)), to_npk: None, to_vpk: None, to_identifier: Some(0), amount: fund_amount, - from_key_path: None, }), ) .await?; @@ -517,9 +501,9 @@ async fn transfer_via_ata_private_owner() -> Result<()> { wallet::cli::execute_subcommand( ctx.wallet_mut(), Command::Ata(AtaSubcommand::Send { - from: format_private_account_id(sender_account_id), - token_definition: 
definition_account_id.to_string(), - to: recipient_ata_id.to_string(), + from: private_mention(sender_account_id), + token_definition: definition_account_id, + to: recipient_ata_id, amount: transfer_amount, }), ) @@ -574,10 +558,8 @@ async fn burn_via_ata_private_owner() -> Result<()> { wallet::cli::execute_subcommand( ctx.wallet_mut(), Command::Token(TokenProgramAgnosticSubcommand::New { - definition_account_id: Some(format_public_account_id(definition_account_id)), - definition_account_label: None, - supply_account_id: Some(format_public_account_id(supply_account_id)), - supply_account_label: None, + definition_account_id: public_mention(definition_account_id), + supply_account_id: public_mention(supply_account_id), name: "TEST".to_owned(), total_supply, }), @@ -598,8 +580,8 @@ async fn burn_via_ata_private_owner() -> Result<()> { wallet::cli::execute_subcommand( ctx.wallet_mut(), Command::Ata(AtaSubcommand::Create { - owner: format_private_account_id(holder_account_id), - token_definition: definition_account_id.to_string(), + owner: private_mention(holder_account_id), + token_definition: definition_account_id, }), ) .await?; @@ -612,15 +594,12 @@ async fn burn_via_ata_private_owner() -> Result<()> { wallet::cli::execute_subcommand( ctx.wallet_mut(), Command::Token(TokenProgramAgnosticSubcommand::Send { - from: Some(format_public_account_id(supply_account_id)), - from_label: None, - to: Some(format_public_account_id(holder_ata_id)), - to_label: None, + from: public_mention(supply_account_id), + to: Some(public_mention(holder_ata_id)), to_npk: None, to_vpk: None, to_identifier: Some(0), amount: fund_amount, - from_key_path: None, }), ) .await?; @@ -633,8 +612,8 @@ async fn burn_via_ata_private_owner() -> Result<()> { wallet::cli::execute_subcommand( ctx.wallet_mut(), Command::Ata(AtaSubcommand::Burn { - holder: format_private_account_id(holder_account_id), - token_definition: definition_account_id.to_string(), + holder: private_mention(holder_account_id), + 
token_definition: definition_account_id, amount: burn_amount, }), ) diff --git a/integration_tests/tests/auth_transfer/private.rs b/integration_tests/tests/auth_transfer/private.rs index f08e3759..30225975 100644 --- a/integration_tests/tests/auth_transfer/private.rs +++ b/integration_tests/tests/auth_transfer/private.rs @@ -2,18 +2,21 @@ use std::time::Duration; use anyhow::{Context as _, Result}; use integration_tests::{ - TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, fetch_privacy_preserving_tx, - format_private_account_id, format_public_account_id, verify_commitment_is_in_state, + TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, fetch_privacy_preserving_tx, private_mention, + public_mention, verify_commitment_is_in_state, }; use log::info; use nssa::{AccountId, program::Program}; use nssa_core::{NullifierPublicKey, encryption::shared_key_derivation::Secp256k1Point}; use sequencer_service_rpc::RpcClient as _; use tokio::test; -use wallet::cli::{ - Command, SubcommandReturnValue, - account::{AccountSubcommand, NewSubcommand}, - programs::native_token_transfer::AuthTransferSubcommand, +use wallet::{ + account::Label, + cli::{ + CliAccountMention, Command, SubcommandReturnValue, + account::{AccountSubcommand, NewSubcommand}, + programs::native_token_transfer::AuthTransferSubcommand, + }, }; #[test] @@ -24,16 +27,12 @@ async fn private_transfer_to_owned_account() -> Result<()> { let to: AccountId = ctx.existing_private_accounts()[1]; let command = Command::AuthTransfer(AuthTransferSubcommand::Send { - from: Some(format_private_account_id(from)), - from_label: None, - to: Some(format_private_account_id(to)), - to_label: None, + from: private_mention(from), + to: Some(private_mention(to)), to_npk: None, to_vpk: None, to_identifier: Some(0), amount: 100, - to_key_path: None, - from_key_path: None, }); wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; @@ -68,16 +67,12 @@ async fn private_transfer_to_foreign_account() -> Result<()> { let to_vpk = 
Secp256k1Point::from_scalar(to_npk.0); let command = Command::AuthTransfer(AuthTransferSubcommand::Send { - from: Some(format_private_account_id(from)), - from_label: None, + from: private_mention(from), to: None, - to_label: None, to_npk: Some(to_npk_string), to_vpk: Some(hex::encode(to_vpk.0)), to_identifier: Some(0), amount: 100, - to_key_path: None, - from_key_path: None, }); let result = wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; @@ -121,16 +116,12 @@ async fn deshielded_transfer_to_public_account() -> Result<()> { assert_eq!(from_acc.balance, 10000); let command = Command::AuthTransfer(AuthTransferSubcommand::Send { - from: Some(format_private_account_id(from)), - from_label: None, - to: Some(format_public_account_id(to)), - to_label: None, + from: private_mention(from), + to: Some(public_mention(to)), to_npk: None, to_vpk: None, to_identifier: Some(0), amount: 100, - to_key_path: None, - from_key_path: None, }); wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; @@ -179,25 +170,21 @@ async fn private_transfer_to_owned_account_using_claiming_path() -> Result<()> { }; // Get the keys for the newly created account - let (to_keys, _, to_identifier) = ctx + let to = ctx .wallet() .storage() - .user_data - .get_private_account(to_account_id) + .key_chain() + .private_account(to_account_id) .context("Failed to get private account")?; // Send to this account using claiming path (using npk and vpk instead of account ID) let command = Command::AuthTransfer(AuthTransferSubcommand::Send { - from: Some(format_private_account_id(from)), - from_label: None, + from: private_mention(from), to: None, - to_label: None, - to_npk: Some(hex::encode(to_keys.nullifier_public_key.0)), - to_vpk: Some(hex::encode(to_keys.viewing_public_key.0)), - to_identifier: Some(to_identifier), + to_npk: Some(hex::encode(to.key_chain.nullifier_public_key.0)), + to_vpk: Some(hex::encode(&to.key_chain.viewing_public_key.0)), + to_identifier: 
Some(to.kind.identifier()), amount: 100, - to_key_path: None, - from_key_path: None, }); let sub_ret = wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; @@ -241,16 +228,12 @@ async fn shielded_transfer_to_owned_private_account() -> Result<()> { let to: AccountId = ctx.existing_private_accounts()[1]; let command = Command::AuthTransfer(AuthTransferSubcommand::Send { - from: Some(format_public_account_id(from)), - from_label: None, - to: Some(format_private_account_id(to)), - to_label: None, + from: public_mention(from), + to: Some(private_mention(to)), to_npk: None, to_vpk: None, to_identifier: Some(0), amount: 100, - to_key_path: None, - from_key_path: None, }); wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; @@ -288,16 +271,12 @@ async fn shielded_transfer_to_foreign_account() -> Result<()> { let from: AccountId = ctx.existing_public_accounts()[0]; let command = Command::AuthTransfer(AuthTransferSubcommand::Send { - from: Some(format_public_account_id(from)), - from_label: None, + from: public_mention(from), to: None, - to_label: None, to_npk: Some(to_npk_string), to_vpk: Some(hex::encode(to_vpk.0)), to_identifier: Some(0), amount: 100, - to_key_path: None, - from_key_path: None, }); let result = wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; @@ -353,25 +332,21 @@ async fn private_transfer_to_owned_account_continuous_run_path() -> Result<()> { }; // Get the newly created account's keys - let (to_keys, _, to_identifier) = ctx + let to = ctx .wallet() .storage() - .user_data - .get_private_account(to_account_id) + .key_chain() + .private_account(to_account_id) .context("Failed to get private account")?; // Send transfer using nullifier and viewing public keys let command = Command::AuthTransfer(AuthTransferSubcommand::Send { - from: Some(format_private_account_id(from)), - from_label: None, + from: private_mention(from), to: None, - to_label: None, - to_npk: Some(hex::encode(to_keys.nullifier_public_key.0)), - 
to_vpk: Some(hex::encode(to_keys.viewing_public_key.0)), - to_identifier: Some(to_identifier), + to_npk: Some(hex::encode(to.key_chain.nullifier_public_key.0)), + to_vpk: Some(hex::encode(&to.key_chain.viewing_public_key.0)), + to_identifier: Some(to.kind.identifier()), amount: 100, - to_key_path: None, - from_key_path: None, }); let sub_ret = wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; @@ -416,9 +391,7 @@ async fn initialize_private_account() -> Result<()> { }; let command = Command::AuthTransfer(AuthTransferSubcommand::Init { - account_id: Some(format_private_account_id(account_id)), - account_label: None, - key_path: None, + account: private_mention(account_id), }); wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; @@ -459,26 +432,21 @@ async fn private_transfer_using_from_label() -> Result<()> { let to: AccountId = ctx.existing_private_accounts()[1]; // Assign a label to the sender account - let label = "private-sender-label".to_owned(); + let label = Label::new("private-sender-label"); let command = Command::Account(AccountSubcommand::Label { - account_id: Some(format_private_account_id(from)), - account_label: None, + account_id: private_mention(from), label: label.clone(), }); wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; // Send using the label instead of account ID let command = Command::AuthTransfer(AuthTransferSubcommand::Send { - from: None, - from_label: Some(label), - to: Some(format_private_account_id(to)), - to_label: None, + from: CliAccountMention::Label(label), + to: Some(private_mention(to)), to_npk: None, to_vpk: None, to_identifier: Some(0), amount: 100, - from_key_path: None, - to_key_path: None, }); wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; @@ -508,7 +476,7 @@ async fn initialize_private_account_using_label() -> Result<()> { let mut ctx = TestContext::new().await?; // Create a new private account with a label - let label = "init-private-label".to_owned(); + 
let label = Label::new("init-private-label"); let command = Command::Account(AccountSubcommand::New(NewSubcommand::Private { cci: None, label: Some(label.clone()), @@ -520,9 +488,7 @@ async fn initialize_private_account_using_label() -> Result<()> { // Initialize using the label instead of account ID let command = Command::AuthTransfer(AuthTransferSubcommand::Init { - account_id: None, - account_label: Some(label), - key_path: None, + account: label.into(), }); wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; @@ -559,15 +525,12 @@ async fn shielded_transfers_to_two_identifiers_same_npk() -> Result<()> { // Both transfers below will target this same node with distinct identifiers. let chain_index = ctx.wallet_mut().create_private_accounts_key(None); let (npk, vpk) = { - let node = ctx + let key_chain = ctx .wallet() .storage() - .user_data - .private_key_tree - .key_map - .get(&chain_index) - .expect("node was just inserted"); - let key_chain = &node.value.0; + .key_chain() + .private_account_key_chain_by_index(&chain_index) + .expect("Failed to get private account key chain for chain index"); ( key_chain.nullifier_public_key, key_chain.viewing_public_key.clone(), @@ -586,16 +549,12 @@ async fn shielded_transfers_to_two_identifiers_same_npk() -> Result<()> { wallet::cli::execute_subcommand( ctx.wallet_mut(), Command::AuthTransfer(AuthTransferSubcommand::Send { - from: Some(format_public_account_id(sender_0)), - from_label: None, + from: public_mention(sender_0), to: None, - to_label: None, to_npk: Some(npk_hex.clone()), to_vpk: Some(vpk_hex.clone()), to_identifier: Some(identifier_1), amount: 100, - from_key_path: None, - to_key_path: None, }), ) .await?; @@ -603,16 +562,12 @@ async fn shielded_transfers_to_two_identifiers_same_npk() -> Result<()> { wallet::cli::execute_subcommand( ctx.wallet_mut(), Command::AuthTransfer(AuthTransferSubcommand::Send { - from: Some(format_public_account_id(sender_1)), - from_label: None, + from: 
public_mention(sender_1), to: None, - to_label: None, to_npk: Some(npk_hex), to_vpk: Some(vpk_hex), to_identifier: Some(identifier_2), amount: 200, - from_key_path: None, - to_key_path: None, }), ) .await?; @@ -627,14 +582,14 @@ async fn shielded_transfers_to_two_identifiers_same_npk() -> Result<()> { .await?; // Both accounts must be discovered with the correct balances. - let account_id_1 = AccountId::from((&npk, identifier_1)); + let account_id_1 = AccountId::for_regular_private_account(&npk, identifier_1); let acc_1 = ctx .wallet() .get_account_private(account_id_1) .context("account for identifier 1 not found after sync")?; assert_eq!(acc_1.balance, 100); - let account_id_2 = AccountId::from((&npk, identifier_2)); + let account_id_2 = AccountId::for_regular_private_account(&npk, identifier_2); let acc_2 = ctx .wallet() .get_account_private(account_id_2) @@ -642,21 +597,25 @@ async fn shielded_transfers_to_two_identifiers_same_npk() -> Result<()> { assert_eq!(acc_2.balance, 200); // Both account ids must resolve to the same key node. 
- let tree = &ctx.wallet().storage().user_data.private_key_tree; - let ci_1 = tree - .account_id_map - .get(&account_id_1) - .context("account_id_1 missing from private_key_tree.account_id_map")?; - let ci_2 = tree - .account_id_map - .get(&account_id_2) - .context("account_id_2 missing from private_key_tree.account_id_map")?; + let found_acc1 = ctx + .wallet() + .storage() + .key_chain() + .private_account(account_id_1) + .context("account_id_1 not found in key chain")?; + let found_acc2 = ctx + .wallet() + .storage() + .key_chain() + .private_account(account_id_2) + .context("account_id_2 not found in key chain")?; assert_eq!( - ci_1, ci_2, + found_acc1.chain_index, found_acc2.chain_index, "identifiers 1 and 2 under the same NPK must share a single chain_index" ); assert_eq!( - ci_1, &chain_index, + found_acc1.chain_index, + Some(chain_index), "both accounts must resolve to the key node created at the start of the test" ); diff --git a/integration_tests/tests/auth_transfer/public.rs b/integration_tests/tests/auth_transfer/public.rs index 9eabbf5e..4b99c37a 100644 --- a/integration_tests/tests/auth_transfer/public.rs +++ b/integration_tests/tests/auth_transfer/public.rs @@ -1,15 +1,19 @@ use std::time::Duration; use anyhow::Result; -use integration_tests::{TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, format_public_account_id}; +use common::transaction::NSSATransaction; +use integration_tests::{TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, public_mention}; use log::info; -use nssa::program::Program; +use nssa::{program::Program, public_transaction, system_faucet_account_id}; use sequencer_service_rpc::RpcClient as _; use tokio::test; -use wallet::cli::{ - Command, SubcommandReturnValue, - account::{AccountSubcommand, NewSubcommand}, - programs::native_token_transfer::AuthTransferSubcommand, +use wallet::{ + account::Label, + cli::{ + CliAccountMention, Command, SubcommandReturnValue, + account::{AccountSubcommand, NewSubcommand}, + 
programs::native_token_transfer::AuthTransferSubcommand, + }, }; #[test] @@ -17,16 +21,12 @@ async fn successful_transfer_to_existing_account() -> Result<()> { let mut ctx = TestContext::new().await?; let command = Command::AuthTransfer(AuthTransferSubcommand::Send { - from: Some(format_public_account_id(ctx.existing_public_accounts()[0])), - from_label: None, - to: Some(format_public_account_id(ctx.existing_public_accounts()[1])), - to_label: None, + from: public_mention(ctx.existing_public_accounts()[0]), + to: Some(public_mention(ctx.existing_public_accounts()[1])), to_npk: None, to_vpk: None, to_identifier: Some(0), amount: 100, - from_key_path: None, - to_key_path: None, }); wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; @@ -69,8 +69,9 @@ pub async fn successful_transfer_to_new_account() -> Result<()> { let new_persistent_account_id = ctx .wallet() .storage() - .user_data - .account_ids() + .key_chain() + .public_account_ids() + .map(|(account_id, _)| account_id) .find(|acc_id| { *acc_id != ctx.existing_public_accounts()[0] && *acc_id != ctx.existing_public_accounts()[1] @@ -78,16 +79,12 @@ pub async fn successful_transfer_to_new_account() -> Result<()> { .expect("Failed to find newly created account in the wallet storage"); let command = Command::AuthTransfer(AuthTransferSubcommand::Send { - from: Some(format_public_account_id(ctx.existing_public_accounts()[0])), - from_label: None, - to: Some(format_public_account_id(new_persistent_account_id)), - to_label: None, + from: public_mention(ctx.existing_public_accounts()[0]), + to: Some(public_mention(new_persistent_account_id)), to_npk: None, to_vpk: None, to_identifier: Some(0), amount: 100, - from_key_path: None, - to_key_path: None, }); wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; @@ -119,16 +116,12 @@ async fn failed_transfer_with_insufficient_balance() -> Result<()> { let mut ctx = TestContext::new().await?; let command = 
Command::AuthTransfer(AuthTransferSubcommand::Send { - from: Some(format_public_account_id(ctx.existing_public_accounts()[0])), - from_label: None, - to: Some(format_public_account_id(ctx.existing_public_accounts()[1])), - to_label: None, + from: public_mention(ctx.existing_public_accounts()[0]), + to: Some(public_mention(ctx.existing_public_accounts()[1])), to_npk: None, to_vpk: None, to_identifier: Some(0), amount: 1_000_000, - from_key_path: None, - to_key_path: None, }); let failed_send = wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await; @@ -162,16 +155,12 @@ async fn two_consecutive_successful_transfers() -> Result<()> { // First transfer let command = Command::AuthTransfer(AuthTransferSubcommand::Send { - from: Some(format_public_account_id(ctx.existing_public_accounts()[0])), - from_label: None, - to: Some(format_public_account_id(ctx.existing_public_accounts()[1])), - to_label: None, + from: public_mention(ctx.existing_public_accounts()[0]), + to: Some(public_mention(ctx.existing_public_accounts()[1])), to_npk: None, to_vpk: None, to_identifier: Some(0), amount: 100, - from_key_path: None, - to_key_path: None, }); wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; @@ -199,16 +188,12 @@ async fn two_consecutive_successful_transfers() -> Result<()> { // Second transfer let command = Command::AuthTransfer(AuthTransferSubcommand::Send { - from: Some(format_public_account_id(ctx.existing_public_accounts()[0])), - from_label: None, - to: Some(format_public_account_id(ctx.existing_public_accounts()[1])), - to_label: None, + from: public_mention(ctx.existing_public_accounts()[0]), + to: Some(public_mention(ctx.existing_public_accounts()[1])), to_npk: None, to_vpk: None, to_identifier: Some(0), amount: 100, - from_key_path: None, - to_key_path: None, }); wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; @@ -251,9 +236,7 @@ async fn initialize_public_account() -> Result<()> { }; let command = 
Command::AuthTransfer(AuthTransferSubcommand::Init { - account_id: Some(format_public_account_id(account_id)), - account_label: None, - key_path: None, + account: public_mention(account_id), }); wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; @@ -278,26 +261,21 @@ async fn successful_transfer_using_from_label() -> Result<()> { let mut ctx = TestContext::new().await?; // Assign a label to the sender account - let label = "sender-label".to_owned(); + let label = Label::new("sender-label"); let command = Command::Account(AccountSubcommand::Label { - account_id: Some(format_public_account_id(ctx.existing_public_accounts()[0])), - account_label: None, + account_id: public_mention(ctx.existing_public_accounts()[0]), label: label.clone(), }); wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; // Send using the label instead of account ID let command = Command::AuthTransfer(AuthTransferSubcommand::Send { - from: None, - from_label: Some(label), - to: Some(format_public_account_id(ctx.existing_public_accounts()[1])), - to_label: None, + from: CliAccountMention::Label(label), + to: Some(public_mention(ctx.existing_public_accounts()[1])), to_npk: None, to_vpk: None, to_identifier: Some(0), amount: 100, - from_key_path: None, - to_key_path: None, }); wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; @@ -328,26 +306,21 @@ async fn successful_transfer_using_to_label() -> Result<()> { let mut ctx = TestContext::new().await?; // Assign a label to the receiver account - let label = "receiver-label".to_owned(); + let label = Label::new("receiver-label"); let command = Command::Account(AccountSubcommand::Label { - account_id: Some(format_public_account_id(ctx.existing_public_accounts()[1])), - account_label: None, + account_id: public_mention(ctx.existing_public_accounts()[1]), label: label.clone(), }); wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; // Send using the label for the recipient let command = 
Command::AuthTransfer(AuthTransferSubcommand::Send { - from: Some(format_public_account_id(ctx.existing_public_accounts()[0])), - from_label: None, - to: None, - to_label: Some(label), + from: public_mention(ctx.existing_public_accounts()[0]), + to: Some(CliAccountMention::Label(label)), to_npk: None, to_vpk: None, to_identifier: Some(0), amount: 100, - from_key_path: None, - to_key_path: None, }); wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; @@ -372,3 +345,150 @@ async fn successful_transfer_using_to_label() -> Result<()> { Ok(()) } + +#[test] +async fn cannot_transfer_funds_from_system_faucet_account() -> Result<()> { + let ctx = TestContext::new().await?; + let faucet_account_id = system_faucet_account_id(); + + let recipient = ctx.existing_public_accounts()[0]; + let recipient_balance_before = ctx + .sequencer_client() + .get_account_balance(recipient) + .await?; + let faucet_balance_before = ctx + .sequencer_client() + .get_account_balance(faucet_account_id) + .await?; + + let amount = 1_u128; + let message = public_transaction::Message::try_new( + Program::authenticated_transfer_program().id(), + vec![faucet_account_id, recipient], + vec![], + authenticated_transfer_core::Instruction::Transfer { amount }, + )?; + let tx = nssa::PublicTransaction::new( + message, + nssa::public_transaction::WitnessSet::from_raw_parts(vec![]), + ); + let tx_hash = ctx + .sequencer_client() + .send_transaction(NSSATransaction::Public(tx)) + .await?; + + info!("Waiting for next block creation"); + tokio::time::sleep(Duration::from_secs(TIME_TO_WAIT_FOR_BLOCK_SECONDS)).await; + + let recipient_balance_after = ctx + .sequencer_client() + .get_account_balance(recipient) + .await?; + let faucet_balance_after = ctx + .sequencer_client() + .get_account_balance(faucet_account_id) + .await?; + let tx_on_chain = ctx.sequencer_client().get_transaction(tx_hash).await?; + + assert_eq!(recipient_balance_after, recipient_balance_before); + assert_eq!(faucet_balance_after, 
faucet_balance_before); + assert!(tx_on_chain.is_none()); + + Ok(()) +} + +#[test] +async fn can_transfer_funds_to_system_faucet_account() -> Result<()> { + let mut ctx = TestContext::new().await?; + let faucet_account_id = system_faucet_account_id(); + + let sender = ctx.existing_public_accounts()[0]; + let sender_balance_before = ctx.sequencer_client().get_account_balance(sender).await?; + let faucet_balance_before = ctx + .sequencer_client() + .get_account_balance(faucet_account_id) + .await?; + + let amount = 100_u128; + let command = Command::AuthTransfer(AuthTransferSubcommand::Send { + from: public_mention(sender), + to: Some(public_mention(faucet_account_id)), + to_npk: None, + to_vpk: None, + to_identifier: Some(0), + amount, + }); + wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; + + info!("Waiting for next block creation"); + tokio::time::sleep(Duration::from_secs(TIME_TO_WAIT_FOR_BLOCK_SECONDS)).await; + + let sender_balance_after = ctx.sequencer_client().get_account_balance(sender).await?; + let faucet_balance_after = ctx + .sequencer_client() + .get_account_balance(faucet_account_id) + .await?; + + assert_eq!(sender_balance_after, sender_balance_before - amount); + assert_eq!(faucet_balance_after, faucet_balance_before + amount); + + Ok(()) +} + +#[test] +async fn cannot_execute_faucet_program() -> Result<()> { + let ctx = TestContext::new().await?; + let faucet_account_id = system_faucet_account_id(); + + let recipient = ctx.existing_public_accounts()[0]; + let vault_program_id = Program::vault().id(); + let recipient_vault_id = vault_core::compute_vault_account_id(vault_program_id, recipient); + + let recipient_balance_before = ctx + .sequencer_client() + .get_account_balance(recipient) + .await?; + let faucet_balance_before = ctx + .sequencer_client() + .get_account_balance(faucet_account_id) + .await?; + + let amount = 1_u128; + let message = public_transaction::Message::try_new( + Program::faucet().id(), + 
vec![faucet_account_id, recipient_vault_id], + vec![], + faucet_core::Instruction::Transfer { + vault_program_id, + recipient_id: recipient, + amount, + }, + )?; + let tx = nssa::PublicTransaction::new( + message, + nssa::public_transaction::WitnessSet::from_raw_parts(vec![]), + ); + let tx_hash = ctx + .sequencer_client() + .send_transaction(NSSATransaction::Public(tx)) + .await?; + + info!("Waiting for next block creation"); + tokio::time::sleep(Duration::from_secs(TIME_TO_WAIT_FOR_BLOCK_SECONDS)).await; + + let recipient_balance_after = ctx + .sequencer_client() + .get_account_balance(recipient) + .await?; + let faucet_balance_after = ctx + .sequencer_client() + .get_account_balance(faucet_account_id) + .await?; + let tx_on_chain = ctx.sequencer_client().get_transaction(tx_hash).await?; + + assert_eq!(recipient_balance_after, recipient_balance_before); + assert_eq!(faucet_balance_after, faucet_balance_before); + assert!(tx_on_chain.is_none()); + + Ok(()) +} diff --git a/integration_tests/tests/indexer.rs b/integration_tests/tests/indexer.rs index 89bafe56..5cf33cde 100644 --- a/integration_tests/tests/indexer.rs +++ b/integration_tests/tests/indexer.rs @@ -9,54 +9,61 @@ use std::time::Duration; use anyhow::{Context as _, Result}; use indexer_service_rpc::RpcClient as _; use integration_tests::{ - TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, format_private_account_id, - format_public_account_id, verify_commitment_is_in_state, + TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, private_mention, public_mention, + verify_commitment_is_in_state, }; use log::info; use nssa::AccountId; -use wallet::cli::{Command, programs::native_token_transfer::AuthTransferSubcommand}; +use wallet::{ + account::Label, + cli::{CliAccountMention, Command, programs::native_token_transfer::AuthTransferSubcommand}, +}; /// Maximum time to wait for the indexer to catch up to the sequencer. 
const L2_TO_L1_TIMEOUT_MILLIS: u64 = 180_000; /// Poll the indexer until its last finalized block id reaches the sequencer's -/// current last block id (and at least the genesis block has been advanced past), -/// or until [`L2_TO_L1_TIMEOUT_MILLIS`] elapses. Returns the last indexer block -/// id observed. -async fn wait_for_indexer_to_catch_up(ctx: &TestContext) -> u64 { +/// current last block id or until [`L2_TO_L1_TIMEOUT_MILLIS`] elapses. +/// Returns the last indexer block id observed. +async fn wait_for_indexer_to_catch_up(ctx: &TestContext) -> Result { let timeout = Duration::from_millis(L2_TO_L1_TIMEOUT_MILLIS); + let block_id_to_catch_up = + sequencer_service_rpc::RpcClient::get_last_block_id(ctx.sequencer_client()).await?; let mut last_ind: u64 = 1; let inner = async { loop { - let seq = sequencer_service_rpc::RpcClient::get_last_block_id(ctx.sequencer_client()) - .await - .unwrap_or(0); let ind = ctx .indexer_client() .get_last_finalized_block_id() - .await - .unwrap_or(1); + .await? + .unwrap_or(0); last_ind = ind; - if ind >= seq && ind > 1 { - info!("Indexer caught up: seq={seq}, ind={ind}"); - return ind; + if ind >= block_id_to_catch_up { + let last_seq = + sequencer_service_rpc::RpcClient::get_last_block_id(ctx.sequencer_client()) + .await?; + info!( + "Indexer caught up. Indexer last block id: {ind}. Current sequencer last block id: {last_seq}" + ); + return Ok(ind); } tokio::time::sleep(Duration::from_secs(2)).await; } }; tokio::time::timeout(timeout, inner) .await - .unwrap_or_else(|_| { - info!("Indexer catch-up timed out: ind={last_ind}"); - last_ind - }) + .with_context(|| { + format!( + "Indexer failed to catch up within {L2_TO_L1_TIMEOUT_MILLIS} milliseconds. Last indexer block id observed: {last_ind}, but needed to catch up to at least {block_id_to_catch_up}" + ) + })? 
} #[tokio::test] async fn indexer_test_run() -> Result<()> { let ctx = TestContext::new().await?; - let last_block_indexer = wait_for_indexer_to_catch_up(&ctx).await; + let last_block_indexer = wait_for_indexer_to_catch_up(&ctx).await?; let last_block_seq = sequencer_service_rpc::RpcClient::get_last_block_id(ctx.sequencer_client()).await?; @@ -64,7 +71,7 @@ async fn indexer_test_run() -> Result<()> { info!("Last block on seq now is {last_block_seq}"); info!("Last block on ind now is {last_block_indexer}"); - assert!(last_block_indexer > 1); + assert!(last_block_indexer > 0); Ok(()) } @@ -74,11 +81,11 @@ async fn indexer_block_batching() -> Result<()> { let ctx = TestContext::new().await?; info!("Waiting for indexer to parse blocks"); - let last_block_indexer = wait_for_indexer_to_catch_up(&ctx).await; + let last_block_indexer = wait_for_indexer_to_catch_up(&ctx).await?; info!("Last block on ind now is {last_block_indexer}"); - assert!(last_block_indexer > 1); + assert!(last_block_indexer > 0); // Getting wide batch to fit all blocks (from latest backwards) let mut block_batch = ctx.indexer_client().get_blocks(None, 100).await.unwrap(); @@ -105,16 +112,12 @@ async fn indexer_state_consistency() -> Result<()> { let mut ctx = TestContext::new().await?; let command = Command::AuthTransfer(AuthTransferSubcommand::Send { - from: Some(format_public_account_id(ctx.existing_public_accounts()[0])), - from_label: None, - to: Some(format_public_account_id(ctx.existing_public_accounts()[1])), - to_label: None, + from: public_mention(ctx.existing_public_accounts()[0]), + to: Some(public_mention(ctx.existing_public_accounts()[1])), to_npk: None, to_vpk: None, to_identifier: Some(0), amount: 100, - from_key_path: None, - to_key_path: None, }); wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; @@ -144,16 +147,12 @@ async fn indexer_state_consistency() -> Result<()> { let to: AccountId = ctx.existing_private_accounts()[1]; let command = 
Command::AuthTransfer(AuthTransferSubcommand::Send { - from: Some(format_private_account_id(from)), - from_label: None, - to: Some(format_private_account_id(to)), - to_label: None, + from: private_mention(from), + to: Some(private_mention(to)), to_npk: None, to_vpk: None, to_identifier: Some(0), amount: 100, - from_key_path: None, - to_key_path: None, }); wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; @@ -176,7 +175,7 @@ async fn indexer_state_consistency() -> Result<()> { info!("Successfully transferred privately to owned account"); info!("Waiting for indexer to parse blocks"); - wait_for_indexer_to_catch_up(&ctx).await; + wait_for_indexer_to_catch_up(&ctx).await?; let acc1_ind_state = ctx .indexer_client() @@ -214,35 +213,29 @@ async fn indexer_state_consistency_with_labels() -> Result<()> { let mut ctx = TestContext::new().await?; // Assign labels to both accounts - let from_label = "idx-sender-label".to_owned(); - let to_label_str = "idx-receiver-label".to_owned(); + let from_label = Label::new("idx-sender-label"); + let to_label = Label::new("idx-receiver-label"); let label_cmd = Command::Account(wallet::cli::account::AccountSubcommand::Label { - account_id: Some(format_public_account_id(ctx.existing_public_accounts()[0])), - account_label: None, + account_id: public_mention(ctx.existing_public_accounts()[0]), label: from_label.clone(), }); wallet::cli::execute_subcommand(ctx.wallet_mut(), label_cmd).await?; let label_cmd = Command::Account(wallet::cli::account::AccountSubcommand::Label { - account_id: Some(format_public_account_id(ctx.existing_public_accounts()[1])), - account_label: None, - label: to_label_str.clone(), + account_id: public_mention(ctx.existing_public_accounts()[1]), + label: to_label.clone(), }); wallet::cli::execute_subcommand(ctx.wallet_mut(), label_cmd).await?; // Send using labels instead of account IDs let command = Command::AuthTransfer(AuthTransferSubcommand::Send { - from: None, - from_label: Some(from_label), - 
to: None, - to_label: Some(to_label_str), + from: CliAccountMention::Label(from_label), + to: Some(CliAccountMention::Label(to_label)), to_npk: None, to_vpk: None, to_identifier: Some(0), amount: 100, - from_key_path: None, - to_key_path: None, }); wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; @@ -265,7 +258,7 @@ async fn indexer_state_consistency_with_labels() -> Result<()> { assert_eq!(acc_2_balance, 20100); info!("Waiting for indexer to parse blocks"); - wait_for_indexer_to_catch_up(&ctx).await; + wait_for_indexer_to_catch_up(&ctx).await?; let acc1_ind_state = ctx .indexer_client() diff --git a/integration_tests/tests/indexer_ffi.rs b/integration_tests/tests/indexer_ffi.rs index 2730f9b5..178b2640 100644 --- a/integration_tests/tests/indexer_ffi.rs +++ b/integration_tests/tests/indexer_ffi.rs @@ -1,78 +1,184 @@ #![expect( clippy::shadow_unrelated, clippy::tests_outside_test_module, + clippy::undocumented_unsafe_blocks, reason = "We don't care about these in tests" )] -use anyhow::{Context as _, Result}; -use indexer_service_rpc::RpcClient as _; -use integration_tests::{ - TIME_TO_WAIT_FOR_BLOCK_SECONDS, format_private_account_id, format_public_account_id, - test_context_ffi::BlockingTestContextFFI, verify_commitment_is_in_state, +use std::{ + ffi::{CString, c_char}, + fs::File, + io::Write as _, + net::SocketAddr, }; -use log::info; + +use anyhow::{Context as _, Result}; +use indexer_ffi::{ + IndexerServiceFFI, OperationStatus, Runtime, + api::{ + PointerResult, + lifecycle::InitializedIndexerServiceFFIResult, + types::{FfiAccountId, FfiOption, FfiVec, account::FfiAccount, block::FfiBlock}, + }, +}; +use integration_tests::{ + BlockingTestContext, TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, private_mention, + public_mention, verify_commitment_is_in_state, +}; +use log::{debug, info}; use nssa::AccountId; -use wallet::cli::{Command, programs::native_token_transfer::AuthTransferSubcommand}; +use tempfile::TempDir; +use wallet::{ + 
account::Label, + cli::{Command, programs::native_token_transfer::AuthTransferSubcommand}, +}; /// Maximum time to wait for the indexer to catch up to the sequencer. const L2_TO_L1_TIMEOUT_MILLIS: u64 = 180_000; +unsafe extern "C" { + unsafe fn query_last_block( + runtime: *const Runtime, + indexer: *const IndexerServiceFFI, + ) -> PointerResult; + + unsafe fn query_block_vec( + runtime: *const Runtime, + indexer: *const IndexerServiceFFI, + before: FfiOption, + limit: u64, + ) -> PointerResult, OperationStatus>; + + unsafe fn query_account( + runtime: *const Runtime, + indexer: *const IndexerServiceFFI, + account_id: FfiAccountId, + ) -> PointerResult; + + unsafe fn start_indexer( + runtime: *const Runtime, + config_path: *const c_char, + port: u16, + ) -> InitializedIndexerServiceFFIResult; +} + +fn setup_indexer_ffi( + runtime: &Runtime, + bedrock_addr: SocketAddr, +) -> Result<(IndexerServiceFFI, TempDir)> { + let temp_indexer_dir = + tempfile::tempdir().context("Failed to create temp dir for indexer home")?; + + debug!( + "Using temp indexer home at {}", + temp_indexer_dir.path().display() + ); + + let indexer_config = + integration_tests::config::indexer_config(bedrock_addr, temp_indexer_dir.path().to_owned()) + .context("Failed to create Indexer config")?; + + let config_json = serde_json::to_vec(&indexer_config)?; + let config_path = temp_indexer_dir.path().join("indexer_config.json"); + let mut file = File::create(config_path.as_path())?; + file.write_all(&config_json)?; + file.flush()?; + + let res = + // SAFETY: lib function ensures validity of value. + unsafe { start_indexer(std::ptr::from_ref(runtime), CString::new(config_path.to_str().unwrap())?.as_ptr(), 0) }; + + if res.error.is_error() { + anyhow::bail!("Indexer FFI error {:?}", res.error); + } + + Ok(( + // SAFETY: lib function ensures validity of value. + unsafe { std::ptr::read(res.value) }, + temp_indexer_dir, + )) +} + +/// Prepare setup for tests. 
+fn setup() -> Result<(BlockingTestContext, IndexerServiceFFI, TempDir)> { + let ctx = TestContext::builder().disable_indexer().build_blocking()?; + // Safety: ctx runtime is valid for the lifetime of the returned Runtime + let runtime = unsafe { Runtime::from_borrowed(ctx.runtime()) }; + let (indexer_ffi, indexer_dir) = setup_indexer_ffi(&runtime, ctx.ctx().bedrock_addr())?; + + Ok((ctx, indexer_ffi, indexer_dir)) +} + #[test] fn indexer_test_run_ffi() -> Result<()> { - let blocking_ctx = BlockingTestContextFFI::new()?; - let runtime_wrapped = blocking_ctx.runtime(); + let (ctx, indexer_ffi, _indexer_dir) = setup()?; // RUN OBSERVATION - runtime_wrapped.block_on(async { - tokio::time::sleep(std::time::Duration::from_millis(L2_TO_L1_TIMEOUT_MILLIS)).await; - }); + std::thread::sleep(std::time::Duration::from_millis(L2_TO_L1_TIMEOUT_MILLIS)); - let last_block_indexer = blocking_ctx.ctx().get_last_block_indexer(runtime_wrapped)?; + // Safety: ctx runtime is valid for the lifetime of the returned Runtime + let runtime = unsafe { Runtime::from_borrowed(ctx.runtime()) }; + let last_block_indexer_ffi_res = + unsafe { query_last_block(&raw const runtime, &raw const indexer_ffi) }; - info!("Last block on ind now is {last_block_indexer}"); + assert!(last_block_indexer_ffi_res.error.is_ok()); - assert!(last_block_indexer > 1); + let last_block_indexer_ffi = unsafe { *last_block_indexer_ffi_res.value }; + + info!("Last block on indexer FFI now is {last_block_indexer_ffi}"); + + assert!(last_block_indexer_ffi > 0); Ok(()) } #[test] fn indexer_ffi_block_batching() -> Result<()> { - let blocking_ctx = BlockingTestContextFFI::new()?; - let runtime_wrapped = blocking_ctx.runtime(); - let ctx = blocking_ctx.ctx(); + let (ctx, indexer_ffi, _indexer_dir) = setup()?; // WAIT info!("Waiting for indexer to parse blocks"); - runtime_wrapped.block_on(async { - tokio::time::sleep(std::time::Duration::from_millis(L2_TO_L1_TIMEOUT_MILLIS)).await; - }); + 
std::thread::sleep(std::time::Duration::from_millis(L2_TO_L1_TIMEOUT_MILLIS)); - let last_block_indexer = runtime_wrapped - .block_on(ctx.indexer_client().get_last_finalized_block_id()) - .unwrap(); + // Safety: ctx runtime is valid for the lifetime of the returned Runtime + let runtime = unsafe { Runtime::from_borrowed(ctx.runtime()) }; + let last_block_indexer_ffi_res = + unsafe { query_last_block(&raw const runtime, &raw const indexer_ffi) }; - info!("Last block on ind now is {last_block_indexer}"); + assert!(last_block_indexer_ffi_res.error.is_ok()); - assert!(last_block_indexer > 1); + let last_block_indexer = unsafe { *last_block_indexer_ffi_res.value }; - // Getting wide batch to fit all blocks (from latest backwards) - let mut block_batch = runtime_wrapped - .block_on(ctx.indexer_client().get_blocks(None, 100)) - .unwrap(); + info!("Last block on indexer FFI now is {last_block_indexer}"); - // Reverse to check chain consistency from oldest to newest - block_batch.reverse(); + assert!(last_block_indexer > 0); - // Checking chain consistency - let mut prev_block_hash = block_batch.first().unwrap().header.hash; + let before_ffi = FfiOption::::from_none(); + let limit = 100; - for block in &block_batch[1..] 
{ - assert_eq!(block.header.prev_block_hash, prev_block_hash); + let block_batch_ffi_res = unsafe { + query_block_vec( + &raw const runtime, + &raw const indexer_ffi, + before_ffi, + limit, + ) + }; + + assert!(block_batch_ffi_res.error.is_ok()); + + let block_batch = unsafe { &*block_batch_ffi_res.value }; + + let mut last_block_prev_hash = unsafe { block_batch.get(0) }.header.prev_block_hash.data; + + for i in 1..block_batch.len { + let block = unsafe { block_batch.get(i) }; + + assert_eq!(last_block_prev_hash, block.header.hash.data); info!("Block {} chain-consistent", block.header.block_id); - prev_block_hash = block.header.hash; + last_block_prev_hash = block.header.prev_block_hash.data; } Ok(()) @@ -80,44 +186,37 @@ fn indexer_ffi_block_batching() -> Result<()> { #[test] fn indexer_ffi_state_consistency() -> Result<()> { - let mut blocking_ctx = BlockingTestContextFFI::new()?; - let runtime_wrapped = blocking_ctx.runtime_clone(); - let ctx = blocking_ctx.ctx_mut(); + let (mut ctx, indexer_ffi, _indexer_dir) = setup()?; let command = Command::AuthTransfer(AuthTransferSubcommand::Send { - from: Some(format_public_account_id(ctx.existing_public_accounts()[0])), - from_label: None, - to: Some(format_public_account_id(ctx.existing_public_accounts()[1])), - to_label: None, + from: public_mention(ctx.ctx().existing_public_accounts()[0]), + to: Some(public_mention(ctx.ctx().existing_public_accounts()[1])), to_npk: None, to_vpk: None, amount: 100, to_identifier: Some(0), - from_key_path: None, - to_key_path: None, }); - runtime_wrapped.block_on(wallet::cli::execute_subcommand(ctx.wallet_mut(), command))?; + ctx.block_on_mut(|ctx| wallet::cli::execute_subcommand(ctx.wallet_mut(), command))?; info!("Waiting for next block creation"); - runtime_wrapped.block_on(async { - tokio::time::sleep(std::time::Duration::from_millis( - TIME_TO_WAIT_FOR_BLOCK_SECONDS, - )) - .await; - }); + std::thread::sleep(std::time::Duration::from_secs( + TIME_TO_WAIT_FOR_BLOCK_SECONDS, + )); 
info!("Checking correct balance move"); - let acc_1_balance = - runtime_wrapped.block_on(sequencer_service_rpc::RpcClient::get_account_balance( + let acc_1_balance = ctx.block_on(|ctx| { + sequencer_service_rpc::RpcClient::get_account_balance( ctx.sequencer_client(), ctx.existing_public_accounts()[0], - ))?; - let acc_2_balance = - runtime_wrapped.block_on(sequencer_service_rpc::RpcClient::get_account_balance( + ) + })?; + let acc_2_balance = ctx.block_on(|ctx| { + sequencer_service_rpc::RpcClient::get_account_balance( ctx.sequencer_client(), ctx.existing_public_accounts()[1], - ))?; + ) + })?; info!("Balance of sender: {acc_1_balance:#?}"); info!("Balance of receiver: {acc_2_balance:#?}"); @@ -125,80 +224,90 @@ fn indexer_ffi_state_consistency() -> Result<()> { assert_eq!(acc_1_balance, 9900); assert_eq!(acc_2_balance, 20100); - let from: AccountId = ctx.existing_private_accounts()[0]; - let to: AccountId = ctx.existing_private_accounts()[1]; + let from: AccountId = ctx.ctx().existing_private_accounts()[0]; + let to: AccountId = ctx.ctx().existing_private_accounts()[1]; let command = Command::AuthTransfer(AuthTransferSubcommand::Send { - from: Some(format_private_account_id(from)), - from_label: None, - to: Some(format_private_account_id(to)), - to_label: None, + from: private_mention(from), + to: Some(private_mention(to)), to_npk: None, to_vpk: None, amount: 100, to_identifier: Some(0), - from_key_path: None, - to_key_path: None, }); - runtime_wrapped.block_on(wallet::cli::execute_subcommand(ctx.wallet_mut(), command))?; + ctx.block_on_mut(|ctx| wallet::cli::execute_subcommand(ctx.wallet_mut(), command))?; info!("Waiting for next block creation"); - runtime_wrapped.block_on(async { - tokio::time::sleep(std::time::Duration::from_millis( - TIME_TO_WAIT_FOR_BLOCK_SECONDS, - )) - .await; - }); + std::thread::sleep(std::time::Duration::from_secs( + TIME_TO_WAIT_FOR_BLOCK_SECONDS, + )); let new_commitment1 = ctx + .ctx() .wallet() .get_private_account_commitment(from) 
.context("Failed to get private account commitment for sender")?; - let commitment_check1 = runtime_wrapped.block_on(verify_commitment_is_in_state( - new_commitment1, - ctx.sequencer_client(), - )); + let commitment_check1 = + ctx.block_on(|ctx| verify_commitment_is_in_state(new_commitment1, ctx.sequencer_client())); assert!(commitment_check1); let new_commitment2 = ctx + .ctx() .wallet() .get_private_account_commitment(to) .context("Failed to get private account commitment for receiver")?; - let commitment_check2 = runtime_wrapped.block_on(verify_commitment_is_in_state( - new_commitment2, - ctx.sequencer_client(), - )); + let commitment_check2 = + ctx.block_on(|ctx| verify_commitment_is_in_state(new_commitment2, ctx.sequencer_client())); assert!(commitment_check2); info!("Successfully transferred privately to owned account"); // WAIT info!("Waiting for indexer to parse blocks"); - runtime_wrapped.block_on(async { - tokio::time::sleep(std::time::Duration::from_millis(L2_TO_L1_TIMEOUT_MILLIS)).await; - }); + std::thread::sleep(std::time::Duration::from_millis(L2_TO_L1_TIMEOUT_MILLIS)); - let acc1_ind_state = runtime_wrapped.block_on( - ctx.indexer_client() - .get_account(ctx.existing_public_accounts()[0].into()), - )?; - let acc2_ind_state = runtime_wrapped.block_on( - ctx.indexer_client() - .get_account(ctx.existing_public_accounts()[1].into()), - )?; + // Safety: ctx runtime is valid for the lifetime of the returned Runtime + let runtime = unsafe { Runtime::from_borrowed(ctx.runtime()) }; + let acc1_ind_state_ffi = unsafe { + query_account( + &raw const runtime, + &raw const indexer_ffi, + (&ctx.ctx().existing_public_accounts()[0]).into(), + ) + }; + + assert!(acc1_ind_state_ffi.error.is_ok()); + + let acc1_ind_state_pre = unsafe { &*acc1_ind_state_ffi.value }; + let acc1_ind_state: indexer_service_protocol::Account = acc1_ind_state_pre.into(); + + let acc2_ind_state_ffi = unsafe { + query_account( + &raw const runtime, + &raw const indexer_ffi, + 
(&ctx.ctx().existing_public_accounts()[1]).into(), + ) + }; + + assert!(acc2_ind_state_ffi.error.is_ok()); + + let acc2_ind_state_pre = unsafe { &*acc2_ind_state_ffi.value }; + let acc2_ind_state: indexer_service_protocol::Account = acc2_ind_state_pre.into(); info!("Checking correct state transition"); - let acc1_seq_state = - runtime_wrapped.block_on(sequencer_service_rpc::RpcClient::get_account( + let acc1_seq_state = ctx.block_on(|ctx| { + sequencer_service_rpc::RpcClient::get_account( ctx.sequencer_client(), ctx.existing_public_accounts()[0], - ))?; - let acc2_seq_state = - runtime_wrapped.block_on(sequencer_service_rpc::RpcClient::get_account( + ) + })?; + let acc2_seq_state = ctx.block_on(|ctx| { + sequencer_service_rpc::RpcClient::get_account( ctx.sequencer_client(), ctx.existing_public_accounts()[1], - ))?; + ) + })?; assert_eq!(acc1_ind_state, acc1_seq_state.into()); assert_eq!(acc2_ind_state, acc2_seq_state.into()); @@ -210,80 +319,81 @@ fn indexer_ffi_state_consistency() -> Result<()> { #[test] fn indexer_ffi_state_consistency_with_labels() -> Result<()> { - let mut blocking_ctx = BlockingTestContextFFI::new()?; - let runtime_wrapped = blocking_ctx.runtime_clone(); - let ctx = blocking_ctx.ctx_mut(); + let (mut ctx, indexer_ffi, _indexer_dir) = setup()?; // Assign labels to both accounts - let from_label = "idx-sender-label".to_owned(); - let to_label_str = "idx-receiver-label".to_owned(); + let from_label = Label::new("idx-sender-label"); + let to_label = Label::new("idx-receiver-label"); let label_cmd = Command::Account(wallet::cli::account::AccountSubcommand::Label { - account_id: Some(format_public_account_id(ctx.existing_public_accounts()[0])), - account_label: None, + account_id: public_mention(ctx.ctx().existing_public_accounts()[0]), label: from_label.clone(), }); - runtime_wrapped.block_on(wallet::cli::execute_subcommand(ctx.wallet_mut(), label_cmd))?; + ctx.block_on_mut(|ctx| wallet::cli::execute_subcommand(ctx.wallet_mut(), label_cmd))?; let 
label_cmd = Command::Account(wallet::cli::account::AccountSubcommand::Label { - account_id: Some(format_public_account_id(ctx.existing_public_accounts()[1])), - account_label: None, - label: to_label_str.clone(), + account_id: public_mention(ctx.ctx().existing_public_accounts()[1]), + label: to_label.clone(), }); - runtime_wrapped.block_on(wallet::cli::execute_subcommand(ctx.wallet_mut(), label_cmd))?; + ctx.block_on_mut(|ctx| wallet::cli::execute_subcommand(ctx.wallet_mut(), label_cmd))?; // Send using labels instead of account IDs let command = Command::AuthTransfer(AuthTransferSubcommand::Send { - from: None, - from_label: Some(from_label), - to: None, - to_label: Some(to_label_str), + from: from_label.into(), + to: Some(to_label.into()), to_npk: None, to_vpk: None, amount: 100, to_identifier: Some(0), - from_key_path: None, - to_key_path: None, }); - runtime_wrapped.block_on(wallet::cli::execute_subcommand(ctx.wallet_mut(), command))?; + ctx.block_on_mut(|ctx| wallet::cli::execute_subcommand(ctx.wallet_mut(), command))?; info!("Waiting for next block creation"); - runtime_wrapped.block_on(async { - tokio::time::sleep(std::time::Duration::from_millis( - TIME_TO_WAIT_FOR_BLOCK_SECONDS, - )) - .await; - }); + std::thread::sleep(std::time::Duration::from_secs( + TIME_TO_WAIT_FOR_BLOCK_SECONDS, + )); - let acc_1_balance = - runtime_wrapped.block_on(sequencer_service_rpc::RpcClient::get_account_balance( + let acc_1_balance = ctx.block_on(|ctx| { + sequencer_service_rpc::RpcClient::get_account_balance( ctx.sequencer_client(), ctx.existing_public_accounts()[0], - ))?; - let acc_2_balance = - runtime_wrapped.block_on(sequencer_service_rpc::RpcClient::get_account_balance( + ) + })?; + let acc_2_balance = ctx.block_on(|ctx| { + sequencer_service_rpc::RpcClient::get_account_balance( ctx.sequencer_client(), ctx.existing_public_accounts()[1], - ))?; + ) + })?; assert_eq!(acc_1_balance, 9900); assert_eq!(acc_2_balance, 20100); info!("Waiting for indexer to parse blocks"); - 
runtime_wrapped.block_on(async { - tokio::time::sleep(std::time::Duration::from_millis(L2_TO_L1_TIMEOUT_MILLIS)).await; - }); + std::thread::sleep(std::time::Duration::from_millis(L2_TO_L1_TIMEOUT_MILLIS)); - let acc1_ind_state = runtime_wrapped.block_on( - ctx.indexer_client() - .get_account(ctx.existing_public_accounts()[0].into()), - )?; - let acc1_seq_state = - runtime_wrapped.block_on(sequencer_service_rpc::RpcClient::get_account( + // Safety: ctx runtime is valid for the lifetime of the returned Runtime + let runtime = unsafe { Runtime::from_borrowed(ctx.runtime()) }; + let acc1_ind_state_ffi = unsafe { + query_account( + &raw const runtime, + &raw const indexer_ffi, + (&ctx.ctx().existing_public_accounts()[0]).into(), + ) + }; + + assert!(acc1_ind_state_ffi.error.is_ok()); + + let acc1_ind_state_pre = unsafe { &*acc1_ind_state_ffi.value }; + let acc1_ind_state: indexer_service_protocol::Account = acc1_ind_state_pre.into(); + + let acc1_seq_state = ctx.block_on(|ctx| { + sequencer_service_rpc::RpcClient::get_account( ctx.sequencer_client(), ctx.existing_public_accounts()[0], - ))?; + ) + })?; assert_eq!(acc1_ind_state, acc1_seq_state.into()); diff --git a/integration_tests/tests/keys_restoration.rs b/integration_tests/tests/keys.rs similarity index 79% rename from integration_tests/tests/keys_restoration.rs rename to integration_tests/tests/keys.rs index 98099020..0cc3c187 100644 --- a/integration_tests/tests/keys_restoration.rs +++ b/integration_tests/tests/keys.rs @@ -8,8 +8,8 @@ use std::{str::FromStr as _, time::Duration}; use anyhow::{Context as _, Result}; use integration_tests::{ - TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, fetch_privacy_preserving_tx, - format_private_account_id, format_public_account_id, verify_commitment_is_in_state, + TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, fetch_privacy_preserving_tx, private_mention, + public_mention, verify_commitment_is_in_state, }; use key_protocol::key_management::key_tree::chain_index::ChainIndex; use 
log::info; @@ -59,25 +59,21 @@ async fn sync_private_account_with_non_zero_chain_index() -> Result<()> { }; // Get the keys for the newly created account - let (to_keys, _, to_identifier) = ctx + let to_account = ctx .wallet() .storage() - .user_data - .get_private_account(to_account_id) + .key_chain() + .private_account(to_account_id) .context("Failed to get private account")?; // Send to this account using claiming path (using npk and vpk instead of account ID) let command = Command::AuthTransfer(AuthTransferSubcommand::Send { - from: Some(format_private_account_id(from)), - from_label: None, + from: private_mention(from), to: None, - to_label: None, - to_npk: Some(hex::encode(to_keys.nullifier_public_key.0)), - to_vpk: Some(hex::encode(to_keys.viewing_public_key.0)), - to_identifier: Some(to_identifier), + to_npk: Some(hex::encode(to_account.key_chain.nullifier_public_key.0)), + to_vpk: Some(hex::encode(&to_account.key_chain.viewing_public_key.0)), + to_identifier: Some(to_account.kind.identifier()), amount: 100, - from_key_path: None, - to_key_path: None, }); let sub_ret = wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; @@ -147,31 +143,23 @@ async fn restore_keys_from_seed() -> Result<()> { // Send to first private account let command = Command::AuthTransfer(AuthTransferSubcommand::Send { - from: Some(format_private_account_id(from)), - from_label: None, - to: Some(format_private_account_id(to_account_id1)), - to_label: None, + from: private_mention(from), + to: Some(private_mention(to_account_id1)), to_npk: None, to_vpk: None, to_identifier: Some(0), amount: 100, - from_key_path: None, - to_key_path: None, }); wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; // Send to second private account let command = Command::AuthTransfer(AuthTransferSubcommand::Send { - from: Some(format_private_account_id(from)), - from_label: None, - to: Some(format_private_account_id(to_account_id2)), - to_label: None, + from: 
private_mention(from), + to: Some(private_mention(to_account_id2)), to_npk: None, to_vpk: None, to_identifier: Some(0), amount: 101, - from_key_path: None, - to_key_path: None, }); wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; @@ -205,31 +193,23 @@ async fn restore_keys_from_seed() -> Result<()> { // Send to first public account let command = Command::AuthTransfer(AuthTransferSubcommand::Send { - from: Some(format_public_account_id(from)), - from_label: None, - to: Some(format_public_account_id(to_account_id3)), - to_label: None, + from: public_mention(from), + to: Some(public_mention(to_account_id3)), to_npk: None, to_vpk: None, to_identifier: Some(0), amount: 102, - from_key_path: None, - to_key_path: None, }); wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; // Send to second public account let command = Command::AuthTransfer(AuthTransferSubcommand::Send { - from: Some(format_public_account_id(from)), - from_label: None, - to: Some(format_public_account_id(to_account_id4)), - to_label: None, + from: public_mention(from), + to: Some(public_mention(to_account_id4)), to_npk: None, to_vpk: None, to_identifier: Some(0), amount: 103, - from_key_path: None, - to_key_path: None, }); wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; @@ -242,76 +222,64 @@ async fn restore_keys_from_seed() -> Result<()> { let acc1 = ctx .wallet() .storage() - .user_data - .private_key_tree - .get_node(to_account_id1) + .key_chain() + .private_account(to_account_id1) .expect("Acc 1 should be restored"); let acc2 = ctx .wallet() .storage() - .user_data - .private_key_tree - .get_node(to_account_id2) + .key_chain() + .private_account(to_account_id2) .expect("Acc 2 should be restored"); // Verify restored public accounts let _acc3 = ctx .wallet() .storage() - .user_data - .public_key_tree - .get_node(to_account_id3) + .key_chain() + .pub_account_signing_key(to_account_id3) .expect("Acc 3 should be restored"); let _acc4 = ctx .wallet() 
.storage() - .user_data - .public_key_tree - .get_node(to_account_id4) + .key_chain() + .pub_account_signing_key(to_account_id4) .expect("Acc 4 should be restored"); assert_eq!( - acc1.value.1[0].1.program_owner, + acc1.account.program_owner, Program::authenticated_transfer_program().id() ); assert_eq!( - acc2.value.1[0].1.program_owner, + acc2.account.program_owner, Program::authenticated_transfer_program().id() ); - assert_eq!(acc1.value.1[0].1.balance, 100); - assert_eq!(acc2.value.1[0].1.balance, 101); + assert_eq!(acc1.account.balance, 100); + assert_eq!(acc2.account.balance, 101); info!("Tree checks passed, testing restored accounts can transact"); // Test that restored accounts can send transactions let command = Command::AuthTransfer(AuthTransferSubcommand::Send { - from: Some(format_private_account_id(to_account_id1)), - from_label: None, - to: Some(format_private_account_id(to_account_id2)), - to_label: None, + from: private_mention(to_account_id1), + to: Some(private_mention(to_account_id2)), to_npk: None, to_vpk: None, to_identifier: Some(0), amount: 10, - from_key_path: None, - to_key_path: None, }); wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; let command = Command::AuthTransfer(AuthTransferSubcommand::Send { - from: Some(format_public_account_id(to_account_id3)), - from_label: None, - to: Some(format_public_account_id(to_account_id4)), - to_label: None, + from: public_mention(to_account_id3), + to: Some(public_mention(to_account_id4)), to_npk: None, to_vpk: None, to_identifier: Some(0), amount: 11, - from_key_path: None, - to_key_path: None, }); wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; diff --git a/integration_tests/tests/pinata.rs b/integration_tests/tests/pinata.rs index fcc77a76..eef80b55 100644 --- a/integration_tests/tests/pinata.rs +++ b/integration_tests/tests/pinata.rs @@ -9,8 +9,8 @@ use std::time::Duration; use anyhow::{Context as _, Result}; use common::PINATA_BASE58; use integration_tests::{ 
- TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, format_private_account_id, - format_public_account_id, verify_commitment_is_in_state, + TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, private_mention, public_mention, + verify_commitment_is_in_state, }; use log::info; use sequencer_service_rpc::RpcClient as _; @@ -42,8 +42,6 @@ async fn claim_pinata_to_uninitialized_public_account_fails_fast() -> Result<()> anyhow::bail!("Expected RegisterAccount return value"); }; - let winner_account_id_formatted = format_public_account_id(winner_account_id); - let pinata_balance_pre = ctx .sequencer_client() .get_account_balance(PINATA_BASE58.parse().unwrap()) @@ -52,9 +50,7 @@ async fn claim_pinata_to_uninitialized_public_account_fails_fast() -> Result<()> let claim_result = wallet::cli::execute_subcommand( ctx.wallet_mut(), Command::Pinata(PinataProgramAgnosticSubcommand::Claim { - to: Some(winner_account_id_formatted), - to_label: None, - key_path: None, + to: public_mention(winner_account_id), }), ) .await; @@ -98,8 +94,6 @@ async fn claim_pinata_to_uninitialized_private_account_fails_fast() -> Result<() anyhow::bail!("Expected RegisterAccount return value"); }; - let winner_account_id_formatted = format_private_account_id(winner_account_id); - let pinata_balance_pre = ctx .sequencer_client() .get_account_balance(PINATA_BASE58.parse().unwrap()) @@ -108,9 +102,7 @@ async fn claim_pinata_to_uninitialized_private_account_fails_fast() -> Result<() let claim_result = wallet::cli::execute_subcommand( ctx.wallet_mut(), Command::Pinata(PinataProgramAgnosticSubcommand::Claim { - to: Some(winner_account_id_formatted), - to_label: None, - key_path: None, + to: private_mention(winner_account_id), }), ) .await; @@ -141,9 +133,7 @@ async fn claim_pinata_to_existing_public_account() -> Result<()> { let pinata_prize = 150; let command = Command::Pinata(PinataProgramAgnosticSubcommand::Claim { - to: Some(format_public_account_id(ctx.existing_public_accounts()[0])), - to_label: None, - key_path: 
None, + to: public_mention(ctx.existing_public_accounts()[0]), }); let pinata_balance_pre = ctx @@ -181,11 +171,7 @@ async fn claim_pinata_to_existing_private_account() -> Result<()> { let pinata_prize = 150; let command = Command::Pinata(PinataProgramAgnosticSubcommand::Claim { - to: Some(format_private_account_id( - ctx.existing_private_accounts()[0], - )), - to_label: None, - key_path: None, + to: private_mention(ctx.existing_private_accounts()[0]), }); let pinata_balance_pre = ctx @@ -245,13 +231,9 @@ async fn claim_pinata_to_new_private_account() -> Result<()> { anyhow::bail!("Expected RegisterAccount return value"); }; - let winner_account_id_formatted = format_private_account_id(winner_account_id); - // Initialize account under auth transfer program let command = Command::AuthTransfer(AuthTransferSubcommand::Init { - account_id: Some(winner_account_id_formatted.clone()), - account_label: None, - key_path: None, + account: private_mention(winner_account_id), }); wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; @@ -266,9 +248,7 @@ async fn claim_pinata_to_new_private_account() -> Result<()> { // Claim pinata to the new private account let command = Command::Pinata(PinataProgramAgnosticSubcommand::Claim { - to: Some(winner_account_id_formatted), - to_label: None, - key_path: None, + to: private_mention(winner_account_id), }); let pinata_balance_pre = ctx diff --git a/integration_tests/tests/private_pda.rs b/integration_tests/tests/private_pda.rs new file mode 100644 index 00000000..f9969d98 --- /dev/null +++ b/integration_tests/tests/private_pda.rs @@ -0,0 +1,307 @@ +#![expect( + clippy::tests_outside_test_module, + reason = "We don't care about these in tests" +)] + +use std::{path::PathBuf, time::Duration}; + +use anyhow::{Context as _, Result}; +use integration_tests::{ + NSSA_PROGRAM_FOR_TEST_PDA_FUND_SPEND_PROXY, TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, + verify_commitment_is_in_state, +}; +use log::info; +use nssa::{ + AccountId, 
ProgramId, privacy_preserving_transaction::circuit::ProgramWithDependencies, + program::Program, +}; +use nssa_core::{NullifierPublicKey, encryption::ViewingPublicKey, program::PdaSeed}; +use tokio::test; +use wallet::{ + PrivacyPreservingAccount, WalletCore, + cli::{Command, account::AccountSubcommand}, +}; + +/// Funds a private PDA via the proxy program with a chained call to `auth_transfer`. +/// +/// A direct call to `auth_transfer` cannot establish the PDA-to-npk binding because it uses +/// `Claim::Authorized` rather than `Claim::Pda`. Routing through the proxy provides the binding +/// via `pda_seeds` in the chained call to `auth_transfer`. +#[expect( + clippy::too_many_arguments, + reason = "test helper — grouping args would obscure intent" +)] +async fn fund_private_pda( + wallet: &WalletCore, + sender: AccountId, + pda_account_id: AccountId, + npk: NullifierPublicKey, + vpk: ViewingPublicKey, + identifier: u128, + seed: PdaSeed, + amount: u128, + proxy_program: &ProgramWithDependencies, + auth_transfer_id: ProgramId, +) -> Result<()> { + wallet + .send_privacy_preserving_tx( + vec![ + PrivacyPreservingAccount::Public(sender), + PrivacyPreservingAccount::PrivatePdaForeign { + account_id: pda_account_id, + npk, + vpk, + identifier, + }, + ], + Program::serialize_instruction((seed, amount, auth_transfer_id, true)) + .context("failed to serialize pda_fund_spend_proxy fund instruction")?, + proxy_program, + ) + .await + .map_err(|e| anyhow::anyhow!("{e}"))?; + Ok(()) +} + +/// Spends from an owned private PDA to a fresh private-foreign recipient. +/// +/// Alice must own the PDA in the wallet (i.e. it must have been synced after a receive). 
+#[expect( + clippy::too_many_arguments, + reason = "test helper — grouping args would obscure intent" +)] +async fn spend_private_pda( + wallet: &WalletCore, + pda_account_id: AccountId, + recipient_npk: NullifierPublicKey, + recipient_vpk: ViewingPublicKey, + seed: PdaSeed, + amount: u128, + spend_program: &ProgramWithDependencies, + auth_transfer_id: nssa::ProgramId, +) -> Result<()> { + wallet + .send_privacy_preserving_tx( + vec![ + PrivacyPreservingAccount::PrivatePdaOwned(pda_account_id), + PrivacyPreservingAccount::PrivateForeign { + npk: recipient_npk, + vpk: recipient_vpk, + identifier: 0, + }, + ], + Program::serialize_instruction((seed, amount, auth_transfer_id, false)) + .context("failed to serialize pda_fund_spend_proxy instruction")?, + spend_program, + ) + .await + .map_err(|e| anyhow::anyhow!("{e}"))?; + Ok(()) +} + +/// Two private transfers go to distinct members of the same PDA family (same seed and npk, +/// but identifier=0 and identifier=1). Alice then spends from both PDAs. +/// +/// This exercises the full identifier-diversified private PDA lifecycle: +/// receive(id=0), receive(id=1) → sync → spend(id=0), spend(id=1) → sync → assert. 
+#[test] +async fn private_pda_family_members_receive_and_spend() -> Result<()> { + let mut ctx = TestContext::new().await?; + + // ── Build alice's key chain ────────────────────────────────────────────────────────────────── + let (alice_id, _alice_chain_index) = ctx.wallet_mut().create_new_account_private(None); + let (alice_npk, alice_vpk) = { + let account = ctx + .wallet() + .storage() + .key_chain() + .private_account(alice_id) + .expect("Account was just created, should be present"); + let kc = account.key_chain; + (kc.nullifier_public_key, kc.viewing_public_key.clone()) + }; + + let proxy = { + let path = PathBuf::from(env!("CARGO_MANIFEST_DIR")) + .join("../artifacts/test_program_methods") + .join(NSSA_PROGRAM_FOR_TEST_PDA_FUND_SPEND_PROXY); + Program::new(std::fs::read(&path).with_context(|| format!("reading {path:?}"))?) + .context("invalid pda_fund_spend_proxy binary")? + }; + let auth_transfer = Program::authenticated_transfer_program(); + let proxy_id = proxy.id(); + let auth_transfer_id = auth_transfer.id(); + let seed = PdaSeed::new([42; 32]); + let amount: u128 = 100; + + let spend_program = + ProgramWithDependencies::new(proxy, [(auth_transfer_id, auth_transfer)].into()); + + let alice_pda_0_id = AccountId::for_private_pda(&proxy_id, &seed, &alice_npk, 0); + let alice_pda_1_id = AccountId::for_private_pda(&proxy_id, &seed, &alice_npk, 1); + + // Use two different public senders to avoid nonce conflicts between the back-to-back txs. 
+ let senders = ctx.existing_public_accounts(); + let sender_0 = senders[0]; + let sender_1 = senders[1]; + + // ── Receive ────────────────────────────────────────────────────────────────────────────────── + + info!("Sending to alice_pda_0 (identifier=0)"); + fund_private_pda( + ctx.wallet(), + sender_0, + alice_pda_0_id, + alice_npk, + alice_vpk.clone(), + 0, + seed, + amount, + &spend_program, + auth_transfer_id, + ) + .await?; + + info!("Sending to alice_pda_1 (identifier=1)"); + fund_private_pda( + ctx.wallet(), + sender_1, + alice_pda_1_id, + alice_npk, + alice_vpk.clone(), + 1, + seed, + amount, + &spend_program, + auth_transfer_id, + ) + .await?; + + info!("Waiting for block"); + tokio::time::sleep(Duration::from_secs(TIME_TO_WAIT_FOR_BLOCK_SECONDS)).await; + + // Sync so alice's wallet discovers and stores both PDAs. + wallet::cli::execute_subcommand( + ctx.wallet_mut(), + Command::Account(AccountSubcommand::SyncPrivate {}), + ) + .await?; + + // Both PDAs must be discoverable and have the correct balance. + let pda_0_account = ctx + .wallet() + .get_account_private(alice_pda_0_id) + .context("alice_pda_0 not found after sync")?; + assert_eq!(pda_0_account.balance, amount); + + let pda_1_account = ctx + .wallet() + .get_account_private(alice_pda_1_id) + .context("alice_pda_1 not found after sync")?; + assert_eq!(pda_1_account.balance, amount); + + // Commitments for both PDAs must be in the sequencer's state. 
+ let commitment_0 = ctx + .wallet() + .get_private_account_commitment(alice_pda_0_id) + .context("commitment for alice_pda_0 missing")?; + assert!( + verify_commitment_is_in_state(commitment_0.clone(), ctx.sequencer_client()).await, + "alice_pda_0 commitment not in state after receive" + ); + + let commitment_1 = ctx + .wallet() + .get_private_account_commitment(alice_pda_1_id) + .context("commitment for alice_pda_1 missing")?; + assert!( + verify_commitment_is_in_state(commitment_1.clone(), ctx.sequencer_client()).await, + "alice_pda_1 commitment not in state after receive" + ); + assert_ne!( + commitment_0, commitment_1, + "distinct identifiers must yield distinct commitments" + ); + + // ── Spend ───────────────────────────────────────────────────────────────────────────────────── + + // Fresh recipients — hardcoded npks not in any wallet. + let recipient_npk_0 = NullifierPublicKey([0xAA; 32]); + let recipient_vpk_0 = ViewingPublicKey::from_scalar(recipient_npk_0.0); + + let recipient_npk_1 = NullifierPublicKey([0xBB; 32]); + let recipient_vpk_1 = ViewingPublicKey::from_scalar(recipient_npk_1.0); + + let amount_spend_0: u128 = 13; + let amount_spend_1: u128 = 37; + + info!("Alice spending from alice_pda_0"); + spend_private_pda( + ctx.wallet(), + alice_pda_0_id, + recipient_npk_0, + recipient_vpk_0, + seed, + amount_spend_0, + &spend_program, + auth_transfer_id, + ) + .await?; + + info!("Alice spending from alice_pda_1"); + spend_private_pda( + ctx.wallet(), + alice_pda_1_id, + recipient_npk_1, + recipient_vpk_1, + seed, + amount_spend_1, + &spend_program, + auth_transfer_id, + ) + .await?; + + info!("Waiting for block"); + tokio::time::sleep(Duration::from_secs(TIME_TO_WAIT_FOR_BLOCK_SECONDS)).await; + + wallet::cli::execute_subcommand( + ctx.wallet_mut(), + Command::Account(AccountSubcommand::SyncPrivate {}), + ) + .await?; + + // After spending, PDAs should have the remaining balance. 
+ let pda_0_spent = ctx + .wallet() + .get_account_private(alice_pda_0_id) + .context("alice_pda_0 not found after spend sync")?; + assert_eq!(pda_0_spent.balance, amount - amount_spend_0); + + let pda_1_spent = ctx + .wallet() + .get_account_private(alice_pda_1_id) + .context("alice_pda_1 not found after spend sync")?; + assert_eq!(pda_1_spent.balance, amount - amount_spend_1); + + // Post-spend commitments must be in state. + let post_spend_commitment_0 = ctx + .wallet() + .get_private_account_commitment(alice_pda_0_id) + .context("post-spend commitment for alice_pda_0 missing")?; + assert!( + verify_commitment_is_in_state(post_spend_commitment_0, ctx.sequencer_client()).await, + "alice_pda_0 post-spend commitment not in state" + ); + + let post_spend_commitment_1 = ctx + .wallet() + .get_private_account_commitment(alice_pda_1_id) + .context("post-spend commitment for alice_pda_1 missing")?; + assert!( + verify_commitment_is_in_state(post_spend_commitment_1, ctx.sequencer_client()).await, + "alice_pda_1 post-spend commitment not in state" + ); + + info!("Private PDA family member receive-and-spend test passed"); + Ok(()) +} diff --git a/integration_tests/tests/shared_accounts.rs b/integration_tests/tests/shared_accounts.rs new file mode 100644 index 00000000..ba2dad08 --- /dev/null +++ b/integration_tests/tests/shared_accounts.rs @@ -0,0 +1,236 @@ +#![expect( + clippy::tests_outside_test_module, + reason = "Integration test file, not inside a #[cfg(test)] module" +)] +#![expect( + clippy::shadow_unrelated, + reason = "Sequential wallet commands naturally reuse the `command` binding" +)] + +//! Shared account integration tests. +//! +//! Demonstrates: +//! 1. Group creation and GMS distribution via seal/unseal. +//! 2. Shared regular private account creation via `--for-gms`. +//! 3. Funding a shared account from a public account. +//! 4. Syncing discovers the funded shared account state. 
+ +use std::time::Duration; + +use anyhow::{Context as _, Result}; +use integration_tests::{ + TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, private_mention, public_mention, +}; +use log::info; +use tokio::test; +use wallet::{ + account::Label, + cli::{ + Command, SubcommandReturnValue, + account::{AccountSubcommand, NewSubcommand}, + group::GroupSubcommand, + programs::native_token_transfer::AuthTransferSubcommand, + }, +}; + +/// Create a group, create a shared account from it, and verify registration. +#[test] +async fn group_create_and_shared_account_registration() -> Result<()> { + let mut ctx = TestContext::new().await?; + + // Create a group + let command = Command::Group(GroupSubcommand::New { + name: "test-group".into(), + }); + wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; + + // Verify group exists + assert!( + ctx.wallet() + .storage() + .key_chain() + .group_key_holder(&Label::new("test-group")) + .is_some() + ); + + // Create a shared regular private account from the group + let command = Command::Account(AccountSubcommand::New(NewSubcommand::PrivateGms { + group: "test-group".into(), + label: Some("shared-acc".into()), + pda: false, + seed: None, + program_id: None, + identifier: None, + })); + + let result = wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; + let SubcommandReturnValue::RegisterAccount { + account_id: shared_account_id, + } = result + else { + anyhow::bail!("Expected RegisterAccount return value"); + }; + + // Verify shared account is registered in storage + let entry = ctx + .wallet() + .storage() + .key_chain() + .shared_private_account(shared_account_id) + .context("Shared account not found in storage")?; + assert_eq!(entry.group_label, Label::new("test-group")); + assert!(entry.pda_seed.is_none()); + + info!("Shared account registered: {shared_account_id}"); + Ok(()) +} + +/// GMS seal/unseal round-trip via invite/join, verify key agreement. 
+#[test] +async fn group_invite_join_key_agreement() -> Result<()> { + let mut ctx = TestContext::new().await?; + + // Generate a sealing key + let command = Command::Group(GroupSubcommand::NewSealingKey); + wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; + + // Create a group + let command = Command::Group(GroupSubcommand::New { + name: "alice-group".into(), + }); + wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; + + // Seal GMS for ourselves (simulating invite to another wallet) + let sealing_sk = ctx + .wallet() + .storage() + .key_chain() + .sealing_secret_key() + .context("Sealing key not found")?; + let sealing_pk = + key_protocol::key_management::group_key_holder::SealingPublicKey::from_scalar(sealing_sk); + + let holder = ctx + .wallet() + .storage() + .key_chain() + .group_key_holder(&Label::new("alice-group")) + .context("Group not found")?; + let sealed = holder.seal_for(&sealing_pk); + let sealed_hex = hex::encode(&sealed); + + // Join under a different name (simulating Bob receiving the sealed GMS) + let command = Command::Group(GroupSubcommand::Join { + name: "bob-copy".into(), + sealed: sealed_hex, + }); + wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; + + // Both derive the same keys for the same derivation seed + let alice_holder = ctx + .wallet() + .storage() + .key_chain() + .group_key_holder(&Label::new("alice-group")) + .unwrap(); + let bob_holder = ctx + .wallet() + .storage() + .key_chain() + .group_key_holder(&Label::new("bob-copy")) + .unwrap(); + + let seed = [42_u8; 32]; + let alice_npk = alice_holder + .derive_keys_for_shared_account(&seed) + .generate_nullifier_public_key(); + let bob_npk = bob_holder + .derive_keys_for_shared_account(&seed) + .generate_nullifier_public_key(); + + assert_eq!( + alice_npk, bob_npk, + "Key agreement: same GMS produces same keys" + ); + + info!("Key agreement verified via invite/join"); + Ok(()) +} + +/// Fund a shared account from a public account 
via auth-transfer, then sync. +/// TODO: Requires auth-transfer init to work with shared accounts (authorization flow). +#[test] +async fn fund_shared_account_from_public() -> Result<()> { + let mut ctx = TestContext::new().await?; + + // Create group and shared account + let command = Command::Group(GroupSubcommand::New { + name: "fund-group".into(), + }); + wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; + + let command = Command::Account(AccountSubcommand::New(NewSubcommand::PrivateGms { + group: "fund-group".into(), + label: None, + pda: false, + seed: None, + program_id: None, + identifier: None, + })); + let result = wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; + let SubcommandReturnValue::RegisterAccount { + account_id: shared_id, + } = result + else { + anyhow::bail!("Expected RegisterAccount return value"); + }; + + // Initialize the shared account under auth-transfer + let command = Command::AuthTransfer(AuthTransferSubcommand::Init { + account: private_mention(shared_id), + }); + wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; + + tokio::time::sleep(Duration::from_secs(TIME_TO_WAIT_FOR_BLOCK_SECONDS)).await; + + // Sync private accounts + let command = Command::Account(AccountSubcommand::SyncPrivate); + wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; + + // Fund from a public account + let from_public = ctx.existing_public_accounts()[0]; + let command = Command::AuthTransfer(AuthTransferSubcommand::Send { + from: public_mention(from_public), + to: Some(private_mention(shared_id)), + to_npk: None, + to_vpk: None, + to_identifier: None, + amount: 100, + }); + wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; + + tokio::time::sleep(Duration::from_secs(TIME_TO_WAIT_FOR_BLOCK_SECONDS)).await; + + // Sync private accounts + let command = Command::Account(AccountSubcommand::SyncPrivate); + wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; + + // 
Verify the shared account was updated + let entry = ctx + .wallet() + .storage() + .key_chain() + .shared_private_account(shared_id) + .context("Shared account not found after sync")?; + + info!( + "Shared account balance after funding: {}", + entry.account.balance + ); + assert_eq!( + entry.account.balance, 100, + "Shared account should have received 100" + ); + + Ok(()) +} diff --git a/integration_tests/tests/token.rs b/integration_tests/tests/token.rs index 0dc3382a..65011976 100644 --- a/integration_tests/tests/token.rs +++ b/integration_tests/tests/token.rs @@ -8,8 +8,8 @@ use std::time::Duration; use anyhow::{Context as _, Result}; use integration_tests::{ - TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, format_private_account_id, - format_public_account_id, verify_commitment_is_in_state, + TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, private_mention, public_mention, + verify_commitment_is_in_state, }; use key_protocol::key_management::key_tree::chain_index::ChainIndex; use log::info; @@ -17,10 +17,13 @@ use nssa::program::Program; use sequencer_service_rpc::RpcClient as _; use token_core::{TokenDefinition, TokenHolding}; use tokio::test; -use wallet::cli::{ - Command, SubcommandReturnValue, - account::{AccountSubcommand, NewSubcommand}, - programs::token::TokenProgramAgnosticSubcommand, +use wallet::{ + account::Label, + cli::{ + Command, SubcommandReturnValue, + account::{AccountSubcommand, NewSubcommand}, + programs::token::TokenProgramAgnosticSubcommand, + }, }; #[test] @@ -79,10 +82,8 @@ async fn create_and_transfer_public_token() -> Result<()> { let name = "A NAME".to_owned(); let total_supply = 37; let subcommand = TokenProgramAgnosticSubcommand::New { - definition_account_id: Some(format_public_account_id(definition_account_id)), - definition_account_label: None, - supply_account_id: Some(format_public_account_id(supply_account_id)), - supply_account_label: None, + definition_account_id: public_mention(definition_account_id), + supply_account_id: 
public_mention(supply_account_id), name: name.clone(), total_supply, }; @@ -128,15 +129,12 @@ async fn create_and_transfer_public_token() -> Result<()> { // Transfer 7 tokens from supply_acc to recipient_account_id let transfer_amount = 7; let subcommand = TokenProgramAgnosticSubcommand::Send { - from: Some(format_public_account_id(supply_account_id)), - from_label: None, - to: Some(format_public_account_id(recipient_account_id)), - to_label: None, + from: public_mention(supply_account_id), + to: Some(public_mention(recipient_account_id)), to_npk: None, to_vpk: None, to_identifier: Some(0), amount: transfer_amount, - from_key_path: None, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; @@ -177,12 +175,9 @@ async fn create_and_transfer_public_token() -> Result<()> { // Burn 3 tokens from recipient_acc let burn_amount = 3; let subcommand = TokenProgramAgnosticSubcommand::Burn { - definition: Some(format_public_account_id(definition_account_id)), - definition_label: None, - holder: Some(format_public_account_id(recipient_account_id)), - holder_label: None, + definition: public_mention(definition_account_id), + holder: public_mention(recipient_account_id), amount: burn_amount, - holder_key_path: None, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; @@ -224,10 +219,8 @@ async fn create_and_transfer_public_token() -> Result<()> { // Mint 10 tokens at recipient_acc let mint_amount = 10; let subcommand = TokenProgramAgnosticSubcommand::Mint { - definition: Some(format_public_account_id(definition_account_id)), - definition_label: None, - holder: Some(format_public_account_id(recipient_account_id)), - holder_label: None, + definition: public_mention(definition_account_id), + holder: Some(public_mention(recipient_account_id)), holder_npk: None, holder_vpk: None, holder_identifier: None, @@ -331,10 +324,8 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> { let name = "A 
NAME".to_owned(); let total_supply = 37; let subcommand = TokenProgramAgnosticSubcommand::New { - definition_account_id: Some(format_public_account_id(definition_account_id)), - definition_account_label: None, - supply_account_id: Some(format_private_account_id(supply_account_id)), - supply_account_label: None, + definition_account_id: public_mention(definition_account_id), + supply_account_id: private_mention(supply_account_id), name: name.clone(), total_supply, }; @@ -370,15 +361,12 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> { // Transfer 7 tokens from supply_acc to recipient_account_id let transfer_amount = 7; let subcommand = TokenProgramAgnosticSubcommand::Send { - from: Some(format_private_account_id(supply_account_id)), - from_label: None, - to: Some(format_private_account_id(recipient_account_id)), - to_label: None, + from: private_mention(supply_account_id), + to: Some(private_mention(recipient_account_id)), to_npk: None, to_vpk: None, to_identifier: Some(0), amount: transfer_amount, - from_key_path: None, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; @@ -401,12 +389,9 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> { // Burn 3 tokens from recipient_acc let burn_amount = 3; let subcommand = TokenProgramAgnosticSubcommand::Burn { - definition: Some(format_public_account_id(definition_account_id)), - definition_label: None, - holder: Some(format_private_account_id(recipient_account_id)), - holder_label: None, + definition: public_mention(definition_account_id), + holder: private_mention(recipient_account_id), amount: burn_amount, - holder_key_path: None, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; @@ -496,10 +481,8 @@ async fn create_token_with_private_definition() -> Result<()> { let name = "A NAME".to_owned(); let total_supply = 37; let subcommand = TokenProgramAgnosticSubcommand::New { - definition_account_id: 
Some(format_private_account_id(definition_account_id)), - definition_account_label: None, - supply_account_id: Some(format_public_account_id(supply_account_id)), - supply_account_label: None, + definition_account_id: private_mention(definition_account_id), + supply_account_id: public_mention(supply_account_id), name: name.clone(), total_supply, }; @@ -567,10 +550,8 @@ async fn create_token_with_private_definition() -> Result<()> { // Mint to public account let mint_amount_public = 10; let subcommand = TokenProgramAgnosticSubcommand::Mint { - definition: Some(format_private_account_id(definition_account_id)), - definition_label: None, - holder: Some(format_public_account_id(recipient_account_id_public)), - holder_label: None, + definition: private_mention(definition_account_id), + holder: Some(public_mention(recipient_account_id_public)), holder_npk: None, holder_vpk: None, holder_identifier: None, @@ -616,10 +597,8 @@ async fn create_token_with_private_definition() -> Result<()> { // Mint to private account let mint_amount_private = 5; let subcommand = TokenProgramAgnosticSubcommand::Mint { - definition: Some(format_private_account_id(definition_account_id)), - definition_label: None, - holder: Some(format_private_account_id(recipient_account_id_private)), - holder_label: None, + definition: private_mention(definition_account_id), + holder: Some(private_mention(recipient_account_id_private)), holder_npk: None, holder_vpk: None, holder_identifier: None, @@ -698,10 +677,8 @@ async fn create_token_with_private_definition_and_supply() -> Result<()> { let name = "A NAME".to_owned(); let total_supply = 37; let subcommand = TokenProgramAgnosticSubcommand::New { - definition_account_id: Some(format_private_account_id(definition_account_id)), - definition_account_label: None, - supply_account_id: Some(format_private_account_id(supply_account_id)), - supply_account_label: None, + definition_account_id: private_mention(definition_account_id), + supply_account_id: 
private_mention(supply_account_id), name, total_supply, }; @@ -759,15 +736,12 @@ async fn create_token_with_private_definition_and_supply() -> Result<()> { // Transfer tokens let transfer_amount = 7; let subcommand = TokenProgramAgnosticSubcommand::Send { - from: Some(format_private_account_id(supply_account_id)), - from_label: None, - to: Some(format_private_account_id(recipient_account_id)), - to_label: None, + from: private_mention(supply_account_id), + to: Some(private_mention(recipient_account_id)), to_npk: None, to_vpk: None, to_identifier: Some(0), amount: transfer_amount, - from_key_path: None, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; @@ -876,10 +850,8 @@ async fn shielded_token_transfer() -> Result<()> { let name = "A NAME".to_owned(); let total_supply = 37; let subcommand = TokenProgramAgnosticSubcommand::New { - definition_account_id: Some(format_public_account_id(definition_account_id)), - definition_account_label: None, - supply_account_id: Some(format_public_account_id(supply_account_id)), - supply_account_label: None, + definition_account_id: public_mention(definition_account_id), + supply_account_id: public_mention(supply_account_id), name, total_supply, }; @@ -892,15 +864,12 @@ async fn shielded_token_transfer() -> Result<()> { // Perform shielded transfer: public supply -> private recipient let transfer_amount = 7; let subcommand = TokenProgramAgnosticSubcommand::Send { - from: Some(format_public_account_id(supply_account_id)), - from_label: None, - to: Some(format_private_account_id(recipient_account_id)), - to_label: None, + from: public_mention(supply_account_id), + to: Some(private_mention(recipient_account_id)), to_npk: None, to_vpk: None, to_identifier: Some(0), amount: transfer_amount, - from_key_path: None, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; @@ -1004,10 +973,8 @@ async fn deshielded_token_transfer() -> Result<()> { let name = "A 
NAME".to_owned(); let total_supply = 37; let subcommand = TokenProgramAgnosticSubcommand::New { - definition_account_id: Some(format_public_account_id(definition_account_id)), - definition_account_label: None, - supply_account_id: Some(format_private_account_id(supply_account_id)), - supply_account_label: None, + definition_account_id: public_mention(definition_account_id), + supply_account_id: private_mention(supply_account_id), name, total_supply, }; @@ -1020,15 +987,12 @@ async fn deshielded_token_transfer() -> Result<()> { // Perform deshielded transfer: private supply -> public recipient let transfer_amount = 7; let subcommand = TokenProgramAgnosticSubcommand::Send { - from: Some(format_private_account_id(supply_account_id)), - from_label: None, - to: Some(format_public_account_id(recipient_account_id)), - to_label: None, + from: private_mention(supply_account_id), + to: Some(public_mention(recipient_account_id)), to_npk: None, to_vpk: None, to_identifier: Some(0), amount: transfer_amount, - from_key_path: None, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; @@ -1116,10 +1080,8 @@ async fn token_claiming_path_with_private_accounts() -> Result<()> { let name = "A NAME".to_owned(); let total_supply = 37; let subcommand = TokenProgramAgnosticSubcommand::New { - definition_account_id: Some(format_private_account_id(definition_account_id)), - definition_account_label: None, - supply_account_id: Some(format_private_account_id(supply_account_id)), - supply_account_label: None, + definition_account_id: private_mention(definition_account_id), + supply_account_id: private_mention(supply_account_id), name, total_supply, }; @@ -1146,22 +1108,23 @@ async fn token_claiming_path_with_private_accounts() -> Result<()> { }; // Get keys for foreign mint (claiming path) - let (holder_keys, _, holder_identifier) = ctx + let holder = ctx .wallet() .storage() - .user_data - .get_private_account(recipient_account_id) + .key_chain() + 
.private_account(recipient_account_id) .context("Failed to get private account keys")?; + let holder_keys = holder.key_chain; + let holder_identifier = holder.kind.identifier(); + // Mint using claiming path (foreign account) let mint_amount = 9; let subcommand = TokenProgramAgnosticSubcommand::Mint { - definition: Some(format_private_account_id(definition_account_id)), - definition_label: None, + definition: private_mention(definition_account_id), holder: None, - holder_label: None, holder_npk: Some(hex::encode(holder_keys.nullifier_public_key.0)), - holder_vpk: Some(hex::encode(holder_keys.viewing_public_key.0)), + holder_vpk: Some(hex::encode(&holder_keys.viewing_public_key.0)), holder_identifier: Some(holder_identifier), amount: mint_amount, }; @@ -1206,8 +1169,8 @@ async fn create_token_using_labels() -> Result<()> { let mut ctx = TestContext::new().await?; // Create definition and supply accounts with labels - let def_label = "token-definition-label".to_owned(); - let supply_label = "token-supply-label".to_owned(); + let def_label = Label::new("token-definition-label"); + let supply_label = Label::new("token-supply-label"); let result = wallet::cli::execute_subcommand( ctx.wallet_mut(), @@ -1228,7 +1191,7 @@ async fn create_token_using_labels() -> Result<()> { ctx.wallet_mut(), Command::Account(AccountSubcommand::New(NewSubcommand::Public { cci: None, - label: Some(supply_label.clone()), + label: Some(Label::new(supply_label.clone())), })), ) .await?; @@ -1243,10 +1206,8 @@ async fn create_token_using_labels() -> Result<()> { let name = "LABELED TOKEN".to_owned(); let total_supply = 100; let subcommand = TokenProgramAgnosticSubcommand::New { - definition_account_id: None, - definition_account_label: Some(def_label), - supply_account_id: None, - supply_account_label: Some(supply_label), + definition_account_id: def_label.into(), + supply_account_id: supply_label.into(), name: name.clone(), total_supply, }; @@ -1310,7 +1271,7 @@ async fn 
transfer_token_using_from_label() -> Result<()> { }; // Create supply account with a label - let supply_label = "token-supply-sender".to_owned(); + let supply_label = Label::new("token-supply-sender"); let result = wallet::cli::execute_subcommand( ctx.wallet_mut(), Command::Account(AccountSubcommand::New(NewSubcommand::Public { @@ -1345,10 +1306,8 @@ async fn transfer_token_using_from_label() -> Result<()> { // Create token let total_supply = 50; let subcommand = TokenProgramAgnosticSubcommand::New { - definition_account_id: Some(format_public_account_id(definition_account_id)), - definition_account_label: None, - supply_account_id: Some(format_public_account_id(supply_account_id)), - supply_account_label: None, + definition_account_id: public_mention(definition_account_id), + supply_account_id: public_mention(supply_account_id), name: "LABEL TEST TOKEN".to_owned(), total_supply, }; @@ -1360,15 +1319,12 @@ async fn transfer_token_using_from_label() -> Result<()> { // Transfer token using from_label instead of from let transfer_amount = 20; let subcommand = TokenProgramAgnosticSubcommand::Send { - from: None, - from_label: Some(supply_label), - to: Some(format_public_account_id(recipient_account_id)), - to_label: None, + from: supply_label.into(), + to: Some(public_mention(recipient_account_id)), to_npk: None, to_vpk: None, to_identifier: Some(0), amount: transfer_amount, - from_key_path: None, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; diff --git a/integration_tests/tests/tps.rs b/integration_tests/tests/tps.rs index 41de30ed..22550bb0 100644 --- a/integration_tests/tests/tps.rs +++ b/integration_tests/tests/tps.rs @@ -14,11 +14,8 @@ use std::time::{Duration, Instant}; use anyhow::Result; use bytesize::ByteSize; use common::transaction::NSSATransaction; -use integration_tests::{ - TestContext, - config::{InitialData, SequencerPartialConfig}, -}; -use key_protocol::key_management::{KeyChain, 
ephemeral_key_holder::EphemeralKeyHolder}; +use integration_tests::{TestContext, config::SequencerPartialConfig}; +use key_protocol::key_management::ephemeral_key_holder::EphemeralKeyHolder; use log::info; use nssa::{ Account, AccountId, PrivacyPreservingTransaction, PrivateKey, PublicKey, PublicTransaction, @@ -27,10 +24,11 @@ use nssa::{ public_transaction as putx, }; use nssa_core::{ - MembershipProof, NullifierPublicKey, + InputAccountIdentity, MembershipProof, NullifierPublicKey, account::{AccountWithMetadata, Nonce, data::Data}, encryption::ViewingPublicKey, }; +use sequencer_core::config::GenesisAction; use sequencer_service_rpc::RpcClient as _; use tokio::test; @@ -81,7 +79,7 @@ impl TpsTestManager { program.id(), [pair[0].1, pair[1].1].to_vec(), [Nonce(0_u128)].to_vec(), - amount, + authenticated_transfer_core::Instruction::Transfer { amount }, ) .unwrap(); let witness_set = @@ -96,28 +94,14 @@ impl TpsTestManager { /// Generates a sequencer configuration with initial balance in a number of public accounts. /// The transactions generated with the function `build_public_txs` will be valid in a node /// started with the config from this method. - fn generate_initial_data(&self) -> InitialData { - // Create public public keypairs - let public_accounts = self - .public_keypairs + fn generate_genesis(&self) -> Vec { + self.public_keypairs .iter() - .map(|(key, _)| (key.clone(), 10)) - .collect(); - - // Generate an initial commitment to be used with the privacy preserving transaction - // created with the `build_privacy_transaction` function. 
- let key_chain = KeyChain::new_os_random(); - let account = Account { - balance: 100, - nonce: Nonce(0xdead_beef), - program_owner: Program::authenticated_transfer_program().id(), - data: Data::default(), - }; - - InitialData { - public_accounts, - private_accounts: vec![(key_chain, account)], - } + .map(|(_, account_id)| GenesisAction::SupplyAccount { + account_id: *account_id, + balance: 10, + }) + .collect() } const fn generate_sequencer_partial_config() -> SequencerPartialConfig { @@ -139,7 +123,7 @@ pub async fn tps_test() -> Result<()> { let tps_test = TpsTestManager::new(target_tps, num_transactions); let ctx = TestContext::builder() .with_sequencer_partial_config(TpsTestManager::generate_sequencer_partial_config()) - .with_initial_data(tps_test.generate_initial_data()) + .with_genesis(tps_test.generate_genesis()) .build() .await?; @@ -166,7 +150,7 @@ pub async fn tps_test() -> Result<()> { loop { assert!( now.elapsed().as_millis() <= target_time.as_millis(), - "TPS test failed by timeout" + "TPS test failed by timeout, transactions processed {i}/{num_transactions}" ); let tx_obj = ctx @@ -220,7 +204,7 @@ fn build_privacy_transaction() -> PrivacyPreservingTransaction { data: Data::default(), }, true, - AccountId::from((&sender_npk, 0)), + AccountId::for_regular_private_account(&sender_npk, 0), ); let recipient_nsk = [2; 32]; let recipient_vsk = [99; 32]; @@ -229,7 +213,7 @@ fn build_privacy_transaction() -> PrivacyPreservingTransaction { let recipient_pre = AccountWithMetadata::new( Account::default(), false, - AccountId::from((&recipient_npk, 0)), + AccountId::for_regular_private_account(&recipient_npk, 0), ); let eph_holder_from = EphemeralKeyHolder::new(&sender_npk); @@ -250,11 +234,23 @@ fn build_privacy_transaction() -> PrivacyPreservingTransaction { ); let (output, proof) = circuit::execute_and_prove( vec![sender_pre, recipient_pre], - Program::serialize_instruction(balance_to_move).unwrap(), - vec![1, 2], - vec![(sender_npk, 0, sender_ss), 
(recipient_npk, 0, recipient_ss)], - vec![sender_nsk], - vec![Some(proof)], + Program::serialize_instruction(authenticated_transfer_core::Instruction::Transfer { + amount: balance_to_move, + }) + .unwrap(), + vec![ + InputAccountIdentity::PrivateAuthorizedUpdate { + ssk: sender_ss, + nsk: sender_nsk, + membership_proof: proof, + identifier: 0, + }, + InputAccountIdentity::PrivateUnauthorized { + npk: recipient_npk, + ssk: recipient_ss, + identifier: 0, + }, + ], &program.into(), ) .unwrap(); diff --git a/integration_tests/tests/wallet_ffi.rs b/integration_tests/tests/wallet_ffi.rs index db84b066..2677e10e 100644 --- a/integration_tests/tests/wallet_ffi.rs +++ b/integration_tests/tests/wallet_ffi.rs @@ -24,6 +24,7 @@ use log::info; use nssa::{Account, AccountId, PrivateKey, PublicKey, program::Program}; use nssa_core::program::DEFAULT_PROGRAM_ID; use tempfile::tempdir; +use wallet::account::HumanReadableAccount; use wallet_ffi::{ FfiAccount, FfiAccountList, FfiBytes32, FfiPrivateAccountKeys, FfiPublicAccountKey, FfiTransferResult, FfiU128, WalletHandle, error, @@ -53,11 +54,24 @@ unsafe extern "C" { out_account_id: *mut FfiBytes32, ) -> error::WalletFfiError; + fn wallet_ffi_import_public_account( + handle: *mut WalletHandle, + private_key_hex: *const c_char, + ) -> error::WalletFfiError; + fn wallet_ffi_create_private_accounts_key( handle: *mut WalletHandle, out_keys: *mut FfiPrivateAccountKeys, ) -> error::WalletFfiError; + fn wallet_ffi_import_private_account( + handle: *mut WalletHandle, + key_chain_json: *const c_char, + chain_index: *const c_char, + identifier: *const FfiU128, + account_state_json: *const c_char, + ) -> error::WalletFfiError; + fn wallet_ffi_list_accounts( handle: *mut WalletHandle, out_list: *mut FfiAccountList, @@ -191,13 +205,59 @@ fn new_wallet_ffi_with_test_context_config( let storage_path = CString::new(storage_path.to_str().unwrap())?; let password = CString::new(ctx.ctx().wallet_password())?; - Ok(unsafe { + let wallet_ffi_handle = 
unsafe { wallet_ffi_create_new( config_path.as_ptr(), storage_path.as_ptr(), password.as_ptr(), ) - }) + }; + + // Import accounts from source wallet + let source_wallet = ctx.ctx().wallet(); + let source_key_chain = source_wallet.storage().key_chain(); + + for (account_id, _chain_index) in source_key_chain.public_account_ids() { + let private_key_hex = source_wallet + .get_account_public_signing_key(account_id) + .unwrap() + .to_string(); + let private_key_hex = CString::new(private_key_hex)?; + unsafe { wallet_ffi_import_public_account(wallet_ffi_handle, private_key_hex.as_ptr()) } + .unwrap(); + } + + for (account_id, _chain_index) in source_key_chain.private_account_ids() { + let account = source_key_chain.private_account(account_id).unwrap(); + let key_chain_json = CString::new(serde_json::to_string(account.key_chain)?)?; + let account_state_json = CString::new(serde_json::to_string( + &HumanReadableAccount::from(account.account.clone()), + )?)?; + + let chain_index = account + .chain_index + .map(|chain_index| CString::new(chain_index.to_string())) + .transpose()?; + let chain_index_ptr = chain_index + .as_ref() + .map_or(std::ptr::null(), |value| value.as_ptr()); + let identifier = FfiU128 { + data: account.kind.identifier().to_le_bytes(), + }; + + unsafe { + wallet_ffi_import_private_account( + wallet_ffi_handle, + key_chain_json.as_ptr(), + chain_index_ptr, + &raw const identifier, + account_state_json.as_ptr(), + ) + } + .unwrap(); + } + + Ok(wallet_ffi_handle) } fn new_wallet_ffi_with_default_config(password: &str) -> Result<*mut WalletHandle> { @@ -238,7 +298,7 @@ fn wallet_ffi_create_public_accounts() -> Result<()> { let wallet_ffi_handle = new_wallet_ffi_with_default_config(password)?; for _ in 0..n_accounts { let mut out_account_id = FfiBytes32::from_bytes([0; 32]); - wallet_ffi_create_account_public(wallet_ffi_handle, &raw mut out_account_id); + wallet_ffi_create_account_public(wallet_ffi_handle, &raw mut out_account_id).unwrap(); 
account_ids.push(out_account_id.data); } wallet_ffi_destroy(wallet_ffi_handle); @@ -274,7 +334,7 @@ fn wallet_ffi_create_private_accounts() -> Result<()> { let wallet_ffi_handle = new_wallet_ffi_with_default_config(password)?; for _ in 0..n_accounts { let mut out_keys = FfiPrivateAccountKeys::default(); - wallet_ffi_create_private_accounts_key(wallet_ffi_handle, &raw mut out_keys); + wallet_ffi_create_private_accounts_key(wallet_ffi_handle, &raw mut out_keys).unwrap(); npks.push(out_keys.nullifier_public_key.data); wallet_ffi_free_private_account_keys(&raw mut out_keys); } @@ -293,6 +353,7 @@ fn wallet_ffi_create_private_accounts() -> Result<()> { Ok(()) } + #[test] fn wallet_ffi_save_and_load_persistent_storage() -> Result<()> { let ctx = BlockingTestContext::new()?; @@ -301,10 +362,10 @@ fn wallet_ffi_save_and_load_persistent_storage() -> Result<()> { let first_npk = unsafe { let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path())?; let mut out_keys = FfiPrivateAccountKeys::default(); - wallet_ffi_create_private_accounts_key(wallet_ffi_handle, &raw mut out_keys); + wallet_ffi_create_private_accounts_key(wallet_ffi_handle, &raw mut out_keys).unwrap(); let npk = out_keys.nullifier_public_key.data; wallet_ffi_free_private_account_keys(&raw mut out_keys); - wallet_ffi_save(wallet_ffi_handle); + wallet_ffi_save(wallet_ffi_handle).unwrap(); wallet_ffi_destroy(wallet_ffi_handle); npk }; @@ -313,7 +374,7 @@ fn wallet_ffi_save_and_load_persistent_storage() -> Result<()> { let second_npk = unsafe { let wallet_ffi_handle = load_existing_ffi_wallet(home.path())?; let mut out_keys = FfiPrivateAccountKeys::default(); - wallet_ffi_create_private_accounts_key(wallet_ffi_handle, &raw mut out_keys); + wallet_ffi_create_private_accounts_key(wallet_ffi_handle, &raw mut out_keys).unwrap(); let npk = out_keys.nullifier_public_key.data; wallet_ffi_free_private_account_keys(&raw mut out_keys); wallet_ffi_destroy(wallet_ffi_handle); @@ -342,11 +403,11 @@ fn 
test_wallet_ffi_list_accounts() -> Result<()> { // Create 5 public accounts and 5 receiving keys for _ in 0..5 { let mut out_account_id = FfiBytes32::from_bytes([0; 32]); - wallet_ffi_create_account_public(handle, &raw mut out_account_id); + wallet_ffi_create_account_public(handle, &raw mut out_account_id).unwrap(); public_ids.push(out_account_id.data); let mut out_keys = FfiPrivateAccountKeys::default(); - wallet_ffi_create_private_accounts_key(handle, &raw mut out_keys); + wallet_ffi_create_private_accounts_key(handle, &raw mut out_keys).unwrap(); wallet_ffi_free_private_account_keys(&raw mut out_keys); } @@ -356,7 +417,7 @@ fn test_wallet_ffi_list_accounts() -> Result<()> { // Get the account list with FFI method let mut wallet_ffi_account_list = unsafe { let mut out_list = FfiAccountList::default(); - wallet_ffi_list_accounts(wallet_ffi_handle, &raw mut out_list); + wallet_ffi_list_accounts(wallet_ffi_handle, &raw mut out_list).unwrap(); out_list }; @@ -405,7 +466,7 @@ fn test_wallet_ffi_get_balance_public() -> Result<()> { let balance = unsafe { let mut out_balance: [u8; 16] = [0; 16]; - let ffi_account_id = FfiBytes32::from(&account_id); + let ffi_account_id = FfiBytes32::from(account_id); wallet_ffi_get_balance( wallet_ffi_handle, &raw const ffi_account_id, @@ -435,7 +496,7 @@ fn test_wallet_ffi_get_account_public() -> Result<()> { let mut out_account = FfiAccount::default(); let account: Account = unsafe { - let ffi_account_id = FfiBytes32::from(&account_id); + let ffi_account_id = FfiBytes32::from(account_id); wallet_ffi_get_account_public( wallet_ffi_handle, &raw const ffi_account_id, @@ -451,7 +512,7 @@ fn test_wallet_ffi_get_account_public() -> Result<()> { ); assert_eq!(account.balance, 10000); assert!(account.data.is_empty()); - assert_eq!(account.nonce.0, 0); + assert_eq!(account.nonce.0, 1); unsafe { wallet_ffi_free_account_data(&raw mut out_account); @@ -472,7 +533,7 @@ fn test_wallet_ffi_get_account_private() -> Result<()> { let mut out_account = 
FfiAccount::default(); let account: Account = unsafe { - let ffi_account_id = FfiBytes32::from(&account_id); + let ffi_account_id = FfiBytes32::from(account_id); wallet_ffi_get_account_private( wallet_ffi_handle, &raw const ffi_account_id, @@ -488,7 +549,6 @@ fn test_wallet_ffi_get_account_private() -> Result<()> { ); assert_eq!(account.balance, 10000); assert!(account.data.is_empty()); - assert_eq!(account.nonce, 0_u128.into()); unsafe { wallet_ffi_free_account_data(&raw mut out_account); @@ -509,7 +569,7 @@ fn test_wallet_ffi_get_public_account_keys() -> Result<()> { let mut out_key = FfiPublicAccountKey::default(); let key: PublicKey = unsafe { - let ffi_account_id = FfiBytes32::from(&account_id); + let ffi_account_id = FfiBytes32::from(account_id); wallet_ffi_get_public_account_key( wallet_ffi_handle, &raw const ffi_account_id, @@ -548,7 +608,7 @@ fn test_wallet_ffi_get_private_account_keys() -> Result<()> { let mut keys = FfiPrivateAccountKeys::default(); unsafe { - let ffi_account_id = FfiBytes32::from(&account_id); + let ffi_account_id = FfiBytes32::from(account_id); wallet_ffi_get_private_account_keys( wallet_ffi_handle, &raw const ffi_account_id, @@ -557,15 +617,15 @@ fn test_wallet_ffi_get_private_account_keys() -> Result<()> { .unwrap(); }; - let key_chain = &ctx + let account = &ctx .ctx() .wallet() .storage() - .user_data - .get_private_account(account_id) - .unwrap() - .0; + .key_chain() + .private_account(account_id) + .unwrap(); + let key_chain = account.key_chain; let expected_npk = &key_chain.nullifier_public_key; let expected_vpk = &key_chain.viewing_public_key; @@ -587,7 +647,7 @@ fn test_wallet_ffi_account_id_to_base58() -> Result<()> { let private_key = PrivateKey::new_os_random(); let public_key = PublicKey::new_from_private_key(&private_key); let account_id = AccountId::from(&public_key); - let ffi_bytes: FfiBytes32 = (&account_id).into(); + let ffi_bytes: FfiBytes32 = account_id.into(); let ptr = unsafe { 
wallet_ffi_account_id_to_base58(&raw const ffi_bytes) }; let ffi_result = unsafe { CStr::from_ptr(ptr).to_str()? }; @@ -610,7 +670,8 @@ fn wallet_ffi_base58_to_account_id() -> Result<()> { let account_id_c_str = CString::new(account_id_str.clone())?; let account_id: AccountId = unsafe { let mut out_account_id_bytes = FfiBytes32::default(); - wallet_ffi_account_id_from_base58(account_id_c_str.as_ptr(), &raw mut out_account_id_bytes); + wallet_ffi_account_id_from_base58(account_id_c_str.as_ptr(), &raw mut out_account_id_bytes) + .unwrap(); out_account_id_bytes.into() }; @@ -630,7 +691,7 @@ fn wallet_ffi_init_public_account_auth_transfer() -> Result<()> { // Create a new uninitialized public account let mut out_account_id = FfiBytes32::from_bytes([0; 32]); unsafe { - wallet_ffi_create_account_public(wallet_ffi_handle, &raw mut out_account_id); + wallet_ffi_create_account_public(wallet_ffi_handle, &raw mut out_account_id).unwrap(); } // Check its program owner is the default program id @@ -653,7 +714,8 @@ fn wallet_ffi_init_public_account_auth_transfer() -> Result<()> { wallet_ffi_handle, &raw const out_account_id, &raw mut transfer_result, - ); + ) + .unwrap(); } info!("Waiting for next block creation"); @@ -692,7 +754,7 @@ fn wallet_ffi_init_private_account_auth_transfer() -> Result<()> { // Create a new private account let mut out_account_id = FfiBytes32::default(); unsafe { - wallet_ffi_create_account_private(wallet_ffi_handle, &raw mut out_account_id); + wallet_ffi_create_account_private(wallet_ffi_handle, &raw mut out_account_id).unwrap(); } // Call the init function @@ -702,7 +764,8 @@ fn wallet_ffi_init_private_account_auth_transfer() -> Result<()> { wallet_ffi_handle, &raw const out_account_id, &raw mut transfer_result, - ); + ) + .unwrap(); } info!("Waiting for next block creation"); @@ -711,8 +774,8 @@ fn wallet_ffi_init_private_account_auth_transfer() -> Result<()> { // Sync private account local storage with onchain encrypted state unsafe { let mut 
current_height = 0; - wallet_ffi_get_current_block_height(wallet_ffi_handle, &raw mut current_height); - wallet_ffi_sync_to_block(wallet_ffi_handle, current_height); + wallet_ffi_get_current_block_height(wallet_ffi_handle, &raw mut current_height).unwrap(); + wallet_ffi_sync_to_block(wallet_ffi_handle, current_height).unwrap(); }; // Check that the program owner is now the authenticated transfer program @@ -744,8 +807,8 @@ fn test_wallet_ffi_transfer_public() -> Result<()> { let ctx = BlockingTestContext::new()?; let home = tempfile::tempdir()?; let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path())?; - let from: FfiBytes32 = (&ctx.ctx().existing_public_accounts()[0]).into(); - let to: FfiBytes32 = (&ctx.ctx().existing_public_accounts()[1]).into(); + let from: FfiBytes32 = ctx.ctx().existing_public_accounts()[0].into(); + let to: FfiBytes32 = ctx.ctx().existing_public_accounts()[1].into(); let amount: [u8; 16] = 100_u128.to_le_bytes(); let mut transfer_result = FfiTransferResult::default(); @@ -756,7 +819,8 @@ fn test_wallet_ffi_transfer_public() -> Result<()> { &raw const to, &raw const amount, &raw mut transfer_result, - ); + ) + .unwrap(); } info!("Waiting for next block creation"); @@ -797,12 +861,12 @@ fn test_wallet_ffi_transfer_shielded() -> Result<()> { let ctx = BlockingTestContext::new()?; let home = tempfile::tempdir()?; let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path())?; - let from: FfiBytes32 = (&ctx.ctx().existing_public_accounts()[0]).into(); + let from: FfiBytes32 = ctx.ctx().existing_public_accounts()[0].into(); let (to, to_keys) = unsafe { let mut out_keys = FfiPrivateAccountKeys::default(); - wallet_ffi_create_private_accounts_key(wallet_ffi_handle, &raw mut out_keys); - let account_id = nssa::AccountId::from((&out_keys.npk(), 0_u128)); - let to: FfiBytes32 = (&account_id).into(); + wallet_ffi_create_private_accounts_key(wallet_ffi_handle, &raw mut out_keys).unwrap(); + let account_id = 
nssa::AccountId::for_regular_private_account(&out_keys.npk(), 0_u128); + let to: FfiBytes32 = account_id.into(); (to, out_keys) }; let amount: [u8; 16] = 100_u128.to_le_bytes(); @@ -819,7 +883,8 @@ fn test_wallet_ffi_transfer_shielded() -> Result<()> { &raw const to_identifier, &raw const amount, &raw mut transfer_result, - ); + ) + .unwrap(); } info!("Waiting for next block creation"); @@ -828,8 +893,8 @@ fn test_wallet_ffi_transfer_shielded() -> Result<()> { // Sync private account local storage with onchain encrypted state unsafe { let mut current_height = 0; - wallet_ffi_get_current_block_height(wallet_ffi_handle, &raw mut current_height); - wallet_ffi_sync_to_block(wallet_ffi_handle, current_height); + wallet_ffi_get_current_block_height(wallet_ffi_handle, &raw mut current_height).unwrap(); + wallet_ffi_sync_to_block(wallet_ffi_handle, current_height).unwrap(); }; let from_balance = unsafe { @@ -871,8 +936,8 @@ fn test_wallet_ffi_transfer_deshielded() -> Result<()> { let ctx = BlockingTestContext::new()?; let home = tempfile::tempdir()?; let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path())?; - let from: FfiBytes32 = (&ctx.ctx().existing_private_accounts()[0]).into(); - let to: FfiBytes32 = (&ctx.ctx().existing_public_accounts()[0]).into(); + let from: FfiBytes32 = ctx.ctx().existing_private_accounts()[0].into(); + let to: FfiBytes32 = ctx.ctx().existing_public_accounts()[0].into(); let amount: [u8; 16] = 100_u128.to_le_bytes(); let mut transfer_result = FfiTransferResult::default(); @@ -883,8 +948,9 @@ fn test_wallet_ffi_transfer_deshielded() -> Result<()> { &raw const to, &raw const amount, &raw mut transfer_result, - ); + ) } + .unwrap(); info!("Waiting for next block creation"); std::thread::sleep(Duration::from_secs(TIME_TO_WAIT_FOR_BLOCK_SECONDS)); @@ -892,9 +958,9 @@ fn test_wallet_ffi_transfer_deshielded() -> Result<()> { // Sync private account local storage with onchain encrypted state unsafe { let mut current_height = 0; 
- wallet_ffi_get_current_block_height(wallet_ffi_handle, &raw mut current_height); - wallet_ffi_sync_to_block(wallet_ffi_handle, current_height); - }; + wallet_ffi_get_current_block_height(wallet_ffi_handle, &raw mut current_height).unwrap(); + wallet_ffi_sync_to_block(wallet_ffi_handle, current_height).unwrap(); + } let from_balance = unsafe { let mut out_balance: [u8; 16] = [0; 16]; @@ -931,12 +997,12 @@ fn test_wallet_ffi_transfer_private() -> Result<()> { let home = tempfile::tempdir()?; let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path())?; - let from: FfiBytes32 = (&ctx.ctx().existing_private_accounts()[0]).into(); + let from: FfiBytes32 = ctx.ctx().existing_private_accounts()[0].into(); let (to, to_keys) = unsafe { let mut out_keys = FfiPrivateAccountKeys::default(); - wallet_ffi_create_private_accounts_key(wallet_ffi_handle, &raw mut out_keys); - let account_id = nssa::AccountId::from((&out_keys.npk(), 0_u128)); - let to: FfiBytes32 = (&account_id).into(); + wallet_ffi_create_private_accounts_key(wallet_ffi_handle, &raw mut out_keys).unwrap(); + let account_id = nssa::AccountId::for_regular_private_account(&out_keys.npk(), 0_u128); + let to: FfiBytes32 = account_id.into(); (to, out_keys) }; @@ -954,7 +1020,8 @@ fn test_wallet_ffi_transfer_private() -> Result<()> { &raw const to_identifier, &raw const amount, &raw mut transfer_result, - ); + ) + .unwrap(); } info!("Waiting for next block creation"); @@ -963,8 +1030,8 @@ fn test_wallet_ffi_transfer_private() -> Result<()> { // Sync private account local storage with onchain encrypted state unsafe { let mut current_height = 0; - wallet_ffi_get_current_block_height(wallet_ffi_handle, &raw mut current_height); - wallet_ffi_sync_to_block(wallet_ffi_handle, current_height); + wallet_ffi_get_current_block_height(wallet_ffi_handle, &raw mut current_height).unwrap(); + wallet_ffi_sync_to_block(wallet_ffi_handle, current_height).unwrap(); }; let from_balance = unsafe { diff --git 
a/key_protocol/Cargo.toml b/key_protocol/Cargo.toml index 022f3ccd..a0b5c397 100644 --- a/key_protocol/Cargo.toml +++ b/key_protocol/Cargo.toml @@ -7,6 +7,10 @@ license = { workspace = true } [lints] workspace = true +[features] +default = [] +test_utils = [] + [dependencies] nssa.workspace = true nssa_core.workspace = true @@ -26,3 +30,4 @@ itertools.workspace = true [dev-dependencies] base58.workspace = true +bincode.workspace = true diff --git a/key_protocol/src/key_management/ephemeral_key_holder.rs b/key_protocol/src/key_management/ephemeral_key_holder.rs index 6ef9e305..7a6dc7d0 100644 --- a/key_protocol/src/key_management/ephemeral_key_holder.rs +++ b/key_protocol/src/key_management/ephemeral_key_holder.rs @@ -36,7 +36,7 @@ impl EphemeralKeyHolder { &self, receiver_viewing_public_key: &ViewingPublicKey, ) -> SharedSecretKey { - SharedSecretKey::new(&self.ephemeral_secret_key, receiver_viewing_public_key) + SharedSecretKey::new(self.ephemeral_secret_key, receiver_viewing_public_key) } } @@ -47,7 +47,7 @@ pub fn produce_one_sided_shared_secret_receiver( let mut esk = [0; 32]; OsRng.fill_bytes(&mut esk); ( - SharedSecretKey::new(&esk, vpk), + SharedSecretKey::new(esk, vpk), EphemeralPublicKey::from_scalar(esk), ) } diff --git a/key_protocol/src/key_management/group_key_holder.rs b/key_protocol/src/key_management/group_key_holder.rs new file mode 100644 index 00000000..39a3fd19 --- /dev/null +++ b/key_protocol/src/key_management/group_key_holder.rs @@ -0,0 +1,601 @@ +use aes_gcm::{Aes256Gcm, KeyInit as _, aead::Aead as _}; +use nssa_core::{ + SharedSecretKey, + encryption::{Scalar, shared_key_derivation::Secp256k1Point}, + program::{PdaSeed, ProgramId}, +}; +use rand::{RngCore as _, rngs::OsRng}; +use serde::{Deserialize, Serialize}; +use sha2::{Digest as _, digest::FixedOutput as _}; + +use super::secret_holders::{PrivateKeyHolder, SecretSpendingKey}; + +/// Public key used to seal a `GroupKeyHolder` for distribution to a recipient. 
+/// +/// Wraps a secp256k1 point but is a distinct type from `ViewingPublicKey` to enforce +/// key separation: viewing keys encrypt account state, sealing keys encrypt the GMS +/// for off-chain distribution. +pub struct SealingPublicKey(Secp256k1Point); + +impl SealingPublicKey { + /// Derive the sealing public key from a secret scalar. + #[must_use] + pub fn from_scalar(scalar: Scalar) -> Self { + Self(Secp256k1Point::from_scalar(scalar)) + } + + /// Construct from raw serialized bytes (e.g. received from another wallet). + #[must_use] + pub const fn from_bytes(bytes: Vec) -> Self { + Self(Secp256k1Point(bytes)) + } + + /// Returns the raw bytes for display or transmission. + #[must_use] + pub fn to_bytes(&self) -> &[u8] { + &self.0.0 + } +} + +/// Secret key used to unseal a `GroupKeyHolder` received from another member. +pub type SealingSecretKey = Scalar; + +/// Manages shared viewing keys for a group of controllers owning private PDAs. +/// +/// The Group Master Secret (GMS) is a 32-byte random value shared among controllers. +/// Each private PDA owned by the group gets a unique [`SecretSpendingKey`] derived from +/// the GMS by mixing the PDA seed into the SHA-256 input (see `secret_spending_key_for_pda`). +/// +/// # Distribution +/// +/// The GMS is a long-term secret and must never cross a trust boundary in raw form. +/// Controllers share it off-chain by sealing it under each recipient's [`SealingPublicKey`] +/// (see `seal_for` / `unseal`). Wallets persisting a `GroupKeyHolder` must encrypt it at +/// rest; the raw bytes are exposed only via [`GroupKeyHolder::dangerous_raw_gms`], which +/// is intended for the sealing path exclusively. +/// +/// # Logging safety +/// +/// `Debug` is implemented manually to redact the GMS; formatting this value with `{:?}` +/// will not leak the secret. Code that formats through `{:#?}` on containing types is +/// safe for the same reason. 
+#[derive(Clone, PartialEq, Eq, Serialize, Deserialize)] +pub struct GroupKeyHolder { + gms: [u8; 32], +} + +impl std::fmt::Debug for GroupKeyHolder { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("GroupKeyHolder") + .field("gms", &"") + .finish() + } +} + +impl Default for GroupKeyHolder { + fn default() -> Self { + Self::new() + } +} + +impl GroupKeyHolder { + /// Create a new group with a fresh random GMS. + #[must_use] + pub fn new() -> Self { + let mut gms = [0_u8; 32]; + OsRng.fill_bytes(&mut gms); + Self { gms } + } + + /// Restore from an existing GMS (received via `unseal`). + #[must_use] + pub const fn from_gms(gms: [u8; 32]) -> Self { + Self { gms } + } + + /// Returns the raw 32-byte GMS. The name reflects intent: only the sealed-distribution + /// path (`seal_for`) and sealed-at-rest persistence should ever need the raw bytes. Do + /// not log the result, do not pass it across an untrusted channel. + #[must_use] + pub const fn dangerous_raw_gms(&self) -> &[u8; 32] { + &self.gms + } + + /// Derive a per-PDA [`SecretSpendingKey`] by mixing the seed into the SHA-256 input. + /// + /// Each distinct `(program_id, pda_seed)` pair produces a distinct SSK in the full 256-bit + /// space, so adversarial seed-grinding cannot collide two PDAs' derived keys under the same + /// group. Uses the codebase's 32-byte protocol-versioned domain-separation convention. + fn secret_spending_key_for_pda( + &self, + program_id: &ProgramId, + pda_seed: &PdaSeed, + ) -> SecretSpendingKey { + const PREFIX: &[u8; 32] = b"/LEE/v0.3/GroupKeyDerivation/SSK"; + let mut hasher = sha2::Sha256::new(); + hasher.update(PREFIX); + hasher.update(self.gms); + for word in program_id { + hasher.update(word.to_le_bytes()); + } + hasher.update(pda_seed.as_ref()); + SecretSpendingKey(hasher.finalize_fixed().into()) + } + + /// Derive keys for a specific PDA under a given program. 
+ /// + /// All controllers holding the same GMS independently derive the same keys for the + /// same `(program_id, seed)` because the derivation is deterministic. + #[must_use] + pub fn derive_keys_for_pda( + &self, + program_id: &ProgramId, + pda_seed: &PdaSeed, + ) -> PrivateKeyHolder { + self.secret_spending_key_for_pda(program_id, pda_seed) + .produce_private_key_holder(None) + } + + /// Derive keys for a shared regular (non-PDA) private account. + /// + /// Uses a distinct domain separator from `derive_keys_for_pda` to prevent cross-domain + /// key collisions. The `derivation_seed` should be a stable, unique 32-byte value + /// (e.g. derived deterministically from the account's identifier). + #[must_use] + pub fn derive_keys_for_shared_account(&self, derivation_seed: &[u8; 32]) -> PrivateKeyHolder { + const PREFIX: &[u8; 32] = b"/LEE/v0.3/GroupKeyDerivation/SHA"; + let mut hasher = sha2::Sha256::new(); + hasher.update(PREFIX); + hasher.update(self.gms); + hasher.update(derivation_seed); + SecretSpendingKey(hasher.finalize_fixed().into()).produce_private_key_holder(None) + } + + /// Encrypts this holder's GMS under the recipient's [`SealingPublicKey`]. + /// + /// Uses an ephemeral ECDH key exchange to derive a shared secret, then AES-256-GCM + /// to encrypt the payload. The returned bytes are + /// `ephemeral_pubkey (33) || nonce (12) || ciphertext+tag (48)` = 93 bytes. + /// + /// Each call generates a fresh ephemeral key, so two seals of the same holder produce + /// different ciphertexts. 
+ #[must_use] + pub fn seal_for(&self, recipient_key: &SealingPublicKey) -> Vec { + let mut ephemeral_scalar: Scalar = [0_u8; 32]; + OsRng.fill_bytes(&mut ephemeral_scalar); + let ephemeral_pubkey = Secp256k1Point::from_scalar(ephemeral_scalar); + let shared = SharedSecretKey::new(ephemeral_scalar, &recipient_key.0); + let aes_key = Self::seal_kdf(&shared); + let cipher = Aes256Gcm::new(&aes_key.into()); + + let mut nonce_bytes = [0_u8; 12]; + OsRng.fill_bytes(&mut nonce_bytes); + let nonce = aes_gcm::Nonce::from(nonce_bytes); + + let ciphertext = cipher + .encrypt(&nonce, self.gms.as_ref()) + .expect("AES-GCM encryption should not fail with valid key/nonce"); + + let capacity = 33_usize + .checked_add(12) + .and_then(|n| n.checked_add(ciphertext.len())) + .expect("seal capacity overflow"); + let mut out = Vec::with_capacity(capacity); + out.extend_from_slice(&ephemeral_pubkey.0); + out.extend_from_slice(&nonce_bytes); + out.extend_from_slice(&ciphertext); + out + } + + /// Decrypts a sealed `GroupKeyHolder` using the recipient's [`SealingSecretKey`]. + /// + /// Returns `Err` if the ciphertext is too short, the ECDH point is invalid, or the + /// AES-GCM authentication tag doesn't verify (wrong key or tampered data). + pub fn unseal(sealed: &[u8], own_key: SealingSecretKey) -> Result { + const HEADER_LEN: usize = 33 + 12; + const MIN_LEN: usize = HEADER_LEN + 16; + if sealed.len() < MIN_LEN { + return Err(SealError::TooShort); + } + // MIN_LEN (61) > HEADER_LEN (45), so all slicing below is in bounds. 
+ let ephemeral_pubkey = Secp256k1Point(sealed[..33].to_vec()); + let nonce = aes_gcm::Nonce::from_slice(&sealed[33..HEADER_LEN]); + let ciphertext = &sealed[HEADER_LEN..]; + + let shared = SharedSecretKey::new(own_key, &ephemeral_pubkey); + let aes_key = Self::seal_kdf(&shared); + let cipher = Aes256Gcm::new(&aes_key.into()); + + let plaintext = cipher + .decrypt(nonce, ciphertext) + .map_err(|_err| SealError::DecryptionFailed)?; + + if plaintext.len() != 32 { + return Err(SealError::DecryptionFailed); + } + + let mut gms = [0_u8; 32]; + gms.copy_from_slice(&plaintext); + Ok(Self::from_gms(gms)) + } + + /// Derives an AES-256 key from the ECDH shared secret via SHA-256 with a domain prefix. + fn seal_kdf(shared: &SharedSecretKey) -> [u8; 32] { + const PREFIX: &[u8; 32] = b"/LEE/v0.3/GroupKeySeal/AES\x00\x00\x00\x00\x00\x00"; + let mut hasher = sha2::Sha256::new(); + hasher.update(PREFIX); + hasher.update(shared.0); + hasher.finalize_fixed().into() + } +} + +#[derive(Debug)] +pub enum SealError { + TooShort, + DecryptionFailed, +} + +#[cfg(test)] +mod tests { + use nssa_core::NullifierPublicKey; + + use super::*; + + const TEST_PROGRAM_ID: ProgramId = [9; 8]; + + /// Two holders from the same GMS derive identical keys for the same PDA seed. + #[test] + fn same_gms_same_seed_produces_same_keys() { + let gms = [42_u8; 32]; + let holder_a = GroupKeyHolder::from_gms(gms); + let holder_b = GroupKeyHolder::from_gms(gms); + let seed = PdaSeed::new([1; 32]); + + let keys_a = holder_a.derive_keys_for_pda(&TEST_PROGRAM_ID, &seed); + let keys_b = holder_b.derive_keys_for_pda(&TEST_PROGRAM_ID, &seed); + + assert_eq!( + keys_a.generate_nullifier_public_key().to_byte_array(), + keys_b.generate_nullifier_public_key().to_byte_array(), + ); + } + + /// Different PDA seeds produce different keys from the same GMS. 
+ #[test] + fn same_gms_different_seed_produces_different_keys() { + let holder = GroupKeyHolder::from_gms([42_u8; 32]); + let seed_a = PdaSeed::new([1; 32]); + let seed_b = PdaSeed::new([2; 32]); + + let npk_a = holder + .derive_keys_for_pda(&TEST_PROGRAM_ID, &seed_a) + .generate_nullifier_public_key(); + let npk_b = holder + .derive_keys_for_pda(&TEST_PROGRAM_ID, &seed_b) + .generate_nullifier_public_key(); + + assert_ne!(npk_a.to_byte_array(), npk_b.to_byte_array()); + } + + /// Different GMS produce different keys for the same PDA seed. + #[test] + fn different_gms_same_seed_produces_different_keys() { + let holder_a = GroupKeyHolder::from_gms([42_u8; 32]); + let holder_b = GroupKeyHolder::from_gms([99_u8; 32]); + let seed = PdaSeed::new([1; 32]); + + let npk_a = holder_a + .derive_keys_for_pda(&TEST_PROGRAM_ID, &seed) + .generate_nullifier_public_key(); + let npk_b = holder_b + .derive_keys_for_pda(&TEST_PROGRAM_ID, &seed) + .generate_nullifier_public_key(); + + assert_ne!(npk_a.to_byte_array(), npk_b.to_byte_array()); + } + + /// GMS round-trip: export and restore produces the same keys. + #[test] + fn gms_round_trip() { + let original = GroupKeyHolder::from_gms([7_u8; 32]); + let restored = GroupKeyHolder::from_gms(*original.dangerous_raw_gms()); + let seed = PdaSeed::new([1; 32]); + + let npk_original = original + .derive_keys_for_pda(&TEST_PROGRAM_ID, &seed) + .generate_nullifier_public_key(); + let npk_restored = restored + .derive_keys_for_pda(&TEST_PROGRAM_ID, &seed) + .generate_nullifier_public_key(); + + assert_eq!(npk_original.to_byte_array(), npk_restored.to_byte_array()); + } + + /// The derived `NullifierPublicKey` is non-zero (sanity check). 
+ #[test] + fn derived_npk_is_non_zero() { + let holder = GroupKeyHolder::from_gms([42_u8; 32]); + let seed = PdaSeed::new([1; 32]); + let npk = holder + .derive_keys_for_pda(&TEST_PROGRAM_ID, &seed) + .generate_nullifier_public_key(); + + assert_ne!(npk, NullifierPublicKey([0; 32])); + } + + /// Pins the end-to-end derivation for a fixed (GMS, `ProgramId`, `PdaSeed`). Any change + /// to `secret_spending_key_for_pda`, the `PrivateKeyHolder` nsk/npk chain, or the + /// `AccountId::for_private_pda` formula breaks this test. Mirrors the pinned-value + /// pattern from `for_private_pda_matches_pinned_value` in `nssa_core`. + #[test] + fn pinned_end_to_end_derivation_for_private_pda() { + use nssa_core::{account::AccountId, program::ProgramId}; + + let gms = [42_u8; 32]; + let seed = PdaSeed::new([1; 32]); + let program_id: ProgramId = [9; 8]; + + let holder = GroupKeyHolder::from_gms(gms); + let npk = holder + .derive_keys_for_pda(&TEST_PROGRAM_ID, &seed) + .generate_nullifier_public_key(); + let account_id = AccountId::for_private_pda(&program_id, &seed, &npk, u128::MAX); + + let expected_npk = NullifierPublicKey([ + 136, 176, 234, 71, 208, 8, 143, 142, 126, 155, 132, 18, 71, 27, 88, 56, 100, 90, 79, + 215, 76, 92, 60, 166, 104, 35, 51, 91, 16, 114, 188, 112, + ]); + // AccountId is derived from (program_id, seed, npk), so it changes when npk changes. + // We verify npk is pinned, and AccountId is deterministically derived from it. + let expected_account_id = + AccountId::for_private_pda(&program_id, &seed, &expected_npk, u128::MAX); + + assert_eq!(npk, expected_npk); + assert_eq!(account_id, expected_account_id); + } + + /// Wallets persist `GroupKeyHolder` to disk and reload it on startup. This test pins + /// the serde round-trip: serialize, deserialize, and assert the derived keys for a + /// sample seed match on both sides. A silent encoding drift would corrupt every + /// group-owned account. 
+ #[test] + fn gms_serde_round_trip_preserves_derivation() { + let original = GroupKeyHolder::from_gms([7_u8; 32]); + let encoded = bincode::serialize(&original).expect("serialize"); + let restored: GroupKeyHolder = bincode::deserialize(&encoded).expect("deserialize"); + + let seed = PdaSeed::new([1; 32]); + let npk_original = original + .derive_keys_for_pda(&TEST_PROGRAM_ID, &seed) + .generate_nullifier_public_key(); + let npk_restored = restored + .derive_keys_for_pda(&TEST_PROGRAM_ID, &seed) + .generate_nullifier_public_key(); + + assert_eq!(npk_original, npk_restored); + assert_eq!(original.dangerous_raw_gms(), restored.dangerous_raw_gms()); + } + + /// A `GroupKeyHolder` constructed from the same 32 bytes as a personal + /// `SecretSpendingKey` must not derive the same `NullifierPublicKey` as the personal + /// path, so a private PDA cannot be spent by a personal nullifier even under + /// adversarial key-material reuse. The safety rests on the group path's distinct + /// domain-separation prefix plus the seed mix-in (see `secret_spending_key_for_pda`). + #[test] + fn group_derivation_does_not_collide_with_personal_path_at_shared_bytes() { + let shared_bytes = [13_u8; 32]; + let seed = PdaSeed::new([5; 32]); + + let group_npk = GroupKeyHolder::from_gms(shared_bytes) + .derive_keys_for_pda(&TEST_PROGRAM_ID, &seed) + .generate_nullifier_public_key(); + + let personal_npk = SecretSpendingKey(shared_bytes) + .produce_private_key_holder(None) + .generate_nullifier_public_key(); + + assert_ne!(group_npk, personal_npk); + } + + /// Seal then unseal recovers the same GMS and derived keys. 
+ #[test] + fn seal_unseal_round_trip() { + let holder = GroupKeyHolder::from_gms([42_u8; 32]); + + let recipient_ssk = SecretSpendingKey([7_u8; 32]); + let recipient_keys = recipient_ssk.produce_private_key_holder(None); + let recipient_vpk = recipient_keys.generate_viewing_public_key(); + let recipient_vsk = recipient_keys.viewing_secret_key; + + let sealed = holder.seal_for(&SealingPublicKey::from_bytes(recipient_vpk.0)); + let restored = GroupKeyHolder::unseal(&sealed, recipient_vsk).expect("unseal"); + + assert_eq!(restored.dangerous_raw_gms(), holder.dangerous_raw_gms()); + + let seed = PdaSeed::new([1; 32]); + assert_eq!( + holder + .derive_keys_for_pda(&TEST_PROGRAM_ID, &seed) + .generate_nullifier_public_key(), + restored + .derive_keys_for_pda(&TEST_PROGRAM_ID, &seed) + .generate_nullifier_public_key(), + ); + } + + /// Unsealing with a different VSK fails with `DecryptionFailed`. + #[test] + fn unseal_wrong_vsk_fails() { + let holder = GroupKeyHolder::from_gms([42_u8; 32]); + + let recipient_ssk = SecretSpendingKey([7_u8; 32]); + let recipient_vpk = recipient_ssk + .produce_private_key_holder(None) + .generate_viewing_public_key(); + + let wrong_ssk = SecretSpendingKey([99_u8; 32]); + let wrong_vsk = wrong_ssk + .produce_private_key_holder(None) + .viewing_secret_key; + + let sealed = holder.seal_for(&SealingPublicKey::from_bytes(recipient_vpk.0)); + let result = GroupKeyHolder::unseal(&sealed, wrong_vsk); + assert!(matches!(result, Err(super::SealError::DecryptionFailed))); + } + + /// Tampered ciphertext fails authentication. 
+ #[test] + fn unseal_tampered_ciphertext_fails() { + let holder = GroupKeyHolder::from_gms([42_u8; 32]); + + let recipient_ssk = SecretSpendingKey([7_u8; 32]); + let recipient_keys = recipient_ssk.produce_private_key_holder(None); + let recipient_vpk = recipient_keys.generate_viewing_public_key(); + let recipient_vsk = recipient_keys.viewing_secret_key; + + let mut sealed = holder.seal_for(&SealingPublicKey::from_bytes(recipient_vpk.0)); + // Flip a byte in the ciphertext portion (after ephemeral_pubkey + nonce) + let last = sealed.len() - 1; + sealed[last] ^= 0xFF; + + let result = GroupKeyHolder::unseal(&sealed, recipient_vsk); + assert!(matches!(result, Err(super::SealError::DecryptionFailed))); + } + + /// Two seals of the same holder produce different ciphertexts (ephemeral randomness). + #[test] + fn two_seals_produce_different_ciphertexts() { + let holder = GroupKeyHolder::from_gms([42_u8; 32]); + + let recipient_ssk = SecretSpendingKey([7_u8; 32]); + let recipient_vpk = recipient_ssk + .produce_private_key_holder(None) + .generate_viewing_public_key(); + + let sealing_key = SealingPublicKey::from_bytes(recipient_vpk.0); + let sealed_a = holder.seal_for(&sealing_key); + let sealed_b = holder.seal_for(&sealing_key); + assert_ne!(sealed_a, sealed_b); + } + + /// Sealed payload is too short. + #[test] + fn unseal_too_short_fails() { + let vsk: SealingSecretKey = [7_u8; 32]; + let result = GroupKeyHolder::unseal(&[0_u8; 10], vsk); + assert!(matches!(result, Err(super::SealError::TooShort))); + } + + /// Degenerate GMS values (all-zeros, all-ones, single-bit) must still produce valid, + /// non-zero, pairwise-distinct npks. Rules out accidental "if gms == default { return + /// default }" style shortcuts in the derivation. 
+ #[test] + fn degenerate_gms_produces_distinct_non_zero_keys() { + let seed = PdaSeed::new([1; 32]); + let degenerate = [[0_u8; 32], [0xFF_u8; 32], { + let mut v = [0_u8; 32]; + v[0] = 1; + v + }]; + + let npks: Vec = degenerate + .iter() + .map(|gms| { + GroupKeyHolder::from_gms(*gms) + .derive_keys_for_pda(&TEST_PROGRAM_ID, &seed) + .generate_nullifier_public_key() + }) + .collect(); + + for npk in &npks { + assert_ne!(*npk, NullifierPublicKey([0; 32])); + } + for (i, a) in npks.iter().enumerate() { + for b in &npks[i + 1..] { + assert_ne!(a, b); + } + } + } + + /// Full lifecycle: create group, distribute GMS via seal/unseal, verify key agreement. + #[test] + fn group_pda_lifecycle() { + use nssa_core::account::AccountId; + + let alice_holder = GroupKeyHolder::new(); + let pda_seed = PdaSeed::new([42_u8; 32]); + let program_id: nssa_core::program::ProgramId = [1; 8]; + + // Derive Alice's keys + let alice_keys = alice_holder.derive_keys_for_pda(&TEST_PROGRAM_ID, &pda_seed); + let alice_npk = alice_keys.generate_nullifier_public_key(); + + // Seal GMS for Bob using Bob's viewing key, Bob unseals + let bob_ssk = SecretSpendingKey([77_u8; 32]); + let bob_keys = bob_ssk.produce_private_key_holder(None); + let bob_vpk = bob_keys.generate_viewing_public_key(); + let bob_vsk = bob_keys.viewing_secret_key; + + let sealed = alice_holder.seal_for(&SealingPublicKey::from_bytes(bob_vpk.0)); + let bob_holder = + GroupKeyHolder::unseal(&sealed, bob_vsk).expect("Bob should unseal the GMS"); + + // Key agreement: both derive identical NPK and AccountId + let bob_npk = bob_holder + .derive_keys_for_pda(&TEST_PROGRAM_ID, &pda_seed) + .generate_nullifier_public_key(); + assert_eq!(alice_npk, bob_npk); + + let alice_account_id = AccountId::for_private_pda(&program_id, &pda_seed, &alice_npk, 0); + let bob_account_id = AccountId::for_private_pda(&program_id, &pda_seed, &bob_npk, 0); + assert_eq!(alice_account_id, bob_account_id); + } + + /// Same GMS + same derivation seed produces 
same keys for shared accounts. + #[test] + fn shared_account_same_gms_same_seed_produces_same_keys() { + let gms = [42_u8; 32]; + let derivation_seed = [1_u8; 32]; + let holder_a = GroupKeyHolder::from_gms(gms); + let holder_b = GroupKeyHolder::from_gms(gms); + + let npk_a = holder_a + .derive_keys_for_shared_account(&derivation_seed) + .generate_nullifier_public_key(); + let npk_b = holder_b + .derive_keys_for_shared_account(&derivation_seed) + .generate_nullifier_public_key(); + + assert_eq!(npk_a, npk_b); + } + + /// Different derivation seeds produce different keys for shared accounts. + #[test] + fn shared_account_different_seeds_produce_different_keys() { + let holder = GroupKeyHolder::from_gms([42_u8; 32]); + let npk_a = holder + .derive_keys_for_shared_account(&[1_u8; 32]) + .generate_nullifier_public_key(); + let npk_b = holder + .derive_keys_for_shared_account(&[2_u8; 32]) + .generate_nullifier_public_key(); + + assert_ne!(npk_a, npk_b); + } + + /// PDA and shared account derivations from the same GMS + same bytes never collide. 
+ #[test] + fn pda_and_shared_derivations_do_not_collide() { + let holder = GroupKeyHolder::from_gms([42_u8; 32]); + let bytes = [1_u8; 32]; + + let pda_npk = holder + .derive_keys_for_pda(&TEST_PROGRAM_ID, &PdaSeed::new(bytes)) + .generate_nullifier_public_key(); + let shared_npk = holder + .derive_keys_for_shared_account(&bytes) + .generate_nullifier_public_key(); + + assert_ne!(pda_npk, shared_npk); + } +} diff --git a/key_protocol/src/key_management/key_tree/keys_private.rs b/key_protocol/src/key_management/key_tree/keys_private.rs index 6ffc8119..ab4c5c29 100644 --- a/key_protocol/src/key_management/key_tree/keys_private.rs +++ b/key_protocol/src/key_management/key_tree/keys_private.rs @@ -1,5 +1,7 @@ +use std::collections::BTreeMap; + use k256::{Scalar, elliptic_curve::PrimeField as _}; -use nssa_core::{Identifier, NullifierPublicKey, encryption::ViewingPublicKey}; +use nssa_core::{NullifierPublicKey, PrivateAccountKind, encryption::ViewingPublicKey}; use serde::{Deserialize, Serialize}; use crate::key_management::{ @@ -9,8 +11,9 @@ use crate::key_management::{ }; #[derive(Debug, Serialize, Deserialize, Clone)] +#[cfg_attr(any(test, feature = "test_utils"), derive(PartialEq, Eq))] pub struct ChildKeysPrivate { - pub value: (KeyChain, Vec<(Identifier, nssa::Account)>), + pub value: (KeyChain, BTreeMap), pub ccc: [u8; 32], /// Can be [`None`] if root. 
pub cci: Option, @@ -47,7 +50,7 @@ impl ChildKeysPrivate { viewing_secret_key: vsk, }, }, - vec![], + BTreeMap::from_iter([(PrivateAccountKind::Regular(0), nssa::Account::default())]), ), ccc, cci: None, @@ -97,7 +100,7 @@ impl ChildKeysPrivate { viewing_secret_key: vsk, }, }, - vec![], + BTreeMap::from_iter([(PrivateAccountKind::Regular(0), nssa::Account::default())]), ), ccc, cci: Some(cci), @@ -115,9 +118,11 @@ impl KeyTreeNode for ChildKeysPrivate { } fn account_ids(&self) -> impl Iterator { - self.value.1.iter().map(|(identifier, _)| { - nssa::AccountId::from((&self.value.0.nullifier_public_key, *identifier)) - }) + let npk = self.value.0.nullifier_public_key; + self.value + .1 + .keys() + .map(move |kind| nssa::AccountId::for_private_account(&npk, kind)) } } diff --git a/key_protocol/src/key_management/key_tree/keys_public.rs b/key_protocol/src/key_management/key_tree/keys_public.rs index 3ab9cc35..4671795d 100644 --- a/key_protocol/src/key_management/key_tree/keys_public.rs +++ b/key_protocol/src/key_management/key_tree/keys_public.rs @@ -4,6 +4,7 @@ use serde::{Deserialize, Serialize}; use crate::key_management::key_tree::traits::KeyTreeNode; #[derive(Debug, Serialize, Deserialize, Clone)] +#[cfg_attr(any(test, feature = "test_utils"), derive(PartialEq, Eq))] pub struct ChildKeysPublic { pub csk: nssa::PrivateKey, pub cpk: nssa::PublicKey, diff --git a/key_protocol/src/key_management/key_tree/mod.rs b/key_protocol/src/key_management/key_tree/mod.rs index 0ae0a52f..3635c65c 100644 --- a/key_protocol/src/key_management/key_tree/mod.rs +++ b/key_protocol/src/key_management/key_tree/mod.rs @@ -21,6 +21,7 @@ pub mod traits; pub const DEPTH_SOFT_CAP: u32 = 20; #[derive(Debug, Serialize, Deserialize, Clone)] +#[cfg_attr(any(test, feature = "test_utils"), derive(PartialEq, Eq))] pub struct KeyTree { pub key_map: BTreeMap, pub account_id_map: BTreeMap, @@ -274,7 +275,10 @@ impl KeyTree { identifier: Identifier, ) -> Option { let node = self.key_map.get(cci)?; - let 
account_id = nssa::AccountId::from((&node.value.0.nullifier_public_key, identifier)); + let account_id = nssa::AccountId::for_regular_private_account( + &node.value.0.nullifier_public_key, + identifier, + ); if self.account_id_map.contains_key(&account_id) { return None; } @@ -297,7 +301,13 @@ impl KeyTree { println!("Cleanup of tree at depth {i}"); for id in ChainIndex::chain_ids_at_depth(i) { if let Some(node) = self.key_map.get(&id).cloned() { - if node.value.1.is_empty() { + if node.value.1.is_empty() + || node + .value + .1 + .iter() + .all(|(_, acc)| acc == &nssa::Account::default()) + { let account_ids = node.account_ids(); self.key_map.remove(&id); for addr in account_ids { @@ -319,6 +329,7 @@ mod tests { use std::{collections::HashSet, str::FromStr as _}; use nssa::AccountId; + use nssa_core::PrivateAccountKind; use super::*; @@ -531,49 +542,49 @@ mod tests { .key_map .get_mut(&ChainIndex::from_str("/1").unwrap()) .unwrap(); - acc.value.1.push(( - 0, + acc.value.1.insert( + PrivateAccountKind::Regular(0), nssa::Account { balance: 2, ..nssa::Account::default() }, - )); + ); let acc = tree .key_map .get_mut(&ChainIndex::from_str("/2").unwrap()) .unwrap(); - acc.value.1.push(( - 0, + acc.value.1.insert( + PrivateAccountKind::Regular(0), nssa::Account { balance: 3, ..nssa::Account::default() }, - )); + ); let acc = tree .key_map .get_mut(&ChainIndex::from_str("/0/1").unwrap()) .unwrap(); - acc.value.1.push(( - 0, + acc.value.1.insert( + PrivateAccountKind::Regular(0), nssa::Account { balance: 5, ..nssa::Account::default() }, - )); + ); let acc = tree .key_map .get_mut(&ChainIndex::from_str("/1/0").unwrap()) .unwrap(); - acc.value.1.push(( - 0, + acc.value.1.insert( + PrivateAccountKind::Regular(0), nssa::Account { balance: 6, ..nssa::Account::default() }, - )); + ); // Update account_id_map for nodes that now have entries for chain_index_str in ["/1", "/2", "/0/1", "/1/0"] { @@ -605,15 +616,15 @@ mod tests { assert_eq!(key_set, key_set_res); let acc = 
&tree.key_map[&ChainIndex::from_str("/1").unwrap()]; - assert_eq!(acc.value.1[0].1.balance, 2); + assert_eq!(acc.value.1[&PrivateAccountKind::Regular(0)].balance, 2); let acc = &tree.key_map[&ChainIndex::from_str("/2").unwrap()]; - assert_eq!(acc.value.1[0].1.balance, 3); + assert_eq!(acc.value.1[&PrivateAccountKind::Regular(0)].balance, 3); let acc = &tree.key_map[&ChainIndex::from_str("/0/1").unwrap()]; - assert_eq!(acc.value.1[0].1.balance, 5); + assert_eq!(acc.value.1[&PrivateAccountKind::Regular(0)].balance, 5); let acc = &tree.key_map[&ChainIndex::from_str("/1/0").unwrap()]; - assert_eq!(acc.value.1[0].1.balance, 6); + assert_eq!(acc.value.1[&PrivateAccountKind::Regular(0)].balance, 6); } } diff --git a/key_protocol/src/key_management/mod.rs b/key_protocol/src/key_management/mod.rs index 065af364..ad98d7e2 100644 --- a/key_protocol/src/key_management/mod.rs +++ b/key_protocol/src/key_management/mod.rs @@ -6,13 +6,14 @@ use secret_holders::{PrivateKeyHolder, SecretSpendingKey, SeedHolder}; use serde::{Deserialize, Serialize}; pub mod ephemeral_key_holder; +pub mod group_key_holder; pub mod key_tree; pub mod secret_holders; pub type PublicAccountSigningKey = [u8; 32]; -#[derive(Serialize, Deserialize, Clone, Debug)] -/// Entrypoint to key management. +/// Private account keychain. 
+#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] pub struct KeyChain { pub secret_spending_key: SecretSpendingKey, pub private_key_holder: PrivateKeyHolder, @@ -71,7 +72,7 @@ impl KeyChain { index: Option, ) -> SharedSecretKey { SharedSecretKey::new( - &self.secret_spending_key.generate_viewing_secret_key(index), + self.secret_spending_key.generate_viewing_secret_key(index), ephemeral_public_key_sender, ) } diff --git a/key_protocol/src/key_management/secret_holders.rs b/key_protocol/src/key_management/secret_holders.rs index 9804ba39..f5e71ca8 100644 --- a/key_protocol/src/key_management/secret_holders.rs +++ b/key_protocol/src/key_management/secret_holders.rs @@ -17,14 +17,14 @@ pub struct SeedHolder { } /// Secret spending key object. Can produce `PrivateKeyHolder` objects. -#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] pub struct SecretSpendingKey(pub [u8; 32]); pub type ViewingSecretKey = Scalar; -#[derive(Serialize, Deserialize, Debug, Clone)] /// Private key holder. Produces public keys. Can produce `account_id`. Can produce shared secret /// for recepient. 
+#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] pub struct PrivateKeyHolder { pub nullifier_secret_key: NullifierSecretKey, pub viewing_secret_key: ViewingSecretKey, diff --git a/key_protocol/src/key_protocol_core/mod.rs b/key_protocol/src/key_protocol_core/mod.rs deleted file mode 100644 index 4df6df82..00000000 --- a/key_protocol/src/key_protocol_core/mod.rs +++ /dev/null @@ -1,232 +0,0 @@ -use std::collections::BTreeMap; - -use anyhow::Result; -use k256::AffinePoint; -use nssa::{Account, AccountId}; -use nssa_core::Identifier; -use serde::{Deserialize, Serialize}; - -use crate::key_management::{ - KeyChain, - key_tree::{KeyTreePrivate, KeyTreePublic, chain_index::ChainIndex}, - secret_holders::SeedHolder, -}; - -pub type PublicKey = AffinePoint; - -#[derive(Clone, Debug, Serialize, Deserialize)] -pub struct UserPrivateAccountData { - pub key_chain: KeyChain, - pub accounts: Vec<(Identifier, Account)>, -} - -#[derive(Clone, Debug, Serialize, Deserialize)] -pub struct NSSAUserData { - /// Default public accounts. - pub default_pub_account_signing_keys: BTreeMap, - /// Default private accounts. - pub default_user_private_accounts: BTreeMap, - /// Tree of public keys. - pub public_key_tree: KeyTreePublic, - /// Tree of private keys. 
- pub private_key_tree: KeyTreePrivate, -} - -impl NSSAUserData { - fn valid_public_key_transaction_pairing_check( - accounts_keys_map: &BTreeMap, - ) -> bool { - let mut check_res = true; - for (account_id, key) in accounts_keys_map { - let expected_account_id = - nssa::AccountId::from(&nssa::PublicKey::new_from_private_key(key)); - if &expected_account_id != account_id { - println!("{expected_account_id}, {account_id}"); - check_res = false; - } - } - check_res - } - - fn valid_private_key_transaction_pairing_check( - accounts_keys_map: &BTreeMap, - ) -> bool { - let mut check_res = true; - for (account_id, entry) in accounts_keys_map { - let any_match = entry.accounts.iter().any(|(identifier, _)| { - nssa::AccountId::from((&entry.key_chain.nullifier_public_key, *identifier)) - == *account_id - }); - if !any_match { - println!("No matching entry found for account_id {account_id}"); - check_res = false; - } - } - check_res - } - - pub fn new_with_accounts( - default_accounts_keys: BTreeMap, - default_accounts_key_chains: BTreeMap, - public_key_tree: KeyTreePublic, - private_key_tree: KeyTreePrivate, - ) -> Result { - if !Self::valid_public_key_transaction_pairing_check(&default_accounts_keys) { - anyhow::bail!( - "Key transaction pairing check not satisfied, there are public account_ids, which are not derived from keys" - ); - } - - if !Self::valid_private_key_transaction_pairing_check(&default_accounts_key_chains) { - anyhow::bail!( - "Key transaction pairing check not satisfied, there are private account_ids, which are not derived from keys" - ); - } - - Ok(Self { - default_pub_account_signing_keys: default_accounts_keys, - default_user_private_accounts: default_accounts_key_chains, - public_key_tree, - private_key_tree, - }) - } - - /// Generated new private key for public transaction signatures. - /// - /// Returns the `account_id` of new account. 
- pub fn generate_new_public_transaction_private_key( - &mut self, - parent_cci: Option, - ) -> (nssa::AccountId, ChainIndex) { - match parent_cci { - Some(parent_cci) => self - .public_key_tree - .generate_new_public_node(&parent_cci) - .expect("Parent must be present in a tree"), - None => self - .public_key_tree - .generate_new_public_node_layered() - .expect("Search for new node slot failed"), - } - } - - /// Returns the signing key for public transaction signatures. - #[must_use] - pub fn get_pub_account_signing_key( - &self, - account_id: nssa::AccountId, - ) -> Option<&nssa::PrivateKey> { - self.default_pub_account_signing_keys - .get(&account_id) - .or_else(|| self.public_key_tree.get_node(account_id).map(Into::into)) - } - - /// Creates a new receiving key node and returns its `ChainIndex`. - pub fn create_private_accounts_key(&mut self, parent_cci: Option) -> ChainIndex { - match parent_cci { - Some(parent_cci) => self - .private_key_tree - .create_private_accounts_key_node(&parent_cci) - .expect("Parent must be present in a tree"), - None => self - .private_key_tree - .create_private_accounts_key_node_layered() - .expect("Search for new node slot failed"), - } - } - - /// Registers an additional identifier on an existing private key node, deriving and recording - /// the corresponding `AccountId`. Returns `None` if the node does not exist or the identifier - /// is already registered. - pub fn register_identifier_on_private_key_chain( - &mut self, - cci: &ChainIndex, - identifier: Identifier, - ) -> Option { - self.private_key_tree - .register_identifier_on_node(cci, identifier) - } - - /// Returns the key chain and account data for the given private account ID. 
- #[must_use] - pub fn get_private_account( - &self, - account_id: nssa::AccountId, - ) -> Option<(KeyChain, nssa_core::account::Account, Identifier)> { - // Check default accounts - if let Some(entry) = self.default_user_private_accounts.get(&account_id) { - for (identifier, account) in &entry.accounts { - let expected_id = - nssa::AccountId::from((&entry.key_chain.nullifier_public_key, *identifier)); - if expected_id == account_id { - return Some((entry.key_chain.clone(), account.clone(), *identifier)); - } - } - return None; - } - // Check tree - if let Some(node) = self.private_key_tree.get_node(account_id) { - let key_chain = &node.value.0; - for (identifier, account) in &node.value.1 { - let expected_id = - nssa::AccountId::from((&key_chain.nullifier_public_key, *identifier)); - if expected_id == account_id { - return Some((key_chain.clone(), account.clone(), *identifier)); - } - } - } - None - } - - pub fn account_ids(&self) -> impl Iterator { - self.public_account_ids().chain(self.private_account_ids()) - } - - pub fn public_account_ids(&self) -> impl Iterator { - self.default_pub_account_signing_keys - .keys() - .copied() - .chain(self.public_key_tree.account_id_map.keys().copied()) - } - - pub fn private_account_ids(&self) -> impl Iterator { - self.default_user_private_accounts - .keys() - .copied() - .chain(self.private_key_tree.account_id_map.keys().copied()) - } -} - -impl Default for NSSAUserData { - fn default() -> Self { - let (seed_holder, _mnemonic) = SeedHolder::new_mnemonic(""); - Self::new_with_accounts( - BTreeMap::new(), - BTreeMap::new(), - KeyTreePublic::new(&seed_holder), - KeyTreePrivate::new(&seed_holder), - ) - .unwrap() - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn new_account() { - let mut user_data = NSSAUserData::default(); - - let chain_index = user_data.create_private_accounts_key(Some(ChainIndex::root())); - - let is_key_chain_generated = user_data - .private_key_tree - .key_map - 
.contains_key(&chain_index); - assert!(is_key_chain_generated); - - let key_chain = &user_data.private_key_tree.key_map[&chain_index].value.0; - println!("{key_chain:#?}"); - } -} diff --git a/key_protocol/src/lib.rs b/key_protocol/src/lib.rs index e3fe31cf..a8c333e4 100644 --- a/key_protocol/src/lib.rs +++ b/key_protocol/src/lib.rs @@ -1,4 +1,3 @@ #![expect(clippy::print_stdout, reason = "TODO: fix later")] pub mod key_management; -pub mod key_protocol_core; diff --git a/keycard_tests.sh b/keycard_tests.sh index 7b4e7ae5..808791bc 100644 --- a/keycard_tests.sh +++ b/keycard_tests.sh @@ -1,3 +1,4 @@ +#!/bin/bash # Run wallet_with_keycard.sh first source venv/bin/activate # Load the appropriate virtual environment @@ -15,24 +16,31 @@ export KEYCARD_MNEMONIC="fashion degree mountain wool question damp current pond wallet keycard load unset KEYCARD_MNEMONIC -echo "Test: wallet auth-transfer init --key-path \"m/44'/60'/0'/0/0\"" -wallet auth-transfer init --key-path "m/44'/60'/0'/0/0" +echo "Test: wallet auth-transfer init --account \"m/44'/60'/0'/0/0\"" +wallet auth-transfer init --account "m/44'/60'/0'/0/0" -echo "Test: wallet account get --key-path \"m/44'/60'/0'/0/0\"" -wallet account get --key-path "m/44'/60'/0'/0/0" +echo "Test: wallet account get --account-id \"m/44'/60'/0'/0/0\"" +wallet account get --account-id "m/44'/60'/0'/0/0" -echo "Test: wallet pinata claim --key-path \"m/44'/60'/0'/0/0\"" -wallet pinata claim --key-path "m/44'/60'/0'/0/0" +echo "Test: wallet pinata claim --to \"m/44'/60'/0'/0/0\"" +wallet pinata claim --to "m/44'/60'/0'/0/0" -echo "Test: wallet account get --key-path \"m/44'/60'/0'/0/0\"" -wallet account get --key-path "m/44'/60'/0'/0/0" +echo "Test: wallet account get --account-id \"m/44'/60'/0'/0/0\"" +wallet account get --account-id "m/44'/60'/0'/0/0" -#echo "Initialize new account (auth-transfer init) and send" -wallet auth-transfer init --key-path "m/44'/60'/0'/0/1" -wallet auth-transfer send --amount 40 --from-key-path 
"m/44'/60'/0'/0/0" --to-key-path "m/44'/60'/0'/0/1" +echo "Test: wallet auth-transfer init and send between two keycard accounts" +wallet auth-transfer init --account "m/44'/60'/0'/0/1" +wallet auth-transfer send --amount 40 --from "m/44'/60'/0'/0/0" --to "m/44'/60'/0'/0/1" -echo "Test: wallet account get --key-path \"m/44'/60'/0'/0/0\"" -wallet account get --key-path "m/44'/60'/0'/0/0" +echo "Test: wallet account get --account-id \"m/44'/60'/0'/0/0\"" +wallet account get --account-id "m/44'/60'/0'/0/0" -echo "Test: wallet account get --key-path \"m/44'/60'/0'/0/1\"" -wallet account get --key-path "m/44'/60'/0'/0/1" +echo "Test: wallet account get --account-id \"m/44'/60'/0'/0/1\"" +wallet account get --account-id "m/44'/60'/0'/0/1" + +# Send from keycard account to a local wallet account (foreign recipient — no signature needed) +echo "Test: wallet auth-transfer send from keycard to local account" +wallet auth-transfer send --amount 10 --from "m/44'/60'/0'/0/0" --to "Public/7wHg9sbJwc6h3NP1S9bekfAzB8CHifEcxKswCKUt3YQo" + +echo "Test: wallet account get --account-id \"m/44'/60'/0'/0/0\"" +wallet account get --account-id "m/44'/60'/0'/0/0" diff --git a/keycard_wallet/Cargo.toml b/keycard_wallet/Cargo.toml index d8299d44..f8f3fd0b 100644 --- a/keycard_wallet/Cargo.toml +++ b/keycard_wallet/Cargo.toml @@ -10,4 +10,6 @@ workspace = true [dependencies] nssa.workspace = true pyo3.workspace = true -log.workspace = true \ No newline at end of file +log.workspace = true +serde = { workspace = true, features = ["derive"] } +serde_json.workspace = true \ No newline at end of file diff --git a/keycard_wallet/python/keycard_wallet.py b/keycard_wallet/python/keycard_wallet.py index 767c1aba..7e18636a 100644 --- a/keycard_wallet/python/keycard_wallet.py +++ b/keycard_wallet/python/keycard_wallet.py @@ -1,13 +1,14 @@ -from smartcard.System import readers +from smartcard.System import readers from keycard.exceptions import APDUError, TransportError -from ecdsa import VerifyingKey, 
SECP256k1 +from ecdsa import VerifyingKey, SECP256k1 from keycard.keycard import KeyCard -from mnemonic import Mnemonic -from keycard import constants - +from mnemonic import Mnemonic +from keycard import constants + import keycard +import secrets DEFAULT_PAIRING_PASSWORD = "KeycardDefaultPairing" @@ -36,14 +37,30 @@ class KeycardWallet: return False return True + def initialize(self, pin: str) -> bool: + try: + self.card.select() + + if self.card.is_initialized: + raise RuntimeError("Card is already initialized") + + puk = ''.join(secrets.choice('0123456789') for _ in range(12)) + self.card.init(pin, puk, DEFAULT_PAIRING_PASSWORD) + print(f"Keycard PUK: {puk}") + print("Record this PUK and store it somewhere safe. It cannot be recovered.") + return True + except Exception as e: + raise RuntimeError(f"Error initializing keycard: {e}") from e + def setup_communication(self, pin: str, password = DEFAULT_PAIRING_PASSWORD) -> bool: self.card.select() if not self.card.is_initialized: - raise RuntimeError(f"Error setting up communication: uninitialized keycard") + raise RuntimeError("Card is not initialized — run 'wallet keycard init' first") pairing_index, pairing_key = self.card.pair(password) self.pairing_index = pairing_index + self.pairing_key = pairing_key try: self.card.open_secure_channel(pairing_index, pairing_key) @@ -57,11 +74,36 @@ class KeycardWallet: return True + def get_pairing_data(self) -> tuple[int, bytes]: + return (self.pairing_index, self.pairing_key) + + def setup_communication_with_pairing(self, pin: str, pairing_index: int, pairing_key: bytes) -> bool: + self.card.select() + + if not self.card.is_initialized: + raise RuntimeError("Card is not initialized — run 'wallet keycard init' first") + + self.pairing_index = pairing_index + self.pairing_key = pairing_key + + try: + self.card.open_secure_channel(pairing_index, pairing_key) + self.card.verify_pin(pin) + except Exception as e: + raise RuntimeError(f"Error setting up communication with stored 
pairing: {e}") from e + + return True + + def close_session(self) -> bool: + return True + def load_mnemonic(self, mnemonic: str) -> bool: try: # Convert mnemonic to seed - mnemo = Mnemonic("english") - seed = mnemo.to_seed(mnemonic) + mnemo = Mnemonic("english") + if not mnemo.check(mnemonic): + raise RuntimeError("Invalid mnemonic phrase — check spelling and word count") + seed = mnemo.to_seed(mnemonic) # Load the LEE seed onto the card result = self.card.load_key( diff --git a/keycard_wallet/src/lib.rs b/keycard_wallet/src/lib.rs index 37b010fd..17c62efd 100644 --- a/keycard_wallet/src/lib.rs +++ b/keycard_wallet/src/lib.rs @@ -1,8 +1,24 @@ +use std::path::PathBuf; + use nssa::{AccountId, PublicKey, Signature}; use pyo3::{prelude::*, types::PyAny}; +use serde::{Deserialize, Serialize}; pub mod python_path; +// TODO: encrypt at rest alongside broader wallet storage encryption work. +#[derive(Serialize, Deserialize)] +pub struct KeycardPairingData { + pub index: u8, + pub key: Vec, +} + +impl KeycardPairingData { + const fn is_valid(&self) -> bool { + self.key.len() == 32 && self.index <= 4 + } +} + /// Rust wrapper around the Python `KeycardWallet` class. pub struct KeycardWallet { instance: Py, @@ -28,6 +44,57 @@ impl KeycardWallet { .extract() } + pub fn initialize(&self, py: Python<'_>, pin: &str) -> PyResult { + self.instance + .bind(py) + .call_method1("initialize", (pin,))? + .extract() + } + + pub fn get_pairing_data(&self, py: Python<'_>) -> PyResult<(u8, Vec)> { + self.instance + .bind(py) + .call_method0("get_pairing_data")? + .extract() + } + + pub fn setup_communication_with_pairing( + &self, + py: Python<'_>, + pin: &str, + index: u8, + key: &[u8], + ) -> PyResult { + self.instance + .bind(py) + .call_method1("setup_communication_with_pairing", (pin, index, key.to_vec()))? + .extract() + } + + pub fn close_session(&self, py: Python<'_>) -> PyResult { + self.instance + .bind(py) + .call_method0("close_session")? 
+ .extract() + } + + /// Connect using a stored pairing if available, falling back to a fresh pair. + /// Saves any newly established pairing to disk. + pub fn connect(&self, py: Python<'_>, pin: &str) -> PyResult<()> { + if let Some(pairing) = load_pairing().filter(KeycardPairingData::is_valid) + && self + .setup_communication_with_pairing(py, pin, pairing.index, &pairing.key) + .is_ok() + { + return Ok(()); + } + self.setup_communication(py, pin)?; + if let Ok((index, key)) = self.get_pairing_data(py) { + save_pairing(&KeycardPairingData { index, key }); + } + Ok(()) + } + pub fn setup_communication(&self, py: Python<'_>, pin: &str) -> PyResult { self.instance .bind(py) @@ -60,20 +127,10 @@ impl KeycardWallet { pub fn get_public_key_for_path_with_connect(pin: &str, path: &str) -> PyResult { Python::with_gil(|py| { python_path::add_python_path(py)?; - let wallet = Self::new(py)?; - - let is_connected = wallet.setup_communication(py, pin)?; - - if is_connected { - log::info!("\u{2705} Keycard is now connected to wallet."); - } else { - log::info!("\u{274c} Keycard is not connected to wallet."); - } - + wallet.connect(py, pin)?; let pub_key = wallet.get_public_key_for_path(py, path); - - drop(wallet.disconnect(py)); + drop(wallet.close_session(py)); pub_key }) } @@ -115,21 +172,10 @@ impl KeycardWallet { ) -> PyResult<(Signature, PublicKey)> { Python::with_gil(|py| { python_path::add_python_path(py)?; - let wallet = Self::new(py)?; - - let is_connected = wallet.setup_communication(py, pin)?; - - if is_connected { - log::info!("\u{2705} Keycard is now connected to wallet."); - } else { - log::info!("\u{274c} Keycard is not connected to wallet."); - } - + wallet.connect(py, pin)?; let result = wallet.sign_message_for_path(py, path, message); - - drop(wallet.disconnect(py)); - + drop(wallet.close_session(py)); result }) } @@ -147,3 +193,31 @@ impl KeycardWallet { Ok(format!("Public/{}", AccountId::from(&public_key))) } } + +fn pairing_file_path() -> Option { + let home 
= std::env::var("NSSA_WALLET_HOME_DIR") + .map(PathBuf::from) + .or_else(|_| std::env::home_dir().map(|h| h.join(".nssa").join("wallet")).ok_or(())) + .ok()?; + Some(home.join("keycard_pairing.json")) +} + +fn load_pairing() -> Option { + let path = pairing_file_path()?; + let file = std::fs::File::open(path).ok()?; + serde_json::from_reader(file).ok() +} + +fn save_pairing(data: &KeycardPairingData) { + if let Some(path) = pairing_file_path() + && let Ok(json) = serde_json::to_vec_pretty(data) + { + drop(std::fs::write(path, json)); + } +} + +pub fn clear_pairing() { + if let Some(path) = pairing_file_path() { + drop(std::fs::remove_file(path)); + } +} diff --git a/nssa/Cargo.toml b/nssa/Cargo.toml index d8f0807c..20889e11 100644 --- a/nssa/Cargo.toml +++ b/nssa/Cargo.toml @@ -10,6 +10,7 @@ workspace = true [dependencies] nssa_core = { workspace = true, features = ["host"] } clock_core.workspace = true +faucet_core.workspace = true anyhow.workspace = true thiserror.workspace = true @@ -30,6 +31,7 @@ risc0-binfmt = "3.0.2" [dev-dependencies] token_core.workspace = true +authenticated_transfer_core.workspace = true test_program_methods.workspace = true env_logger.workspace = true diff --git a/nssa/core/src/circuit_io.rs b/nssa/core/src/circuit_io.rs index c71003de..63c188ef 100644 --- a/nssa/core/src/circuit_io.rs +++ b/nssa/core/src/circuit_io.rs @@ -12,23 +12,99 @@ use crate::{ pub struct PrivacyPreservingCircuitInput { /// Outputs of the program execution. pub program_outputs: Vec, - /// Visibility mask for accounts. - /// - /// - `0` - public account - /// - `1` - private account with authentication - /// - `2` - private account without authentication - /// - `3` - private PDA account - pub visibility_mask: Vec, - /// Public keys and identifiers of private accounts. - pub private_account_keys: Vec<(NullifierPublicKey, Identifier, SharedSecretKey)>, - /// Nullifier secret keys for authorized private accounts. 
- pub private_account_nsks: Vec, - /// Membership proofs for private accounts. Can be [`None`] for uninitialized accounts. - pub private_account_membership_proofs: Vec>, + /// One entry per `pre_state`, in the same order as the program's `pre_states`. + /// Length must equal the number of `pre_states` derived from `program_outputs`. + /// The guest's `private_pda_npk_by_position` and `private_pda_bound_positions` + /// rely on this position alignment. + pub account_identities: Vec, /// Program ID. pub program_id: ProgramId, } +/// Per-account input to the privacy-preserving circuit. Each variant carries exactly the fields +/// the guest needs for that account's code path. +#[derive(Serialize, Deserialize, Clone)] +pub enum InputAccountIdentity { + /// Public account. The guest reads pre/post state from `program_outputs` and emits no + /// commitment, ciphertext, or nullifier. + Public, + /// Init of an authorized standalone private account: no membership proof. The `pre_state` + /// must be `Account::default()`. The `account_id` is derived as + /// `AccountId::for_regular_private_account(&NullifierPublicKey::from(nsk), identifier)` and + /// matched against `pre_state.account_id`. + PrivateAuthorizedInit { + ssk: SharedSecretKey, + nsk: NullifierSecretKey, + identifier: Identifier, + }, + /// Update of an authorized standalone private account: existing on-chain commitment, with + /// membership proof. + PrivateAuthorizedUpdate { + ssk: SharedSecretKey, + nsk: NullifierSecretKey, + membership_proof: MembershipProof, + identifier: Identifier, + }, + /// Init of a standalone private account the caller does not own (e.g. a recipient who + /// doesn't yet exist on chain). No `nsk`, no membership proof. + PrivateUnauthorized { + npk: NullifierPublicKey, + ssk: SharedSecretKey, + identifier: Identifier, + }, + /// Init of a private PDA, unauthorized. The npk-to-account_id binding is proven upstream + /// via `Claim::Pda(seed)` or a caller's `pda_seeds` match. 
The identifier diversifies the + /// PDA within the `(program_id, seed, npk)` family: `AccountId::for_private_pda` uses it + /// as the 4th input. + PrivatePdaInit { + npk: NullifierPublicKey, + ssk: SharedSecretKey, + identifier: Identifier, + }, + /// Update of an existing private PDA, authorized, with membership proof. `npk` is derived + /// from `nsk`. Authorization is established upstream by a caller `pda_seeds` match or a + /// previously-seen authorization in a chained call. + PrivatePdaUpdate { + ssk: SharedSecretKey, + nsk: NullifierSecretKey, + membership_proof: MembershipProof, + identifier: Identifier, + }, +} + +impl InputAccountIdentity { + #[must_use] + pub const fn is_public(&self) -> bool { + matches!(self, Self::Public) + } + + #[must_use] + pub const fn is_private_pda(&self) -> bool { + matches!( + self, + Self::PrivatePdaInit { .. } | Self::PrivatePdaUpdate { .. } + ) + } + + /// For private PDA variants, return the `(npk, identifier)` pair. `Init` carries both + /// directly; `Update` derives `npk` from `nsk`. For non-PDA variants returns `None`. + #[must_use] + pub fn npk_if_private_pda(&self) -> Option<(NullifierPublicKey, Identifier)> { + match self { + Self::PrivatePdaInit { + npk, identifier, .. + } => Some((*npk, *identifier)), + Self::PrivatePdaUpdate { + nsk, identifier, .. + } => Some((NullifierPublicKey::from(nsk), *identifier)), + Self::Public + | Self::PrivateAuthorizedInit { .. } + | Self::PrivateAuthorizedUpdate { .. } + | Self::PrivateUnauthorized { .. 
} => None, + } + } +} + #[derive(Serialize, Deserialize)] #[cfg_attr(any(feature = "host", test), derive(Debug, PartialEq, Eq))] pub struct PrivacyPreservingCircuitOutput { diff --git a/nssa/core/src/encryption/mod.rs b/nssa/core/src/encryption/mod.rs index 80d62f30..4b675d0e 100644 --- a/nssa/core/src/encryption/mod.rs +++ b/nssa/core/src/encryption/mod.rs @@ -8,7 +8,7 @@ use serde::{Deserialize, Serialize}; #[cfg(feature = "host")] pub use shared_key_derivation::{EphemeralPublicKey, EphemeralSecretKey, ViewingPublicKey}; -use crate::{Commitment, Identifier, account::Account}; +use crate::{Commitment, account::Account, program::PrivateAccountKind}; #[cfg(feature = "host")] pub mod shared_key_derivation; @@ -40,13 +40,14 @@ impl EncryptionScheme { #[must_use] pub fn encrypt( account: &Account, - identifier: Identifier, + kind: &PrivateAccountKind, shared_secret: &SharedSecretKey, commitment: &Commitment, output_index: u32, ) -> Ciphertext { - // Plaintext: identifier (16 bytes, little-endian) || account bytes - let mut buffer = identifier.to_le_bytes().to_vec(); + // Plaintext: PrivateAccountKind::HEADER_LEN bytes header || account bytes. + // Both variants produce the same header length — see PrivateAccountKind::to_header_bytes. 
+ let mut buffer = kind.to_header_bytes().to_vec(); buffer.extend_from_slice(&account.to_bytes()); Self::symmetric_transform(&mut buffer, shared_secret, commitment, output_index); Ciphertext(buffer) @@ -89,17 +90,19 @@ impl EncryptionScheme { shared_secret: &SharedSecretKey, commitment: &Commitment, output_index: u32, - ) -> Option<(Identifier, Account)> { + ) -> Option<(PrivateAccountKind, Account)> { use std::io::Cursor; let mut buffer = ciphertext.0.clone(); Self::symmetric_transform(&mut buffer, shared_secret, commitment, output_index); - if buffer.len() < 16 { + if buffer.len() < PrivateAccountKind::HEADER_LEN { return None; } - let identifier = Identifier::from_le_bytes(buffer[..16].try_into().unwrap()); + let header: &[u8; PrivateAccountKind::HEADER_LEN] = + buffer[..PrivateAccountKind::HEADER_LEN].try_into().unwrap(); + let kind = PrivateAccountKind::from_header_bytes(header)?; - let mut cursor = Cursor::new(&buffer[16..]); + let mut cursor = Cursor::new(&buffer[PrivateAccountKind::HEADER_LEN..]); Account::from_cursor(&mut cursor) .inspect_err(|err| { println!( @@ -112,6 +115,43 @@ impl EncryptionScheme { ); }) .ok() - .map(|account| (identifier, account)) + .map(|account| (kind, account)) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::{ + account::{Account, AccountId}, + program::PdaSeed, + }; + + #[test] + fn encrypt_same_length_for_account_and_pda() { + let account = Account::default(); + let secret = SharedSecretKey([0_u8; 32]); + let commitment = crate::Commitment::new(&AccountId::new([0_u8; 32]), &Account::default()); + + let account_ct = EncryptionScheme::encrypt( + &account, + &PrivateAccountKind::Regular(42), + &secret, + &commitment, + 0, + ); + let pda_ct = EncryptionScheme::encrypt( + &account, + &PrivateAccountKind::Pda { + program_id: [1_u32; 8], + seed: PdaSeed::new([2_u8; 32]), + identifier: 42, + }, + &secret, + &commitment, + 0, + ); + + assert_eq!(account_ct.0.len(), pda_ct.0.len()); } } diff --git 
a/nssa/core/src/encryption/shared_key_derivation.rs b/nssa/core/src/encryption/shared_key_derivation.rs index 8169e8f9..8ea5aac8 100644 --- a/nssa/core/src/encryption/shared_key_derivation.rs +++ b/nssa/core/src/encryption/shared_key_derivation.rs @@ -17,7 +17,9 @@ use serde::{Deserialize, Serialize}; use crate::{SharedSecretKey, encryption::Scalar}; -#[derive(Serialize, Deserialize, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)] +#[derive( + Serialize, Deserialize, Clone, PartialEq, Eq, PartialOrd, Ord, BorshSerialize, BorshDeserialize, +)] pub struct Secp256k1Point(pub Vec); impl std::fmt::Debug for Secp256k1Point { @@ -56,8 +58,8 @@ impl From<&EphemeralSecretKey> for EphemeralPublicKey { impl SharedSecretKey { /// Creates a new shared secret key from a scalar and a point. #[must_use] - pub fn new(scalar: &Scalar, point: &Secp256k1Point) -> Self { - let scalar = k256::Scalar::from_repr((*scalar).into()).unwrap(); + pub fn new(scalar: Scalar, point: &Secp256k1Point) -> Self { + let scalar = k256::Scalar::from_repr(scalar.into()).unwrap(); let point: [u8; 33] = point.0.clone().try_into().unwrap(); let encoded = EncodedPoint::from_bytes(point).unwrap(); diff --git a/nssa/core/src/lib.rs b/nssa/core/src/lib.rs index 478d475c..466e1f5d 100644 --- a/nssa/core/src/lib.rs +++ b/nssa/core/src/lib.rs @@ -3,13 +3,16 @@ reason = "We prefer to group methods by functionality rather than by type for encoding" )] -pub use circuit_io::{PrivacyPreservingCircuitInput, PrivacyPreservingCircuitOutput}; +pub use circuit_io::{ + InputAccountIdentity, PrivacyPreservingCircuitInput, PrivacyPreservingCircuitOutput, +}; pub use commitment::{ Commitment, CommitmentSetDigest, DUMMY_COMMITMENT, DUMMY_COMMITMENT_HASH, MembershipProof, compute_digest_for_path, }; pub use encryption::{EncryptionScheme, SharedSecretKey}; pub use nullifier::{Identifier, Nullifier, NullifierPublicKey, NullifierSecretKey}; +pub use program::PrivateAccountKind; pub mod account; mod circuit_io; @@ -22,6 
+25,8 @@ pub mod program; #[cfg(feature = "host")] pub mod error; +pub const GENESIS_BLOCK_ID: BlockId = 1; + pub type BlockId = u64; /// Unix timestamp in milliseconds. pub type Timestamp = u64; diff --git a/nssa/core/src/nullifier.rs b/nssa/core/src/nullifier.rs index aafe3f7c..d1fbae42 100644 --- a/nssa/core/src/nullifier.rs +++ b/nssa/core/src/nullifier.rs @@ -8,14 +8,15 @@ const PRIVATE_ACCOUNT_ID_PREFIX: &[u8; 32] = b"/LEE/v0.3/AccountId/Private/\x00\ pub type Identifier = u128; -#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)] +#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)] #[cfg_attr(any(feature = "host", test), derive(Hash))] pub struct NullifierPublicKey(pub [u8; 32]); -impl From<(&NullifierPublicKey, Identifier)> for AccountId { - fn from(value: (&NullifierPublicKey, Identifier)) -> Self { - let (npk, identifier) = value; - +impl AccountId { + /// Derives an [`AccountId`] for a regular (non-PDA) private account from the nullifier public + /// key and identifier. 
+ #[must_use] + pub fn for_regular_private_account(npk: &NullifierPublicKey, identifier: Identifier) -> Self { // 32 bytes prefix || 32 bytes npk || 16 bytes identifier let mut bytes = [0; 80]; bytes[0..32].copy_from_slice(PRIVATE_ACCOUNT_ID_PREFIX); @@ -31,6 +32,12 @@ impl From<(&NullifierPublicKey, Identifier)> for AccountId { } } +impl From<(&NullifierPublicKey, Identifier)> for AccountId { + fn from((npk, identifier): (&NullifierPublicKey, Identifier)) -> Self { + Self::for_regular_private_account(npk, identifier) + } +} + impl AsRef<[u8]> for NullifierPublicKey { fn as_ref(&self) -> &[u8] { self.0.as_slice() @@ -155,7 +162,7 @@ mod tests { 253, 105, 164, 89, 84, 40, 191, 182, 119, 64, 255, 67, 142, ]); - let account_id = AccountId::from((&npk, 0)); + let account_id = AccountId::for_regular_private_account(&npk, 0); assert_eq!(account_id, expected_account_id); } @@ -172,7 +179,7 @@ mod tests { 56, 247, 99, 121, 165, 182, 234, 255, 19, 127, 191, 72, ]); - let account_id = AccountId::from((&npk, 1)); + let account_id = AccountId::for_regular_private_account(&npk, 1); assert_eq!(account_id, expected_account_id); } @@ -190,7 +197,7 @@ mod tests { 19, 245, 25, 214, 162, 209, 135, 252, 82, 27, 2, 174, 196, ]); - let account_id = AccountId::from((&npk, identifier)); + let account_id = AccountId::for_regular_private_account(&npk, identifier); assert_eq!(account_id, expected_account_id); } diff --git a/nssa/core/src/program.rs b/nssa/core/src/program.rs index 1ef2ef6c..27ad9b8b 100644 --- a/nssa/core/src/program.rs +++ b/nssa/core/src/program.rs @@ -1,12 +1,11 @@ use std::collections::HashSet; -#[cfg(any(feature = "host", test))] use borsh::{BorshDeserialize, BorshSerialize}; use risc0_zkvm::{DeserializeOwned, guest::env, serde::Deserializer}; use serde::{Deserialize, Serialize}; use crate::{ - BlockId, NullifierPublicKey, Timestamp, + BlockId, Identifier, NullifierPublicKey, Timestamp, account::{Account, AccountId, AccountWithMetadata}, }; @@ -27,7 +26,20 @@ pub struct 
ProgramInput { /// Each program can derive up to `2^256` unique account IDs by choosing different /// seeds. PDAs allow programs to control namespaced account identifiers without /// collisions between programs. -#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash, Serialize, Deserialize)] +#[derive( + Debug, + Clone, + Copy, + Eq, + PartialEq, + PartialOrd, + Ord, + Hash, + Serialize, + Deserialize, + BorshSerialize, + BorshDeserialize, +)] pub struct PdaSeed([u8; 32]); impl PdaSeed { @@ -35,6 +47,77 @@ impl PdaSeed { pub const fn new(value: [u8; 32]) -> Self { Self(value) } + + #[must_use] + pub const fn as_bytes(&self) -> &[u8; 32] { + &self.0 + } +} + +impl AsRef<[u8]> for PdaSeed { + fn as_ref(&self) -> &[u8] { + &self.0 + } +} + +/// Discriminates the type of private account a ciphertext belongs to, carrying the data needed +/// to reconstruct the account's [`AccountId`] on the receiver side. +/// +/// [`AccountId`]: crate::account::AccountId +#[derive( + Debug, + Clone, + PartialEq, + Eq, + PartialOrd, + Ord, + Serialize, + Deserialize, + BorshSerialize, + BorshDeserialize, +)] +pub enum PrivateAccountKind { + Regular(Identifier), + Pda { + program_id: ProgramId, + seed: PdaSeed, + identifier: Identifier, + }, +} + +impl PrivateAccountKind { + /// Borsh layout (all integers little-endian, variant index is u8): + /// + /// ```text + /// Regular(ident): 0x00 || ident (16 LE) || [0u8; 64] + /// Pda { program_id, seed, ident }: 0x01 || program_id (32) || seed (32) || ident (16 LE) + /// ``` + /// + /// Both variants are zero-padded to the same length so all ciphertexts are the same size, + /// preventing observers from distinguishing `Regular` from `Pda` via ciphertext length. + /// `HEADER_LEN` equals the borsh size of the largest variant (`Pda`): 1 + 32 + 32 + 16 = 81. + pub const HEADER_LEN: usize = 81; + + #[must_use] + pub const fn identifier(&self) -> Identifier { + match self { + Self::Regular(identifier) | Self::Pda { identifier, .. 
} => *identifier, + } + } + + #[must_use] + pub fn to_header_bytes(&self) -> [u8; Self::HEADER_LEN] { + let mut bytes = [0_u8; Self::HEADER_LEN]; + let serialized = borsh::to_vec(self).expect("borsh serialization is infallible"); + bytes[..serialized.len()].copy_from_slice(&serialized); + bytes + } + + #[cfg(feature = "host")] + #[must_use] + pub fn from_header_bytes(bytes: &[u8; Self::HEADER_LEN]) -> Option { + BorshDeserialize::deserialize(&mut bytes.as_ref()).ok() + } } impl AccountId { @@ -59,27 +142,31 @@ impl AccountId { ) } - /// Derives an [`AccountId`] for a private PDA from the program ID, seed, and nullifier - /// public key. + /// Derives an [`AccountId`] for a private PDA from the program ID, seed, nullifier public + /// key, and identifier. /// /// Unlike public PDAs ([`AccountId::for_public_pda`]), this includes the `npk` in the /// derivation, making the address unique per group of controllers sharing viewing keys. + /// The `identifier` further diversifies the address, so a single `(program_id, seed, npk)` + /// tuple controls a family of 2^128 addresses. 
#[must_use] pub fn for_private_pda( program_id: &ProgramId, seed: &PdaSeed, npk: &NullifierPublicKey, + identifier: Identifier, ) -> Self { use risc0_zkvm::sha::{Impl, Sha256 as _}; const PRIVATE_PDA_PREFIX: &[u8; 32] = b"/LEE/v0.3/AccountId/PrivatePDA/\x00"; - let mut bytes = [0_u8; 128]; + let mut bytes = [0_u8; 144]; bytes[0..32].copy_from_slice(PRIVATE_PDA_PREFIX); let program_id_bytes: &[u8] = bytemuck::try_cast_slice(program_id).expect("ProgramId should be castable to &[u8]"); bytes[32..64].copy_from_slice(program_id_bytes); bytes[64..96].copy_from_slice(&seed.0); bytes[96..128].copy_from_slice(&npk.to_byte_array()); + bytes[128..144].copy_from_slice(&identifier.to_le_bytes()); Self::new( Impl::hash_bytes(&bytes) .as_bytes() @@ -87,6 +174,21 @@ impl AccountId { .expect("Hash output must be exactly 32 bytes long"), ) } + + /// Derives the [`AccountId`] for a private account from the nullifier public key and kind. + #[must_use] + pub fn for_private_account(npk: &NullifierPublicKey, kind: &PrivateAccountKind) -> Self { + match kind { + PrivateAccountKind::Regular(identifier) => { + Self::for_regular_private_account(npk, *identifier) + } + PrivateAccountKind::Pda { + program_id, + seed, + identifier, + } => Self::for_private_pda(program_id, seed, npk, *identifier), + } + } } #[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)] @@ -630,7 +732,6 @@ pub fn validate_execution( } // 8. Total balance is preserved - let Some(total_balance_pre_states) = WrappedBalanceSum::from_balances(pre_states.iter().map(|pre| pre.account.balance)) else { @@ -845,19 +946,20 @@ mod tests { // ---- AccountId::for_private_pda tests ---- /// Pins `AccountId::for_private_pda` against a hardcoded expected output for a specific - /// `(program_id, seed, npk)` triple. Any change to `PRIVATE_PDA_PREFIX`, byte ordering, - /// or the underlying hash breaks this test. + /// `(program_id, seed, npk, identifier)` tuple. 
Any change to `PRIVATE_PDA_PREFIX`, byte + /// ordering, or the underlying hash breaks this test. #[test] fn for_private_pda_matches_pinned_value() { let program_id: ProgramId = [1; 8]; let seed = PdaSeed::new([2; 32]); let npk = NullifierPublicKey([3; 32]); + let identifier: Identifier = u128::MAX; let expected = AccountId::new([ - 132, 198, 103, 173, 244, 211, 188, 217, 249, 99, 126, 205, 152, 120, 192, 47, 13, 53, - 133, 3, 17, 69, 92, 243, 140, 94, 182, 211, 218, 75, 215, 45, + 59, 239, 182, 97, 14, 220, 96, 115, 238, 133, 143, 33, 234, 82, 237, 255, 148, 110, 54, + 124, 98, 159, 245, 101, 146, 182, 150, 54, 37, 62, 25, 17, ]); assert_eq!( - AccountId::for_private_pda(&program_id, &seed, &npk), + AccountId::for_private_pda(&program_id, &seed, &npk, identifier), expected ); } @@ -870,8 +972,8 @@ mod tests { let npk_a = NullifierPublicKey([3; 32]); let npk_b = NullifierPublicKey([4; 32]); assert_ne!( - AccountId::for_private_pda(&program_id, &seed, &npk_a), - AccountId::for_private_pda(&program_id, &seed, &npk_b), + AccountId::for_private_pda(&program_id, &seed, &npk_a, u128::MAX), + AccountId::for_private_pda(&program_id, &seed, &npk_b, u128::MAX), ); } @@ -883,8 +985,8 @@ mod tests { let seed_b = PdaSeed::new([5; 32]); let npk = NullifierPublicKey([3; 32]); assert_ne!( - AccountId::for_private_pda(&program_id, &seed_a, &npk), - AccountId::for_private_pda(&program_id, &seed_b, &npk), + AccountId::for_private_pda(&program_id, &seed_a, &npk, u128::MAX), + AccountId::for_private_pda(&program_id, &seed_b, &npk, u128::MAX), ); } @@ -896,8 +998,25 @@ mod tests { let seed = PdaSeed::new([2; 32]); let npk = NullifierPublicKey([3; 32]); assert_ne!( - AccountId::for_private_pda(&program_id_a, &seed, &npk), - AccountId::for_private_pda(&program_id_b, &seed, &npk), + AccountId::for_private_pda(&program_id_a, &seed, &npk, u128::MAX), + AccountId::for_private_pda(&program_id_b, &seed, &npk, u128::MAX), + ); + } + + /// Different identifiers produce different addresses for the 
same `(program_id, seed, npk)`, + /// confirming that each `(program_id, seed, npk)` tuple controls a family of 2^128 addresses. + #[test] + fn for_private_pda_differs_for_different_identifier() { + let program_id: ProgramId = [1; 8]; + let seed = PdaSeed::new([2; 32]); + let npk = NullifierPublicKey([3; 32]); + assert_ne!( + AccountId::for_private_pda(&program_id, &seed, &npk, 0), + AccountId::for_private_pda(&program_id, &seed, &npk, 1), + ); + assert_ne!( + AccountId::for_private_pda(&program_id, &seed, &npk, 0), + AccountId::for_private_pda(&program_id, &seed, &npk, u128::MAX), ); } @@ -908,14 +1027,62 @@ mod tests { let program_id: ProgramId = [1; 8]; let seed = PdaSeed::new([2; 32]); let npk = NullifierPublicKey([3; 32]); - let private_id = AccountId::for_private_pda(&program_id, &seed, &npk); + let private_id = AccountId::for_private_pda(&program_id, &seed, &npk, u128::MAX); let public_id = AccountId::for_public_pda(&program_id, &seed); assert_ne!(private_id, public_id); } - // ---- compute_public_authorized_pdas tests ---- + #[cfg(feature = "host")] + #[test] + fn private_account_kind_header_round_trips() { + let regular = PrivateAccountKind::Regular(42); + let pda = PrivateAccountKind::Pda { + program_id: [1_u32; 8], + seed: PdaSeed::new([2_u8; 32]), + identifier: u128::MAX, + }; + assert_eq!( + PrivateAccountKind::from_header_bytes(®ular.to_header_bytes()), + Some(regular) + ); + assert_eq!( + PrivateAccountKind::from_header_bytes(&pda.to_header_bytes()), + Some(pda) + ); + } + + #[cfg(feature = "host")] + #[test] + fn private_account_kind_unknown_discriminant_returns_none() { + let mut bytes = [0_u8; PrivateAccountKind::HEADER_LEN]; + bytes[0] = 0xFF; + assert_eq!(PrivateAccountKind::from_header_bytes(&bytes), None); + } + + #[test] + fn for_private_account_dispatches_correctly() { + let program_id: ProgramId = [1; 8]; + let seed = PdaSeed::new([2; 32]); + let npk = NullifierPublicKey([3; 32]); + let identifier: Identifier = 77; + + assert_eq!( + 
AccountId::for_private_account(&npk, &PrivateAccountKind::Regular(identifier)), + AccountId::for_regular_private_account(&npk, identifier), + ); + assert_eq!( + AccountId::for_private_account( + &npk, + &PrivateAccountKind::Pda { + program_id, + seed, + identifier + } + ), + AccountId::for_private_pda(&program_id, &seed, &npk, identifier), + ); + } - /// `compute_public_authorized_pdas` returns the public PDA addresses for the caller's seeds. #[test] fn compute_public_authorized_pdas_with_seeds() { let caller: ProgramId = [1; 8]; diff --git a/nssa/src/encoding/public_transaction.rs b/nssa/src/encoding/public_transaction.rs index 043bd4f7..2549cf27 100644 --- a/nssa/src/encoding/public_transaction.rs +++ b/nssa/src/encoding/public_transaction.rs @@ -1,8 +1,7 @@ use crate::{PublicTransaction, error::NssaError, public_transaction::Message}; impl Message { - #[must_use] - pub fn to_bytes(&self) -> Vec { + pub(crate) fn to_bytes(&self) -> Vec { borsh::to_vec(&self).expect("Autoderived borsh serialization failure") } } diff --git a/nssa/src/error.rs b/nssa/src/error.rs index 565e02ba..65079d25 100644 --- a/nssa/src/error.rs +++ b/nssa/src/error.rs @@ -93,14 +93,8 @@ pub enum InvalidProgramBehaviorError { actual: Box, }, - #[error( - "Inconsistent authorization for account {account_id} : expected {expected_authorization}, actual {actual_authorization}" - )] - InconsistentAccountAuthorization { - account_id: AccountId, - expected_authorization: bool, - actual_authorization: bool, - }, + #[error("Unauthorized account marked as authorized")] + InvalidAccountAuthorization { account_id: AccountId }, #[error("Program ID mismatch: expected {expected:?}, actual {actual:?}")] MismatchedProgramId { diff --git a/nssa/src/lib.rs b/nssa/src/lib.rs index f4c3be9d..5998e803 100644 --- a/nssa/src/lib.rs +++ b/nssa/src/lib.rs @@ -4,7 +4,7 @@ )] pub use nssa_core::{ - SharedSecretKey, + GENESIS_BLOCK_ID, SharedSecretKey, account::{Account, AccountId, Data}, encryption::EphemeralPublicKey, 
program::ProgramId, @@ -18,7 +18,7 @@ pub use public_transaction::PublicTransaction; pub use signature::{PrivateKey, PublicKey, Signature}; pub use state::{ CLOCK_01_PROGRAM_ACCOUNT_ID, CLOCK_10_PROGRAM_ACCOUNT_ID, CLOCK_50_PROGRAM_ACCOUNT_ID, - CLOCK_PROGRAM_ACCOUNT_IDS, V03State, + CLOCK_PROGRAM_ACCOUNT_IDS, V03State, system_faucet_account_id, }; pub use validated_state_diff::ValidatedStateDiff; diff --git a/nssa/src/merkle_tree/mod.rs b/nssa/src/merkle_tree/mod.rs index 588f0f60..e439d092 100644 --- a/nssa/src/merkle_tree/mod.rs +++ b/nssa/src/merkle_tree/mod.rs @@ -17,6 +17,26 @@ pub struct MerkleTree { } impl MerkleTree { + pub fn with_capacity(capacity: usize) -> Self { + // Adjust capacity to ensure power of two + let capacity = capacity.next_power_of_two(); + let total_depth = usize::try_from(capacity.trailing_zeros()).expect("u32 fits in usize"); + + let nodes = default_values::DEFAULT_VALUES[..=total_depth] + .iter() + .rev() + .enumerate() + .flat_map(|(level, default_value)| std::iter::repeat_n(default_value, 1 << level)) + .copied() + .collect(); + + Self { + nodes, + capacity, + length: 0, + } + } + pub fn root(&self) -> Node { let root_index = self.root_index(); *self.get_node(root_index) @@ -49,26 +69,6 @@ impl MerkleTree { self.nodes[index] = node; } - pub fn with_capacity(capacity: usize) -> Self { - // Adjust capacity to ensure power of two - let capacity = capacity.next_power_of_two(); - let total_depth = usize::try_from(capacity.trailing_zeros()).expect("u32 fits in usize"); - - let nodes = default_values::DEFAULT_VALUES[..=total_depth] - .iter() - .rev() - .enumerate() - .flat_map(|(level, default_value)| std::iter::repeat_n(default_value, 1 << level)) - .copied() - .collect(); - - Self { - nodes, - capacity, - length: 0, - } - } - /// Reallocates storage of Merkle tree for double capacity. /// The current tree is embedded into the new tree as a subtree. 
fn reallocate_to_double_capacity(&mut self) { diff --git a/nssa/src/privacy_preserving_transaction/circuit.rs b/nssa/src/privacy_preserving_transaction/circuit.rs index f5bd8cea..915c8d3e 100644 --- a/nssa/src/privacy_preserving_transaction/circuit.rs +++ b/nssa/src/privacy_preserving_transaction/circuit.rs @@ -2,8 +2,7 @@ use std::collections::{HashMap, VecDeque}; use borsh::{BorshDeserialize, BorshSerialize}; use nssa_core::{ - Identifier, MembershipProof, NullifierPublicKey, NullifierSecretKey, - PrivacyPreservingCircuitInput, PrivacyPreservingCircuitOutput, SharedSecretKey, + InputAccountIdentity, PrivacyPreservingCircuitInput, PrivacyPreservingCircuitOutput, account::AccountWithMetadata, program::{ChainedCall, InstructionData, ProgramId, ProgramOutput}, }; @@ -63,14 +62,10 @@ impl From for ProgramWithDependencies { /// Generates a proof of the execution of a NSSA program inside the privacy preserving execution /// circuit. -/// TODO: too many parameters. pub fn execute_and_prove( pre_states: Vec, instruction_data: InstructionData, - visibility_mask: Vec, - private_account_keys: Vec<(NullifierPublicKey, Identifier, SharedSecretKey)>, - private_account_nsks: Vec, - private_account_membership_proofs: Vec>, + account_identities: Vec, program_with_dependencies: &ProgramWithDependencies, ) -> Result<(PrivacyPreservingCircuitOutput, Proof), NssaError> { let ProgramWithDependencies { @@ -128,10 +123,7 @@ pub fn execute_and_prove( let circuit_input = PrivacyPreservingCircuitInput { program_outputs, - visibility_mask, - private_account_keys, - private_account_nsks, - private_account_membership_proofs, + account_identities, program_id: program_with_dependencies.program.id(), }; @@ -184,8 +176,10 @@ mod tests { #![expect(clippy::shadow_unrelated, reason = "We don't care about it in tests")] use nssa_core::{ - Commitment, DUMMY_COMMITMENT_HASH, EncryptionScheme, Nullifier, SharedSecretKey, + Commitment, DUMMY_COMMITMENT_HASH, EncryptionScheme, Nullifier, + 
PrivacyPreservingCircuitOutput, SharedSecretKey, account::{Account, AccountId, AccountWithMetadata, Nonce, data::Data}, + program::{PdaSeed, PrivateAccountKind}, }; use super::*; @@ -199,6 +193,21 @@ mod tests { }, }; + fn decrypt_kind( + output: &PrivacyPreservingCircuitOutput, + ssk: &SharedSecretKey, + idx: usize, + ) -> PrivateAccountKind { + let (kind, _) = EncryptionScheme::decrypt( + &output.ciphertexts[idx], + ssk, + &output.new_commitments[idx], + u32::try_from(idx).expect("idx fits in u32"), + ) + .unwrap(); + kind + } + #[test] fn prove_privacy_preserving_execution_circuit_public_and_private_pre_accounts() { let recipient_keys = test_private_account_keys_1(); @@ -213,7 +222,7 @@ mod tests { AccountId::new([0; 32]), ); - let recipient_account_id = AccountId::from((&recipient_keys.npk(), 0)); + let recipient_account_id = AccountId::for_regular_private_account(&recipient_keys.npk(), 0); let recipient = AccountWithMetadata::new(Account::default(), false, recipient_account_id); let balance_to_move: u128 = 37; @@ -235,15 +244,22 @@ mod tests { let expected_sender_pre = sender.clone(); let esk = [3; 32]; - let shared_secret = SharedSecretKey::new(&esk, &recipient_keys.vpk()); + let shared_secret = SharedSecretKey::new(esk, &recipient_keys.vpk()); let (output, proof) = execute_and_prove( vec![sender, recipient], - Program::serialize_instruction(balance_to_move).unwrap(), - vec![0, 2], - vec![(recipient_keys.npk(), 0, shared_secret)], - vec![], - vec![None], + Program::serialize_instruction(authenticated_transfer_core::Instruction::Transfer { + amount: balance_to_move, + }) + .unwrap(), + vec![ + InputAccountIdentity::Public, + InputAccountIdentity::PrivateUnauthorized { + npk: recipient_keys.npk(), + ssk: shared_secret, + identifier: 0, + }, + ], &Program::authenticated_transfer_program().into(), ) .unwrap(); @@ -283,12 +299,12 @@ mod tests { data: Data::default(), }, true, - AccountId::from((&sender_keys.npk(), 0)), + 
AccountId::for_regular_private_account(&sender_keys.npk(), 0), ); - let sender_account_id = AccountId::from((&sender_keys.npk(), 0)); + let sender_account_id = AccountId::for_regular_private_account(&sender_keys.npk(), 0); let commitment_sender = Commitment::new(&sender_account_id, &sender_pre.account); - let recipient_account_id = AccountId::from((&recipient_keys.npk(), 0)); + let recipient_account_id = AccountId::for_regular_private_account(&recipient_keys.npk(), 0); let recipient = AccountWithMetadata::new(Account::default(), false, recipient_account_id); let balance_to_move: u128 = 37; @@ -325,21 +341,32 @@ mod tests { ]; let esk_1 = [3; 32]; - let shared_secret_1 = SharedSecretKey::new(&esk_1, &sender_keys.vpk()); + let shared_secret_1 = SharedSecretKey::new(esk_1, &sender_keys.vpk()); let esk_2 = [5; 32]; - let shared_secret_2 = SharedSecretKey::new(&esk_2, &recipient_keys.vpk()); + let shared_secret_2 = SharedSecretKey::new(esk_2, &recipient_keys.vpk()); let (output, proof) = execute_and_prove( vec![sender_pre, recipient], - Program::serialize_instruction(balance_to_move).unwrap(), - vec![1, 2], + Program::serialize_instruction(authenticated_transfer_core::Instruction::Transfer { + amount: balance_to_move, + }) + .unwrap(), vec![ - (sender_keys.npk(), 0, shared_secret_1), - (recipient_keys.npk(), 0, shared_secret_2), + InputAccountIdentity::PrivateAuthorizedUpdate { + ssk: shared_secret_1, + nsk: sender_keys.nsk, + membership_proof: commitment_set + .get_proof_for(&commitment_sender) + .expect("sender's commitment must be in the set"), + identifier: 0, + }, + InputAccountIdentity::PrivateUnauthorized { + npk: recipient_keys.npk(), + ssk: shared_secret_2, + identifier: 0, + }, ], - vec![sender_keys.nsk], - vec![commitment_set.get_proof_for(&commitment_sender), None], &program.into(), ) .unwrap(); @@ -376,7 +403,7 @@ mod tests { let pre = AccountWithMetadata::new( Account::default(), false, - AccountId::from((&account_keys.npk(), 0)), + 
AccountId::for_regular_private_account(&account_keys.npk(), 0), ); let validity_window_chain_caller = Program::validity_window_chain_caller(); @@ -392,7 +419,7 @@ mod tests { .unwrap(); let esk = [3; 32]; - let shared_secret = SharedSecretKey::new(&esk, &account_keys.vpk()); + let shared_secret = SharedSecretKey::new(esk, &account_keys.vpk()); let program_with_deps = ProgramWithDependencies::new( validity_window_chain_caller, @@ -402,10 +429,442 @@ mod tests { let result = execute_and_prove( vec![pre], instruction, - vec![2], - vec![(account_keys.npk(), 0, shared_secret)], - vec![], - vec![None], + vec![InputAccountIdentity::PrivateUnauthorized { + npk: account_keys.npk(), + ssk: shared_secret, + identifier: 0, + }], + &program_with_deps, + ); + + assert!(matches!(result, Err(NssaError::CircuitProvingError(_)))); + } + + /// A private PDA claimed with a non-default identifier produces a ciphertext that decrypts + /// to `PrivateAccountKind::Pda` carrying the correct `(program_id, seed, identifier)`. + #[test] + fn private_pda_claim_with_custom_identifier_encrypts_correct_kind() { + let program = Program::pda_claimer(); + let keys = test_private_account_keys_1(); + let npk = keys.npk(); + let seed = PdaSeed::new([42; 32]); + let identifier: u128 = 99; + let shared_secret = SharedSecretKey::new([55; 32], &keys.vpk()); + + let account_id = AccountId::for_private_pda(&program.id(), &seed, &npk, identifier); + let pre_state = AccountWithMetadata::new(Account::default(), false, account_id); + + let (output, _proof) = execute_and_prove( + vec![pre_state], + Program::serialize_instruction(seed).unwrap(), + vec![InputAccountIdentity::PrivatePdaInit { + npk, + ssk: shared_secret, + identifier, + }], + &program.clone().into(), + ) + .unwrap(); + + assert_eq!( + decrypt_kind(&output, &shared_secret, 0), + PrivateAccountKind::Pda { + program_id: program.id(), + seed, + identifier + }, + ); + } + + /// PDA init: initializes a new PDA under `authenticated_transfer`'s ownership. 
+ /// The `auth_transfer_proxy` program chains to `authenticated_transfer` with `pda_seeds` + /// to establish authorization and the private PDA binding. + #[test] + fn private_pda_init() { + let program = Program::auth_transfer_proxy(); + let auth_transfer = Program::authenticated_transfer_program(); + let keys = test_private_account_keys_1(); + let npk = keys.npk(); + let seed = PdaSeed::new([42; 32]); + let shared_secret_pda = SharedSecretKey::new([55; 32], &keys.vpk()); + + // PDA (new, mask 3) + let pda_id = AccountId::for_private_pda(&program.id(), &seed, &npk, 0); + let pda_pre = AccountWithMetadata::new(Account::default(), false, pda_id); + + let auth_id = auth_transfer.id(); + let program_with_deps = + ProgramWithDependencies::new(program, [(auth_id, auth_transfer)].into()); + + // is_withdraw=false triggers init path (1 pre-state) + let instruction = Program::serialize_instruction((seed, auth_id, 0_u128, false)).unwrap(); + + let result = execute_and_prove( + vec![pda_pre], + instruction, + vec![InputAccountIdentity::PrivatePdaInit { + npk, + ssk: shared_secret_pda, + identifier: 0, + }], + &program_with_deps, + ); + + let (output, _proof) = result.expect("PDA init should succeed"); + assert_eq!(output.new_commitments.len(), 1); + } + + /// PDA withdraw: chains to `authenticated_transfer` to move balance from PDA to recipient. + /// Uses a default PDA (amount=0) because testing with a pre-funded PDA requires a + /// two-tx sequence with membership proofs. 
+ #[test] + fn private_pda_withdraw() { + let program = Program::auth_transfer_proxy(); + let auth_transfer = Program::authenticated_transfer_program(); + let keys = test_private_account_keys_1(); + let npk = keys.npk(); + let seed = PdaSeed::new([42; 32]); + let shared_secret_pda = SharedSecretKey::new([55; 32], &keys.vpk()); + + // PDA (new, private PDA) + let pda_id = AccountId::for_private_pda(&program.id(), &seed, &npk, 0); + let pda_pre = AccountWithMetadata::new(Account::default(), false, pda_id); + + // Recipient (public) + let recipient_id = AccountId::new([88; 32]); + let recipient_pre = AccountWithMetadata::new( + Account { + program_owner: auth_transfer.id(), + balance: 10000, + ..Account::default() + }, + true, + recipient_id, + ); + + let auth_id = auth_transfer.id(); + let program_with_deps = + ProgramWithDependencies::new(program, [(auth_id, auth_transfer)].into()); + + // is_withdraw=true, amount=0 (PDA has no balance yet) + let instruction = Program::serialize_instruction((seed, auth_id, 0_u128, true)).unwrap(); + + let result = execute_and_prove( + vec![pda_pre, recipient_pre], + instruction, + vec![ + InputAccountIdentity::PrivatePdaInit { + npk, + ssk: shared_secret_pda, + identifier: 0, + }, + InputAccountIdentity::Public, + ], + &program_with_deps, + ); + + let (output, _proof) = result.expect("PDA withdraw should succeed"); + assert_eq!(output.new_commitments.len(), 1); + } + + /// Shared regular private account: receives funds via `authenticated_transfer` directly, + /// no custom program needed. This demonstrates the non-PDA shared account flow where + /// keys are derived from GMS via `derive_keys_for_shared_account`. The shared account + /// uses the standard unauthorized private account path and works with auth-transfer's + /// transfer path like any other private account. 
+ #[test] + fn shared_account_receives_via_auth_transfer() { + let program = Program::authenticated_transfer_program(); + let shared_keys = test_private_account_keys_1(); + let shared_npk = shared_keys.npk(); + let shared_identifier: u128 = 42; + let shared_secret = SharedSecretKey::new([55; 32], &shared_keys.vpk()); + + // Sender: public account with balance, owned by auth-transfer + let sender_id = AccountId::new([99; 32]); + let sender = AccountWithMetadata::new( + Account { + program_owner: program.id(), + balance: 1000, + ..Account::default() + }, + true, + sender_id, + ); + + // Recipient: shared private account (new, unauthorized) + let shared_account_id = AccountId::from((&shared_npk, shared_identifier)); + let recipient = AccountWithMetadata::new(Account::default(), false, shared_account_id); + + let balance_to_move: u128 = 100; + let instruction = + Program::serialize_instruction(authenticated_transfer_core::Instruction::Transfer { + amount: balance_to_move, + }) + .unwrap(); + + let result = execute_and_prove( + vec![sender, recipient], + instruction, + vec![ + InputAccountIdentity::Public, + InputAccountIdentity::PrivateUnauthorized { + npk: shared_npk, + ssk: shared_secret, + identifier: shared_identifier, + }, + ], + &program.into(), + ); + + let (output, _proof) = result.expect("shared account receive should succeed"); + // Sender is public (no commitment), recipient is private (1 commitment) + assert_eq!(output.new_commitments.len(), 1); + } + + /// `PrivateAuthorizedInit` with a non-default identifier produces a ciphertext that decrypts + /// to `PrivateAccountKind::Regular` carrying the correct identifier. 
+ #[test] + fn private_authorized_init_encrypts_regular_kind_with_identifier() { + let program = Program::authenticated_transfer_program(); + let keys = test_private_account_keys_1(); + let identifier: u128 = 99; + let ssk = SharedSecretKey::new([55; 32], &keys.vpk()); + let account_id = AccountId::for_regular_private_account(&keys.npk(), identifier); + let pre = AccountWithMetadata::new(Account::default(), true, account_id); + + let (output, _) = execute_and_prove( + vec![pre], + Program::serialize_instruction(authenticated_transfer_core::Instruction::Initialize) + .unwrap(), + vec![InputAccountIdentity::PrivateAuthorizedInit { + ssk, + nsk: keys.nsk, + identifier, + }], + &program.into(), + ) + .unwrap(); + + assert_eq!( + decrypt_kind(&output, &ssk, 0), + PrivateAccountKind::Regular(identifier) + ); + } + + /// `PrivateUnauthorized` with a non-default identifier produces a ciphertext that decrypts + /// to `PrivateAccountKind::Regular` carrying the correct identifier. + #[test] + fn private_unauthorized_init_encrypts_regular_kind_with_identifier() { + let program = Program::authenticated_transfer_program(); + let keys = test_private_account_keys_1(); + let identifier: u128 = 99; + let ssk = SharedSecretKey::new([55; 32], &keys.vpk()); + + let sender = AccountWithMetadata::new( + Account { + program_owner: program.id(), + balance: 1, + ..Account::default() + }, + true, + AccountId::new([0; 32]), + ); + let recipient_id = AccountId::for_regular_private_account(&keys.npk(), identifier); + let recipient = AccountWithMetadata::new(Account::default(), false, recipient_id); + + let (output, _) = execute_and_prove( + vec![sender, recipient], + Program::serialize_instruction(authenticated_transfer_core::Instruction::Transfer { + amount: 1, + }) + .unwrap(), + vec![ + InputAccountIdentity::Public, + InputAccountIdentity::PrivateUnauthorized { + npk: keys.npk(), + ssk, + identifier, + }, + ], + &program.into(), + ) + .unwrap(); + + assert_eq!( + decrypt_kind(&output, &ssk, 
0), + PrivateAccountKind::Regular(identifier) + ); + } + + /// `PrivateAuthorizedUpdate` with a non-default identifier produces a ciphertext that decrypts + /// to `PrivateAccountKind::Regular` carrying the correct identifier. + #[test] + fn private_authorized_update_encrypts_regular_kind_with_identifier() { + let program = Program::authenticated_transfer_program(); + let keys = test_private_account_keys_1(); + let identifier: u128 = 99; + let ssk = SharedSecretKey::new([55; 32], &keys.vpk()); + let account_id = AccountId::for_regular_private_account(&keys.npk(), identifier); + let account = Account { + program_owner: program.id(), + balance: 1, + ..Account::default() + }; + let commitment = Commitment::new(&account_id, &account); + let mut commitment_set = CommitmentSet::with_capacity(1); + commitment_set.extend(std::slice::from_ref(&commitment)); + + let sender = AccountWithMetadata::new(account, true, account_id); + let recipient = AccountWithMetadata::new(Account::default(), true, AccountId::new([0; 32])); + + let (output, _) = execute_and_prove( + vec![sender, recipient], + Program::serialize_instruction(authenticated_transfer_core::Instruction::Transfer { + amount: 1, + }) + .unwrap(), + vec![ + InputAccountIdentity::PrivateAuthorizedUpdate { + ssk, + nsk: keys.nsk, + membership_proof: commitment_set.get_proof_for(&commitment).unwrap(), + identifier, + }, + InputAccountIdentity::Public, + ], + &program.into(), + ) + .unwrap(); + + assert_eq!( + decrypt_kind(&output, &ssk, 0), + PrivateAccountKind::Regular(identifier) + ); + } + + /// `PrivatePdaUpdate` with a non-default identifier produces a ciphertext that decrypts + /// to `PrivateAccountKind::Pda` carrying the correct `(program_id, seed, identifier)`. 
+ #[test] + fn private_pda_update_encrypts_pda_kind_with_identifier() { + let program = Program::pda_fund_spend_proxy(); + let auth_transfer = Program::authenticated_transfer_program(); + let keys = test_private_account_keys_1(); + let npk = keys.npk(); + let seed = PdaSeed::new([42; 32]); + let identifier: u128 = 99; + let ssk = SharedSecretKey::new([55; 32], &keys.vpk()); + + let auth_transfer_id = auth_transfer.id(); + let pda_id = AccountId::for_private_pda(&program.id(), &seed, &npk, identifier); + let pda_account = Account { + program_owner: auth_transfer_id, + balance: 1, + ..Account::default() + }; + let pda_commitment = Commitment::new(&pda_id, &pda_account); + let mut commitment_set = CommitmentSet::with_capacity(1); + commitment_set.extend(std::slice::from_ref(&pda_commitment)); + + let pda_pre = AccountWithMetadata::new(pda_account, true, pda_id); + let recipient_pre = + AccountWithMetadata::new(Account::default(), true, AccountId::new([0; 32])); + + let program_with_deps = ProgramWithDependencies::new( + program.clone(), + [(auth_transfer_id, auth_transfer)].into(), + ); + + let (output, _) = execute_and_prove( + vec![pda_pre, recipient_pre], + Program::serialize_instruction((seed, 1_u128, auth_transfer_id, false)).unwrap(), + vec![ + InputAccountIdentity::PrivatePdaUpdate { + ssk, + nsk: keys.nsk, + membership_proof: commitment_set.get_proof_for(&pda_commitment).unwrap(), + identifier, + }, + InputAccountIdentity::Public, + ], + &program_with_deps, + ) + .unwrap(); + + assert_eq!( + decrypt_kind(&output, &ssk, 0), + PrivateAccountKind::Pda { + program_id: program.id(), + seed, + identifier + }, + ); + } + + #[test] + fn private_pda_init_identifier_mismatch_fails() { + let program = Program::pda_claimer(); + let keys = test_private_account_keys_1(); + let npk = keys.npk(); + let seed = PdaSeed::new([42; 32]); + let shared_secret = SharedSecretKey::new([55; 32], &keys.vpk()); + + let account_id = AccountId::for_private_pda(&program.id(), &seed, &npk, 
5); + let pre_state = AccountWithMetadata::new(Account::default(), false, account_id); + + let result = execute_and_prove( + vec![pre_state], + Program::serialize_instruction(seed).unwrap(), + vec![InputAccountIdentity::PrivatePdaInit { + npk, + ssk: shared_secret, + identifier: 99, + }], + &program.into(), + ); + + assert!(matches!(result, Err(NssaError::CircuitProvingError(_)))); + } + + #[test] + fn private_pda_update_identifier_mismatch_fails() { + let program = Program::pda_fund_spend_proxy(); + let auth_transfer = Program::authenticated_transfer_program(); + let keys = test_private_account_keys_1(); + let npk = keys.npk(); + let seed = PdaSeed::new([42; 32]); + let ssk = SharedSecretKey::new([55; 32], &keys.vpk()); + + let auth_transfer_id = auth_transfer.id(); + let pda_id = AccountId::for_private_pda(&program.id(), &seed, &npk, 5); + let pda_account = Account { + program_owner: auth_transfer_id, + balance: 1, + ..Account::default() + }; + let pda_commitment = Commitment::new(&pda_id, &pda_account); + let mut commitment_set = CommitmentSet::with_capacity(1); + commitment_set.extend(std::slice::from_ref(&pda_commitment)); + + let pda_pre = AccountWithMetadata::new(pda_account, true, pda_id); + let recipient_pre = + AccountWithMetadata::new(Account::default(), true, AccountId::new([0; 32])); + + let program_with_deps = + ProgramWithDependencies::new(program, [(auth_transfer_id, auth_transfer)].into()); + + let result = execute_and_prove( + vec![pda_pre, recipient_pre], + Program::serialize_instruction((seed, 1_u128, auth_transfer_id, false)).unwrap(), + vec![ + InputAccountIdentity::PrivatePdaUpdate { + ssk, + nsk: keys.nsk, + membership_proof: commitment_set.get_proof_for(&pda_commitment).unwrap(), + identifier: 99, + }, + InputAccountIdentity::Public, + ], &program_with_deps, ); diff --git a/nssa/src/privacy_preserving_transaction/message.rs b/nssa/src/privacy_preserving_transaction/message.rs index 01e6e04f..3a968bfb 100644 --- 
a/nssa/src/privacy_preserving_transaction/message.rs +++ b/nssa/src/privacy_preserving_transaction/message.rs @@ -122,15 +122,16 @@ impl Message { } #[must_use] - pub fn hash_message(&self) -> [u8; 32] { + pub fn hash(&self) -> [u8; 32] { + let msg = self.to_bytes(); let mut bytes = Vec::with_capacity( PREFIX .len() - .checked_add(self.to_bytes().len()) + .checked_add(msg.len()) .expect("length overflow"), ); bytes.extend_from_slice(PREFIX); - bytes.extend_from_slice(&self.to_bytes()); + bytes.extend_from_slice(&msg); Sha256::digest(bytes).into() } @@ -139,7 +140,8 @@ impl Message { #[cfg(test)] pub mod tests { use nssa_core::{ - Commitment, EncryptionScheme, Nullifier, NullifierPublicKey, SharedSecretKey, + Commitment, EncryptionScheme, Nullifier, NullifierPublicKey, PrivateAccountKind, + SharedSecretKey, account::{Account, AccountId, Nonce}, encryption::{EphemeralPublicKey, ViewingPublicKey}, program::{BlockValidityWindow, TimestampValidityWindow}, @@ -167,10 +169,10 @@ pub mod tests { let encrypted_private_post_states = Vec::new(); - let account_id2 = nssa_core::account::AccountId::from((&npk2, 0)); + let account_id2 = nssa_core::account::AccountId::for_regular_private_account(&npk2, 0); let new_commitments = vec![Commitment::new(&account_id2, &account2)]; - let account_id1 = nssa_core::account::AccountId::from((&npk1, 0)); + let account_id1 = nssa_core::account::AccountId::for_regular_private_account(&npk1, 0); let old_commitment = Commitment::new(&account_id1, &account1); let new_nullifiers = vec![( Nullifier::for_account_update(&old_commitment, &nsk1), @@ -190,7 +192,7 @@ pub mod tests { } #[test] - fn hash_message_privacy_pinned() { + fn hash_privacy_pinned() { let msg = Message { public_account_ids: vec![AccountId::new([42_u8; 32])], nonces: vec![Nonce(5)], @@ -206,7 +208,7 @@ pub mod tests { let nonces_bytes: &[u8] = &[1, 0, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]; // all remaining vec fields are empty: u32 len=0 let empty_vec_bytes: &[u8] = 
&[0_u8; 4]; - // validity windows: unbounded = {from: None (0_u8), to: None (0_u8)} + // validity windows: unbounded = {from: None (0u8), to: None (0u8)} let unbounded_window_bytes: &[u8] = &[0_u8; 2]; let expected_borsh_vec: Vec = [ @@ -226,7 +228,7 @@ pub mod tests { assert_eq!( borsh::to_vec(&msg).unwrap(), expected_borsh, - "`privacy_preserving_transaction::hash_message()`: expected borsh order has changed" + "`privacy_preserving_transaction::hash()`: expected borsh order has changed" ); let mut preimage = Vec::with_capacity(PREFIX.len() + expected_borsh.len()); @@ -235,9 +237,9 @@ pub mod tests { let expected_hash: [u8; 32] = Sha256::digest(&preimage).into(); assert_eq!( - msg.hash_message(), + msg.hash(), expected_hash, - "`privacy_preserving_transaction::hash_message()`: serialization has changed" + "`privacy_preserving_transaction::hash()`: serialization has changed" ); } @@ -246,12 +248,18 @@ pub mod tests { let npk = NullifierPublicKey::from(&[1; 32]); let vpk = ViewingPublicKey::from_scalar([2; 32]); let account = Account::default(); - let account_id = nssa_core::account::AccountId::from((&npk, 0)); + let account_id = nssa_core::account::AccountId::for_regular_private_account(&npk, 0); let commitment = Commitment::new(&account_id, &account); let esk = [3; 32]; - let shared_secret = SharedSecretKey::new(&esk, &vpk); + let shared_secret = SharedSecretKey::new(esk, &vpk); let epk = EphemeralPublicKey::from_scalar(esk); - let ciphertext = EncryptionScheme::encrypt(&account, 0, &shared_secret, &commitment, 2); + let ciphertext = EncryptionScheme::encrypt( + &account, + &PrivateAccountKind::Regular(0), + &shared_secret, + &commitment, + 2, + ); let encrypted_account_data = EncryptedAccountData::new(ciphertext.clone(), &npk, &vpk, epk.clone()); diff --git a/nssa/src/privacy_preserving_transaction/witness_set.rs b/nssa/src/privacy_preserving_transaction/witness_set.rs index 43a36671..e17df90c 100644 --- a/nssa/src/privacy_preserving_transaction/witness_set.rs 
+++ b/nssa/src/privacy_preserving_transaction/witness_set.rs @@ -13,9 +13,8 @@ pub struct WitnessSet { impl WitnessSet { #[must_use] - // TODO: swap for Keycard signing path. pub fn for_message(message: &Message, proof: Proof, private_keys: &[&PrivateKey]) -> Self { - let message_hash = message.hash_message(); + let message_hash = message.hash(); let signatures_and_public_keys = private_keys .iter() .map(|&key| { @@ -31,25 +30,9 @@ impl WitnessSet { } } - #[must_use] - pub fn from_list(proof: Proof, signatures: &[Signature], public_keys: &[PublicKey]) -> Self { - assert_eq!(signatures.len(), public_keys.len()); - - let signatures_and_public_keys = signatures - .iter() - .zip(public_keys.iter()) - .map(|(sig, key)| (sig.clone(), key.clone())) - .collect(); - - Self { - signatures_and_public_keys, - proof, - } - } - #[must_use] pub fn signatures_are_valid_for(&self, message: &Message) -> bool { - let message_hash = message.hash_message(); + let message_hash = message.hash(); for (signature, public_key) in self.signatures_and_public_keys() { if !signature.is_valid_for(&message_hash, public_key) { return false; diff --git a/nssa/src/program.rs b/nssa/src/program.rs index b8c3fe77..1aff3bc9 100644 --- a/nssa/src/program.rs +++ b/nssa/src/program.rs @@ -10,8 +10,8 @@ use crate::{ error::NssaError, program_methods::{ AMM_ELF, AMM_ID, ASSOCIATED_TOKEN_ACCOUNT_ELF, ASSOCIATED_TOKEN_ACCOUNT_ID, - AUTHENTICATED_TRANSFER_ELF, AUTHENTICATED_TRANSFER_ID, CLOCK_ELF, CLOCK_ID, PINATA_ELF, - PINATA_ID, TOKEN_ELF, TOKEN_ID, + AUTHENTICATED_TRANSFER_ELF, AUTHENTICATED_TRANSFER_ID, CLOCK_ELF, CLOCK_ID, FAUCET_ELF, + FAUCET_ID, PINATA_ELF, PINATA_ID, TOKEN_ELF, TOKEN_ID, VAULT_ELF, VAULT_ID, }, }; @@ -148,6 +148,22 @@ impl Program { elf: ASSOCIATED_TOKEN_ACCOUNT_ELF.to_vec(), } } + + #[must_use] + pub fn vault() -> Self { + Self { + id: VAULT_ID, + elf: VAULT_ELF.to_vec(), + } + } + + #[must_use] + pub fn faucet() -> Self { + Self { + id: FAUCET_ID, + elf: FAUCET_ELF.to_vec(), + } + } 
} // TODO: Testnet only. Refactor to prevent compilation on mainnet. @@ -178,8 +194,9 @@ mod tests { program::Program, program_methods::{ AMM_ELF, AMM_ID, ASSOCIATED_TOKEN_ACCOUNT_ELF, ASSOCIATED_TOKEN_ACCOUNT_ID, - AUTHENTICATED_TRANSFER_ELF, AUTHENTICATED_TRANSFER_ID, CLOCK_ELF, CLOCK_ID, PINATA_ELF, - PINATA_ID, PINATA_TOKEN_ELF, PINATA_TOKEN_ID, TOKEN_ELF, TOKEN_ID, + AUTHENTICATED_TRANSFER_ELF, AUTHENTICATED_TRANSFER_ID, CLOCK_ELF, CLOCK_ID, FAUCET_ELF, + FAUCET_ID, PINATA_ELF, PINATA_ID, PINATA_TOKEN_ELF, PINATA_TOKEN_ID, TOKEN_ELF, + TOKEN_ID, VAULT_ELF, VAULT_ID, }, }; @@ -312,6 +329,16 @@ mod tests { } } + #[must_use] + pub fn auth_transfer_proxy() -> Self { + use test_program_methods::{AUTH_TRANSFER_PROXY_ELF, AUTH_TRANSFER_PROXY_ID}; + + Self { + id: AUTH_TRANSFER_PROXY_ID, + elf: AUTH_TRANSFER_PROXY_ELF.to_vec(), + } + } + #[must_use] pub fn two_pda_claimer() -> Self { use test_program_methods::{TWO_PDA_CLAIMER_ELF, TWO_PDA_CLAIMER_ID}; @@ -322,6 +349,16 @@ mod tests { } } + #[must_use] + pub fn pda_fund_spend_proxy() -> Self { + use test_program_methods::{PDA_FUND_SPEND_PROXY_ELF, PDA_FUND_SPEND_PROXY_ID}; + + Self { + id: PDA_FUND_SPEND_PROXY_ID, + elf: PDA_FUND_SPEND_PROXY_ELF.to_vec(), + } + } + #[must_use] pub fn changer_claimer() -> Self { use test_program_methods::{CHANGER_CLAIMER_ELF, CHANGER_CLAIMER_ID}; @@ -472,12 +509,18 @@ mod tests { fn builtin_programs() { let auth_transfer_program = Program::authenticated_transfer_program(); let token_program = Program::token(); + let vault_program = Program::vault(); + let faucet_program = Program::faucet(); let pinata_program = Program::pinata(); assert_eq!(auth_transfer_program.id, AUTHENTICATED_TRANSFER_ID); assert_eq!(auth_transfer_program.elf, AUTHENTICATED_TRANSFER_ELF); assert_eq!(token_program.id, TOKEN_ID); assert_eq!(token_program.elf, TOKEN_ELF); + assert_eq!(vault_program.id, VAULT_ID); + assert_eq!(vault_program.elf, VAULT_ELF); + assert_eq!(faucet_program.id, FAUCET_ID); + 
assert_eq!(faucet_program.elf, FAUCET_ELF); assert_eq!(pinata_program.id, PINATA_ID); assert_eq!(pinata_program.elf, PINATA_ELF); } @@ -489,9 +532,11 @@ mod tests { (AUTHENTICATED_TRANSFER_ELF, AUTHENTICATED_TRANSFER_ID), (ASSOCIATED_TOKEN_ACCOUNT_ELF, ASSOCIATED_TOKEN_ACCOUNT_ID), (CLOCK_ELF, CLOCK_ID), + (FAUCET_ELF, FAUCET_ID), (PINATA_ELF, PINATA_ID), (PINATA_TOKEN_ELF, PINATA_TOKEN_ID), (TOKEN_ELF, TOKEN_ID), + (VAULT_ELF, VAULT_ID), ]; for (elf, expected_id) in cases { let program = Program::new(elf.to_vec()).unwrap(); diff --git a/nssa/src/public_transaction/message.rs b/nssa/src/public_transaction/message.rs index f71fb372..3ab7d74c 100644 --- a/nssa/src/public_transaction/message.rs +++ b/nssa/src/public_transaction/message.rs @@ -68,7 +68,7 @@ impl Message { } #[must_use] - pub fn hash_message(&self) -> [u8; 32] { + pub fn hash(&self) -> [u8; 32] { let mut bytes = Vec::with_capacity( PREFIX .len() @@ -90,7 +90,7 @@ mod tests { use super::{Message, PREFIX}; #[test] - fn hash_message_public_pinned() { + fn hash_public_pinned() { let msg = Message::new_preserialized( [1_u32; 8], vec![AccountId::new([42_u8; 32])], @@ -122,7 +122,7 @@ mod tests { assert_eq!( borsh::to_vec(&msg).unwrap(), expected_borsh, - "`public_transaction::hash_message()`: expected borsh order has changed" + "`public_transaction::hash()`: expected borsh order has changed" ); let mut preimage = Vec::with_capacity(PREFIX.len() + expected_borsh.len()); @@ -131,9 +131,9 @@ mod tests { let expected_hash: [u8; 32] = Sha256::digest(&preimage).into(); assert_eq!( - msg.hash_message(), + msg.hash(), expected_hash, - "`public_transaction::hash_message()`: serialization has changed" + "`public_transaction::hash()`: serialization has changed" ); } } diff --git a/nssa/src/public_transaction/witness_set.rs b/nssa/src/public_transaction/witness_set.rs index 7a32c0ea..1605f488 100644 --- a/nssa/src/public_transaction/witness_set.rs +++ b/nssa/src/public_transaction/witness_set.rs @@ -1,6 +1,6 @@ use 
borsh::{BorshDeserialize, BorshSerialize}; -use crate::{PrivateKey, PublicKey, Signature, error::NssaError, public_transaction::Message}; +use crate::{PrivateKey, PublicKey, Signature, public_transaction::Message}; #[derive(Debug, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)] pub struct WitnessSet { @@ -8,40 +8,9 @@ pub struct WitnessSet { } impl WitnessSet { - pub fn from_list( - message: &Message, - signatures: &[Signature], - pub_keys: &[PublicKey], - ) -> Result { - if signatures.len() != pub_keys.len() { - return Err(NssaError::InvalidInput( - "`nssa::public_transaction::witness_set::from_list()`: mismatch in signature and public key counts".to_owned(), - )); - } - - let message_hash = message.hash_message(); - let signatures_and_public_keys = signatures - .iter() - .zip(pub_keys.iter()) - .map(|(sig, key)| { - if sig.is_valid_for(&message_hash, key) { - Ok((sig.clone(), key.clone())) - } else { - Err(NssaError::InvalidInput( - "`nssa::public_transaction::witness_set::from_list()`: signature does not correspond to public key".to_owned(), - )) - } - }) - .collect::>()?; - - Ok(Self { - signatures_and_public_keys, - }) - } - #[must_use] pub fn for_message(message: &Message, private_keys: &[&PrivateKey]) -> Self { - let message_hash = message.hash_message(); + let message_hash = message.hash(); let signatures_and_public_keys = private_keys .iter() .map(|&key| { @@ -58,7 +27,7 @@ impl WitnessSet { #[must_use] pub fn is_valid_for(&self, message: &Message) -> bool { - let message_hash = message.hash_message(); + let message_hash = message.hash(); for (signature, public_key) in self.signatures_and_public_keys() { if !signature.is_valid_for(&message_hash, public_key) { return false; @@ -90,73 +59,6 @@ mod tests { use super::*; use crate::AccountId; - #[test] - fn from_list_accepts_valid_pairs() { - let key1 = PrivateKey::try_new([42; 32]).unwrap(); - let key2 = PrivateKey::try_new([13; 32]).unwrap(); - let pubkey1 = PublicKey::new_from_private_key(&key1); - 
let pubkey2 = PublicKey::new_from_private_key(&key2); - let addr1 = AccountId::from(&pubkey1); - let addr2 = AccountId::from(&pubkey2); - let message = Message::try_new::>( - [1_u32; 8], - vec![addr1, addr2], - vec![1_u128.into(), 2_u128.into()], - vec![], - ) - .unwrap(); - - let WitnessSet { - signatures_and_public_keys, - } = WitnessSet::for_message(&message, &[&key1, &key2]); - let (sigs, keys): (Vec<_>, Vec<_>) = signatures_and_public_keys.into_iter().unzip(); - - assert!(WitnessSet::from_list(&message, &sigs, &keys).is_ok()); - } - - #[test] - fn from_list_rejects_mismatched_pairs() { - let key1 = PrivateKey::try_new([42; 32]).unwrap(); - let key2 = PrivateKey::try_new([13; 32]).unwrap(); - let pubkey1 = PublicKey::new_from_private_key(&key1); - let pubkey2 = PublicKey::new_from_private_key(&key2); - let addr1 = AccountId::from(&pubkey1); - let addr2 = AccountId::from(&pubkey2); - let message = Message::try_new::>( - [1_u32; 8], - vec![addr1, addr2], - vec![1_u128.into(), 2_u128.into()], - vec![], - ) - .unwrap(); - - let WitnessSet { - signatures_and_public_keys, - } = WitnessSet::for_message(&message, &[&key1, &key2]); - let (sigs, keys): (Vec<_>, Vec<_>) = signatures_and_public_keys.into_iter().unzip(); - - // Swapped keys should be rejected. 
- assert!( - WitnessSet::from_list(&message, &sigs, &[keys[1].clone(), keys[0].clone()]).is_err() - ); - } - - #[test] - fn from_list_rejects_length_mismatch() { - let key1 = PrivateKey::try_new([1_u8; 32]).unwrap(); - let pubkey1 = PublicKey::new_from_private_key(&key1); - let addr1 = AccountId::from(&pubkey1); - let message = - Message::try_new::>([0; 8], vec![addr1], vec![1_u128.into()], vec![]).unwrap(); - - let WitnessSet { - signatures_and_public_keys, - } = WitnessSet::for_message(&message, &[&key1]); - let (sigs, _keys): (Vec<_>, Vec<_>) = signatures_and_public_keys.into_iter().unzip(); - - assert!(WitnessSet::from_list(&message, &sigs, &[]).is_err()); - } - #[test] fn for_message_constructor() { let key1 = PrivateKey::try_new([1; 32]).unwrap(); @@ -173,7 +75,7 @@ mod tests { assert_eq!(witness_set.signatures_and_public_keys.len(), 2); - let message_bytes = message.hash_message(); + let message_bytes = message.hash(); for ((signature, public_key), expected_public_key) in witness_set .signatures_and_public_keys .into_iter() diff --git a/nssa/src/signature/mod.rs b/nssa/src/signature/mod.rs index 19daca2e..a46b1ff5 100644 --- a/nssa/src/signature/mod.rs +++ b/nssa/src/signature/mod.rs @@ -36,9 +36,9 @@ impl FromStr for Signature { } impl Signature { - #[must_use] /// This function expects the incoming message to be prehashed to be pre-2022 BIP-340/Keycard /// compatible. 
+ #[must_use] pub fn new(key: &PrivateKey, message: &[u8; 32]) -> Self { let mut aux_random = [0_u8; 32]; OsRng.fill_bytes(&mut aux_random); diff --git a/nssa/src/state.rs b/nssa/src/state.rs index ff16175c..e9f2058f 100644 --- a/nssa/src/state.rs +++ b/nssa/src/state.rs @@ -122,15 +122,36 @@ pub struct V03State { programs: HashMap, } +impl Default for V03State { + fn default() -> Self { + let faucet_account_id = system_faucet_account_id(); + let faucet_account = system_faucet_account(); + let mut public_state = HashMap::new(); + public_state.insert(faucet_account_id, faucet_account); + + Self { + public_state, + private_state: (CommitmentSet::with_capacity(32), NullifierSet::new()), + programs: HashMap::new(), + } + } +} + impl V03State { + #[must_use] + pub fn new() -> Self { + Self::default() + } + #[must_use] pub fn new_with_genesis_accounts( initial_data: &[(AccountId, u128)], initial_private_accounts: Vec<(Commitment, Nullifier)>, genesis_timestamp: nssa_core::Timestamp, ) -> Self { + let faucet_account_id = system_faucet_account_id(); let authenticated_transfer_program = Program::authenticated_transfer_program(); - let public_state = initial_data + let mut public_state: HashMap<_, _> = initial_data .iter() .copied() .map(|(account_id, balance)| { @@ -142,6 +163,8 @@ impl V03State { (account_id, account) }) .collect(); + let faucet_account = system_faucet_account(); + public_state.insert(faucet_account_id, faucet_account); let mut commitment_set = CommitmentSet::with_capacity(32); commitment_set.extend(&[DUMMY_COMMITMENT]); @@ -165,6 +188,8 @@ impl V03State { this.insert_program(Program::token()); this.insert_program(Program::amm()); this.insert_program(Program::ata()); + this.insert_program(Program::vault()); + this.insert_program(Program::faucet()); this } @@ -351,6 +376,19 @@ impl V03State { } } +fn system_faucet_account() -> Account { + Account { + program_owner: Program::authenticated_transfer_program().id(), + balance: u128::MAX, + ..Account::default() 
+ } +} + +#[must_use] +pub fn system_faucet_account_id() -> AccountId { + faucet_core::compute_faucet_account_id(Program::faucet().id()) +} + #[cfg(test)] pub mod tests { #![expect( @@ -361,9 +399,10 @@ pub mod tests { use std::collections::HashMap; + use authenticated_transfer_core::Instruction as AuthTransferInstruction; use nssa_core::{ - BlockId, Commitment, Nullifier, NullifierPublicKey, NullifierSecretKey, SharedSecretKey, - Timestamp, + BlockId, Commitment, InputAccountIdentity, Nullifier, NullifierPublicKey, + NullifierSecretKey, SharedSecretKey, Timestamp, account::{Account, AccountId, AccountWithMetadata, Nonce, data::Data}, encryption::{EphemeralPublicKey, Scalar, ViewingPublicKey}, program::{ @@ -387,8 +426,9 @@ pub mod tests { signature::PrivateKey, state::{ CLOCK_01_PROGRAM_ACCOUNT_ID, CLOCK_10_PROGRAM_ACCOUNT_ID, CLOCK_50_PROGRAM_ACCOUNT_ID, - CLOCK_PROGRAM_ACCOUNT_IDS, MAX_NUMBER_CHAINED_CALLS, + CLOCK_PROGRAM_ACCOUNT_IDS, MAX_NUMBER_CHAINED_CALLS, system_faucet_account, }, + system_faucet_account_id, }; impl V03State { @@ -459,7 +499,7 @@ pub mod tests { #[must_use] pub fn with_private_account(mut self, keys: &TestPrivateKeys, account: &Account) -> Self { - let account_id = AccountId::from((&keys.npk(), 0)); + let account_id = AccountId::for_regular_private_account(&keys.npk(), 0); let commitment = Commitment::new(&account_id, account); self.private_state.0.extend(&[commitment]); self @@ -525,8 +565,13 @@ pub mod tests { let account_ids = vec![from, to]; let nonces = vec![Nonce(from_nonce), Nonce(to_nonce)]; let program_id = Program::authenticated_transfer_program().id(); - let message = - public_transaction::Message::try_new(program_id, account_ids, nonces, balance).unwrap(); + let message = public_transaction::Message::try_new( + program_id, + account_ids, + nonces, + AuthTransferInstruction::Transfer { amount: balance }, + ) + .unwrap(); let witness_set = public_transaction::WitnessSet::for_message(&message, &[from_key, to_key]); 
PublicTransaction::new(message, witness_set) @@ -576,6 +621,7 @@ pub mod tests { ..Account::default() }, ); + this.insert(system_faucet_account_id(), system_faucet_account()); for account_id in CLOCK_PROGRAM_ACCOUNT_IDS { this.insert( account_id, @@ -598,6 +644,8 @@ pub mod tests { this.insert(Program::token().id(), Program::token()); this.insert(Program::amm().id(), Program::amm()); this.insert(Program::ata().id(), Program::ata()); + this.insert(Program::vault().id(), Program::vault()); + this.insert(Program::faucet().id(), Program::faucet()); this }; @@ -618,8 +666,8 @@ pub mod tests { ..Account::default() }; - let account_id1 = AccountId::from((&keys1.npk(), 0)); - let account_id2 = AccountId::from((&keys2.npk(), 0)); + let account_id1 = AccountId::for_regular_private_account(&keys1.npk(), 0); + let account_id2 = AccountId::for_regular_private_account(&keys2.npk(), 0); let init_commitment1 = Commitment::new(&account_id1, &account); let init_commitment2 = Commitment::new(&account_id2, &account); @@ -1206,7 +1254,7 @@ pub mod tests { let witness_set = public_transaction::WitnessSet::for_message(&message, &[]); let tx = PublicTransaction::new(message, witness_set); - let result = state.transition_from_public_transaction(&tx, 1, 0); + let result = state.transition_from_public_transaction(&tx, 2, 0); assert!(matches!( result, @@ -1240,7 +1288,7 @@ pub mod tests { .unwrap(); let witness_set = public_transaction::WitnessSet::for_message(&message, &[]); let tx = PublicTransaction::new(message, witness_set); - let result = state.transition_from_public_transaction(&tx, 1, 0); + let result = state.transition_from_public_transaction(&tx, 2, 0); assert!(matches!( result, @@ -1256,6 +1304,12 @@ pub mod tests { } } + fn test_public_account_keys_2() -> TestPublicKeys { + TestPublicKeys { + signing_key: PrivateKey::try_new([38; 32]).unwrap(), + } + } + pub fn test_private_account_keys_1() -> TestPrivateKeys { TestPrivateKeys { nsk: [13; 32], @@ -1288,16 +1342,23 @@ pub mod tests 
{ AccountWithMetadata::new(Account::default(), false, (&recipient_keys.npk(), 0)); let esk = [3; 32]; - let shared_secret = SharedSecretKey::new(&esk, &recipient_keys.vpk()); + let shared_secret = SharedSecretKey::new(esk, &recipient_keys.vpk()); let epk = EphemeralPublicKey::from_scalar(esk); let (output, proof) = circuit::execute_and_prove( vec![sender, recipient], - Program::serialize_instruction(balance_to_move).unwrap(), - vec![0, 2], - vec![(recipient_keys.npk(), 0, shared_secret)], - vec![], - vec![None], + Program::serialize_instruction(AuthTransferInstruction::Transfer { + amount: balance_to_move, + }) + .unwrap(), + vec![ + InputAccountIdentity::Public, + InputAccountIdentity::PrivateUnauthorized { + npk: recipient_keys.npk(), + ssk: shared_secret, + identifier: 0, + }, + ], &Program::authenticated_transfer_program().into(), ) .unwrap(); @@ -1322,7 +1383,7 @@ pub mod tests { state: &V03State, ) -> PrivacyPreservingTransaction { let program = Program::authenticated_transfer_program(); - let sender_account_id = AccountId::from((&sender_keys.npk(), 0)); + let sender_account_id = AccountId::for_regular_private_account(&sender_keys.npk(), 0); let sender_commitment = Commitment::new(&sender_account_id, sender_private_account); let sender_pre = AccountWithMetadata::new( sender_private_account.clone(), @@ -1333,23 +1394,34 @@ pub mod tests { AccountWithMetadata::new(Account::default(), false, (&recipient_keys.npk(), 0)); let esk_1 = [3; 32]; - let shared_secret_1 = SharedSecretKey::new(&esk_1, &sender_keys.vpk()); + let shared_secret_1 = SharedSecretKey::new(esk_1, &sender_keys.vpk()); let epk_1 = EphemeralPublicKey::from_scalar(esk_1); let esk_2 = [3; 32]; - let shared_secret_2 = SharedSecretKey::new(&esk_2, &recipient_keys.vpk()); + let shared_secret_2 = SharedSecretKey::new(esk_2, &recipient_keys.vpk()); let epk_2 = EphemeralPublicKey::from_scalar(esk_2); let (output, proof) = circuit::execute_and_prove( vec![sender_pre, recipient_pre], - 
Program::serialize_instruction(balance_to_move).unwrap(), - vec![1, 2], + Program::serialize_instruction(AuthTransferInstruction::Transfer { + amount: balance_to_move, + }) + .unwrap(), vec![ - (sender_keys.npk(), 0, shared_secret_1), - (recipient_keys.npk(), 0, shared_secret_2), + InputAccountIdentity::PrivateAuthorizedUpdate { + ssk: shared_secret_1, + nsk: sender_keys.nsk, + membership_proof: state + .get_proof_for_commitment(&sender_commitment) + .expect("sender's commitment must be in state"), + identifier: 0, + }, + InputAccountIdentity::PrivateUnauthorized { + npk: recipient_keys.npk(), + ssk: shared_secret_2, + identifier: 0, + }, ], - vec![sender_keys.nsk], - vec![state.get_proof_for_commitment(&sender_commitment), None], &program.into(), ) .unwrap(); @@ -1378,7 +1450,7 @@ pub mod tests { state: &V03State, ) -> PrivacyPreservingTransaction { let program = Program::authenticated_transfer_program(); - let sender_account_id = AccountId::from((&sender_keys.npk(), 0)); + let sender_account_id = AccountId::for_regular_private_account(&sender_keys.npk(), 0); let sender_commitment = Commitment::new(&sender_account_id, sender_private_account); let sender_pre = AccountWithMetadata::new( sender_private_account.clone(), @@ -1392,16 +1464,26 @@ pub mod tests { ); let esk = [3; 32]; - let shared_secret = SharedSecretKey::new(&esk, &sender_keys.vpk()); + let shared_secret = SharedSecretKey::new(esk, &sender_keys.vpk()); let epk = EphemeralPublicKey::from_scalar(esk); let (output, proof) = circuit::execute_and_prove( vec![sender_pre, recipient_pre], - Program::serialize_instruction(balance_to_move).unwrap(), - vec![1, 0], - vec![(sender_keys.npk(), 0, shared_secret)], - vec![sender_keys.nsk], - vec![state.get_proof_for_commitment(&sender_commitment)], + Program::serialize_instruction(AuthTransferInstruction::Transfer { + amount: balance_to_move, + }) + .unwrap(), + vec![ + InputAccountIdentity::PrivateAuthorizedUpdate { + ssk: shared_secret, + nsk: sender_keys.nsk, + 
membership_proof: state + .get_proof_for_commitment(&sender_commitment) + .expect("sender's commitment must be in state"), + identifier: 0, + }, + InputAccountIdentity::Public, + ], &program.into(), ) .unwrap(); @@ -1486,8 +1568,8 @@ pub mod tests { &state, ); - let sender_account_id = AccountId::from((&sender_keys.npk(), 0)); - let recipient_account_id = AccountId::from((&recipient_keys.npk(), 0)); + let sender_account_id = AccountId::for_regular_private_account(&sender_keys.npk(), 0); + let recipient_account_id = AccountId::for_regular_private_account(&recipient_keys.npk(), 0); let expected_new_commitment_1 = Commitment::new( &sender_account_id, &Account { @@ -1565,7 +1647,7 @@ pub mod tests { &state, ); - let sender_account_id = AccountId::from((&sender_keys.npk(), 0)); + let sender_account_id = AccountId::for_regular_private_account(&sender_keys.npk(), 0); let expected_new_commitment = Commitment::new( &sender_account_id, &Account { @@ -1615,10 +1697,7 @@ pub mod tests { let result = execute_and_prove( vec![public_account], Program::serialize_instruction(10_u128).unwrap(), - vec![0], - vec![], - vec![], - vec![], + vec![InputAccountIdentity::Public], &program.into(), ); @@ -1641,10 +1720,7 @@ pub mod tests { let result = execute_and_prove( vec![public_account], Program::serialize_instruction(10_u128).unwrap(), - vec![0], - vec![], - vec![], - vec![], + vec![InputAccountIdentity::Public], &program.into(), ); @@ -1667,10 +1743,7 @@ pub mod tests { let result = execute_and_prove( vec![public_account], Program::serialize_instruction(()).unwrap(), - vec![0], - vec![], - vec![], - vec![], + vec![InputAccountIdentity::Public], &program.into(), ); @@ -1693,10 +1766,7 @@ pub mod tests { let result = execute_and_prove( vec![public_account], Program::serialize_instruction(vec![0]).unwrap(), - vec![0], - vec![], - vec![], - vec![], + vec![InputAccountIdentity::Public], &program.into(), ); @@ -1727,10 +1797,7 @@ pub mod tests { let result = execute_and_prove( 
vec![public_account], Program::serialize_instruction(large_data).unwrap(), - vec![0], - vec![], - vec![], - vec![], + vec![InputAccountIdentity::Public], &program.into(), ); @@ -1753,10 +1820,7 @@ pub mod tests { let result = execute_and_prove( vec![public_account], Program::serialize_instruction(()).unwrap(), - vec![0], - vec![], - vec![], - vec![], + vec![InputAccountIdentity::Public], &program.into(), ); @@ -1788,10 +1852,7 @@ pub mod tests { let result = execute_and_prove( vec![public_account_1, public_account_2], Program::serialize_instruction(()).unwrap(), - vec![0, 0], - vec![], - vec![], - vec![], + vec![InputAccountIdentity::Public, InputAccountIdentity::Public], &program.into(), ); @@ -1814,10 +1875,7 @@ pub mod tests { let result = execute_and_prove( vec![public_account], Program::serialize_instruction(()).unwrap(), - vec![0], - vec![], - vec![], - vec![], + vec![InputAccountIdentity::Public], &program.into(), ); @@ -1849,10 +1907,7 @@ pub mod tests { let result = execute_and_prove( vec![public_account_1, public_account_2], Program::serialize_instruction(10_u128).unwrap(), - vec![0, 0], - vec![], - vec![], - vec![], + vec![InputAccountIdentity::Public, InputAccountIdentity::Public], &program.into(), ); @@ -1881,177 +1936,11 @@ pub mod tests { AccountId::new([1; 32]), ); - // Setting only one visibility mask for a circuit execution with two pre_state accounts. - let visibility_mask = [0]; + // Single account_identity entry for a circuit execution with two pre_state accounts. 
let result = execute_and_prove( vec![public_account_1, public_account_2], Program::serialize_instruction(10_u128).unwrap(), - visibility_mask.to_vec(), - vec![], - vec![], - vec![], - &program.into(), - ); - - assert!(matches!(result, Err(NssaError::CircuitProvingError(_)))); - } - - #[test] - fn circuit_fails_if_insufficient_nonces_are_provided() { - let program = Program::simple_balance_transfer(); - let sender_keys = test_private_account_keys_1(); - let recipient_keys = test_private_account_keys_2(); - let private_account_1 = AccountWithMetadata::new( - Account { - program_owner: program.id(), - balance: 100, - ..Account::default() - }, - true, - (&sender_keys.npk(), 0), - ); - let private_account_2 = - AccountWithMetadata::new(Account::default(), false, (&recipient_keys.npk(), 0)); - - let result = execute_and_prove( - vec![private_account_1, private_account_2], - Program::serialize_instruction(10_u128).unwrap(), - vec![1, 2], - vec![ - ( - sender_keys.npk(), - 0, - SharedSecretKey::new(&[55; 32], &sender_keys.vpk()), - ), - ( - recipient_keys.npk(), - 0, - SharedSecretKey::new(&[56; 32], &recipient_keys.vpk()), - ), - ], - vec![sender_keys.nsk], - vec![Some((0, vec![]))], - &program.into(), - ); - - assert!(matches!(result, Err(NssaError::CircuitProvingError(_)))); - } - - #[test] - fn circuit_fails_if_insufficient_keys_are_provided() { - let program = Program::simple_balance_transfer(); - let sender_keys = test_private_account_keys_1(); - let private_account_1 = AccountWithMetadata::new( - Account { - program_owner: program.id(), - balance: 100, - ..Account::default() - }, - true, - (&sender_keys.npk(), 0), - ); - let private_account_2 = - AccountWithMetadata::new(Account::default(), false, AccountId::new([1; 32])); - - // Setting only one key for an execution with two private accounts. 
- let private_account_keys = [( - sender_keys.npk(), - 0, - SharedSecretKey::new(&[55; 32], &sender_keys.vpk()), - )]; - let result = execute_and_prove( - vec![private_account_1, private_account_2], - Program::serialize_instruction(10_u128).unwrap(), - vec![1, 2], - private_account_keys.to_vec(), - vec![sender_keys.nsk], - vec![Some((0, vec![]))], - &program.into(), - ); - - assert!(matches!(result, Err(NssaError::CircuitProvingError(_)))); - } - - #[test] - fn circuit_fails_if_insufficient_commitment_proofs_are_provided() { - let program = Program::simple_balance_transfer(); - let sender_keys = test_private_account_keys_1(); - let recipient_keys = test_private_account_keys_2(); - let private_account_1 = AccountWithMetadata::new( - Account { - program_owner: program.id(), - balance: 100, - ..Account::default() - }, - true, - (&sender_keys.npk(), 0), - ); - let private_account_2 = - AccountWithMetadata::new(Account::default(), false, (&recipient_keys.npk(), 0)); - - // Setting no second commitment proof. 
- let private_account_membership_proofs = [Some((0, vec![]))]; - let result = execute_and_prove( - vec![private_account_1, private_account_2], - Program::serialize_instruction(10_u128).unwrap(), - vec![1, 2], - vec![ - ( - sender_keys.npk(), - 0, - SharedSecretKey::new(&[55; 32], &sender_keys.vpk()), - ), - ( - recipient_keys.npk(), - 0, - SharedSecretKey::new(&[56; 32], &recipient_keys.vpk()), - ), - ], - vec![sender_keys.nsk], - private_account_membership_proofs.to_vec(), - &program.into(), - ); - - assert!(matches!(result, Err(NssaError::CircuitProvingError(_)))); - } - - #[test] - fn circuit_fails_if_insufficient_auth_keys_are_provided() { - let program = Program::simple_balance_transfer(); - let sender_keys = test_private_account_keys_1(); - let recipient_keys = test_private_account_keys_2(); - let private_account_1 = AccountWithMetadata::new( - Account { - program_owner: program.id(), - balance: 100, - ..Account::default() - }, - true, - (&sender_keys.npk(), 0), - ); - let private_account_2 = - AccountWithMetadata::new(Account::default(), false, (&recipient_keys.npk(), 0)); - - // Setting no auth key for an execution with one non default private accounts. 
- let private_account_nsks = []; - let result = execute_and_prove( - vec![private_account_1, private_account_2], - Program::serialize_instruction(10_u128).unwrap(), - vec![1, 2], - vec![ - ( - sender_keys.npk(), - 0, - SharedSecretKey::new(&[55; 32], &sender_keys.vpk()), - ), - ( - recipient_keys.npk(), - 0, - SharedSecretKey::new(&[56; 32], &recipient_keys.vpk()), - ), - ], - private_account_nsks.to_vec(), - vec![], + vec![InputAccountIdentity::Public], &program.into(), ); @@ -2075,33 +1964,26 @@ pub mod tests { let private_account_2 = AccountWithMetadata::new(Account::default(), false, (&recipient_keys.npk(), 0)); - let private_account_keys = [ - // First private account is the sender - ( - sender_keys.npk(), - 0, - SharedSecretKey::new(&[55; 32], &sender_keys.vpk()), - ), - // Second private account is the recipient - ( - recipient_keys.npk(), - 0, - SharedSecretKey::new(&[56; 32], &recipient_keys.vpk()), - ), - ]; - - // Setting the recipient key to authorize the sender. - // This should be set to the sender private account in - // a normal circumstance. The recipient can't authorize this. - let private_account_nsks = [recipient_keys.nsk]; - let private_account_membership_proofs = [Some((0, vec![]))]; + // Setting the recipient nsk to authorize the sender. + // This should be set to the sender private account in a normal circumstance. + // `PrivateAuthorizedUpdate` derives npk from nsk and asserts equality with + // `pre_state.account_id`, so a mismatched nsk fails that check. 
let result = execute_and_prove( vec![private_account_1, private_account_2], Program::serialize_instruction(10_u128).unwrap(), - vec![1, 2], - private_account_keys.to_vec(), - private_account_nsks.to_vec(), - private_account_membership_proofs.to_vec(), + vec![ + InputAccountIdentity::PrivateAuthorizedUpdate { + ssk: SharedSecretKey::new([55; 32], &sender_keys.vpk()), + nsk: recipient_keys.nsk, + membership_proof: (0, vec![]), + identifier: 0, + }, + InputAccountIdentity::PrivateUnauthorized { + npk: recipient_keys.npk(), + ssk: SharedSecretKey::new([56; 32], &recipient_keys.vpk()), + identifier: 0, + }, + ], &program.into(), ); @@ -2135,21 +2017,19 @@ pub mod tests { let result = execute_and_prove( vec![private_account_1, private_account_2], Program::serialize_instruction(10_u128).unwrap(), - vec![1, 2], vec![ - ( - sender_keys.npk(), - 0, - SharedSecretKey::new(&[55; 32], &sender_keys.vpk()), - ), - ( - recipient_keys.npk(), - 0, - SharedSecretKey::new(&[56; 32], &recipient_keys.vpk()), - ), + InputAccountIdentity::PrivateAuthorizedUpdate { + ssk: SharedSecretKey::new([55; 32], &sender_keys.vpk()), + nsk: sender_keys.nsk, + membership_proof: (0, vec![]), + identifier: 0, + }, + InputAccountIdentity::PrivateUnauthorized { + npk: recipient_keys.npk(), + ssk: SharedSecretKey::new([56; 32], &recipient_keys.vpk()), + identifier: 0, + }, ], - vec![sender_keys.nsk], - vec![Some((0, vec![]))], &program.into(), ); @@ -2183,21 +2063,19 @@ pub mod tests { let result = execute_and_prove( vec![private_account_1, private_account_2], Program::serialize_instruction(10_u128).unwrap(), - vec![1, 2], vec![ - ( - sender_keys.npk(), - 0, - SharedSecretKey::new(&[55; 32], &sender_keys.vpk()), - ), - ( - recipient_keys.npk(), - 0, - SharedSecretKey::new(&[56; 32], &recipient_keys.vpk()), - ), + InputAccountIdentity::PrivateAuthorizedUpdate { + ssk: SharedSecretKey::new([55; 32], &sender_keys.vpk()), + nsk: sender_keys.nsk, + membership_proof: (0, vec![]), + identifier: 0, + }, + 
InputAccountIdentity::PrivateUnauthorized { + npk: recipient_keys.npk(), + ssk: SharedSecretKey::new([56; 32], &recipient_keys.vpk()), + identifier: 0, + }, ], - vec![sender_keys.nsk], - vec![Some((0, vec![]))], &program.into(), ); @@ -2231,21 +2109,19 @@ pub mod tests { let result = execute_and_prove( vec![private_account_1, private_account_2], Program::serialize_instruction(10_u128).unwrap(), - vec![1, 2], vec![ - ( - sender_keys.npk(), - 0, - SharedSecretKey::new(&[55; 32], &sender_keys.vpk()), - ), - ( - recipient_keys.npk(), - 0, - SharedSecretKey::new(&[56; 32], &recipient_keys.vpk()), - ), + InputAccountIdentity::PrivateAuthorizedUpdate { + ssk: SharedSecretKey::new([55; 32], &sender_keys.vpk()), + nsk: sender_keys.nsk, + membership_proof: (0, vec![]), + identifier: 0, + }, + InputAccountIdentity::PrivateUnauthorized { + npk: recipient_keys.npk(), + ssk: SharedSecretKey::new([56; 32], &recipient_keys.vpk()), + identifier: 0, + }, ], - vec![sender_keys.nsk], - vec![Some((0, vec![]))], &program.into(), ); @@ -2279,21 +2155,19 @@ pub mod tests { let result = execute_and_prove( vec![private_account_1, private_account_2], Program::serialize_instruction(10_u128).unwrap(), - vec![1, 2], vec![ - ( - sender_keys.npk(), - 0, - SharedSecretKey::new(&[55; 32], &sender_keys.vpk()), - ), - ( - recipient_keys.npk(), - 0, - SharedSecretKey::new(&[56; 32], &recipient_keys.vpk()), - ), + InputAccountIdentity::PrivateAuthorizedUpdate { + ssk: SharedSecretKey::new([55; 32], &sender_keys.vpk()), + nsk: sender_keys.nsk, + membership_proof: (0, vec![]), + identifier: 0, + }, + InputAccountIdentity::PrivateUnauthorized { + npk: recipient_keys.npk(), + ssk: SharedSecretKey::new([56; 32], &recipient_keys.vpk()), + identifier: 0, + }, ], - vec![sender_keys.nsk], - vec![Some((0, vec![]))], &program.into(), ); @@ -2325,21 +2199,19 @@ pub mod tests { let result = execute_and_prove( vec![private_account_1, private_account_2], Program::serialize_instruction(10_u128).unwrap(), - vec![1, 2], 
vec![ - ( - sender_keys.npk(), - 0, - SharedSecretKey::new(&[55; 32], &sender_keys.vpk()), - ), - ( - recipient_keys.npk(), - 0, - SharedSecretKey::new(&[56; 32], &recipient_keys.vpk()), - ), + InputAccountIdentity::PrivateAuthorizedUpdate { + ssk: SharedSecretKey::new([55; 32], &sender_keys.vpk()), + nsk: sender_keys.nsk, + membership_proof: (0, vec![]), + identifier: 0, + }, + InputAccountIdentity::PrivateUnauthorized { + npk: recipient_keys.npk(), + ssk: SharedSecretKey::new([56; 32], &recipient_keys.vpk()), + identifier: 0, + }, ], - vec![sender_keys.nsk], - vec![Some((0, vec![]))], &program.into(), ); @@ -2355,7 +2227,7 @@ pub mod tests { let program = Program::simple_balance_transfer(); let keys = test_private_account_keys_1(); let npk = keys.npk(); - let shared_secret = SharedSecretKey::new(&[55; 32], &keys.vpk()); + let shared_secret = SharedSecretKey::new([55; 32], &keys.vpk()); let public_account_1 = AccountWithMetadata::new( Account { program_owner: program.id(), @@ -2368,14 +2240,17 @@ pub mod tests { let private_pda_account = AccountWithMetadata::new(Account::default(), false, AccountId::new([1; 32])); - let visibility_mask = [0, 3]; let result = execute_and_prove( vec![public_account_1, private_pda_account], Program::serialize_instruction(10_u128).unwrap(), - visibility_mask.to_vec(), - vec![(npk, 0, shared_secret)], - vec![], - vec![None], + vec![ + InputAccountIdentity::Public, + InputAccountIdentity::PrivatePdaInit { + npk, + ssk: shared_secret, + identifier: u128::MAX, + }, + ], &program.into(), ); @@ -2393,18 +2268,19 @@ pub mod tests { let keys = test_private_account_keys_1(); let npk = keys.npk(); let seed = PdaSeed::new([42; 32]); - let shared_secret = SharedSecretKey::new(&[55; 32], &keys.vpk()); + let shared_secret = SharedSecretKey::new([55; 32], &keys.vpk()); - let account_id = AccountId::for_private_pda(&program.id(), &seed, &npk); + let account_id = AccountId::for_private_pda(&program.id(), &seed, &npk, u128::MAX); let pre_state = 
AccountWithMetadata::new(Account::default(), false, account_id); let result = execute_and_prove( vec![pre_state], Program::serialize_instruction(seed).unwrap(), - vec![3], - vec![(npk, u128::MAX, shared_secret)], - vec![], - vec![None], + vec![InputAccountIdentity::PrivatePdaInit { + npk, + ssk: shared_secret, + identifier: u128::MAX, + }], &program.into(), ); @@ -2428,21 +2304,22 @@ pub mod tests { let npk_a = keys_a.npk(); let npk_b = keys_b.npk(); let seed = PdaSeed::new([42; 32]); - let shared_secret = SharedSecretKey::new(&[55; 32], &keys_b.vpk()); + let shared_secret = SharedSecretKey::new([55; 32], &keys_b.vpk()); // `account_id` is derived from `npk_a`, but `npk_b` is supplied for this pre_state. // `AccountId::for_private_pda(program, seed, npk_b) != account_id`, so the claim check in // the circuit must reject. - let account_id = AccountId::for_private_pda(&program.id(), &seed, &npk_a); + let account_id = AccountId::for_private_pda(&program.id(), &seed, &npk_a, u128::MAX); let pre_state = AccountWithMetadata::new(Account::default(), false, account_id); let result = execute_and_prove( vec![pre_state], Program::serialize_instruction(seed).unwrap(), - vec![3], - vec![(npk_b, 0, shared_secret)], - vec![], - vec![None], + vec![InputAccountIdentity::PrivatePdaInit { + npk: npk_b, + ssk: shared_secret, + identifier: u128::MAX, + }], &program.into(), ); @@ -2461,9 +2338,9 @@ pub mod tests { let keys = test_private_account_keys_1(); let npk = keys.npk(); let seed = PdaSeed::new([77; 32]); - let shared_secret = SharedSecretKey::new(&[55; 32], &keys.vpk()); + let shared_secret = SharedSecretKey::new([55; 32], &keys.vpk()); - let account_id = AccountId::for_private_pda(&delegator.id(), &seed, &npk); + let account_id = AccountId::for_private_pda(&delegator.id(), &seed, &npk, u128::MAX); let pre_state = AccountWithMetadata::new(Account::default(), false, account_id); let callee_id = callee.id(); @@ -2473,10 +2350,11 @@ pub mod tests { let result = execute_and_prove( 
vec![pre_state], Program::serialize_instruction((seed, seed, callee_id)).unwrap(), - vec![3], - vec![(npk, u128::MAX, shared_secret)], - vec![], - vec![None], + vec![InputAccountIdentity::PrivatePdaInit { + npk, + ssk: shared_secret, + identifier: u128::MAX, + }], &program_with_deps, ); @@ -2498,9 +2376,9 @@ pub mod tests { let npk = keys.npk(); let claim_seed = PdaSeed::new([77; 32]); let wrong_delegated_seed = PdaSeed::new([88; 32]); - let shared_secret = SharedSecretKey::new(&[55; 32], &keys.vpk()); + let shared_secret = SharedSecretKey::new([55; 32], &keys.vpk()); - let account_id = AccountId::for_private_pda(&delegator.id(), &claim_seed, &npk); + let account_id = AccountId::for_private_pda(&delegator.id(), &claim_seed, &npk, u128::MAX); let pre_state = AccountWithMetadata::new(Account::default(), false, account_id); let callee_id = callee.id(); @@ -2510,10 +2388,11 @@ pub mod tests { let result = execute_and_prove( vec![pre_state], Program::serialize_instruction((claim_seed, wrong_delegated_seed, callee_id)).unwrap(), - vec![3], - vec![(npk, 0, shared_secret)], - vec![], - vec![None], + vec![InputAccountIdentity::PrivatePdaInit { + npk, + ssk: shared_secret, + identifier: u128::MAX, + }], &program_with_deps, ); @@ -2534,11 +2413,11 @@ pub mod tests { let keys_a = test_private_account_keys_1(); let keys_b = test_private_account_keys_2(); let seed = PdaSeed::new([55; 32]); - let shared_a = SharedSecretKey::new(&[66; 32], &keys_a.vpk()); - let shared_b = SharedSecretKey::new(&[77; 32], &keys_b.vpk()); + let shared_a = SharedSecretKey::new([66; 32], &keys_a.vpk()); + let shared_b = SharedSecretKey::new([77; 32], &keys_b.vpk()); - let account_a = AccountId::for_private_pda(&program.id(), &seed, &keys_a.npk()); - let account_b = AccountId::for_private_pda(&program.id(), &seed, &keys_b.npk()); + let account_a = AccountId::for_private_pda(&program.id(), &seed, &keys_a.npk(), u128::MAX); + let account_b = AccountId::for_private_pda(&program.id(), &seed, &keys_b.npk(), 
u128::MAX); let pre_a = AccountWithMetadata::new(Account::default(), false, account_a); let pre_b = AccountWithMetadata::new(Account::default(), false, account_b); @@ -2546,10 +2425,18 @@ pub mod tests { let result = execute_and_prove( vec![pre_a, pre_b], Program::serialize_instruction(seed).unwrap(), - vec![3, 3], - vec![(keys_a.npk(), 0, shared_a), (keys_b.npk(), 0, shared_b)], - vec![], - vec![None, None], + vec![ + InputAccountIdentity::PrivatePdaInit { + npk: keys_a.npk(), + ssk: shared_a, + identifier: u128::MAX, + }, + InputAccountIdentity::PrivatePdaInit { + npk: keys_b.npk(), + ssk: shared_b, + identifier: u128::MAX, + }, + ], &program.into(), ); @@ -2572,12 +2459,12 @@ pub mod tests { let program = Program::noop(); let keys = test_private_account_keys_1(); let npk = keys.npk(); - let shared_secret = SharedSecretKey::new(&[55; 32], &keys.vpk()); + let shared_secret = SharedSecretKey::new([55; 32], &keys.vpk()); let seed = PdaSeed::new([99; 32]); // Simulate a previously-claimed private PDA: program_owner != DEFAULT, is_authorized = // true, account_id derived via the private formula. 
- let account_id = AccountId::for_private_pda(&program.id(), &seed, &npk); + let account_id = AccountId::for_private_pda(&program.id(), &seed, &npk, u128::MAX); let owned_pre_state = AccountWithMetadata::new( Account { program_owner: program.id(), @@ -2590,146 +2477,11 @@ pub mod tests { let result = execute_and_prove( vec![owned_pre_state], Program::serialize_instruction(()).unwrap(), - vec![3], - vec![(npk, 0, shared_secret)], - vec![], - vec![None], - &program.into(), - ); - - assert!(matches!(result, Err(NssaError::CircuitProvingError(_)))); - } - - #[test] - fn circuit_should_fail_with_too_many_nonces() { - let program = Program::simple_balance_transfer(); - let sender_keys = test_private_account_keys_1(); - let recipient_keys = test_private_account_keys_2(); - let private_account_1 = AccountWithMetadata::new( - Account { - program_owner: program.id(), - balance: 100, - ..Account::default() - }, - true, - (&sender_keys.npk(), 0), - ); - let private_account_2 = - AccountWithMetadata::new(Account::default(), false, (&recipient_keys.npk(), 0)); - - let result = execute_and_prove( - vec![private_account_1, private_account_2], - Program::serialize_instruction(10_u128).unwrap(), - vec![1, 2], - vec![ - ( - sender_keys.npk(), - 0, - SharedSecretKey::new(&[55; 32], &sender_keys.vpk()), - ), - ( - recipient_keys.npk(), - 0, - SharedSecretKey::new(&[56; 32], &recipient_keys.vpk()), - ), - ], - vec![sender_keys.nsk], - vec![Some((0, vec![]))], - &program.into(), - ); - - assert!(matches!(result, Err(NssaError::CircuitProvingError(_)))); - } - - #[test] - fn circuit_should_fail_with_too_many_private_account_keys() { - let program = Program::simple_balance_transfer(); - let sender_keys = test_private_account_keys_1(); - let recipient_keys = test_private_account_keys_2(); - let private_account_1 = AccountWithMetadata::new( - Account { - program_owner: program.id(), - balance: 100, - ..Account::default() - }, - true, - (&sender_keys.npk(), 0), - ); - let private_account_2 = 
- AccountWithMetadata::new(Account::default(), false, (&recipient_keys.npk(), 0)); - - // Setting three private account keys for a circuit execution with only two private - // accounts. - let private_account_keys = [ - ( - sender_keys.npk(), - 0, - SharedSecretKey::new(&[55; 32], &sender_keys.vpk()), - ), - ( - recipient_keys.npk(), - 0, - SharedSecretKey::new(&[56; 32], &recipient_keys.vpk()), - ), - ( - sender_keys.npk(), - 0, - SharedSecretKey::new(&[57; 32], &sender_keys.vpk()), - ), - ]; - let result = execute_and_prove( - vec![private_account_1, private_account_2], - Program::serialize_instruction(10_u128).unwrap(), - vec![1, 2], - private_account_keys.to_vec(), - vec![sender_keys.nsk], - vec![Some((0, vec![]))], - &program.into(), - ); - - assert!(matches!(result, Err(NssaError::CircuitProvingError(_)))); - } - - #[test] - fn circuit_should_fail_with_too_many_private_account_auth_keys() { - let program = Program::simple_balance_transfer(); - let sender_keys = test_private_account_keys_1(); - let recipient_keys = test_private_account_keys_2(); - let private_account_1 = AccountWithMetadata::new( - Account { - program_owner: program.id(), - balance: 100, - ..Account::default() - }, - true, - (&sender_keys.npk(), 0), - ); - let private_account_2 = - AccountWithMetadata::new(Account::default(), false, (&recipient_keys.npk(), 0)); - - // Setting two private account keys for a circuit execution with only one non default - // private account (visibility mask equal to 1 means that auth keys are expected). 
- let visibility_mask = [1, 2]; - let private_account_nsks = [sender_keys.nsk, recipient_keys.nsk]; - let private_account_membership_proofs = [Some((0, vec![])), Some((1, vec![]))]; - let result = execute_and_prove( - vec![private_account_1, private_account_2], - Program::serialize_instruction(10_u128).unwrap(), - visibility_mask.to_vec(), - vec![ - ( - sender_keys.npk(), - 0, - SharedSecretKey::new(&[55; 32], &sender_keys.vpk()), - ), - ( - recipient_keys.npk(), - 0, - SharedSecretKey::new(&[56; 32], &recipient_keys.vpk()), - ), - ], - private_account_nsks.to_vec(), - private_account_membership_proofs.to_vec(), + vec![InputAccountIdentity::PrivatePdaInit { + npk, + ssk: shared_secret, + identifier: u128::MAX, + }], &program.into(), ); @@ -2806,20 +2558,24 @@ pub mod tests { (&sender_keys.npk(), 0), ); - let visibility_mask = [1, 1]; - let private_account_nsks = [sender_keys.nsk, sender_keys.nsk]; - let private_account_membership_proofs = [Some((1, vec![])), Some((1, vec![]))]; - let shared_secret = SharedSecretKey::new(&[55; 32], &sender_keys.vpk()); + let shared_secret = SharedSecretKey::new([55; 32], &sender_keys.vpk()); let result = execute_and_prove( vec![private_account_1.clone(), private_account_1], Program::serialize_instruction(100_u128).unwrap(), - visibility_mask.to_vec(), vec![ - (sender_keys.npk(), 0, shared_secret), - (sender_keys.npk(), 0, shared_secret), + InputAccountIdentity::PrivateAuthorizedUpdate { + ssk: shared_secret, + nsk: sender_keys.nsk, + membership_proof: (1, vec![]), + identifier: 0, + }, + InputAccountIdentity::PrivateAuthorizedUpdate { + ssk: shared_secret, + nsk: sender_keys.nsk, + membership_proof: (1, vec![]), + identifier: 0, + }, ], - private_account_nsks.to_vec(), - private_account_membership_proofs.to_vec(), &program.into(), ); @@ -2853,7 +2609,7 @@ pub mod tests { program.id(), vec![from, to], vec![Nonce(0), Nonce(0)], - amount, + AuthTransferInstruction::Transfer { amount }, ) .unwrap(); let witness_set = @@ -2876,15 
+2632,19 @@ pub mod tests { assert_eq!(state.get_account_by_id(account_id), Account::default()); - let message = - public_transaction::Message::try_new(program.id(), vec![account_id], vec![], 0_u128) - .unwrap(); + let message = public_transaction::Message::try_new( + program.id(), + vec![account_id], + vec![], + AuthTransferInstruction::Initialize, + ) + .unwrap(); let witness_set = public_transaction::WitnessSet::for_message(&message, &[]); let tx = PublicTransaction::new(message, witness_set); - let result = state.transition_from_public_transaction(&tx, 1, 0); + let result = state.transition_from_public_transaction(&tx, 2, 0); - assert!(matches!(result, Err(NssaError::ProgramExecutionFailed(_)))); + assert!(matches!(result, Err(NssaError::InvalidProgramBehavior(_)))); assert_eq!(state.get_account_by_id(account_id), Account::default()); } @@ -2901,7 +2661,7 @@ pub mod tests { program.id(), vec![account_id], vec![Nonce(0)], - 0_u128, + AuthTransferInstruction::Initialize, ) .unwrap(); let witness_set = public_transaction::WitnessSet::for_message(&message, &[&account_key]); @@ -3109,15 +2869,12 @@ pub mod tests { let result = execute_and_prove( vec![public_account], - Program::serialize_instruction(0_u128).unwrap(), - vec![0], - vec![], - vec![], - vec![], + Program::serialize_instruction(AuthTransferInstruction::Initialize).unwrap(), + vec![InputAccountIdentity::Public], &program.into(), ); - assert!(matches!(result, Err(NssaError::ProgramProveFailed(_)))); + assert!(matches!(result, Err(NssaError::CircuitProvingError(_)))); } #[test] @@ -3130,7 +2887,7 @@ pub mod tests { balance: 100, ..Account::default() }; - let sender_account_id = AccountId::from((&sender_keys.npk(), 0)); + let sender_account_id = AccountId::for_regular_private_account(&sender_keys.npk(), 0); let sender_commitment = Commitment::new(&sender_account_id, &sender_private_account); let sender_init_nullifier = Nullifier::for_account_initialization(&sender_account_id); let mut state = 
V03State::new_with_genesis_accounts( @@ -3146,16 +2903,28 @@ pub mod tests { let recipient_pre = AccountWithMetadata::new(Account::default(), true, recipient_account_id); let esk = [5; 32]; - let shared_secret = SharedSecretKey::new(&esk, &sender_keys.vpk()); + let shared_secret = SharedSecretKey::new(esk, &sender_keys.vpk()); let epk = EphemeralPublicKey::from_scalar(esk); + let balance = 37; + let (output, proof) = execute_and_prove( vec![sender_pre, recipient_pre], - Program::serialize_instruction(37_u128).unwrap(), - vec![1, 0], - vec![(sender_keys.npk(), 0, shared_secret)], - vec![sender_keys.nsk], - vec![state.get_proof_for_commitment(&sender_commitment)], + Program::serialize_instruction(authenticated_transfer_core::Instruction::Transfer { + amount: balance, + }) + .unwrap(), + vec![ + InputAccountIdentity::PrivateAuthorizedUpdate { + ssk: shared_secret, + nsk: sender_keys.nsk, + membership_proof: state + .get_proof_for_commitment(&sender_commitment) + .expect("sender's commitment must be in state"), + identifier: 0, + }, + InputAccountIdentity::Public, + ], &program.into(), ) .unwrap(); @@ -3182,7 +2951,7 @@ pub mod tests { state.get_account_by_id(recipient_account_id), Account { program_owner: program_id, - balance: 37, + balance, nonce: Nonce(1), ..Account::default() } @@ -3216,8 +2985,8 @@ pub mod tests { (&to_keys.npk(), 0), ); - let from_account_id = AccountId::from((&from_keys.npk(), 0)); - let to_account_id = AccountId::from((&to_keys.npk(), 0)); + let from_account_id = AccountId::for_regular_private_account(&from_keys.npk(), 0); + let to_account_id = AccountId::for_regular_private_account(&to_keys.npk(), 0); let from_commitment = Commitment::new(&from_account_id, &from_account.account); let to_commitment = Commitment::new(&to_account_id, &to_account.account); let from_init_nullifier = Nullifier::for_account_initialization(&from_account_id); @@ -3240,11 +3009,11 @@ pub mod tests { ); let from_esk = [3; 32]; - let from_ss = 
SharedSecretKey::new(&from_esk, &from_keys.vpk()); + let from_ss = SharedSecretKey::new(from_esk, &from_keys.vpk()); let from_epk = EphemeralPublicKey::from_scalar(from_esk); let to_esk = [3; 32]; - let to_ss = SharedSecretKey::new(&to_esk, &to_keys.vpk()); + let to_ss = SharedSecretKey::new(to_esk, &to_keys.vpk()); let to_epk = EphemeralPublicKey::from_scalar(to_esk); let mut dependencies = HashMap::new(); @@ -3273,12 +3042,23 @@ pub mod tests { let (output, proof) = execute_and_prove( vec![to_account, from_account], Program::serialize_instruction(instruction).unwrap(), - vec![1, 1], - vec![(from_keys.npk(), 0, to_ss), (to_keys.npk(), 0, from_ss)], - vec![from_keys.nsk, to_keys.nsk], vec![ - state.get_proof_for_commitment(&from_commitment), - state.get_proof_for_commitment(&to_commitment), + InputAccountIdentity::PrivateAuthorizedUpdate { + ssk: to_ss, + nsk: from_keys.nsk, + membership_proof: state + .get_proof_for_commitment(&from_commitment) + .expect("from's commitment must be in state"), + identifier: 0, + }, + InputAccountIdentity::PrivateAuthorizedUpdate { + ssk: from_ss, + nsk: to_keys.nsk, + membership_proof: state + .get_proof_for_commitment(&to_commitment) + .expect("to's commitment must be in state"), + identifier: 0, + }, ], &program_with_deps, ) @@ -3439,7 +3219,7 @@ pub mod tests { /// This test ensures that even if a malicious program tries to perform overflow of balances /// it will not be able to break the balance validation. 
#[test] - fn malicious_program_cannot_break_balance_validation() { + fn malicious_program_cannot_break_balance_validation_if_not_in_genesis() { let sender_key = PrivateKey::try_new([37; 32]).unwrap(); let sender_id = AccountId::from(&PublicKey::new_from_private_key(&sender_key)); let sender_init_balance: u128 = 10; @@ -3478,7 +3258,7 @@ pub mod tests { let witness_set = public_transaction::WitnessSet::for_message(&message, &[&sender_key]); let tx = PublicTransaction::new(message, witness_set); - let res = state.transition_from_public_transaction(&tx, 1, 0); + let res = state.transition_from_public_transaction(&tx, 2, 0); let expected_total_balance_pre_states = WrappedBalanceSum::from_balances( [sender_init_balance, recipient_init_balance].into_iter(), ) @@ -3532,20 +3312,20 @@ pub mod tests { // Set up parameters for the new account let esk = [3; 32]; - let shared_secret = SharedSecretKey::new(&esk, &private_keys.vpk()); + let shared_secret = SharedSecretKey::new(esk, &private_keys.vpk()); let epk = EphemeralPublicKey::from_scalar(esk); - // Balance to initialize the account with (0 for a new account) - let balance: u128 = 0; + let instruction = authenticated_transfer_core::Instruction::Initialize; // Execute and prove the circuit with the authorized account but no commitment proof let (output, proof) = execute_and_prove( vec![authorized_account], - Program::serialize_instruction(balance).unwrap(), - vec![1], - vec![(private_keys.npk(), 0, shared_secret)], - vec![private_keys.nsk], - vec![None], + Program::serialize_instruction(instruction).unwrap(), + vec![InputAccountIdentity::PrivateAuthorizedInit { + ssk: shared_secret, + nsk: private_keys.nsk, + identifier: 0, + }], &program.into(), ) .unwrap(); @@ -3565,7 +3345,7 @@ pub mod tests { let result = state.transition_from_privacy_preserving_transaction(&tx, 1, 0); assert!(result.is_ok()); - let account_id = AccountId::from((&private_keys.npk(), 0)); + let account_id = 
AccountId::for_regular_private_account(&private_keys.npk(), 0); let nullifier = Nullifier::for_account_initialization(&account_id); assert!(state.private_state.1.contains(&nullifier)); } @@ -3584,16 +3364,17 @@ pub mod tests { let program = Program::claimer(); let esk = [5; 32]; - let shared_secret = SharedSecretKey::new(&esk, &private_keys.vpk()); + let shared_secret = SharedSecretKey::new(esk, &private_keys.vpk()); let epk = EphemeralPublicKey::from_scalar(esk); let (output, proof) = execute_and_prove( vec![unauthorized_account], Program::serialize_instruction(0_u128).unwrap(), - vec![2], - vec![(private_keys.npk(), 0, shared_secret)], - vec![], - vec![None], + vec![InputAccountIdentity::PrivateUnauthorized { + npk: private_keys.npk(), + ssk: shared_secret, + identifier: 0, + }], &program.into(), ) .unwrap(); @@ -3613,7 +3394,7 @@ pub mod tests { .transition_from_privacy_preserving_transaction(&tx, 1, 0) .unwrap(); - let account_id = AccountId::from((&private_keys.npk(), 0)); + let account_id = AccountId::for_regular_private_account(&private_keys.npk(), 0); let nullifier = Nullifier::for_account_initialization(&account_id); assert!(state.private_state.1.contains(&nullifier)); } @@ -3633,19 +3414,20 @@ pub mod tests { // Set up parameters for claiming the new account let esk = [3; 32]; - let shared_secret = SharedSecretKey::new(&esk, &private_keys.vpk()); + let shared_secret = SharedSecretKey::new(esk, &private_keys.vpk()); let epk = EphemeralPublicKey::from_scalar(esk); - let balance: u128 = 0; + let instruction = authenticated_transfer_core::Instruction::Initialize; // Step 2: Execute claimer program to claim the account with authentication let (output, proof) = execute_and_prove( vec![authorized_account.clone()], - Program::serialize_instruction(balance).unwrap(), - vec![1], - vec![(private_keys.npk(), 0, shared_secret)], - vec![private_keys.nsk], - vec![None], + Program::serialize_instruction(instruction).unwrap(), + 
vec![InputAccountIdentity::PrivateAuthorizedInit { + ssk: shared_secret, + nsk: private_keys.nsk, + identifier: 0, + }], &claimer_program.into(), ) .unwrap(); @@ -3669,7 +3451,7 @@ pub mod tests { ); // Verify the account is now initialized (nullifier exists) - let account_id = AccountId::from((&private_keys.npk(), 0)); + let account_id = AccountId::for_regular_private_account(&private_keys.npk(), 0); let nullifier = Nullifier::for_account_initialization(&account_id); assert!(state.private_state.1.contains(&nullifier)); @@ -3682,16 +3464,17 @@ pub mod tests { let noop_program = Program::noop(); let esk2 = [4; 32]; - let shared_secret2 = SharedSecretKey::new(&esk2, &private_keys.vpk()); + let shared_secret2 = SharedSecretKey::new(esk2, &private_keys.vpk()); // Step 3: Try to execute noop program with authentication but without initialization let res = execute_and_prove( vec![account_metadata], Program::serialize_instruction(()).unwrap(), - vec![1], - vec![(private_keys.npk(), 0, shared_secret2)], - vec![private_keys.nsk], - vec![None], + vec![InputAccountIdentity::PrivateAuthorizedInit { + ssk: shared_secret2, + nsk: private_keys.nsk, + identifier: 0, + }], &noop_program.into(), ); @@ -3764,14 +3547,12 @@ pub mod tests { let result = execute_and_prove( vec![private_account], Program::serialize_instruction(instruction).unwrap(), - vec![1], - vec![( - sender_keys.npk(), - 0, - SharedSecretKey::new(&[3; 32], &sender_keys.vpk()), - )], - vec![sender_keys.nsk], - vec![Some((0, vec![]))], + vec![InputAccountIdentity::PrivateAuthorizedUpdate { + ssk: SharedSecretKey::new([3; 32], &sender_keys.vpk()), + nsk: sender_keys.nsk, + membership_proof: (0, vec![]), + identifier: 0, + }], &program.into(), ); @@ -3792,14 +3573,12 @@ pub mod tests { let result = execute_and_prove( vec![private_account], Program::serialize_instruction(instruction).unwrap(), - vec![1], - vec![( - sender_keys.npk(), - 0, - SharedSecretKey::new(&[3; 32], &sender_keys.vpk()), - )], - vec![sender_keys.nsk], 
- vec![Some((0, vec![]))], + vec![InputAccountIdentity::PrivateAuthorizedUpdate { + ssk: SharedSecretKey::new([3; 32], &sender_keys.vpk()), + nsk: sender_keys.nsk, + membership_proof: (0, vec![]), + identifier: 0, + }], &program.into(), ); @@ -3827,7 +3606,7 @@ pub mod tests { let recipient_account = AccountWithMetadata::new(Account::default(), true, (&recipient_keys.npk(), 0)); - let recipient_account_id = AccountId::from((&recipient_keys.npk(), 0)); + let recipient_account_id = AccountId::for_regular_private_account(&recipient_keys.npk(), 0); let recipient_commitment = Commitment::new(&recipient_account_id, &recipient_account.account); let recipient_init_nullifier = Nullifier::for_account_initialization(&recipient_account_id); @@ -3842,7 +3621,7 @@ pub mod tests { let instruction = (balance_to_transfer, auth_transfers.id()); let recipient_esk = [3; 32]; - let recipient = SharedSecretKey::new(&recipient_esk, &recipient_keys.vpk()); + let recipient = SharedSecretKey::new(recipient_esk, &recipient_keys.vpk()); let mut dependencies = HashMap::new(); dependencies.insert(auth_transfers.id(), auth_transfers); @@ -3852,10 +3631,17 @@ pub mod tests { let result = execute_and_prove( vec![sender_account, recipient_account], Program::serialize_instruction(instruction).unwrap(), - vec![0, 1], - vec![(recipient_keys.npk(), 0, recipient)], - vec![recipient_keys.nsk], - vec![state.get_proof_for_commitment(&recipient_commitment)], + vec![ + InputAccountIdentity::Public, + InputAccountIdentity::PrivateAuthorizedUpdate { + ssk: recipient, + nsk: recipient_keys.nsk, + membership_proof: state + .get_proof_for_commitment(&recipient_commitment) + .expect("recipient's commitment must be in state"), + identifier: 0, + }, + ], &program_with_deps, ); @@ -3992,7 +3778,7 @@ pub mod tests { let mut state = V03State::new_with_genesis_accounts(&[], vec![], 0).with_test_programs(); let tx = { let esk = [3; 32]; - let shared_secret = SharedSecretKey::new(&esk, &account_keys.vpk()); + let 
shared_secret = SharedSecretKey::new(esk, &account_keys.vpk()); let epk = EphemeralPublicKey::from_scalar(esk); let instruction = ( @@ -4002,10 +3788,11 @@ pub mod tests { let (output, proof) = circuit::execute_and_prove( vec![pre], Program::serialize_instruction(instruction).unwrap(), - vec![2], - vec![(account_keys.npk(), 0, shared_secret)], - vec![], - vec![None], + vec![InputAccountIdentity::PrivateUnauthorized { + npk: account_keys.npk(), + ssk: shared_secret, + identifier: 0, + }], &validity_window_program.into(), ) .unwrap(); @@ -4061,7 +3848,7 @@ pub mod tests { let mut state = V03State::new_with_genesis_accounts(&[], vec![], 0).with_test_programs(); let tx = { let esk = [3; 32]; - let shared_secret = SharedSecretKey::new(&esk, &account_keys.vpk()); + let shared_secret = SharedSecretKey::new(esk, &account_keys.vpk()); let epk = EphemeralPublicKey::from_scalar(esk); let instruction = ( @@ -4071,10 +3858,11 @@ pub mod tests { let (output, proof) = circuit::execute_and_prove( vec![pre], Program::serialize_instruction(instruction).unwrap(), - vec![2], - vec![(account_keys.npk(), 0, shared_secret)], - vec![], - vec![None], + vec![InputAccountIdentity::PrivateUnauthorized { + npk: account_keys.npk(), + ssk: shared_secret, + identifier: 0, + }], &validity_window_program.into(), ) .unwrap(); @@ -4577,4 +4365,225 @@ pub mod tests { "program with spoofed caller_program_id in output should be rejected" ); } + + #[test] + fn two_private_pda_family_members_receive_and_spend() { + let funder_keys = test_public_account_keys_1(); + let alice_keys = test_private_account_keys_1(); + let alice_npk = alice_keys.npk(); + + let proxy = Program::pda_fund_spend_proxy(); + let auth_transfer = Program::authenticated_transfer_program(); + let proxy_id = proxy.id(); + let auth_transfer_id = auth_transfer.id(); + let seed = PdaSeed::new([42; 32]); + let amount: u128 = 100; + + let program_with_deps = + ProgramWithDependencies::new(proxy, [(auth_transfer_id, auth_transfer)].into()); + + 
let funder_id = funder_keys.account_id(); + let alice_pda_0_id = AccountId::for_private_pda(&proxy_id, &seed, &alice_npk, 0); + let alice_pda_1_id = AccountId::for_private_pda(&proxy_id, &seed, &alice_npk, 1); + let recipient_id = test_public_account_keys_2().account_id(); + let recipient_signing_key = test_public_account_keys_2().signing_key; + + let mut state = V03State::new_with_genesis_accounts(&[(funder_id, 500)], vec![], 0); + + let alice_pda_0_account = Account { + program_owner: auth_transfer_id, + balance: amount, + nonce: Nonce::private_account_nonce_init(&alice_pda_0_id), + ..Account::default() + }; + let alice_pda_1_account = Account { + program_owner: auth_transfer_id, + balance: amount, + nonce: Nonce::private_account_nonce_init(&alice_pda_1_id), + ..Account::default() + }; + + let alice_shared_0 = SharedSecretKey::new([10; 32], &alice_keys.vpk()); + let alice_shared_1 = SharedSecretKey::new([11; 32], &alice_keys.vpk()); + + // Fund alice_pda_0 + { + let funder_account = state.get_account_by_id(funder_id); + let funder_nonce = funder_account.nonce; + let (output, proof) = execute_and_prove( + vec![ + AccountWithMetadata::new(funder_account, true, funder_id), + AccountWithMetadata::new(Account::default(), false, alice_pda_0_id), + ], + Program::serialize_instruction((seed, amount, auth_transfer_id, true)).unwrap(), + vec![ + InputAccountIdentity::Public, + InputAccountIdentity::PrivatePdaInit { + npk: alice_npk, + ssk: alice_shared_0, + identifier: 0, + }, + ], + &program_with_deps, + ) + .unwrap(); + let message = Message::try_from_circuit_output( + vec![funder_id], + vec![funder_nonce], + vec![( + alice_npk, + alice_keys.vpk(), + EphemeralPublicKey::from_scalar([10; 32]), + )], + output, + ) + .unwrap(); + let witness_set = WitnessSet::for_message(&message, proof, &[&funder_keys.signing_key]); + state + .transition_from_privacy_preserving_transaction( + &PrivacyPreservingTransaction::new(message, witness_set), + 1, + 0, + ) + .unwrap(); + } + + // 
Fund alice_pda_1 + { + let funder_account = state.get_account_by_id(funder_id); + let funder_nonce = funder_account.nonce; + let (output, proof) = execute_and_prove( + vec![ + AccountWithMetadata::new(funder_account, true, funder_id), + AccountWithMetadata::new(Account::default(), false, alice_pda_1_id), + ], + Program::serialize_instruction((seed, amount, auth_transfer_id, true)).unwrap(), + vec![ + InputAccountIdentity::Public, + InputAccountIdentity::PrivatePdaInit { + npk: alice_npk, + ssk: alice_shared_1, + identifier: 1, + }, + ], + &program_with_deps, + ) + .unwrap(); + let message = Message::try_from_circuit_output( + vec![funder_id], + vec![funder_nonce], + vec![( + alice_npk, + alice_keys.vpk(), + EphemeralPublicKey::from_scalar([11; 32]), + )], + output, + ) + .unwrap(); + let witness_set = WitnessSet::for_message(&message, proof, &[&funder_keys.signing_key]); + state + .transition_from_privacy_preserving_transaction( + &PrivacyPreservingTransaction::new(message, witness_set), + 2, + 0, + ) + .unwrap(); + } + + let commitment_pda_0 = Commitment::new(&alice_pda_0_id, &alice_pda_0_account); + let commitment_pda_1 = Commitment::new(&alice_pda_1_id, &alice_pda_1_account); + + assert!(state.get_proof_for_commitment(&commitment_pda_0).is_some()); + assert!(state.get_proof_for_commitment(&commitment_pda_1).is_some()); + + // Alice spends alice_pda_0 into the public recipient. 
+ { + let recipient_account = state.get_account_by_id(recipient_id); + let (output, proof) = execute_and_prove( + vec![ + AccountWithMetadata::new(alice_pda_0_account, true, alice_pda_0_id), + AccountWithMetadata::new(recipient_account, true, recipient_id), + ], + Program::serialize_instruction((seed, amount, auth_transfer_id, false)).unwrap(), + vec![ + InputAccountIdentity::PrivatePdaUpdate { + ssk: alice_shared_0, + nsk: alice_keys.nsk, + membership_proof: state + .get_proof_for_commitment(&commitment_pda_0) + .expect("pda_0 must be in state"), + identifier: 0, + }, + InputAccountIdentity::Public, + ], + &program_with_deps, + ) + .unwrap(); + let message = Message::try_from_circuit_output( + vec![recipient_id], + vec![Nonce(0)], + vec![( + alice_npk, + alice_keys.vpk(), + EphemeralPublicKey::from_scalar([10; 32]), + )], + output, + ) + .unwrap(); + let witness_set = WitnessSet::for_message(&message, proof, &[&recipient_signing_key]); + state + .transition_from_privacy_preserving_transaction( + &PrivacyPreservingTransaction::new(message, witness_set), + 3, + 0, + ) + .unwrap(); + } + + // Alice spends alice_pda_1 into the same public recipient. 
+ { + let recipient_account = state.get_account_by_id(recipient_id); + let (output, proof) = execute_and_prove( + vec![ + AccountWithMetadata::new(alice_pda_1_account, true, alice_pda_1_id), + AccountWithMetadata::new(recipient_account, false, recipient_id), + ], + Program::serialize_instruction((seed, amount, auth_transfer_id, false)).unwrap(), + vec![ + InputAccountIdentity::PrivatePdaUpdate { + ssk: alice_shared_1, + nsk: alice_keys.nsk, + membership_proof: state + .get_proof_for_commitment(&commitment_pda_1) + .expect("pda_1 must be in state"), + identifier: 1, + }, + InputAccountIdentity::Public, + ], + &program_with_deps, + ) + .unwrap(); + let message = Message::try_from_circuit_output( + vec![recipient_id], + vec![], + vec![( + alice_npk, + alice_keys.vpk(), + EphemeralPublicKey::from_scalar([11; 32]), + )], + output, + ) + .unwrap(); + let witness_set = WitnessSet::for_message(&message, proof, &[]); + state + .transition_from_privacy_preserving_transaction( + &PrivacyPreservingTransaction::new(message, witness_set), + 4, + 0, + ) + .unwrap(); + } + + assert_eq!(state.get_account_by_id(recipient_id).balance, 2 * amount); + } } diff --git a/nssa/src/validated_state_diff.rs b/nssa/src/validated_state_diff.rs index 455a13a6..4bd5fb05 100644 --- a/nssa/src/validated_state_diff.rs +++ b/nssa/src/validated_state_diff.rs @@ -8,7 +8,8 @@ use nssa_core::{ BlockId, Commitment, Nullifier, PrivacyPreservingCircuitOutput, Timestamp, account::{Account, AccountId, AccountWithMetadata}, program::{ - ChainedCall, Claim, DEFAULT_PROGRAM_ID, compute_public_authorized_pdas, validate_execution, + ChainedCall, Claim, DEFAULT_PROGRAM_ID, ProgramId, compute_public_authorized_pdas, + validate_execution, }, }; @@ -100,10 +101,26 @@ impl ValidatedStateDiff { pda_seeds: vec![], }; - let mut chained_calls = VecDeque::from_iter([(initial_call, None)]); + #[expect( + clippy::items_after_statements, + reason = "More readable to keep it behind the place where it's used" + )] + 
#[derive(Debug)]
+        struct CallerData {
+            program_id: Option<ProgramId>,
+            authorized_accounts: HashSet<AccountId>,
+        }
+
+        let initial_caller_data = CallerData {
+            program_id: None,
+            authorized_accounts: signer_account_ids.iter().copied().collect(),
+        };
+
+        let mut chained_calls =
+            VecDeque::<(ChainedCall, CallerData)>::from_iter([(initial_call, initial_caller_data)]);
         let mut chain_calls_counter = 0;

-        while let Some((chained_call, caller_program_id)) = chained_calls.pop_front() {
+        while let Some((chained_call, caller_data)) = chained_calls.pop_front() {
             ensure!(
                 chain_calls_counter <= MAX_NUMBER_CHAINED_CALLS,
                 NssaError::MaxChainedCallsDepthExceeded
@@ -119,7 +136,7 @@ impl ValidatedStateDiff {
                 chained_call.program_id, chained_call.pre_states, chained_call.instruction_data
             );
             let mut program_output = program.execute(
-                caller_program_id,
+                caller_data.program_id,
                 &chained_call.pre_states,
                 &chained_call.instruction_data,
             )?;
@@ -129,10 +146,13 @@ impl ValidatedStateDiff {
             );

             let authorized_pdas =
-                compute_public_authorized_pdas(caller_program_id, &chained_call.pda_seeds);
+                compute_public_authorized_pdas(caller_data.program_id, &chained_call.pda_seeds);

+            // Account is authorized if it is either in the caller's authorized accounts or in the
+            // list of PDAs the caller has authorized.
             let is_authorized = |account_id: &AccountId| {
-                signer_account_ids.contains(account_id) || authorized_pdas.contains(account_id)
+                authorized_pdas.contains(account_id)
+                    || caller_data.authorized_accounts.contains(account_id)
             };

             for pre in &program_output.pre_states {
@@ -152,16 +172,12 @@ impl ValidatedStateDiff {
                     }
                 );

-                // Check that authorization flags are consistent with the provided ones or
-                // authorized by program through the PDA mechanism
-                let expected_is_authorized = is_authorized(&account_id);
+                // Check that the program output pre_states marked as authorized are indeed
+                // authorized.
+ let is_indeed_authorized = is_authorized(&account_id); ensure!( - pre.is_authorized == expected_is_authorized, - InvalidProgramBehaviorError::InconsistentAccountAuthorization { - account_id, - expected_authorization: expected_is_authorized, - actual_authorization: pre.is_authorized - } + !pre.is_authorized || is_indeed_authorized, + InvalidProgramBehaviorError::InvalidAccountAuthorization { account_id } ); } @@ -176,9 +192,9 @@ impl ValidatedStateDiff { // Verify that the program output's caller_program_id matches the actual caller. ensure!( - program_output.caller_program_id == caller_program_id, + program_output.caller_program_id == caller_data.program_id, InvalidProgramBehaviorError::MismatchedCallerProgramId { - expected: caller_program_id, + expected: caller_data.program_id, actual: program_output.caller_program_id, } ); @@ -205,7 +221,8 @@ impl ValidatedStateDiff { let Some(claim) = post.required_claim() else { continue; }; - let account_id = program_output.pre_states[i].account_id; + let pre = &program_output.pre_states[i]; + let account_id = pre.account_id; // The invoked program can only claim accounts with default program id. ensure!( @@ -217,7 +234,7 @@ impl ValidatedStateDiff { Claim::Authorized => { // The program can only claim accounts that were authorized by the signer. 
ensure!( - is_authorized(&account_id), + pre.is_authorized, InvalidProgramBehaviorError::ClaimedUnauthorizedAccount { account_id } ); } @@ -248,8 +265,20 @@ impl ValidatedStateDiff { state_diff.insert(pre.account_id, post.account().clone()); } + let authorized_accounts: HashSet<_> = chained_call + .pre_states + .iter() + .filter(|pre| pre.is_authorized) + .map(|pre| pre.account_id) + .collect(); for new_call in program_output.chained_calls.into_iter().rev() { - chained_calls.push_front((new_call, Some(chained_call.program_id))); + chained_calls.push_front(( + new_call, + CallerData { + program_id: Some(chained_call.program_id), + authorized_accounts: authorized_accounts.clone(), + }, + )); } chain_calls_counter = chain_calls_counter @@ -293,63 +322,62 @@ impl ValidatedStateDiff { let witness_set = &tx.witness_set; // 1. Commitments or nullifiers are non empty - if message.new_commitments.is_empty() && message.new_nullifiers.is_empty() { - return Err(NssaError::InvalidInput( + ensure!( + !message.new_commitments.is_empty() || !message.new_nullifiers.is_empty(), + NssaError::InvalidInput( "Empty commitments and empty nullifiers found in message".into(), - )); - } + ) + ); // 2. Check there are no duplicate account_ids in the public_account_ids list. 
- if n_unique(&message.public_account_ids) != message.public_account_ids.len() { - return Err(NssaError::InvalidInput( - "Duplicate account_ids found in message".into(), - )); - } + ensure!( + n_unique(&message.public_account_ids) == message.public_account_ids.len(), + NssaError::InvalidInput("Duplicate account_ids found in message".into()) + ); // Check there are no duplicate nullifiers in the new_nullifiers list - if n_unique(&message.new_nullifiers) != message.new_nullifiers.len() { - return Err(NssaError::InvalidInput( - "Duplicate nullifiers found in message".into(), - )); - } + ensure!( + n_unique(&message.new_nullifiers) == message.new_nullifiers.len(), + NssaError::InvalidInput("Duplicate nullifiers found in message".into()) + ); // Check there are no duplicate commitments in the new_commitments list - if n_unique(&message.new_commitments) != message.new_commitments.len() { - return Err(NssaError::InvalidInput( - "Duplicate commitments found in message".into(), - )); - } + ensure!( + n_unique(&message.new_commitments) == message.new_commitments.len(), + NssaError::InvalidInput("Duplicate commitments found in message".into()) + ); // 3. 
Nonce checks and Valid signatures // Check exactly one nonce is provided for each signature - if message.nonces.len() != witness_set.signatures_and_public_keys.len() { - return Err(NssaError::InvalidInput( + ensure!( + message.nonces.len() == witness_set.signatures_and_public_keys.len(), + NssaError::InvalidInput( "Mismatch between number of nonces and signatures/public keys".into(), - )); - } + ) + ); // Check the signatures are valid - if !witness_set.signatures_are_valid_for(message) { - return Err(NssaError::InvalidInput( - "Invalid signature for given message and public key".into(), - )); - } + ensure!( + witness_set.signatures_are_valid_for(message), + NssaError::InvalidInput("Invalid signature for given message and public key".into()) + ); let signer_account_ids = tx.signer_account_ids(); // Check nonces corresponds to the current nonces on the public state. for (account_id, nonce) in signer_account_ids.iter().zip(&message.nonces) { let current_nonce = state.get_account_by_id(*account_id).nonce; - if current_nonce != *nonce { - return Err(NssaError::InvalidInput("Nonce mismatch".into())); - } + ensure!( + current_nonce == *nonce, + NssaError::InvalidInput("Nonce mismatch".into()) + ); } // Verify validity window - if !message.block_validity_window.is_valid_for(block_id) - || !message.timestamp_validity_window.is_valid_for(timestamp) - { - return Err(NssaError::OutOfValidityWindow); - } + ensure!( + message.block_validity_window.is_valid_for(block_id) + && message.timestamp_validity_window.is_valid_for(timestamp), + NssaError::OutOfValidityWindow + ); // Build pre_states for proof verification let public_pre_states: Vec<_> = message diff --git a/program_methods/guest/Cargo.toml b/program_methods/guest/Cargo.toml index dc2077b7..136fb0b8 100644 --- a/program_methods/guest/Cargo.toml +++ b/program_methods/guest/Cargo.toml @@ -9,6 +9,7 @@ workspace = true [dependencies] nssa_core.workspace = true +authenticated_transfer_core.workspace = true clock_core.workspace 
= true token_core.workspace = true token_program.workspace = true @@ -16,5 +17,7 @@ amm_core.workspace = true amm_program.workspace = true ata_core.workspace = true ata_program.workspace = true +faucet_core.workspace = true +vault_core.workspace = true risc0-zkvm.workspace = true serde = { workspace = true, default-features = false } diff --git a/program_methods/guest/src/bin/authenticated_transfer.rs b/program_methods/guest/src/bin/authenticated_transfer.rs index 32b69c3a..0c8040d9 100644 --- a/program_methods/guest/src/bin/authenticated_transfer.rs +++ b/program_methods/guest/src/bin/authenticated_transfer.rs @@ -1,3 +1,4 @@ +use authenticated_transfer_core::Instruction; use nssa_core::{ account::{Account, AccountWithMetadata}, program::{ @@ -8,7 +9,6 @@ use nssa_core::{ /// Initializes a default account under the ownership of this program. fn initialize_account(pre_state: AccountWithMetadata) -> AccountPostState { let account_to_claim = AccountPostState::new_claimed(pre_state.account, Claim::Authorized); - let is_authorized = pre_state.is_authorized; // Continue only if the account to claim has default values assert!( @@ -16,9 +16,6 @@ fn initialize_account(pre_state: AccountWithMetadata) -> AccountPostState { "Account must be uninitialized" ); - // Continue only if the owner authorized this operation - assert!(is_authorized, "Account must be authorized"); - account_to_claim } @@ -28,7 +25,7 @@ fn transfer( recipient: AccountWithMetadata, balance_to_move: u128, ) -> Vec { - // Continue only if the sender has authorized this operation + // Continue only if the sender has authorized this operation. 
assert!(sender.is_authorized, "Sender must be authorized");
 
     // Create accounts post states, with updated balances
@@ -70,20 +67,24 @@ fn main() {
             self_program_id,
             caller_program_id,
             pre_states,
-            instruction: balance_to_move,
+            instruction,
         },
         instruction_words,
-    ) = read_nssa_inputs();
+    ) = read_nssa_inputs::<Instruction>();

-    let post_states = match (pre_states.as_slice(), balance_to_move) {
-        ([account_to_claim], 0) => {
-            let post = initialize_account(account_to_claim.clone());
-            vec![post]
+    let post_states = match instruction {
+        Instruction::Initialize => {
+            let [account_to_claim] = <[_; 1]>::try_from(pre_states.clone())
+                .expect("Initialize requires exactly 1 account");
+            vec![initialize_account(account_to_claim)]
         }
-        ([sender, recipient], balance_to_move) => {
-            transfer(sender.clone(), recipient.clone(), balance_to_move)
+        Instruction::Transfer {
+            amount: balance_to_move,
+        } => {
+            let [sender, recipient] = <[_; 2]>::try_from(pre_states.clone())
+                .expect("Transfer requires exactly 2 accounts");
+            transfer(sender, recipient, balance_to_move)
         }
-        _ => panic!("invalid params"),
     };

     ProgramOutput::new(
diff --git a/program_methods/guest/src/bin/faucet.rs b/program_methods/guest/src/bin/faucet.rs
new file mode 100644
index 00000000..e56330cd
--- /dev/null
+++ b/program_methods/guest/src/bin/faucet.rs
@@ -0,0 +1,71 @@
+use faucet_core::Instruction;
+use nssa_core::program::{
+    AccountPostState, ChainedCall, ProgramInput, ProgramOutput, read_nssa_inputs,
+};
+
+fn unchanged_post_states(
+    pre_states: &[nssa_core::account::AccountWithMetadata],
+) -> Vec<AccountPostState> {
+    pre_states
+        .iter()
+        .map(|pre_state| AccountPostState::new(pre_state.account.clone()))
+        .collect()
+}
+
+fn main() {
+    let (
+        ProgramInput {
+            self_program_id,
+            caller_program_id,
+            pre_states,
+            instruction,
+        },
+        instruction_words,
+    ) = read_nssa_inputs::<Instruction>();
+
+    let pre_states_clone = pre_states.clone();
+    let post_states = unchanged_post_states(&pre_states_clone);
+
+    let chained_calls = match instruction
{ + Instruction::Transfer { + vault_program_id, + recipient_id, + amount, + } => { + let [faucet, recipient_vault] = pre_states + .try_into() + .expect("Transfer requires exactly 2 accounts"); + + assert_eq!( + faucet.account_id, + faucet_core::compute_faucet_account_id(self_program_id), + "First account must be faucet PDA" + ); + + let mut faucet_for_vault = faucet; + faucet_for_vault.is_authorized = true; + + vec![ + ChainedCall::new( + vault_program_id, + vec![faucet_for_vault, recipient_vault], + &vault_core::Instruction::Transfer { + recipient_id, + amount, + }, + ) + .with_pda_seeds(vec![faucet_core::compute_faucet_seed()]), + ] + } + }; + + ProgramOutput::new( + self_program_id, + caller_program_id, + instruction_words, + pre_states_clone, + post_states, + ) + .with_chained_calls(chained_calls) + .write(); +} diff --git a/program_methods/guest/src/bin/privacy_preserving_circuit.rs b/program_methods/guest/src/bin/privacy_preserving_circuit.rs deleted file mode 100644 index 70979b7e..00000000 --- a/program_methods/guest/src/bin/privacy_preserving_circuit.rs +++ /dev/null @@ -1,794 +0,0 @@ -use std::{ - collections::{HashMap, HashSet, VecDeque, hash_map::Entry}, - convert::Infallible, -}; - -use nssa_core::{ - Commitment, CommitmentSetDigest, DUMMY_COMMITMENT_HASH, EncryptionScheme, Identifier, - MembershipProof, Nullifier, NullifierPublicKey, NullifierSecretKey, - PrivacyPreservingCircuitInput, PrivacyPreservingCircuitOutput, SharedSecretKey, - account::{Account, AccountId, AccountWithMetadata, Nonce}, - compute_digest_for_path, - program::{ - AccountPostState, BlockValidityWindow, ChainedCall, Claim, DEFAULT_PROGRAM_ID, - MAX_NUMBER_CHAINED_CALLS, PdaSeed, ProgramId, ProgramOutput, TimestampValidityWindow, - validate_execution, - }, -}; -use risc0_zkvm::{guest::env, serde::to_vec}; - -const PRIVATE_PDA_FIXED_IDENTIFIER: u128 = u128::MAX; - -/// State of the involved accounts before and after program execution. 
-struct ExecutionState { - pre_states: Vec, - post_states: HashMap, - block_validity_window: BlockValidityWindow, - timestamp_validity_window: TimestampValidityWindow, - /// Positions (in `pre_states`) of mask-3 accounts whose supplied npk has been bound to - /// their `AccountId` via a proven `AccountId::for_private_pda(program_id, seed, npk)` - /// check. - /// Two proof paths populate this set: a `Claim::Pda(seed)` in a program's `post_state` on - /// that `pre_state`, or a caller's `ChainedCall.pda_seeds` entry matching that `pre_state` - /// under the private derivation. Binding is an idempotent property, not an event: the same - /// position can legitimately be bound through both paths in the same tx (e.g. a program - /// claims a private PDA and then delegates it to a callee), and the set uses `contains`, - /// not `assert!(insert)`. After the main loop, every mask-3 position must appear in this - /// set; otherwise the npk is unbound and the circuit rejects. - private_pda_bound_positions: HashSet, - /// Across the whole transaction, each `(program_id, seed)` pair may resolve to at most one - /// `AccountId`. A seed under a program can derive a family of accounts, one public PDA and - /// one private PDA per distinct npk. Without this check, a single `pda_seeds: [S]` entry in - /// a chained call could authorize multiple family members at once (different npks under the - /// same seed) and let a callee mix balances across them. Every claim and every - /// caller-authorization resolution is recorded here, either as a new `(program, seed)` → - /// `AccountId` entry or as an equality check against the existing one, making the rule: one - /// `(program, seed)` → one account per tx. - pda_family_binding: HashMap<(ProgramId, PdaSeed), AccountId>, - /// Map from a mask-3 `pre_state`'s position in `visibility_mask` to the npk supplied for - /// that position in `private_account_keys`. 
Built once in `derive_from_outputs` by walking - /// `visibility_mask` in lock-step with `private_account_keys`, used later by the claim and - /// caller-seeds authorization paths. - private_pda_npk_by_position: HashMap, -} - -impl ExecutionState { - /// Validate program outputs and derive the overall execution state. - pub fn derive_from_outputs( - visibility_mask: &[u8], - private_account_keys: &[(NullifierPublicKey, Identifier, SharedSecretKey)], - program_id: ProgramId, - program_outputs: Vec, - ) -> Self { - // Build position → npk map for mask-3 pre_states. `private_account_keys` is consumed in - // pre_state order across all masks 1/2/3, so walk `visibility_mask` in lock-step. The - // downstream `compute_circuit_output` also consumes the same iterator and its trailing - // assertions catch an over-supply of keys; under-supply surfaces here. - let mut private_pda_npk_by_position: HashMap = HashMap::new(); - { - let mut keys_iter = private_account_keys.iter(); - for (pos, &mask) in visibility_mask.iter().enumerate() { - if matches!(mask, 1..=3) { - let (npk, _, _) = keys_iter.next().unwrap_or_else(|| { - panic!( - "private_account_keys shorter than visibility_mask demands: no key for masked position {pos} (mask {mask})" - ) - }); - if mask == 3 { - private_pda_npk_by_position.insert(pos, *npk); - } - } - } - } - - let block_valid_from = program_outputs - .iter() - .filter_map(|output| output.block_validity_window.start()) - .max(); - let block_valid_until = program_outputs - .iter() - .filter_map(|output| output.block_validity_window.end()) - .min(); - let ts_valid_from = program_outputs - .iter() - .filter_map(|output| output.timestamp_validity_window.start()) - .max(); - let ts_valid_until = program_outputs - .iter() - .filter_map(|output| output.timestamp_validity_window.end()) - .min(); - - let block_validity_window: BlockValidityWindow = (block_valid_from, block_valid_until) - .try_into() - .expect( - "There should be non empty intersection in the 
program output block validity windows", - ); - let timestamp_validity_window: TimestampValidityWindow = - (ts_valid_from, ts_valid_until) - .try_into() - .expect( - "There should be non empty intersection in the program output timestamp validity windows", - ); - - let mut execution_state = Self { - pre_states: Vec::new(), - post_states: HashMap::new(), - block_validity_window, - timestamp_validity_window, - private_pda_bound_positions: HashSet::new(), - pda_family_binding: HashMap::new(), - private_pda_npk_by_position, - }; - - let Some(first_output) = program_outputs.first() else { - panic!("No program outputs provided"); - }; - - let initial_call = ChainedCall { - program_id, - instruction_data: first_output.instruction_data.clone(), - pre_states: first_output.pre_states.clone(), - pda_seeds: Vec::new(), - }; - let mut chained_calls = VecDeque::from_iter([(initial_call, None)]); - - let mut program_outputs_iter = program_outputs.into_iter(); - let mut chain_calls_counter = 0; - - while let Some((chained_call, caller_program_id)) = chained_calls.pop_front() { - assert!( - chain_calls_counter <= MAX_NUMBER_CHAINED_CALLS, - "Max chained calls depth is exceeded" - ); - - let Some(program_output) = program_outputs_iter.next() else { - panic!("Insufficient program outputs for chained calls"); - }; - - // Check that instruction data in chained call is the instruction data in program output - assert_eq!( - chained_call.instruction_data, program_output.instruction_data, - "Mismatched instruction data between chained call and program output" - ); - - // Check that `program_output` is consistent with the execution of the corresponding - // program. 
- let program_output_words = - &to_vec(&program_output).expect("program_output must be serializable"); - env::verify(chained_call.program_id, program_output_words).unwrap_or_else( - |_: Infallible| unreachable!("Infallible error is never constructed"), - ); - - // Verify that the program output's self_program_id matches the expected program ID. - // This ensures the proof commits to which program produced the output. - assert_eq!( - program_output.self_program_id, chained_call.program_id, - "Program output self_program_id does not match chained call program_id" - ); - - // Verify that the program output's caller_program_id matches the actual caller. - // This prevents a malicious user from privately executing an internal function - // by spoofing caller_program_id (e.g. passing caller_program_id = self_program_id - // to bypass access control checks). - assert_eq!( - program_output.caller_program_id, caller_program_id, - "Program output caller_program_id does not match actual caller" - ); - - // Check that the program is well behaved. - // See the # Programs section for the definition of the `validate_execution` method. 
- let validated_execution = validate_execution( - &program_output.pre_states, - &program_output.post_states, - chained_call.program_id, - ); - if let Err(err) = validated_execution { - panic!( - "Invalid program behavior in program {:?}: {err}", - chained_call.program_id - ); - } - - for next_call in program_output.chained_calls.iter().rev() { - chained_calls.push_front((next_call.clone(), Some(chained_call.program_id))); - } - - execution_state.validate_and_sync_states( - visibility_mask, - chained_call.program_id, - caller_program_id, - &chained_call.pda_seeds, - program_output.pre_states, - program_output.post_states, - ); - chain_calls_counter = chain_calls_counter.checked_add(1).expect( - "Chain calls counter should not overflow as it checked before incrementing", - ); - } - - assert!( - program_outputs_iter.next().is_none(), - "Inner call without a chained call found", - ); - - // Every mask-3 pre_state must have had its npk bound to its account_id, either via a - // `Claim::Pda(seed)` in some program's post_state or via a caller's `pda_seeds` matching - // the private derivation. An unbound mask-3 pre_state has no cryptographic link between - // the supplied npk and the account_id, and must be rejected. 
- for (pos, &mask) in visibility_mask.iter().enumerate() { - if mask == 3 { - assert!( - execution_state.private_pda_bound_positions.contains(&pos), - "private PDA pre_state at position {pos} has no proven (seed, npk) binding via Claim::Pda or caller pda_seeds" - ); - } - } - - // Check that all modified uninitialized accounts were claimed - for (account_id, post) in execution_state - .pre_states - .iter() - .filter(|a| a.account.program_owner == DEFAULT_PROGRAM_ID) - .map(|a| { - let post = execution_state - .post_states - .get(&a.account_id) - .expect("Post state must exist for pre state"); - (a, post) - }) - .filter(|(pre_default, post)| pre_default.account != **post) - .map(|(pre, post)| (pre.account_id, post)) - { - assert_ne!( - post.program_owner, DEFAULT_PROGRAM_ID, - "Account {account_id} was modified but not claimed" - ); - } - - execution_state - } - - /// Validate program pre and post states and populate the execution state. - fn validate_and_sync_states( - &mut self, - visibility_mask: &[u8], - program_id: ProgramId, - caller_program_id: Option, - caller_pda_seeds: &[PdaSeed], - pre_states: Vec, - post_states: Vec, - ) { - for (pre, mut post) in pre_states.into_iter().zip(post_states) { - let pre_account_id = pre.account_id; - let pre_is_authorized = pre.is_authorized; - let post_states_entry = self.post_states.entry(pre.account_id); - match &post_states_entry { - Entry::Occupied(occupied) => { - #[expect( - clippy::shadow_unrelated, - reason = "Shadowing is intentional to use all fields" - )] - let AccountWithMetadata { - account: pre_account, - account_id: pre_account_id, - is_authorized: pre_is_authorized, - } = pre; - - // Ensure that new pre state is the same as known post state - assert_eq!( - occupied.get(), - &pre_account, - "Inconsistent pre state for account {pre_account_id}", - ); - - let (previous_is_authorized, pre_state_position) = self - .pre_states - .iter() - .enumerate() - .find(|(_, acc)| acc.account_id == pre_account_id) - 
.map_or_else( - || panic!( - "Pre state must exist in execution state for account {pre_account_id}", - ), - |(pos, acc)| (acc.is_authorized, pos) - ); - - let is_authorized = resolve_authorization_and_record_bindings( - &mut self.pda_family_binding, - &mut self.private_pda_bound_positions, - &self.private_pda_npk_by_position, - pre_account_id, - pre_state_position, - caller_program_id, - caller_pda_seeds, - previous_is_authorized, - ); - - assert_eq!( - pre_is_authorized, is_authorized, - "Inconsistent authorization for account {pre_account_id}", - ); - } - Entry::Vacant(_) => { - // Pre state for the initial call - self.pre_states.push(pre); - } - } - - if let Some(claim) = post.required_claim() { - // The invoked program can only claim accounts with default program id. - assert_eq!( - post.account().program_owner, - DEFAULT_PROGRAM_ID, - "Cannot claim an initialized account {pre_account_id}" - ); - - let pre_state_position = self - .pre_states - .iter() - .position(|acc| acc.account_id == pre_account_id) - .expect("Pre state must exist at this point"); - - let mask = visibility_mask[pre_state_position]; - match mask { - 0 => match claim { - Claim::Authorized => { - // Note: no need to check authorized pdas because we have already - // checked consistency of authorization above. 
- assert!( - pre_is_authorized, - "Cannot claim unauthorized account {pre_account_id}" - ); - } - Claim::Pda(seed) => { - let pda = AccountId::for_public_pda(&program_id, &seed); - assert_eq!( - pre_account_id, pda, - "Invalid PDA claim for account {pre_account_id} which does not match derived PDA {pda}" - ); - assert_family_binding( - &mut self.pda_family_binding, - program_id, - seed, - pre_account_id, - ); - } - }, - 3 => { - match claim { - Claim::Authorized => { - assert!( - pre_is_authorized, - "Cannot claim unauthorized private PDA {pre_account_id}" - ); - } - Claim::Pda(seed) => { - let npk = self - .private_pda_npk_by_position - .get(&pre_state_position) - .expect("private PDA pre_state must have an npk in the position map"); - let pda = AccountId::for_private_pda(&program_id, &seed, npk); - assert_eq!( - pre_account_id, pda, - "Invalid private PDA claim for account {pre_account_id}" - ); - self.private_pda_bound_positions.insert(pre_state_position); - assert_family_binding( - &mut self.pda_family_binding, - program_id, - seed, - pre_account_id, - ); - } - } - } - _ => { - // Mask 1/2: standard private accounts don't enforce the claim semantics. - // Unauthorized private claiming is intentionally allowed since operating - // these accounts requires the npk/nsk keypair anyway. - } - } - - post.account_mut().program_owner = program_id; - } - - post_states_entry.insert_entry(post.into_account()); - } - } - - /// Get an iterator over pre and post states of each account involved in the execution. - pub fn into_states_iter( - mut self, - ) -> impl ExactSizeIterator { - self.pre_states.into_iter().map(move |pre| { - let post = self - .post_states - .remove(&pre.account_id) - .expect("Account from pre states should exist in state diff"); - (pre, post) - }) - } -} - -/// Record or re-verify the `(program_id, seed) → account_id` family binding for the -/// transaction. 
Any claim or caller-seed authorization that resolves a `pre_state` under -/// `(program_id, seed)` must agree with every prior resolution of the same pair; otherwise a -/// single `pda_seeds: [seed]` entry could authorize multiple private-PDA family members at -/// once (different npks under the same seed) and let a callee mix balances across them. Free -/// function so callers can pass `&mut self.pda_family_binding` without holding a borrow on -/// the surrounding struct's other fields. -fn assert_family_binding( - bindings: &mut HashMap<(ProgramId, PdaSeed), AccountId>, - program_id: ProgramId, - seed: PdaSeed, - account_id: AccountId, -) { - match bindings.entry((program_id, seed)) { - Entry::Vacant(e) => { - e.insert(account_id); - } - Entry::Occupied(e) => { - assert_eq!( - *e.get(), - account_id, - "Two different accounts resolved under the same (program, seed) in one transaction: existing {}, new {account_id}", - e.get() - ); - } - } -} - -/// Resolve the authorization state of a `pre_state` seen again in a chained call and record -/// any resulting bindings. Returns `true` if the `pre_state` is authorized through either a -/// previously-seen authorization or a matching caller seed (under the public or private -/// derivation). When a caller seed matches, also records the `(caller, seed) → account_id` -/// family binding and, for the private form, marks the position in -/// `private_pda_bound_positions`. Only reachable when `caller_program_id.is_some()`, -/// top-level flows have no caller-emitted seeds, so binding at top level must come from the -/// claim path. Free function so callers can pass individual `&mut self.*` field borrows -/// without holding a borrow on the surrounding struct's other fields. 
-#[expect( - clippy::too_many_arguments, - reason = "breaking out a context struct does not buy us anything here" -)] -fn resolve_authorization_and_record_bindings( - pda_family_binding: &mut HashMap<(ProgramId, PdaSeed), AccountId>, - private_pda_bound_positions: &mut HashSet, - private_pda_npk_by_position: &HashMap, - pre_account_id: AccountId, - pre_state_position: usize, - caller_program_id: Option, - caller_pda_seeds: &[PdaSeed], - previous_is_authorized: bool, -) -> bool { - let matched_caller_seed: Option<(PdaSeed, bool, ProgramId)> = - caller_program_id.and_then(|caller| { - caller_pda_seeds.iter().find_map(|seed| { - if AccountId::for_public_pda(&caller, seed) == pre_account_id { - return Some((*seed, false, caller)); - } - if let Some(npk) = private_pda_npk_by_position.get(&pre_state_position) - && AccountId::for_private_pda(&caller, seed, npk) == pre_account_id - { - return Some((*seed, true, caller)); - } - None - }) - }); - - if let Some((seed, is_private_form, caller)) = matched_caller_seed { - assert_family_binding(pda_family_binding, caller, seed, pre_account_id); - if is_private_form { - private_pda_bound_positions.insert(pre_state_position); - } - } - - previous_is_authorized || matched_caller_seed.is_some() -} - -fn compute_circuit_output( - execution_state: ExecutionState, - visibility_mask: &[u8], - private_account_keys: &[(NullifierPublicKey, Identifier, SharedSecretKey)], - private_account_nsks: &[NullifierSecretKey], - private_account_membership_proofs: &[Option], -) -> PrivacyPreservingCircuitOutput { - let mut output = PrivacyPreservingCircuitOutput { - public_pre_states: Vec::new(), - public_post_states: Vec::new(), - ciphertexts: Vec::new(), - new_commitments: Vec::new(), - new_nullifiers: Vec::new(), - block_validity_window: execution_state.block_validity_window, - timestamp_validity_window: execution_state.timestamp_validity_window, - }; - - let states_iter = execution_state.into_states_iter(); - assert_eq!( - visibility_mask.len(), - 
states_iter.len(), - "Invalid visibility mask length" - ); - - let mut private_keys_iter = private_account_keys.iter(); - let mut private_nsks_iter = private_account_nsks.iter(); - let mut private_membership_proofs_iter = private_account_membership_proofs.iter(); - - let mut output_index = 0; - for (account_visibility_mask, (pre_state, post_state)) in - visibility_mask.iter().copied().zip(states_iter) - { - match account_visibility_mask { - 0 => { - // Public account - output.public_pre_states.push(pre_state); - output.public_post_states.push(post_state); - } - 1 | 2 => { - let Some((npk, identifier, shared_secret)) = private_keys_iter.next() else { - panic!("Missing private account key"); - }; - assert_ne!( - *identifier, PRIVATE_PDA_FIXED_IDENTIFIER, - "Identifier must be different from {PRIVATE_PDA_FIXED_IDENTIFIER}. This is reserved for private PDA." - ); - - let account_id = AccountId::from((npk, *identifier)); - - assert_eq!(account_id, pre_state.account_id, "AccountId mismatch"); - - let (new_nullifier, new_nonce) = if account_visibility_mask == 1 { - // Private account with authentication - - let Some(nsk) = private_nsks_iter.next() else { - panic!("Missing private account nullifier secret key"); - }; - - // Verify the nullifier public key - assert_eq!( - npk, - &NullifierPublicKey::from(nsk), - "Nullifier public key mismatch" - ); - - // Check pre_state authorization - assert!( - pre_state.is_authorized, - "Pre-state not authorized for authenticated private account" - ); - - let Some(membership_proof_opt) = private_membership_proofs_iter.next() else { - panic!("Missing membership proof"); - }; - - let new_nullifier = compute_nullifier_and_set_digest( - membership_proof_opt.as_ref(), - &pre_state.account, - &account_id, - nsk, - ); - - let new_nonce = pre_state.account.nonce.private_account_nonce_increment(nsk); - - (new_nullifier, new_nonce) - } else { - // Private account without authentication - - assert_eq!( - pre_state.account, - Account::default(), - 
"Found new private account with non default values", - ); - - assert!( - !pre_state.is_authorized, - "Found new private account marked as authorized." - ); - - let Some(membership_proof_opt) = private_membership_proofs_iter.next() else { - panic!("Missing membership proof"); - }; - - assert!( - membership_proof_opt.is_none(), - "Membership proof must be None for unauthorized accounts" - ); - - let nullifier = Nullifier::for_account_initialization(&account_id); - - let new_nonce = Nonce::private_account_nonce_init(&account_id); - - ((nullifier, DUMMY_COMMITMENT_HASH), new_nonce) - }; - output.new_nullifiers.push(new_nullifier); - - // Update post-state with new nonce - let mut post_with_updated_nonce = post_state; - post_with_updated_nonce.nonce = new_nonce; - - // Compute commitment - let commitment_post = Commitment::new(&account_id, &post_with_updated_nonce); - - // Encrypt and push post state - let encrypted_account = EncryptionScheme::encrypt( - &post_with_updated_nonce, - *identifier, - shared_secret, - &commitment_post, - output_index, - ); - - output.new_commitments.push(commitment_post); - output.ciphertexts.push(encrypted_account); - output_index = output_index - .checked_add(1) - .unwrap_or_else(|| panic!("Too many private accounts, output index overflow")); - } - 3 => { - // Private PDA account. The supplied npk has already been bound to - // `pre_state.account_id` upstream in `validate_and_sync_states`, either via a - // `Claim::Pda(seed)` match or via a caller `pda_seeds` match, both of which - // assert `AccountId::for_private_pda(owner, seed, npk) == account_id`. The - // post-loop assertion in `derive_from_outputs` (see the - // `private_pda_bound_positions` check) guarantees that every mask-3 - // position has been through at least one such binding, so this - // branch can safely use the wallet npk without re-verifying. 
- let Some((npk, identifier, shared_secret)) = private_keys_iter.next() else { - panic!("Missing private account key"); - }; - - assert_eq!( - *identifier, PRIVATE_PDA_FIXED_IDENTIFIER, - "Identifier for private PDAs must be {PRIVATE_PDA_FIXED_IDENTIFIER}." - ); - - let (new_nullifier, new_nonce) = if pre_state.is_authorized { - // Existing private PDA with authentication (like mask 1) - let Some(nsk) = private_nsks_iter.next() else { - panic!("Missing private account nullifier secret key"); - }; - assert_eq!( - npk, - &NullifierPublicKey::from(nsk), - "Nullifier public key mismatch" - ); - - let Some(membership_proof_opt) = private_membership_proofs_iter.next() else { - panic!("Missing membership proof"); - }; - - let new_nullifier = compute_nullifier_and_set_digest( - membership_proof_opt.as_ref(), - &pre_state.account, - &pre_state.account_id, - nsk, - ); - let new_nonce = pre_state.account.nonce.private_account_nonce_increment(nsk); - (new_nullifier, new_nonce) - } else { - // New private PDA (like mask 2). The default + unauthorized requirement - // here rules out use cases like a fully-private multisig, which would need - // a non-default, non-authorized private PDA input account. - // TODO(private-pdas-pr-2/3): relax this once the wallet can supply a - // `(seed, owner)` side input so the npk-to-account_id binding can be - // re-verified for an existing private PDA without a `Claim::Pda` or caller - // `pda_seeds` match. 
- assert_eq!( - pre_state.account, - Account::default(), - "New private PDA must be default" - ); - - let Some(membership_proof_opt) = private_membership_proofs_iter.next() else { - panic!("Missing membership proof"); - }; - assert!( - membership_proof_opt.is_none(), - "Membership proof must be None for new accounts" - ); - - let nullifier = Nullifier::for_account_initialization(&pre_state.account_id); - let new_nonce = Nonce::private_account_nonce_init(&pre_state.account_id); - ((nullifier, DUMMY_COMMITMENT_HASH), new_nonce) - }; - output.new_nullifiers.push(new_nullifier); - - let mut post_with_updated_nonce = post_state; - post_with_updated_nonce.nonce = new_nonce; - - let commitment_post = - Commitment::new(&pre_state.account_id, &post_with_updated_nonce); - - let encrypted_account = EncryptionScheme::encrypt( - &post_with_updated_nonce, - PRIVATE_PDA_FIXED_IDENTIFIER, - shared_secret, - &commitment_post, - output_index, - ); - - output.new_commitments.push(commitment_post); - output.ciphertexts.push(encrypted_account); - output_index = output_index - .checked_add(1) - .unwrap_or_else(|| panic!("Too many private accounts, output index overflow")); - } - _ => panic!("Invalid visibility mask value"), - } - } - - assert!( - private_keys_iter.next().is_none(), - "Too many private account keys" - ); - - assert!( - private_nsks_iter.next().is_none(), - "Too many private account nullifier secret keys" - ); - - assert!( - private_membership_proofs_iter.next().is_none(), - "Too many private account membership proofs" - ); - - output -} - -fn compute_nullifier_and_set_digest( - membership_proof_opt: Option<&MembershipProof>, - pre_account: &Account, - account_id: &AccountId, - nsk: &NullifierSecretKey, -) -> (Nullifier, CommitmentSetDigest) { - membership_proof_opt.as_ref().map_or_else( - || { - assert_eq!( - *pre_account, - Account::default(), - "Found new private account with non default values" - ); - - // Compute initialization nullifier - let nullifier = 
Nullifier::for_account_initialization(account_id); - (nullifier, DUMMY_COMMITMENT_HASH) - }, - |membership_proof| { - // Compute commitment set digest associated with provided auth path - let commitment_pre = Commitment::new(account_id, pre_account); - let set_digest = compute_digest_for_path(&commitment_pre, membership_proof); - - // Compute update nullifier - let nullifier = Nullifier::for_account_update(&commitment_pre, nsk); - (nullifier, set_digest) - }, - ) -} - -fn main() { - let PrivacyPreservingCircuitInput { - program_outputs, - visibility_mask, - private_account_keys, - private_account_nsks, - private_account_membership_proofs, - program_id, - } = env::read(); - - let execution_state = ExecutionState::derive_from_outputs( - &visibility_mask, - &private_account_keys, - program_id, - program_outputs, - ); - - let output = compute_circuit_output( - execution_state, - &visibility_mask, - &private_account_keys, - &private_account_nsks, - &private_account_membership_proofs, - ); - - env::commit(&output); -} diff --git a/program_methods/guest/src/bin/privacy_preserving_circuit/execution_state.rs b/program_methods/guest/src/bin/privacy_preserving_circuit/execution_state.rs new file mode 100644 index 00000000..aad1bc1c --- /dev/null +++ b/program_methods/guest/src/bin/privacy_preserving_circuit/execution_state.rs @@ -0,0 +1,529 @@ +use std::{ + collections::{HashMap, VecDeque, hash_map::Entry}, + convert::Infallible, +}; + +use nssa_core::{ + Identifier, InputAccountIdentity, NullifierPublicKey, + account::{Account, AccountId, AccountWithMetadata}, + program::{ + AccountPostState, BlockValidityWindow, ChainedCall, Claim, DEFAULT_PROGRAM_ID, + MAX_NUMBER_CHAINED_CALLS, PdaSeed, ProgramId, ProgramOutput, TimestampValidityWindow, + validate_execution, + }, +}; +use risc0_zkvm::{guest::env, serde::to_vec}; + +/// State of the involved accounts before and after program execution. 
+pub struct ExecutionState { + pre_states: Vec, + post_states: HashMap, + block_validity_window: BlockValidityWindow, + timestamp_validity_window: TimestampValidityWindow, + /// Positions (in `pre_states`) of private-PDA accounts whose supplied npk has been bound to + /// their `AccountId` via a proven `AccountId::for_private_pda(program_id, seed, npk, + /// identifier)` check. + /// Two proof paths populate this set: a `Claim::Pda(seed)` in a program's `post_state` on + /// that `pre_state`, or a caller's `ChainedCall.pda_seeds` entry matching that `pre_state` + /// under the private derivation. Binding is an idempotent property, not an event: the same + /// position can legitimately be bound through both paths in the same tx (e.g. a program + /// claims a private PDA and then delegates it to a callee), and the map uses `contains_key`, + /// not `assert!(insert)`. After the main loop, every private-PDA position must appear in this + /// map; otherwise the npk is unbound and the circuit rejects. + /// The stored `(ProgramId, PdaSeed)` is the owner program and seed, used in + /// `compute_circuit_output` to construct `PrivateAccountKind::Pda { program_id, seed, + /// identifier }`. + private_pda_bound_positions: HashMap, + /// Across the whole transaction, each `(program_id, seed)` pair may resolve to at most one + /// `AccountId`. A seed under a program can derive a family of accounts, one public PDA and + /// one private PDA per distinct npk. Without this check, a single `pda_seeds: [S]` entry in + /// a chained call could authorize multiple family members at once (different npks under the + /// same seed) and let a callee mix balances across them. Every claim and every + /// caller-authorization resolution is recorded here, either as a new `(program, seed)` → + /// `AccountId` entry or as an equality check against the existing one, making the rule: one + /// `(program, seed)` → one account per tx. 
+ pda_family_binding: HashMap<(ProgramId, PdaSeed), AccountId>, + /// Map from a private-PDA `pre_state`'s position in `account_identities` to the (npk, + /// identifier) supplied for that position. Built once in `derive_from_outputs` by walking + /// `account_identities` and consulting `npk_if_private_pda`. Used later by the claim and + /// caller-seeds authorization paths to verify + /// `AccountId::for_private_pda(program_id, seed, npk, identifier) == pre_state.account_id`. + private_pda_npk_by_position: HashMap, +} + +impl ExecutionState { + /// Validate program outputs and derive the overall execution state. + pub fn derive_from_outputs( + account_identities: &[InputAccountIdentity], + program_id: ProgramId, + program_outputs: Vec, + ) -> Self { + // Build position → (npk, identifier) map for private-PDA pre_states, indexed by position + // in `account_identities`. The vec is documented as 1:1 with the program's pre_state + // order, so position here matches `pre_state_position` used downstream in + // `validate_and_sync_states`. 
+ let mut private_pda_npk_by_position: HashMap = + HashMap::new(); + for (pos, account_identity) in account_identities.iter().enumerate() { + if let Some((npk, identifier)) = account_identity.npk_if_private_pda() { + private_pda_npk_by_position.insert(pos, (npk, identifier)); + } + } + + let block_valid_from = program_outputs + .iter() + .filter_map(|output| output.block_validity_window.start()) + .max(); + let block_valid_until = program_outputs + .iter() + .filter_map(|output| output.block_validity_window.end()) + .min(); + let ts_valid_from = program_outputs + .iter() + .filter_map(|output| output.timestamp_validity_window.start()) + .max(); + let ts_valid_until = program_outputs + .iter() + .filter_map(|output| output.timestamp_validity_window.end()) + .min(); + + let block_validity_window: BlockValidityWindow = (block_valid_from, block_valid_until) + .try_into() + .expect( + "There should be non empty intersection in the program output block validity windows", + ); + let timestamp_validity_window: TimestampValidityWindow = + (ts_valid_from, ts_valid_until) + .try_into() + .expect( + "There should be non empty intersection in the program output timestamp validity windows", + ); + + let mut execution_state = Self { + pre_states: Vec::new(), + post_states: HashMap::new(), + block_validity_window, + timestamp_validity_window, + private_pda_bound_positions: HashMap::new(), + pda_family_binding: HashMap::new(), + private_pda_npk_by_position, + }; + + let Some(first_output) = program_outputs.first() else { + panic!("No program outputs provided"); + }; + + let initial_call = ChainedCall { + program_id, + instruction_data: first_output.instruction_data.clone(), + pre_states: first_output.pre_states.clone(), + pda_seeds: Vec::new(), + }; + let mut chained_calls = VecDeque::from_iter([(initial_call, None)]); + + let mut program_outputs_iter = program_outputs.into_iter(); + let mut chain_calls_counter = 0; + + while let Some((chained_call, caller_program_id)) = 
chained_calls.pop_front() { + assert!( + chain_calls_counter <= MAX_NUMBER_CHAINED_CALLS, + "Max chained calls depth is exceeded" + ); + + let Some(program_output) = program_outputs_iter.next() else { + panic!("Insufficient program outputs for chained calls"); + }; + + // Check that instruction data in chained call is the instruction data in program output + assert_eq!( + chained_call.instruction_data, program_output.instruction_data, + "Mismatched instruction data between chained call and program output" + ); + + // Check that `program_output` is consistent with the execution of the corresponding + // program. + let program_output_words = + &to_vec(&program_output).expect("program_output must be serializable"); + env::verify(chained_call.program_id, program_output_words).unwrap_or_else( + |_: Infallible| unreachable!("Infallible error is never constructed"), + ); + + // Verify that the program output's self_program_id matches the expected program ID. + // This ensures the proof commits to which program produced the output. + assert_eq!( + program_output.self_program_id, chained_call.program_id, + "Program output self_program_id does not match chained call program_id" + ); + + // Verify that the program output's caller_program_id matches the actual caller. + // This prevents a malicious user from privately executing an internal function + // by spoofing caller_program_id (e.g. passing caller_program_id = self_program_id + // to bypass access control checks). + assert_eq!( + program_output.caller_program_id, caller_program_id, + "Program output caller_program_id does not match actual caller" + ); + + // Check that the program is well behaved. + // See the # Programs section for the definition of the `validate_execution` method. 
+ let validated_execution = validate_execution( + &program_output.pre_states, + &program_output.post_states, + chained_call.program_id, + ); + if let Err(err) = validated_execution { + panic!( + "Invalid program behavior in program {:?}: {err}", + chained_call.program_id + ); + } + + for next_call in program_output.chained_calls.iter().rev() { + chained_calls.push_front((next_call.clone(), Some(chained_call.program_id))); + } + + execution_state.validate_and_sync_states( + account_identities, + chained_call.program_id, + caller_program_id, + &chained_call.pda_seeds, + program_output.pre_states, + program_output.post_states, + ); + chain_calls_counter = chain_calls_counter.checked_add(1).expect( + "Chain calls counter should not overflow as it checked before incrementing", + ); + } + + assert!( + program_outputs_iter.next().is_none(), + "Inner call without a chained call found", + ); + + // Every private-PDA pre_state must have had its npk bound to its account_id, either via + // a `Claim::Pda(seed)` in some program's post_state or via a caller's `pda_seeds` + // matching the private derivation. An unbound private-PDA pre_state has no + // cryptographic link between the supplied npk and the account_id, and must be rejected. 
+ for (pos, account_identity) in account_identities.iter().enumerate() { + if account_identity.is_private_pda() { + assert!( + execution_state + .private_pda_bound_positions + .contains_key(&pos), + "private PDA pre_state at position {pos} has no proven (seed, npk) binding via Claim::Pda or caller pda_seeds" + ); + } + } + + // Check that all modified uninitialized accounts were claimed + for (account_id, post) in execution_state + .pre_states + .iter() + .filter(|a| a.account.program_owner == DEFAULT_PROGRAM_ID) + .map(|a| { + let post = execution_state + .post_states + .get(&a.account_id) + .expect("Post state must exist for pre state"); + (a, post) + }) + .filter(|(pre_default, post)| pre_default.account != **post) + .map(|(pre, post)| (pre.account_id, post)) + { + assert_ne!( + post.program_owner, DEFAULT_PROGRAM_ID, + "Account {account_id} was modified but not claimed" + ); + } + + execution_state + } + + /// Validate program pre and post states and populate the execution state. + fn validate_and_sync_states( + &mut self, + account_identities: &[InputAccountIdentity], + program_id: ProgramId, + caller_program_id: Option, + caller_pda_seeds: &[PdaSeed], + pre_states: Vec, + post_states: Vec, + ) { + for (pre, mut post) in pre_states.into_iter().zip(post_states) { + let pre_account_id = pre.account_id; + let pre_is_authorized = pre.is_authorized; + let post_states_entry = self.post_states.entry(pre.account_id); + match &post_states_entry { + Entry::Occupied(occupied) => { + #[expect( + clippy::shadow_unrelated, + reason = "Shadowing is intentional to use all fields" + )] + let AccountWithMetadata { + account: pre_account, + account_id: pre_account_id, + is_authorized: pre_is_authorized, + } = pre; + + // Ensure that new pre state is the same as known post state + assert_eq!( + occupied.get(), + &pre_account, + "Inconsistent pre state for account {pre_account_id}", + ); + + let (previous_is_authorized, pre_state_position) = self + .pre_states + .iter() + 
.enumerate() + .find(|(_, acc)| acc.account_id == pre_account_id) + .map_or_else( + || panic!( + "Pre state must exist in execution state for account {pre_account_id}", + ), + |(pos, acc)| (acc.is_authorized, pos) + ); + + let is_authorized = resolve_authorization_and_record_bindings( + &mut self.pda_family_binding, + &mut self.private_pda_bound_positions, + &self.private_pda_npk_by_position, + pre_account_id, + pre_state_position, + caller_program_id, + caller_pda_seeds, + previous_is_authorized, + ); + + assert_eq!( + pre_is_authorized, is_authorized, + "Inconsistent authorization for account {pre_account_id}", + ); + } + Entry::Vacant(_) => { + // Pre state for the initial call + self.pre_states.push(pre); + } + } + + if let Some(claim) = post.required_claim() { + // The invoked program can only claim accounts with default program id. + assert_eq!( + post.account().program_owner, + DEFAULT_PROGRAM_ID, + "Cannot claim an initialized account {pre_account_id}" + ); + + let pre_state_position = self + .pre_states + .iter() + .position(|acc| acc.account_id == pre_account_id) + .expect("Pre state must exist at this point"); + + let account_identity = &account_identities[pre_state_position]; + if account_identity.is_public() { + match claim { + Claim::Authorized => { + // Note: no need to check authorized pdas because we have already + // checked consistency of authorization above. 
+ assert!( + pre_is_authorized, + "Cannot claim unauthorized account {pre_account_id}" + ); + } + Claim::Pda(seed) => { + let pda = AccountId::for_public_pda(&program_id, &seed); + assert_eq!( + pre_account_id, pda, + "Invalid PDA claim for account {pre_account_id} which does not match derived PDA {pda}" + ); + assert_family_binding( + &mut self.pda_family_binding, + program_id, + seed, + pre_account_id, + ); + } + } + } else if account_identity.is_private_pda() { + match claim { + Claim::Authorized => { + assert!( + pre_is_authorized, + "Cannot claim unauthorized private PDA {pre_account_id}" + ); + } + Claim::Pda(seed) => { + let (npk, identifier) = self + .private_pda_npk_by_position + .get(&pre_state_position) + .expect( + "private PDA pre_state must have an npk in the position map", + ); + let pda = + AccountId::for_private_pda(&program_id, &seed, npk, *identifier); + assert_eq!( + pre_account_id, pda, + "Invalid private PDA claim for account {pre_account_id}" + ); + bind_private_pda_position( + &mut self.private_pda_bound_positions, + pre_state_position, + program_id, + seed, + ); + assert_family_binding( + &mut self.pda_family_binding, + program_id, + seed, + pre_account_id, + ); + } + } + } else { + // Standalone private accounts: don't enforce the claim semantics. + // Unauthorized private claiming is intentionally allowed since operating + // these accounts requires the npk/nsk keypair anyway. + } + + post.account_mut().program_owner = program_id; + } + + post_states_entry.insert_entry(post.into_account()); + } + } + + /// Consume self and yield the validity windows, the per-position PDA seed/program map + /// (recorded during `derive_from_outputs`), and an iterator over pre and post states of each + /// account involved in the execution. Returning everything together keeps the + /// fields module-private rather than forcing them visible to downstream consumers. 
+ #[expect( + clippy::type_complexity, + reason = "tuple bundles four exit values from one consuming call so all fields stay private; a struct would only rename it" + )] + pub fn into_parts( + mut self, + ) -> ( + BlockValidityWindow, + TimestampValidityWindow, + HashMap, + impl ExactSizeIterator, + ) { + let block_validity_window = self.block_validity_window; + let timestamp_validity_window = self.timestamp_validity_window; + let pda_seed_by_position = std::mem::take(&mut self.private_pda_bound_positions); + let states_iter = self.pre_states.into_iter().map(move |pre| { + let post = self + .post_states + .remove(&pre.account_id) + .expect("Account from pre states should exist in state diff"); + (pre, post) + }); + ( + block_validity_window, + timestamp_validity_window, + pda_seed_by_position, + states_iter, + ) + } +} + +/// Record or re-verify the `(program_id, seed) → account_id` family binding for the +/// transaction. Any claim or caller-seed authorization that resolves a `pre_state` under +/// `(program_id, seed)` must agree with every prior resolution of the same pair; otherwise a +/// single `pda_seeds: [seed]` entry could authorize multiple private-PDA family members at +/// once (different npks under the same seed) and let a callee mix balances across them. Free +/// function so callers can pass `&mut self.pda_family_binding` without holding a borrow on +/// the surrounding struct's other fields. 
+fn assert_family_binding( + bindings: &mut HashMap<(ProgramId, PdaSeed), AccountId>, + program_id: ProgramId, + seed: PdaSeed, + account_id: AccountId, +) { + match bindings.entry((program_id, seed)) { + Entry::Vacant(e) => { + e.insert(account_id); + } + Entry::Occupied(e) => { + assert_eq!( + *e.get(), + account_id, + "Two different accounts resolved under the same (program, seed) in one transaction: existing {}, new {account_id}", + e.get() + ); + } + } +} + +fn bind_private_pda_position( + map: &mut HashMap, + position: usize, + program_id: ProgramId, + seed: PdaSeed, +) { + match map.entry(position) { + Entry::Occupied(e) => assert_eq!( + *e.get(), + (program_id, seed), + "Duplicate binding at position {position}: conflicting (program_id, seed)" + ), + Entry::Vacant(e) => { + e.insert((program_id, seed)); + } + } +} + +/// Resolve the authorization state of a `pre_state` seen again in a chained call and record +/// any resulting bindings. Returns `true` if the `pre_state` is authorized through either a +/// previously-seen authorization or a matching caller seed (under the public or private +/// derivation). When a caller seed matches, also records the `(caller, seed) → account_id` +/// family binding and, for the private form, marks the position in +/// `private_pda_bound_positions`. Only reachable when `caller_program_id.is_some()`, +/// top-level flows have no caller-emitted seeds, so binding at top level must come from the +/// claim path. Free function so callers can pass individual `&mut self.*` field borrows +/// without holding a borrow on the surrounding struct's other fields. 
+#[expect( + clippy::too_many_arguments, + reason = "breaking out a context struct does not buy us anything here" +)] +fn resolve_authorization_and_record_bindings( + pda_family_binding: &mut HashMap<(ProgramId, PdaSeed), AccountId>, + private_pda_bound_positions: &mut HashMap, + private_pda_npk_by_position: &HashMap, + pre_account_id: AccountId, + pre_state_position: usize, + caller_program_id: Option, + caller_pda_seeds: &[PdaSeed], + previous_is_authorized: bool, +) -> bool { + let matched_caller_seed: Option<(PdaSeed, bool, ProgramId)> = + caller_program_id.and_then(|caller| { + caller_pda_seeds.iter().find_map(|seed| { + if AccountId::for_public_pda(&caller, seed) == pre_account_id { + return Some((*seed, false, caller)); + } + if let Some((npk, identifier)) = + private_pda_npk_by_position.get(&pre_state_position) + && AccountId::for_private_pda(&caller, seed, npk, *identifier) == pre_account_id + { + return Some((*seed, true, caller)); + } + None + }) + }); + + if let Some((seed, is_private_form, caller)) = matched_caller_seed { + assert_family_binding(pda_family_binding, caller, seed, pre_account_id); + if is_private_form { + bind_private_pda_position( + private_pda_bound_positions, + pre_state_position, + caller, + seed, + ); + } + } + + previous_is_authorized || matched_caller_seed.is_some() +} diff --git a/program_methods/guest/src/bin/privacy_preserving_circuit/main.rs b/program_methods/guest/src/bin/privacy_preserving_circuit/main.rs new file mode 100644 index 00000000..9441c27e --- /dev/null +++ b/program_methods/guest/src/bin/privacy_preserving_circuit/main.rs @@ -0,0 +1,23 @@ +use nssa_core::PrivacyPreservingCircuitInput; +use risc0_zkvm::guest::env; + +mod execution_state; +mod output; + +fn main() { + let PrivacyPreservingCircuitInput { + program_outputs, + account_identities, + program_id, + } = env::read(); + + let execution_state = execution_state::ExecutionState::derive_from_outputs( + &account_identities, + program_id, + program_outputs, + ); 
+ + let output = output::compute_circuit_output(execution_state, &account_identities); + + env::commit(&output); +} diff --git a/program_methods/guest/src/bin/privacy_preserving_circuit/output.rs b/program_methods/guest/src/bin/privacy_preserving_circuit/output.rs new file mode 100644 index 00000000..f5a6d1f9 --- /dev/null +++ b/program_methods/guest/src/bin/privacy_preserving_circuit/output.rs @@ -0,0 +1,286 @@ +use nssa_core::{ + Commitment, CommitmentSetDigest, DUMMY_COMMITMENT_HASH, EncryptionScheme, InputAccountIdentity, + MembershipProof, Nullifier, NullifierPublicKey, NullifierSecretKey, + PrivacyPreservingCircuitOutput, PrivateAccountKind, SharedSecretKey, + account::{Account, AccountId, Nonce}, + compute_digest_for_path, +}; + +use crate::execution_state::ExecutionState; + +pub fn compute_circuit_output( + execution_state: ExecutionState, + account_identities: &[InputAccountIdentity], +) -> PrivacyPreservingCircuitOutput { + let (block_validity_window, timestamp_validity_window, pda_seed_by_position, states_iter) = + execution_state.into_parts(); + let mut output = PrivacyPreservingCircuitOutput { + public_pre_states: Vec::new(), + public_post_states: Vec::new(), + ciphertexts: Vec::new(), + new_commitments: Vec::new(), + new_nullifiers: Vec::new(), + block_validity_window, + timestamp_validity_window, + }; + + assert_eq!( + account_identities.len(), + states_iter.len(), + "Invalid account_identities length" + ); + + let mut output_index = 0; + for (pos, (account_identity, (pre_state, post_state))) in + account_identities.iter().zip(states_iter).enumerate() + { + match account_identity { + InputAccountIdentity::Public => { + output.public_pre_states.push(pre_state); + output.public_post_states.push(post_state); + } + InputAccountIdentity::PrivateAuthorizedInit { + ssk, + nsk, + identifier, + } => { + let npk = NullifierPublicKey::from(nsk); + let account_id = AccountId::for_regular_private_account(&npk, *identifier); + + assert_eq!(account_id, 
pre_state.account_id, "AccountId mismatch"); + assert!( + pre_state.is_authorized, + "Pre-state not authorized for authenticated private account" + ); + assert_eq!( + pre_state.account, + Account::default(), + "Found new private account with non default values" + ); + + let new_nullifier = ( + Nullifier::for_account_initialization(&account_id), + DUMMY_COMMITMENT_HASH, + ); + let new_nonce = pre_state.account.nonce.private_account_nonce_increment(nsk); + + emit_private_output( + &mut output, + &mut output_index, + post_state, + &account_id, + &PrivateAccountKind::Regular(*identifier), + ssk, + new_nullifier, + new_nonce, + ); + } + InputAccountIdentity::PrivateAuthorizedUpdate { + ssk, + nsk, + membership_proof, + identifier, + } => { + let npk = NullifierPublicKey::from(nsk); + let account_id = AccountId::for_regular_private_account(&npk, *identifier); + + assert_eq!(account_id, pre_state.account_id, "AccountId mismatch"); + assert!( + pre_state.is_authorized, + "Pre-state not authorized for authenticated private account" + ); + + let new_nullifier = compute_update_nullifier_and_set_digest( + membership_proof, + &pre_state.account, + &account_id, + nsk, + ); + let new_nonce = pre_state.account.nonce.private_account_nonce_increment(nsk); + + emit_private_output( + &mut output, + &mut output_index, + post_state, + &account_id, + &PrivateAccountKind::Regular(*identifier), + ssk, + new_nullifier, + new_nonce, + ); + } + InputAccountIdentity::PrivateUnauthorized { + npk, + ssk, + identifier, + } => { + let account_id = AccountId::for_regular_private_account(npk, *identifier); + + assert_eq!(account_id, pre_state.account_id, "AccountId mismatch"); + assert_eq!( + pre_state.account, + Account::default(), + "Found new private account with non default values", + ); + assert!( + !pre_state.is_authorized, + "Found new private account marked as authorized." 
+ ); + + let new_nullifier = ( + Nullifier::for_account_initialization(&account_id), + DUMMY_COMMITMENT_HASH, + ); + let new_nonce = Nonce::private_account_nonce_init(&account_id); + + emit_private_output( + &mut output, + &mut output_index, + post_state, + &account_id, + &PrivateAccountKind::Regular(*identifier), + ssk, + new_nullifier, + new_nonce, + ); + } + InputAccountIdentity::PrivatePdaInit { + npk: _, + ssk, + identifier, + } => { + // The npk-to-account_id binding is established upstream in + // `validate_and_sync_states` via `Claim::Pda(seed)` or a caller `pda_seeds` + // match. Here we only enforce the init pre-conditions. The supplied npk on + // the variant has been recorded into `private_pda_npk_by_position` and used + // for the binding check; we use `pre_state.account_id` directly for nullifier + // and commitment derivation. + assert!( + !pre_state.is_authorized, + "PrivatePdaInit requires unauthorized pre_state" + ); + assert_eq!( + pre_state.account, + Account::default(), + "New private PDA must be default" + ); + + let new_nullifier = ( + Nullifier::for_account_initialization(&pre_state.account_id), + DUMMY_COMMITMENT_HASH, + ); + let new_nonce = Nonce::private_account_nonce_init(&pre_state.account_id); + + let account_id = pre_state.account_id; + let (pda_program_id, seed) = pda_seed_by_position + .get(&pos) + .expect("PrivatePdaInit position must be in pda_seed_by_position"); + emit_private_output( + &mut output, + &mut output_index, + post_state, + &account_id, + &PrivateAccountKind::Pda { + program_id: *pda_program_id, + seed: *seed, + identifier: *identifier, + }, + ssk, + new_nullifier, + new_nonce, + ); + } + InputAccountIdentity::PrivatePdaUpdate { + ssk, + nsk, + membership_proof, + identifier, + } => { + // The npk binding is established upstream. 
Authorization must already be set; + // an unauthorized PrivatePdaUpdate would mean the prover supplied an nsk for an + // unbound PDA, which the upstream binding check would have rejected anyway, + // but we assert here to fail fast and document the precondition. + assert!( + pre_state.is_authorized, + "PrivatePdaUpdate requires authorized pre_state" + ); + + let new_nullifier = compute_update_nullifier_and_set_digest( + membership_proof, + &pre_state.account, + &pre_state.account_id, + nsk, + ); + let new_nonce = pre_state.account.nonce.private_account_nonce_increment(nsk); + + let account_id = pre_state.account_id; + let (pda_program_id, seed) = pda_seed_by_position + .get(&pos) + .expect("PrivatePdaUpdate position must be in pda_seed_by_position"); + emit_private_output( + &mut output, + &mut output_index, + post_state, + &account_id, + &PrivateAccountKind::Pda { + program_id: *pda_program_id, + seed: *seed, + identifier: *identifier, + }, + ssk, + new_nullifier, + new_nonce, + ); + } + } + } + + output +} + +#[expect( + clippy::too_many_arguments, + reason = "All seven inputs are distinct concerns from the variant arms; bundling would be artificial" +)] +fn emit_private_output( + output: &mut PrivacyPreservingCircuitOutput, + output_index: &mut u32, + post_state: Account, + account_id: &AccountId, + kind: &PrivateAccountKind, + shared_secret: &SharedSecretKey, + new_nullifier: (Nullifier, CommitmentSetDigest), + new_nonce: Nonce, +) { + output.new_nullifiers.push(new_nullifier); + + let mut post_with_updated_nonce = post_state; + post_with_updated_nonce.nonce = new_nonce; + + let commitment_post = Commitment::new(account_id, &post_with_updated_nonce); + let encrypted_account = EncryptionScheme::encrypt( + &post_with_updated_nonce, + kind, + shared_secret, + &commitment_post, + *output_index, + ); + + output.new_commitments.push(commitment_post); + output.ciphertexts.push(encrypted_account); + *output_index = output_index + .checked_add(1) + .unwrap_or_else(|| 
panic!("Too many private accounts, output index overflow")); +} + +fn compute_update_nullifier_and_set_digest( + membership_proof: &MembershipProof, + pre_account: &Account, + account_id: &AccountId, + nsk: &NullifierSecretKey, +) -> (Nullifier, CommitmentSetDigest) { + let commitment_pre = Commitment::new(account_id, pre_account); + let set_digest = compute_digest_for_path(&commitment_pre, membership_proof); + let nullifier = Nullifier::for_account_update(&commitment_pre, nsk); + (nullifier, set_digest) +} diff --git a/program_methods/guest/src/bin/vault.rs b/program_methods/guest/src/bin/vault.rs new file mode 100644 index 00000000..c691e8f6 --- /dev/null +++ b/program_methods/guest/src/bin/vault.rs @@ -0,0 +1,94 @@ +//! Vault program which allows users to create vault accounts and transfer funds to them. +//! Funds can later be claimed from the vault accounts by their owners. +//! +//! The program is designed to be used in conjunction with the authenticated transfer program, which +//! performs the actual transfer of funds from the vault accounts. 
+ +use authenticated_transfer_core::Instruction as AuthTransferInstruction; +use nssa_core::program::{ + AccountPostState, ChainedCall, ProgramInput, ProgramOutput, read_nssa_inputs, +}; +use vault_core::Instruction; + +fn unchanged_post_states( + pre_states: &[nssa_core::account::AccountWithMetadata], +) -> Vec { + pre_states + .iter() + .map(|pre_state| AccountPostState::new(pre_state.account.clone())) + .collect() +} + +fn main() { + let ( + ProgramInput { + self_program_id, + caller_program_id, + pre_states, + instruction, + }, + instruction_words, + ) = read_nssa_inputs::(); + + let pre_states_clone = pre_states.clone(); + let post_states = unchanged_post_states(&pre_states_clone); + + let chained_calls = match instruction { + Instruction::Transfer { + recipient_id, + amount, + } => { + let [sender, recipient_vault] = pre_states + .try_into() + .expect("Transfer requires exactly 3 accounts"); + + let seed = vault_core::compute_vault_seed(recipient_id); + + let mut recipient_vault_for_callee = recipient_vault; + recipient_vault_for_callee.is_authorized = true; + + vec![ + ChainedCall::new( + sender.account.program_owner, + vec![sender, recipient_vault_for_callee], + &AuthTransferInstruction::Transfer { amount }, + ) + .with_pda_seeds(vec![seed]), + ] + } + Instruction::Claim { amount } => { + let [owner, owner_vault] = pre_states + .try_into() + .expect("Claim requires exactly 2 accounts"); + + assert!( + owner.is_authorized, + "Owner must be authorized to claim from the vault" + ); + + let seed = vault_core::compute_vault_seed(owner.account_id); + + let mut owner_vault_for_callee = owner_vault; + owner_vault_for_callee.is_authorized = true; + + vec![ + ChainedCall::new( + owner_vault_for_callee.account.program_owner, + vec![owner_vault_for_callee, owner], + &AuthTransferInstruction::Transfer { amount }, + ) + .with_pda_seeds(vec![seed]), + ] + } + }; + + ProgramOutput::new( + self_program_id, + caller_program_id, + instruction_words, + pre_states_clone, + 
post_states, + ) + .with_chained_calls(chained_calls) + .write(); +} diff --git a/programs/authenticated_transfer/core/Cargo.toml b/programs/authenticated_transfer/core/Cargo.toml new file mode 100644 index 00000000..0331bd64 --- /dev/null +++ b/programs/authenticated_transfer/core/Cargo.toml @@ -0,0 +1,11 @@ +[package] +name = "authenticated_transfer_core" +version = "0.1.0" +edition = "2024" +license = { workspace = true } + +[lints] +workspace = true + +[dependencies] +serde.workspace = true diff --git a/programs/authenticated_transfer/core/src/lib.rs b/programs/authenticated_transfer/core/src/lib.rs new file mode 100644 index 00000000..14edac5e --- /dev/null +++ b/programs/authenticated_transfer/core/src/lib.rs @@ -0,0 +1,17 @@ +//! Core data structures for the Authenticated Transfer Program. + +use serde::{Deserialize, Serialize}; + +/// Instruction type for the Authenticated Transfer program. +#[derive(Serialize, Deserialize)] +pub enum Instruction { + /// Transfer `amount` of native balance from sender to recipient. + /// + /// Required accounts: `[sender, recipient]`. + Transfer { amount: u128 }, + + /// Initialize a new account under the ownership of this program. + /// + /// Required accounts: `[account_to_initialize]`. 
+ Initialize, +} diff --git a/programs/faucet/core/Cargo.toml b/programs/faucet/core/Cargo.toml new file mode 100644 index 00000000..aa8826ea --- /dev/null +++ b/programs/faucet/core/Cargo.toml @@ -0,0 +1,12 @@ +[package] +name = "faucet_core" +version = "0.1.0" +edition = "2024" +license = { workspace = true } + +[lints] +workspace = true + +[dependencies] +nssa_core.workspace = true +serde = { workspace = true, default-features = false } diff --git a/programs/faucet/core/src/lib.rs b/programs/faucet/core/src/lib.rs new file mode 100644 index 00000000..da9861e6 --- /dev/null +++ b/programs/faucet/core/src/lib.rs @@ -0,0 +1,29 @@ +pub use nssa_core::program::PdaSeed; +use nssa_core::{account::AccountId, program::ProgramId}; +use serde::{Deserialize, Serialize}; + +const FAUCET_SEED_DOMAIN_SEPARATOR: [u8; 32] = *b"/LEZ/v0.3/FaucetSeed/0000000000/"; + +#[derive(Serialize, Deserialize)] +pub enum Instruction { + /// Transfers native tokens from system faucet to recipient's vault. + /// + /// Required accounts (2): + /// - Faucet PDA account + /// - Recipient vault PDA account + Transfer { + vault_program_id: ProgramId, + recipient_id: AccountId, + amount: u128, + }, +} + +#[must_use] +pub const fn compute_faucet_seed() -> PdaSeed { + PdaSeed::new(FAUCET_SEED_DOMAIN_SEPARATOR) +} + +#[must_use] +pub fn compute_faucet_account_id(faucet_program_id: ProgramId) -> AccountId { + AccountId::for_public_pda(&faucet_program_id, &compute_faucet_seed()) +} diff --git a/programs/vault/core/Cargo.toml b/programs/vault/core/Cargo.toml new file mode 100644 index 00000000..fd3cdf96 --- /dev/null +++ b/programs/vault/core/Cargo.toml @@ -0,0 +1,13 @@ +[package] +name = "vault_core" +version = "0.1.0" +edition = "2024" +license = { workspace = true } + +[lints] +workspace = true + +[dependencies] +nssa_core.workspace = true +serde = { workspace = true, default-features = false } +risc0-zkvm.workspace = true diff --git a/programs/vault/core/src/lib.rs b/programs/vault/core/src/lib.rs new 
file mode 100644 index 00000000..8937e087 --- /dev/null +++ b/programs/vault/core/src/lib.rs @@ -0,0 +1,53 @@ +pub use nssa_core::program::PdaSeed; +use nssa_core::{account::AccountId, program::ProgramId}; +use serde::{Deserialize, Serialize}; + +const VAULT_SEED_DOMAIN_SEPARATOR: &[u8] = b"/LEZ/v0.3/VaultSeed/00000000000/"; + +const _: () = assert!( + VAULT_SEED_DOMAIN_SEPARATOR.len() == 32, + "Domain separator must be exactly 32 bytes long" +); + +#[derive(Serialize, Deserialize)] +pub enum Instruction { + /// Transfers native tokens from sender to recipient's vault. + /// + /// Required accounts (3): + /// - Sender account + /// - Recipient account + /// - Recipient vault PDA account + Transfer { + recipient_id: AccountId, + amount: u128, + }, + + /// Claims native tokens from owner's vault into owner's account. + /// + /// Required accounts (2): + /// - Owner account + /// - Owner vault PDA account + Claim { amount: u128 }, +} + +#[must_use] +pub fn compute_vault_seed(owner_id: AccountId) -> PdaSeed { + use risc0_zkvm::sha::{Impl, Sha256 as _}; + + let mut bytes = [0_u8; 64]; + bytes[..32].copy_from_slice(VAULT_SEED_DOMAIN_SEPARATOR); + bytes[32..64].copy_from_slice(&owner_id.to_bytes()); + + PdaSeed::new( + Impl::hash_bytes(&bytes) + .as_bytes() + .try_into() + .expect("Hash output must be exactly 32 bytes long"), + ) +} + +#[must_use] +pub fn compute_vault_account_id(vault_program_id: ProgramId, owner_id: AccountId) -> AccountId { + let seed = compute_vault_seed(owner_id); + AccountId::for_public_pda(&vault_program_id, &seed) +} diff --git a/python/keycard_applets/LEE_keycard.cap b/python/keycard_applets/LEE_keycard.cap deleted file mode 100644 index b44835c4..00000000 Binary files a/python/keycard_applets/LEE_keycard.cap and /dev/null differ diff --git a/python/keycard_applets/math.cap b/python/keycard_applets/math.cap deleted file mode 100644 index b9c0e99f..00000000 Binary files a/python/keycard_applets/math.cap and /dev/null differ diff --git 
a/python/keycard_wallet.py b/python/keycard_wallet.py deleted file mode 100644 index 7d7e5bd3..00000000 --- a/python/keycard_wallet.py +++ /dev/null @@ -1,125 +0,0 @@ -from smartcard.System import readers -from keycard.exceptions import APDUError, TransportError -from ecdsa import VerifyingKey, SECP256k1 - -from keycard.keycard import KeyCard - -from mnemonic import Mnemonic -from keycard import constants - -import keycard - -DEFAULT_PAIRING_PASSWORD = "KeycardDefaultPairing" - -class KeycardWallet: - def __init__(self): - self.card = KeyCard() - - def _is_smart_card_reader_detected(self) -> bool: - try: - return len(readers()) > 0 - except Exception: - return False - - def _is_keycard_detected(self) -> bool: - try: - KeyCard().select() - return True - except (TransportError, APDUError, Exception): - # No readers, no card, or card doesn't respond. - return False - - def is_unpaired_keycard_available(self) -> bool: - if not self._is_smart_card_reader_detected(): - return False - elif not self._is_keycard_detected(): - return False - return True - - def setup_communication(self, pin: str, password = DEFAULT_PAIRING_PASSWORD) -> bool: - try: - self.card.select() - - if not self.card.is_initialized: - return False - - pairing_index, pairing_key = self.card.pair(password) - self.pairing_index = pairing_index - - self.card.open_secure_channel(pairing_index, pairing_key) - self.card.verify_pin(pin) - - return True - except Exception as e: - print(f"Error: {e}") - return False - - def load_mnemonic(self, mnemonic: str) -> bool: - try: - # Convert mnemonic to seed - mnemo = Mnemonic("english") - seed = mnemo.to_seed(mnemonic) - - # Load the LEE seed onto the card - result = self.card.load_key( - key_type = constants.LoadKeyType.LEE_SEED, - lee_seed = seed - ) - - #TODO: this appears to be the issue. 
- return True - except Exception as e: - print(f"Error during disconnect: {e}") - return False - - def disconnect(self) -> bool: - try: - if not self.card.is_secure_channel_open: - return None - - self.card.unpair(self.pairing_index) - - return True - except Exception as e: - print(f"Error during unpair: {e}") - return False - - def get_public_key_for_path(self, path: str = "m/44'/60'/0'/0/0") -> bytes | None: - try: - if not self.card.is_secure_channel_open or not self.card.is_pin_verified: - return None - - public_key = self.card.export_key( - derivation_option = constants.DerivationOption.DERIVE, - public_only = True, - keypath = path - ) - - public_key = public_key.public_key - public_key = VerifyingKey.from_string(public_key[1:], curve=SECP256k1) - public_key = public_key.to_string("compressed")[1:] - - return public_key - - except Exception as e: - print(f"Error getting public key: {e}") - return None - - - def sign_message_for_path(self, message: bytes, path: str = "m/44'/60'/0'/0/0") -> bytes | None: - try: - if not self.card.is_secure_channel_open or not self.card.is_pin_verified: - return None - - signature = self.card.sign_with_path( - digest = message, - path = path, - algorithm = constants.SigningAlgorithm.SCHNORR_BIP340, - make_current = False - ) - - return signature.signature - - except Exception as e: - print(f"Error signing message: {e}") - return None \ No newline at end of file diff --git a/sequencer/core/Cargo.toml b/sequencer/core/Cargo.toml index efd0e359..5f74fbde 100644 --- a/sequencer/core/Cargo.toml +++ b/sequencer/core/Cargo.toml @@ -13,8 +13,10 @@ nssa_core.workspace = true common.workspace = true storage.workspace = true mempool.workspace = true -bedrock_client.workspace = true +logos-blockchain-zone-sdk.workspace = true testnet_initial_state.workspace = true +faucet_core.workspace = true +vault_core.workspace = true anyhow.workspace = true serde.workspace = true @@ -30,7 +32,6 @@ rand.workspace = true borsh.workspace = true 
bytesize.workspace = true url.workspace = true -jsonrpsee = { workspace = true, features = ["ws-client"] } [features] default = [] diff --git a/sequencer/core/src/block_publisher.rs b/sequencer/core/src/block_publisher.rs new file mode 100644 index 00000000..9f4c8235 --- /dev/null +++ b/sequencer/core/src/block_publisher.rs @@ -0,0 +1,136 @@ +use std::{sync::Arc, time::Duration}; + +use anyhow::{Context as _, Result, anyhow}; +use common::block::Block; +use log::warn; +pub use logos_blockchain_core::mantle::ops::channel::MsgId; +pub use logos_blockchain_key_management_system_service::keys::Ed25519Key; +pub use logos_blockchain_zone_sdk::sequencer::SequencerCheckpoint; +use logos_blockchain_zone_sdk::{ + CommonHttpClient, + adapter::NodeHttpClient, + sequencer::{Event, SequencerConfig as ZoneSdkSequencerConfig, SequencerHandle, ZoneSequencer}, + state::InscriptionInfo, +}; +use tokio::task::JoinHandle; + +use crate::config::BedrockConfig; + +/// Sink for `Event::Published` checkpoints emitted by the drive task. +/// Caller is responsible for persistence (e.g. writing to rocksdb). +pub type CheckpointSink = Box; + +/// Sink for finalized L2 block ids derived from `Event::TxsFinalized` and +/// `Event::FinalizedInscriptions`. Caller is responsible for cleanup +/// (e.g. marking pending blocks as finalized in storage). +pub type FinalizedBlockSink = Box; + +#[expect(async_fn_in_trait, reason = "We don't care about Send/Sync here")] +pub trait BlockPublisherTrait: Clone { + async fn new( + config: &BedrockConfig, + bedrock_signing_key: Ed25519Key, + resubmit_interval: Duration, + initial_checkpoint: Option, + on_checkpoint: CheckpointSink, + on_finalized_block: FinalizedBlockSink, + ) -> Result; + + /// Fire-and-forget publish. Zone-sdk drives the actual submission and + /// retries internally; this just hands the payload off. + async fn publish_block(&self, block: &Block) -> Result<()>; +} + +/// Real block publisher backed by zone-sdk's `ZoneSequencer`. 
+#[derive(Clone)] +pub struct ZoneSdkPublisher { + handle: SequencerHandle, + // Aborts the drive task when the last clone is dropped. + _drive_task: Arc, +} + +struct DriveTaskGuard(JoinHandle<()>); + +impl Drop for DriveTaskGuard { + fn drop(&mut self) { + self.0.abort(); + } +} + +impl BlockPublisherTrait for ZoneSdkPublisher { + async fn new( + config: &BedrockConfig, + bedrock_signing_key: Ed25519Key, + resubmit_interval: Duration, + initial_checkpoint: Option, + on_checkpoint: CheckpointSink, + on_finalized_block: FinalizedBlockSink, + ) -> Result { + let basic_auth = config.auth.clone().map(Into::into); + let node = NodeHttpClient::new(CommonHttpClient::new(basic_auth), config.node_url.clone()); + + let zone_sdk_config = ZoneSdkSequencerConfig { + resubmit_interval, + ..ZoneSdkSequencerConfig::default() + }; + + let (mut sequencer, mut handle) = ZoneSequencer::init_with_config( + config.channel_id, + bedrock_signing_key, + node, + zone_sdk_config, + initial_checkpoint, + ); + + let drive_task = tokio::spawn(async move { + loop { + let Some(event) = sequencer.next_event().await else { + continue; + }; + match event { + Event::Published { checkpoint, .. } => on_checkpoint(checkpoint), + Event::TxsFinalized { inscriptions, .. } + | Event::FinalizedInscriptions { inscriptions } => { + if let Some(max_block_id) = max_block_id_from_inscriptions(&inscriptions) { + on_finalized_block(max_block_id); + } + } + Event::ChannelUpdate { .. } | Event::Ready => {} + } + } + }); + + handle.wait_ready().await; + + Ok(Self { + handle, + _drive_task: Arc::new(DriveTaskGuard(drive_task)), + }) + } + + async fn publish_block(&self, block: &Block) -> Result<()> { + let data = borsh::to_vec(block).context("Failed to serialize block")?; + self.handle + .publish_message(data) + .await + .map_err(|e| anyhow!("zone-sdk publish failed: {e}"))?; + Ok(()) + } +} + +/// Deserialize each inscription payload as a `Block` and return the highest +/// `block_id`. 
Bad payloads are logged and skipped. +fn max_block_id_from_inscriptions(inscriptions: &[InscriptionInfo]) -> Option { + inscriptions + .iter() + .filter_map( + |inscription| match borsh::from_slice::(&inscription.payload) { + Ok(block) => Some(block.header.block_id), + Err(err) => { + warn!("Failed to deserialize finalized inscription as Block: {err:#}"); + None + } + }, + ) + .max() +} diff --git a/sequencer/core/src/block_settlement_client.rs b/sequencer/core/src/block_settlement_client.rs deleted file mode 100644 index 6b32f8de..00000000 --- a/sequencer/core/src/block_settlement_client.rs +++ /dev/null @@ -1,116 +0,0 @@ -use anyhow::{Context as _, Result}; -use bedrock_client::BedrockClient; -pub use common::block::Block; -pub use logos_blockchain_core::mantle::{MantleTx, SignedMantleTx, ops::channel::MsgId}; -use logos_blockchain_core::mantle::{ - Op, OpProof, Transaction as _, - ops::channel::{ChannelId, inscribe::InscriptionOp}, -}; -pub use logos_blockchain_key_management_system_service::keys::Ed25519Key; -use logos_blockchain_key_management_system_service::keys::Ed25519PublicKey; - -use crate::config::BedrockConfig; - -#[expect(async_fn_in_trait, reason = "We don't care about Send/Sync here")] -pub trait BlockSettlementClientTrait: Clone { - //// Create a new client. - fn new(config: &BedrockConfig, signing_key: Ed25519Key) -> Result; - - /// Get the bedrock channel ID used by this client. - fn bedrock_channel_id(&self) -> ChannelId; - - /// Get the bedrock signing key used by this client. - fn bedrock_signing_key(&self) -> &Ed25519Key; - - /// Post a transaction to the node. - async fn submit_inscribe_tx_to_bedrock(&self, tx: SignedMantleTx) -> Result<()>; - - /// Create and sign a transaction for inscribing data. 
- fn create_inscribe_tx(&self, block: &Block) -> Result<(SignedMantleTx, MsgId)> { - let inscription_data = borsh::to_vec(block)?; - log::debug!( - "The size of the block {} is {} bytes", - block.header.block_id, - inscription_data.len() - ); - let verifying_key_bytes = self.bedrock_signing_key().public_key().to_bytes(); - let verifying_key = - Ed25519PublicKey::from_bytes(&verifying_key_bytes).expect("valid ed25519 public key"); - - let inscribe_op = InscriptionOp { - channel_id: self.bedrock_channel_id(), - inscription: inscription_data, - parent: block.bedrock_parent_id.into(), - signer: verifying_key, - }; - let inscribe_op_id = inscribe_op.id(); - - let inscribe_tx = MantleTx { - ops: vec![Op::ChannelInscribe(inscribe_op)], - // Altruistic test config - storage_gas_price: 0.into(), - execution_gas_price: 0.into(), - }; - - let tx_hash = inscribe_tx.hash(); - let signature_bytes = self - .bedrock_signing_key() - .sign_payload(tx_hash.as_signing_bytes().as_ref()) - .to_bytes(); - let signature = - logos_blockchain_key_management_system_service::keys::Ed25519Signature::from_bytes( - &signature_bytes, - ); - - let signed_mantle_tx = SignedMantleTx { - ops_proofs: vec![OpProof::Ed25519Sig(signature)], - mantle_tx: inscribe_tx, - }; - Ok((signed_mantle_tx, inscribe_op_id)) - } -} - -/// A component that posts block data to logos blockchain. 
-#[derive(Clone)] -pub struct BlockSettlementClient { - client: BedrockClient, - signing_key: Ed25519Key, - channel_id: ChannelId, -} - -impl BlockSettlementClientTrait for BlockSettlementClient { - fn new(config: &BedrockConfig, signing_key: Ed25519Key) -> Result { - let client = - BedrockClient::new(config.backoff, config.node_url.clone(), config.auth.clone()) - .context("Failed to initialize bedrock client")?; - Ok(Self { - client, - signing_key, - channel_id: config.channel_id, - }) - } - - async fn submit_inscribe_tx_to_bedrock(&self, tx: SignedMantleTx) -> Result<()> { - let (parent_id, msg_id) = match tx.mantle_tx.ops.first() { - Some(Op::ChannelInscribe(inscribe)) => (inscribe.parent, inscribe.id()), - _ => panic!("Expected ChannelInscribe op"), - }; - self.client - .post_transaction(tx) - .await - .context("Failed to post transaction to Bedrock after retries")? - .context("Failed to post transaction to Bedrock with non-retryable error")?; - - log::debug!("Posted block to Bedrock with parent id {parent_id:?} and msg id: {msg_id:?}"); - - Ok(()) - } - - fn bedrock_channel_id(&self) -> ChannelId { - self.channel_id - } - - fn bedrock_signing_key(&self) -> &Ed25519Key { - &self.signing_key - } -} diff --git a/sequencer/core/src/block_store.rs b/sequencer/core/src/block_store.rs index 7e47005d..ada6d306 100644 --- a/sequencer/core/src/block_store.rs +++ b/sequencer/core/src/block_store.rs @@ -1,16 +1,19 @@ -use std::{collections::HashMap, path::Path}; +use std::{collections::HashMap, path::Path, sync::Arc}; -use anyhow::Result; +use anyhow::{Context as _, Result}; use common::{ HashType, block::{Block, BlockMeta, MantleMsgId}, transaction::NSSATransaction, }; +use log::info; +use logos_blockchain_zone_sdk::sequencer::SequencerCheckpoint; use nssa::V03State; -use storage::{error::DbError, sequencer::RocksDBIO}; +pub use storage::DbResult; +use storage::sequencer::RocksDBIO; pub struct SequencerStore { - dbio: RocksDBIO, + dbio: Arc, // TODO: Consider adding the 
hashmap to the database for faster recovery. tx_hash_to_block_map: HashMap, genesis_id: u64, @@ -18,21 +21,25 @@ pub struct SequencerStore { } impl SequencerStore { - /// Starting database at the start of new chain. - /// Creates files if necessary. - /// - /// ATTENTION: Will overwrite genesis block. - pub fn open_db_with_genesis( - location: &Path, - genesis_block: &Block, - genesis_msg_id: MantleMsgId, - signing_key: nssa::PrivateKey, - ) -> Result { - let tx_hash_to_block_map = block_to_transactions_map(genesis_block); - - let dbio = RocksDBIO::open_or_create(location, genesis_block, genesis_msg_id)?; - + /// Open existing database at the given location. Fails if no database is found. + pub fn open_db(location: &Path, signing_key: nssa::PrivateKey) -> DbResult { + let dbio = Arc::new(RocksDBIO::open(location)?); let genesis_id = dbio.get_meta_first_block_in_db()?; + let last_id = dbio.latest_block_meta()?.id; + + info!("Preparing block cache"); + let mut tx_hash_to_block_map = HashMap::new(); + for i in genesis_id..=last_id { + let block = dbio + .get_block(i)? + .expect("Block should be present in the database"); + + tx_hash_to_block_map.extend(block_to_transactions_map(&block)); + } + info!( + "Block cache prepared. Total blocks in cache: {}", + tx_hash_to_block_map.len() + ); Ok(Self { dbio, @@ -42,19 +49,56 @@ impl SequencerStore { }) } - pub fn get_block_at_id(&self, id: u64) -> Result, DbError> { + /// Starting database at the start of new chain. + /// Creates files if necessary. + /// + /// ATTENTION: Will overwrite genesis block. 
+ pub fn create_db_with_genesis( + location: &Path, + genesis_block: &Block, + genesis_msg_id: MantleMsgId, + genesis_state: &V03State, + signing_key: nssa::PrivateKey, + ) -> DbResult { + let dbio = Arc::new(RocksDBIO::create( + location, + genesis_block, + genesis_msg_id, + genesis_state, + )?); + let genesis_id = dbio.get_meta_first_block_in_db()?; + let tx_hash_to_block_map = block_to_transactions_map(genesis_block); + + Ok(Self { + dbio, + tx_hash_to_block_map, + genesis_id, + signing_key, + }) + } + + /// Shared handle to the underlying rocksdb. Used to persist the zone-sdk + /// checkpoint from the sequencer's drive task without needing &mut to the + /// store. + #[must_use] + pub fn dbio(&self) -> Arc { + Arc::clone(&self.dbio) + } + + pub fn get_block_at_id(&self, id: u64) -> DbResult> { self.dbio.get_block(id) } - pub fn delete_block_at_id(&mut self, block_id: u64) -> Result<()> { - Ok(self.dbio.delete_block(block_id)?) + pub fn delete_block_at_id(&mut self, block_id: u64) -> DbResult<()> { + self.dbio.delete_block(block_id) } - pub fn mark_block_as_finalized(&mut self, block_id: u64) -> Result<()> { - Ok(self.dbio.mark_block_as_finalized(block_id)?) + pub fn mark_block_as_finalized(&mut self, block_id: u64) -> DbResult<()> { + self.dbio.mark_block_as_finalized(block_id) } /// Returns the transaction corresponding to the given hash, if it exists in the blockchain. + #[must_use] pub fn get_transaction_by_hash(&self, hash: HashType) -> Option { let block_id = *self.tx_hash_to_block_map.get(&hash)?; let block = self @@ -72,20 +116,22 @@ impl SequencerStore { ); } - pub fn latest_block_meta(&self) -> Result { - Ok(self.dbio.latest_block_meta()?) 
+ pub fn latest_block_meta(&self) -> DbResult { + self.dbio.latest_block_meta() } + #[must_use] pub const fn genesis_id(&self) -> u64 { self.genesis_id } + #[must_use] pub const fn signing_key(&self) -> &nssa::PrivateKey { &self.signing_key } - pub fn get_all_blocks(&self) -> impl Iterator> { - self.dbio.get_all_blocks().map(|res| Ok(res?)) + pub fn get_all_blocks(&self) -> impl Iterator> { + self.dbio.get_all_blocks() } pub(crate) fn update( @@ -93,15 +139,31 @@ impl SequencerStore { block: &Block, msg_id: MantleMsgId, state: &V03State, - ) -> Result<()> { + ) -> DbResult<()> { let new_transactions_map = block_to_transactions_map(block); self.dbio.atomic_update(block, msg_id, state)?; self.tx_hash_to_block_map.extend(new_transactions_map); Ok(()) } - pub fn get_nssa_state(&self) -> Option { - self.dbio.get_nssa_state().ok() + pub fn get_nssa_state(&self) -> DbResult { + self.dbio.get_nssa_state() + } + + pub fn get_zone_checkpoint(&self) -> Result> { + let Some(bytes) = self.dbio.get_zone_sdk_checkpoint_bytes()? 
else { + return Ok(None); + }; + let checkpoint: SequencerCheckpoint = serde_json::from_slice(&bytes) + .context("Failed to deserialize stored zone-sdk checkpoint")?; + Ok(Some(checkpoint)) + } + + pub fn set_zone_checkpoint(&self, checkpoint: &SequencerCheckpoint) -> Result<()> { + let bytes = + serde_json::to_vec(checkpoint).context("Failed to serialize zone-sdk checkpoint")?; + self.dbio.put_zone_sdk_checkpoint_bytes(&bytes)?; + Ok(()) } } @@ -139,9 +201,14 @@ mod tests { let genesis_block = genesis_block_hashable_data.into_pending_block(&signing_key, [0; 32]); // Start an empty node store - let mut node_store = - SequencerStore::open_db_with_genesis(path, &genesis_block, [0; 32], signing_key) - .unwrap(); + let mut node_store = SequencerStore::create_db_with_genesis( + path, + &genesis_block, + [0; 32], + &testnet_initial_state::initial_state(), + signing_key, + ) + .unwrap(); let tx = common::test_utils::produce_dummy_empty_transaction(); let block = common::test_utils::produce_dummy_block(1, None, vec![tx.clone()]); @@ -174,9 +241,14 @@ mod tests { let genesis_block = genesis_block_hashable_data.into_pending_block(&signing_key, [0; 32]); let genesis_hash = genesis_block.header.hash; - let node_store = - SequencerStore::open_db_with_genesis(path, &genesis_block, [0; 32], signing_key) - .unwrap(); + let node_store = SequencerStore::create_db_with_genesis( + path, + &genesis_block, + [0; 32], + &testnet_initial_state::initial_state(), + signing_key, + ) + .unwrap(); // Verify that initially the latest block hash equals genesis hash let latest_meta = node_store.latest_block_meta().unwrap(); @@ -199,9 +271,14 @@ mod tests { }; let genesis_block = genesis_block_hashable_data.into_pending_block(&signing_key, [0; 32]); - let mut node_store = - SequencerStore::open_db_with_genesis(path, &genesis_block, [0; 32], signing_key) - .unwrap(); + let mut node_store = SequencerStore::create_db_with_genesis( + path, + &genesis_block, + [0; 32], + 
&testnet_initial_state::initial_state(), + signing_key, + ) + .unwrap(); // Add a new block let tx = common::test_utils::produce_dummy_empty_transaction(); @@ -235,9 +312,14 @@ mod tests { }; let genesis_block = genesis_block_hashable_data.into_pending_block(&signing_key, [0; 32]); - let mut node_store = - SequencerStore::open_db_with_genesis(path, &genesis_block, [0; 32], signing_key) - .unwrap(); + let mut node_store = SequencerStore::create_db_with_genesis( + path, + &genesis_block, + [0; 32], + &testnet_initial_state::initial_state(), + signing_key, + ) + .unwrap(); // Add a new block with Pending status let tx = common::test_utils::produce_dummy_empty_transaction(); @@ -264,4 +346,49 @@ mod tests { common::block::BedrockStatus::Finalized )); } + + #[test] + fn open_existing_db_caches_transactions() { + let temp_dir = tempdir().unwrap(); + let path = temp_dir.path(); + + let signing_key = sequencer_sign_key_for_testing(); + + let genesis_block_hashable_data = HashableBlockData { + block_id: 0, + prev_block_hash: HashType([0; 32]), + timestamp: 0, + transactions: vec![], + }; + + let genesis_block = genesis_block_hashable_data.into_pending_block(&signing_key, [0; 32]); + let tx = common::test_utils::produce_dummy_empty_transaction(); + { + // Create a scope to drop the first store after creating the db + let mut node_store = SequencerStore::create_db_with_genesis( + path, + &genesis_block, + [0; 32], + &testnet_initial_state::initial_state(), + signing_key.clone(), + ) + .unwrap(); + + // Add a new block + let block = common::test_utils::produce_dummy_block(1, None, vec![tx.clone()]); + node_store + .update( + &block, + [1; 32], + &V03State::new_with_genesis_accounts(&[], vec![], 0), + ) + .unwrap(); + } + + // Re-open the store and verify that the transaction is still retrievable (which means it + // was cached correctly) + let node_store = SequencerStore::open_db(path, signing_key).unwrap(); + let retrieved_tx = node_store.get_transaction_by_hash(tx.hash()); + 
assert_eq!(Some(tx), retrieved_tx); + } } diff --git a/sequencer/core/src/config.rs b/sequencer/core/src/config.rs index fa4a2fa7..371ebc89 100644 --- a/sequencer/core/src/config.rs +++ b/sequencer/core/src/config.rs @@ -6,24 +6,29 @@ use std::{ }; use anyhow::Result; -use bedrock_client::BackoffConfig; use bytesize::ByteSize; use common::config::BasicAuth; use humantime_serde; use logos_blockchain_core::mantle::ops::channel::ChannelId; +use nssa::AccountId; use serde::{Deserialize, Serialize}; -use testnet_initial_state::{PrivateAccountPublicInitialData, PublicAccountPublicInitialData}; use url::Url; +/// A transaction to be applied at genesis to supply initial balances. +#[derive(Clone, Debug, Serialize, Deserialize)] +#[serde(rename_all = "snake_case")] +pub enum GenesisAction { + SupplyAccount { + account_id: AccountId, + balance: u128, + }, +} + // TODO: Provide default values #[derive(Clone, Serialize, Deserialize)] pub struct SequencerConfig { /// Home dir of sequencer storage. pub home: PathBuf, - /// Genesis id. - pub genesis_id: u64, - /// If `True`, then adds random sequence of bytes to genesis block. - pub is_genesis_random: bool, /// Maximum number of user transactions in a block (excludes the mandatory clock transaction). pub max_num_tx_in_block: usize, /// Maximum block size (includes header, user transactions, and the mandatory clock @@ -42,19 +47,13 @@ pub struct SequencerConfig { pub signing_key: [u8; 32], /// Bedrock configuration options. pub bedrock_config: BedrockConfig, - /// Indexer RPC URL. - pub indexer_rpc_url: Url, - #[serde(skip_serializing_if = "Option::is_none")] - pub initial_public_accounts: Option>, - #[serde(skip_serializing_if = "Option::is_none")] - pub initial_private_accounts: Option>, + /// Genesis configuration. + #[serde(default)] + pub genesis: Vec, } #[derive(Clone, Serialize, Deserialize)] pub struct BedrockConfig { - /// Fibonacci backoff retry strategy configuration. 
- #[serde(default)] - pub backoff: BackoffConfig, /// Bedrock channel ID. pub channel_id: ChannelId, /// Bedrock Url. diff --git a/sequencer/core/src/lib.rs b/sequencer/core/src/lib.rs index 22c09d85..c6606145 100644 --- a/sequencer/core/src/lib.rs +++ b/sequencer/core/src/lib.rs @@ -1,52 +1,43 @@ use std::{path::Path, time::Instant}; use anyhow::{Context as _, Result, anyhow}; -use bedrock_client::SignedMantleTx; -#[cfg(feature = "testnet")] -use common::PINATA_BASE58; use common::{ HashType, block::{BedrockStatus, Block, HashableBlockData}, transaction::{NSSATransaction, clock_invocation}, }; -use config::SequencerConfig; +use config::{GenesisAction, SequencerConfig}; use log::{error, info, warn}; use logos_blockchain_key_management_system_service::keys::{ED25519_SECRET_KEY_SIZE, Ed25519Key}; use mempool::{MemPool, MemPoolHandle}; #[cfg(feature = "mock")] pub use mock::SequencerCoreWithMockClients; -use nssa::V03State; +use nssa::{AccountId, PublicTransaction, program::Program, public_transaction::Message}; +use nssa_core::GENESIS_BLOCK_ID; pub use storage::error::DbError; -use testnet_initial_state::initial_state; use crate::{ - block_settlement_client::{BlockSettlementClient, BlockSettlementClientTrait, MsgId}, + block_publisher::{BlockPublisherTrait, ZoneSdkPublisher}, block_store::SequencerStore, - indexer_client::{IndexerClient, IndexerClientTrait}, }; -pub mod block_settlement_client; +pub mod block_publisher; pub mod block_store; pub mod config; -pub mod indexer_client; #[cfg(feature = "mock")] pub mod mock; -pub struct SequencerCore< - BC: BlockSettlementClientTrait = BlockSettlementClient, - IC: IndexerClientTrait = IndexerClient, -> { +pub struct SequencerCore { state: nssa::V03State, store: SequencerStore, mempool: MemPool, sequencer_config: SequencerConfig, chain_height: u64, - block_settlement_client: BC, - indexer_client: IC, + block_publisher: BP, } -impl SequencerCore { +impl SequencerCore { /// Starts the sequencer using the provided 
configuration. /// If an existing database is found, the sequencer state is loaded from it and /// assumed to represent the correct latest state consistent with Bedrock-finalized data. @@ -55,101 +46,110 @@ impl SequencerCore (Self, MemPoolHandle) { - let hashable_data = HashableBlockData { - block_id: config.genesis_id, - transactions: vec![], - prev_block_hash: HashType([0; 32]), - timestamp: 0, - }; - let signing_key = nssa::PrivateKey::try_new(config.signing_key).unwrap(); - let genesis_parent_msg_id = [0; 32]; - let genesis_block = hashable_data.into_pending_block(&signing_key, genesis_parent_msg_id); let bedrock_signing_key = load_or_create_signing_key(&config.home.join("bedrock_signing_key")) .expect("Failed to load or create bedrock signing key"); - let block_settlement_client = BC::new(&config.bedrock_config, bedrock_signing_key) - .expect("Failed to initialize Block Settlement Client"); + let db_path = config.home.join("rocksdb"); + let (store, state, genesis_block) = if db_path.exists() { + let store = + SequencerStore::open_db(&db_path, signing_key.clone()).unwrap_or_else(|err| { + panic!( + "Failed to open database at {} with error: {err}", + db_path.display() + ) + }); + let state = store + .get_nssa_state() + .expect("Failed to read state from store"); + let genesis_block = store + .get_block_at_id(store.genesis_id()) + .expect("Failed to read genesis block from store") + .expect("Genesis block not found in store"); + (store, state, genesis_block) + } else { + warn!( + "Database not found at {}, starting from genesis", + db_path.display() + ); - let indexer_client = IC::new(&config.indexer_rpc_url) - .await - .expect("Failed to create Indexer Client"); + // TODO: Remove msg_id from BlockMeta — it is no longer needed now that + // zone-sdk manages L1 settlement state via its own checkpoint. 
+ let genesis_msg_id = [0; 32]; + let genesis_parent_msg_id = [0; 32]; + let (genesis_state, genesis_txs) = build_genesis_state(&config); - let (_tx, genesis_msg_id) = block_settlement_client - .create_inscribe_tx(&genesis_block) - .expect("Failed to create inscribe tx for genesis block"); + let hashable_data = HashableBlockData { + block_id: GENESIS_BLOCK_ID, + transactions: genesis_txs, + prev_block_hash: HashType([0; 32]), + timestamp: 0, + }; + let genesis_block = + hashable_data.into_pending_block(&signing_key, genesis_parent_msg_id); + + let store = SequencerStore::create_db_with_genesis( + &db_path, + &genesis_block, + genesis_msg_id, + &genesis_state, + signing_key, + ) + .expect("Failed to create database with genesis block"); + + (store, genesis_state, genesis_block) + }; - // Sequencer should panic if unable to open db, - // as fixing this issue may require actions non-native to program scope - let store = SequencerStore::open_db_with_genesis( - &config.home.join("rocksdb"), - &genesis_block, - genesis_msg_id.into(), - signing_key, - ) - .unwrap(); let latest_block_meta = store .latest_block_meta() .expect("Failed to read latest block meta from store"); - #[cfg_attr(not(feature = "testnet"), allow(unused_mut))] - let mut state = if let Some(state) = store.get_nssa_state() { - info!("Found local database. Loading state and pending blocks from it."); - state - } else { - info!( - "No database found when starting the sequencer. 
Creating a fresh new with the initial data" - ); + let initial_checkpoint = store + .get_zone_checkpoint() + .expect("Failed to load zone-sdk checkpoint"); + let is_fresh_start = initial_checkpoint.is_none(); - let initial_private_accounts: Option< - Vec<(nssa_core::Commitment, nssa_core::Nullifier)>, - > = config.initial_private_accounts.clone().map(|accounts| { - accounts - .iter() - .map(|init_comm_data| { - let npk = &init_comm_data.npk; - let account_id = nssa::AccountId::from((npk, 0)); - - let mut acc = init_comm_data.account.clone(); - - acc.program_owner = - nssa::program::Program::authenticated_transfer_program().id(); - - ( - nssa_core::Commitment::new(&account_id, &acc), - nssa_core::Nullifier::for_account_initialization(&account_id), - ) - }) - .collect() - }); - - let init_accs: Option> = config - .initial_public_accounts - .clone() - .map(|initial_accounts| { - initial_accounts - .iter() - .map(|acc_data| (acc_data.account_id, acc_data.balance)) - .collect() - }); - - // If initial commitments or accounts are present in config, need to construct state - // from them - if initial_private_accounts.is_some() || init_accs.is_some() { - V03State::new_with_genesis_accounts( - &init_accs.unwrap_or_default(), - initial_private_accounts.unwrap_or_default(), - genesis_block.header.timestamp, - ) - } else { - initial_state() + let dbio_for_checkpoint = store.dbio(); + let on_checkpoint: block_publisher::CheckpointSink = Box::new(move |cp| { + let bytes = match serde_json::to_vec(&cp) { + Ok(b) => b, + Err(err) => { + error!("Failed to serialize zone-sdk checkpoint: {err:#}"); + return; + } + }; + if let Err(err) = dbio_for_checkpoint.put_zone_sdk_checkpoint_bytes(&bytes) { + error!("Failed to persist zone-sdk checkpoint: {err:#}"); } - }; + }); - #[cfg(feature = "testnet")] - state.add_pinata_program(PINATA_BASE58.parse().unwrap()); + let dbio_for_finalized = store.dbio(); + let on_finalized_block: block_publisher::FinalizedBlockSink = Box::new(move |block_id| 
{ + if let Err(err) = dbio_for_finalized.clean_pending_blocks_up_to(block_id) { + error!("Failed to mark pending blocks finalized up to {block_id}: {err:#}"); + } + }); + + let block_publisher = BP::new( + &config.bedrock_config, + bedrock_signing_key, + config.retry_pending_blocks_timeout, + initial_checkpoint, + on_checkpoint, + on_finalized_block, + ) + .await + .expect("Failed to initialize Block Publisher"); + + // On a truly fresh start (no checkpoint persisted yet), publish the + // genesis block so the indexer can find the channel start. After the + // first publish, zone-sdk's checkpoint persistence covers further + // restarts. + if is_fresh_start && let Err(err) = block_publisher.publish_block(&genesis_block).await { + error!("Failed to publish genesis block: {err:#}"); + } let (mempool, mempool_handle) = MemPool::new(config.mempool_max_size); @@ -159,35 +159,33 @@ impl SequencerCore Result { - let (tx, _msg_id) = self - .produce_new_block_with_mempool_transactions() - .context("Failed to produce new block with mempool transactions")?; - match self - .block_settlement_client - .submit_inscribe_tx_to_bedrock(tx) - .await - { - Ok(()) => {} - Err(err) => { - error!("Failed to post block data to Bedrock with error: {err:#}"); - } + let block = self + .build_block_from_mempool() + .context("Failed to build block from mempool transactions")?; + + // TODO: Remove msg_id from store.update — it is no longer needed now that + // zone-sdk manages L1 settlement state via its own checkpoint. + let placeholder_msg_id = [0_u8; 32]; + + if let Err(err) = self.block_publisher.publish_block(&block).await { + error!("Failed to publish block to Bedrock with error: {err:#}"); } + self.store.update(&block, placeholder_msg_id, &self.state)?; Ok(self.chain_height) } - /// Produces new block from transactions in mempool and packs it into a `SignedMantleTx`. 
- pub fn produce_new_block_with_mempool_transactions( - &mut self, - ) -> Result<(SignedMantleTx, MsgId)> { + /// Builds a new block from transactions in the mempool. + /// Does NOT publish or store the block — the caller is responsible for that. + pub fn build_block_from_mempool(&mut self) -> Result { let now = Instant::now(); let new_block_height = self.next_block_id(); @@ -277,21 +275,12 @@ impl SequencerCore SequencerCore &nssa::V03State { @@ -319,22 +308,19 @@ impl SequencerCore Result<()> { - self.get_pending_blocks()? - .iter() - .map(|block| block.header.block_id) - .min() - .map_or(Ok(()), |first_pending_block_id| { - info!("Clearing pending blocks up to id: {last_finalized_block_id}"); - // TODO: Delete blocks instead of marking them as finalized. - // Current approach is used because we still have `GetBlockDataRequest`. - (first_pending_block_id..=last_finalized_block_id) - .try_for_each(|id| self.store.mark_block_as_finalized(id)) - }) + /// Marks all pending blocks with `block_id <= last_finalized_block_id` as + /// finalized. Idempotent. Production callers don't invoke this directly — + /// it's wired up in `start_from_config` to the publisher's + /// `on_finalized_block` sink, which fires on `Event::TxsFinalized` / + /// `Event::FinalizedInscriptions`. Kept on the type for tests. + // TODO: Delete blocks instead of marking them as finalized. Current + // approach is used because we still have `GetBlockDataRequest`. + pub fn clean_finalized_blocks_from_db(&self, last_finalized_block_id: u64) -> Result<()> { + info!("Clearing pending blocks up to id: {last_finalized_block_id}"); + self.store + .dbio() + .clean_pending_blocks_up_to(last_finalized_block_id)?; + Ok(()) } /// Returns the list of stored pending blocks. @@ -342,18 +328,14 @@ impl SequencerCore>>()? + .collect::>>()? 
.into_iter() .filter(|block| matches!(block.bedrock_status, BedrockStatus::Pending)) .collect()) } - pub fn block_settlement_client(&self) -> BC { - self.block_settlement_client.clone() - } - - pub fn indexer_client(&self) -> IC { - self.indexer_client.clone() + pub fn block_publisher(&self) -> BP { + self.block_publisher.clone() } fn next_block_id(&self) -> u64 { @@ -363,6 +345,61 @@ impl SequencerCore (nssa::V03State, Vec) { + #[cfg(not(feature = "testnet"))] + let mut state = testnet_initial_state::initial_state(); + + #[cfg(feature = "testnet")] + let mut state = testnet_initial_state::initial_state_testnet(); + + let genesis_txs = config + .genesis + .iter() + .map(|genesis_tx| match genesis_tx { + GenesisAction::SupplyAccount { + account_id, + balance, + } => build_supply_account_genesis_transaction(account_id, *balance), + }) + .chain(std::iter::once(clock_invocation(0))) + .inspect(|tx| { + state + .transition_from_public_transaction(tx, GENESIS_BLOCK_ID, 0) + .expect("Failed to execute genesis transaction"); + }) + .map(NSSATransaction::Public) + .collect(); + + (state, genesis_txs) +} + +fn build_supply_account_genesis_transaction( + account_id: &AccountId, + balance: u128, +) -> PublicTransaction { + let faucet_program_id = Program::faucet().id(); + let vault_program_id = Program::vault().id(); + let recipient_vault_id = vault_core::compute_vault_account_id(vault_program_id, *account_id); + + let message = Message::try_new( + faucet_program_id, + vec![nssa::system_faucet_account_id(), recipient_vault_id], + vec![], + faucet_core::Instruction::Transfer { + vault_program_id, + recipient_id: *account_id, + amount: balance, + }, + ) + .expect("Failed to serialize genesis transfer instruction"); + let witness_set = nssa::public_transaction::WitnessSet::from_raw_parts(vec![]); + + PublicTransaction::new(message, witness_set) +} + /// Load signing key from file or generate a new one if it doesn't exist. 
fn load_or_create_signing_key(path: &Path) -> Result { if path.exists() { @@ -392,16 +429,20 @@ mod tests { use std::{pin::pin, time::Duration}; - use bedrock_client::BackoffConfig; use common::{ + HashType, + block::HashableBlockData, test_utils::sequencer_sign_key_for_testing, transaction::{NSSATransaction, clock_invocation}, }; use logos_blockchain_core::mantle::ops::channel::ChannelId; use mempool::MemPoolHandle; + use tempfile::tempdir; use testnet_initial_state::{initial_accounts, initial_pub_accounts_private_keys}; use crate::{ + block_store::SequencerStore, + build_genesis_state, config::{BedrockConfig, SequencerConfig}, mock::SequencerCoreWithMockClients, }; @@ -412,26 +453,18 @@ mod tests { SequencerConfig { home, - genesis_id: 1, - is_genesis_random: false, max_num_tx_in_block: 10, max_block_size: bytesize::ByteSize::mib(1), mempool_max_size: 10000, block_create_timeout: Duration::from_secs(1), signing_key: *sequencer_sign_key_for_testing().value(), bedrock_config: BedrockConfig { - backoff: BackoffConfig { - start_delay: Duration::from_millis(100), - max_retries: 5, - }, channel_id: ChannelId::from([0; 32]), node_url: "http://not-used-in-unit-tests".parse().unwrap(), auth: None, }, retry_pending_blocks_timeout: Duration::from_mins(4), - indexer_rpc_url: "ws://localhost:8779".parse().unwrap(), - initial_public_accounts: None, - initial_private_accounts: None, + genesis: vec![], } } @@ -457,9 +490,7 @@ mod tests { let tx = common::test_utils::produce_dummy_empty_transaction(); mempool_handle.push(tx).await.unwrap(); - sequencer - .produce_new_block_with_mempool_transactions() - .unwrap(); + sequencer.produce_new_block().await.unwrap(); (sequencer, mempool_handle) } @@ -470,7 +501,7 @@ mod tests { let (sequencer, _mempool_handle) = SequencerCoreWithMockClients::start_from_config(config.clone()).await; - assert_eq!(sequencer.chain_height, config.genesis_id); + assert_eq!(sequencer.chain_height, 1); assert_eq!(sequencer.sequencer_config.max_num_tx_in_block, 
10); let acc1_account_id = initial_accounts()[0].account_id; @@ -483,6 +514,57 @@ mod tests { assert_eq!(20000, balance_acc_2); } + #[tokio::test] + async fn start_from_config_opens_existing_db_if_it_exists() { + let config = setup_sequencer_config(); + let temp_dir = tempdir().unwrap(); + let mut config = config; + config.home = temp_dir.path().to_path_buf(); + + let signing_key = nssa::PrivateKey::try_new(config.signing_key).unwrap(); + let (genesis_state, genesis_txs) = build_genesis_state(&config); + let genesis_hashable_data = HashableBlockData { + block_id: 1, + transactions: genesis_txs, + prev_block_hash: HashType([0; 32]), + timestamp: 0, + }; + let genesis_block = genesis_hashable_data.into_pending_block(&signing_key, [0; 32]); + + let expected_msg_id = [7; 32]; + SequencerStore::create_db_with_genesis( + &config.home.join("rocksdb"), + &genesis_block, + expected_msg_id, + &genesis_state, + signing_key, + ) + .unwrap(); + + let (sequencer, _mempool_handle) = + SequencerCoreWithMockClients::start_from_config(config).await; + let latest_meta = sequencer.store.latest_block_meta().unwrap(); + + assert_eq!(latest_meta.msg_id, expected_msg_id); + assert_eq!(sequencer.chain_height, 1); + } + + #[should_panic(expected = "Failed to open database")] + #[tokio::test] + async fn start_from_config_panics_when_db_open_returns_non_not_found_error() { + let mut config = setup_sequencer_config(); + let temp_dir = tempdir().unwrap(); + config.home = temp_dir.path().to_path_buf(); + + let db_path = config.home.join("rocksdb"); + + std::fs::create_dir_all(&config.home).unwrap(); + // Force RocksDB open to fail with an IO error by placing a file at DB path. 
+ std::fs::write(&db_path, b"not-a-directory").unwrap(); + + let _ = SequencerCoreWithMockClients::start_from_config(config).await; + } + #[test] fn transaction_pre_check_pass() { let tx = common::test_utils::produce_dummy_empty_transaction(); @@ -604,23 +686,21 @@ mod tests { assert!(poll.is_pending()); // Empty the mempool by producing a block - sequencer - .produce_new_block_with_mempool_transactions() - .unwrap(); + sequencer.produce_new_block().await.unwrap(); // Resolve the pending push assert!(push_fut.await.is_ok()); } #[tokio::test] - async fn produce_new_block_with_mempool_transactions() { + async fn build_block_from_mempool() { let (mut sequencer, mempool_handle) = common_setup().await; let genesis_height = sequencer.chain_height; let tx = common::test_utils::produce_dummy_empty_transaction(); mempool_handle.push(tx).await.unwrap(); - let result = sequencer.produce_new_block_with_mempool_transactions(); + let result = sequencer.build_block_from_mempool(); assert!(result.is_ok()); assert_eq!(sequencer.chain_height, genesis_height + 1); } @@ -645,9 +725,7 @@ mod tests { mempool_handle.push(tx_replay).await.unwrap(); // Create block - sequencer - .produce_new_block_with_mempool_transactions() - .unwrap(); + sequencer.produce_new_block().await.unwrap(); let block = sequencer .store .get_block_at_id(sequencer.chain_height) @@ -679,9 +757,7 @@ mod tests { // The transaction should be included the first time mempool_handle.push(tx.clone()).await.unwrap(); - sequencer - .produce_new_block_with_mempool_transactions() - .unwrap(); + sequencer.produce_new_block().await.unwrap(); let block = sequencer .store .get_block_at_id(sequencer.chain_height) @@ -697,9 +773,7 @@ mod tests { // Add same transaction should fail mempool_handle.push(tx.clone()).await.unwrap(); - sequencer - .produce_new_block_with_mempool_transactions() - .unwrap(); + sequencer.produce_new_block().await.unwrap(); let block = sequencer .store .get_block_at_id(sequencer.chain_height) @@ -738,9 
+812,7 @@ mod tests { ); mempool_handle.push(tx.clone()).await.unwrap(); - sequencer - .produce_new_block_with_mempool_transactions() - .unwrap(); + sequencer.produce_new_block().await.unwrap(); let block = sequencer .store .get_block_at_id(sequencer.chain_height) @@ -778,15 +850,9 @@ mod tests { let config = setup_sequencer_config(); let (mut sequencer, _mempool_handle) = SequencerCoreWithMockClients::start_from_config(config).await; - sequencer - .produce_new_block_with_mempool_transactions() - .unwrap(); - sequencer - .produce_new_block_with_mempool_transactions() - .unwrap(); - sequencer - .produce_new_block_with_mempool_transactions() - .unwrap(); + sequencer.produce_new_block().await.unwrap(); + sequencer.produce_new_block().await.unwrap(); + sequencer.produce_new_block().await.unwrap(); assert_eq!(sequencer.get_pending_blocks().unwrap().len(), 4); } @@ -795,15 +861,9 @@ mod tests { let config = setup_sequencer_config(); let (mut sequencer, _mempool_handle) = SequencerCoreWithMockClients::start_from_config(config).await; - sequencer - .produce_new_block_with_mempool_transactions() - .unwrap(); - sequencer - .produce_new_block_with_mempool_transactions() - .unwrap(); - sequencer - .produce_new_block_with_mempool_transactions() - .unwrap(); + sequencer.produce_new_block().await.unwrap(); + sequencer.produce_new_block().await.unwrap(); + sequencer.produce_new_block().await.unwrap(); let last_finalized_block = 3; sequencer @@ -836,9 +896,7 @@ mod tests { ); mempool_handle.push(tx).await.unwrap(); - sequencer - .produce_new_block_with_mempool_transactions() - .unwrap(); + sequencer.produce_new_block().await.unwrap(); // Get the metadata of the last block produced sequencer.store.latest_block_meta().unwrap() @@ -861,9 +919,7 @@ mod tests { mempool_handle.push(tx.clone()).await.unwrap(); // Step 4: Produce new block - sequencer - .produce_new_block_with_mempool_transactions() - .unwrap(); + sequencer.produce_new_block().await.unwrap(); // Step 5: Verify the new 
block has correct previous block metadata let new_block = sequencer @@ -876,10 +932,6 @@ mod tests { new_block.header.prev_block_hash, expected_prev_meta.hash, "New block's prev_block_hash should match the stored metadata hash" ); - assert_eq!( - new_block.bedrock_parent_id, expected_prev_meta.msg_id, - "New block's bedrock_parent_id should match the stored metadata msg_id" - ); assert_eq!( new_block.body.transactions, vec![ @@ -914,9 +966,7 @@ mod tests { .await .unwrap(); mempool_handle.push(crafted_clock_tx).await.unwrap(); - sequencer - .produce_new_block_with_mempool_transactions() - .unwrap(); + sequencer.produce_new_block().await.unwrap(); let block = sequencer .store @@ -933,55 +983,6 @@ mod tests { ); } - #[tokio::test] - async fn start_from_config_uses_db_height_not_config_genesis() { - let mut config = setup_sequencer_config(); - let original_genesis_id = config.genesis_id; - - // Step 1: Create initial database and produce some blocks - let expected_chain_height = { - let (mut sequencer, mempool_handle) = - SequencerCoreWithMockClients::start_from_config(config.clone()).await; - - // Verify we start with the genesis_id from config - assert_eq!(sequencer.chain_height, original_genesis_id); - - // Produce multiple blocks to advance chain height - let tx = common::test_utils::produce_dummy_empty_transaction(); - mempool_handle.push(tx).await.unwrap(); - sequencer - .produce_new_block_with_mempool_transactions() - .unwrap(); - - let tx = common::test_utils::produce_dummy_empty_transaction(); - mempool_handle.push(tx).await.unwrap(); - sequencer - .produce_new_block_with_mempool_transactions() - .unwrap(); - - // Return the current chain height (should be genesis_id + 2) - sequencer.chain_height - }; - - // Step 2: Modify the config to have a DIFFERENT genesis_id - let different_genesis_id = original_genesis_id + 100; - config.genesis_id = different_genesis_id; - - // Step 3: Restart sequencer with the modified config (different genesis_id) - let (sequencer, 
_mempool_handle) = - SequencerCoreWithMockClients::start_from_config(config.clone()).await; - - // Step 4: Verify chain_height comes from database, NOT from the new config.genesis_id - assert_eq!( - sequencer.chain_height, expected_chain_height, - "Chain height should be loaded from database metadata, not config.genesis_id" - ); - assert_ne!( - sequencer.chain_height, different_genesis_id, - "Chain height should NOT match the modified config.genesis_id" - ); - } - #[tokio::test] async fn user_tx_that_chain_calls_clock_is_dropped() { let (mut sequencer, mempool_handle) = common_setup().await; @@ -994,9 +995,7 @@ mod tests { ), )); mempool_handle.push(deploy_tx).await.unwrap(); - sequencer - .produce_new_block_with_mempool_transactions() - .unwrap(); + sequencer.produce_new_block().await.unwrap(); // Build a user transaction that invokes clock_chain_caller, which in turn chain-calls the // clock program with the clock accounts. The sequencer should detect that the resulting @@ -1021,9 +1020,7 @@ mod tests { )); mempool_handle.push(user_tx).await.unwrap(); - sequencer - .produce_new_block_with_mempool_transactions() - .unwrap(); + sequencer.produce_new_block().await.unwrap(); let block = sequencer .store @@ -1057,86 +1054,10 @@ mod tests { mempool_handle.push(tx).await.unwrap(); // Block production must fail because the appended clock tx cannot execute. 
- let result = sequencer.produce_new_block_with_mempool_transactions(); + let result = sequencer.produce_new_block().await; assert!( result.is_err(), "Block production should abort when clock account data is corrupted" ); } - - #[tokio::test] - async fn genesis_private_account_cannot_be_re_initialized() { - use common::transaction::NSSATransaction; - use nssa::{ - Account, - privacy_preserving_transaction::{ - PrivacyPreservingTransaction, circuit::execute_and_prove, message::Message, - witness_set::WitnessSet, - }, - program::Program, - }; - use nssa_core::{ - SharedSecretKey, - account::AccountWithMetadata, - encryption::{EphemeralPublicKey, EphemeralSecretKey, ViewingPublicKey}, - }; - use testnet_initial_state::PrivateAccountPublicInitialData; - - let nsk: nssa_core::NullifierSecretKey = [7; 32]; - let npk = nssa_core::NullifierPublicKey::from(&nsk); - let vsk: EphemeralSecretKey = [8; 32]; - let vpk = ViewingPublicKey::from_scalar(vsk); - - let genesis_account = Account { - program_owner: Program::authenticated_transfer_program().id(), - ..Account::default() - }; - - // Start a sequencer from config with a preconfigured private genesis account - let mut config = setup_sequencer_config(); - config.initial_private_accounts = Some(vec![PrivateAccountPublicInitialData { - npk, - account: genesis_account, - }]); - - let (mut sequencer, _mempool_handle) = - SequencerCoreWithMockClients::start_from_config(config).await; - - // Attempt to re-initialize the same genesis account via a privacy-preserving transaction - let esk = [9; 32]; - let shared_secret = SharedSecretKey::new(&esk, &vpk); - let epk = EphemeralPublicKey::from_scalar(esk); - - let (output, proof) = execute_and_prove( - vec![AccountWithMetadata::new( - Account::default(), - true, - (&npk, 0), - )], - Program::serialize_instruction(0_u128).unwrap(), - vec![1], - vec![(npk, 0, shared_secret)], - vec![nsk], - vec![None], - &Program::authenticated_transfer_program().into(), - ) - .unwrap(); - - let message = 
- Message::try_from_circuit_output(vec![], vec![], vec![(npk, vpk, epk)], output) - .unwrap(); - - let witness_set = WitnessSet::for_message(&message, proof, &[]); - let tx = NSSATransaction::PrivacyPreserving(PrivacyPreservingTransaction::new( - message, - witness_set, - )); - - let result = tx.execute_check_on_state(&mut sequencer.state, 2, 0); - - assert!( - result.is_err_and(|e| e.to_string().contains("Nullifier already seen")), - "re-initializing a genesis private account must be rejected by the sequencer" - ); - } } diff --git a/sequencer/core/src/mock.rs b/sequencer/core/src/mock.rs index 45a682e2..ebe6ea5d 100644 --- a/sequencer/core/src/mock.rs +++ b/sequencer/core/src/mock.rs @@ -1,76 +1,34 @@ -use anyhow::{Result, anyhow}; -use bedrock_client::SignedMantleTx; -use logos_blockchain_core::mantle::ops::channel::ChannelId; +use std::time::Duration; + +use anyhow::Result; +use common::block::Block; use logos_blockchain_key_management_system_service::keys::Ed25519Key; -use url::Url; use crate::{ - block_settlement_client::BlockSettlementClientTrait, config::BedrockConfig, - indexer_client::IndexerClientTrait, + block_publisher::{ + BlockPublisherTrait, CheckpointSink, FinalizedBlockSink, SequencerCheckpoint, + }, + config::BedrockConfig, }; -pub type SequencerCoreWithMockClients = - crate::SequencerCore; +pub type SequencerCoreWithMockClients = crate::SequencerCore; #[derive(Clone)] -pub struct MockBlockSettlementClient { - bedrock_channel_id: ChannelId, - bedrock_signing_key: Ed25519Key, -} +pub struct MockBlockPublisher; -impl BlockSettlementClientTrait for MockBlockSettlementClient { - fn new(config: &BedrockConfig, signing_key: Ed25519Key) -> Result { - Ok(Self { - bedrock_channel_id: config.channel_id, - bedrock_signing_key: signing_key, - }) +impl BlockPublisherTrait for MockBlockPublisher { + async fn new( + _config: &BedrockConfig, + _bedrock_signing_key: Ed25519Key, + _resubmit_interval: Duration, + _initial_checkpoint: Option, + _on_checkpoint: 
CheckpointSink, + _on_finalized_block: FinalizedBlockSink, + ) -> Result { + Ok(Self) } - fn bedrock_channel_id(&self) -> ChannelId { - self.bedrock_channel_id - } - - fn bedrock_signing_key(&self) -> &Ed25519Key { - &self.bedrock_signing_key - } - - async fn submit_inscribe_tx_to_bedrock(&self, _tx: SignedMantleTx) -> Result<()> { + async fn publish_block(&self, _block: &Block) -> Result<()> { Ok(()) } } - -#[derive(Clone)] -pub struct MockBlockSettlementClientWithError { - bedrock_channel_id: ChannelId, - bedrock_signing_key: Ed25519Key, -} - -impl BlockSettlementClientTrait for MockBlockSettlementClientWithError { - fn new(config: &BedrockConfig, signing_key: Ed25519Key) -> Result { - Ok(Self { - bedrock_channel_id: config.channel_id, - bedrock_signing_key: signing_key, - }) - } - - fn bedrock_channel_id(&self) -> ChannelId { - self.bedrock_channel_id - } - - fn bedrock_signing_key(&self) -> &Ed25519Key { - &self.bedrock_signing_key - } - - async fn submit_inscribe_tx_to_bedrock(&self, _tx: SignedMantleTx) -> Result<()> { - Err(anyhow!("Mock error")) - } -} - -#[derive(Copy, Clone)] -pub struct MockIndexerClient; - -impl IndexerClientTrait for MockIndexerClient { - async fn new(_indexer_url: &Url) -> Result { - Ok(Self) - } -} diff --git a/sequencer/service/Cargo.toml b/sequencer/service/Cargo.toml index 6fee808c..beed6be2 100644 --- a/sequencer/service/Cargo.toml +++ b/sequencer/service/Cargo.toml @@ -14,7 +14,6 @@ mempool.workspace = true sequencer_core = { workspace = true, features = ["testnet"] } sequencer_service_protocol.workspace = true sequencer_service_rpc = { workspace = true, features = ["server"] } -indexer_service_rpc = { workspace = true, features = ["client"] } clap = { workspace = true, features = ["derive", "env"] } anyhow.workspace = true diff --git a/sequencer/service/configs/debug/sequencer_config.json b/sequencer/service/configs/debug/sequencer_config.json index 4088fc4a..bfe963ae 100644 --- 
a/sequencer/service/configs/debug/sequencer_config.json +++ b/sequencer/service/configs/debug/sequencer_config.json @@ -1,7 +1,5 @@ { "home": ".", - "genesis_id": 1, - "is_genesis_random": true, "max_num_tx_in_block": 20, "max_block_size": "1 MiB", "mempool_max_size": 1000, @@ -16,117 +14,29 @@ "node_url": "http://localhost:8080" }, "indexer_rpc_url": "ws://localhost:8779", - "initial_accounts": [ + "genesis": [ { - "account_id": "CbgR6tj5kWx5oziiFptM7jMvrQeYY3Mzaao6ciuhSr2r", - "balance": 10000 - }, - { - "account_id": "2RHZhw9h534Zr3eq2RGhQete2Hh667foECzXPmSkGni2", - "balance": 20000 - } - ], - "initial_commitments": [ - { - "npk": [ - 139, - 19, - 158, - 11, - 155, - 231, - 85, - 206, - 132, - 228, - 220, - 114, - 145, - 89, - 113, - 156, - 238, - 142, - 242, - 74, - 182, - 91, - 43, - 100, - 6, - 190, - 31, - 15, - 31, - 88, - 96, - 204 - ], - "account": { - "program_owner": [ - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0 - ], - "balance": 10000, - "data": [], - "nonce": 0 + "supply_account": { + "account_id": "CbgR6tj5kWx5oziiFptM7jMvrQeYY3Mzaao6ciuhSr2r", + "balance": 10000 } }, { - "npk": [ - 173, - 134, - 33, - 223, - 54, - 226, - 10, - 71, - 215, - 254, - 143, - 172, - 24, - 244, - 243, - 208, - 65, - 112, - 118, - 70, - 217, - 240, - 69, - 100, - 129, - 3, - 121, - 25, - 213, - 132, - 42, - 45 - ], - "account": { - "program_owner": [ - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0 - ], - "balance": 20000, - "data": [], - "nonce": 0 + "supply_account": { + "account_id": "2RHZhw9h534Zr3eq2RGhQete2Hh667foECzXPmSkGni2", + "balance": 20000 + } + }, + { + "supply_account": { + "account_id": "5T7EJX45Vi8rYgVjeiiQbdgM2Eun4Hq5S8uMiupiwWY1", + "balance": 10000 + } + }, + { + "supply_account": { + "account_id": "2ByFEVzfMKC13jvk1nZuFVGYoDVW1iAxvvFnqboV1aAg", + "balance": 20000 } } ], diff --git a/sequencer/service/configs/docker/sequencer_config.json b/sequencer/service/configs/docker/sequencer_config.json index f5a243d5..c9d0e6a6 100644 --- 
a/sequencer/service/configs/docker/sequencer_config.json +++ b/sequencer/service/configs/docker/sequencer_config.json @@ -1,7 +1,5 @@ { "home": "/var/lib/sequencer_service", - "genesis_id": 1, - "is_genesis_random": true, "max_num_tx_in_block": 20, "max_block_size": "1 MiB", "mempool_max_size": 10000, @@ -16,117 +14,29 @@ "node_url": "http://localhost:18080" }, "indexer_rpc_url": "ws://localhost:8779", - "initial_accounts": [ + "genesis": [ { - "account_id": "CbgR6tj5kWx5oziiFptM7jMvrQeYY3Mzaao6ciuhSr2r", - "balance": 10000 - }, - { - "account_id": "2RHZhw9h534Zr3eq2RGhQete2Hh667foECzXPmSkGni2", - "balance": 20000 - } - ], - "initial_commitments": [ - { - "npk": [ - 139, - 19, - 158, - 11, - 155, - 231, - 85, - 206, - 132, - 228, - 220, - 114, - 145, - 89, - 113, - 156, - 238, - 142, - 242, - 74, - 182, - 91, - 43, - 100, - 6, - 190, - 31, - 15, - 31, - 88, - 96, - 204 - ], - "account": { - "program_owner": [ - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0 - ], - "balance": 10000, - "data": [], - "nonce": 0 + "supply_account": { + "account_id": "CbgR6tj5kWx5oziiFptM7jMvrQeYY3Mzaao6ciuhSr2r", + "balance": 10000 } }, { - "npk": [ - 173, - 134, - 33, - 223, - 54, - 226, - 10, - 71, - 215, - 254, - 143, - 172, - 24, - 244, - 243, - 208, - 65, - 112, - 118, - 70, - 217, - 240, - 69, - 100, - 129, - 3, - 121, - 25, - 213, - 132, - 42, - 45 - ], - "account": { - "program_owner": [ - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0 - ], - "balance": 20000, - "data": [], - "nonce": 0 + "supply_account": { + "account_id": "2RHZhw9h534Zr3eq2RGhQete2Hh667foECzXPmSkGni2", + "balance": 20000 + } + }, + { + "supply_account": { + "account_id": "5T7EJX45Vi8rYgVjeiiQbdgM2Eun4Hq5S8uMiupiwWY1", + "balance": 10000 + } + }, + { + "supply_account": { + "account_id": "2ByFEVzfMKC13jvk1nZuFVGYoDVW1iAxvvFnqboV1aAg", + "balance": 20000 } } ], diff --git a/sequencer/service/src/lib.rs b/sequencer/service/src/lib.rs index 5373b31f..319b75ad 100644 --- a/sequencer/service/src/lib.rs +++ 
b/sequencer/service/src/lib.rs @@ -5,15 +5,13 @@ use bytesize::ByteSize; use common::transaction::NSSATransaction; use futures::never::Never; use jsonrpsee::server::ServerHandle; -#[cfg(not(feature = "standalone"))] -use log::warn; use log::{error, info}; use mempool::MemPoolHandle; +#[cfg(not(feature = "standalone"))] +use sequencer_core::SequencerCore; #[cfg(feature = "standalone")] use sequencer_core::SequencerCoreWithMockClients as SequencerCore; pub use sequencer_core::config::*; -#[cfg(not(feature = "standalone"))] -use sequencer_core::{SequencerCore, block_settlement_client::BlockSettlementClientTrait as _}; use sequencer_service_rpc::RpcServer as _; use tokio::{sync::Mutex, task::JoinHandle}; @@ -29,8 +27,6 @@ pub struct SequencerHandle { /// Option because of `Drop` which forbids to simply move out of `self` in `stopped()`. server_handle: Option, main_loop_handle: JoinHandle>, - retry_pending_blocks_loop_handle: JoinHandle>, - listen_for_bedrock_blocks_loop_handle: JoinHandle>, } impl SequencerHandle { @@ -38,15 +34,11 @@ impl SequencerHandle { addr: SocketAddr, server_handle: ServerHandle, main_loop_handle: JoinHandle>, - retry_pending_blocks_loop_handle: JoinHandle>, - listen_for_bedrock_blocks_loop_handle: JoinHandle>, ) -> Self { Self { addr, server_handle: Some(server_handle), main_loop_handle, - retry_pending_blocks_loop_handle, - listen_for_bedrock_blocks_loop_handle, } } @@ -60,8 +52,6 @@ impl SequencerHandle { addr: _, server_handle, main_loop_handle, - retry_pending_blocks_loop_handle, - listen_for_bedrock_blocks_loop_handle, } = &mut self; let server_handle = server_handle.take().expect("Server handle is set"); @@ -75,16 +65,6 @@ impl SequencerHandle { .context("Main loop task panicked")? .context("Main loop exited unexpectedly") } - res = retry_pending_blocks_loop_handle => { - res - .context("Retry pending blocks loop task panicked")? 
- .context("Retry pending blocks loop exited unexpectedly") - } - res = listen_for_bedrock_blocks_loop_handle => { - res - .context("Listen for bedrock blocks loop task panicked")? - .context("Listen for bedrock blocks loop exited unexpectedly") - } } } @@ -98,14 +78,10 @@ impl SequencerHandle { addr: _, server_handle, main_loop_handle, - retry_pending_blocks_loop_handle, - listen_for_bedrock_blocks_loop_handle, } = self; let stopped = server_handle.as_ref().is_none_or(ServerHandle::is_stopped) - || main_loop_handle.is_finished() - || retry_pending_blocks_loop_handle.is_finished() - || listen_for_bedrock_blocks_loop_handle.is_finished(); + || main_loop_handle.is_finished(); !stopped } @@ -121,13 +97,9 @@ impl Drop for SequencerHandle { addr: _, server_handle, main_loop_handle, - retry_pending_blocks_loop_handle, - listen_for_bedrock_blocks_loop_handle, } = self; main_loop_handle.abort(); - retry_pending_blocks_loop_handle.abort(); - listen_for_bedrock_blocks_loop_handle.abort(); let Some(handle) = server_handle else { return; @@ -141,7 +113,6 @@ impl Drop for SequencerHandle { pub async fn run(config: SequencerConfig, port: u16) -> Result { let block_timeout = config.block_create_timeout; - let retry_pending_blocks_timeout = config.retry_pending_blocks_timeout; let max_block_size = config.max_block_size; let (sequencer_core, mempool_handle) = SequencerCore::start_from_config(config).await; @@ -159,34 +130,10 @@ pub async fn run(config: SequencerConfig, port: u16) -> Result .await?; info!("RPC server started"); - #[cfg(not(feature = "standalone"))] - { - info!("Submitting stored pending blocks"); - retry_pending_blocks(&seq_core_wrapped) - .await - .expect("Failed to submit pending blocks on startup"); - } - info!("Starting main sequencer loop"); - let main_loop_handle = tokio::spawn(main_loop(Arc::clone(&seq_core_wrapped), block_timeout)); + let main_loop_handle = tokio::spawn(main_loop(seq_core_wrapped, block_timeout)); - info!("Starting pending block retry 
loop"); - let retry_pending_blocks_loop_handle = tokio::spawn(retry_pending_blocks_loop( - Arc::clone(&seq_core_wrapped), - retry_pending_blocks_timeout, - )); - - info!("Starting bedrock block listening loop"); - let listen_for_bedrock_blocks_loop_handle = - tokio::spawn(listen_for_bedrock_blocks_loop(seq_core_wrapped)); - - Ok(SequencerHandle::new( - addr, - server_handle, - main_loop_handle, - retry_pending_blocks_loop_handle, - listen_for_bedrock_blocks_loop_handle, - )) + Ok(SequencerHandle::new(addr, server_handle, main_loop_handle)) } async fn run_server( @@ -235,118 +182,3 @@ async fn main_loop(seq_core: Arc>, block_timeout: Duration) info!("Waiting for new transactions"); } } - -#[cfg(not(feature = "standalone"))] -async fn retry_pending_blocks(seq_core: &Arc>) -> Result<()> { - use std::time::Instant; - - use log::debug; - - let (mut pending_blocks, block_settlement_client) = { - let sequencer_core = seq_core.lock().await; - let client = sequencer_core.block_settlement_client(); - let pending_blocks = sequencer_core - .get_pending_blocks() - .expect("Sequencer should be able to retrieve pending blocks"); - (pending_blocks, client) - }; - - pending_blocks.sort_by(|block1, block2| block1.header.block_id.cmp(&block2.header.block_id)); - - if !pending_blocks.is_empty() { - info!( - "Resubmitting blocks from {} to {}", - pending_blocks.first().unwrap().header.block_id, - pending_blocks.last().unwrap().header.block_id - ); - } - - for block in &pending_blocks { - debug!( - "Resubmitting pending block with id {}", - block.header.block_id - ); - // TODO: We could cache the inscribe tx for each pending block to avoid re-creating it - // on every retry. 
- let now = Instant::now(); - let (tx, _msg_id) = block_settlement_client - .create_inscribe_tx(block) - .context("Failed to create inscribe tx for pending block")?; - - debug!("Create inscribe: {:?}", now.elapsed()); - - let now = Instant::now(); - if let Err(e) = block_settlement_client - .submit_inscribe_tx_to_bedrock(tx) - .await - { - warn!( - "Failed to resubmit block with id {} with error {e:#}", - block.header.block_id - ); - } - debug!("Post: {:?}", now.elapsed()); - } - Ok(()) -} - -#[cfg(not(feature = "standalone"))] -async fn retry_pending_blocks_loop( - seq_core: Arc>, - retry_pending_blocks_timeout: Duration, -) -> Result { - loop { - tokio::time::sleep(retry_pending_blocks_timeout).await; - retry_pending_blocks(&seq_core).await?; - } -} - -#[cfg(not(feature = "standalone"))] -async fn listen_for_bedrock_blocks_loop(seq_core: Arc>) -> Result { - use indexer_service_rpc::RpcClient as _; - - let indexer_client = seq_core.lock().await.indexer_client(); - - let retry_delay = Duration::from_secs(5); - - loop { - // TODO: Subscribe from the first pending block ID? 
- let mut subscription = indexer_client - .subscribe_to_finalized_blocks() - .await - .context("Failed to subscribe to finalized blocks")?; - - while let Some(block_id) = subscription.next().await { - let block_id = block_id.context("Failed to get next block from subscription")?; - - info!("Received new L2 block with ID {block_id}"); - - seq_core - .lock() - .await - .clean_finalized_blocks_from_db(block_id) - .with_context(|| { - format!("Failed to clean finalized blocks from DB for block ID {block_id}") - })?; - } - - warn!( - "Block subscription closed unexpectedly, reason: {:?}, retrying after {retry_delay:?}", - subscription.close_reason() - ); - tokio::time::sleep(retry_delay).await; - } -} - -#[cfg(feature = "standalone")] -async fn listen_for_bedrock_blocks_loop(_seq_core: Arc>) -> Result { - std::future::pending::>().await -} - -#[cfg(feature = "standalone")] -async fn retry_pending_blocks_loop( - _seq_core: Arc>, - _retry_pending_blocks_timeout: Duration, -) -> Result { - std::future::pending::>().await -} diff --git a/sequencer/service/src/service.rs b/sequencer/service/src/service.rs index 71645363..0bb8e1dd 100644 --- a/sequencer/service/src/service.rs +++ b/sequencer/service/src/service.rs @@ -8,10 +8,7 @@ use jsonrpsee::{ use log::warn; use mempool::MemPoolHandle; use nssa::{self, program::Program}; -use sequencer_core::{ - DbError, SequencerCore, block_settlement_client::BlockSettlementClientTrait, - indexer_client::IndexerClientTrait, -}; +use sequencer_core::{DbError, SequencerCore, block_publisher::BlockPublisherTrait}; use sequencer_service_protocol::{ Account, AccountId, Block, BlockId, Commitment, HashType, MembershipProof, Nonce, ProgramId, }; @@ -19,15 +16,15 @@ use tokio::sync::Mutex; const NOT_FOUND_ERROR_CODE: i32 = -31999; -pub struct SequencerService { - sequencer: Arc>>, +pub struct SequencerService { + sequencer: Arc>>, mempool_handle: MemPoolHandle, max_block_size: u64, } -impl SequencerService { +impl SequencerService { pub const fn 
new( - sequencer: Arc>>, + sequencer: Arc>>, mempool_handle: MemPoolHandle, max_block_size: u64, ) -> Self { @@ -40,8 +37,8 @@ impl SequencerService - sequencer_service_rpc::RpcServer for SequencerService +impl sequencer_service_rpc::RpcServer + for SequencerService { async fn send_transaction(&self, tx: NSSATransaction) -> Result { // Reserve ~200 bytes for block header overhead diff --git a/storage/src/cells/shared_cells.rs b/storage/src/cells/shared_cells.rs index 2a76edf3..1efd0e35 100644 --- a/storage/src/cells/shared_cells.rs +++ b/storage/src/cells/shared_cells.rs @@ -63,6 +63,14 @@ impl SimpleStorableCell for FirstBlockCell { impl SimpleReadableCell for FirstBlockCell {} +impl SimpleWritableCell for FirstBlockCell { + fn value_constructor(&self) -> DbResult> { + borsh::to_vec(&self).map_err(|err| { + DbError::borsh_cast_message(err, Some("Failed to serialize first block id".to_owned())) + }) + } +} + #[derive(Debug, BorshSerialize, BorshDeserialize)] pub struct BlockCell(pub Block); diff --git a/storage/src/indexer/indexer_cells.rs b/storage/src/indexer/indexer_cells.rs index 76a2c035..615902bd 100644 --- a/storage/src/indexer/indexer_cells.rs +++ b/storage/src/indexer/indexer_cells.rs @@ -8,7 +8,8 @@ use crate::{ indexer::{ ACC_NUM_CELL_NAME, BLOCK_HASH_CELL_NAME, BREAKPOINT_CELL_NAME, CF_ACC_META, CF_BREAKPOINT_NAME, CF_HASH_TO_ID, CF_TX_TO_ID, DB_META_LAST_BREAKPOINT_ID, - DB_META_LAST_OBSERVED_L1_LIB_HEADER_ID_IN_DB_KEY, TX_HASH_CELL_NAME, + DB_META_LAST_OBSERVED_L1_LIB_HEADER_ID_IN_DB_KEY, DB_META_ZONE_SDK_INDEXER_CURSOR_KEY, + TX_HASH_CELL_NAME, }, }; @@ -211,6 +212,41 @@ impl SimpleWritableCell for AccNumTxCell { } } +/// Opaque bytes for the zone-sdk indexer cursor `Option<(MsgId, Slot)>`. +/// The caller serializes via `serde_json` (neither type derives borsh). 
+#[derive(BorshDeserialize)] +pub struct ZoneSdkIndexerCursorCellOwned(pub Vec); + +impl SimpleStorableCell for ZoneSdkIndexerCursorCellOwned { + type KeyParams = (); + + const CELL_NAME: &'static str = DB_META_ZONE_SDK_INDEXER_CURSOR_KEY; + const CF_NAME: &'static str = CF_META_NAME; +} + +impl SimpleReadableCell for ZoneSdkIndexerCursorCellOwned {} + +#[derive(BorshSerialize)] +pub struct ZoneSdkIndexerCursorCellRef<'bytes>(pub &'bytes [u8]); + +impl SimpleStorableCell for ZoneSdkIndexerCursorCellRef<'_> { + type KeyParams = (); + + const CELL_NAME: &'static str = DB_META_ZONE_SDK_INDEXER_CURSOR_KEY; + const CF_NAME: &'static str = CF_META_NAME; +} + +impl SimpleWritableCell for ZoneSdkIndexerCursorCellRef<'_> { + fn value_constructor(&self) -> DbResult> { + borsh::to_vec(&self).map_err(|err| { + DbError::borsh_cast_message( + err, + Some("Failed to serialize zone-sdk indexer cursor cell".to_owned()), + ) + }) + } +} + #[cfg(test)] mod uniform_tests { use crate::{ diff --git a/storage/src/indexer/mod.rs b/storage/src/indexer/mod.rs index 7ef21258..97be70e5 100644 --- a/storage/src/indexer/mod.rs +++ b/storage/src/indexer/mod.rs @@ -4,7 +4,7 @@ use common::{ block::Block, transaction::{NSSATransaction, clock_invocation}, }; -use nssa::V03State; +use nssa::{GENESIS_BLOCK_ID, V03State}; use rocksdb::{ BoundColumnFamily, ColumnFamilyDescriptor, DBWithThreadMode, MultiThreaded, Options, }; @@ -22,6 +22,8 @@ pub const DB_META_LAST_OBSERVED_L1_LIB_HEADER_ID_IN_DB_KEY: &str = "last_observed_l1_lib_header_in_db"; /// Key base for storing metainformation about the last breakpoint. pub const DB_META_LAST_BREAKPOINT_ID: &str = "last_breakpoint_id"; +/// Key base for storing the zone-sdk indexer cursor (opaque bytes). +pub const DB_META_ZONE_SDK_INDEXER_CURSOR_KEY: &str = "zone_sdk_indexer_cursor"; /// Cell name for a breakpoint. 
pub const BREAKPOINT_CELL_NAME: &str = "breakpoint"; @@ -54,11 +56,8 @@ impl DBIO for RocksDBIO { } impl RocksDBIO { - pub fn open_or_create( - path: &Path, - genesis_block: &Block, - initial_state: &V03State, - ) -> DbResult { + // TODO: Remove initial state when it will be included in genesis block + pub fn open_or_create(path: &Path, initial_state: &V03State) -> DbResult { let mut cf_opts = Options::default(); cf_opts.set_max_write_buffer_number(16); // ToDo: Add more column families for different data @@ -85,17 +84,9 @@ impl RocksDBIO { let dbio = Self { db }; - let is_start_set = dbio.get_meta_is_first_block_set()?; - if !is_start_set { - let block_id = genesis_block.header.block_id; - dbio.put_meta_last_block_in_db(block_id)?; - dbio.put_meta_first_block_in_db_batch(genesis_block)?; - dbio.put_meta_is_first_block_set()?; - - // First breakpoint setup - dbio.put_breakpoint(0, initial_state)?; - dbio.put_meta_last_breakpoint_id(0)?; - } + // First breakpoint setup + dbio.put_breakpoint(0, initial_state)?; + dbio.put_meta_last_breakpoint_id(0)?; Ok(dbio) } @@ -153,86 +144,108 @@ impl RocksDBIO { // State pub fn calculate_state_for_id(&self, block_id: u64) -> DbResult { - let last_block = self.get_meta_last_block_in_db()?; + let last_block_id = self.get_meta_last_block_id_in_db()?.unwrap_or(0); - if block_id <= last_block { - let br_id = closest_breakpoint_id(block_id); - let mut breakpoint = self.get_breakpoint(br_id)?; + if block_id > last_block_id { + return Err(DbError::db_interaction_error( + "Block on this id not found".to_owned(), + )); + } - // ToDo: update it to handle any genesis id - // right now works correctly only if genesis_id < BREAKPOINT_INTERVAL - let start = if br_id != 0 { - u64::from(BREAKPOINT_INTERVAL) - .checked_mul(br_id) - .expect("Reached maximum breakpoint id") - } else { - self.get_meta_first_block_in_db()? 
- }; + let br_id = closest_breakpoint_id(block_id); + let mut breakpoint = self.get_breakpoint(br_id)?; - for block in self.get_block_batch_seq( - start.checked_add(1).expect("Will be lesser that u64::MAX")..=block_id, - )? { - let expected_clock = - NSSATransaction::Public(clock_invocation(block.header.timestamp)); + let start = u64::from(BREAKPOINT_INTERVAL) + .checked_mul(br_id) + .expect("Reached maximum breakpoint id"); - if let Some((clock_tx, user_txs)) = block.body.transactions.split_last() { - if *clock_tx != expected_clock { - return Err(DbError::db_interaction_error( - "Last transaction in block must be the clock invocation for the block timestamp" - .to_owned(), - )); - } - for transaction in user_txs { - transaction - .clone() - .transaction_stateless_check() - .map_err(|err| { - DbError::db_interaction_error(format!( - "transaction pre check failed with err {err:?}" - )) - })? - .execute_check_on_state( - &mut breakpoint, - block.header.block_id, - block.header.timestamp, - ) - .map_err(|err| { - DbError::db_interaction_error(format!( - "transaction execution failed with err {err:?}" - )) - })?; - } + for mut block in self.get_block_batch_seq( + start.checked_add(1).expect("Will be lesser that u64::MAX")..=block_id, + )? 
{ + let expected_clock = NSSATransaction::Public(clock_invocation(block.header.timestamp)); - let NSSATransaction::Public(clock_public_tx) = clock_tx else { - return Err(DbError::db_interaction_error( - "Clock invocation must be a public transaction".to_owned(), - )); + let clock_tx = block.body.transactions.pop().ok_or_else(|| { + DbError::db_interaction_error( + "Block must contain clock transaction at the end".to_owned(), + ) + })?; + let user_txs = block.body.transactions; + + if clock_tx != expected_clock { + return Err(DbError::db_interaction_error( + "Last transaction in block must be the clock invocation for the block timestamp" + .to_owned(), + )); + } + for transaction in user_txs { + let is_genesis = block.header.block_id == GENESIS_BLOCK_ID; + if is_genesis { + let genesis_tx = match transaction { + NSSATransaction::Public(public_tx) => public_tx, + NSSATransaction::PrivacyPreserving(_) + | NSSATransaction::ProgramDeployment(_) => { + return Err(DbError::db_interaction_error( + "Genesis block should contain only public transactions".to_owned(), + )); + } }; - breakpoint .transition_from_public_transaction( - clock_public_tx, + &genesis_tx, block.header.block_id, block.header.timestamp, ) .map_err(|err| { DbError::db_interaction_error(format!( - "clock transaction execution failed with err {err:?}" + "genesis transaction execution failed with err {err:?}" + )) + })?; + } else { + transaction + .transaction_stateless_check() + .map_err(|err| { + DbError::db_interaction_error(format!( + "transaction pre check failed with err {err:?}" + )) + })? 
+ .execute_check_on_state( + &mut breakpoint, + block.header.block_id, + block.header.timestamp, + ) + .map_err(|err| { + DbError::db_interaction_error(format!( + "transaction execution failed with err {err:?}" )) })?; } } - Ok(breakpoint) - } else { - Err(DbError::db_interaction_error( - "Block on this id not found".to_owned(), - )) + let NSSATransaction::Public(clock_public_tx) = clock_tx else { + return Err(DbError::db_interaction_error( + "Clock invocation must be a public transaction".to_owned(), + )); + }; + + breakpoint + .transition_from_public_transaction( + &clock_public_tx, + block.header.block_id, + block.header.timestamp, + ) + .map_err(|err| { + DbError::db_interaction_error(format!( + "clock transaction execution failed with err {err:?}" + )) + })?; } + + Ok(breakpoint) } pub fn final_state(&self) -> DbResult { - self.calculate_state_for_id(self.get_meta_last_block_in_db()?) + let last_block_id = self.get_meta_last_block_id_in_db()?.unwrap_or(0); + self.calculate_state_for_id(last_block_id) } } @@ -253,7 +266,7 @@ mod tests { use super::*; fn genesis_block() -> Block { - common::test_utils::produce_dummy_block(1, None, vec![]) + produce_dummy_block(1, None, vec![]) } fn acc1_sign_key() -> nssa::PrivateKey { @@ -279,7 +292,6 @@ mod tests { let dbio = RocksDBIO::open_or_create( temdir_path, - &genesis_block(), &nssa::V03State::new_with_genesis_accounts( &[(acc1(), 10000), (acc2(), 20000)], vec![], @@ -288,21 +300,21 @@ mod tests { ) .unwrap(); - let last_id = dbio.get_meta_last_block_in_db().unwrap(); - let first_id = dbio.get_meta_first_block_in_db().unwrap(); + let last_id = dbio.get_meta_last_block_id_in_db().unwrap(); + let first_id = dbio.get_meta_first_block_id_in_db().unwrap(); let is_first_set = dbio.get_meta_is_first_block_set().unwrap(); let last_observed_l1_header = dbio.get_meta_last_observed_l1_lib_header_in_db().unwrap(); let last_br_id = dbio.get_meta_last_breakpoint_id().unwrap(); - let last_block = dbio.get_block(1).unwrap().unwrap(); 
+ let last_block = dbio.get_block(1).unwrap(); let breakpoint = dbio.get_breakpoint(0).unwrap(); let final_state = dbio.final_state().unwrap(); - assert_eq!(last_id, 1); - assert_eq!(first_id, 1); + assert_eq!(last_id, None); + assert_eq!(first_id, None); assert_eq!(last_observed_l1_header, None); - assert!(is_first_set); - assert_eq!(last_br_id, 0); - assert_eq!(last_block.header.hash, genesis_block().header.hash); + assert!(!is_first_set); + assert_eq!(last_br_id, Some(0)); // TODO: Will be None after we remove hardcoded testnet state + assert!(last_block.is_none()); assert_eq!( breakpoint.get_account_by_id(acc1()), final_state.get_account_by_id(acc1()) @@ -320,7 +332,6 @@ mod tests { let dbio = RocksDBIO::open_or_create( temdir_path, - &genesis_block(), &nssa::V03State::new_with_genesis_accounts( &[(acc1(), 10000), (acc2(), 20000)], vec![], @@ -329,7 +340,10 @@ mod tests { ) .unwrap(); - let prev_hash = genesis_block().header.hash; + let genesis_block = genesis_block(); + dbio.put_block(&genesis_block, [0; 32]).unwrap(); + + let prev_hash = genesis_block.header.hash; let from = acc1(); let to = acc2(); let sign_key = acc1_sign_key(); @@ -340,8 +354,8 @@ mod tests { dbio.put_block(&block, [1; 32]).unwrap(); - let last_id = dbio.get_meta_last_block_in_db().unwrap(); - let first_id = dbio.get_meta_first_block_in_db().unwrap(); + let last_id = dbio.get_meta_last_block_id_in_db().unwrap().unwrap(); + let first_id = dbio.get_meta_first_block_id_in_db().unwrap(); let last_observed_l1_header = dbio .get_meta_last_observed_l1_lib_header_in_db() .unwrap() @@ -353,11 +367,11 @@ mod tests { let final_state = dbio.final_state().unwrap(); assert_eq!(last_id, 2); - assert_eq!(first_id, 1); + assert_eq!(first_id, Some(1)); assert_eq!(last_observed_l1_header, [1; 32]); assert!(is_first_set); - assert_eq!(last_br_id, 0); - assert_ne!(last_block.header.hash, genesis_block().header.hash); + assert_eq!(last_br_id, Some(0)); + assert_eq!(last_block.header.hash, block.header.hash); 
assert_eq!( breakpoint.get_account_by_id(acc1()).balance - final_state.get_account_by_id(acc1()).balance, @@ -377,7 +391,6 @@ mod tests { let dbio = RocksDBIO::open_or_create( temdir_path, - &genesis_block(), &nssa::V03State::new_with_genesis_accounts( &[(acc1(), 10000), (acc2(), 20000)], vec![], @@ -390,11 +403,11 @@ mod tests { let to = acc2(); let sign_key = acc1_sign_key(); - for i in 1..=BREAKPOINT_INTERVAL { - let last_id = dbio.get_meta_last_block_in_db().unwrap(); - let last_block = dbio.get_block(last_id).unwrap().unwrap(); - - let prev_hash = last_block.header.hash; + for i in 1..=BREAKPOINT_INTERVAL + 1 { + let prev_hash = dbio.get_meta_last_block_id_in_db().unwrap().map(|last_id| { + let last_block = dbio.get_block(last_id).unwrap().unwrap(); + last_block.header.hash + }); let transfer_tx = common::test_utils::create_transaction_native_token_transfer( from, @@ -403,12 +416,12 @@ mod tests { 1, &sign_key, ); - let block = produce_dummy_block((i + 1).into(), Some(prev_hash), vec![transfer_tx]); + let block = produce_dummy_block(i.into(), prev_hash, vec![transfer_tx]); dbio.put_block(&block, [i; 32]).unwrap(); } - let last_id = dbio.get_meta_last_block_in_db().unwrap(); - let first_id = dbio.get_meta_first_block_in_db().unwrap(); + let last_id = dbio.get_meta_last_block_id_in_db().unwrap().unwrap(); + let first_id = dbio.get_meta_first_block_id_in_db().unwrap(); let is_first_set = dbio.get_meta_is_first_block_set().unwrap(); let last_br_id = dbio.get_meta_last_breakpoint_id().unwrap(); let last_block = dbio.get_block(last_id).unwrap().unwrap(); @@ -417,19 +430,19 @@ mod tests { let final_state = dbio.final_state().unwrap(); assert_eq!(last_id, 101); - assert_eq!(first_id, 1); + assert_eq!(first_id, Some(1)); assert!(is_first_set); - assert_eq!(last_br_id, 1); + assert_eq!(last_br_id, Some(1)); assert_ne!(last_block.header.hash, genesis_block().header.hash); assert_eq!( prev_breakpoint.get_account_by_id(acc1()).balance - 
final_state.get_account_by_id(acc1()).balance, - 100 + 101 ); assert_eq!( final_state.get_account_by_id(acc2()).balance - prev_breakpoint.get_account_by_id(acc2()).balance, - 100 + 101 ); assert_eq!( breakpoint.get_account_by_id(acc1()).balance @@ -450,7 +463,6 @@ mod tests { let dbio = RocksDBIO::open_or_create( temdir_path, - &genesis_block(), &nssa::V03State::new_with_genesis_accounts( &[(acc1(), 10000), (acc2(), 20000)], vec![], @@ -463,31 +475,27 @@ mod tests { let to = acc2(); let sign_key = acc1_sign_key(); - let last_id = dbio.get_meta_last_block_in_db().unwrap(); - let last_block = dbio.get_block(last_id).unwrap().unwrap(); - - let prev_hash = last_block.header.hash; let transfer_tx = common::test_utils::create_transaction_native_token_transfer(from, 0, to, 1, &sign_key); - let block = produce_dummy_block(2, Some(prev_hash), vec![transfer_tx]); + let block = produce_dummy_block(1, None, vec![transfer_tx]); let control_hash1 = block.header.hash; dbio.put_block(&block, [1; 32]).unwrap(); - let last_id = dbio.get_meta_last_block_in_db().unwrap(); + let last_id = dbio.get_meta_last_block_id_in_db().unwrap().unwrap(); let last_block = dbio.get_block(last_id).unwrap().unwrap(); let prev_hash = last_block.header.hash; let transfer_tx = common::test_utils::create_transaction_native_token_transfer(from, 1, to, 1, &sign_key); - let block = produce_dummy_block(3, Some(prev_hash), vec![transfer_tx]); + let block = produce_dummy_block(2, Some(prev_hash), vec![transfer_tx]); let control_hash2 = block.header.hash; dbio.put_block(&block, [2; 32]).unwrap(); - let last_id = dbio.get_meta_last_block_in_db().unwrap(); + let last_id = dbio.get_meta_last_block_id_in_db().unwrap().unwrap(); let last_block = dbio.get_block(last_id).unwrap().unwrap(); let prev_hash = last_block.header.hash; @@ -496,10 +504,10 @@ mod tests { let control_tx_hash1 = transfer_tx.hash(); - let block = produce_dummy_block(4, Some(prev_hash), vec![transfer_tx]); + let block = produce_dummy_block(3, 
Some(prev_hash), vec![transfer_tx]); dbio.put_block(&block, [3; 32]).unwrap(); - let last_id = dbio.get_meta_last_block_in_db().unwrap(); + let last_id = dbio.get_meta_last_block_id_in_db().unwrap().unwrap(); let last_block = dbio.get_block(last_id).unwrap().unwrap(); let prev_hash = last_block.header.hash; @@ -508,7 +516,7 @@ mod tests { let control_tx_hash2 = transfer_tx.hash(); - let block = produce_dummy_block(5, Some(prev_hash), vec![transfer_tx]); + let block = produce_dummy_block(4, Some(prev_hash), vec![transfer_tx]); dbio.put_block(&block, [4; 32]).unwrap(); let control_block_id1 = dbio.get_block_id_by_hash(control_hash1.0).unwrap().unwrap(); @@ -522,10 +530,10 @@ mod tests { .unwrap() .unwrap(); - assert_eq!(control_block_id1, 2); - assert_eq!(control_block_id2, 3); - assert_eq!(control_block_id3, 4); - assert_eq!(control_block_id4, 5); + assert_eq!(control_block_id1, 1); + assert_eq!(control_block_id2, 2); + assert_eq!(control_block_id3, 3); + assert_eq!(control_block_id4, 4); } #[test] @@ -537,7 +545,6 @@ mod tests { let dbio = RocksDBIO::open_or_create( temdir_path, - &genesis_block(), &nssa::V03State::new_with_genesis_accounts( &[(acc1(), 10000), (acc2(), 20000)], vec![], @@ -550,56 +557,52 @@ mod tests { let to = acc2(); let sign_key = acc1_sign_key(); - let last_id = dbio.get_meta_last_block_in_db().unwrap(); - let last_block = dbio.get_block(last_id).unwrap().unwrap(); - - let prev_hash = last_block.header.hash; let transfer_tx = common::test_utils::create_transaction_native_token_transfer(from, 0, to, 1, &sign_key); - let block = produce_dummy_block(2, Some(prev_hash), vec![transfer_tx]); + let block = produce_dummy_block(1, None, vec![transfer_tx]); block_res.push(block.clone()); dbio.put_block(&block, [1; 32]).unwrap(); - let last_id = dbio.get_meta_last_block_in_db().unwrap(); + let last_id = dbio.get_meta_last_block_id_in_db().unwrap().unwrap(); let last_block = dbio.get_block(last_id).unwrap().unwrap(); let prev_hash = last_block.header.hash; 
let transfer_tx = common::test_utils::create_transaction_native_token_transfer(from, 1, to, 1, &sign_key); - let block = produce_dummy_block(3, Some(prev_hash), vec![transfer_tx]); + let block = produce_dummy_block(2, Some(prev_hash), vec![transfer_tx]); block_res.push(block.clone()); dbio.put_block(&block, [2; 32]).unwrap(); - let last_id = dbio.get_meta_last_block_in_db().unwrap(); + let last_id = dbio.get_meta_last_block_id_in_db().unwrap().unwrap(); let last_block = dbio.get_block(last_id).unwrap().unwrap(); let prev_hash = last_block.header.hash; let transfer_tx = common::test_utils::create_transaction_native_token_transfer(from, 2, to, 1, &sign_key); - let block = produce_dummy_block(4, Some(prev_hash), vec![transfer_tx]); + let block = produce_dummy_block(3, Some(prev_hash), vec![transfer_tx]); block_res.push(block.clone()); dbio.put_block(&block, [3; 32]).unwrap(); - let last_id = dbio.get_meta_last_block_in_db().unwrap(); + let last_id = dbio.get_meta_last_block_id_in_db().unwrap().unwrap(); let last_block = dbio.get_block(last_id).unwrap().unwrap(); let prev_hash = last_block.header.hash; let transfer_tx = common::test_utils::create_transaction_native_token_transfer(from, 3, to, 1, &sign_key); - let block = produce_dummy_block(5, Some(prev_hash), vec![transfer_tx]); + let block = produce_dummy_block(4, Some(prev_hash), vec![transfer_tx]); block_res.push(block.clone()); dbio.put_block(&block, [4; 32]).unwrap(); let block_hashes_mem: Vec<[u8; 32]> = block_res.into_iter().map(|bl| bl.header.hash.0).collect(); - // Get blocks before ID 6 (i.e., starting from 5 going backwards), limit 4 - // This should return blocks 5, 4, 3, 2 in descending order - let mut batch_res = dbio.get_block_batch(Some(6), 4).unwrap(); + // Get blocks before ID 5 (i.e., starting from 4 going backwards), limit 4 + // This should return blocks 4, 3, 2, 1 in descending order + let mut batch_res = dbio.get_block_batch(Some(5), 4).unwrap(); batch_res.reverse(); // Reverse to match 
ascending order for comparison let block_hashes_db: Vec<[u8; 32]> = @@ -609,9 +612,9 @@ mod tests { let block_hashes_mem_limited = &block_hashes_mem[1..]; - // Get blocks before ID 6, limit 3 - // This should return blocks 5, 4, 3 in descending order - let mut batch_res_limited = dbio.get_block_batch(Some(6), 3).unwrap(); + // Get blocks before ID 5, limit 3 + // This should return blocks 4, 3, 2 in descending order + let mut batch_res_limited = dbio.get_block_batch(Some(5), 3).unwrap(); batch_res_limited.reverse(); // Reverse to match ascending order for comparison let block_hashes_db_limited: Vec<[u8; 32]> = batch_res_limited @@ -627,7 +630,7 @@ mod tests { .map(|block| block.header.block_id) .collect::>(); - assert_eq!(block_batch_ids, vec![1, 2, 3, 4, 5]); + assert_eq!(block_batch_ids, vec![1, 2, 3, 4]); } #[test] @@ -637,7 +640,6 @@ mod tests { let dbio = RocksDBIO::open_or_create( temdir_path, - &genesis_block(), &nssa::V03State::new_with_genesis_accounts( &[(acc1(), 10000), (acc2(), 20000)], vec![], @@ -652,10 +654,6 @@ mod tests { let mut tx_hash_res = vec![]; - let last_id = dbio.get_meta_last_block_in_db().unwrap(); - let last_block = dbio.get_block(last_id).unwrap().unwrap(); - - let prev_hash = last_block.header.hash; let transfer_tx1 = common::test_utils::create_transaction_native_token_transfer(from, 0, to, 1, &sign_key); let transfer_tx2 = @@ -663,11 +661,11 @@ mod tests { tx_hash_res.push(transfer_tx1.hash().0); tx_hash_res.push(transfer_tx2.hash().0); - let block = produce_dummy_block(2, Some(prev_hash), vec![transfer_tx1, transfer_tx2]); + let block = produce_dummy_block(1, None, vec![transfer_tx1, transfer_tx2]); dbio.put_block(&block, [1; 32]).unwrap(); - let last_id = dbio.get_meta_last_block_in_db().unwrap(); + let last_id = dbio.get_meta_last_block_id_in_db().unwrap().unwrap(); let last_block = dbio.get_block(last_id).unwrap().unwrap(); let prev_hash = last_block.header.hash; @@ -678,11 +676,11 @@ mod tests { 
tx_hash_res.push(transfer_tx1.hash().0); tx_hash_res.push(transfer_tx2.hash().0); - let block = produce_dummy_block(3, Some(prev_hash), vec![transfer_tx1, transfer_tx2]); + let block = produce_dummy_block(2, Some(prev_hash), vec![transfer_tx1, transfer_tx2]); dbio.put_block(&block, [2; 32]).unwrap(); - let last_id = dbio.get_meta_last_block_in_db().unwrap(); + let last_id = dbio.get_meta_last_block_id_in_db().unwrap().unwrap(); let last_block = dbio.get_block(last_id).unwrap().unwrap(); let prev_hash = last_block.header.hash; @@ -693,11 +691,11 @@ mod tests { tx_hash_res.push(transfer_tx1.hash().0); tx_hash_res.push(transfer_tx2.hash().0); - let block = produce_dummy_block(4, Some(prev_hash), vec![transfer_tx1, transfer_tx2]); + let block = produce_dummy_block(3, Some(prev_hash), vec![transfer_tx1, transfer_tx2]); dbio.put_block(&block, [3; 32]).unwrap(); - let last_id = dbio.get_meta_last_block_in_db().unwrap(); + let last_id = dbio.get_meta_last_block_id_in_db().unwrap().unwrap(); let last_block = dbio.get_block(last_id).unwrap().unwrap(); let prev_hash = last_block.header.hash; @@ -705,7 +703,7 @@ mod tests { common::test_utils::create_transaction_native_token_transfer(from, 6, to, 1, &sign_key); tx_hash_res.push(transfer_tx.hash().0); - let block = produce_dummy_block(5, Some(prev_hash), vec![transfer_tx]); + let block = produce_dummy_block(4, Some(prev_hash), vec![transfer_tx]); dbio.put_block(&block, [4; 32]).unwrap(); diff --git a/storage/src/indexer/read_multiple.rs b/storage/src/indexer/read_multiple.rs index 866fc7b0..d91e9627 100644 --- a/storage/src/indexer/read_multiple.rs +++ b/storage/src/indexer/read_multiple.rs @@ -12,7 +12,10 @@ impl RocksDBIO { before_id.saturating_sub(1) } else { // Get the latest block ID - self.get_meta_last_block_in_db()? + let Some(last) = self.get_meta_last_block_id_in_db()? 
else {
+            return Ok(vec![]); // No blocks in the database
+        };
+        last
     };

     for i in 0..limit {
diff --git a/storage/src/indexer/read_once.rs b/storage/src/indexer/read_once.rs
index b1ae0ada..6e79adc4 100644
--- a/storage/src/indexer/read_once.rs
+++ b/storage/src/indexer/read_once.rs
@@ -4,7 +4,7 @@ use crate::{
     cells::shared_cells::{BlockCell, FirstBlockCell, FirstBlockSetCell, LastBlockCell},
     indexer::indexer_cells::{
         AccNumTxCell, BlockHashToBlockIdMapCell, BreakpointCellOwned, LastBreakpointIdCell,
-        LastObservedL1LibHeaderCell, TxHashToBlockIdMapCell,
+        LastObservedL1LibHeaderCell, TxHashToBlockIdMapCell, ZoneSdkIndexerCursorCellOwned,
     },
 };
@@ -12,12 +12,14 @@ impl RocksDBIO {
     // Meta

-    pub fn get_meta_first_block_in_db(&self) -> DbResult<u64> {
-        self.get::<FirstBlockCell>(()).map(|cell| cell.0)
+    pub fn get_meta_first_block_id_in_db(&self) -> DbResult<Option<u64>> {
+        self.get_opt::<FirstBlockCell>(())
+            .map(|opt| opt.map(|cell| cell.0))
     }

-    pub fn get_meta_last_block_in_db(&self) -> DbResult<u64> {
-        self.get::<LastBlockCell>(()).map(|cell| cell.0)
+    pub fn get_meta_last_block_id_in_db(&self) -> DbResult<Option<u64>> {
+        self.get_opt::<LastBlockCell>(())
+            .map(|opt| opt.map(|cell| cell.0))
     }

     pub fn get_meta_last_observed_l1_lib_header_in_db(&self) -> DbResult<Option<[u8; 32]>> {
@@ -29,8 +31,9 @@
         Ok(self.get_opt::<FirstBlockSetCell>(())?.is_some())
     }

-    pub fn get_meta_last_breakpoint_id(&self) -> DbResult<u64> {
-        self.get::<LastBreakpointIdCell>(()).map(|cell| cell.0)
+    pub fn get_meta_last_breakpoint_id(&self) -> DbResult<Option<u64>> {
+        self.get_opt::<LastBreakpointIdCell>(())
+            .map(|opt| opt.map(|cell| cell.0))
     }

     // Block
@@ -64,4 +67,10 @@
         self.get_opt::<AccNumTxCell>(acc_id)
             .map(|opt| opt.map(|cell| cell.0))
     }
+
+    pub fn get_zone_sdk_indexer_cursor_bytes(&self) -> DbResult<Option<Vec<u8>>> {
+        Ok(self
+            .get_opt::<ZoneSdkIndexerCursorCellOwned>(())?
+ .map(|cell| cell.0)) + } } diff --git a/storage/src/indexer/write_atomic.rs b/storage/src/indexer/write_atomic.rs index 9b661f3b..7e05791f 100644 --- a/storage/src/indexer/write_atomic.rs +++ b/storage/src/indexer/write_atomic.rs @@ -4,8 +4,8 @@ use rocksdb::WriteBatch; use super::{BREAKPOINT_INTERVAL, Block, DbError, DbResult, RocksDBIO}; use crate::{ - DB_META_FIRST_BLOCK_IN_DB_KEY, DBIO as _, - cells::shared_cells::{FirstBlockSetCell, LastBlockCell}, + DBIO as _, + cells::shared_cells::{FirstBlockCell, FirstBlockSetCell, LastBlockCell}, indexer::indexer_cells::{ AccNumTxCell, BlockHashToBlockIdMapCell, LastBreakpointIdCell, LastObservedL1LibHeaderCell, TxHashToBlockIdMapCell, @@ -143,28 +143,12 @@ impl RocksDBIO { // Meta - pub fn put_meta_first_block_in_db_batch(&self, block: &Block) -> DbResult<()> { - let cf_meta = self.meta_column(); - self.db - .put_cf( - &cf_meta, - borsh::to_vec(&DB_META_FIRST_BLOCK_IN_DB_KEY).map_err(|err| { - DbError::borsh_cast_message( - err, - Some("Failed to serialize DB_META_FIRST_BLOCK_IN_DB_KEY".to_owned()), - ) - })?, - borsh::to_vec(&block.header.block_id).map_err(|err| { - DbError::borsh_cast_message( - err, - Some("Failed to serialize first block id".to_owned()), - ) - })?, - ) - .map_err(|rerr| DbError::rocksdb_cast_message(rerr, None))?; - - self.put_block(block, [0; 32])?; - Ok(()) + pub fn put_meta_first_block_in_db_batch( + &self, + block: &Block, + write_batch: &mut WriteBatch, + ) -> DbResult<()> { + self.put_batch(&FirstBlockCell(block.header.block_id), (), write_batch) } pub fn put_meta_last_block_in_db_batch( @@ -199,7 +183,7 @@ impl RocksDBIO { pub fn put_block(&self, block: &Block, l1_lib_header: [u8; 32]) -> DbResult<()> { let cf_block = self.block_column(); - let last_curr_block = self.get_meta_last_block_in_db()?; + let last_curr_block = self.get_meta_last_block_id_in_db()?.unwrap_or(0); let mut write_batch = WriteBatch::default(); write_batch.put_cf( @@ -216,6 +200,10 @@ impl RocksDBIO { 
self.put_meta_last_block_in_db_batch(block.header.block_id, &mut write_batch)?; self.put_meta_last_observed_l1_lib_header_in_db_batch(l1_lib_header, &mut write_batch)?; } + if last_curr_block == 0 { + self.put_meta_first_block_in_db_batch(block, &mut write_batch)?; + self.put_meta_is_first_block_set_batch(&mut write_batch)?; + } self.put_block_id_by_hash_batch( block.header.hash.into(), diff --git a/storage/src/indexer/write_non_atomic.rs b/storage/src/indexer/write_non_atomic.rs index 62b466a2..7ddab1dd 100644 --- a/storage/src/indexer/write_non_atomic.rs +++ b/storage/src/indexer/write_non_atomic.rs @@ -4,6 +4,7 @@ use crate::{ cells::shared_cells::{FirstBlockSetCell, LastBlockCell}, indexer::indexer_cells::{ BreakpointCellRef, LastBreakpointIdCell, LastObservedL1LibHeaderCell, + ZoneSdkIndexerCursorCellRef, }, }; @@ -30,6 +31,10 @@ impl RocksDBIO { self.put(&FirstBlockSetCell(true), ()) } + pub fn put_zone_sdk_indexer_cursor_bytes(&self, bytes: &[u8]) -> DbResult<()> { + self.put(&ZoneSdkIndexerCursorCellRef(bytes), ()) + } + // State pub fn put_breakpoint(&self, br_id: u64, breakpoint: &V03State) -> DbResult<()> { @@ -37,9 +42,10 @@ impl RocksDBIO { } pub fn put_next_breakpoint(&self) -> DbResult<()> { - let last_block = self.get_meta_last_block_in_db()?; + let last_block = self.get_meta_last_block_id_in_db()?.unwrap_or(0); let next_breakpoint_id = self .get_meta_last_breakpoint_id()? 
+ .unwrap_or(0) .checked_add(1) .expect("Breakpoint Id will be lesser than u64::MAX"); let block_to_break_id = next_breakpoint_id diff --git a/storage/src/sequencer/mod.rs b/storage/src/sequencer/mod.rs index 508f6c29..be5e5cfe 100644 --- a/storage/src/sequencer/mod.rs +++ b/storage/src/sequencer/mod.rs @@ -12,7 +12,7 @@ use crate::{ error::DbError, sequencer::sequencer_cells::{ LastFinalizedBlockIdCell, LatestBlockMetaCellOwned, LatestBlockMetaCellRef, - NSSAStateCellOwned, NSSAStateCellRef, + NSSAStateCellOwned, NSSAStateCellRef, ZoneSdkCheckpointCellOwned, ZoneSdkCheckpointCellRef, }, }; @@ -22,6 +22,8 @@ pub mod sequencer_cells; pub const DB_META_LAST_FINALIZED_BLOCK_ID: &str = "last_finalized_block_id"; /// Key base for storing metainformation about the latest block meta. pub const DB_META_LATEST_BLOCK_META_KEY: &str = "latest_block_meta"; +/// Key base for storing the zone-sdk sequencer checkpoint (opaque bytes). +pub const DB_META_ZONE_SDK_CHECKPOINT_KEY: &str = "zone_sdk_checkpoint"; /// Key base for storing the NSSA state. 
pub const DB_NSSA_STATE_KEY: &str = "nssa_state"; @@ -40,36 +42,26 @@ impl DBIO for RocksDBIO { } impl RocksDBIO { - pub fn open_or_create( + pub fn open(path: &Path) -> DbResult { + let db_opts = Options::default(); + Self::open_inner(path, &db_opts) + } + + pub fn create( path: &Path, genesis_block: &Block, genesis_msg_id: MantleMsgId, + genesis_state: &V03State, ) -> DbResult { - let mut cf_opts = Options::default(); - cf_opts.set_max_write_buffer_number(16); - // ToDo: Add more column families for different data - let cfb = ColumnFamilyDescriptor::new(CF_BLOCK_NAME, cf_opts.clone()); - let cfmeta = ColumnFamilyDescriptor::new(CF_META_NAME, cf_opts.clone()); - let cfstate = ColumnFamilyDescriptor::new(CF_NSSA_STATE_NAME, cf_opts.clone()); - let mut db_opts = Options::default(); db_opts.create_missing_column_families(true); db_opts.create_if_missing(true); - let db = DBWithThreadMode::::open_cf_descriptors( - &db_opts, - path, - vec![cfb, cfmeta, cfstate], - ) - .map_err(|err| DbError::RocksDbError { - error: err, - additional_info: Some("Failed to open or create DB".to_owned()), - })?; - - let dbio = Self { db }; + let dbio = Self::open_inner(path, &db_opts)?; let is_start_set = dbio.get_meta_is_first_block_set()?; if !is_start_set { let block_id = genesis_block.header.block_id; + // TODO: Shouldn't this be atomic (batched)? 
dbio.put_meta_first_block_in_db(genesis_block, genesis_msg_id)?; dbio.put_meta_is_first_block_set()?; dbio.put_meta_last_block_in_db(block_id)?; @@ -79,11 +71,35 @@ impl RocksDBIO { hash: genesis_block.header.hash, msg_id: genesis_msg_id, })?; + dbio.put_nssa_state_in_db(genesis_state)?; } Ok(dbio) } + fn open_inner(path: &Path, db_opts: &Options) -> DbResult { + let mut cf_opts = Options::default(); + cf_opts.set_max_write_buffer_number(16); + + // ToDo: Add more column families for different data + let cfb = ColumnFamilyDescriptor::new(CF_BLOCK_NAME, cf_opts.clone()); + let cfmeta = ColumnFamilyDescriptor::new(CF_META_NAME, cf_opts.clone()); + let cfstate = ColumnFamilyDescriptor::new(CF_NSSA_STATE_NAME, cf_opts.clone()); + + let db = DBWithThreadMode::::open_cf_descriptors( + db_opts, + path, + vec![cfb, cfmeta, cfstate], + ) + .map_err(|err| DbError::RocksDbError { + error: err, + additional_info: Some("Failed to open or create DB".to_owned()), + })?; + + let dbio = Self { db }; + Ok(dbio) + } + pub fn destroy(path: &Path) -> DbResult<()> { let mut cf_opts = Options::default(); cf_opts.set_max_write_buffer_number(16); @@ -133,7 +149,15 @@ impl RocksDBIO { Ok(self.get_opt::(())?.is_some()) } - pub fn put_nssa_state_in_db(&self, state: &V03State, batch: &mut WriteBatch) -> DbResult<()> { + pub fn put_nssa_state_in_db(&self, state: &V03State) -> DbResult<()> { + self.put(&NSSAStateCellRef(state), ()) + } + + pub fn put_nssa_state_in_db_batch( + &self, + state: &V03State, + batch: &mut WriteBatch, + ) -> DbResult<()> { self.put_batch(&NSSAStateCellRef(state), (), batch) } @@ -205,6 +229,16 @@ impl RocksDBIO { self.get::(()).map(|val| val.0) } + pub fn get_zone_sdk_checkpoint_bytes(&self) -> DbResult>> { + Ok(self + .get_opt::(())? 
+ .map(|cell| cell.0)) + } + + pub fn put_zone_sdk_checkpoint_bytes(&self, bytes: &[u8]) -> DbResult<()> { + self.put(&ZoneSdkCheckpointCellRef(bytes), ()) + } + pub fn put_block( &self, block: &Block, @@ -275,6 +309,22 @@ impl RocksDBIO { Ok(()) } + /// Mark every pending block with `block_id <= last_finalized` as finalized. + /// Idempotent — already-finalized blocks are skipped. + pub fn clean_pending_blocks_up_to(&self, last_finalized: u64) -> DbResult<()> { + let pending_ids: Vec = self + .get_all_blocks() + .filter_map(Result::ok) + .filter(|b| matches!(b.bedrock_status, BedrockStatus::Pending)) + .map(|b| b.header.block_id) + .filter(|id| *id <= last_finalized) + .collect(); + for id in pending_ids { + self.mark_block_as_finalized(id)?; + } + Ok(()) + } + pub fn mark_block_as_finalized(&self, block_id: u64) -> DbResult<()> { let mut block = self.get_block(block_id)?.ok_or_else(|| { DbError::db_interaction_error(format!("Block with id {block_id} not found")) @@ -338,7 +388,7 @@ impl RocksDBIO { let block_id = block.header.block_id; let mut batch = WriteBatch::default(); self.put_block(block, msg_id, false, &mut batch)?; - self.put_nssa_state_in_db(state, &mut batch)?; + self.put_nssa_state_in_db_batch(state, &mut batch)?; self.db.write(batch).map_err(|rerr| { DbError::rocksdb_cast_message( rerr, diff --git a/storage/src/sequencer/sequencer_cells.rs b/storage/src/sequencer/sequencer_cells.rs index 0ad092d7..2bf65367 100644 --- a/storage/src/sequencer/sequencer_cells.rs +++ b/storage/src/sequencer/sequencer_cells.rs @@ -8,7 +8,7 @@ use crate::{ error::DbError, sequencer::{ CF_NSSA_STATE_NAME, DB_META_LAST_FINALIZED_BLOCK_ID, DB_META_LATEST_BLOCK_META_KEY, - DB_NSSA_STATE_KEY, + DB_META_ZONE_SDK_CHECKPOINT_KEY, DB_NSSA_STATE_KEY, }, }; @@ -95,6 +95,42 @@ impl SimpleWritableCell for LatestBlockMetaCellRef<'_> { } } +/// Opaque bytes for the zone-sdk sequencer checkpoint. 
The caller is +/// responsible for the actual encoding (we use `serde_json` since +/// `SequencerCheckpoint` only derives serde, not borsh). +#[derive(BorshDeserialize)] +pub struct ZoneSdkCheckpointCellOwned(pub Vec); + +impl SimpleStorableCell for ZoneSdkCheckpointCellOwned { + type KeyParams = (); + + const CELL_NAME: &'static str = DB_META_ZONE_SDK_CHECKPOINT_KEY; + const CF_NAME: &'static str = CF_META_NAME; +} + +impl SimpleReadableCell for ZoneSdkCheckpointCellOwned {} + +#[derive(BorshSerialize)] +pub struct ZoneSdkCheckpointCellRef<'bytes>(pub &'bytes [u8]); + +impl SimpleStorableCell for ZoneSdkCheckpointCellRef<'_> { + type KeyParams = (); + + const CELL_NAME: &'static str = DB_META_ZONE_SDK_CHECKPOINT_KEY; + const CF_NAME: &'static str = CF_META_NAME; +} + +impl SimpleWritableCell for ZoneSdkCheckpointCellRef<'_> { + fn value_constructor(&self) -> DbResult> { + borsh::to_vec(&self).map_err(|err| { + DbError::borsh_cast_message( + err, + Some("Failed to serialize zone-sdk checkpoint cell".to_owned()), + ) + }) + } +} + #[cfg(test)] mod uniform_tests { use crate::{ diff --git a/test_program_methods/guest/Cargo.toml b/test_program_methods/guest/Cargo.toml index 46edeb61..ca8cdc1d 100644 --- a/test_program_methods/guest/Cargo.toml +++ b/test_program_methods/guest/Cargo.toml @@ -9,6 +9,7 @@ workspace = true [dependencies] nssa_core.workspace = true +authenticated_transfer_core.workspace = true clock_core.workspace = true risc0-zkvm.workspace = true diff --git a/test_program_methods/guest/src/bin/auth_transfer_proxy.rs b/test_program_methods/guest/src/bin/auth_transfer_proxy.rs new file mode 100644 index 00000000..b3590074 --- /dev/null +++ b/test_program_methods/guest/src/bin/auth_transfer_proxy.rs @@ -0,0 +1,104 @@ +use nssa_core::program::{ + AccountPostState, ChainedCall, PdaSeed, ProgramId, ProgramInput, ProgramOutput, + read_nssa_inputs, +}; + +/// PDA authorization program that delegates balance operations to `authenticated_transfer`. 
+/// +/// The PDA is owned by `authenticated_transfer`, not by this program. This program's role +/// is solely to provide PDA authorization via `pda_seeds` in chained calls. +/// +/// Instruction: `(pda_seed, auth_transfer_id, amount, is_withdraw)`. +/// +/// **Init** (`is_withdraw = false`, 1 pre-state `[pda]`): +/// Chains to `authenticated_transfer` with `instruction=0` (init path) and `pda_seeds=[seed]` +/// to initialize the PDA under `authenticated_transfer`'s ownership. +/// +/// **Withdraw** (`is_withdraw = true`, 2 pre-states `[pda, recipient]`): +/// Chains to `authenticated_transfer` with the amount and `pda_seeds=[seed]` to authorize +/// the PDA for a balance transfer. The actual balance modification happens in +/// `authenticated_transfer`, not here. +/// +/// **Deposit**: done directly via `authenticated_transfer` (no need for this program). +type Instruction = (PdaSeed, ProgramId, u128, bool); + +#[expect( + clippy::allow_attributes, + reason = "allow is needed because the clones are only redundant in test compilation" +)] +#[allow( + clippy::redundant_clone, + reason = "clones needed in non-test compilation" +)] +fn main() { + let ( + ProgramInput { + self_program_id, + caller_program_id, + pre_states, + instruction: (pda_seed, auth_transfer_id, amount, is_withdraw), + }, + instruction_words, + ) = read_nssa_inputs::(); + + if is_withdraw { + let Ok([pda_pre, recipient_pre]) = <[_; 2]>::try_from(pre_states.clone()) else { + panic!("expected exactly 2 pre_states for withdraw: [pda, recipient]"); + }; + + // Post-states stay unchanged in this program. The actual balance transfer + // happens in the chained call to authenticated_transfer. + let pda_post = AccountPostState::new(pda_pre.account.clone()); + let recipient_post = AccountPostState::new(recipient_pre.account.clone()); + + // Chain to authenticated_transfer with pda_seeds to authorize the PDA. 
+ // The circuit's resolve_authorization_and_record_bindings establishes the + // private PDA (seed, npk) binding when pda_seeds match the private PDA derivation. + let mut auth_pda_pre = pda_pre; + auth_pda_pre.is_authorized = true; + let auth_call = ChainedCall::new( + auth_transfer_id, + vec![auth_pda_pre, recipient_pre], + &authenticated_transfer_core::Instruction::Transfer { amount }, + ) + .with_pda_seeds(vec![pda_seed]); + + ProgramOutput::new( + self_program_id, + caller_program_id, + instruction_words, + pre_states, + vec![pda_post, recipient_post], + ) + .with_chained_calls(vec![auth_call]) + .write(); + } else { + // Init: initialize the PDA under authenticated_transfer's ownership. + let Ok([pda_pre]) = <[_; 1]>::try_from(pre_states.clone()) else { + panic!("expected exactly 1 pre_state for init: [pda]"); + }; + + let pda_post = AccountPostState::new(pda_pre.account.clone()); + + // Chain to authenticated_transfer with instruction=0 (init path) and pda_seeds + // to authorize the PDA. authenticated_transfer will claim it with Claim::Authorized. 
+ let mut auth_pda_pre = pda_pre; + auth_pda_pre.is_authorized = true; + let auth_call = ChainedCall::new( + auth_transfer_id, + vec![auth_pda_pre], + &authenticated_transfer_core::Instruction::Initialize, + ) + .with_pda_seeds(vec![pda_seed]); + + ProgramOutput::new( + self_program_id, + caller_program_id, + instruction_words, + pre_states, + vec![pda_post], + ) + .with_chained_calls(vec![auth_call]) + .write(); + } +} diff --git a/test_program_methods/guest/src/bin/chain_caller.rs b/test_program_methods/guest/src/bin/chain_caller.rs index 5c124bed..ac25301b 100644 --- a/test_program_methods/guest/src/bin/chain_caller.rs +++ b/test_program_methods/guest/src/bin/chain_caller.rs @@ -1,3 +1,4 @@ +use authenticated_transfer_core::Instruction as AuthTransferInstruction; use nssa_core::program::{ AccountPostState, ChainedCall, PdaSeed, ProgramId, ProgramInput, ProgramOutput, read_nssa_inputs, @@ -25,7 +26,7 @@ fn main() { return; }; - let instruction_data = to_vec(&balance).unwrap(); + let instruction_data = to_vec(&AuthTransferInstruction::Transfer { amount: balance }).unwrap(); let mut running_recipient_pre = recipient_pre.clone(); let mut running_sender_pre = sender_pre.clone(); diff --git a/test_program_methods/guest/src/bin/flash_swap_callback.rs b/test_program_methods/guest/src/bin/flash_swap_callback.rs index 251833bb..ca596163 100644 --- a/test_program_methods/guest/src/bin/flash_swap_callback.rs +++ b/test_program_methods/guest/src/bin/flash_swap_callback.rs @@ -63,7 +63,10 @@ fn main() { // Mark the receiver as authorized since it will be PDA-authorized in this chained call. 
let mut receiver_authorized = receiver_pre.clone(); receiver_authorized.is_authorized = true; - let transfer_instruction = risc0_zkvm::serde::to_vec(&instruction.amount) + let transfer_instruction = + risc0_zkvm::serde::to_vec(&authenticated_transfer_core::Instruction::Transfer { + amount: instruction.amount, + }) .expect("transfer instruction serialization"); chained_calls.push(ChainedCall { diff --git a/test_program_methods/guest/src/bin/flash_swap_initiator.rs b/test_program_methods/guest/src/bin/flash_swap_initiator.rs index 27d1f317..c6a76ebd 100644 --- a/test_program_methods/guest/src/bin/flash_swap_initiator.rs +++ b/test_program_methods/guest/src/bin/flash_swap_initiator.rs @@ -123,7 +123,10 @@ fn main() { let mut vault_authorized = vault_pre.clone(); vault_authorized.is_authorized = true; let transfer_instruction = - risc0_zkvm::serde::to_vec(&amount_out).expect("transfer instruction serialization"); + risc0_zkvm::serde::to_vec(&authenticated_transfer_core::Instruction::Transfer { + amount: amount_out, + }) + .expect("transfer instruction serialization"); let call_1 = ChainedCall { program_id: token_program_id, pre_states: vec![vault_authorized, receiver_pre.clone()], diff --git a/test_program_methods/guest/src/bin/malicious_authorization_changer.rs b/test_program_methods/guest/src/bin/malicious_authorization_changer.rs index f7aba4a0..894f22bf 100644 --- a/test_program_methods/guest/src/bin/malicious_authorization_changer.rs +++ b/test_program_methods/guest/src/bin/malicious_authorization_changer.rs @@ -32,7 +32,8 @@ fn main() { ..sender.clone() }; - let instruction_data = to_vec(&balance).unwrap(); + let instruction_data = + to_vec(&authenticated_transfer_core::Instruction::Transfer { amount: balance }).unwrap(); let chained_call = ChainedCall { program_id: transfer_program_id, diff --git a/test_program_methods/guest/src/bin/pda_fund_spend_proxy.rs b/test_program_methods/guest/src/bin/pda_fund_spend_proxy.rs new file mode 100644 index 00000000..567f9af1 
--- /dev/null +++ b/test_program_methods/guest/src/bin/pda_fund_spend_proxy.rs @@ -0,0 +1,71 @@ +use nssa_core::{ + account::AccountWithMetadata, + program::{ + AccountPostState, ChainedCall, PdaSeed, ProgramId, ProgramInput, ProgramOutput, + read_nssa_inputs, + }, +}; +use risc0_zkvm::serde::to_vec; + +/// Proxy for interacting with private PDAs via `auth_transfer`. +/// +/// The `is_fund` flag selects the operating mode: +/// +/// - `false` (Spend): `pre_states = [pda (authorized), recipient]`. Debits the PDA. The PDA-to-npk +/// binding is established via `pda_seeds` in the chained call to `auth_transfer`. +/// +/// - `true` (Fund): `pre_states = [sender (authorized), pda (foreign/uninitialized)]`. Credits the +/// PDA. A direct call to `auth_transfer` cannot bind the PDA because `auth_transfer` uses +/// `Claim::Authorized`, not `Claim::Pda`. Routing through this proxy establishes the binding via +/// `pda_seeds` in the chained call. +type Instruction = (PdaSeed, u128, ProgramId, bool); + +fn main() { + let ( + ProgramInput { + self_program_id, + caller_program_id, + pre_states, + instruction: (seed, amount, auth_transfer_id, is_fund), + }, + instruction_words, + ) = read_nssa_inputs::(); + + let Ok([first, second]) = <[_; 2]>::try_from(pre_states) else { + return; + }; + + assert!(first.is_authorized, "first pre_state must be authorized"); + + let chained_pre_states = if is_fund { + let pda_authorized = AccountWithMetadata { + account: second.account.clone(), + account_id: second.account_id, + is_authorized: true, + }; + vec![first.clone(), pda_authorized] + } else { + vec![first.clone(), second.clone()] + }; + + let first_post = AccountPostState::new(first.account.clone()); + let second_post = AccountPostState::new(second.account.clone()); + + let chained_call = ChainedCall { + program_id: auth_transfer_id, + instruction_data: to_vec(&authenticated_transfer_core::Instruction::Transfer { amount }) + .unwrap(), + pre_states: chained_pre_states, + pda_seeds: 
vec![seed], + }; + + ProgramOutput::new( + self_program_id, + caller_program_id, + instruction_words, + vec![first, second], + vec![first_post, second_post], + ) + .with_chained_calls(vec![chained_call]) + .write(); +} diff --git a/testnet_initial_state/src/lib.rs b/testnet_initial_state/src/lib.rs index f6f1e288..668d5f24 100644 --- a/testnet_initial_state/src/lib.rs +++ b/testnet_initial_state/src/lib.rs @@ -1,6 +1,7 @@ use common::PINATA_BASE58; use key_protocol::key_management::{ KeyChain, + key_tree::chain_index::ChainIndex, secret_holders::{PrivateKeyHolder, SecretSpendingKey}, }; use nssa::{Account, AccountId, Data, PrivateKey, PublicKey, V03State}; @@ -97,13 +98,17 @@ pub struct PublicAccountPrivateInitialData { pub struct PrivateAccountPrivateInitialData { pub account: nssa_core::account::Account, pub key_chain: KeyChain, + pub chain_index: Option, pub identifier: nssa_core::Identifier, } impl PrivateAccountPrivateInitialData { #[must_use] pub fn account_id(&self) -> nssa::AccountId { - nssa::AccountId::from((&self.key_chain.nullifier_public_key, self.identifier)) + nssa::AccountId::for_regular_private_account( + &self.key_chain.nullifier_public_key, + self.identifier, + ) } } @@ -156,6 +161,7 @@ pub fn initial_priv_accounts_private_keys() -> Vec Vec V03State { .iter() .map(|init_comm_data| { let npk = &init_comm_data.npk; - let account_id = nssa::AccountId::from((npk, 0)); + let account_id = nssa::AccountId::for_regular_private_account(npk, 0); let mut acc = init_comm_data.account.clone(); diff --git a/wallet-ffi/Cargo.toml b/wallet-ffi/Cargo.toml index 0af20a54..869845c8 100644 --- a/wallet-ffi/Cargo.toml +++ b/wallet-ffi/Cargo.toml @@ -15,7 +15,10 @@ wallet.workspace = true nssa.workspace = true nssa_core.workspace = true sequencer_service_rpc = { workspace = true, features = ["client"] } + tokio.workspace = true +key_protocol.workspace = true +serde_json.workspace = true [build-dependencies] cbindgen = "0.29" diff --git a/wallet-ffi/src/account.rs 
b/wallet-ffi/src/account.rs index 6214ab01..ed27abe6 100644 --- a/wallet-ffi/src/account.rs +++ b/wallet-ffi/src/account.rs @@ -1,17 +1,20 @@ //! Account management functions. -use std::ptr; +use std::{ffi::c_char, ptr, str::FromStr as _}; +use key_protocol::key_management::{key_tree::chain_index::ChainIndex, KeyChain}; use nssa::AccountId; +use wallet::account::{AccountIdWithPrivacy, HumanReadableAccount}; use crate::{ - block_on, + block_on, c_str_to_string, error::{print_error, WalletFfiError}, types::{ FfiAccount, FfiAccountList, FfiAccountListEntry, FfiBytes32, FfiPrivateAccountKeys, WalletHandle, }, wallet::get_wallet, + FfiU128, }; /// Create a new public account. @@ -162,16 +165,12 @@ pub unsafe extern "C" fn wallet_ffi_create_private_accounts_key( }; let chain_index = wallet.create_private_accounts_key(None); - - let node = wallet + let key_chain = wallet .storage() - .user_data - .private_key_tree - .key_map - .get(&chain_index) + .key_chain() + .private_account_key_chain_by_index(&chain_index) .expect("Node was just inserted"); - let key_chain = &node.value.0; let npk_bytes = key_chain.nullifier_public_key.0; let vpk_bytes = key_chain.viewing_public_key.to_bytes(); let vpk_len = vpk_bytes.len(); @@ -231,40 +230,21 @@ pub unsafe extern "C" fn wallet_ffi_list_accounts( } }; - let user_data = &wallet.storage().user_data; - let mut entries = Vec::new(); - - // Public accounts from default signing keys (preconfigured) - for account_id in user_data.default_pub_account_signing_keys.keys() { - entries.push(FfiAccountListEntry { - account_id: FfiBytes32::from_account_id(account_id), - is_public: true, - }); - } - - // Public accounts from key tree (generated) - for account_id in user_data.public_key_tree.account_id_map.keys() { - entries.push(FfiAccountListEntry { - account_id: FfiBytes32::from_account_id(account_id), - is_public: true, - }); - } - - // Private accounts from default accounts (preconfigured) - for account_id in 
user_data.default_user_private_accounts.keys() { - entries.push(FfiAccountListEntry { - account_id: FfiBytes32::from_account_id(account_id), - is_public: false, - }); - } - - // Private accounts from key tree (generated) - for account_id in user_data.private_key_tree.account_id_map.keys() { - entries.push(FfiAccountListEntry { - account_id: FfiBytes32::from_account_id(account_id), - is_public: false, - }); - } + let entries = wallet + .storage() + .key_chain() + .account_ids() + .map(|(account_id, _idx)| match account_id { + AccountIdWithPrivacy::Public(account_id) => FfiAccountListEntry { + account_id: FfiBytes32::from_account_id(account_id), + is_public: true, + }, + AccountIdWithPrivacy::Private(account_id) => FfiAccountListEntry { + account_id: FfiBytes32::from_account_id(account_id), + is_public: false, + }, + }) + .collect::>(); let count = entries.len(); @@ -508,3 +488,168 @@ pub unsafe extern "C" fn wallet_ffi_free_account_data(account: *mut FfiAccount) } } } + +/// Import a public account private key into wallet storage. 
+/// +/// # Parameters +/// - `handle`: Valid wallet handle +/// - `private_key_hex`: Hex-encoded private key string +/// +/// # Returns +/// - `Success` on successful import +/// - Error code on failure +/// +/// # Safety +/// - `handle` must be a valid wallet handle from `wallet_ffi_create_new` or `wallet_ffi_open` +/// - `private_key_hex` must be a valid pointer to a null-terminated C string +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_import_public_account( + handle: *mut WalletHandle, + private_key_hex: *const c_char, +) -> WalletFfiError { + let wrapper = match get_wallet(handle) { + Ok(w) => w, + Err(e) => return e, + }; + + let private_key_hex = match c_str_to_string(private_key_hex, "private_key_hex") { + Ok(value) => value, + Err(e) => return e, + }; + + let private_key = match nssa::PrivateKey::from_str(&private_key_hex) { + Ok(value) => value, + Err(e) => { + print_error(format!("Invalid public account private key: {e}")); + return WalletFfiError::InvalidKeyValue; + } + }; + + let mut wallet = match wrapper.core.lock() { + Ok(w) => w, + Err(e) => { + print_error(format!("Failed to lock wallet: {e}")); + return WalletFfiError::InternalError; + } + }; + + wallet + .storage_mut() + .key_chain_mut() + .add_imported_public_account(private_key); + + match wallet.store_persistent_data() { + Ok(()) => WalletFfiError::Success, + Err(e) => { + print_error(format!("Failed to save wallet after public import: {e}")); + WalletFfiError::StorageError + } + } +} + +/// Import a private account keychain and account state into wallet storage. 
+/// +/// # Parameters +/// - `handle`: Valid wallet handle +/// - `key_chain_json`: JSON-encoded `key_protocol::key_management::KeyChain` +/// - `chain_index`: Optional chain index string (for example `/0/1`, `NULL` if unknown) +/// - `identifier`: Identifier for this private account as little-endian u128 bytes +/// - `account_state_json`: JSON-encoded `wallet::account::HumanReadableAccount` +/// +/// # Returns +/// - `Success` on successful import +/// - Error code on failure +/// +/// # Safety +/// - `handle` must be a valid wallet handle from `wallet_ffi_create_new` or `wallet_ffi_open` +/// - `key_chain_json` must be a valid pointer to a null-terminated C string +/// - `identifier` must be a valid pointer to a `FfiU128` struct +/// - `account_state_json` must be a valid pointer to a null-terminated C string +#[no_mangle] +pub unsafe extern "C" fn wallet_ffi_import_private_account( + handle: *mut WalletHandle, + key_chain_json: *const c_char, + chain_index: *const c_char, + identifier: *const FfiU128, + account_state_json: *const c_char, +) -> WalletFfiError { + let wrapper = match get_wallet(handle) { + Ok(w) => w, + Err(e) => return e, + }; + + if identifier.is_null() { + print_error("Null pointer for identifier"); + return WalletFfiError::NullPointer; + } + + let key_chain_json = match c_str_to_string(key_chain_json, "key_chain_json") { + Ok(value) => value, + Err(e) => return e, + }; + + let account_state_json = match c_str_to_string(account_state_json, "account_state_json") { + Ok(value) => value, + Err(e) => return e, + }; + + let key_chain: KeyChain = match serde_json::from_str(&key_chain_json) { + Ok(value) => value, + Err(e) => { + print_error(format!("Invalid key chain JSON: {e}")); + return WalletFfiError::SerializationError; + } + }; + + let account_state: HumanReadableAccount = match serde_json::from_str(&account_state_json) { + Ok(value) => value, + Err(e) => { + print_error(format!("Invalid account state JSON: {e}")); + return 
WalletFfiError::SerializationError; + } + }; + + let account = nssa::Account::from(account_state); + + let mut wallet = match wrapper.core.lock() { + Ok(w) => w, + Err(e) => { + print_error(format!("Failed to lock wallet: {e}")); + return WalletFfiError::InternalError; + } + }; + + let chain_index = if chain_index.is_null() { + None + } else { + let chain_index_path = match c_str_to_string(chain_index, "chain_index") { + Ok(value) => value, + Err(e) => return e, + }; + + let parsed_chain_index = match ChainIndex::from_str(&chain_index_path) { + Ok(value) => value, + Err(e) => { + print_error(format!("Invalid chain index string: {e}")); + return WalletFfiError::InvalidTypeConversion; + } + }; + + Some(parsed_chain_index) + }; + + let identifier = u128::from_le_bytes(unsafe { (*identifier).data }); + + wallet + .storage_mut() + .key_chain_mut() + .add_imported_private_account(key_chain, chain_index, identifier, account); + + match wallet.store_persistent_data() { + Ok(()) => WalletFfiError::Success, + Err(e) => { + print_error(format!("Failed to save wallet after private import: {e}")); + WalletFfiError::StorageError + } + } +} diff --git a/wallet-ffi/src/error.rs b/wallet-ffi/src/error.rs index a8c345b5..17b73075 100644 --- a/wallet-ffi/src/error.rs +++ b/wallet-ffi/src/error.rs @@ -4,6 +4,7 @@ /// Error codes returned by FFI functions. #[repr(C)] +#[must_use] #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum WalletFfiError { /// Operation completed successfully. 
diff --git a/wallet-ffi/src/keys.rs b/wallet-ffi/src/keys.rs index 0471f255..b676ffab 100644 --- a/wallet-ffi/src/keys.rs +++ b/wallet-ffi/src/keys.rs @@ -116,12 +116,11 @@ pub unsafe extern "C" fn wallet_ffi_get_private_account_keys( let account_id = AccountId::new(unsafe { (*account_id).data }); - let Some((key_chain, _account, _identifier)) = - wallet.storage().user_data.get_private_account(account_id) - else { + let Some(acc) = wallet.storage().key_chain().private_account(account_id) else { print_error("Private account not found in wallet"); return WalletFfiError::AccountNotFound; }; + let key_chain = acc.key_chain; // NPK is a 32-byte array let npk_bytes = key_chain.nullifier_public_key.0; diff --git a/wallet-ffi/src/lib.rs b/wallet-ffi/src/lib.rs index d84bf5a3..16943d3e 100644 --- a/wallet-ffi/src/lib.rs +++ b/wallet-ffi/src/lib.rs @@ -26,9 +26,13 @@ reason = "TODO: fix later" )] -use std::sync::OnceLock; +use std::{ + ffi::{c_char, CStr}, + sync::OnceLock, +}; use ::wallet::ExecutionFailureKind; +use error::WalletFfiError; // Re-export public types for cbindgen pub use error::WalletFfiError as FfiError; use tokio::runtime::Handle; @@ -88,3 +92,20 @@ pub(crate) fn map_execution_error(e: ExecutionFailureKind) -> FfiError { _ => FfiError::InternalError, } } + +/// Helper to convert a C string to a Rust String. 
+fn c_str_to_string(ptr: *const c_char, name: &str) -> Result { + if ptr.is_null() { + print_error(format!("Null pointer for {name}")); + return Err(WalletFfiError::NullPointer); + } + + let c_str = unsafe { CStr::from_ptr(ptr) }; + match c_str.to_str() { + Ok(s) => Ok(s.to_owned()), + Err(e) => { + print_error(format!("Invalid UTF-8 in {name}: {e}")); + Err(WalletFfiError::InvalidUtf8) + } + } +} diff --git a/wallet-ffi/src/sync.rs b/wallet-ffi/src/sync.rs index 41031d06..5f7a4413 100644 --- a/wallet-ffi/src/sync.rs +++ b/wallet-ffi/src/sync.rs @@ -93,7 +93,7 @@ pub unsafe extern "C" fn wallet_ffi_get_last_synced_block( }; unsafe { - *out_block_id = wallet.last_synced_block; + *out_block_id = wallet.storage().last_synced_block(); } WalletFfiError::Success diff --git a/wallet-ffi/src/transfer.rs b/wallet-ffi/src/transfer.rs index 982df0f3..39e9cf71 100644 --- a/wallet-ffi/src/transfer.rs +++ b/wallet-ffi/src/transfer.rs @@ -3,7 +3,11 @@ use std::{ffi::CString, ptr}; use nssa::AccountId; -use wallet::program_facades::native_token_transfer::NativeTokenTransfer; +use wallet::{ + account::AccountIdWithPrivacy, + cli::CliAccountMention, + program_facades::native_token_transfer::NativeTokenTransfer, +}; use crate::{ block_on, @@ -72,7 +76,10 @@ pub unsafe extern "C" fn wallet_ffi_transfer_public( let transfer = NativeTokenTransfer(&wallet); - match block_on(transfer.send_public_transfer(from_id, to_id, amount, None, None)) { + let from_mention = CliAccountMention::Id(AccountIdWithPrivacy::Public(from_id)); + let to_mention = CliAccountMention::Id(AccountIdWithPrivacy::Public(to_id)); + + match block_on(transfer.send_public_transfer(from_id, to_id, amount, &from_mention, &to_mention)) { Ok(tx_hash) => { let tx_hash = CString::new(tx_hash.to_string()) .map_or(ptr::null_mut(), std::ffi::CString::into_raw); @@ -591,7 +598,9 @@ pub unsafe extern "C" fn wallet_ffi_register_public_account( let transfer = NativeTokenTransfer(&wallet); - match 
block_on(transfer.register_account(account_id, None)) { + let mention = CliAccountMention::Id(AccountIdWithPrivacy::Public(account_id)); + + match block_on(transfer.register_account(account_id, &mention)) { Ok(tx_hash) => { let tx_hash = CString::new(tx_hash.to_string()) .map_or(ptr::null_mut(), std::ffi::CString::into_raw); diff --git a/wallet-ffi/src/types.rs b/wallet-ffi/src/types.rs index 87c30315..b970a8d3 100644 --- a/wallet-ffi/src/types.rs +++ b/wallet-ffi/src/types.rs @@ -149,7 +149,7 @@ impl FfiBytes32 { /// Create from an `AccountId`. #[must_use] - pub const fn from_account_id(id: &nssa::AccountId) -> Self { + pub const fn from_account_id(id: nssa::AccountId) -> Self { Self { data: *id.value() } } } @@ -186,8 +186,8 @@ impl From for u128 { } } -impl From<&nssa::AccountId> for FfiBytes32 { - fn from(id: &nssa::AccountId) -> Self { +impl From for FfiBytes32 { + fn from(id: nssa::AccountId) -> Self { Self::from_account_id(id) } } diff --git a/wallet-ffi/src/wallet.rs b/wallet-ffi/src/wallet.rs index 93fc20aa..7aabaa2d 100644 --- a/wallet-ffi/src/wallet.rs +++ b/wallet-ffi/src/wallet.rs @@ -10,7 +10,7 @@ use std::{ use wallet::WalletCore; use crate::{ - block_on, + c_str_to_string, error::{print_error, WalletFfiError}, types::WalletHandle, }; @@ -60,23 +60,6 @@ fn c_str_to_path(ptr: *const c_char, name: &str) -> Result Result { - if ptr.is_null() { - print_error(format!("Null pointer for {name}")); - return Err(WalletFfiError::NullPointer); - } - - let c_str = unsafe { CStr::from_ptr(ptr) }; - match c_str.to_str() { - Ok(s) => Ok(s.to_owned()), - Err(e) => { - print_error(format!("Invalid UTF-8 in {name}: {e}")); - Err(WalletFfiError::InvalidUtf8) - } - } -} - /// Create a new wallet with fresh storage. /// /// This initializes a new wallet with a new seed derived from the password. 
@@ -212,7 +195,7 @@ pub unsafe extern "C" fn wallet_ffi_save(handle: *mut WalletHandle) -> WalletFfi } }; - match block_on(wallet.store_persistent_data()) { + match wallet.store_persistent_data() { Ok(()) => WalletFfiError::Success, Err(e) => { print_error(format!("Failed to save wallet: {e}")); diff --git a/wallet-ffi/wallet_ffi.h b/wallet-ffi/wallet_ffi.h index 89026950..adbb7b50 100644 --- a/wallet-ffi/wallet_ffi.h +++ b/wallet-ffi/wallet_ffi.h @@ -410,6 +410,50 @@ enum WalletFfiError wallet_ffi_get_account_private(struct WalletHandle *handle, */ void wallet_ffi_free_account_data(struct FfiAccount *account); +/** + * Import a public account private key into wallet storage. + * + * # Parameters + * - `handle`: Valid wallet handle + * - `private_key_hex`: Hex-encoded private key string + * + * # Returns + * - `Success` on successful import + * - Error code on failure + * + * # Safety + * - `handle` must be a valid wallet handle from `wallet_ffi_create_new` or `wallet_ffi_open` + * - `private_key_hex` must be a valid pointer to a null-terminated C string + */ +enum WalletFfiError wallet_ffi_import_public_account(struct WalletHandle *handle, + const char *private_key_hex); + +/** + * Import a private account keychain and account state into wallet storage. 
+ * + * # Parameters + * - `handle`: Valid wallet handle + * - `key_chain_json`: JSON-encoded `key_protocol::key_management::KeyChain` + * - `chain_index`: Optional chain index string (for example `/0/1`, `NULL` if unknown) + * - `identifier`: Identifier for this private account as little-endian u128 bytes + * - `account_state_json`: JSON-encoded `wallet::account::HumanReadableAccount` + * + * # Returns + * - `Success` on successful import + * - Error code on failure + * + * # Safety + * - `handle` must be a valid wallet handle from `wallet_ffi_create_new` or `wallet_ffi_open` + * - `key_chain_json` must be a valid pointer to a null-terminated C string + * - `identifier` must be a valid pointer to a `FfiU128` struct + * - `account_state_json` must be a valid pointer to a null-terminated C string + */ +enum WalletFfiError wallet_ffi_import_private_account(struct WalletHandle *handle, + const char *key_chain_json, + const char *chain_index, + const struct FfiU128 *identifier, + const char *account_state_json); + /** * Get the public key for a public account. 
* diff --git a/wallet/Cargo.toml b/wallet/Cargo.toml index a4d5cf0a..3aaa1753 100644 --- a/wallet/Cargo.toml +++ b/wallet/Cargo.toml @@ -11,6 +11,7 @@ workspace = true nssa_core.workspace = true nssa.workspace = true common.workspace = true +authenticated_transfer_core.workspace = true key_protocol.workspace = true sequencer_service_rpc = { workspace = true, features = ["client"] } token_core.workspace = true @@ -21,6 +22,7 @@ bip39.workspace = true pyo3.workspace = true rpassword = "7" zeroize = "1" +keycard_wallet.workspace = true anyhow.workspace = true thiserror.workspace = true @@ -42,4 +44,9 @@ async-stream.workspace = true indicatif = { version = "0.18.3", features = ["improved_unicode"] } optfield = "0.4.0" url.workspace = true -keycard_wallet.workspace = true \ No newline at end of file +derive_more = { workspace = true, features = ["display"] } + +[dev-dependencies] +tempfile.workspace = true +key_protocol = { workspace = true, features = ["test_utils"] } +bincode.workspace = true diff --git a/wallet/configs/debug/wallet_config.json b/wallet/configs/debug/wallet_config.json index 94e13ebd..926ee298 100644 --- a/wallet/configs/debug/wallet_config.json +++ b/wallet/configs/debug/wallet_config.json @@ -3,411 +3,5 @@ "seq_poll_timeout": "30s", "seq_tx_poll_max_blocks": 15, "seq_poll_max_retries": 10, - "seq_block_poll_max_amount": 100, - "initial_accounts": [ - { - "Public": { - "account_id": "CbgR6tj5kWx5oziiFptM7jMvrQeYY3Mzaao6ciuhSr2r", - "pub_sign_key": "7f273098f25b71e6c005a9519f2678da8d1c7f01f6a27778e2d9948abdf901fb" - } - }, - { - "Public": { - "account_id": "2RHZhw9h534Zr3eq2RGhQete2Hh667foECzXPmSkGni2", - "pub_sign_key": "f434f8741720014586ae43356d2aec6257da086222f604ddb75d69733b86fc4c" - } - }, - { - "Private": { - "account_id": "GoKB6RuE6pT2KxCqDXQqiCuuuYZaGdJNfctzyqRdGBCy", - "identifier": 0, - "account": { - "program_owner": [ - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0 - ], - "balance": 10000, - "data": [], - "nonce": 0 - }, - "key_chain": { - 
"secret_spending_key": [ - 75, - 231, - 144, - 165, - 5, - 36, - 183, - 237, - 190, - 227, - 238, - 13, - 132, - 39, - 114, - 228, - 172, - 82, - 119, - 164, - 233, - 132, - 130, - 224, - 201, - 90, - 200, - 156, - 108, - 199, - 56, - 22 - ], - "private_key_holder": { - "nullifier_secret_key": [ - 212, - 34, - 166, - 184, - 182, - 77, - 127, - 176, - 147, - 68, - 148, - 190, - 41, - 244, - 8, - 202, - 51, - 10, - 44, - 43, - 93, - 41, - 229, - 130, - 54, - 96, - 198, - 242, - 10, - 227, - 119, - 1 - ], - "viewing_secret_key": [ - 205, - 10, - 5, - 19, - 148, - 98, - 49, - 19, - 251, - 186, - 247, - 216, - 75, - 53, - 184, - 36, - 84, - 87, - 236, - 205, - 105, - 217, - 213, - 21, - 61, - 183, - 133, - 174, - 121, - 115, - 51, - 203 - ] - }, - "nullifier_public_key": [ - 122, - 213, - 113, - 8, - 118, - 179, - 235, - 94, - 5, - 219, - 131, - 106, - 246, - 253, - 14, - 204, - 65, - 93, - 0, - 198, - 100, - 108, - 57, - 48, - 6, - 65, - 183, - 31, - 136, - 86, - 82, - 165 - ], - "viewing_public_key": [ - 3, - 165, - 235, - 215, - 77, - 4, - 19, - 45, - 0, - 27, - 18, - 26, - 11, - 226, - 126, - 174, - 144, - 167, - 160, - 199, - 14, - 23, - 49, - 163, - 49, - 138, - 129, - 229, - 79, - 9, - 15, - 234, - 30 - ] - } - } - }, - { - "Private": { - "account_id": "BCdMnPkdH2DrVhe7cGdawkPU9iapsSboRvJpWX8pWnLq", - "identifier": 0, - "account": { - "program_owner": [ - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0 - ], - "balance": 20000, - "data": [], - "nonce": 0 - }, - "key_chain": { - "secret_spending_key": [ - 107, - 49, - 136, - 174, - 162, - 107, - 250, - 105, - 252, - 146, - 166, - 197, - 163, - 132, - 153, - 222, - 68, - 17, - 87, - 101, - 22, - 113, - 88, - 97, - 180, - 203, - 139, - 18, - 28, - 62, - 51, - 149 - ], - "private_key_holder": { - "nullifier_secret_key": [ - 219, - 5, - 233, - 185, - 144, - 150, - 100, - 58, - 97, - 5, - 57, - 163, - 110, - 46, - 241, - 216, - 155, - 217, - 100, - 51, - 184, - 21, - 225, - 148, - 198, - 9, - 121, - 239, - 232, - 98, - 22, - 218 - 
], - "viewing_secret_key": [ - 35, - 105, - 230, - 121, - 218, - 177, - 21, - 55, - 83, - 80, - 95, - 235, - 161, - 83, - 11, - 221, - 67, - 83, - 1, - 218, - 49, - 242, - 53, - 29, - 26, - 171, - 170, - 144, - 49, - 233, - 159, - 48 - ] - }, - "nullifier_public_key": [ - 33, - 68, - 229, - 154, - 12, - 235, - 210, - 229, - 236, - 144, - 126, - 122, - 58, - 107, - 36, - 58, - 243, - 128, - 174, - 197, - 141, - 137, - 162, - 190, - 155, - 234, - 94, - 156, - 218, - 34, - 13, - 221 - ], - "viewing_public_key": [ - 3, - 122, - 7, - 137, - 250, - 84, - 10, - 85, - 3, - 15, - 134, - 250, - 205, - 40, - 126, - 211, - 14, - 120, - 15, - 55, - 56, - 214, - 72, - 243, - 83, - 17, - 124, - 242, - 251, - 184, - 174, - 150, - 83 - ] - } - } - } - ] + "seq_block_poll_max_amount": 100 } \ No newline at end of file diff --git a/wallet/src/account.rs b/wallet/src/account.rs new file mode 100644 index 00000000..dca0a051 --- /dev/null +++ b/wallet/src/account.rs @@ -0,0 +1,149 @@ +use std::str::FromStr; + +use base58::{FromBase58 as _, ToBase58 as _}; +use derive_more::Display; +use nssa::AccountId; +use serde::{Deserialize, Serialize}; +use thiserror::Error; + +#[derive(Debug, Display, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)] +#[display("{_0}")] +pub struct Label(String); + +impl Label { + #[expect( + clippy::needless_pass_by_value, + reason = "Convenience for caller and negligible cost" + )] + #[must_use] + pub fn new(label: impl ToString) -> Self { + Self(label.to_string()) + } +} + +impl FromStr for Label { + type Err = std::convert::Infallible; + + fn from_str(s: &str) -> std::result::Result { + Ok(Self(s.to_owned())) + } +} + +impl From<&str> for Label { + fn from(value: &str) -> Self { + Self(value.to_owned()) + } +} + +impl From for Label { + fn from(value: String) -> Self { + Self(value) + } +} + +#[derive(Debug, Display, Copy, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] +pub enum AccountIdWithPrivacy { + #[display("Public/{_0}")] + 
Public(AccountId), + #[display("Private/{_0}")] + Private(AccountId), +} + +#[derive(Debug, Error)] +pub enum AccountIdWithPrivacyParseError { + #[error("Invalid format, expected 'Public/{{account_id}}' or 'Private/{{account_id}}'")] + InvalidFormat, + #[error("Invalid account id")] + InvalidAccountId(#[from] nssa_core::account::AccountIdError), +} + +impl FromStr for AccountIdWithPrivacy { + type Err = AccountIdWithPrivacyParseError; + + fn from_str(s: &str) -> Result { + if let Some(stripped) = s.strip_prefix("Public/") { + Ok(Self::Public(AccountId::from_str(stripped)?)) + } else if let Some(stripped) = s.strip_prefix("Private/") { + Ok(Self::Private(AccountId::from_str(stripped)?)) + } else { + Err(AccountIdWithPrivacyParseError::InvalidFormat) + } + } +} + +/// Human-readable representation of an account. +#[derive(Debug, Clone, Serialize, Deserialize)] +pub struct HumanReadableAccount { + balance: u128, + program_owner: String, + data: String, + nonce: u128, +} + +impl FromStr for HumanReadableAccount { + type Err = anyhow::Error; + + fn from_str(s: &str) -> Result { + serde_json::from_str(s).map_err(Into::into) + } +} + +impl std::fmt::Display for HumanReadableAccount { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let json = serde_json::to_string_pretty(self).map_err(|_err| std::fmt::Error)?; + write!(f, "{json}") + } +} + +impl From for HumanReadableAccount { + fn from(account: nssa::Account) -> Self { + let program_owner = account + .program_owner + .iter() + .flat_map(|n| n.to_le_bytes()) + .collect::>() + .to_base58(); + let data = hex::encode(account.data); + Self { + balance: account.balance, + program_owner, + data, + nonce: account.nonce.0, + } + } +} + +impl From for nssa::Account { + fn from(account: HumanReadableAccount) -> Self { + let mut program_owner_bytes = [0_u8; 32]; + let decoded_program_owner = account + .program_owner + .from_base58() + .expect("Invalid base58 in HumanReadableAccount.program_owner"); + 
assert!( + decoded_program_owner.len() == 32, + "HumanReadableAccount.program_owner must decode to exactly 32 bytes" + ); + program_owner_bytes.copy_from_slice(&decoded_program_owner); + + let mut program_owner = [0_u32; 8]; + for (index, chunk) in program_owner_bytes.chunks_exact(4).enumerate() { + let chunk: [u8; 4] = chunk + .try_into() + .expect("chunk length is guaranteed to be 4"); + program_owner[index] = u32::from_le_bytes(chunk); + } + + let data = hex::decode(&account.data).expect("Invalid hex in HumanReadableAccount.data"); + let data = data + .try_into() + .expect("Invalid account data: exceeds maximum allowed size"); + + Self { + balance: account.balance, + program_owner, + data, + nonce: nssa_core::account::Nonce(account.nonce), + } + } +} diff --git a/wallet/src/chain_storage.rs b/wallet/src/chain_storage.rs deleted file mode 100644 index 3bfdb383..00000000 --- a/wallet/src/chain_storage.rs +++ /dev/null @@ -1,314 +0,0 @@ -use std::collections::{BTreeMap, HashMap, btree_map::Entry}; - -use anyhow::Result; -use bip39::Mnemonic; -use key_protocol::{ - key_management::{ - key_tree::{KeyTreePrivate, KeyTreePublic, chain_index::ChainIndex}, - secret_holders::SeedHolder, - }, - key_protocol_core::{NSSAUserData, UserPrivateAccountData}, -}; -use log::debug; -use nssa::program::Program; - -use crate::config::{InitialAccountData, Label, PersistentAccountData, WalletConfig}; - -pub struct WalletChainStore { - pub user_data: NSSAUserData, - pub wallet_config: WalletConfig, - pub labels: HashMap, -} - -impl WalletChainStore { - #[expect( - clippy::wildcard_enum_match_arm, - reason = "We perform search for specific variants only" - )] - pub fn new( - config: WalletConfig, - persistent_accounts: Vec, - labels: HashMap, - ) -> Result { - if persistent_accounts.is_empty() { - anyhow::bail!("Roots not found; please run setup beforehand"); - } - - let mut public_init_acc_map = BTreeMap::new(); - let mut private_init_acc_map = BTreeMap::new(); - - let public_root = 
persistent_accounts - .iter() - .find(|data| match data { - &PersistentAccountData::Public(data) => data.chain_index == ChainIndex::root(), - _ => false, - }) - .cloned() - .expect("Malformed persistent account data, must have public root"); - - let private_root = persistent_accounts - .iter() - .find(|data| match data { - &PersistentAccountData::Private(data) => data.chain_index == ChainIndex::root(), - _ => false, - }) - .cloned() - .expect("Malformed persistent account data, must have private root"); - - let mut public_tree = KeyTreePublic::new_from_root(match public_root { - PersistentAccountData::Public(data) => data - .data - .expect("public tree in persistent_accounts failed to return a valid KeyTree."), - _ => unreachable!(), - }); - let mut private_tree = KeyTreePrivate::new_from_root(match private_root { - PersistentAccountData::Private(data) => data.data, - _ => unreachable!(), - }); - - for pers_acc_data in persistent_accounts { - match pers_acc_data { - PersistentAccountData::Public(data) => { - public_tree.insert( - data.account_id, - data.chain_index, - data.data.expect("`chain_storage::WalletChainStore::new()`: failed to produce a Key Tree for a PersistentAccountData."), - ); - } - PersistentAccountData::Private(data) => { - let npk = data.data.value.0.nullifier_public_key; - let chain_index = data.chain_index; - for identifier in &data.identifiers { - let account_id = nssa::AccountId::from((&npk, *identifier)); - private_tree - .account_id_map - .insert(account_id, chain_index.clone()); - } - private_tree.key_map.insert(chain_index, data.data); - } - PersistentAccountData::Preconfigured(acc_data) => match acc_data { - InitialAccountData::Public(data) => { - public_init_acc_map.insert(data.account_id, data.pub_sign_key); - } - InitialAccountData::Private(data) => { - private_init_acc_map.insert( - data.account_id(), - UserPrivateAccountData { - key_chain: data.key_chain, - accounts: vec![(data.identifier, data.account)], - }, - ); - } - }, - } - } - 
- Ok(Self { - user_data: NSSAUserData::new_with_accounts( - public_init_acc_map, - private_init_acc_map, - public_tree, - private_tree, - )?, - wallet_config: config, - labels, - }) - } - - pub fn new_storage(config: WalletConfig, password: &str) -> Result<(Self, Mnemonic)> { - let mut public_init_acc_map = BTreeMap::new(); - let mut private_init_acc_map = BTreeMap::new(); - - let initial_accounts = config - .initial_accounts - .clone() - .unwrap_or_else(InitialAccountData::create_initial_accounts_data); - - for init_acc_data in initial_accounts { - match init_acc_data { - InitialAccountData::Public(data) => { - public_init_acc_map.insert(data.account_id, data.pub_sign_key); - } - InitialAccountData::Private(data) => { - let account_id = data.account_id(); - let mut account = data.account; - // TODO: Program owner is only known after code is compiled and can't be set - // in the config. Therefore we overwrite it here on - // startup. Fix this when program id can be fetched - // from the node and queried from the wallet. - account.program_owner = Program::authenticated_transfer_program().id(); - private_init_acc_map.insert( - account_id, - UserPrivateAccountData { - key_chain: data.key_chain, - accounts: vec![(data.identifier, account)], - }, - ); - } - } - } - - // TODO: Use password for storage encryption - let _ = password; - let (seed_holder, mnemonic) = SeedHolder::new_mnemonic(""); - let public_tree = KeyTreePublic::new(&seed_holder); - let private_tree = KeyTreePrivate::new(&seed_holder); - - Ok(( - Self { - user_data: NSSAUserData::new_with_accounts( - public_init_acc_map, - private_init_acc_map, - public_tree, - private_tree, - )?, - wallet_config: config, - labels: HashMap::new(), - }, - mnemonic, - )) - } - - /// Restore storage from an existing mnemonic phrase. 
- pub fn restore_storage( - config: WalletConfig, - mnemonic: &Mnemonic, - password: &str, - ) -> Result { - // TODO: Use password for storage encryption - let _ = password; - let seed_holder = SeedHolder::from_mnemonic(mnemonic, ""); - let public_tree = KeyTreePublic::new(&seed_holder); - let private_tree = KeyTreePrivate::new(&seed_holder); - - Ok(Self { - user_data: NSSAUserData::new_with_accounts( - BTreeMap::new(), - BTreeMap::new(), - public_tree, - private_tree, - )?, - wallet_config: config, - labels: HashMap::new(), - }) - } - - pub fn insert_private_account_data( - &mut self, - account_id: nssa::AccountId, - identifier: nssa_core::Identifier, - account: nssa_core::account::Account, - ) { - debug!("inserting at address {account_id}, this account {account:?}"); - - // Update default accounts if present - if let Entry::Occupied(mut entry) = self - .user_data - .default_user_private_accounts - .entry(account_id) - { - let entry = entry.get_mut(); - if let Some((_, acc)) = entry.accounts.iter_mut().find(|(id, _)| *id == identifier) { - *acc = account; - } else { - entry.accounts.push((identifier, account)); - } - return; - } - - // Otherwise update the private key tree - - // Find the node by iterating all tree nodes for this account_id - let chain_index = self - .user_data - .private_key_tree - .account_id_map - .get(&account_id) - .cloned(); - - if let Some(chain_index) = chain_index { - // Node already in account_id_map — update its entry - if let Some(node) = self - .user_data - .private_key_tree - .key_map - .get_mut(&chain_index) - { - if let Some((_, acc)) = node.value.1.iter_mut().find(|(id, _)| *id == identifier) { - *acc = account; - } else { - node.value.1.push((identifier, account)); - } - } - } else { - // Node not yet in account_id_map — find it by checking all nodes - for (ci, node) in &mut self.user_data.private_key_tree.key_map { - let expected_id = - nssa::AccountId::from((&node.value.0.nullifier_public_key, identifier)); - if expected_id == 
account_id { - if let Some((_, acc)) = - node.value.1.iter_mut().find(|(id, _)| *id == identifier) - { - *acc = account; - } else { - node.value.1.push((identifier, account)); - } - // Register in account_id_map - self.user_data - .private_key_tree - .account_id_map - .insert(account_id, ci.clone()); - break; - } - } - } - } -} - -#[cfg(test)] -mod tests { - use key_protocol::key_management::key_tree::{ - keys_private::ChildKeysPrivate, keys_public::ChildKeysPublic, - }; - - use super::*; - use crate::config::{PersistentAccountDataPrivate, PersistentAccountDataPublic}; - - fn create_sample_wallet_config() -> WalletConfig { - WalletConfig { - sequencer_addr: "http://127.0.0.1".parse().unwrap(), - seq_poll_timeout: std::time::Duration::from_secs(12), - seq_tx_poll_max_blocks: 5, - seq_poll_max_retries: 10, - seq_block_poll_max_amount: 100, - basic_auth: None, - initial_accounts: None, - } - } - - fn create_sample_persistent_accounts() -> Vec { - let public_data = ChildKeysPublic::root([42; 64]); - let private_data = ChildKeysPrivate::root([47; 64]); - - vec![ - PersistentAccountData::Public(PersistentAccountDataPublic { - account_id: public_data.account_id(), - chain_index: ChainIndex::root(), - data: Some(public_data), - }), - PersistentAccountData::Private(Box::new(PersistentAccountDataPrivate { - identifiers: vec![], - chain_index: ChainIndex::root(), - data: private_data, - })), - ] - } - - #[test] - fn new_initializes_correctly() { - let config = create_sample_wallet_config(); - let accs = create_sample_persistent_accounts(); - - let _ = WalletChainStore::new(config, accs, HashMap::new()).unwrap(); - } -} diff --git a/wallet/src/cli/account.rs b/wallet/src/cli/account.rs index 4943aae1..1dcea1d5 100644 --- a/wallet/src/cli/account.rs +++ b/wallet/src/cli/account.rs @@ -1,19 +1,15 @@ use anyhow::{Context as _, Result}; use clap::Subcommand; use itertools::Itertools as _; -use key_protocol::key_management::key_tree::chain_index::ChainIndex; +use 
key_protocol::key_management::{KeyChain, key_tree::chain_index::ChainIndex}; use nssa::{Account, PublicKey, program::Program}; -use sequencer_service_rpc::RpcClient as _; +use nssa_core::Identifier; use token_core::{TokenDefinition, TokenHolding}; use crate::{ WalletCore, - cli::{SubcommandReturnValue, WalletSubcommand}, - config::Label, - helperfunctions::{ - AccountPrivacyKind, HumanReadableAccount, parse_addr_with_privacy_prefix, - resolve_id_or_label, - }, + account::{AccountIdWithPrivacy, HumanReadableAccount, Label}, + cli::{CliAccountMention, SubcommandReturnValue, WalletSubcommand}, }; /// Represents generic chain CLI subcommand. @@ -27,14 +23,9 @@ pub enum AccountSubcommand { /// Display keys (pk for public accounts, npk/vpk for private accounts). #[arg(short, long)] keys: bool, - /// Valid 32 byte base58 string with privacy prefix. - #[arg(short, long, conflicts_with = "account_label", required_unless_present_any = ["account_label", "key_path"])] - account_id: Option, - /// Account label (alternative to --account-id). - #[arg(long, conflicts_with = "account_id")] - account_label: Option, - #[arg(long, conflicts_with = "account_id", conflicts_with = "account_label")] - key_path: Option, + /// Either 32 byte base58 account id string with privacy prefix or a label. + #[arg(short, long)] + account_id: CliAccountMention, }, /// Produce new public or private account. #[command(subcommand)] @@ -50,21 +41,16 @@ pub enum AccountSubcommand { }, /// Set a label for an account. Label { - /// Valid 32 byte base58 string with privacy prefix. - #[arg( - short, - long, - conflicts_with = "account_label", - required_unless_present = "account_label" - )] - account_id: Option, - /// Account label (alternative to --account-id). - #[arg(long = "account-label", conflicts_with = "account_id")] - account_label: Option, + /// Either 32 byte base58 account id string with privacy prefix or a label. 
+ #[arg(short, long)] + account_id: CliAccountMention, /// The label to assign to the account. #[arg(short, long)] - label: String, + label: Label, }, + /// Import external account. + #[command(subcommand)] + Import(ImportSubcommand), } /// Represents generic register CLI subcommand. @@ -77,7 +63,7 @@ pub enum NewSubcommand { cci: Option, #[arg(short, long)] /// Label to assign to the new account. - label: Option, + label: Option