diff --git a/.github/workflows/publish_image.yml b/.github/workflows/publish_images.yml similarity index 96% rename from .github/workflows/publish_image.yml rename to .github/workflows/publish_images.yml index 457a98d0..619a6209 100644 --- a/.github/workflows/publish_image.yml +++ b/.github/workflows/publish_images.yml @@ -2,6 +2,9 @@ name: Publish Docker Images on: workflow_dispatch: + push: + tags: + - "v*" jobs: publish: @@ -42,6 +45,7 @@ jobs: with: images: ${{ secrets.DOCKER_REGISTRY }}/${{ github.repository }}/${{ matrix.name }} tags: | + type=ref,event=tag type=ref,event=branch type=ref,event=pr type=semver,pattern={{version}} diff --git a/Cargo.lock b/Cargo.lock index 33f810e0..6faf3b1e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -46,13 +46,13 @@ dependencies = [ [[package]] name = "actix-cors" -version = "0.6.5" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0346d8c1f762b41b458ed3145eea914966bb9ad20b9be0d6d463b20d45586370" +checksum = "daa239b93927be1ff123eebada5a3ff23e89f0124ccb8609234e5103d5a5ae6d" dependencies = [ "actix-utils", "actix-web", - "derive_more 0.99.20", + "derive_more", "futures-util", "log", "once_cell", @@ -69,26 +69,21 @@ dependencies = [ "actix-rt", "actix-service", "actix-utils", - "base64 0.22.1", "bitflags 2.11.0", "bytes", "bytestring", - "derive_more 2.1.1", + "derive_more", "encoding_rs", "foldhash", "futures-core", - "h2 0.3.27", "http 0.2.12", "httparse", "httpdate", "itoa", "language-tags", - "local-channel", "mime", "percent-encoding", "pin-project-lite", - "rand 0.9.2", - "sha1", "smallvec", "tokio", "tokio-util", @@ -102,7 +97,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e01ed3140b2f8d422c68afa1ed2e85d996ea619c988ac834d255db32138655cb" dependencies = [ "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -114,7 +109,6 @@ dependencies = [ "bytestring", "cfg-if", "http 0.2.12", - "regex", "regex-lite", "serde", "tracing", @@ -169,9 
+163,9 @@ dependencies = [ [[package]] name = "actix-web" -version = "4.1.0" +version = "4.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a27e8fe9ba4ae613c21f677c2cfaf0696c3744030c6f485b34634e502d6bb379" +checksum = "ff87453bc3b56e9b2b23c1cc0b1be8797184accf51d2abe0f8a33ec275d316bf" dependencies = [ "actix-codec", "actix-http", @@ -182,27 +176,29 @@ dependencies = [ "actix-service", "actix-utils", "actix-web-codegen", - "ahash 0.7.8", "bytes", "bytestring", "cfg-if", - "derive_more 0.99.20", + "derive_more", "encoding_rs", + "foldhash", "futures-core", "futures-util", + "impl-more", "itoa", "language-tags", "log", "mime", "once_cell", "pin-project-lite", - "regex", + "regex-lite", "serde", "serde_json", "serde_urlencoded", "smallvec", - "socket2 0.4.10", + "socket2 0.6.3", "time", + "tracing", "url", ] @@ -215,7 +211,7 @@ dependencies = [ "actix-router", "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -226,14 +222,14 @@ checksum = "b6ac1e58cded18cb28ddc17143c4dea5345b3ad575e14f32f66e4054a56eb271" dependencies = [ "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] name = "addchain" -version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b2e69442aa5628ea6951fa33e24efe8313f4321a91bd729fc2f75bdfc858570" +checksum = "2e33f6a175ec6a9e0aca777567f9ff7c3deefc255660df887e7fa3585e9801d8" dependencies = [ "num-bigint 0.3.3", "num-integer", @@ -248,23 +244,14 @@ checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" dependencies = [ "cpp_demangle", "fallible-iterator", - "gimli 0.31.1", + "gimli", "memmap2", - "object 0.36.7", + "object", "rustc-demangle", "smallvec", "typed-arena", ] -[[package]] -name = "addr2line" -version = "0.25.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b5d307320b3181d6d7954e663bd7c774a838b8220fe0593c86d9fb09f498b4b" -dependencies = [ - "gimli 
0.32.3", -] - [[package]] name = "adler2" version = "2.0.1" @@ -277,7 +264,7 @@ version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d122413f284cf2d62fb1b7db97e02edb8cda96d769b16e443a4f6195e35662b0" dependencies = [ - "crypto-common", + "crypto-common 0.1.7", "generic-array 0.14.7", ] @@ -288,8 +275,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0" dependencies = [ "cfg-if", - "cipher", - "cpufeatures", + "cipher 0.4.4", + "cpufeatures 0.2.17", ] [[package]] @@ -300,23 +287,12 @@ checksum = "831010a0f742e1209b3bcea8fab6a8e149051ba6099432c8cb2cc117dec3ead1" dependencies = [ "aead", "aes", - "cipher", + "cipher 0.4.4", "ctr", "ghash", "subtle", ] -[[package]] -name = "ahash" -version = "0.7.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "891477e0c6a8957309ee5c45a6368af3ae14bb510732d2684ffa19af310920f9" -dependencies = [ - "getrandom 0.2.17", - "once_cell", - "version_check", -] - [[package]] name = "ahash" version = "0.8.12" @@ -359,6 +335,7 @@ name = "amm_program" version = "0.1.0" dependencies = [ "amm_core", + "nssa", "nssa_core", "token_core", ] @@ -436,12 +413,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.101" +version = "1.0.102" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f0e0fee31ef5ed1ba1316088939cea399010ed7731dba877ed44aeb407a75ea" -dependencies = [ - "backtrace", -] +checksum = "7f202df86484c868dbad7eaa557ef785d5c66295e41b460ef922eca0723b842c" [[package]] name = "arbitrary" @@ -517,7 +491,7 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e0c292754729c8a190e50414fd1a37093c786c709899f29c9f7daccecfa855e" dependencies = [ - "ahash 0.8.12", + "ahash", "ark-crypto-primitives-macros", "ark-ec 0.5.0", "ark-ff 0.5.0", @@ -541,7 +515,7 @@ checksum = 
"e7e89fe77d1f0f4fe5b96dfc940923d88d17b6a773808124f21e764dfb063c6a" dependencies = [ "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -567,7 +541,7 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "43d68f2d516162846c1238e755a7c4d131b892b70cc70c471a8e3ca3ed818fce" dependencies = [ - "ahash 0.8.12", + "ahash", "ark-ff 0.5.0", "ark-poly 0.5.0", "ark-serialize 0.5.0", @@ -639,7 +613,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "62945a2f7e6de02a31fe400aa489f0e0f5b2502e69f95f853adb82a96c7a6b60" dependencies = [ "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -665,7 +639,7 @@ dependencies = [ "num-traits", "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -717,7 +691,7 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "579305839da207f02b89cd1679e50e67b4331e2f9294a57693e5051b7703fe27" dependencies = [ - "ahash 0.8.12", + "ahash", "ark-ff 0.5.0", "ark-serialize 0.5.0", "ark-std 0.5.0", @@ -811,7 +785,7 @@ checksum = "213888f660fddcca0d257e88e54ac05bca01885f258ccdf695bafd77031bb69d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -962,7 +936,7 @@ checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -973,7 +947,7 @@ checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -1002,7 +976,7 @@ dependencies = [ "manyhow", "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -1018,7 +992,7 @@ dependencies = [ "proc-macro2", "quote", "quote-use", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -1136,21 +1110,6 @@ dependencies = [ "tracing", ] -[[package]] -name = "backtrace" -version = "0.3.76" 
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb531853791a215d7c62a30daf0dde835f381ab5de4589cfe7c649d2cbe92bd6" -dependencies = [ - "addr2line 0.25.1", - "cfg-if", - "libc", - "miniz_oxide", - "object 0.37.3", - "rustc-demangle", - "windows-link", -] - [[package]] name = "base-x" version = "0.2.11" @@ -1245,7 +1204,7 @@ dependencies = [ "regex", "rustc-hash", "shlex", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -1329,6 +1288,15 @@ dependencies = [ "generic-array 0.14.7", ] +[[package]] +name = "block-buffer" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdd35008169921d80bc60d3d0ab416eecb028c4cd653352907921d95084790be" +dependencies = [ + "hybrid-array", +] + [[package]] name = "bollard" version = "0.20.1" @@ -1436,7 +1404,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -1450,9 +1418,9 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.20.1" +version = "3.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c6f81257d10a0f602a294ae4182251151ff97dbb504ef9afcdda4a64b24d9b4" +checksum = "5d20789868f4b01b2f2caec9f5c4e0213b41e3e5702a50157d699ae31ced2fcb" [[package]] name = "bytemuck" @@ -1471,7 +1439,7 @@ checksum = "f9abbd1bc6865053c427f7198e6af43bfdedc55ab791faed4fbd361d789575ff" dependencies = [ "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -1563,7 +1531,7 @@ dependencies = [ "quote", "serde", "serde_json", - "syn 2.0.116", + "syn 2.0.117", "tempfile", "toml 0.9.12+spec-1.1.0", ] @@ -1615,25 +1583,25 @@ checksum = "45565fc9416b9896014f5732ac776f810ee53a66730c17e4020c3ec064a8f88f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] name = "chacha20" -version = "0.9.1" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"c3613f74bd2eac03dad61bd53dbe620703d4371614fe0bc3b9f04dd36fe4e818" +checksum = "6f8d983286843e49675a4b7a2d174efe136dc93a18d69130dd18198a6c167601" dependencies = [ "cfg-if", - "cipher", - "cpufeatures", + "cipher 0.5.1", + "cpufeatures 0.3.0", ] [[package]] name = "chrono" -version = "0.4.43" +version = "0.4.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fac4744fb15ae8337dc853fee7fb3f4e48c0fbaa23d0afe49c447b4fab126118" +checksum = "c673075a2e0e5f4a1dde27ce9dee1ea4558c7ffe648f576438a20ca1d2acc4b0" dependencies = [ "iana-time-zone", "js-sys", @@ -1649,8 +1617,19 @@ version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad" dependencies = [ - "crypto-common", - "inout", + "crypto-common 0.1.7", + "inout 0.1.4", +] + +[[package]] +name = "cipher" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e34d8227fe1ba289043aeb13792056ff80fd6de1a9f49137a5f499de8e8c78ea" +dependencies = [ + "block-buffer 0.12.0", + "crypto-common 0.2.1", + "inout 0.2.2", ] [[package]] @@ -1666,9 +1645,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.59" +version = "4.5.60" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5caf74d17c3aec5495110c34cc3f78644bfa89af6c8993ed4de2790e49b6499" +checksum = "2797f34da339ce31042b27d23607e051786132987f595b02ba4f6a6dffb7030a" dependencies = [ "clap_builder", "clap_derive", @@ -1676,9 +1655,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.59" +version = "4.5.60" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "370daa45065b80218950227371916a1633217ae42b2715b2287b606dcd618e24" +checksum = "24a241312cea5059b13574bb9b3861cabf758b879c15190b37b6d6fd63ab6876" dependencies = [ "anstream", "anstyle", @@ -1695,7 +1674,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.116", + "syn 
2.0.117", ] [[package]] @@ -1826,12 +1805,12 @@ dependencies = [ [[package]] name = "const-hex" -version = "1.17.0" +version = "1.18.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3bb320cac8a0750d7f25280aa97b09c26edfe161164238ecbbb31092b079e735" +checksum = "531185e432bb31db1ecda541e9e7ab21468d4d844ad7505e0546a49b4945d49b" dependencies = [ "cfg-if", - "cpufeatures", + "cpufeatures 0.2.17", "proptest", "serde_core", ] @@ -1880,12 +1859,6 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f67855af358fcb20fac58f9d714c94e2b228fe5694c1c9b4ead4a366343eda1b" -[[package]] -name = "convert_case" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" - [[package]] name = "convert_case" version = "0.6.0" @@ -1995,6 +1968,15 @@ dependencies = [ "libc", ] +[[package]] +name = "cpufeatures" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b2a41393f66f16b0823bb79094d54ac5fbd34ab292ddafb9a0456ac9f87d201" +dependencies = [ + "libc", +] + [[package]] name = "crc32fast" version = "1.5.0" @@ -2073,13 +2055,22 @@ dependencies = [ "typenum", ] +[[package]] +name = "crypto-common" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77727bb15fa921304124b128af125e7e3b968275d1b108b379190264f4423710" +dependencies = [ + "hybrid-array", +] + [[package]] name = "ctr" version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0369ee1ad671834580515889b80f2ea915f23b8be8d0daa4bbaf2ac5c7590835" dependencies = [ - "cipher", + "cipher 0.4.4", ] [[package]] @@ -2089,7 +2080,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "97fb8b7c4503de7d6ae7b42ab72a5a59857b4c937ec27a3d4539dba95b5ab2be" dependencies = [ "cfg-if", - "cpufeatures", + "cpufeatures 0.2.17", 
"curve25519-dalek-derive", "digest", "fiat-crypto", @@ -2107,7 +2098,7 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -2141,7 +2132,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -2155,7 +2146,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -2166,7 +2157,7 @@ checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead" dependencies = [ "darling_core 0.20.11", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -2177,21 +2168,7 @@ checksum = "d38308df82d1080de0afee5d069fa14b0326a88c14f15c5ccda35b4a6c414c81" dependencies = [ "darling_core 0.21.3", "quote", - "syn 2.0.116", -] - -[[package]] -name = "dashmap" -version = "6.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5041cc499144891f3790297212f32a74fb938e5136a14943f338ef9e0ae276cf" -dependencies = [ - "cfg-if", - "crossbeam-utils", - "hashbrown 0.14.5", - "lock_api", - "once_cell", - "parking_lot_core", + "syn 2.0.117", ] [[package]] @@ -2217,7 +2194,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7ab67060fc6b8ef687992d439ca0fa36e7ed17e9a0b16b25b601e8757df720de" dependencies = [ "data-encoding", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -2233,9 +2210,9 @@ dependencies = [ [[package]] name = "deranged" -version = "0.5.6" +version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc3dc5ad92c2e2d1c193bbbbdf2ea477cb81331de4f3103f267ca18368b988c4" +checksum = "7cd812cc2bc1d69d4764bd80df88b4317eaef9e773c75226407d9bc0876b211c" dependencies = [ "powerfmt", "serde_core", @@ -2260,7 +2237,7 @@ checksum = "ef941ded77d15ca19b40374869ac6000af1c9f2a4c0f3d4c70926287e6364a8f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] 
[[package]] @@ -2271,7 +2248,7 @@ checksum = "1e567bd82dcff979e4b03460c307b3cdc9e96fde3d73bed1496d2bc75d9dd62a" dependencies = [ "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -2292,7 +2269,7 @@ dependencies = [ "darling 0.20.11", "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -2302,20 +2279,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ab63b0e2bf4d5928aff72e83a7dace85d7bba5fe12dcc3c5a572d78caffd3f3c" dependencies = [ "derive_builder_core", - "syn 2.0.116", -] - -[[package]] -name = "derive_more" -version = "0.99.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6edb4b64a43d977b8e99788fe3a04d483834fba1215a7e02caa415b626497f7f" -dependencies = [ - "convert_case 0.4.0", - "proc-macro2", - "quote", - "rustc_version", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -2337,7 +2301,7 @@ dependencies = [ "proc-macro2", "quote", "rustc_version", - "syn 2.0.116", + "syn 2.0.117", "unicode-xid", ] @@ -2347,9 +2311,9 @@ version = "0.10.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ed9a281f7bc9b7576e61468ba615a66a5c8cfdff42420a70aa82701a3b1e292" dependencies = [ - "block-buffer", + "block-buffer 0.10.4", "const-oid", - "crypto-common", + "crypto-common 0.1.7", "subtle", ] @@ -2391,7 +2355,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -2515,7 +2479,7 @@ dependencies = [ "enum-ordinalize", "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -2526,9 +2490,9 @@ checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" [[package]] name = "either_of" -version = "0.1.6" +version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "216d23e0ec69759a17f05e1c553f3a6870e5ec73420fbb07807a6f34d5d1d5a4" +checksum = 
"14f7f86eef3a7e4b9c2107583dbbbe3d9535c4b800796faf1774b82ba22033da" dependencies = [ "paste", "pin-project-lite", @@ -2605,7 +2569,7 @@ checksum = "f282cfdfe92516eb26c2af8589c274c7c17681f5ecc03c18255fe741c6aa64eb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -2625,20 +2589,30 @@ checksum = "8ca9601fb2d62598ee17836250842873a413586e5d7ed88b356e38ddbb0ec631" dependencies = [ "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", +] + +[[package]] +name = "env_filter" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a1c3cc8e57274ec99de65301228b537f1e4eedc1b8e0f9411c6caac8ae7308f" +dependencies = [ + "log", + "regex", ] [[package]] name = "env_logger" -version = "0.10.2" +version = "0.11.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4cd405aab171cb85d6735e5c8d9db038c17d3ca007a4d2c25f337935c3d90580" +checksum = "b2daee4ea451f429a58296525ddf28b45a3b64f1acf6587e2067437bb11e218d" dependencies = [ - "humantime", - "is-terminal", + "anstream", + "anstyle", + "env_filter", + "jiff", "log", - "regex", - "termcolor", ] [[package]] @@ -2655,9 +2629,9 @@ checksum = "a1731451909bde27714eacba19c2566362a7f35224f52b153d3f42cf60f72472" [[package]] name = "erased-serde" -version = "0.4.9" +version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89e8918065695684b2b0702da20382d5ae6065cf3327bc2d6436bd49a71ce9f3" +checksum = "d2add8a07dd6a8d93ff627029c51de145e12686fbc36ecb298ac22e74cf02dec" dependencies = [ "serde", "serde_core", @@ -2872,7 +2846,7 @@ checksum = "1a5c6c585bc94aaf2c7b51dd4c2ba22680844aba4c687be581871a6f518c5742" dependencies = [ "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -2958,7 +2932,7 @@ checksum = "e835b70203e41293343137df5c0664546da5745f82ec9b84d40be8336958447b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -3068,21 
+3042,21 @@ dependencies = [ "cfg-if", "js-sys", "libc", - "r-efi", + "r-efi 5.3.0", "wasip2", "wasm-bindgen", ] [[package]] name = "getrandom" -version = "0.4.1" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "139ef39800118c7683f2fd3c98c1b23c09ae076556b435f8e9064ae108aaeeec" +checksum = "0de51e6874e94e7bf76d726fc5d13ba782deca734ff60d5bb2fb2607c7406555" dependencies = [ "cfg-if", "js-sys", "libc", - "r-efi", + "r-efi 6.0.0", "wasip2", "wasip3", "wasm-bindgen", @@ -3109,12 +3083,6 @@ dependencies = [ "stable_deref_trait", ] -[[package]] -name = "gimli" -version = "0.32.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e629b9b98ef3dd8afe6ca2bd0f89306cec16d43d907889945bc5d6687f2f13c7" - [[package]] name = "glob" version = "0.3.3" @@ -3184,25 +3152,6 @@ version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "17e2ac29387b1aa07a1e448f7bb4f35b500787971e965b02842b900afa5c8f6f" -[[package]] -name = "h2" -version = "0.3.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0beca50380b1fc32983fc1cb4587bfa4bb9e78fc259aad4a0032d2080309222d" -dependencies = [ - "bytes", - "fnv", - "futures-core", - "futures-sink", - "futures-util", - "http 0.2.12", - "indexmap 2.13.0", - "slab", - "tokio", - "tokio-util", - "tracing", -] - [[package]] name = "h2" version = "0.4.13" @@ -3243,7 +3192,7 @@ version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e" dependencies = [ - "ahash 0.8.12", + "ahash", ] [[package]] @@ -3252,7 +3201,7 @@ version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" dependencies = [ - "ahash 0.8.12", + "ahash", ] [[package]] @@ -3300,12 +3249,6 @@ version = "0.5.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" -[[package]] -name = "hermit-abi" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc0fef456e4baa96da950455cd02c081ca953b141298e41db3fc7e36b1da849c" - [[package]] name = "hex" version = "0.4.3" @@ -3453,6 +3396,15 @@ dependencies = [ "serde", ] +[[package]] +name = "hybrid-array" +version = "0.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8655f91cd07f2b9d0c24137bd650fe69617773435ee5ec83022377777ce65ef1" +dependencies = [ + "typenum", +] + [[package]] name = "hydration_context" version = "0.3.0" @@ -3479,7 +3431,7 @@ dependencies = [ "bytes", "futures-channel", "futures-core", - "h2 0.4.13", + "h2", "http 1.4.0", "http-body", "httparse", @@ -3571,7 +3523,7 @@ dependencies = [ "libc", "percent-encoding", "pin-project-lite", - "socket2 0.6.2", + "socket2 0.6.3", "system-configuration", "tokio", "tower-service", @@ -3732,6 +3684,12 @@ dependencies = [ "icu_properties", ] +[[package]] +name = "impl-more" +version = "0.1.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e8a5a9a0ff0086c7a148acb942baaabeadf9504d10400b5a05645853729b9cd2" + [[package]] name = "include_bytes_aligned" version = "0.1.4" @@ -3756,6 +3714,7 @@ dependencies = [ "serde", "serde_json", "storage", + "tempfile", "tokio", "url", ] @@ -3852,6 +3811,15 @@ dependencies = [ "generic-array 0.14.7", ] +[[package]] +name = "inout" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4250ce6452e92010fdf7268ccc5d14faa80bb12fc741938534c58f16804e03c7" +dependencies = [ + "hybrid-array", +] + [[package]] name = "integration_tests" version = "0.1.0" @@ -3890,18 +3858,18 @@ checksum = "71dd52191aae121e8611f1e8dc3e324dd0dd1dee1e6dd91d10ee07a3cfb4d9d8" [[package]] name = "inventory" -version = "0.3.21" +version = "0.3.22" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc61209c082fbeb19919bee74b176221b27223e27b65d781eb91af24eb1fb46e" +checksum = "009ae045c87e7082cb72dab0ccd01ae075dd00141ddc108f43a0ea150a9e7227" dependencies = [ "rustversion", ] [[package]] name = "ipnet" -version = "2.11.0" +version = "2.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" +checksum = "d98f6fed1fde3f8c21bc40a1abb88dd75e67924f9cffc3ef95607bad8017f8e2" [[package]] name = "iri-string" @@ -3913,17 +3881,6 @@ dependencies = [ "serde", ] -[[package]] -name = "is-terminal" -version = "0.4.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3640c1c38b8e4e43584d8df18be5fc6b0aa314ce6ebf51b53313d4306cca8e46" -dependencies = [ - "hermit-abi", - "libc", - "windows-sys 0.61.2", -] - [[package]] name = "is_terminal_polyfill" version = "1.70.2" @@ -3997,6 +3954,30 @@ dependencies = [ "zeroize", ] +[[package]] +name = "jiff" +version = "0.2.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a3546dc96b6d42c5f24902af9e2538e82e39ad350b0c766eb3fbf2d8f3d8359" +dependencies = [ + "jiff-static", + "log", + "portable-atomic", + "portable-atomic-util", + "serde_core", +] + +[[package]] +name = "jiff-static" +version = "0.2.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a8c8b344124222efd714b73bb41f8b5120b27a7cc1c75593a6ff768d9d05aa4" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.117", +] + [[package]] name = "jni" version = "0.21.1" @@ -4031,9 +4012,9 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.85" +version = "0.3.91" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c942ebf8e95485ca0d52d97da7c5a2c387d0e7f0ba4c35e93bfcaee045955b3" +checksum = "b49715b7073f385ba4bc528e5747d02e66cb39c6146efb66b781f131f0fb399c" dependencies = [ "once_cell", 
"wasm-bindgen", @@ -4143,7 +4124,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -4232,7 +4213,7 @@ version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cb26cec98cce3a3d96cbb7bced3c4b16e3d13f27ec56dbd62cbc8f39cfb9d653" dependencies = [ - "cpufeatures", + "cpufeatures 0.2.17", ] [[package]] @@ -4283,7 +4264,7 @@ dependencies = [ "proc-macro2", "quote", "regex", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -4303,16 +4284,16 @@ checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2" [[package]] name = "leptos" -version = "0.8.16" +version = "0.8.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43ad8042559850ee5253293b8460a75513f7d542021b9303083d5c236bcdd16f" +checksum = "4b540ac2868724738f0f5d00f00ec4640e587223774219c1baddc46bad46fb8e" dependencies = [ "any_spawner", "base64 0.22.1", "cfg-if", "either_of", "futures", - "getrandom 0.4.1", + "getrandom 0.4.2", "hydration_context", "leptos_config", "leptos_dom", @@ -4408,7 +4389,7 @@ dependencies = [ "quote", "rstml", "serde", - "syn 2.0.116", + "syn 2.0.117", "walkdir", ] @@ -4447,7 +4428,7 @@ dependencies = [ "rstml", "rustc_version", "server_fn_macro", - "syn 2.0.116", + "syn 2.0.117", "uuid", ] @@ -4500,7 +4481,7 @@ dependencies = [ "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -4525,9 +4506,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.182" +version = "0.2.183" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6800badb6cb2082ffd7b6a67e6125bb39f18782f793520caee8cb8846be06112" +checksum = "b5b646652bf6661599e1da8901b3b9522896f01e736bad5f723fe7a3a27f899d" [[package]] name = "libloading" @@ -4581,13 +4562,14 @@ dependencies = [ [[package]] name = "libredox" -version = "0.1.12" +version = "0.1.14" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d0b95e02c851351f877147b7deea7b1afb1df71b63aa5f8270716e0c5720616" +checksum = "1744e39d1d6a9948f4f388969627434e31128196de472883b39f148769bfe30a" dependencies = [ "bitflags 2.11.0", "libc", - "redox_syscall 0.7.1", + "plain", + "redox_syscall 0.7.3", ] [[package]] @@ -4605,9 +4587,9 @@ dependencies = [ [[package]] name = "libz-sys" -version = "1.1.23" +version = "1.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15d118bbf3771060e7311cc7bb0545b01d08a8b4a7de949198dec1fa0ca1c0f7" +checksum = "4735e9cbde5aac84a5ce588f6b23a90b9b0b528f6c5a8db8a4aff300463a0839" dependencies = [ "cc", "pkg-config", @@ -4628,9 +4610,9 @@ dependencies = [ [[package]] name = "linux-raw-sys" -version = "0.11.0" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039" +checksum = "32a66949e030da00e8c7d4434b251670a91556f4144941d37452769c25d58a53" [[package]] name = "litemap" @@ -4638,17 +4620,6 @@ version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77" -[[package]] -name = "local-channel" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6cbc85e69b8df4b8bb8b89ec634e7189099cea8927a276b7384ce5488e53ec8" -dependencies = [ - "futures-core", - "futures-sink", - "local-waker", -] - [[package]] name = "local-waker" version = "0.1.4" @@ -4672,8 +4643,8 @@ checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" [[package]] name = "logos-blockchain-blend-crypto" -version = "0.1.0" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81192877116cbc3eedf6688b85fab6dd0e448290" +version = "0.2.1" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81dbb4517aa466358ed425d92fad7d45a0c419fd" 
dependencies = [ "blake2", "logos-blockchain-groth16", @@ -4686,8 +4657,8 @@ dependencies = [ [[package]] name = "logos-blockchain-blend-message" -version = "0.1.0" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81192877116cbc3eedf6688b85fab6dd0e448290" +version = "0.2.1" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81dbb4517aa466358ed425d92fad7d45a0c419fd" dependencies = [ "blake2", "derivative", @@ -4708,11 +4679,12 @@ dependencies = [ [[package]] name = "logos-blockchain-blend-proofs" -version = "0.1.0" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81192877116cbc3eedf6688b85fab6dd0e448290" +version = "0.2.1" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81dbb4517aa466358ed425d92fad7d45a0c419fd" dependencies = [ "ed25519-dalek", "generic-array 1.3.5", + "hex", "logos-blockchain-blend-crypto", "logos-blockchain-groth16", "logos-blockchain-pol", @@ -4725,8 +4697,8 @@ dependencies = [ [[package]] name = "logos-blockchain-chain-broadcast-service" -version = "0.1.0" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81192877116cbc3eedf6688b85fab6dd0e448290" +version = "0.2.1" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81dbb4517aa466358ed425d92fad7d45a0c419fd" dependencies = [ "async-trait", "derivative", @@ -4741,8 +4713,8 @@ dependencies = [ [[package]] name = "logos-blockchain-chain-service" -version = "0.1.0" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81192877116cbc3eedf6688b85fab6dd0e448290" +version = "0.2.1" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81dbb4517aa466358ed425d92fad7d45a0c419fd" dependencies = [ "async-trait", "bytes", @@ -4771,8 +4743,8 @@ dependencies = [ [[package]] name = "logos-blockchain-circuits-prover" -version = "0.1.0" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81192877116cbc3eedf6688b85fab6dd0e448290" 
+version = "0.2.1" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81dbb4517aa466358ed425d92fad7d45a0c419fd" dependencies = [ "logos-blockchain-circuits-utils", "tempfile", @@ -4780,16 +4752,16 @@ dependencies = [ [[package]] name = "logos-blockchain-circuits-utils" -version = "0.1.0" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81192877116cbc3eedf6688b85fab6dd0e448290" +version = "0.2.1" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81dbb4517aa466358ed425d92fad7d45a0c419fd" dependencies = [ "dirs", ] [[package]] name = "logos-blockchain-common-http-client" -version = "0.1.0" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81192877116cbc3eedf6688b85fab6dd0e448290" +version = "0.2.1" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81dbb4517aa466358ed425d92fad7d45a0c419fd" dependencies = [ "futures", "hex", @@ -4808,8 +4780,8 @@ dependencies = [ [[package]] name = "logos-blockchain-core" -version = "0.1.0" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81192877116cbc3eedf6688b85fab6dd0e448290" +version = "0.2.1" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81dbb4517aa466358ed425d92fad7d45a0c419fd" dependencies = [ "ark-ff 0.4.2", "bincode", @@ -4838,8 +4810,8 @@ dependencies = [ [[package]] name = "logos-blockchain-cryptarchia-engine" -version = "0.1.0" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81192877116cbc3eedf6688b85fab6dd0e448290" +version = "0.2.1" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81dbb4517aa466358ed425d92fad7d45a0c419fd" dependencies = [ "cfg_eval", "logos-blockchain-pol", @@ -4854,8 +4826,8 @@ dependencies = [ [[package]] name = "logos-blockchain-cryptarchia-sync" -version = "0.1.0" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81192877116cbc3eedf6688b85fab6dd0e448290" +version = "0.2.1" 
+source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81dbb4517aa466358ed425d92fad7d45a0c419fd" dependencies = [ "bytes", "futures", @@ -4871,8 +4843,8 @@ dependencies = [ [[package]] name = "logos-blockchain-groth16" -version = "0.1.0" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81192877116cbc3eedf6688b85fab6dd0e448290" +version = "0.2.1" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81dbb4517aa466358ed425d92fad7d45a0c419fd" dependencies = [ "ark-bn254 0.4.0", "ark-ec 0.4.2", @@ -4889,8 +4861,8 @@ dependencies = [ [[package]] name = "logos-blockchain-http-api-common" -version = "0.1.0" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81192877116cbc3eedf6688b85fab6dd0e448290" +version = "0.2.1" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81dbb4517aa466358ed425d92fad7d45a0c419fd" dependencies = [ "axum 0.7.9", "logos-blockchain-core", @@ -4903,8 +4875,8 @@ dependencies = [ [[package]] name = "logos-blockchain-key-management-system-keys" -version = "0.1.0" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81192877116cbc3eedf6688b85fab6dd0e448290" +version = "0.2.1" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81dbb4517aa466358ed425d92fad7d45a0c419fd" dependencies = [ "async-trait", "bytes", @@ -4929,18 +4901,18 @@ dependencies = [ [[package]] name = "logos-blockchain-key-management-system-macros" -version = "0.1.0" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81192877116cbc3eedf6688b85fab6dd0e448290" +version = "0.2.1" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81dbb4517aa466358ed425d92fad7d45a0c419fd" dependencies = [ "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] name = "logos-blockchain-key-management-system-operators" -version = "0.1.0" -source = 
"git+https://github.com/logos-blockchain/logos-blockchain.git#81192877116cbc3eedf6688b85fab6dd0e448290" +version = "0.2.1" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81dbb4517aa466358ed425d92fad7d45a0c419fd" dependencies = [ "async-trait", "logos-blockchain-blend-proofs", @@ -4955,8 +4927,8 @@ dependencies = [ [[package]] name = "logos-blockchain-key-management-system-service" -version = "0.1.0" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81192877116cbc3eedf6688b85fab6dd0e448290" +version = "0.2.1" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81dbb4517aa466358ed425d92fad7d45a0c419fd" dependencies = [ "async-trait", "log", @@ -4971,8 +4943,8 @@ dependencies = [ [[package]] name = "logos-blockchain-ledger" -version = "0.1.0" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81192877116cbc3eedf6688b85fab6dd0e448290" +version = "0.2.1" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81dbb4517aa466358ed425d92fad7d45a0c419fd" dependencies = [ "derivative", "logos-blockchain-blend-crypto", @@ -4995,8 +4967,8 @@ dependencies = [ [[package]] name = "logos-blockchain-network-service" -version = "0.1.0" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81192877116cbc3eedf6688b85fab6dd0e448290" +version = "0.2.1" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81dbb4517aa466358ed425d92fad7d45a0c419fd" dependencies = [ "async-trait", "futures", @@ -5011,8 +4983,8 @@ dependencies = [ [[package]] name = "logos-blockchain-poc" -version = "0.1.0" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81192877116cbc3eedf6688b85fab6dd0e448290" +version = "0.2.1" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81dbb4517aa466358ed425d92fad7d45a0c419fd" dependencies = [ "logos-blockchain-circuits-prover", "logos-blockchain-circuits-utils", @@ -5027,8 +4999,8 @@ dependencies 
= [ [[package]] name = "logos-blockchain-pol" -version = "0.1.0" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81192877116cbc3eedf6688b85fab6dd0e448290" +version = "0.2.1" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81dbb4517aa466358ed425d92fad7d45a0c419fd" dependencies = [ "astro-float", "logos-blockchain-circuits-prover", @@ -5046,8 +5018,8 @@ dependencies = [ [[package]] name = "logos-blockchain-poq" -version = "0.1.0" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81192877116cbc3eedf6688b85fab6dd0e448290" +version = "0.2.1" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81dbb4517aa466358ed425d92fad7d45a0c419fd" dependencies = [ "logos-blockchain-circuits-prover", "logos-blockchain-circuits-utils", @@ -5063,8 +5035,8 @@ dependencies = [ [[package]] name = "logos-blockchain-poseidon2" -version = "0.1.0" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81192877116cbc3eedf6688b85fab6dd0e448290" +version = "0.2.1" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81dbb4517aa466358ed425d92fad7d45a0c419fd" dependencies = [ "ark-bn254 0.4.0", "ark-ff 0.4.2", @@ -5074,8 +5046,8 @@ dependencies = [ [[package]] name = "logos-blockchain-services-utils" -version = "0.1.0" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81192877116cbc3eedf6688b85fab6dd0e448290" +version = "0.2.1" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81dbb4517aa466358ed425d92fad7d45a0c419fd" dependencies = [ "async-trait", "futures", @@ -5089,8 +5061,8 @@ dependencies = [ [[package]] name = "logos-blockchain-storage-service" -version = "0.1.0" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81192877116cbc3eedf6688b85fab6dd0e448290" +version = "0.2.1" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81dbb4517aa466358ed425d92fad7d45a0c419fd" dependencies = [ 
"async-trait", "bytes", @@ -5106,8 +5078,8 @@ dependencies = [ [[package]] name = "logos-blockchain-time-service" -version = "0.1.0" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81192877116cbc3eedf6688b85fab6dd0e448290" +version = "0.2.1" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81dbb4517aa466358ed425d92fad7d45a0c419fd" dependencies = [ "async-trait", "futures", @@ -5124,12 +5096,12 @@ dependencies = [ [[package]] name = "logos-blockchain-utils" -version = "0.1.0" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81192877116cbc3eedf6688b85fab6dd0e448290" +version = "0.2.1" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81dbb4517aa466358ed425d92fad7d45a0c419fd" dependencies = [ "async-trait", "blake2", - "cipher", + "cipher 0.4.4", "const-hex", "humantime", "overwatch", @@ -5141,8 +5113,8 @@ dependencies = [ [[package]] name = "logos-blockchain-utxotree" -version = "0.1.0" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81192877116cbc3eedf6688b85fab6dd0e448290" +version = "0.2.1" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81dbb4517aa466358ed425d92fad7d45a0c419fd" dependencies = [ "ark-ff 0.4.2", "logos-blockchain-groth16", @@ -5155,16 +5127,16 @@ dependencies = [ [[package]] name = "logos-blockchain-witness-generator" -version = "0.1.0" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81192877116cbc3eedf6688b85fab6dd0e448290" +version = "0.2.1" +source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81dbb4517aa466358ed425d92fad7d45a0c419fd" dependencies = [ "tempfile", ] [[package]] name = "logos-blockchain-zksign" -version = "0.1.0" -source = "git+https://github.com/logos-blockchain/logos-blockchain.git#81192877116cbc3eedf6688b85fab6dd0e448290" +version = "0.2.1" +source = 
"git+https://github.com/logos-blockchain/logos-blockchain.git#81dbb4517aa466358ed425d92fad7d45a0c419fd" dependencies = [ "logos-blockchain-circuits-prover", "logos-blockchain-circuits-utils", @@ -5266,7 +5238,7 @@ dependencies = [ "manyhow-macros", "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -5288,7 +5260,7 @@ checksum = "757aee279b8bdbb9f9e676796fd459e4207a1f986e87886700abf589f5abf771" dependencies = [ "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -5321,7 +5293,7 @@ checksum = "5cf92c10c7e361d6b99666ec1c6f9805b0bea2c3bd8c78dc6fe98ac5bd78db11" dependencies = [ "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -5560,7 +5532,7 @@ dependencies = [ name = "nssa" version = "0.1.0" dependencies = [ - "amm_core", + "anyhow", "borsh", "env_logger", "hex", @@ -5671,7 +5643,7 @@ checksum = "ed3955f1a9c7c0c15e092f9c887db08b1fc683305fdf6eb6684f22555355e202" dependencies = [ "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -5733,7 +5705,7 @@ checksum = "ff32365de1b6743cb203b710788263c44a03de03802daf96092f2da4fe6ba4d7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -5765,15 +5737,6 @@ dependencies = [ "ruzstd", ] -[[package]] -name = "object" -version = "0.37.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff76201f031d8863c38aa7f905eca4f53abbfa15f609db4277d44cd8938f33fe" -dependencies = [ - "memchr", -] - [[package]] name = "oco_ref" version = "0.2.1" @@ -5825,7 +5788,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -5854,7 +5817,7 @@ checksum = "969ccca8ffc4fb105bd131a228107d5c9dd89d9d627edf3295cbe979156f9712" dependencies = [ "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -5872,7 +5835,7 @@ checksum = 
"8c04f5d74368e4d0dfe06c45c8627c81bd7c317d52762d118fb9b3076f6420fd" [[package]] name = "overwatch" version = "0.1.0" -source = "git+https://github.com/logos-co/Overwatch?rev=f5a9902#f5a99022f389d65adbd55e51f1e3f9eead62432a" +source = "git+https://github.com/logos-co/Overwatch?rev=448c192#448c192895b8311c742b1726a1bb12ee314ad95c" dependencies = [ "async-trait", "futures", @@ -5887,13 +5850,13 @@ dependencies = [ [[package]] name = "overwatch-derive" version = "0.1.0" -source = "git+https://github.com/logos-co/Overwatch?rev=f5a9902#f5a99022f389d65adbd55e51f1e3f9eead62432a" +source = "git+https://github.com/logos-co/Overwatch?rev=448c192#448c192895b8311c742b1726a1bb12ee314ad95c" dependencies = [ "convert_case 0.8.0", "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -5947,7 +5910,7 @@ dependencies = [ "regex", "regex-syntax", "structmeta", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -5979,29 +5942,29 @@ checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" [[package]] name = "pin-project" -version = "1.1.10" +version = "1.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "677f1add503faace112b9f1373e43e9e054bfdd22ff1a63c1bc485eaec6a6a8a" +checksum = "f1749c7ed4bcaf4c3d0a3efc28538844fb29bcdd7d2b67b2be7e20ba861ff517" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.1.10" +version = "1.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861" +checksum = "d9b20ed30f105399776b9c883e68e536ef602a16ae6f596d2c473591d6ad64c6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] name = "pin-project-lite" -version = "0.2.16" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b" +checksum = 
"a89322df9ebe1c1578d689c92318e070967d1042b512afbe49518723f4e6d5cd" [[package]] name = "pin-utils" @@ -6036,6 +5999,12 @@ version = "0.3.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" +[[package]] +name = "plain" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4596b6d070b27117e987119b4dac604f3c58cfb0b191112e24771b2faeac1a6" + [[package]] name = "polyval" version = "0.6.2" @@ -6043,7 +6012,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9d1fe60d06143b2430aa532c94cfe9e29783047f06c0d7fd359a9a51b729fa25" dependencies = [ "cfg-if", - "cpufeatures", + "cpufeatures 0.2.17", "opaque-debug", "universal-hash", ] @@ -6107,16 +6076,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b" dependencies = [ "proc-macro2", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] name = "proc-macro-crate" -version = "3.4.0" +version = "3.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "219cb19e96be00ab2e37d6e299658a0cfa83e52429179969b0f0121b4ac46983" +checksum = "e67ba7e9b2b56446f1d419b1d807906278ffa1a658a8a5d8a39dcb1f5a78614f" dependencies = [ - "toml_edit 0.23.10+spec-1.0.0", + "toml_edit 0.25.4+spec-1.1.0", ] [[package]] @@ -6138,7 +6107,7 @@ dependencies = [ "proc-macro-error-attr2", "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -6169,7 +6138,7 @@ checksum = "af066a9c399a26e020ada66a034357a868728e72cd426f3adcd35f80d88d88c8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", "version_check", "yansi", ] @@ -6249,7 +6218,7 @@ dependencies = [ "itertools 0.14.0", "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -6262,7 +6231,7 @@ dependencies = [ "itertools 0.14.0", "proc-macro2", "quote", - "syn 2.0.116", + 
"syn 2.0.117", ] [[package]] @@ -6301,7 +6270,7 @@ dependencies = [ "quinn-udp", "rustc-hash", "rustls", - "socket2 0.6.2", + "socket2 0.6.3", "thiserror 2.0.18", "tokio", "tracing", @@ -6310,9 +6279,9 @@ dependencies = [ [[package]] name = "quinn-proto" -version = "0.11.13" +version = "0.11.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1906b49b0c3bc04b5fe5d86a77925ae6524a19b816ae38ce1e426255f1d8a31" +checksum = "434b42fec591c96ef50e21e886936e66d3cc3f737104fdb9b737c40ffb94c098" dependencies = [ "bytes", "getrandom 0.3.4", @@ -6338,16 +6307,16 @@ dependencies = [ "cfg_aliases", "libc", "once_cell", - "socket2 0.6.2", + "socket2 0.6.3", "tracing", "windows-sys 0.60.2", ] [[package]] name = "quote" -version = "1.0.44" +version = "1.0.45" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21b2ebcf727b7760c461f091f9f0f539b77b8e87f2fd88131e7f1b433b3cece4" +checksum = "41f2619966050689382d2b44f664f4bc593e129785a36d6ee376ddf37259b924" dependencies = [ "proc-macro2", ] @@ -6371,7 +6340,7 @@ dependencies = [ "proc-macro-utils", "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -6380,6 +6349,12 @@ version = "5.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f" +[[package]] +name = "r-efi" +version = "6.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f8dcc9c7d52a811697d2151c701e0d08956f92b0e24136cf4cf27b57a6a0d9bf" + [[package]] name = "radium" version = "0.7.0" @@ -6506,12 +6481,12 @@ dependencies = [ [[package]] name = "reactive_stores" -version = "0.3.1" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35372f05664a62a3dd389503371a15b8feb3396f99f6ec000de651fddb030942" +checksum = "3e114642d342893571ff40b4e1da8ccdea907be44c649041eb7d8413b3fd95e8" dependencies = [ - "dashmap", "guardian", + "indexmap 2.13.0", 
"itertools 0.14.0", "or_poisoned", "paste", @@ -6523,15 +6498,15 @@ dependencies = [ [[package]] name = "reactive_stores_macro" -version = "0.2.6" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fa40919eb2975100283b2a70e68eafce1e8bcf81f0622ff168e4c2b3f8d46bb" +checksum = "5b024812c47a6867b6cb32767a46182203f94e59eb88c69b032fd9caffa304ce" dependencies = [ - "convert_case 0.8.0", + "convert_case 0.11.0", "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -6545,9 +6520,9 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.7.1" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35985aa610addc02e24fc232012c86fd11f14111180f902b67e2d5331f8ebf2b" +checksum = "6ce70a74e890531977d37e532c34d45e9055d2409ed08ddba14529471ed0be16" dependencies = [ "bitflags 2.11.0", ] @@ -6580,7 +6555,7 @@ checksum = "b7186006dcb21920990093f30e3dea63b7d6e977bf1256be20c3563a5db070da" dependencies = [ "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -6614,9 +6589,9 @@ checksum = "cab834c73d247e67f4fae452806d17d3c7501756d98c8808d7c9c7aa7d18f973" [[package]] name = "regex-syntax" -version = "0.8.9" +version = "0.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a96887878f22d7bad8a3b6dc5b7440e0ada9a245242924394987b21cf2210a4c" +checksum = "dc897dd8d9e8bd1ed8cdad82b5966c3e0ecae09fb1907d58efaa013543185d0a" [[package]] name = "reqwest" @@ -6630,7 +6605,7 @@ dependencies = [ "futures-channel", "futures-core", "futures-util", - "h2 0.4.13", + "h2", "http 1.4.0", "http-body", "http-body-util", @@ -6705,7 +6680,7 @@ dependencies = [ "anyhow", "borsh", "bytemuck", - "derive_more 2.1.1", + "derive_more", "elf", "lazy_static", "postcard", @@ -6785,7 +6760,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"f8eae53a7bf1c09828dfd46ed5c942cefbf4bef3c4400f6758001569a834c462" dependencies = [ "cc", - "derive_more 2.1.1", + "derive_more", "glob", "risc0-build-kernel", "risc0-core", @@ -6840,7 +6815,7 @@ dependencies = [ "bytemuck", "byteorder", "cfg-if", - "derive_more 2.1.1", + "derive_more", "enum-map", "gdbstub", "gdbstub_arch", @@ -6870,7 +6845,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "69d677ec41e475534e18e58889ef0626dcdabf5e918804ef847da0c0bbf300b3" dependencies = [ "cc", - "derive_more 2.1.1", + "derive_more", "glob", "risc0-build-kernel", "risc0-core", @@ -6974,25 +6949,25 @@ version = "3.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "22b7eafb5d85be59cbd9da83f662cf47d834f1b836e14f675d1530b12c666867" dependencies = [ - "addr2line 0.24.2", + "addr2line", "anyhow", "bincode", "bonsai-sdk", "borsh", "bytemuck", "bytes", - "derive_more 2.1.1", + "derive_more", "elf", "enum-map", "gdbstub", "gdbstub_arch", - "gimli 0.31.1", + "gimli", "hex", "keccak", "lazy-regex", "num-bigint 0.4.6", "num-traits", - "object 0.36.7", + "object", "prost 0.13.5", "rand 0.9.2", "rayon", @@ -7119,7 +7094,7 @@ dependencies = [ "proc-macro2", "proc-macro2-diagnostics", "quote", - "syn 2.0.116", + "syn 2.0.117", "syn_derive", "thiserror 2.0.18", ] @@ -7169,9 +7144,9 @@ dependencies = [ [[package]] name = "rustix" -version = "1.1.3" +version = "1.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "146c9e247ccc180c1f61615433868c99f3de3ae256a30a43b49f67c2d9171f34" +checksum = "b6fe4565b9518b83ef4f91bb47ce29620ca828bd32cb7e408f0062e9930ba190" dependencies = [ "bitflags 2.11.0", "errno", @@ -7182,9 +7157,9 @@ dependencies = [ [[package]] name = "rustls" -version = "0.23.36" +version = "0.23.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c665f33d38cea657d9614f766881e4d510e0eda4239891eea56b4cadcf01801b" +checksum = 
"758025cb5fccfd3bc2fd74708fd4682be41d99e5dff73c377c0646c6012c73a4" dependencies = [ "log", "once_cell", @@ -7356,7 +7331,7 @@ dependencies = [ "proc-macro2", "quote", "serde_derive_internals", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -7402,9 +7377,9 @@ dependencies = [ [[package]] name = "security-framework" -version = "3.6.0" +version = "3.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d17b898a6d6948c3a8ee4372c17cb384f90d2e6e912ef00895b14fd7ab54ec38" +checksum = "b7f4bc775c73d9a02cde8bf7b2ec4c9d12743edf609006c7facc23998404cd1d" dependencies = [ "bitflags 2.11.0", "core-foundation 0.10.1", @@ -7415,9 +7390,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.16.0" +version = "2.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "321c8673b092a9a42605034a9879d73cb79101ed5fd117bc9a597b89b4e9e61a" +checksum = "6ce2691df843ecc5d231c0b14ece2acc3efb62c0a398c7e1d875f3983ce020e3" dependencies = [ "core-foundation-sys", "libc", @@ -7557,7 +7532,7 @@ checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" dependencies = [ "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -7568,7 +7543,7 @@ checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711" dependencies = [ "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -7614,7 +7589,7 @@ checksum = "175ee3e80ae9982737ca543e96133087cbd9a485eecc3bc4de9c1a37b47ea59c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -7649,9 +7624,9 @@ dependencies = [ [[package]] name = "serde_with" -version = "3.16.1" +version = "3.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fa237f2807440d238e0364a218270b98f767a00d3dada77b1c53ae88940e2e7" +checksum = "381b283ce7bc6b476d903296fb59d0d36633652b633b27f64db4fb46dcbfc3b9" dependencies = [ "base64 0.22.1", "chrono", @@ -7668,14 
+7643,14 @@ dependencies = [ [[package]] name = "serde_with_macros" -version = "3.16.1" +version = "3.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52a8e3ca0ca629121f70ab50f95249e5a6f925cc0f6ffe8256c45b728875706c" +checksum = "a6d4e30573c8cb306ed6ab1dca8423eec9a463ea0e155f45399455e0368b27e0" dependencies = [ "darling 0.21.3", "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -7703,9 +7678,9 @@ dependencies = [ [[package]] name = "server_fn" -version = "0.8.10" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fafe3a832a747ded8dc4827c538a2a064284f150c1c44c51ec56b58bd947dd7" +checksum = "7c799cec4e8e210dfb2f203aa97f0e82232c619e385ef4d011b17a58d6397c7b" dependencies = [ "axum 0.8.8", "base64 0.22.1", @@ -7743,16 +7718,16 @@ dependencies = [ [[package]] name = "server_fn_macro" -version = "0.8.9" +version = "0.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14faf423aab09f8c3eb2d9785bb37f11a255cdf01857d3c6083eacc82269c191" +checksum = "1295b54815397d30d986b63f93cfd515fa86d5e528e0bb589ce9d530502f9e0f" dependencies = [ "const_format", "convert_case 0.11.0", "proc-macro2", "quote", "rustc_version", - "syn 2.0.116", + "syn 2.0.117", "xxhash-rust", ] @@ -7763,7 +7738,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "63eb08f80db903d3c42f64e60ebb3875e0305be502bdc064ec0a0eab42207f00" dependencies = [ "server_fn_macro", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -7773,7 +7748,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" dependencies = [ "cfg-if", - "cpufeatures", + "cpufeatures 0.2.17", "digest", ] @@ -7784,7 +7759,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283" dependencies = [ "cfg-if", - 
"cpufeatures", + "cpufeatures 0.2.17", "digest", ] @@ -7851,16 +7826,6 @@ dependencies = [ "tokio", ] -[[package]] -name = "socket2" -version = "0.4.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f7916fc008ca5542385b89a3d3ce689953c143e9304a9bf8beec1de48994c0d" -dependencies = [ - "libc", - "winapi", -] - [[package]] name = "socket2" version = "0.5.10" @@ -7873,12 +7838,12 @@ dependencies = [ [[package]] name = "socket2" -version = "0.6.2" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86f4aa3ad99f2088c990dfa82d367e19cb29268ed67c574d10d0a4bfe71f07e0" +checksum = "3a766e1110788c36f4fa1c2b71b387a7815aa65f88ce0229841826633d93723e" dependencies = [ "libc", - "windows-sys 0.60.2", + "windows-sys 0.61.2", ] [[package]] @@ -7923,7 +7888,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d904e7009df136af5297832a3ace3370cd14ff1546a232f4f185036c2736fcac" dependencies = [ "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -7965,7 +7930,7 @@ dependencies = [ "proc-macro2", "quote", "structmeta-derive", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -7976,7 +7941,7 @@ checksum = "152a0b65a590ff6c3da95cabe2353ee04e6167c896b28e3b14478c2636c922fc" dependencies = [ "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -7997,7 +7962,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -8019,9 +7984,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.116" +version = "2.0.117" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3df424c70518695237746f84cede799c9c58fcb37450d7b23716568cc8bc69cb" +checksum = "e665b8803e7b1d2a727f4023456bbbbe74da67099c585258af0ad9c5013b9b99" dependencies = [ "proc-macro2", "quote", @@ -8037,7 +8002,7 @@ dependencies = [ "proc-macro-error2", "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ 
-8057,7 +8022,7 @@ checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" dependencies = [ "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -8083,9 +8048,9 @@ dependencies = [ [[package]] name = "tachys" -version = "0.2.13" +version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14691ce610071757bd17bd8d572065192c9c93f9f169125390aaea345a4c56b9" +checksum = "f768750b0d5514f487772187d4b20c66f56faff4541b1faa5aad4975f5aee085" dependencies = [ "any_spawner", "async-trait", @@ -8121,26 +8086,17 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" [[package]] name = "tempfile" -version = "3.25.0" +version = "3.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0136791f7c95b1f6dd99f9cc786b91bb81c3800b639b3478e561ddb7be95e5f1" +checksum = "82a72c767771b47409d2345987fda8628641887d5466101319899796367354a0" dependencies = [ "fastrand", - "getrandom 0.4.1", + "getrandom 0.4.2", "once_cell", "rustix", "windows-sys 0.61.2", ] -[[package]] -name = "termcolor" -version = "1.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755" -dependencies = [ - "winapi-util", -] - [[package]] name = "test-case" version = "3.3.1" @@ -8159,7 +8115,7 @@ dependencies = [ "cfg-if", "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -8170,7 +8126,7 @@ checksum = "5c89e72a01ed4c579669add59014b9a524d609c0c88c6a585ce37485879f6ffb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", "test-case-core", ] @@ -8191,9 +8147,9 @@ dependencies = [ [[package]] name = "testcontainers" -version = "0.27.0" +version = "0.27.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3fdcea723c64cc08dbc533b3761e345a15bf1222cbe6cb611de09b43f17a168" +checksum = 
"c1c0624faaa317c56d6d19136580be889677259caf5c897941c6f446b4655068" dependencies = [ "astral-tokio-tar", "async-trait", @@ -8248,7 +8204,7 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -8259,7 +8215,7 @@ checksum = "ebc4ee7f67670e9b64d05fa4253e753e016c6c95ff35b89b7941d6b856dec1d5" dependencies = [ "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -8355,9 +8311,9 @@ dependencies = [ [[package]] name = "tokio" -version = "1.49.0" +version = "1.50.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72a2903cd7736441aac9df9d7688bd0ce48edccaadf181c3b90be801e81d3d86" +checksum = "27ad5e34374e03cfffefc301becb44e9dc3c17584f414349ebe29ed26661822d" dependencies = [ "bytes", "libc", @@ -8365,20 +8321,20 @@ dependencies = [ "parking_lot", "pin-project-lite", "signal-hook-registry", - "socket2 0.6.2", + "socket2 0.6.3", "tokio-macros", "windows-sys 0.61.2", ] [[package]] name = "tokio-macros" -version = "2.6.0" +version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5" +checksum = "5c55a2eff8b69ce66c84f85e1da1c233edc36ceb85a2058d11b0d6a3c7e7569c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -8495,6 +8451,15 @@ dependencies = [ "serde_core", ] +[[package]] +name = "toml_datetime" +version = "1.0.0+spec-1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32c2555c699578a4f59f0cc68e5116c8d7cabbd45e1409b989d4be085b53f13e" +dependencies = [ + "serde_core", +] + [[package]] name = "toml_edit" version = "0.22.27" @@ -8511,12 +8476,12 @@ dependencies = [ [[package]] name = "toml_edit" -version = "0.23.10+spec-1.0.0" +version = "0.25.4+spec-1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"84c8b9f757e028cee9fa244aea147aab2a9ec09d5325a9b01e0a49730c2b5269" +checksum = "7193cbd0ce53dc966037f54351dbbcf0d5a642c7f0038c382ef9e677ce8c13f2" dependencies = [ "indexmap 2.13.0", - "toml_datetime 0.7.5+spec-1.1.0", + "toml_datetime 1.0.0+spec-1.1.0", "toml_parser", "winnow", ] @@ -8544,15 +8509,15 @@ checksum = "ab16f14aed21ee8bfd8ec22513f7287cd4a91aa92e44edfe2c17ddd004e92607" [[package]] name = "tonic" -version = "0.14.4" +version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f32a6f80051a4111560201420c7885d0082ba9efe2ab61875c587bb6b18b9a0" +checksum = "fec7c61a0695dc1887c1b53952990f3ad2e3a31453e1f49f10e75424943a93ec" dependencies = [ "async-trait", "axum 0.8.8", "base64 0.22.1", "bytes", - "h2 0.4.13", + "h2", "http 1.4.0", "http-body", "http-body-util", @@ -8561,7 +8526,7 @@ dependencies = [ "hyper-util", "percent-encoding", "pin-project", - "socket2 0.6.2", + "socket2 0.6.3", "sync_wrapper", "tokio", "tokio-stream", @@ -8573,9 +8538,9 @@ dependencies = [ [[package]] name = "tonic-prost" -version = "0.14.4" +version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f86539c0089bfd09b1f8c0ab0239d80392af74c21bc9e0f15e1b4aca4c1647f" +checksum = "a55376a0bbaa4975a3f10d009ad763d8f4108f067c7c2e74f3001fb49778d309" dependencies = [ "bytes", "prost 0.14.3", @@ -8661,7 +8626,7 @@ checksum = "7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da" dependencies = [ "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -8755,7 +8720,7 @@ checksum = "076a02dc54dd46795c2e9c8282ed40bcfb1e22747e955de9389a1de28190fb26" dependencies = [ "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -8791,7 +8756,7 @@ checksum = "27a7a9b72ba121f6f1f6c3632b85604cac41aedb5ddc70accbebb6cac83de846" dependencies = [ "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -8851,7 +8816,7 @@ version = "0.5.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "fc1de2c688dc15305988b563c3854064043356019f97a4b46276fe734c4f07ea" dependencies = [ - "crypto-common", + "crypto-common 0.1.7", "subtle", ] @@ -8945,11 +8910,11 @@ checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" [[package]] name = "uuid" -version = "1.21.0" +version = "1.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b672338555252d43fd2240c714dc444b8c6fb0a5c5335e65a07bba7742735ddb" +checksum = "a68d3c8f01c0cfa54a75291d83601161799e4a89a39e0929f4b0354d88757a37" dependencies = [ - "getrandom 0.4.1", + "getrandom 0.4.2", "js-sys", "wasm-bindgen", ] @@ -8989,9 +8954,9 @@ dependencies = [ "amm_core", "anyhow", "async-stream", + "base58", "base64 0.22.1", "borsh", - "bytemuck", "clap", "common", "env_logger", @@ -9006,7 +8971,6 @@ dependencies = [ "nssa", "nssa_core", "optfield", - "rand 0.8.5", "serde", "serde_json", "sha2", @@ -9063,9 +9027,9 @@ dependencies = [ [[package]] name = "wasm-bindgen" -version = "0.2.108" +version = "0.2.114" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64024a30ec1e37399cf85a7ffefebdb72205ca1c972291c51512360d90bd8566" +checksum = "6532f9a5c1ece3798cb1c2cfdba640b9b3ba884f5db45973a6f442510a87d38e" dependencies = [ "cfg-if", "once_cell", @@ -9076,9 +9040,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-futures" -version = "0.4.58" +version = "0.4.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70a6e77fd0ae8029c9ea0063f87c46fde723e7d887703d74ad2616d792e51e6f" +checksum = "e9c5522b3a28661442748e09d40924dfb9ca614b21c00d3fd135720e48b67db8" dependencies = [ "cfg-if", "futures-util", @@ -9090,9 +9054,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.108" +version = "0.2.114" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "008b239d9c740232e71bd39e8ef6429d27097518b6b30bdf9086833bd5b6d608" 
+checksum = "18a2d50fcf105fb33bb15f00e7a77b772945a2ee45dcf454961fd843e74c18e6" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -9100,22 +9064,22 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.108" +version = "0.2.114" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5256bae2d58f54820e6490f9839c49780dff84c65aeab9e772f15d5f0e913a55" +checksum = "03ce4caeaac547cdf713d280eda22a730824dd11e6b8c3ca9e42247b25c631e3" dependencies = [ "bumpalo", "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.108" +version = "0.2.114" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f01b580c9ac74c8d8f0c0e4afb04eeef2acf145458e52c03845ee9cd23e3d12" +checksum = "75a326b8c223ee17883a4251907455a2431acc2791c98c26279376490c378c16" dependencies = [ "unicode-ident", ] @@ -9187,7 +9151,7 @@ dependencies = [ "base16", "quote", "sha2", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -9204,9 +9168,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.85" +version = "0.3.91" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "312e32e551d92129218ea9a2452120f4aabc03529ef03e4d0d82fb2780608598" +checksum = "854ba17bb104abfb26ba36da9729addc7ce7f06f5c0f90f3c391f8461cca21f9" dependencies = [ "js-sys", "wasm-bindgen", @@ -9311,7 +9275,7 @@ checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf" dependencies = [ "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -9322,7 +9286,7 @@ checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358" dependencies = [ "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -9593,9 +9557,9 @@ checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650" [[package]] name = "winnow" -version = "0.7.14" +version = "0.7.15" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a5364e9d77fcdeeaa6062ced926ee3381faa2ee02d3eb83a5c27a8825540829" +checksum = "df79d97927682d2fd8adb29682d1140b343be4ac0f08fd68b7765d9c059d3945" dependencies = [ "memchr", ] @@ -9630,7 +9594,7 @@ dependencies = [ "heck", "indexmap 2.13.0", "prettyplease", - "syn 2.0.116", + "syn 2.0.117", "wasm-metadata", "wit-bindgen-core", "wit-component", @@ -9646,7 +9610,7 @@ dependencies = [ "prettyplease", "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", "wit-bindgen-core", "wit-bindgen-rust", ] @@ -9767,28 +9731,28 @@ checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", "synstructure", ] [[package]] name = "zerocopy" -version = "0.8.39" +version = "0.8.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db6d35d663eadb6c932438e763b262fe1a70987f9ae936e60158176d710cae4a" +checksum = "f2578b716f8a7a858b7f02d5bd870c14bf4ddbbcf3a4c05414ba6503640505e3" dependencies = [ "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.8.39" +version = "0.8.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4122cd3169e94605190e77839c9a40d40ed048d305bfdc146e7df40ab0f3e517" +checksum = "7e6cc098ea4d3bd6246687de65af3f920c430e236bee1e3bf2e441463f08a02f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -9808,7 +9772,7 @@ checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" dependencies = [ "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", "synstructure", ] @@ -9829,7 +9793,7 @@ checksum = "85a5b4158499876c763cb03bc4e49185d3cccbabb15b33c627f7884f43db852e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] @@ -9862,7 +9826,7 @@ checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3" dependencies = [ "proc-macro2", 
"quote", - "syn 2.0.116", + "syn 2.0.117", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index c868e0a9..bcd11651 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -58,7 +58,7 @@ amm_program = { path = "programs/amm" } test_program_methods = { path = "test_program_methods" } bedrock_client = { path = "bedrock_client" } -tokio = { version = "1.28.2", features = [ +tokio = { version = "1.50", features = [ "net", "rt-multi-thread", "sync", @@ -75,15 +75,15 @@ serde = { version = "1.0.60", default-features = false, features = ["derive"] } serde_json = "1.0.81" serde_with = "3.16.1" actix = "0.13.0" -actix-cors = "0.6.1" +actix-cors = "0.7.1" jsonrpsee = "0.26.0" futures = "0.3" actix-rt = "*" lazy_static = "1.5.0" -env_logger = "0.10" +env_logger = "0.11" log = "0.4.28" -lru = "0.7.8" -thiserror = "2.0.12" +lru = "0.16.3" +thiserror = "2.0" sha2 = "0.10.8" hex = "0.4.3" bytemuck = "1.24.0" @@ -91,7 +91,7 @@ bytesize = { version = "2.3.1", features = ["serde"] } humantime-serde = "1.1" humantime = "2.1" aes-gcm = "0.10.3" -toml = "0.7.4" +toml = "0.9.8" bincode = "1.3.3" tempfile = "3.14.0" light-poseidon = "0.3.0" @@ -107,7 +107,7 @@ base58 = "0.2.0" itertools = "0.14.0" url = { version = "2.5.4", features = ["serde"] } tokio-retry = "0.3.0" -schemars = "1.2.0" +schemars = "1.2" async-stream = "0.3.6" logos-blockchain-common-http-client = { git = "https://github.com/logos-blockchain/logos-blockchain.git" } @@ -129,7 +129,7 @@ k256 = { version = "0.13.3", features = [ "pem", ] } elliptic-curve = { version = "0.13.8", features = ["arithmetic"] } -actix-web = { version = "=4.1.0", default-features = false, features = [ +actix-web = { version = "4.13.0", default-features = false, features = [ "macros", ] } clap = { version = "4.5.42", features = ["derive", "env"] } @@ -141,3 +141,142 @@ inherits = "release" opt-level = 'z' lto = true codegen-units = 1 + +[workspace.lints.rust] +warnings = "deny" + +[workspace.lints] +clippy.all = { level = "deny", priority = -1 } + +# 
Pedantic +clippy.pedantic = { level = "deny", priority = -1 } + +# Reason: documenting every function returning Result is too verbose and doesn't add much value when you have good error types. +clippy.missing-errors-doc = "allow" +# Reason: most of the panics are internal and not part of the public API, so documenting them is not necessary. +clippy.missing-panics-doc = "allow" +# Reason: this isn't always bad and actually works well for our financial and cryptography code. +clippy.similar-names = "allow" +# Reason: this lint is too strict and hard to fix. +clippy.too-many-lines = "allow" +# Reason: std hasher is fine for us in public functions. +clippy.implicit-hasher = "allow" + +# Restriction +clippy.restriction = { level = "deny", priority = -1 } + +# Reason: we deny the whole `restriction` group but we allow things that don't make sense for us. +# That way we can still benefit from new lints added to the `restriction` group without having to +# explicitly allow them. +# As a downside our contributors don't know if some lint was enabled intentionally or just no one +# else faced it before to allow it but we can handle this during code reviews. +clippy.blanket-clippy-restriction-lints = "allow" +# Reason: we can't avoid using unwrap for now. +clippy.unwrap-used = "allow" +# Reason: we can't avoid using expect for now. +clippy.expect-used = "allow" +# Reason: unreachable is good in many cases. +clippy.unreachable = "allow" +# Reason: this is ridiculous strict in our codebase and doesn't add any value. +clippy.single-call-fn = "allow" +# Reason: we use panic in some places and it's okay. +clippy.panic = "allow" +# Reason: shadowing is good most of the times. +clippy.shadow-reuse = "allow" +# Reason: implicit return is good. +clippy.implicit-return = "allow" +# Reason: std is fine for us, we don't need to use core. +clippy.std-instead-of-core = "allow" +# Reason: std is fine for us, we don't need to use alloc. 
+clippy.std-instead-of-alloc = "allow" +# Reason: default methods are good most of the time. +clippy.missing-trait-methods = "allow" +# Reason: this is too verbose and doesn't help much if you have rust analyzer. +clippy.pattern-type-mismatch = "allow" +# Reason: decreases readability. +clippy.assertions-on-result-states = "allow" +# Reason: documenting every assert is too verbose. +clippy.missing-assert-message = "allow" +# Reason: documenting private items is too verbose and doesn't add much value. +clippy.missing-docs-in-private-items = "allow" +# Reason: we use separated suffix style. +clippy.separated_literal_suffix = "allow" +# Reason: sometimes absolute paths are more readable. +clippy.absolute-paths = "allow" +# Reason: sometimes it's as readable as full variable naming. +clippy.min-ident-chars = "allow" +# Reason: it's very common and handy. +clippy.indexing-slicing = "allow" +# Reason: we use little endian style. +clippy.little-endian-bytes = "allow" +# Reason: we use this style of pub visibility. +clippy.pub-with-shorthand = "allow" +# Reason: question mark operator is very cool. +clippy.question-mark-used = "allow" +# Reason: it's fine to panic in tests and some functions where it makes sense. +clippy.panic-in-result-fn = "allow" +# Reason: we don't care that much about inlining and LTO should take care of it. +clippy.missing_inline_in_public_items = "allow" +# Reason: it's okay for us. +clippy.default-numeric-fallback = "allow" +# Reason: this is fine for us. +clippy.exhaustive-enums = "allow" +# Reason: this is fine for us. +clippy.exhaustive-structs = "allow" +# Reason: this helps readability when item is imported in other modules. +clippy.module-name-repetitions = "allow" +# Reason: mostly historical reasons, maybe we'll address this in future. +clippy.mod-module-files = "allow" +# Reason: named module files is our preferred way. +clippy.self-named-module-files = "allow" +# Reason: this is actually quite handy. 
+clippy.impl-trait-in-params = "allow" +# Reason: this is often useful. +clippy.use-debug = "allow" +# Reason: this is sometimes useful. +clippy.field-scoped-visibility-modifiers = "allow" +# Reason: `pub use` is good for re-exports and hiding unnecessary details. +clippy.pub-use = "allow" +# Reason: we prefer semicolons inside blocks. +clippy.semicolon-outside-block = "allow" +# Reason: we don't do it blindly, this is mostly internal constraints checks. +clippy.unwrap-in-result = "allow" +# Reason: we don't see any problems with that. +clippy.shadow-same = "allow" +# Reason: this lint is too verbose. +clippy.let-underscore-untyped = "allow" +# Reason: this lint is actually bad as it forces to use wildcard `..` instead of +# field-by-field `_` which may lead to subtle bugs when new fields are added to the struct. +clippy.unneeded-field-pattern = "allow" + +# Nursery +clippy.nursery = { level = "deny", priority = -1 } + +# Reason: this is okay if it compiles. +clippy.future-not-send = "allow" +# Reason: this is actually a good lint, but currently it gives a lot of false-positives. +clippy.significant-drop-tightening = "allow" + +# Correctness +clippy.correctness = { level = "deny", priority = -1 } + +# Complexity +clippy.complexity = { level = "deny", priority = -1 } + +# Perf +clippy.perf = { level = "deny", priority = -1 } + +# Suspicious +clippy.suspicious = { level = "deny", priority = -1 } + +# Style +clippy.style = { level = "deny", priority = -1 } + +# Cargo +clippy.cargo = { level = "deny", priority = -1 } + +# Reason: we're not at this stage yet and it will be a pain to create a new crate. +clippy.cargo-common-metadata = "allow" +# Reason: hard to address right now and mostly comes from dependencies +# so the fix would be just a long list of exceptions. 
+clippy.multiple-crate-versions = "allow" diff --git a/Justfile b/Justfile index c8ffe09d..b4ec3a98 100644 --- a/Justfile +++ b/Justfile @@ -66,4 +66,5 @@ clean: rm -rf sequencer_runner/rocksdb rm -rf indexer/service/rocksdb rm -rf wallet/configs/debug/storage.json + rm -rf rocksdb cd bedrock && docker compose down -v diff --git a/README.md b/README.md index ee2d8097..1619747b 100644 --- a/README.md +++ b/README.md @@ -12,7 +12,7 @@ Public accounts are stored on-chain as a visible map from IDs to account states, ### Programmability and selective privacy -LEZ aims to deliver full programmability in a hybrid public/private model, with the same flexibility and composability as public blockchains. Developers write and deploy programs in LEZ just as they would elsewhere. The protocol automatically supports executions that involve any combination of public and private accounts. From the program’s perspective, all accounts look the same, and privacy is enforced transparently. This lets developers focus on business logic while the system guarantees privacy and correctness. +LEZ aims to deliver full programmability in a hybrid public/private model, with the same flexibility and composability as public blockchains. Developers write and deploy programs in LEZ without addressing privacy concerns. The protocol automatically supports executions that involve any combination of public and private accounts. From the program’s perspective, all accounts look the same, and privacy is enforced transparently. This lets developers focus on business logic while the system guarantees privacy and correctness. To our knowledge, this design is unique to LEZ. Other privacy-focused programmable blockchains often require developers to explicitly handle private inputs inside their app logic. In LEZ, privacy is protocol-level: programs do not change, accounts are treated uniformly, and private execution works out of the box. 
@@ -71,6 +71,17 @@ This design keeps public transactions as fast as any RISC-V–based VM and makes --- --- +--- + +# Versioning + +We release versions as git tags (e.g. `v0.1.0`). If no critical issues with version is found you can expect it to be immutable. All further features and fixes will be a part of the next tag. As the project is in active development we don't provide backward compatibility yet. +For each tag we publish docker images of our services. +If you depend on this project you can pin your rust dependency to a git tag like this: + +```toml +nssa_core = { git = "https://github.com/logos-blockchain/logos-execution-zone.git", tag = "v0.1.0" } +``` # Install dependencies ### Install build dependencies @@ -130,29 +141,31 @@ RUST_LOG=info RISC0_DEV_MODE=1 cargo run $(pwd)/configs/debug all ``` # Run the sequencer and node - - ## Running Manually ### Normal mode The sequencer and logos blockchain node can be run locally: 1. On one terminal go to the `logos-blockchain/logos-blockchain` repo and run a local logos blockchain node: - - `git checkout master; git pull` - - `cargo clean` - - `rm -r ~/.logos-blockchain-circuits` - - `./scripts/setup-logos-blockchain-circuits.sh` - - `cargo build --all-features` - - `./target/debug/logos-blockchain-node --deployment nodes/node/standalone-deployment-config.yaml nodes/node/standalone-node-config.yaml` + - `git checkout master; git pull` + - `cargo clean` + - `rm -r ~/.logos-blockchain-circuits` + - `./scripts/setup-logos-blockchain-circuits.sh` + - `cargo build --all-features` + - `./target/debug/logos-blockchain-node --deployment nodes/node/standalone-deployment-config.yaml nodes/node/standalone-node-config.yaml` - 2. 
Alternatively (WARNING: This node is outdated) go to ``logos-blockchain/lssa/` repo and run the node from docker: - - `cd bedrock` - - Change line 14 of `docker-compose.yml` from `"0:18080/tcp"` into `"8080:18080/tcp"` - - `docker compose up` + - Alternatively (WARNING: This node is outdated) go to `logos-blockchain/lssa/` repo and run the node from docker: + - `cd bedrock` + - Change line 14 of `docker-compose.yml` from `"0:18080/tcp"` into `"8080:18080/tcp"` + - `docker compose up` - 3. On another terminal go to the `logos-blockchain/lssa` repo and run indexer service: + 2. On another terminal go to the `logos-blockchain/lssa` repo and run indexer service: - `RUST_LOG=info cargo run -p indexer_service indexer/service/configs/indexer_config.json` - 4. On another terminal go to the `logos-blockchain/lssa` repo and run the sequencer: + 3. On another terminal go to the `logos-blockchain/lssa` repo and run the sequencer: - `RUST_LOG=info cargo run -p sequencer_runner sequencer_runner/configs/debug` + 4. 
(To run the explorer): on another terminal go to `logos-blockchain/lssa/explorer_service` and run the following: + - `cargo install cargo-leptos` + - `cargo leptos build --release` + - `cargo leptos serve --release` ### Notes on cleanup diff --git a/artifacts/program_methods/amm.bin b/artifacts/program_methods/amm.bin index 359d062a..d6e08b64 100644 Binary files a/artifacts/program_methods/amm.bin and b/artifacts/program_methods/amm.bin differ diff --git a/artifacts/program_methods/authenticated_transfer.bin b/artifacts/program_methods/authenticated_transfer.bin index 5ff56cf5..5dc0bf97 100644 Binary files a/artifacts/program_methods/authenticated_transfer.bin and b/artifacts/program_methods/authenticated_transfer.bin differ diff --git a/artifacts/program_methods/pinata.bin b/artifacts/program_methods/pinata.bin index aef1a70c..cd2ffa53 100644 Binary files a/artifacts/program_methods/pinata.bin and b/artifacts/program_methods/pinata.bin differ diff --git a/artifacts/program_methods/pinata_token.bin b/artifacts/program_methods/pinata_token.bin index 47ada3c2..c6a2a8dd 100644 Binary files a/artifacts/program_methods/pinata_token.bin and b/artifacts/program_methods/pinata_token.bin differ diff --git a/artifacts/program_methods/privacy_preserving_circuit.bin b/artifacts/program_methods/privacy_preserving_circuit.bin index 5b854dca..91dd81d5 100644 Binary files a/artifacts/program_methods/privacy_preserving_circuit.bin and b/artifacts/program_methods/privacy_preserving_circuit.bin differ diff --git a/artifacts/program_methods/token.bin b/artifacts/program_methods/token.bin index dbbf3c07..d047e652 100644 Binary files a/artifacts/program_methods/token.bin and b/artifacts/program_methods/token.bin differ diff --git a/artifacts/test_program_methods/burner.bin b/artifacts/test_program_methods/burner.bin index a3a2839e..0c0f3089 100644 Binary files a/artifacts/test_program_methods/burner.bin and b/artifacts/test_program_methods/burner.bin differ diff --git 
a/artifacts/test_program_methods/chain_caller.bin b/artifacts/test_program_methods/chain_caller.bin index b920b0e2..1abe0774 100644 Binary files a/artifacts/test_program_methods/chain_caller.bin and b/artifacts/test_program_methods/chain_caller.bin differ diff --git a/artifacts/test_program_methods/changer_claimer.bin b/artifacts/test_program_methods/changer_claimer.bin index 019b2df0..8c24294d 100644 Binary files a/artifacts/test_program_methods/changer_claimer.bin and b/artifacts/test_program_methods/changer_claimer.bin differ diff --git a/artifacts/test_program_methods/claimer.bin b/artifacts/test_program_methods/claimer.bin index 6aaa6bba..674ca600 100644 Binary files a/artifacts/test_program_methods/claimer.bin and b/artifacts/test_program_methods/claimer.bin differ diff --git a/artifacts/test_program_methods/data_changer.bin b/artifacts/test_program_methods/data_changer.bin index 5712c28e..bd5ea48a 100644 Binary files a/artifacts/test_program_methods/data_changer.bin and b/artifacts/test_program_methods/data_changer.bin differ diff --git a/artifacts/test_program_methods/extra_output.bin b/artifacts/test_program_methods/extra_output.bin index 20e0e12a..ab13c315 100644 Binary files a/artifacts/test_program_methods/extra_output.bin and b/artifacts/test_program_methods/extra_output.bin differ diff --git a/artifacts/test_program_methods/malicious_authorization_changer.bin b/artifacts/test_program_methods/malicious_authorization_changer.bin index 8c434e84..ebe05ff6 100644 Binary files a/artifacts/test_program_methods/malicious_authorization_changer.bin and b/artifacts/test_program_methods/malicious_authorization_changer.bin differ diff --git a/artifacts/test_program_methods/minter.bin b/artifacts/test_program_methods/minter.bin index 648c9e2a..407142e1 100644 Binary files a/artifacts/test_program_methods/minter.bin and b/artifacts/test_program_methods/minter.bin differ diff --git a/artifacts/test_program_methods/missing_output.bin 
b/artifacts/test_program_methods/missing_output.bin index dcaf8cd0..75c7d69e 100644 Binary files a/artifacts/test_program_methods/missing_output.bin and b/artifacts/test_program_methods/missing_output.bin differ diff --git a/artifacts/test_program_methods/modified_transfer.bin b/artifacts/test_program_methods/modified_transfer.bin index b06bcdd4..e78597c9 100644 Binary files a/artifacts/test_program_methods/modified_transfer.bin and b/artifacts/test_program_methods/modified_transfer.bin differ diff --git a/artifacts/test_program_methods/nonce_changer.bin b/artifacts/test_program_methods/nonce_changer.bin index c9434606..4e7c1f5e 100644 Binary files a/artifacts/test_program_methods/nonce_changer.bin and b/artifacts/test_program_methods/nonce_changer.bin differ diff --git a/artifacts/test_program_methods/noop.bin b/artifacts/test_program_methods/noop.bin index 42cddfab..3384a65a 100644 Binary files a/artifacts/test_program_methods/noop.bin and b/artifacts/test_program_methods/noop.bin differ diff --git a/artifacts/test_program_methods/program_owner_changer.bin b/artifacts/test_program_methods/program_owner_changer.bin index 66474082..b88ade13 100644 Binary files a/artifacts/test_program_methods/program_owner_changer.bin and b/artifacts/test_program_methods/program_owner_changer.bin differ diff --git a/artifacts/test_program_methods/simple_balance_transfer.bin b/artifacts/test_program_methods/simple_balance_transfer.bin index f2383856..8ae149e0 100644 Binary files a/artifacts/test_program_methods/simple_balance_transfer.bin and b/artifacts/test_program_methods/simple_balance_transfer.bin differ diff --git a/bedrock_client/Cargo.toml b/bedrock_client/Cargo.toml index 696174d8..2137cb74 100644 --- a/bedrock_client/Cargo.toml +++ b/bedrock_client/Cargo.toml @@ -4,6 +4,9 @@ version = "0.1.0" edition = "2024" license = { workspace = true } +[lints] +workspace = true + [dependencies] common.workspace = true diff --git a/bedrock_client/src/lib.rs b/bedrock_client/src/lib.rs 
index 534a0cf6..fdd14f72 100644 --- a/bedrock_client/src/lib.rs +++ b/bedrock_client/src/lib.rs @@ -2,7 +2,7 @@ use std::time::Duration; use anyhow::{Context as _, Result}; use common::config::BasicAuth; -use futures::{Stream, TryFutureExt}; +use futures::{Stream, TryFutureExt as _}; #[expect(clippy::single_component_path_imports, reason = "Satisfy machete")] use humantime_serde; use log::{info, warn}; @@ -14,7 +14,7 @@ use reqwest::{Client, Url}; use serde::{Deserialize, Serialize}; use tokio_retry::Retry; -/// Fibonacci backoff retry strategy configuration +/// Fibonacci backoff retry strategy configuration. #[derive(Debug, Copy, Clone, Serialize, Deserialize)] pub struct BackoffConfig { #[serde(with = "humantime_serde")] @@ -31,9 +31,9 @@ impl Default for BackoffConfig { } } -// Simple wrapper -// maybe extend in the future for our purposes -// `Clone` is cheap because `CommonHttpClient` is internally reference counted (`Arc`). +/// Simple wrapper +/// maybe extend in the future for our purposes +/// `Clone` is cheap because `CommonHttpClient` is internally reference counted (`Arc`). #[derive(Clone)] pub struct BedrockClient { http_client: CommonHttpClient, @@ -62,10 +62,22 @@ impl BedrockClient { }) } - pub async fn post_transaction(&self, tx: SignedMantleTx) -> Result<(), Error> { - Retry::spawn(self.backoff_strategy(), || { - self.http_client + pub async fn post_transaction(&self, tx: SignedMantleTx) -> Result, Error> { + Retry::spawn(self.backoff_strategy(), || async { + match self + .http_client .post_transaction(self.node_url.clone(), tx.clone()) + .await + { + Ok(()) => Ok(Ok(())), + Err(err) => match err { + // Retry arm. + // Retrying only reqwest errors: mainly connected to http. 
+ Error::Request(_) => Err(err), + // Returning non-retryable error + Error::Server(_) | Error::Client(_) | Error::Url(_) => Ok(Err(err)), + }, + } }) .await } @@ -96,9 +108,14 @@ impl BedrockClient { } fn backoff_strategy(&self) -> impl Iterator { - tokio_retry::strategy::FibonacciBackoff::from_millis( - self.backoff.start_delay.as_millis() as u64 - ) - .take(self.backoff.max_retries) + let start_delay_millis = self + .backoff + .start_delay + .as_millis() + .try_into() + .expect("Start delay must be less than u64::MAX milliseconds"); + + tokio_retry::strategy::FibonacciBackoff::from_millis(start_delay_millis) + .take(self.backoff.max_retries) } } diff --git a/clippy.toml b/clippy.toml new file mode 100644 index 00000000..aa90e0d6 --- /dev/null +++ b/clippy.toml @@ -0,0 +1,54 @@ +module-item-order-groupings = [ + [ + "use", + [ + "use", + ], + ], + [ + "modules", + [ + "extern_crate", + "mod", + "foreign_mod", + ], + ], + [ + "macros", + [ + "macro", + ], + ], + [ + "global_asm", + [ + "global_asm", + ], + ], + [ + "UPPER_SNAKE_CASE", + [ + "static", + "const", + ], + ], + [ + "PascalCase", + [ + "ty_alias", + "enum", + "struct", + "union", + "trait", + "trait_alias", + "impl", + ], + ], + [ + "lower_snake_case", + [ + "fn", + ], + ], +] +source-item-ordering = ["module"] diff --git a/common/Cargo.toml b/common/Cargo.toml index bf4a0032..8aafed40 100644 --- a/common/Cargo.toml +++ b/common/Cargo.toml @@ -4,6 +4,9 @@ version = "0.1.0" edition = "2024" license = { workspace = true } +[lints] +workspace = true + [dependencies] nssa.workspace = true nssa_core.workspace = true diff --git a/common/src/block.rs b/common/src/block.rs index 0343435b..8ef2eb0c 100644 --- a/common/src/block.rs +++ b/common/src/block.rs @@ -1,7 +1,7 @@ use borsh::{BorshDeserialize, BorshSerialize}; use nssa::AccountId; use serde::{Deserialize, Serialize}; -use sha2::{Digest, Sha256, digest::FixedOutput}; +use sha2::{Digest as _, Sha256, digest::FixedOutput as _}; use crate::{HashType, 
transaction::NSSATransaction}; @@ -20,7 +20,7 @@ pub struct BlockMeta { #[derive(Debug, Clone)] /// Our own hasher. /// Currently it is SHA256 hasher wrapper. May change in a future. -pub struct OwnHasher {} +pub struct OwnHasher; impl OwnHasher { fn hash(data: &[u8]) -> HashType { @@ -69,6 +69,7 @@ pub struct HashableBlockData { } impl HashableBlockData { + #[must_use] pub fn into_pending_block( self, signing_key: &nssa::PrivateKey, @@ -93,6 +94,7 @@ impl HashableBlockData { } } + #[must_use] pub fn block_hash(&self) -> BlockHash { OwnHasher::hash(&borsh::to_vec(&self).unwrap()) } @@ -109,14 +111,14 @@ impl From for HashableBlockData { } } -/// Helper struct for account (de-)serialization +/// Helper struct for account (de-)serialization. #[derive(Debug, Clone, Serialize, Deserialize)] pub struct AccountInitialData { pub account_id: AccountId, pub balance: u128, } -/// Helper struct to (de-)serialize initial commitments +/// Helper struct to (de-)serialize initial commitments. #[derive(Debug, Clone, Serialize, Deserialize)] pub struct CommitmentsInitialData { pub npk: nssa_core::NullifierPublicKey, @@ -128,7 +130,7 @@ mod tests { use crate::{HashType, block::HashableBlockData, test_utils}; #[test] - fn test_encoding_roundtrip() { + fn encoding_roundtrip() { let transactions = vec![test_utils::produce_dummy_empty_transaction()]; let block = test_utils::produce_dummy_block(1, Some(HashType([1; 32])), transactions); let hashable = HashableBlockData::from(block); diff --git a/common/src/config.rs b/common/src/config.rs index 3850f08c..c076f699 100644 --- a/common/src/config.rs +++ b/common/src/config.rs @@ -42,14 +42,14 @@ impl FromStr for BasicAuth { })?; Ok(Self { - username: username.to_string(), - password: password.map(|p| p.to_string()), + username: username.to_owned(), + password: password.map(std::string::ToString::to_string), }) } } impl From for BasicAuthCredentials { fn from(value: BasicAuth) -> Self { - BasicAuthCredentials::new(value.username, 
value.password) + Self::new(value.username, value.password) } } diff --git a/common/src/error.rs b/common/src/error.rs index 3301bc87..1e348a32 100644 --- a/common/src/error.rs +++ b/common/src/error.rs @@ -22,14 +22,14 @@ pub enum SequencerClientError { impl From for SequencerClientError { fn from(value: SequencerRpcError) -> Self { - SequencerClientError::InternalError(value) + Self::InternalError(value) } } #[derive(Debug, thiserror::Error)] pub enum ExecutionFailureKind { - #[error("Failed to get account data from sequencer")] - SequencerError, + #[error("Failed to get data from sequencer")] + SequencerError(#[source] anyhow::Error), #[error("Inputs amounts does not match outputs")] AmountMismatchError, #[error("Accounts key not found")] diff --git a/common/src/lib.rs b/common/src/lib.rs index 11fa4c3d..da07a602 100644 --- a/common/src/lib.rs +++ b/common/src/lib.rs @@ -17,7 +17,6 @@ pub mod test_utils; pub const PINATA_BASE58: &str = "EfQhKQAkX2FJiwNii2WFQsGndjvF1Mzd7RuVe7QdPLw7"; #[derive( - Debug, Default, Copy, Clone, @@ -37,13 +36,19 @@ impl Display for HashType { } } +impl std::fmt::Debug for HashType { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", hex::encode(self.0)) + } +} + impl FromStr for HashType { type Err = hex::FromHexError; fn from_str(s: &str) -> Result { - let mut bytes = [0u8; 32]; + let mut bytes = [0_u8; 32]; hex::decode_to_slice(s, &mut bytes)?; - Ok(HashType(bytes)) + Ok(Self(bytes)) } } @@ -61,7 +66,7 @@ impl From for [u8; 32] { impl From<[u8; 32]> for HashType { fn from(bytes: [u8; 32]) -> Self { - HashType(bytes) + Self(bytes) } } @@ -69,7 +74,7 @@ impl TryFrom> for HashType { type Error = <[u8; 32] as TryFrom>>::Error; fn try_from(value: Vec) -> Result { - Ok(HashType(value.try_into()?)) + Ok(Self(value.try_into()?)) } } @@ -85,7 +90,7 @@ mod tests { #[test] fn serialization_roundtrip() { - let original = HashType([1u8; 32]); + let original = HashType([1_u8; 32]); let serialized = 
original.to_string(); let deserialized = HashType::from_str(&serialized).unwrap(); assert_eq!(original, deserialized); diff --git a/common/src/rpc_primitives/errors.rs b/common/src/rpc_primitives/errors.rs index 1f799815..28ec0b63 100644 --- a/common/src/rpc_primitives/errors.rs +++ b/common/src/rpc_primitives/errors.rs @@ -5,25 +5,25 @@ use serde_json::{Value, to_value}; #[derive(serde::Serialize)] pub struct RpcParseError(pub String); -#[allow(clippy::too_long_first_doc_paragraph)] -/// This struct may be returned from JSON RPC server in case of error +/// This struct may be returned from JSON RPC server in case of error. +/// /// It is expected that that this struct has impls From<_> all other RPC errors -/// like [`RpcBlockError`](crate::types::blocks::RpcBlockError) -#[derive(Debug, serde::Serialize, serde::Deserialize, Clone, PartialEq)] +/// like [`RpcBlockError`](crate::types::blocks::RpcBlockError). +#[derive(Debug, serde::Serialize, serde::Deserialize, Clone, PartialEq, Eq)] #[serde(deny_unknown_fields)] pub struct RpcError { #[serde(flatten)] pub error_struct: Option, - /// Deprecated please use the `error_struct` instead + /// Deprecated please use the `error_struct` instead. pub code: i64, - /// Deprecated please use the `error_struct` instead + /// Deprecated please use the `error_struct` instead. pub message: String, - /// Deprecated please use the `error_struct` instead + /// Deprecated please use the `error_struct` instead. 
#[serde(skip_serializing_if = "Option::is_none")] pub data: Option, } -#[derive(Debug, serde::Serialize, serde::Deserialize, Clone, PartialEq)] +#[derive(Debug, serde::Serialize, serde::Deserialize, Clone, PartialEq, Eq)] #[serde(tag = "name", content = "cause", rename_all = "SCREAMING_SNAKE_CASE")] pub enum RpcErrorKind { RequestValidationError(RpcRequestValidationErrorKind), @@ -31,14 +31,14 @@ pub enum RpcErrorKind { InternalError(Value), } -#[derive(Debug, serde::Serialize, serde::Deserialize, Clone, PartialEq)] +#[derive(Debug, serde::Serialize, serde::Deserialize, Clone, PartialEq, Eq)] #[serde(tag = "name", content = "info", rename_all = "SCREAMING_SNAKE_CASE")] pub enum RpcRequestValidationErrorKind { MethodNotFound { method_name: String }, ParseError { error_message: String }, } -/// A general Server Error +/// A general Server Error. #[derive(serde::Serialize, serde::Deserialize, Debug, PartialEq, Eq, Clone)] pub enum ServerError { Timeout, @@ -49,8 +49,9 @@ impl RpcError { /// A generic constructor. /// /// Mostly for completeness, doesn't do anything but filling in the corresponding fields. - pub fn new(code: i64, message: String, data: Option) -> Self { - RpcError { + #[must_use] + pub const fn new(code: i64, message: String, data: Option) -> Self { + Self { code, message, data, @@ -69,12 +70,12 @@ impl RpcError { ))); } }; - RpcError::new(-32_602, "Invalid params".to_owned(), Some(value)) + Self::new(-32_602, "Invalid params".to_owned(), Some(value)) } /// Create a server error. pub fn server_error(e: Option) -> Self { - RpcError::new( + Self::new( -32_000, "Server error".to_owned(), e.map(|v| to_value(v).expect("Must be representable in JSON")), @@ -82,8 +83,9 @@ impl RpcError { } /// Create a parse error. 
+ #[must_use] pub fn parse_error(e: String) -> Self { - RpcError { + Self { code: -32_700, message: "Parse error".to_owned(), data: Some(Value::String(e.clone())), @@ -93,12 +95,14 @@ impl RpcError { } } + #[must_use] pub fn serialization_error(e: &str) -> Self { - RpcError::new_internal_error(Some(Value::String(e.to_owned())), e) + Self::new_internal_error(Some(Value::String(e.to_owned())), e) } /// Helper method to define extract `INTERNAL_ERROR` in separate `RpcErrorKind` - /// Returns `HANDLER_ERROR` if the error is not internal one + /// Returns `HANDLER_ERROR` if the error is not internal one. + #[must_use] pub fn new_internal_or_handler_error(error_data: Option, error_struct: Value) -> Self { if error_struct["name"] == "INTERNAL_ERROR" { let error_message = match error_struct["info"].get("error_message") { @@ -111,8 +115,9 @@ impl RpcError { } } + #[must_use] pub fn new_internal_error(error_data: Option, info: &str) -> Self { - RpcError { + Self { code: -32_000, message: "Server error".to_owned(), data: error_data, @@ -124,7 +129,7 @@ impl RpcError { } fn new_handler_error(error_data: Option, error_struct: Value) -> Self { - RpcError { + Self { code: -32_000, message: "Server error".to_owned(), data: error_data, @@ -133,8 +138,9 @@ impl RpcError { } /// Create a method not found error. + #[must_use] pub fn method_not_found(method: String) -> Self { - RpcError { + Self { code: -32_601, message: "Method not found".to_owned(), data: Some(Value::String(method.clone())), @@ -161,6 +167,7 @@ impl From for RpcError { impl From for RpcError { fn from(_: std::convert::Infallible) -> Self { + // SAFETY: Infallible error can never be constructed, so this code can never be reached. 
unsafe { core::hint::unreachable_unchecked() } } } @@ -168,20 +175,20 @@ impl From for RpcError { impl fmt::Display for ServerError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { - ServerError::Timeout => write!(f, "ServerError: Timeout"), - ServerError::Closed => write!(f, "ServerError: Closed"), + Self::Timeout => write!(f, "ServerError: Timeout"), + Self::Closed => write!(f, "ServerError: Closed"), } } } impl From for RpcError { - fn from(e: ServerError) -> RpcError { + fn from(e: ServerError) -> Self { let error_data = match to_value(&e) { Ok(value) => value, Err(_err) => { - return RpcError::new_internal_error(None, "Failed to serialize ServerError"); + return Self::new_internal_error(None, "Failed to serialize ServerError"); } }; - RpcError::new_internal_error(Some(error_data), e.to_string().as_str()) + Self::new_internal_error(Some(error_data), e.to_string().as_str()) } } diff --git a/common/src/rpc_primitives/message.rs b/common/src/rpc_primitives/message.rs index 98867448..de7f132e 100644 --- a/common/src/rpc_primitives/message.rs +++ b/common/src/rpc_primitives/message.rs @@ -13,12 +13,14 @@ use std::fmt::{Formatter, Result as FmtResult}; use serde::{ de::{Deserializer, Error, Unexpected, Visitor}, - ser::{SerializeStruct, Serializer}, + ser::{SerializeStruct as _, Serializer}, }; use serde_json::{Result as JsonResult, Value}; use super::errors::RpcError; +pub type Parsed = Result; + #[derive(Debug, Clone, PartialEq, Eq, Hash)] struct Version; @@ -29,10 +31,13 @@ impl serde::Serialize for Version { } impl<'de> serde::Deserialize<'de> for Version { + #[expect( + clippy::renamed_function_params, + reason = "More readable than original serde parameter names" + )] fn deserialize>(deserializer: D) -> Result { struct VersionVisitor; - #[allow(clippy::needless_lifetimes)] - impl<'de> Visitor<'de> for VersionVisitor { + impl Visitor<'_> for VersionVisitor { type Value = Version; fn expecting(&self, formatter: &mut Formatter<'_>) -> 
FmtResult { @@ -51,8 +56,12 @@ impl<'de> serde::Deserialize<'de> for Version { } /// An RPC request. -#[derive(Debug, serde::Serialize, serde::Deserialize, Clone, PartialEq)] +#[derive(Debug, serde::Serialize, serde::Deserialize, Clone, PartialEq, Eq)] #[serde(deny_unknown_fields)] +#[expect( + clippy::partial_pub_fields, + reason = "We don't want to allow access to the version, but the others are public for ease of use" +)] pub struct Request { jsonrpc: Version, pub method: String, @@ -62,6 +71,7 @@ pub struct Request { } impl Request { + #[must_use] pub fn from_payload_version_2_0(method: String, payload: serde_json::Value) -> Self { Self { jsonrpc: Version, @@ -75,6 +85,7 @@ impl Request { /// Answer the request with a (positive) reply. /// /// The ID is taken from the request. + #[must_use] pub fn reply(&self, reply: Value) -> Message { Message::Response(Response { jsonrpc: Version, @@ -84,6 +95,7 @@ impl Request { } /// Answer the request with an error. + #[must_use] pub fn error(&self, error: RpcError) -> Message { Message::Response(Response { jsonrpc: Version, @@ -96,7 +108,11 @@ impl Request { /// A response to an RPC. /// /// It is created by the methods on [Request](struct.Request.html). 
-#[derive(Debug, Clone, PartialEq)] +#[expect( + clippy::partial_pub_fields, + reason = "We don't want to allow access to the version, but the others are public for ease of use" +)] +#[derive(Debug, Clone, PartialEq, Eq)] pub struct Response { jsonrpc: Version, pub result: Result, @@ -107,30 +123,22 @@ impl serde::Serialize for Response { fn serialize(&self, serializer: S) -> Result { let mut sub = serializer.serialize_struct("Response", 3)?; sub.serialize_field("jsonrpc", &self.jsonrpc)?; - match self.result { - Ok(ref value) => sub.serialize_field("result", value), - Err(ref err) => sub.serialize_field("error", err), + match &self.result { + Ok(value) => sub.serialize_field("result", value), + Err(err) => sub.serialize_field("error", err), }?; sub.serialize_field("id", &self.id)?; sub.end() } } -/// Deserializer for `Option` that produces `Some(Value::Null)`. -/// -/// The usual one produces None in that case. But we need to know the difference between -/// `{x: null}` and `{}`. -fn some_value<'de, D: Deserializer<'de>>(deserializer: D) -> Result, D::Error> { - serde::Deserialize::deserialize(deserializer).map(Some) -} - /// A helper trick for deserialization. #[derive(serde::Deserialize)] #[serde(deny_unknown_fields)] struct WireResponse { // It is actually used to eat and sanity check the deserialized text - #[allow(dead_code)] - jsonrpc: Version, + #[serde(rename = "jsonrpc")] + _jsonrpc: Version, // Make sure we accept null as Some(Value::Null), instead of going to None #[serde(default, deserialize_with = "some_value")] result: Option, @@ -152,7 +160,7 @@ impl<'de> serde::Deserialize<'de> for Response { return Err(err); } }; - Ok(Response { + Ok(Self { jsonrpc: Version, result, id: wr.id, @@ -161,7 +169,11 @@ impl<'de> serde::Deserialize<'de> for Response { } /// A notification (doesn't expect an answer). 
-#[derive(Debug, serde::Serialize, serde::Deserialize, Clone, PartialEq)] +#[expect( + clippy::partial_pub_fields, + reason = "We don't want to allow access to the version, but the others are public for ease of use" +)] +#[derive(Debug, serde::Serialize, serde::Deserialize, Clone, PartialEq, Eq)] #[serde(deny_unknown_fields)] pub struct Notification { jsonrpc: Version, @@ -198,7 +210,7 @@ pub enum Message { /// message. /// /// This variant has no direct constructor and is expected to be constructed manually. - Batch(Vec), + Batch(Vec), /// An unmatched sub entry in a `Batch`. /// /// When there's a `Batch` and an element doesn't comform to the JSONRPC 2.0 format, that one @@ -212,9 +224,10 @@ impl Message { /// A constructor for a request. /// /// The ID is auto-set to dontcare. + #[must_use] pub fn request(method: String, params: Value) -> Self { let id = Value::from("dontcare"); - Message::Request(Request { + Self::Request(Request { jsonrpc: Version, method, params, @@ -223,8 +236,9 @@ impl Message { } /// Create a top-level error (without an ID). - pub fn error(error: RpcError) -> Self { - Message::Response(Response { + #[must_use] + pub const fn error(error: RpcError) -> Self { + Self::Response(Response { jsonrpc: Version, result: Err(error), id: Value::Null, @@ -232,8 +246,9 @@ impl Message { } /// A constructor for a notification. - pub fn notification(method: String, params: Value) -> Self { - Message::Notification(Notification { + #[must_use] + pub const fn notification(method: String, params: Value) -> Self { + Self::Notification(Notification { jsonrpc: Version, method, params, @@ -241,8 +256,9 @@ impl Message { } /// A constructor for a response. - pub fn response(id: Value, result: Result) -> Self { - Message::Response(Response { + #[must_use] + pub const fn response(id: Value, result: Result) -> Self { + Self::Response(Response { jsonrpc: Version, result, id, @@ -250,18 +266,33 @@ impl Message { } /// Returns id or Null if there is no id. 
+ #[must_use] pub fn id(&self) -> Value { match self { - Message::Request(req) => req.id.clone(), - _ => Value::Null, + Self::Request(req) => req.id.clone(), + Self::Response(response) => response.id.clone(), + Self::Notification(_) | Self::Batch(_) | Self::UnmatchedSub(_) => Value::Null, } } } +impl From for String { + fn from(val: Message) -> Self { + ::serde_json::ser::to_string(&val).expect("message serialization to json should not fail") + } +} + +impl From for Vec { + fn from(val: Message) -> Self { + ::serde_json::ser::to_vec(&val) + .expect("message serialization to json bytes should not fail") + } +} + /// A broken message. /// /// Protocol-level errors. -#[derive(Debug, Clone, PartialEq, serde::Deserialize)] +#[derive(Debug, Clone, PartialEq, Eq, serde::Deserialize)] #[serde(untagged)] pub enum Broken { /// It was valid JSON, but doesn't match the form of a JSONRPC 2.0 message. @@ -276,12 +307,13 @@ impl Broken { /// /// The error message for these things are specified in the RFC, so this just creates an error /// with the right values. + #[must_use] pub fn reply(&self) -> Message { - match *self { - Broken::Unmatched(_) => Message::error(RpcError::parse_error( + match self { + Self::Unmatched(_) => Message::error(RpcError::parse_error( "JSON RPC Request format was expected".to_owned(), )), - Broken::SyntaxError(ref e) => Message::error(RpcError::parse_error(e.clone())), + Self::SyntaxError(e) => Message::error(RpcError::parse_error(e.clone())), } } } @@ -303,8 +335,6 @@ pub fn decoded_to_parsed(res: JsonResult) -> Parsed { } } -pub type Parsed = Result; - /// Read a [Message](enum.Message.html) from a slice. /// /// Invalid JSON or JSONRPC messages are reported as [Broken](enum.Broken.html). 
@@ -319,16 +349,12 @@ pub fn from_str(s: &str) -> Parsed { from_slice(s.as_bytes()) } -impl From for String { - fn from(val: Message) -> Self { - ::serde_json::ser::to_string(&val).unwrap() - } -} - -impl From for Vec { - fn from(val: Message) -> Self { - ::serde_json::ser::to_vec(&val).unwrap() - } +/// Deserializer for `Option` that produces `Some(Value::Null)`. +/// +/// The usual one produces None in that case. But we need to know the difference between +/// `{x: null}` and `{}`. +fn some_value<'de, D: Deserializer<'de>>(deserializer: D) -> Result, D::Error> { + serde::Deserialize::deserialize(deserializer).map(Some) } #[cfg(test)] @@ -337,13 +363,12 @@ mod tests { use super::*; - /// Test serialization and deserialization of the Message + /// Test serialization and deserialization of the Message. /// /// We first deserialize it from a string. That way we check deserialization works. /// But since serialization doesn't have to produce the exact same result (order, spaces, …), /// we then serialize and deserialize the thing again and check it matches. #[test] - #[allow(clippy::too_many_lines)] fn message_serde() { // A helper for running one message test fn one(input: &str, expected: &Message) { @@ -463,11 +488,10 @@ mod tests { /// /// Check that the given JSON string parses, but is not recognized as a valid RPC message. /// - /// Test things that are almost but not entirely JSONRPC are rejected + /// Test things that are almost but not entirely JSONRPC are rejected. /// /// The reject is done by returning it as Unmatched. 
#[test] - #[allow(clippy::panic)] fn broken() { // A helper with one test fn one(input: &str) { @@ -491,19 +515,18 @@ mod tests { // Something completely different one(r#"{"x": [1, 2, 3]}"#); - match from_str(r#"{]"#) { + match from_str("{]") { Err(Broken::SyntaxError(_)) => (), other => panic!("Something unexpected: {other:?}"), - }; + } } - /// Test some non-trivial aspects of the constructors + /// Test some non-trivial aspects of the constructors. /// /// This doesn't have a full coverage, because there's not much to actually test there. /// Most of it is related to the ids. #[test] - #[allow(clippy::panic)] - #[ignore] + #[ignore = "Not a full coverage test"] fn constructors() { let msg1 = Message::request("call".to_owned(), json!([1, 2, 3])); let msg2 = Message::request("call".to_owned(), json!([1, 2, 3])); @@ -520,9 +543,9 @@ mod tests { }; let id1 = req1.id.clone(); // When we answer a message, we get the same ID - if let Message::Response(ref resp) = req1.reply(json!([1, 2, 3])) { + if let Message::Response(resp) = req1.reply(json!([1, 2, 3])) { assert_eq!( - *resp, + resp, Response { jsonrpc: Version, result: Ok(json!([1, 2, 3])), @@ -534,11 +557,9 @@ mod tests { } let id2 = req2.id.clone(); // The same with an error - if let Message::Response(ref resp) = - req2.error(RpcError::new(42, "Wrong!".to_owned(), None)) - { + if let Message::Response(resp) = req2.error(RpcError::new(42, "Wrong!".to_owned(), None)) { assert_eq!( - *resp, + resp, Response { jsonrpc: Version, result: Err(RpcError::new(42, "Wrong!".to_owned(), None)), @@ -549,11 +570,11 @@ mod tests { panic!("Not a response"); } // When we have unmatched, we generate a top-level error with Null id. 
- if let Message::Response(ref resp) = + if let Message::Response(resp) = Message::error(RpcError::new(43, "Also wrong!".to_owned(), None)) { assert_eq!( - *resp, + resp, Response { jsonrpc: Version, result: Err(RpcError::new(43, "Also wrong!".to_owned(), None)), diff --git a/common/src/rpc_primitives/mod.rs b/common/src/rpc_primitives/mod.rs index be5182e1..cd643712 100644 --- a/common/src/rpc_primitives/mod.rs +++ b/common/src/rpc_primitives/mod.rs @@ -30,7 +30,7 @@ pub struct RpcConfig { impl Default for RpcConfig { fn default() -> Self { - RpcConfig { + Self { addr: "0.0.0.0:3040".to_owned(), cors_allowed_origins: vec!["*".to_owned()], limits_config: RpcLimitsConfig::default(), @@ -39,15 +39,17 @@ impl Default for RpcConfig { } impl RpcConfig { + #[must_use] pub fn new(addr: &str) -> Self { - RpcConfig { + Self { addr: addr.to_owned(), ..Default::default() } } + #[must_use] pub fn with_port(port: u16) -> Self { - RpcConfig { + Self { addr: format!("0.0.0.0:{port}"), ..Default::default() } diff --git a/common/src/rpc_primitives/parser.rs b/common/src/rpc_primitives/parser.rs index 983e2f6c..0b918c94 100644 --- a/common/src/rpc_primitives/parser.rs +++ b/common/src/rpc_primitives/parser.rs @@ -3,18 +3,6 @@ use serde_json::Value; use super::errors::RpcParseError; -pub trait RpcRequest: Sized { - fn parse(value: Option) -> Result; -} - -pub fn parse_params(value: Option) -> Result { - if let Some(value) = value { - serde_json::from_value(value) - .map_err(|err| RpcParseError(format!("Failed parsing args: {err}"))) - } else { - Err(RpcParseError("Require at least one parameter".to_owned())) - } -} #[macro_export] macro_rules! parse_request { ($request_name:ty) => { @@ -25,3 +13,17 @@ macro_rules! 
parse_request { } }; } + +pub trait RpcRequest: Sized { + fn parse(value: Option) -> Result; +} + +pub fn parse_params(value: Option) -> Result { + value.map_or_else( + || Err(RpcParseError("Require at least one parameter".to_owned())), + |value| { + serde_json::from_value(value) + .map_err(|err| RpcParseError(format!("Failed parsing args: {err}"))) + }, + ) +} diff --git a/common/src/rpc_primitives/requests.rs b/common/src/rpc_primitives/requests.rs index 8c61ee32..fd566c89 100644 --- a/common/src/rpc_primitives/requests.rs +++ b/common/src/rpc_primitives/requests.rs @@ -11,8 +11,62 @@ use super::{ }; use crate::{HashType, parse_request}; +mod base64_deser { + use base64::{Engine as _, engine::general_purpose}; + use serde::{self, Deserialize, Deserializer, Serializer, ser::SerializeSeq as _}; + + pub mod vec { + use super::*; + + pub fn serialize(bytes_vec: &[Vec], serializer: S) -> Result + where + S: Serializer, + { + let mut seq = serializer.serialize_seq(Some(bytes_vec.len()))?; + for bytes in bytes_vec { + let s = general_purpose::STANDARD.encode(bytes); + seq.serialize_element(&s)?; + } + seq.end() + } + + pub fn deserialize<'de, D>(deserializer: D) -> Result>, D::Error> + where + D: Deserializer<'de>, + { + let base64_strings: Vec = Deserialize::deserialize(deserializer)?; + base64_strings + .into_iter() + .map(|s| { + general_purpose::STANDARD + .decode(&s) + .map_err(serde::de::Error::custom) + }) + .collect() + } + } + + pub fn serialize(bytes: &[u8], serializer: S) -> Result + where + S: Serializer, + { + let base64_string = general_purpose::STANDARD.encode(bytes); + serializer.serialize_str(&base64_string) + } + + pub fn deserialize<'de, D>(deserializer: D) -> Result, D::Error> + where + D: Deserializer<'de>, + { + let base64_string: String = Deserialize::deserialize(deserializer)?; + general_purpose::STANDARD + .decode(&base64_string) + .map_err(serde::de::Error::custom) + } +} + #[derive(Serialize, Deserialize, Debug)] -pub struct HelloRequest {} 
+pub struct HelloRequest; #[derive(Serialize, Deserialize, Debug)] pub struct RegisterAccountRequest { @@ -30,7 +84,7 @@ pub struct GetBlockDataRequest { pub block_id: u64, } -/// Get a range of blocks from `start_block_id` to `end_block_id` (inclusive) +/// Get a range of blocks from `start_block_id` to `end_block_id` (inclusive). #[derive(Serialize, Deserialize, Debug)] pub struct GetBlockRangeDataRequest { pub start_block_id: u64, @@ -38,13 +92,13 @@ pub struct GetBlockRangeDataRequest { } #[derive(Serialize, Deserialize, Debug)] -pub struct GetGenesisIdRequest {} +pub struct GetGenesisIdRequest; #[derive(Serialize, Deserialize, Debug)] -pub struct GetLastBlockRequest {} +pub struct GetLastBlockRequest; #[derive(Serialize, Deserialize, Debug)] -pub struct GetInitialTestnetAccountsRequest {} +pub struct GetInitialTestnetAccountsRequest; #[derive(Serialize, Deserialize, Debug)] pub struct GetAccountBalanceRequest { @@ -72,7 +126,7 @@ pub struct GetProofForCommitmentRequest { } #[derive(Serialize, Deserialize, Debug)] -pub struct GetProgramIdsRequest {} +pub struct GetProgramIdsRequest; parse_request!(HelloRequest); parse_request!(RegisterAccountRequest); @@ -117,60 +171,6 @@ pub struct GetBlockRangeDataResponse { pub blocks: Vec>, } -mod base64_deser { - use base64::{Engine as _, engine::general_purpose}; - use serde::{self, Deserialize, Deserializer, Serializer, ser::SerializeSeq as _}; - - pub fn serialize(bytes: &[u8], serializer: S) -> Result - where - S: Serializer, - { - let base64_string = general_purpose::STANDARD.encode(bytes); - serializer.serialize_str(&base64_string) - } - - pub fn deserialize<'de, D>(deserializer: D) -> Result, D::Error> - where - D: Deserializer<'de>, - { - let base64_string: String = Deserialize::deserialize(deserializer)?; - general_purpose::STANDARD - .decode(&base64_string) - .map_err(serde::de::Error::custom) - } - - pub mod vec { - use super::*; - - pub fn serialize(bytes_vec: &[Vec], serializer: S) -> Result - where - S: 
Serializer, - { - let mut seq = serializer.serialize_seq(Some(bytes_vec.len()))?; - for bytes in bytes_vec { - let s = general_purpose::STANDARD.encode(bytes); - seq.serialize_element(&s)?; - } - seq.end() - } - - pub fn deserialize<'de, D>(deserializer: D) -> Result>, D::Error> - where - D: Deserializer<'de>, - { - let base64_strings: Vec = Deserialize::deserialize(deserializer)?; - base64_strings - .into_iter() - .map(|s| { - general_purpose::STANDARD - .decode(&s) - .map_err(serde::de::Error::custom) - }) - .collect() - } - } -} - #[derive(Serialize, Deserialize, Debug)] pub struct GetGenesisIdResponse { pub genesis_id: u64, @@ -213,7 +213,7 @@ pub struct GetProgramIdsResponse { #[derive(Debug, Serialize, Deserialize, Clone)] pub struct GetInitialTestnetAccountsResponse { - /// Hex encoded account id + /// Hex encoded account id. pub account_id: String, pub balance: u64, } diff --git a/common/src/sequencer_client.rs b/common/src/sequencer_client.rs index f847b865..d52e4585 100644 --- a/common/src/sequencer_client.rs +++ b/common/src/sequencer_client.rs @@ -30,6 +30,15 @@ use crate::{ transaction::NSSATransaction, }; +#[derive(Debug, Clone, Deserialize)] +struct SequencerRpcResponse { + #[serde(rename = "jsonrpc")] + _jsonrpc: String, + result: serde_json::Value, + #[serde(rename = "id")] + _id: u64, +} + #[derive(Clone)] pub struct SequencerClient { pub client: reqwest::Client, @@ -61,7 +70,7 @@ impl SequencerClient { payload: Value, ) -> Result { let request = - rpc_primitives::message::Request::from_payload_version_2_0(method.to_string(), payload); + rpc_primitives::message::Request::from_payload_version_2_0(method.to_owned(), payload); log::debug!( "Calling method {method} with payload {request:?} to sequencer at {}", @@ -86,14 +95,6 @@ impl SequencerClient { }) .await?; - #[derive(Debug, Clone, Deserialize)] - #[allow(dead_code)] - pub struct SequencerRpcResponse { - pub jsonrpc: String, - pub result: serde_json::Value, - pub id: u64, - } - if let 
Ok(response) = serde_json::from_value::(response_vall.clone()) { Ok(response.result) @@ -104,7 +105,7 @@ impl SequencerClient { } } - /// Get block data at `block_id` from sequencer + /// Get block data at `block_id` from sequencer. pub async fn get_block( &self, block_id: u64, @@ -140,7 +141,7 @@ impl SequencerClient { Ok(resp_deser) } - /// Get last known `blokc_id` from sequencer + /// Get last known `blokc_id` from sequencer. pub async fn get_last_block(&self) -> Result { let block_req = GetLastBlockRequest {}; @@ -224,7 +225,7 @@ impl SequencerClient { Ok(resp_deser) } - /// Send transaction to sequencer + /// Send transaction to sequencer. pub async fn send_tx_public( &self, transaction: nssa::PublicTransaction, @@ -244,7 +245,7 @@ impl SequencerClient { Ok(resp_deser) } - /// Send transaction to sequencer + /// Send transaction to sequencer. pub async fn send_tx_private( &self, transaction: nssa::PrivacyPreservingTransaction, @@ -264,7 +265,7 @@ impl SequencerClient { Ok(resp_deser) } - /// Get genesis id from sequencer + /// Get genesis id from sequencer. pub async fn get_genesis_id(&self) -> Result { let genesis_req = GetGenesisIdRequest {}; @@ -280,7 +281,7 @@ impl SequencerClient { Ok(resp_deser) } - /// Get initial testnet accounts from sequencer + /// Get initial testnet accounts from sequencer. pub async fn get_initial_testnet_accounts( &self, ) -> Result, SequencerClientError> { @@ -298,7 +299,7 @@ impl SequencerClient { Ok(resp_deser) } - /// Get proof for commitment + /// Get proof for commitment. pub async fn get_proof_for_commitment( &self, commitment: nssa_core::Commitment, @@ -338,7 +339,7 @@ impl SequencerClient { Ok(resp_deser) } - /// Get Ids of the programs used by the node + /// Get Ids of the programs used by the node. 
pub async fn get_program_ids( &self, ) -> Result, SequencerClientError> { diff --git a/common/src/test_utils.rs b/common/src/test_utils.rs index 09651c18..720bd2f9 100644 --- a/common/src/test_utils.rs +++ b/common/src/test_utils.rs @@ -8,19 +8,21 @@ use crate::{ // Helpers +#[must_use] pub fn sequencer_sign_key_for_testing() -> nssa::PrivateKey { nssa::PrivateKey::try_new([37; 32]).unwrap() } // Dummy producers -/// Produce dummy block with +/// Produce dummy block with. /// -/// `id` - block id, provide zero for genesis +/// `id` - block id, provide zero for genesis. /// -/// `prev_hash` - hash of previous block, provide None for genesis +/// `prev_hash` - hash of previous block, provide None for genesis. /// -/// `transactions` - vector of `EncodedTransaction` objects +/// `transactions` - vector of `EncodedTransaction` objects. +#[must_use] pub fn produce_dummy_block( id: u64, prev_hash: Option, @@ -29,13 +31,14 @@ pub fn produce_dummy_block( let block_data = HashableBlockData { block_id: id, prev_block_hash: prev_hash.unwrap_or_default(), - timestamp: id * 100, + timestamp: id.saturating_mul(100), transactions, }; block_data.into_pending_block(&sequencer_sign_key_for_testing(), [0; 32]) } +#[must_use] pub fn produce_dummy_empty_transaction() -> NSSATransaction { let program_id = nssa::program::Program::authenticated_transfer_program().id(); let account_ids = vec![]; @@ -56,15 +59,16 @@ pub fn produce_dummy_empty_transaction() -> NSSATransaction { NSSATransaction::Public(nssa_tx) } +#[must_use] pub fn create_transaction_native_token_transfer( from: AccountId, nonce: u128, to: AccountId, balance_to_move: u128, - signing_key: nssa::PrivateKey, + signing_key: &nssa::PrivateKey, ) -> NSSATransaction { let account_ids = vec![from, to]; - let nonces = vec![nonce]; + let nonces = vec![nonce.into()]; let program_id = nssa::program::Program::authenticated_transfer_program().id(); let message = nssa::public_transaction::Message::try_new( program_id, @@ -73,7 +77,7 @@ pub 
fn create_transaction_native_token_transfer( balance_to_move, ) .unwrap(); - let witness_set = nssa::public_transaction::WitnessSet::for_message(&message, &[&signing_key]); + let witness_set = nssa::public_transaction::WitnessSet::for_message(&message, &[signing_key]); let nssa_tx = nssa::PublicTransaction::new(message, witness_set); diff --git a/common/src/transaction.rs b/common/src/transaction.rs index a996250b..8fdc2074 100644 --- a/common/src/transaction.rs +++ b/common/src/transaction.rs @@ -13,19 +13,21 @@ pub enum NSSATransaction { } impl NSSATransaction { + #[must_use] pub fn hash(&self) -> HashType { HashType(match self { - NSSATransaction::Public(tx) => tx.hash(), - NSSATransaction::PrivacyPreserving(tx) => tx.hash(), - NSSATransaction::ProgramDeployment(tx) => tx.hash(), + Self::Public(tx) => tx.hash(), + Self::PrivacyPreserving(tx) => tx.hash(), + Self::ProgramDeployment(tx) => tx.hash(), }) } + #[must_use] pub fn affected_public_account_ids(&self) -> Vec { match self { - NSSATransaction::ProgramDeployment(tx) => tx.affected_public_account_ids(), - NSSATransaction::Public(tx) => tx.affected_public_account_ids(), - NSSATransaction::PrivacyPreserving(tx) => tx.affected_public_account_ids(), + Self::ProgramDeployment(tx) => tx.affected_public_account_ids(), + Self::Public(tx) => tx.affected_public_account_ids(), + Self::PrivacyPreserving(tx) => tx.affected_public_account_ids(), } } @@ -33,21 +35,21 @@ impl NSSATransaction { pub fn transaction_stateless_check(self) -> Result { // Stateless checks here match self { - NSSATransaction::Public(tx) => { + Self::Public(tx) => { if tx.witness_set().is_valid_for(tx.message()) { - Ok(NSSATransaction::Public(tx)) + Ok(Self::Public(tx)) } else { Err(TransactionMalformationError::InvalidSignature) } } - NSSATransaction::PrivacyPreserving(tx) => { + Self::PrivacyPreserving(tx) => { if tx.witness_set().signatures_are_valid_for(tx.message()) { - Ok(NSSATransaction::PrivacyPreserving(tx)) + Ok(Self::PrivacyPreserving(tx)) 
} else { Err(TransactionMalformationError::InvalidSignature) } } - NSSATransaction::ProgramDeployment(tx) => Ok(NSSATransaction::ProgramDeployment(tx)), + Self::ProgramDeployment(tx) => Ok(Self::ProgramDeployment(tx)), } } @@ -56,13 +58,9 @@ impl NSSATransaction { state: &mut V02State, ) -> Result { match &self { - NSSATransaction::Public(tx) => state.transition_from_public_transaction(tx), - NSSATransaction::PrivacyPreserving(tx) => { - state.transition_from_privacy_preserving_transaction(tx) - } - NSSATransaction::ProgramDeployment(tx) => { - state.transition_from_program_deployment_transaction(tx) - } + Self::Public(tx) => state.transition_from_public_transaction(tx), + Self::PrivacyPreserving(tx) => state.transition_from_privacy_preserving_transaction(tx), + Self::ProgramDeployment(tx) => state.transition_from_program_deployment_transaction(tx), } .inspect_err(|err| warn!("Error at transition {err:#?}"))?; @@ -97,7 +95,7 @@ pub enum TxKind { ProgramDeployment, } -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, thiserror::Error)] +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, thiserror::Error)] pub enum TransactionMalformationError { #[error("Invalid signature(-s)")] InvalidSignature, diff --git a/completions/README.md b/completions/README.md index 9da5a093..d274774c 100644 --- a/completions/README.md +++ b/completions/README.md @@ -118,9 +118,80 @@ wallet account get --account-id # Shows: Public/... Private/... ``` +## Bash + +Works with bash 4+. The `bash-completion` package is required for auto-sourcing from +`/etc/bash_completion.d/`; without it, source the file directly from `~/.bashrc` instead. 
+ +### Features + +- Full completion for all wallet subcommands +- Contextual option completion for each command +- Dynamic account ID completion via `wallet account list` +- Falls back to `Public/` / `Private/` prefixes when no accounts are available + +Note that only accounts created by the user auto-complete (same filtering as zsh — see above). + +### Installation + +#### Option A — source directly from `~/.bashrc` (works everywhere) + +```sh +echo "source $(pwd)/completions/bash/wallet" >> ~/.bashrc +exec bash +``` + +#### Option B — system-wide via `bash-completion` + +1. Copy the file: + + ```sh + cp ./bash/wallet /etc/bash_completion.d/wallet + ``` + +2. Ensure `bash-completion` is initialised in every interactive shell. On many Linux + distributions (e.g. Fedora) it is only sourced for **login** shells via + `/etc/profile.d/bash_completion.sh`. For non-login shells (e.g. a bash session started + inside zsh), add this to `~/.bashrc`: + + ```sh + [[ -f /usr/share/bash-completion/bash_completion ]] && source /usr/share/bash-completion/bash_completion + ``` + +3. Reload your shell: + + ```sh + exec bash + ``` + +### Requirements + +The completion script calls `wallet account list` to dynamically fetch account IDs. Ensure the `wallet` command is in your `$PATH`. + +### Usage + +```sh +# Main commands +wallet + +# Account subcommands +wallet account + +# Options for auth-transfer send +wallet auth-transfer send -- + +# Account types when creating +wallet account new +# Shows: public private + +# Account IDs (fetched dynamically) +wallet account get --account-id +# Shows: Public/... Private/... +``` + ## Troubleshooting -### Completions not appearing +### Zsh completions not appearing 1. Check that `compinit` is called in your `.zshrc` 2. 
Rebuild the completion cache: diff --git a/completions/bash/wallet b/completions/bash/wallet new file mode 100644 index 00000000..57dd3636 --- /dev/null +++ b/completions/bash/wallet @@ -0,0 +1,382 @@ +#!/usr/bin/env bash + +# Bash completion script for the wallet CLI +# See instructions in ../README.md + +# Helper function to complete account IDs +# Uses `wallet account list` to get available accounts +# Only includes accounts with /N prefix (where N is a number) +_wallet_complete_account_id() { + local cur="$1" + local accounts + + if command -v wallet &>/dev/null; then + accounts=$(wallet account list 2>/dev/null | grep '^/[0-9]' | awk '{print $2}' | tr -d ',') + fi + + if [[ -n "$accounts" ]]; then + COMPREPLY=($(compgen -W "$accounts" -- "$cur")) + else + COMPREPLY=($(compgen -W "Public/ Private/" -- "$cur")) + compopt -o nospace 2>/dev/null + fi +} + +_wallet() { + local cur prev words cword + _init_completion 2>/dev/null || { + COMPREPLY=() + cur="${COMP_WORDS[COMP_CWORD]}" + prev="${COMP_WORDS[COMP_CWORD-1]}" + words=("${COMP_WORDS[@]}") + cword=$COMP_CWORD + } + + local commands="auth-transfer chain-info account pinata token amm check-health config restore-keys deploy-program help" + + # Find the main command and subcommand by scanning words before the cursor. + # Global options that take a value are skipped along with their argument. 
+ local cmd="" subcmd="" + local cmd_idx=0 subcmd_idx=0 + local i + for ((i = 1; i < cword; i++)); do + local w="${words[$i]}" + case "$w" in + --auth) + ((i++)) # skip the auth value + ;; + -c | --continuous-run) + # boolean flag, no value + ;; + -*) + # unrecognised option, skip + ;; + *) + if [[ -z "$cmd" ]]; then + cmd="$w" + cmd_idx=$i + elif [[ -z "$subcmd" ]]; then + subcmd="$w" + subcmd_idx=$i + fi + ;; + esac + done + + local config_keys="override_rust_log sequencer_addr seq_poll_timeout seq_tx_poll_max_blocks seq_poll_max_retries seq_block_poll_max_amount initial_accounts basic_auth" + + case "$cmd" in + "") + # Completing the main command or a global option + if [[ "$prev" == "--auth" ]]; then + return # completing the --auth value; no suggestions + fi + case "$cur" in + -*) + COMPREPLY=($(compgen -W "-c --continuous-run --auth" -- "$cur")) + ;; + *) + COMPREPLY=($(compgen -W "$commands" -- "$cur")) + ;; + esac + ;; + + auth-transfer) + case "$subcmd" in + "") + COMPREPLY=($(compgen -W "init send help" -- "$cur")) + ;; + init) + case "$prev" in + --account-id) + _wallet_complete_account_id "$cur" + ;; + *) + COMPREPLY=($(compgen -W "--account-id" -- "$cur")) + ;; + esac + ;; + send) + case "$prev" in + --from | --to) + _wallet_complete_account_id "$cur" + ;; + --to-npk | --to-vpk | --amount) + ;; # no specific completion + *) + COMPREPLY=($(compgen -W "--from --to --to-npk --to-vpk --amount" -- "$cur")) + ;; + esac + ;; + esac + ;; + + chain-info) + case "$subcmd" in + "") + COMPREPLY=($(compgen -W "current-block-id block transaction help" -- "$cur")) + ;; + block) + case "$prev" in + -i | --id) + ;; # no specific completion for block ID + *) + COMPREPLY=($(compgen -W "-i --id" -- "$cur")) + ;; + esac + ;; + transaction) + case "$prev" in + -t | --hash) + ;; # no specific completion for tx hash + *) + COMPREPLY=($(compgen -W "-t --hash" -- "$cur")) + ;; + esac + ;; + esac + ;; + + account) + case "$subcmd" in + "") + COMPREPLY=($(compgen -W "get new 
sync-private list ls label help" -- "$cur")) + ;; + get) + case "$prev" in + -a | --account-id) + _wallet_complete_account_id "$cur" + ;; + *) + COMPREPLY=($(compgen -W "-r --raw -k --keys -a --account-id" -- "$cur")) + ;; + esac + ;; + list | ls) + COMPREPLY=($(compgen -W "-l --long" -- "$cur")) + ;; + sync-private) + ;; # no options + new) + # `account new` is itself a subcommand: public | private + local new_subcmd="" + for ((i = subcmd_idx + 1; i < cword; i++)); do + case "${words[$i]}" in + public | private) + new_subcmd="${words[$i]}" + break + ;; + esac + done + + if [[ -z "$new_subcmd" ]]; then + COMPREPLY=($(compgen -W "public private" -- "$cur")) + else + case "$prev" in + --cci | -l | --label) + ;; # no specific completion + *) + COMPREPLY=($(compgen -W "--cci -l --label" -- "$cur")) + ;; + esac + fi + ;; + label) + case "$prev" in + -a | --account-id) + _wallet_complete_account_id "$cur" + ;; + -l | --label) + ;; # no specific completion for label value + *) + COMPREPLY=($(compgen -W "-a --account-id -l --label" -- "$cur")) + ;; + esac + ;; + esac + ;; + + pinata) + case "$subcmd" in + "") + COMPREPLY=($(compgen -W "claim help" -- "$cur")) + ;; + claim) + case "$prev" in + --to) + _wallet_complete_account_id "$cur" + ;; + *) + COMPREPLY=($(compgen -W "--to" -- "$cur")) + ;; + esac + ;; + esac + ;; + + token) + case "$subcmd" in + "") + COMPREPLY=($(compgen -W "new send burn mint help" -- "$cur")) + ;; + new) + case "$prev" in + --definition-account-id | --supply-account-id) + _wallet_complete_account_id "$cur" + ;; + -n | --name | -t | --total-supply) + ;; # no specific completion + *) + COMPREPLY=($(compgen -W "--definition-account-id --supply-account-id -n --name -t --total-supply" -- "$cur")) + ;; + esac + ;; + send) + case "$prev" in + --from | --to) + _wallet_complete_account_id "$cur" + ;; + --to-npk | --to-vpk | --amount) + ;; # no specific completion + *) + COMPREPLY=($(compgen -W "--from --to --to-npk --to-vpk --amount" -- "$cur")) + ;; + esac 
+ ;; + burn) + case "$prev" in + --definition | --holder) + _wallet_complete_account_id "$cur" + ;; + --amount) + ;; # no specific completion + *) + COMPREPLY=($(compgen -W "--definition --holder --amount" -- "$cur")) + ;; + esac + ;; + mint) + case "$prev" in + --definition | --holder) + _wallet_complete_account_id "$cur" + ;; + --holder-npk | --holder-vpk | --amount) + ;; # no specific completion + *) + COMPREPLY=($(compgen -W "--definition --holder --holder-npk --holder-vpk --amount" -- "$cur")) + ;; + esac + ;; + esac + ;; + + amm) + case "$subcmd" in + "") + COMPREPLY=($(compgen -W "new swap add-liquidity remove-liquidity help" -- "$cur")) + ;; + new) + case "$prev" in + --user-holding-a | --user-holding-b | --user-holding-lp) + _wallet_complete_account_id "$cur" + ;; + --balance-a | --balance-b) + ;; # no specific completion + *) + COMPREPLY=($(compgen -W "--user-holding-a --user-holding-b --user-holding-lp --balance-a --balance-b" -- "$cur")) + ;; + esac + ;; + swap) + case "$prev" in + --user-holding-a | --user-holding-b) + _wallet_complete_account_id "$cur" + ;; + --amount-in | --min-amount-out | --token-definition) + ;; # no specific completion + *) + COMPREPLY=($(compgen -W "--user-holding-a --user-holding-b --amount-in --min-amount-out --token-definition" -- "$cur")) + ;; + esac + ;; + add-liquidity) + case "$prev" in + --user-holding-a | --user-holding-b | --user-holding-lp) + _wallet_complete_account_id "$cur" + ;; + --max-amount-a | --max-amount-b | --min-amount-lp) + ;; # no specific completion + *) + COMPREPLY=($(compgen -W "--user-holding-a --user-holding-b --user-holding-lp --max-amount-a --max-amount-b --min-amount-lp" -- "$cur")) + ;; + esac + ;; + remove-liquidity) + case "$prev" in + --user-holding-a | --user-holding-b | --user-holding-lp) + _wallet_complete_account_id "$cur" + ;; + --balance-lp | --min-amount-a | --min-amount-b) + ;; # no specific completion + *) + COMPREPLY=($(compgen -W "--user-holding-a --user-holding-b --user-holding-lp 
--balance-lp --min-amount-a --min-amount-b" -- "$cur")) + ;; + esac + ;; + esac + ;; + + config) + case "$subcmd" in + "") + COMPREPLY=($(compgen -W "get set description help" -- "$cur")) + ;; + get) + # Accepts optional -a/--all flag and an optional positional key + COMPREPLY=($(compgen -W "--all -a $config_keys" -- "$cur")) + ;; + set) + # set — only complete the key; no completion for the value + local set_args=0 + for ((i = subcmd_idx + 1; i < cword; i++)); do + [[ "${words[$i]}" != -* ]] && ((set_args++)) + done + if [[ $set_args -eq 0 ]]; then + COMPREPLY=($(compgen -W "$config_keys" -- "$cur")) + fi + ;; + description) + # description — only complete if no key provided yet + local has_key=false + for ((i = subcmd_idx + 1; i < cword; i++)); do + [[ "${words[$i]}" != -* ]] && has_key=true && break + done + if ! $has_key; then + COMPREPLY=($(compgen -W "$config_keys" -- "$cur")) + fi + ;; + esac + ;; + + restore-keys) + case "$prev" in + -d | --depth) + ;; # no specific completion for depth value + *) + COMPREPLY=($(compgen -W "-d --depth" -- "$cur")) + ;; + esac + ;; + + deploy-program) + COMPREPLY=($(compgen -f -- "$cur")) + compopt -o filenames 2>/dev/null + ;; + + help) + COMPREPLY=($(compgen -W "$commands" -- "$cur")) + ;; + esac +} + +complete -F _wallet wallet diff --git a/completions/zsh/_wallet b/completions/zsh/_wallet index e0c5f415..6e60cc53 100644 --- a/completions/zsh/_wallet +++ b/completions/zsh/_wallet @@ -181,7 +181,8 @@ _wallet_account() { ;; new_args) _arguments \ - '--cci[Chain index of a parent node]:chain_index:' + '--cci[Chain index of a parent node]:chain_index:' \ + '(-l --label)'{-l,--label}'[Label to assign to the new account]:label:' ;; esac ;; @@ -343,7 +344,6 @@ _wallet_config() { local -a config_keys config_keys=( - 'all' 'override_rust_log' 'sequencer_addr' 'seq_poll_timeout' @@ -370,7 +370,12 @@ _wallet_config() { ;; args) case $line[1] in - get|description) + get) + _arguments \ + '(-a --all)'{-a,--all}'[Print all config 
fields]' \ + '::key:compadd -a config_keys' + ;; + description) compadd -a config_keys ;; set) diff --git a/examples/program_deployment/Cargo.toml b/examples/program_deployment/Cargo.toml index 2199fe21..96964a36 100644 --- a/examples/program_deployment/Cargo.toml +++ b/examples/program_deployment/Cargo.toml @@ -4,6 +4,9 @@ version = "0.1.0" edition = "2024" license = { workspace = true } +[lints] +workspace = true + [dependencies] nssa.workspace = true nssa_core.workspace = true diff --git a/examples/program_deployment/methods/Cargo.toml b/examples/program_deployment/methods/Cargo.toml index 95b10ea8..269df069 100644 --- a/examples/program_deployment/methods/Cargo.toml +++ b/examples/program_deployment/methods/Cargo.toml @@ -4,6 +4,9 @@ version = "0.1.0" edition = "2024" license = { workspace = true } +[lints] +workspace = true + [build-dependencies] risc0-build.workspace = true diff --git a/examples/program_deployment/methods/guest/Cargo.toml b/examples/program_deployment/methods/guest/Cargo.toml index 1f4db355..69d0ffb1 100644 --- a/examples/program_deployment/methods/guest/Cargo.toml +++ b/examples/program_deployment/methods/guest/Cargo.toml @@ -4,6 +4,9 @@ version = "0.1.0" edition = "2024" license = { workspace = true } +[lints] +workspace = true + [dependencies] nssa_core.workspace = true diff --git a/examples/program_deployment/methods/guest/src/bin/hello_world_with_authorization.rs b/examples/program_deployment/methods/guest/src/bin/hello_world_with_authorization.rs index 043da1bf..e327ca47 100644 --- a/examples/program_deployment/methods/guest/src/bin/hello_world_with_authorization.rs +++ b/examples/program_deployment/methods/guest/src/bin/hello_world_with_authorization.rs @@ -36,9 +36,7 @@ fn main() { // Fail if the input account is not authorized // The `is_authorized` field will be correctly populated or verified by the system if // authorization is provided. 
- if !pre_state.is_authorized { - panic!("Missing required authorization"); - } + assert!(pre_state.is_authorized, "Missing required authorization"); // #### // Construct the post state account values diff --git a/examples/program_deployment/methods/guest/src/bin/hello_world_with_move_function.rs b/examples/program_deployment/methods/guest/src/bin/hello_world_with_move_function.rs index af0d4bf4..65f0f9cd 100644 --- a/examples/program_deployment/methods/guest/src/bin/hello_world_with_move_function.rs +++ b/examples/program_deployment/methods/guest/src/bin/hello_world_with_move_function.rs @@ -1,5 +1,5 @@ use nssa_core::{ - account::{Account, AccountWithMetadata}, + account::{Account, AccountWithMetadata, Data}, program::{ AccountPostState, DEFAULT_PROGRAM_ID, ProgramInput, read_nssa_inputs, write_nssa_outputs, }, @@ -21,10 +21,11 @@ use nssa_core::{ // In case an input account is uninitialized, the program will claim it when // producing the post-state. -type Instruction = (u8, Vec); const WRITE_FUNCTION_ID: u8 = 0; const MOVE_DATA_FUNCTION_ID: u8 = 1; +type Instruction = (u8, Vec); + fn build_post_state(post_account: Account) -> AccountPostState { if post_account.program_owner == DEFAULT_PROGRAM_ID { // This produces a claim request @@ -35,12 +36,12 @@ fn build_post_state(post_account: Account) -> AccountPostState { } } -fn write(pre_state: AccountWithMetadata, greeting: Vec) -> AccountPostState { +fn write(pre_state: AccountWithMetadata, greeting: &[u8]) -> AccountPostState { // Construct the post state account values let post_account = { - let mut this = pre_state.account.clone(); + let mut this = pre_state.account; let mut bytes = this.data.into_inner(); - bytes.extend_from_slice(&greeting); + bytes.extend_from_slice(greeting); this.data = bytes .try_into() .expect("Data should fit within the allowed limits"); @@ -50,21 +51,18 @@ fn write(pre_state: AccountWithMetadata, greeting: Vec) -> AccountPostState build_post_state(post_account) } -fn move_data( - 
from_pre: &AccountWithMetadata, - to_pre: &AccountWithMetadata, -) -> Vec { +fn move_data(from_pre: AccountWithMetadata, to_pre: AccountWithMetadata) -> Vec { // Construct the post state account values let from_data: Vec = from_pre.account.data.clone().into(); let from_post = { - let mut this = from_pre.account.clone(); - this.data = Default::default(); + let mut this = from_pre.account; + this.data = Data::default(); build_post_state(this) }; let to_post = { - let mut this = to_pre.account.clone(); + let mut this = to_pre.account; let mut bytes = this.data.into_inner(); bytes.extend_from_slice(&from_data); this.data = bytes @@ -88,11 +86,11 @@ fn main() { let post_states = match (pre_states.as_slice(), function_id, data.len()) { ([account_pre], WRITE_FUNCTION_ID, _) => { - let post = write(account_pre.clone(), data); + let post = write(account_pre.clone(), &data); vec![post] } ([account_from_pre, account_to_pre], MOVE_DATA_FUNCTION_ID, 0) => { - move_data(account_from_pre, account_to_pre) + move_data(account_from_pre.clone(), account_to_pre.clone()) } _ => panic!("invalid params"), }; diff --git a/examples/program_deployment/methods/guest/src/bin/simple_tail_call.rs b/examples/program_deployment/methods/guest/src/bin/simple_tail_call.rs index e933598f..01389085 100644 --- a/examples/program_deployment/methods/guest/src/bin/simple_tail_call.rs +++ b/examples/program_deployment/methods/guest/src/bin/simple_tail_call.rs @@ -29,7 +29,7 @@ fn main() { let ( ProgramInput { pre_states, - instruction: _, + instruction: (), }, instruction_data, ) = read_nssa_inputs::<()>(); diff --git a/examples/program_deployment/methods/guest/src/bin/tail_call_with_pda.rs b/examples/program_deployment/methods/guest/src/bin/tail_call_with_pda.rs index 684fa1e8..3ebcabd2 100644 --- a/examples/program_deployment/methods/guest/src/bin/tail_call_with_pda.rs +++ b/examples/program_deployment/methods/guest/src/bin/tail_call_with_pda.rs @@ -34,14 +34,13 @@ fn main() { let ( ProgramInput { 
pre_states, - instruction: _, + instruction: (), }, instruction_data, ) = read_nssa_inputs::<()>(); // Unpack the input account pre state let [pre_state] = pre_states - .clone() .try_into() .unwrap_or_else(|_| panic!("Input pre states should consist of a single account")); diff --git a/examples/program_deployment/src/bin/run_hello_world_through_tail_call_private.rs b/examples/program_deployment/src/bin/run_hello_world_through_tail_call_private.rs index 9b3619cb..4fac3eec 100644 --- a/examples/program_deployment/src/bin/run_hello_world_through_tail_call_private.rs +++ b/examples/program_deployment/src/bin/run_hello_world_through_tail_call_private.rs @@ -48,7 +48,7 @@ async fn main() { let hello_world_bytecode: Vec = std::fs::read(hello_world_path).unwrap(); let hello_world = Program::new(hello_world_bytecode).unwrap(); let dependencies: HashMap = - [(hello_world.id(), hello_world)].into_iter().collect(); + std::iter::once((hello_world.id(), hello_world)).collect(); let program_with_dependencies = ProgramWithDependencies::new(simple_tail_call, dependencies); let accounts = vec![PrivacyPreservingAccount::PrivateOwned(account_id)]; diff --git a/examples/program_deployment/src/bin/run_hello_world_with_authorization.rs b/examples/program_deployment/src/bin/run_hello_world_with_authorization.rs index 5e7df2d2..f38443ac 100644 --- a/examples/program_deployment/src/bin/run_hello_world_with_authorization.rs +++ b/examples/program_deployment/src/bin/run_hello_world_with_authorization.rs @@ -3,6 +3,7 @@ use nssa::{ program::Program, public_transaction::{Message, WitnessSet}, }; +use nssa_core::account::Nonce; use wallet::WalletCore; // Before running this example, compile the `hello_world_with_authorization.rs` guest program with: @@ -62,7 +63,13 @@ async fn main() { .await .expect("Node should be reachable to query account data"); let signing_keys = [signing_key]; - let message = Message::try_new(program.id(), vec![account_id], nonces, greeting).unwrap(); + let message = 
Message::try_new( + program.id(), + vec![account_id], + nonces.iter().map(|x| Nonce(*x)).collect(), + greeting, + ) + .unwrap(); // Pass the signing key to sign the message. This will be used by the node // to flag the pre_state as `is_authorized` when executing the program let witness_set = WitnessSet::for_message(&message, &signing_keys); diff --git a/examples/program_deployment/src/bin/run_hello_world_with_authorization_through_tail_call_with_pda.rs b/examples/program_deployment/src/bin/run_hello_world_with_authorization_through_tail_call_with_pda.rs index 43839ba9..4371b000 100644 --- a/examples/program_deployment/src/bin/run_hello_world_with_authorization_through_tail_call_with_pda.rs +++ b/examples/program_deployment/src/bin/run_hello_world_with_authorization_through_tail_call_with_pda.rs @@ -1,3 +1,8 @@ +#![expect( + clippy::print_stdout, + reason = "This is an example program, it's fine to print to stdout" +)] + use nssa::{ AccountId, PublicTransaction, program::Program, diff --git a/examples/program_deployment/src/bin/run_hello_world_with_move_function.rs b/examples/program_deployment/src/bin/run_hello_world_with_move_function.rs index fc116241..0d4af502 100644 --- a/examples/program_deployment/src/bin/run_hello_world_with_move_function.rs +++ b/examples/program_deployment/src/bin/run_hello_world_with_move_function.rs @@ -19,13 +19,14 @@ use wallet::{PrivacyPreservingAccount, WalletCore}; // methods/guest/target/riscv32im-risc0-zkvm-elf/docker/hello_world_with_move_function.bin \ // write-public Ds8q5PjLcKwwV97Zi7duhRVF9uwA2PuYMoLL7FwCzsXE Hola -type Instruction = (u8, Vec); const WRITE_FUNCTION_ID: u8 = 0; const MOVE_DATA_FUNCTION_ID: u8 = 1; +type Instruction = (u8, Vec); + #[derive(Parser, Debug)] struct Cli { - /// Path to program binary + /// Path to program binary. 
program_path: String, #[command(subcommand)] @@ -34,7 +35,7 @@ struct Cli { #[derive(Subcommand, Debug)] enum Command { - /// Write instruction into one account + /// Write instruction into one account. WritePublic { account_id: String, greeting: String, @@ -43,7 +44,7 @@ enum Command { account_id: String, greeting: String, }, - /// Move data between two accounts + /// Move data between two accounts. MoveDataPublicToPublic { from: String, to: String, @@ -148,5 +149,5 @@ async fn main() { .await .unwrap(); } - }; + } } diff --git a/explorer_service/Cargo.toml b/explorer_service/Cargo.toml index 1dc989d0..fb787230 100644 --- a/explorer_service/Cargo.toml +++ b/explorer_service/Cargo.toml @@ -4,6 +4,9 @@ version = "0.1.0" edition = "2024" license.workspace = true +[lints] +workspace = true + [lib] crate-type = ["cdylib", "rlib"] diff --git a/explorer_service/Dockerfile b/explorer_service/Dockerfile index e10c5ebe..238e77e6 100644 --- a/explorer_service/Dockerfile +++ b/explorer_service/Dockerfile @@ -1,4 +1,4 @@ -FROM rust:1.91.1-trixie AS builder +FROM rust:1.94.0-trixie AS builder # Install cargo-binstall, which makes it easier to install other # cargo extensions like cargo-leptos diff --git a/explorer_service/src/api.rs b/explorer_service/src/api.rs index 1e2bdd5b..b37145af 100644 --- a/explorer_service/src/api.rs +++ b/explorer_service/src/api.rs @@ -2,7 +2,7 @@ use indexer_service_protocol::{Account, AccountId, Block, BlockId, HashType, Tra use leptos::prelude::*; use serde::{Deserialize, Serialize}; -/// Search results structure +/// Search results structure. #[derive(Clone, Debug, Serialize, Deserialize)] pub struct SearchResults { pub blocks: Vec, @@ -10,7 +10,7 @@ pub struct SearchResults { pub accounts: Vec<(AccountId, Account)>, } -/// RPC client type +/// RPC client type. 
#[cfg(feature = "ssr")] pub type IndexerRpcClient = jsonrpsee::http_client::HttpClient; @@ -22,7 +22,7 @@ pub async fn get_account(account_id: AccountId) -> Result Result client .get_block_by_id(block_id) .await - .map_err(|e| ServerFnError::ServerError(format!("RPC error: {}", e))) + .map_err(|e| ServerFnError::ServerError(format!("RPC error: {e}"))) } /// Get latest block ID @@ -91,7 +91,7 @@ pub async fn get_latest_block_id() -> Result { client .get_last_finalized_block_id() .await - .map_err(|e| ServerFnError::ServerError(format!("RPC error: {}", e))) + .map_err(|e| ServerFnError::ServerError(format!("RPC error: {e}"))) } /// Get block by hash @@ -102,7 +102,7 @@ pub async fn get_block_by_hash(block_hash: HashType) -> Result Result, limit: u32) -> Result, ServerFnError> { +pub async fn get_blocks(before: Option, limit: u64) -> Result, ServerFnError> { use indexer_service_rpc::RpcClient as _; let client = expect_context::(); client .get_blocks(before, limit) .await - .map_err(|e| ServerFnError::ServerError(format!("RPC error: {}", e))) + .map_err(|e| ServerFnError::ServerError(format!("RPC error: {e}"))) } /// Get transactions by account #[server] pub async fn get_transactions_by_account( account_id: AccountId, - limit: u32, - offset: u32, + offset: u64, + limit: u64, ) -> Result, ServerFnError> { use indexer_service_rpc::RpcClient as _; let client = expect_context::(); client - .get_transactions_by_account(account_id, limit, offset) + .get_transactions_by_account(account_id, offset, limit) .await - .map_err(|e| ServerFnError::ServerError(format!("RPC error: {}", e))) + .map_err(|e| ServerFnError::ServerError(format!("RPC error: {e}"))) } -/// Create the RPC client for the indexer service (server-side only) +/// Create the RPC client for the indexer service (server-side only). 
#[cfg(feature = "ssr")] pub fn create_indexer_rpc_client(url: &url::Url) -> Result { use jsonrpsee::http_client::HttpClientBuilder; diff --git a/explorer_service/src/components/block_preview.rs b/explorer_service/src/components/block_preview.rs index e16ab831..8fe48f9f 100644 --- a/explorer_service/src/components/block_preview.rs +++ b/explorer_service/src/components/block_preview.rs @@ -4,8 +4,8 @@ use leptos_router::components::A; use crate::format_utils; -/// Get CSS class for bedrock status -fn status_class(status: &BedrockStatus) -> &'static str { +/// Get CSS class for bedrock status. +const fn status_class(status: &BedrockStatus) -> &'static str { match status { BedrockStatus::Pending => "status-pending", BedrockStatus::Safe => "status-safe", diff --git a/explorer_service/src/components/mod.rs b/explorer_service/src/components/mod.rs index a0032b10..306c79a8 100644 --- a/explorer_service/src/components/mod.rs +++ b/explorer_service/src/components/mod.rs @@ -1,7 +1,7 @@ -pub mod account_preview; -pub mod block_preview; -pub mod transaction_preview; - pub use account_preview::AccountPreview; pub use block_preview::BlockPreview; pub use transaction_preview::TransactionPreview; + +pub mod account_preview; +pub mod block_preview; +pub mod transaction_preview; diff --git a/explorer_service/src/components/transaction_preview.rs b/explorer_service/src/components/transaction_preview.rs index 68c1e86e..094ca4ff 100644 --- a/explorer_service/src/components/transaction_preview.rs +++ b/explorer_service/src/components/transaction_preview.rs @@ -2,8 +2,8 @@ use indexer_service_protocol::Transaction; use leptos::prelude::*; use leptos_router::components::A; -/// Get transaction type name and CSS class -fn transaction_type_info(tx: &Transaction) -> (&'static str, &'static str) { +/// Get transaction type name and CSS class. 
+const fn transaction_type_info(tx: &Transaction) -> (&'static str, &'static str) { match tx { Transaction::Public(_) => ("Public", "tx-type-public"), Transaction::PrivacyPreserving(_) => ("Privacy-Preserving", "tx-type-private"), @@ -13,6 +13,10 @@ fn transaction_type_info(tx: &Transaction) -> (&'static str, &'static str) { /// Transaction preview component #[component] +#[expect( + clippy::needless_pass_by_value, + reason = "Leptos component props are passed by value by framework convention" +)] pub fn TransactionPreview(transaction: Transaction) -> impl IntoView { let hash = transaction.hash(); let hash_str = hash.to_string(); diff --git a/explorer_service/src/format_utils.rs b/explorer_service/src/format_utils.rs index 20c1824c..2df76658 100644 --- a/explorer_service/src/format_utils.rs +++ b/explorer_service/src/format_utils.rs @@ -1,9 +1,17 @@ -//! Formatting utilities for the explorer +//! Formatting utilities for the explorer. -/// Format timestamp to human-readable string +/// Format timestamp to human-readable string. 
+#[expect( + clippy::integer_division, + clippy::integer_division_remainder_used, + reason = "We need to convert milliseconds to seconds, and this is the most straightforward way to do it" +)] pub fn format_timestamp(timestamp: u64) -> String { let seconds = timestamp / 1000; - let datetime = chrono::DateTime::from_timestamp(seconds as i64, 0) - .unwrap_or_else(|| chrono::DateTime::from_timestamp(0, 0).unwrap()); + let datetime = chrono::DateTime::from_timestamp( + i64::try_from(seconds).expect("Timestamp out of range"), + 0, + ) + .unwrap_or_else(|| chrono::DateTime::from_timestamp(0, 0).unwrap()); datetime.format("%Y-%m-%d %H:%M:%S UTC").to_string() } diff --git a/explorer_service/src/lib.rs b/explorer_service/src/lib.rs index e2b2291e..edfe7df5 100644 --- a/explorer_service/src/lib.rs +++ b/explorer_service/src/lib.rs @@ -1,3 +1,9 @@ +#![expect( + clippy::must_use_candidate, + clippy::same_name_method, + reason = "Warns on code generated by leptos macros" +)] + use leptos::prelude::*; use leptos_meta::{Meta, Stylesheet, Title, provide_meta_context}; use leptos_router::{ diff --git a/explorer_service/src/main.rs b/explorer_service/src/main.rs index 6cc4a9a4..5940a9e1 100644 --- a/explorer_service/src/main.rs +++ b/explorer_service/src/main.rs @@ -1,3 +1,7 @@ +#[expect( + clippy::print_stdout, + reason = "This is just simple and handy for such a small server" +)] #[cfg(feature = "ssr")] #[tokio::main] async fn main() { @@ -5,20 +9,20 @@ async fn main() { use clap::Parser; use explorer_service::App; use leptos::prelude::*; - use leptos_axum::{LeptosRoutes, generate_route_list}; + use leptos_axum::{LeptosRoutes as _, generate_route_list}; use leptos_meta::MetaTags; - env_logger::init(); - /// LEZ Block Explorer Server CLI arguments. #[derive(Parser, Debug)] #[command(version, about, long_about = None)] struct Args { - /// Indexer RPC URL + /// Indexer RPC URL. 
#[arg(long, env = "INDEXER_RPC_URL", default_value = "http://localhost:8779")] indexer_rpc_url: url::Url, } + env_logger::init(); + let args = Args::parse(); let conf = get_configuration(None).unwrap(); diff --git a/explorer_service/src/pages/account_page.rs b/explorer_service/src/pages/account_page.rs index 4c0af1ac..a02a8b7c 100644 --- a/explorer_service/src/pages/account_page.rs +++ b/explorer_service/src/pages/account_page.rs @@ -10,11 +10,11 @@ use crate::{api, components::TransactionPreview}; #[component] pub fn AccountPage() -> impl IntoView { let params = use_params_map(); - let (tx_offset, set_tx_offset) = signal(0u32); + let (tx_offset, set_tx_offset) = signal(0_u64); let (all_transactions, set_all_transactions) = signal(Vec::new()); let (is_loading, set_is_loading) = signal(false); let (has_more, set_has_more) = signal(true); - let tx_limit = 10u32; + let tx_limit = 10_u64; // Parse account ID from URL params let account_id = move || { @@ -27,7 +27,7 @@ pub fn AccountPage() -> impl IntoView { match acc_id_opt { Some(acc_id) => api::get_account(acc_id).await, None => Err(leptos::prelude::ServerFnError::ServerError( - "Invalid account ID".to_string(), + "Invalid account ID".to_owned(), )), } }); @@ -35,9 +35,9 @@ pub fn AccountPage() -> impl IntoView { // Load initial transactions let transactions_resource = Resource::new(account_id, move |acc_id_opt| async move { match acc_id_opt { - Some(acc_id) => api::get_transactions_by_account(acc_id, tx_limit, 0).await, + Some(acc_id) => api::get_transactions_by_account(acc_id, 0, tx_limit).await, None => Err(leptos::prelude::ServerFnError::ServerError( - "Invalid account ID".to_string(), + "Invalid account ID".to_owned(), )), } }); @@ -46,7 +46,9 @@ pub fn AccountPage() -> impl IntoView { Effect::new(move || { if let Some(Ok(txs)) = transactions_resource.get() { set_all_transactions.set(txs.clone()); - set_has_more.set(txs.len() as u32 == tx_limit); + set_has_more.set( + u64::try_from(txs.len()).expect("Transaction 
count should fit in u64") == tx_limit, + ); } }); @@ -57,18 +59,19 @@ pub fn AccountPage() -> impl IntoView { }; set_is_loading.set(true); - let current_offset = tx_offset.get() + tx_limit; + let current_offset = tx_offset.get().saturating_add(tx_limit); set_tx_offset.set(current_offset); leptos::task::spawn_local(async move { - match api::get_transactions_by_account(acc_id, tx_limit, current_offset).await { + match api::get_transactions_by_account(acc_id, current_offset, tx_limit).await { Ok(new_txs) => { - let txs_count = new_txs.len() as u32; + let txs_count = + u64::try_from(new_txs.len()).expect("Transaction count should fit in u64"); set_all_transactions.update(|txs| txs.extend(new_txs)); set_has_more.set(txs_count == tx_limit); } Err(e) => { - log::error!("Failed to load more transactions: {}", e); + log::error!("Failed to load more transactions: {e}"); } } set_is_loading.set(false); @@ -108,114 +111,111 @@ pub fn AccountPage() -> impl IntoView {
"Account ID:" {account_id_str} -
-
- "Balance:" - {balance_str} -
-
- "Program Owner:" - {program_id} -
-
- "Nonce:" - {nonce_str} -
-
- "Data:" - {format!("{} bytes", data_len)} -
- - + +
+ "Balance:" + {balance_str} +
+
+ "Program Owner:" + {program_id} +
+
+ "Nonce:" + {nonce_str} +
+
+ "Data:" + {format!("{data_len} bytes")} +
+ + - } - }> - - {move || { - transactions_resource - .get() - .map(|result| match result { - Ok(_) => { - let txs = all_transactions.get(); - if txs.is_empty() { - view! { -
- "No transactions found" -
- } - .into_any() - } else { - view! { -
-
- {txs - .into_iter() - .map(|tx| { - view! { } - }) - .collect::>()} -
- {move || { - if has_more.get() { - view! { - - } - .into_any() - } else { - ().into_any() + } + }> + {move || { + transactions_resource + .get() + .map(|load_tx_result| match load_tx_result { + Ok(_) => { + let txs = all_transactions.get(); + if txs.is_empty() { + view! { +
+ "No transactions found" +
} - }} + .into_any() + } else { + view! { +
+
+ {txs + .into_iter() + .map(|tx| { + view! { } + }) + .collect::>()} +
+ {move || { + if has_more.get() { + view! { +
- } - .into_any() - } - } - Err(e) => { - view! { -
- {format!("Failed to load transactions: {}", e)} -
- } - .into_any() - } - }) - }} + + } + .into_any() + } else { + ().into_any() + } + }} - -
- - } - .into_any() + + } + .into_any() + } + } + Err(e) => { + view! { +
+ {format!("Failed to load transactions: {e}")} +
+ } + .into_any() + } + }) + }} + + + + } + .into_any() } Err(e) => { view! {

"Error"

-

{format!("Failed to load account: {}", e)}

+

{format!("Failed to load account: {e}")}

} .into_any() } }) }} - } diff --git a/explorer_service/src/pages/block_page.rs b/explorer_service/src/pages/block_page.rs index aee0a7cf..8f54fe18 100644 --- a/explorer_service/src/pages/block_page.rs +++ b/explorer_service/src/pages/block_page.rs @@ -38,7 +38,7 @@ pub fn BlockPage() -> impl IntoView { Some(BlockIdOrHash::BlockId(id)) => api::get_block_by_id(id).await, Some(BlockIdOrHash::Hash(hash)) => api::get_block_by_hash(hash).await, None => Err(leptos::prelude::ServerFnError::ServerError( - "Invalid block ID or hash".to_string(), + "Invalid block ID or hash".to_owned(), )), } }, @@ -144,7 +144,7 @@ pub fn BlockPage() -> impl IntoView { view! {

"Error"

-

{format!("Failed to load block: {}", e)}

+

{format!("Failed to load block: {e}")}

} .into_any() diff --git a/explorer_service/src/pages/main_page.rs b/explorer_service/src/pages/main_page.rs index 28d1d1d9..7e26e794 100644 --- a/explorer_service/src/pages/main_page.rs +++ b/explorer_service/src/pages/main_page.rs @@ -1,5 +1,8 @@ use leptos::prelude::*; -use leptos_router::hooks::{use_navigate, use_query_map}; +use leptos_router::{ + NavigateOptions, + hooks::{use_navigate, use_query_map}, +}; use web_sys::SubmitEvent; use crate::{ @@ -7,7 +10,7 @@ use crate::{ components::{AccountPreview, BlockPreview, TransactionPreview}, }; -const RECENT_BLOCKS_LIMIT: u32 = 10; +const RECENT_BLOCKS_LIMIT: u64 = 10; /// Main page component #[component] @@ -33,7 +36,7 @@ pub fn MainPage() -> impl IntoView { match api::search(query).await { Ok(result) => Some(result), Err(e) => { - log::error!("Search error: {}", e); + log::error!("Search error: {e}"); None } } @@ -48,7 +51,7 @@ pub fn MainPage() -> impl IntoView { // Load recent blocks on mount let recent_blocks_resource = Resource::new( || (), - |_| async { api::get_blocks(None, RECENT_BLOCKS_LIMIT).await }, + |()| async { api::get_blocks(None, RECENT_BLOCKS_LIMIT).await }, ); // Update all_blocks when initial load completes @@ -57,8 +60,11 @@ pub fn MainPage() -> impl IntoView { let oldest_id = blocks.last().map(|b| b.header.block_id); set_all_blocks.set(blocks.clone()); set_oldest_loaded_block_id.set(oldest_id); - set_has_more_blocks - .set(blocks.len() as u32 == RECENT_BLOCKS_LIMIT && oldest_id.unwrap_or(0) > 1); + set_has_more_blocks.set( + u64::try_from(blocks.len()).expect("usize should fit in u64") + == RECENT_BLOCKS_LIMIT + && oldest_id.unwrap_or(0) > 1, + ); } }); @@ -75,7 +81,8 @@ pub fn MainPage() -> impl IntoView { leptos::task::spawn_local(async move { match api::get_blocks(before_id, RECENT_BLOCKS_LIMIT).await { Ok(new_blocks) => { - let blocks_count = new_blocks.len() as u32; + let blocks_count = + u64::try_from(new_blocks.len()).expect("usize should fit in u64"); let new_oldest_id = 
new_blocks.last().map(|b| b.header.block_id); set_all_blocks.update(|blocks| blocks.extend(new_blocks)); set_oldest_loaded_block_id.set(new_oldest_id); @@ -83,7 +90,7 @@ pub fn MainPage() -> impl IntoView { .set(blocks_count == RECENT_BLOCKS_LIMIT && new_oldest_id.unwrap_or(0) > 1); } Err(e) => { - log::error!("Failed to load more blocks: {}", e); + log::error!("Failed to load more blocks: {e}"); } } set_is_loading_blocks.set(false); @@ -95,13 +102,13 @@ pub fn MainPage() -> impl IntoView { ev.prevent_default(); let query = search_query.get(); if query.is_empty() { - navigate("?", Default::default()); + navigate("?", NavigateOptions::default()); return; } navigate( &format!("?q={}", urlencoding::encode(&query)), - Default::default(), + NavigateOptions::default(), ); }; @@ -142,78 +149,78 @@ pub fn MainPage() -> impl IntoView { view! {

"Search Results"

- {if !has_results { - view! {
"No results found"
} - .into_any() - } else { - view! { -
- {if !blocks.is_empty() { - view! { -
-

"Blocks"

-
- {blocks - .into_iter() - .map(|block| { - view! { } - }) - .collect::>()} + {if has_results { + view! { +
+ {if blocks.is_empty() { + ().into_any() + } else { + view! { +
+

"Blocks"

+
+ {blocks + .into_iter() + .map(|block| { + view! { } + }) + .collect::>()} +
-
- } - .into_any() - } else { - ().into_any() - }} + } + .into_any() + }} - {if !transactions.is_empty() { - view! { -
-

"Transactions"

-
- {transactions - .into_iter() - .map(|tx| { - view! { } - }) - .collect::>()} + {if transactions.is_empty() { + ().into_any() + } else { + view! { +
+

"Transactions"

+
+ {transactions + .into_iter() + .map(|tx| { + view! { } + }) + .collect::>()} +
-
- } - .into_any() - } else { - ().into_any() - }} + } + .into_any() + }} - {if !accounts.is_empty() { - view! { -
-

"Accounts"

-
- {accounts - .into_iter() - .map(|(id, account)| { - view! { - - } - }) - .collect::>()} + {if accounts.is_empty() { + ().into_any() + } else { + view! { +
+

"Accounts"

+
+ {accounts + .into_iter() + .map(|(id, account)| { + view! { + + } + }) + .collect::>()} +
-
- } - .into_any() - } else { - ().into_any() - }} + } + .into_any() + }} -
- } - .into_any() - }} +
+ } + .into_any() + } else { + view! {
"No results found"
} + .into_any() + }}
} .into_any() @@ -274,7 +281,7 @@ pub fn MainPage() -> impl IntoView { } } Err(e) => { - view! {
{format!("Error: {}", e)}
} + view! {
{format!("Error: {e}")}
} .into_any() } }) diff --git a/explorer_service/src/pages/mod.rs b/explorer_service/src/pages/mod.rs index f4220145..92885150 100644 --- a/explorer_service/src/pages/mod.rs +++ b/explorer_service/src/pages/mod.rs @@ -1,9 +1,9 @@ -pub mod account_page; -pub mod block_page; -pub mod main_page; -pub mod transaction_page; - pub use account_page::AccountPage; pub use block_page::BlockPage; pub use main_page::MainPage; pub use transaction_page::TransactionPage; + +pub mod account_page; +pub mod block_page; +pub mod main_page; +pub mod transaction_page; diff --git a/explorer_service/src/pages/transaction_page.rs b/explorer_service/src/pages/transaction_page.rs index 2859719f..211dc505 100644 --- a/explorer_service/src/pages/transaction_page.rs +++ b/explorer_service/src/pages/transaction_page.rs @@ -4,7 +4,7 @@ use indexer_service_protocol::{ HashType, PrivacyPreservingMessage, PrivacyPreservingTransaction, ProgramDeploymentMessage, ProgramDeploymentTransaction, PublicMessage, PublicTransaction, Transaction, WitnessSet, }; -use itertools::{EitherOrBoth, Itertools}; +use itertools::{EitherOrBoth, Itertools as _}; use leptos::prelude::*; use leptos_router::{components::A, hooks::use_params_map}; @@ -17,16 +17,14 @@ pub fn TransactionPage() -> impl IntoView { let transaction_resource = Resource::new( move || { - params - .read() - .get("hash") - .and_then(|s| HashType::from_str(&s).ok()) + let s = params.read().get("hash")?; + HashType::from_str(&s).ok() }, |hash_opt| async move { match hash_opt { Some(hash) => api::get_transaction(hash).await, None => Err(leptos::prelude::ServerFnError::ServerError( - "Invalid transaction hash".to_string(), + "Invalid transaction hash".to_owned(), )), } }, @@ -105,7 +103,7 @@ pub fn TransactionPage() -> impl IntoView {
"Proof Size:" - {format!("{} bytes", proof_len)} + {format!("{proof_len} bytes")}
"Signatures:" @@ -141,7 +139,7 @@ pub fn TransactionPage() -> impl IntoView { {account_id_str} - " (nonce: "{"Not affected by this transaction".to_string()}" )" + " (nonce: "{"Not affected by this transaction".to_owned()}" )"
} @@ -153,7 +151,7 @@ pub fn TransactionPage() -> impl IntoView { {"Account not found"} - " (nonce: "{"Account not found".to_string()}" )" + " (nonce: "{"Account not found".to_owned()}" )"
} @@ -212,7 +210,7 @@ pub fn TransactionPage() -> impl IntoView {
"Proof Size:" - {format!("{} bytes", proof_len)} + {format!("{proof_len} bytes")}
@@ -244,7 +242,7 @@ pub fn TransactionPage() -> impl IntoView { {account_id_str} - " (nonce: "{"Not affected by this transaction".to_string()}" )" + " (nonce: "{"Not affected by this transaction".to_owned()}" )" } @@ -256,7 +254,7 @@ pub fn TransactionPage() -> impl IntoView { {"Account not found"} - " (nonce: "{"Account not found".to_string()}" )" + " (nonce: "{"Account not found".to_owned()}" )" } @@ -284,7 +282,7 @@ pub fn TransactionPage() -> impl IntoView {
"Bytecode Size:" - {format!("{} bytes", bytecode_len)} + {format!("{bytecode_len} bytes")}
@@ -302,7 +300,7 @@ pub fn TransactionPage() -> impl IntoView { view! {

"Error"

-

{format!("Failed to load transaction: {}", e)}

+

{format!("Failed to load transaction: {e}")}

} .into_any() diff --git a/indexer/core/Cargo.toml b/indexer/core/Cargo.toml index 792fb4b7..8129c1ea 100644 --- a/indexer/core/Cargo.toml +++ b/indexer/core/Cargo.toml @@ -4,6 +4,9 @@ version = "0.1.0" edition = "2024" license = { workspace = true } +[lints] +workspace = true + [dependencies] common.workspace = true bedrock_client.workspace = true @@ -22,3 +25,7 @@ url.workspace = true logos-blockchain-core.workspace = true serde_json.workspace = true async-stream.workspace = true + +[dev-dependencies] +tempfile.workspace = true + diff --git a/indexer/core/src/block_store.rs b/indexer/core/src/block_store.rs index 681b63c8..db2f855b 100644 --- a/indexer/core/src/block_store.rs +++ b/indexer/core/src/block_store.rs @@ -3,15 +3,17 @@ use std::{path::Path, sync::Arc}; use anyhow::Result; use bedrock_client::HeaderId; use common::{ - block::{BedrockStatus, Block}, + block::{BedrockStatus, Block, BlockId}, transaction::NSSATransaction, }; use nssa::{Account, AccountId, V02State}; use storage::indexer::RocksDBIO; +use tokio::sync::RwLock; #[derive(Clone)] pub struct IndexerStore { dbio: Arc, + current_state: Arc>, } impl IndexerStore { @@ -21,20 +23,18 @@ impl IndexerStore { /// ATTENTION: Will overwrite genesis block. pub fn open_db_with_genesis( location: &Path, - start_data: Option<(Block, V02State)>, + genesis_block: &Block, + initial_state: &V02State, ) -> Result { - let dbio = RocksDBIO::open_or_create(location, start_data)?; + let dbio = RocksDBIO::open_or_create(location, genesis_block, initial_state)?; + let current_state = dbio.final_state()?; Ok(Self { dbio: Arc::new(dbio), + current_state: Arc::new(RwLock::new(current_state)), }) } - /// Reopening existing database - pub fn open_db_restart(location: &Path) -> Result { - Self::open_db_with_genesis(location, None) - } - pub fn last_observed_l1_lib_header(&self) -> Result> { Ok(self .dbio @@ -50,7 +50,7 @@ impl IndexerStore { Ok(self.dbio.get_block(id)?) 
} - pub fn get_block_batch(&self, before: Option, limit: u64) -> Result> { + pub fn get_block_batch(&self, before: Option, limit: u64) -> Result> { Ok(self.dbio.get_block_batch(before, limit)?) } @@ -79,12 +79,14 @@ impl IndexerStore { Ok(self.dbio.get_acc_transactions(acc_id, offset, limit)?) } + #[must_use] pub fn genesis_id(&self) -> u64 { self.dbio .get_meta_first_block_in_db() .expect("Must be set at the DB startup") } + #[must_use] pub fn last_block(&self) -> u64 { self.dbio .get_meta_last_block_in_db() @@ -95,22 +97,31 @@ impl IndexerStore { Ok(self.dbio.calculate_state_for_id(block_id)?) } - pub fn final_state(&self) -> Result { + /// Recalculation of final state directly from DB. + /// + /// Used for indexer healthcheck. + pub fn recalculate_final_state(&self) -> Result { Ok(self.dbio.final_state()?) } - pub fn get_account_final(&self, account_id: &AccountId) -> Result { - Ok(self.final_state()?.get_account_by_id(*account_id)) + pub async fn account_current_state(&self, account_id: &AccountId) -> Result { + Ok(self + .current_state + .read() + .await + .get_account_by_id(*account_id)) } - pub fn put_block(&self, mut block: Block, l1_header: HeaderId) -> Result<()> { - let mut final_state = self.dbio.final_state()?; + pub async fn put_block(&self, mut block: Block, l1_header: HeaderId) -> Result<()> { + { + let mut state_guard = self.current_state.write().await; - for transaction in &block.body.transactions { - transaction - .clone() - .transaction_stateless_check()? - .execute_check_on_state(&mut final_state)?; + for transaction in &block.body.transactions { + transaction + .clone() + .transaction_stateless_check()? + .execute_check_on_state(&mut state_guard)?; + } } // ToDo: Currently we are fetching only finalized blocks @@ -118,6 +129,98 @@ impl IndexerStore { // to represent correct block finality block.bedrock_status = BedrockStatus::Finalized; - Ok(self.dbio.put_block(block, l1_header.into())?) + Ok(self.dbio.put_block(&block, l1_header.into())?) 
+ } +} + +#[cfg(test)] +mod tests { + use nssa::{AccountId, PublicKey}; + use tempfile::tempdir; + + use super::*; + + fn genesis_block() -> Block { + common::test_utils::produce_dummy_block(1, None, vec![]) + } + + fn acc1_sign_key() -> nssa::PrivateKey { + nssa::PrivateKey::try_new([1; 32]).unwrap() + } + + fn acc2_sign_key() -> nssa::PrivateKey { + nssa::PrivateKey::try_new([2; 32]).unwrap() + } + + fn acc1() -> AccountId { + AccountId::from(&PublicKey::new_from_private_key(&acc1_sign_key())) + } + + fn acc2() -> AccountId { + AccountId::from(&PublicKey::new_from_private_key(&acc2_sign_key())) + } + + #[test] + fn correct_startup() { + let home = tempdir().unwrap(); + + let storage = IndexerStore::open_db_with_genesis( + home.as_ref(), + &genesis_block(), + &nssa::V02State::new_with_genesis_accounts(&[(acc1(), 10000), (acc2(), 20000)], &[]), + ) + .unwrap(); + + let block = storage.get_block_at_id(1).unwrap(); + let final_id = storage.get_last_block_id().unwrap(); + + assert_eq!(block.header.hash, genesis_block().header.hash); + assert_eq!(final_id, 1); + } + + #[tokio::test] + async fn state_transition() { + let home = tempdir().unwrap(); + + let storage = IndexerStore::open_db_with_genesis( + home.as_ref(), + &genesis_block(), + &nssa::V02State::new_with_genesis_accounts(&[(acc1(), 10000), (acc2(), 20000)], &[]), + ) + .unwrap(); + + let mut prev_hash = genesis_block().header.hash; + + let from = acc1(); + let to = acc2(); + let sign_key = acc1_sign_key(); + + for i in 2..10 { + let tx = common::test_utils::create_transaction_native_token_transfer( + from, + i - 2, + to, + 10, + &sign_key, + ); + + let next_block = common::test_utils::produce_dummy_block( + u64::try_from(i).unwrap(), + Some(prev_hash), + vec![tx], + ); + prev_hash = next_block.header.hash; + + storage + .put_block(next_block, HeaderId::from([u8::try_from(i).unwrap(); 32])) + .await + .unwrap(); + } + + let acc1_val = storage.account_current_state(&acc1()).await.unwrap(); + let acc2_val = 
storage.account_current_state(&acc2()).await.unwrap(); + + assert_eq!(acc1_val.balance, 9920); + assert_eq!(acc2_val.balance, 20080); } } diff --git a/indexer/core/src/config.rs b/indexer/core/src/config.rs index 95e6147c..a85284cc 100644 --- a/indexer/core/src/config.rs +++ b/indexer/core/src/config.rs @@ -27,13 +27,13 @@ pub struct ClientConfig { #[derive(Debug, Clone, Serialize, Deserialize)] pub struct IndexerConfig { - /// Home dir of sequencer storage + /// Home dir of sequencer storage. pub home: PathBuf, - /// List of initial accounts data + /// List of initial accounts data. pub initial_accounts: Vec, - /// List of initial commitments + /// List of initial commitments. pub initial_commitments: Vec, - /// Sequencers signing key + /// Sequencers signing key. pub signing_key: [u8; 32], #[serde(with = "humantime_serde")] pub consensus_info_polling_interval: Duration, @@ -42,12 +42,17 @@ pub struct IndexerConfig { } impl IndexerConfig { - pub fn from_path(config_path: &Path) -> Result { - let file = File::open(config_path) - .with_context(|| format!("Failed to open indexer config at {config_path:?}"))?; + pub fn from_path(config_path: &Path) -> Result { + let file = File::open(config_path).with_context(|| { + format!("Failed to open indexer config at {}", config_path.display()) + })?; let reader = BufReader::new(file); - serde_json::from_reader(reader) - .with_context(|| format!("Failed to parse indexer config at {config_path:?}")) + serde_json::from_reader(reader).with_context(|| { + format!( + "Failed to parse indexer config at {}", + config_path.display() + ) + }) } } diff --git a/indexer/core/src/lib.rs b/indexer/core/src/lib.rs index 6d56eb18..6c96821e 100644 --- a/indexer/core/src/lib.rs +++ b/indexer/core/src/lib.rs @@ -24,14 +24,14 @@ pub struct IndexerCore { } #[derive(Clone)] -/// This struct represents one L1 block data fetched from backfilling +/// This struct represents one L1 block data fetched from backfilling. 
pub struct BackfillBlockData { l2_blocks: Vec, l1_header: HeaderId, } #[derive(Clone)] -/// This struct represents data fetched fom backfilling in one iteration +/// This struct represents data fetched from backfilling in one iteration. pub struct BackfillData { block_data: VecDeque, curr_fin_l1_lib_header: HeaderId, } @@ -52,7 +52,7 @@ impl IndexerCore { // ToDo: remove key from indexer config, use some default. let signing_key = nssa::PrivateKey::try_new(config.signing_key).unwrap(); let channel_genesis_msg_id = [0; 32]; - let start_block = hashable_data.into_pending_block(&signing_key, channel_genesis_msg_id); + let genesis_block = hashable_data.into_pending_block(&signing_key, channel_genesis_msg_id); // This is a troubling moment, because changes in key protocol can // affect this. And indexer can not reliably ask this data from sequencer @@ -94,47 +94,44 @@ impl IndexerCore { config.bedrock_client_config.auth.clone(), )?, config, - store: IndexerStore::open_db_with_genesis(&home, Some((start_block, state)))?, + store: IndexerStore::open_db_with_genesis(&home, &genesis_block, &state)?, }) } - pub async fn subscribe_parse_block_stream(&self) -> impl futures::Stream> { + pub fn subscribe_parse_block_stream(&self) -> impl futures::Stream> { async_stream::stream!
{ info!("Searching for initial header"); - let last_l1_lib_header = self.store.last_observed_l1_lib_header()?; + let last_stored_l1_lib_header = self.store.last_observed_l1_lib_header()?; - let mut prev_last_l1_lib_header = match last_l1_lib_header { - Some(last_l1_lib_header) => { - info!("Last l1 lib header found: {last_l1_lib_header}"); - last_l1_lib_header - }, - None => { - info!("Last l1 lib header not found in DB"); - info!("Searching for the start of a channel"); + let mut prev_last_l1_lib_header = if let Some(last_l1_lib_header) = last_stored_l1_lib_header { + info!("Last l1 lib header found: {last_l1_lib_header}"); + last_l1_lib_header + } else { + info!("Last l1 lib header not found in DB"); + info!("Searching for the start of a channel"); - let BackfillData { - block_data: start_buff, - curr_fin_l1_lib_header: last_l1_lib_header, - } = self.search_for_channel_start().await?; + let BackfillData { + block_data: start_buff, + curr_fin_l1_lib_header: last_l1_lib_header, + } = self.search_for_channel_start().await?; - for BackfillBlockData { - l2_blocks: l2_block_vec, - l1_header, - } in start_buff { - let mut l2_blocks_parsed_ids: Vec<_> = l2_block_vec.iter().map(|block| block.header.block_id).collect(); - l2_blocks_parsed_ids.sort(); - info!("Parsed {} L2 blocks with ids {:?}", l2_block_vec.len(), l2_blocks_parsed_ids); + for BackfillBlockData { + l2_blocks: l2_block_vec, + l1_header, + } in start_buff { + let mut l2_blocks_parsed_ids: Vec<_> = l2_block_vec.iter().map(|block| block.header.block_id).collect(); + l2_blocks_parsed_ids.sort_unstable(); + info!("Parsed {} L2 blocks with ids {:?}", l2_block_vec.len(), l2_blocks_parsed_ids); for l2_block in l2_block_vec { - self.store.put_block(l2_block.clone(), l1_header)?; + self.store.put_block(l2_block.clone(), l1_header).await?; - yield Ok(l2_block); - } + yield Ok(l2_block); } + } - last_l1_lib_header - }, + last_l1_lib_header }; info!("Searching for initial header finished"); @@ -157,11 +154,11 @@ impl 
IndexerCore { l1_header: header, } in buff { let mut l2_blocks_parsed_ids: Vec<_> = l2_block_vec.iter().map(|block| block.header.block_id).collect(); - l2_blocks_parsed_ids.sort(); + l2_blocks_parsed_ids.sort_unstable(); info!("Parsed {} L2 blocks with ids {:?}", l2_block_vec.len(), l2_blocks_parsed_ids); for l2_block in l2_block_vec { - self.store.put_block(l2_block.clone(), header)?; + self.store.put_block(l2_block.clone(), header).await?; yield Ok(l2_block); } @@ -177,20 +174,20 @@ impl IndexerCore { async fn get_next_lib(&self, prev_lib: HeaderId) -> Result { loop { let next_lib = self.get_lib().await?; - if next_lib != prev_lib { - break Ok(next_lib); - } else { + if next_lib == prev_lib { info!( "Wait {:?} to not spam the node", self.config.consensus_info_polling_interval ); tokio::time::sleep(self.config.consensus_info_polling_interval).await; + } else { + break Ok(next_lib); } } } /// WARNING: depending on channel state, - /// may take indefinite amount of time + /// may take indefinite amount of time. pub async fn search_for_channel_start(&self) -> Result { let mut curr_last_l1_lib_header = self.get_lib().await?; let mut backfill_start = curr_last_l1_lib_header; @@ -204,15 +201,13 @@ impl IndexerCore { let mut cycle_header = curr_last_l1_lib_header; loop { - let cycle_block = - if let Some(block) = self.bedrock_client.get_block_by_id(cycle_header).await? { - block - } else { - // First run can reach root easily - // so here we are optimistic about L1 - // failing to get parent. - break; - }; + let Some(cycle_block) = self.bedrock_client.get_block_by_id(cycle_header).await? + else { + // First run can reach root easily + // so here we are optimistic about L1 + // failing to get parent. + break; + }; // It would be better to have id, but block does not have it, so slot will do. 
info!( @@ -289,10 +284,9 @@ impl IndexerCore { if cycle_block.header().id() == last_fin_l1_lib_header { break; - } else { - // Step back to parent - cycle_header = cycle_block.header().parent(); } + // Step back to parent + cycle_header = cycle_block.header().parent(); // It would be better to have id, but block does not have it, so slot will do. info!( @@ -324,6 +318,10 @@ fn parse_block_owned( decoded_channel_id: &ChannelId, ) -> (Vec, HeaderId) { ( + #[expect( + clippy::wildcard_enum_match_arm, + reason = "We are only interested in channel inscription ops, so it's fine to ignore the rest" + )] l1_block .transactions() .flat_map(|tx| { @@ -335,7 +333,7 @@ fn parse_block_owned( }) if channel_id == decoded_channel_id => { borsh::from_slice::(inscription) .inspect_err(|err| { - error!("Failed to deserialize our inscription with err: {err:#?}") + error!("Failed to deserialize our inscription with err: {err:#?}"); }) .ok() } diff --git a/indexer/service/Cargo.toml b/indexer/service/Cargo.toml index 925482e8..911121fd 100644 --- a/indexer/service/Cargo.toml +++ b/indexer/service/Cargo.toml @@ -4,6 +4,9 @@ version = "0.1.0" edition = "2024" license = { workspace = true } +[lints] +workspace = true + [dependencies] indexer_service_protocol = { workspace = true, features = ["convert"] } indexer_service_rpc = { workspace = true, features = ["server"] } diff --git a/indexer/service/Dockerfile b/indexer/service/Dockerfile index bf77ffd2..bb93c2f2 100644 --- a/indexer/service/Dockerfile +++ b/indexer/service/Dockerfile @@ -1,5 +1,5 @@ # Chef stage - uses pre-built cargo-chef image -FROM lukemathwalker/cargo-chef:latest-rust-1.91.1-slim-trixie AS chef +FROM lukemathwalker/cargo-chef:latest-rust-1.94.0-slim-trixie AS chef # Install build dependencies RUN apt-get update && apt-get install -y \ diff --git a/indexer/service/protocol/Cargo.toml b/indexer/service/protocol/Cargo.toml index f9a3c2ad..2ee61b74 100644 --- a/indexer/service/protocol/Cargo.toml +++ 
b/indexer/service/protocol/Cargo.toml @@ -4,6 +4,9 @@ version = "0.1.0" edition = "2024" license = { workspace = true } +[lints] +workspace = true + [dependencies] nssa_core = { workspace = true, optional = true, features = ["host"] } nssa = { workspace = true, optional = true } diff --git a/indexer/service/protocol/src/convert.rs b/indexer/service/protocol/src/convert.rs index 1de28aa3..499baa4c 100644 --- a/indexer/service/protocol/src/convert.rs +++ b/indexer/service/protocol/src/convert.rs @@ -1,6 +1,14 @@ -//! Conversions between indexer_service_protocol types and nssa/nssa_core types +//! Conversions between `indexer_service_protocol` types and `nssa/nssa_core` types. -use crate::*; +use nssa_core::account::Nonce; + +use crate::{ + Account, AccountId, BedrockStatus, Block, BlockBody, BlockHeader, Ciphertext, Commitment, + CommitmentSetDigest, Data, EncryptedAccountData, EphemeralPublicKey, HashType, MantleMsgId, + Nullifier, PrivacyPreservingMessage, PrivacyPreservingTransaction, ProgramDeploymentMessage, + ProgramDeploymentTransaction, ProgramId, Proof, PublicKey, PublicMessage, PublicTransaction, + Signature, Transaction, WitnessSet, +}; // ============================================================================ // Account-related conversions @@ -29,7 +37,7 @@ impl From for AccountId { impl From for nssa_core::account::AccountId { fn from(value: AccountId) -> Self { let AccountId { value } = value; - nssa_core::account::AccountId::new(value) + Self::new(value) } } @@ -46,7 +54,7 @@ impl From for Account { program_owner: program_owner.into(), balance, data: data.into(), - nonce, + nonce: nonce.0, } } } @@ -62,11 +70,11 @@ impl TryFrom for nssa_core::account::Account { nonce, } = value; - Ok(nssa_core::account::Account { + Ok(Self { program_owner: program_owner.into(), balance, data: data.try_into()?, - nonce, + nonce: Nonce(nonce), }) } } @@ -81,7 +89,7 @@ impl TryFrom for nssa_core::account::Data { type Error = nssa_core::account::data::DataTooBigError; 
fn try_from(value: Data) -> Result { - nssa_core::account::Data::try_from(value.0) + Self::try_from(value.0) } } @@ -97,7 +105,7 @@ impl From for Commitment { impl From for nssa_core::Commitment { fn from(value: Commitment) -> Self { - nssa_core::Commitment::from_byte_array(value.0) + Self::from_byte_array(value.0) } } @@ -109,7 +117,7 @@ impl From for Nullifier { impl From for nssa_core::Nullifier { fn from(value: Nullifier) -> Self { - nssa_core::Nullifier::from_byte_array(value.0) + Self::from_byte_array(value.0) } } @@ -137,7 +145,7 @@ impl From for Ciphertext { impl From for nssa_core::encryption::Ciphertext { fn from(value: Ciphertext) -> Self { - nssa_core::encryption::Ciphertext::from_inner(value.0) + Self::from_inner(value.0) } } @@ -149,7 +157,7 @@ impl From for EphemeralPublicKey { impl From for nssa_core::encryption::EphemeralPublicKey { fn from(value: EphemeralPublicKey) -> Self { - nssa_core::encryption::shared_key_derivation::Secp256k1Point(value.0) + Self(value.0) } } @@ -167,7 +175,7 @@ impl From for Signature { impl From for nssa::Signature { fn from(value: Signature) -> Self { let Signature(sig_value) = value; - nssa::Signature { value: sig_value } + Self { value: sig_value } } } @@ -181,7 +189,7 @@ impl TryFrom for nssa::PublicKey { type Error = nssa::error::NssaError; fn try_from(value: PublicKey) -> Result { - nssa::PublicKey::try_new(value.0) + Self::try_new(value.0) } } @@ -197,7 +205,7 @@ impl From for Proof { impl From for nssa::privacy_preserving_transaction::circuit::Proof { fn from(value: Proof) -> Self { - nssa::privacy_preserving_transaction::circuit::Proof::from_inner(value.0) + Self::from_inner(value.0) } } @@ -244,7 +252,7 @@ impl From for PublicMessage { Self { program_id: program_id.into(), account_ids: account_ids.into_iter().map(Into::into).collect(), - nonces, + nonces: nonces.iter().map(|x| x.0).collect(), instruction_data, } } @@ -261,7 +269,10 @@ impl From for nssa::public_transaction::Message { Self::new_preserialized( 
program_id.into(), account_ids.into_iter().map(Into::into).collect(), - nonces, + nonces + .iter() + .map(|x| nssa_core::account::Nonce(*x)) + .collect(), instruction_data, ) } @@ -279,7 +290,7 @@ impl From for PrivacyPre } = value; Self { public_account_ids: public_account_ids.into_iter().map(Into::into).collect(), - nonces, + nonces: nonces.iter().map(|x| x.0).collect(), public_post_states: public_post_states.into_iter().map(Into::into).collect(), encrypted_private_post_states: encrypted_private_post_states .into_iter() @@ -308,7 +319,10 @@ impl TryFrom for nssa::privacy_preserving_transaction: } = value; Ok(Self { public_account_ids: public_account_ids.into_iter().map(Into::into).collect(), - nonces, + nonces: nonces + .iter() + .map(|x| nssa_core::account::Nonce(*x)) + .collect(), public_post_states: public_post_states .into_iter() .map(TryInto::try_into) @@ -499,12 +513,12 @@ impl From for nssa::ProgramDeploymentTransaction { impl From for Transaction { fn from(value: common::transaction::NSSATransaction) -> Self { match value { - common::transaction::NSSATransaction::Public(tx) => Transaction::Public(tx.into()), + common::transaction::NSSATransaction::Public(tx) => Self::Public(tx.into()), common::transaction::NSSATransaction::PrivacyPreserving(tx) => { - Transaction::PrivacyPreserving(tx.into()) + Self::PrivacyPreserving(tx.into()) } common::transaction::NSSATransaction::ProgramDeployment(tx) => { - Transaction::ProgramDeployment(tx.into()) + Self::ProgramDeployment(tx.into()) } } } @@ -515,15 +529,9 @@ impl TryFrom for common::transaction::NSSATransaction { fn try_from(value: Transaction) -> Result { match value { - Transaction::Public(tx) => { - Ok(common::transaction::NSSATransaction::Public(tx.try_into()?)) - } - Transaction::PrivacyPreserving(tx) => Ok( - common::transaction::NSSATransaction::PrivacyPreserving(tx.try_into()?), - ), - Transaction::ProgramDeployment(tx) => Ok( - common::transaction::NSSATransaction::ProgramDeployment(tx.into()), - ), + 
Transaction::Public(tx) => Ok(Self::Public(tx.try_into()?)), + Transaction::PrivacyPreserving(tx) => Ok(Self::PrivacyPreserving(tx.try_into()?)), + Transaction::ProgramDeployment(tx) => Ok(Self::ProgramDeployment(tx.into())), } } } @@ -677,6 +685,6 @@ impl From for HashType { impl From for common::HashType { fn from(value: HashType) -> Self { - common::HashType(value.0) + Self(value.0) } } diff --git a/indexer/service/protocol/src/lib.rs b/indexer/service/protocol/src/lib.rs index 8fdd3289..98ef5650 100644 --- a/indexer/service/protocol/src/lib.rs +++ b/indexer/service/protocol/src/lib.rs @@ -14,6 +14,40 @@ use serde_with::{DeserializeFromStr, SerializeDisplay}; #[cfg(feature = "convert")] mod convert; +mod base64 { + use base64::prelude::{BASE64_STANDARD, Engine as _}; + use serde::{Deserialize as _, Deserializer, Serialize as _, Serializer}; + + pub mod arr { + use super::{Deserializer, Serializer}; + + pub fn serialize(v: &[u8], s: S) -> Result { + super::serialize(v, s) + } + + pub fn deserialize<'de, const N: usize, D: Deserializer<'de>>( + d: D, + ) -> Result<[u8; N], D::Error> { + let vec = super::deserialize(d)?; + vec.try_into().map_err(|_bytes| { + serde::de::Error::custom(format!("Invalid length, expected {N} bytes")) + }) + } + } + + pub fn serialize(v: &[u8], s: S) -> Result { + let base64 = BASE64_STANDARD.encode(v); + String::serialize(&base64, s) + } + + pub fn deserialize<'de, D: Deserializer<'de>>(d: D) -> Result, D::Error> { + let base64 = String::deserialize(d)?; + BASE64_STANDARD + .decode(base64.as_bytes()) + .map_err(serde::de::Error::custom) + } +} + pub type Nonce = u128; #[derive( @@ -23,26 +57,43 @@ pub struct ProgramId(pub [u32; 8]); impl Display for ProgramId { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - let bytes: Vec = self.0.iter().flat_map(|n| n.to_be_bytes()).collect(); + let bytes: Vec = self.0.iter().flat_map(|n| n.to_le_bytes()).collect(); write!(f, "{}", bytes.to_base58()) } } +#[derive(Debug)] +pub 
enum ProgramIdParseError { + InvalidBase58(base58::FromBase58Error), + InvalidLength(usize), +} + +impl Display for ProgramIdParseError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::InvalidBase58(err) => write!(f, "invalid base58: {err:?}"), + Self::InvalidLength(len) => { + write!(f, "invalid length: expected 32 bytes, got {len}") + } + } + } +} + impl FromStr for ProgramId { - type Err = hex::FromHexError; + type Err = ProgramIdParseError; fn from_str(s: &str) -> Result { let bytes = s .from_base58() - .map_err(|_| hex::FromHexError::InvalidStringLength)?; + .map_err(ProgramIdParseError::InvalidBase58)?; if bytes.len() != 32 { - return Err(hex::FromHexError::InvalidStringLength); + return Err(ProgramIdParseError::InvalidLength(bytes.len())); } - let mut arr = [0u32; 8]; + let mut arr = [0_u32; 8]; for (i, chunk) in bytes.chunks_exact(4).enumerate() { - arr[i] = u32::from_be_bytes(chunk.try_into().unwrap()); + arr[i] = u32::from_le_bytes(chunk.try_into().unwrap()); } - Ok(ProgramId(arr)) + Ok(Self(arr)) } } @@ -72,9 +123,9 @@ impl FromStr for AccountId { bytes.len() )); } - let mut value = [0u8; 32]; + let mut value = [0_u8; 32]; value.copy_from_slice(&bytes); - Ok(AccountId { value }) + Ok(Self { value }) } } @@ -121,9 +172,9 @@ impl FromStr for Signature { type Err = hex::FromHexError; fn from_str(s: &str) -> Result { - let mut bytes = [0u8; 64]; + let mut bytes = [0_u8; 64]; hex::decode_to_slice(s, &mut bytes)?; - Ok(Signature(bytes)) + Ok(Self(bytes)) } } @@ -140,12 +191,14 @@ pub enum Transaction { } impl Transaction { - /// Get the hash of the transaction - pub fn hash(&self) -> &self::HashType { + /// Get the hash of the transaction. 
+ #[expect(clippy::same_name_method, reason = "This is handy")] + #[must_use] + pub const fn hash(&self) -> &self::HashType { match self { - Transaction::Public(tx) => &tx.hash, - Transaction::PrivacyPreserving(tx) => &tx.hash, - Transaction::ProgramDeployment(tx) => &tx.hash, + Self::Public(tx) => &tx.hash, + Self::PrivacyPreserving(tx) => &tx.hash, + Self::ProgramDeployment(tx) => &tx.hash, } } } @@ -283,9 +336,9 @@ impl FromStr for HashType { type Err = hex::FromHexError; fn from_str(s: &str) -> Result { - let mut bytes = [0u8; 32]; + let mut bytes = [0_u8; 32]; hex::decode_to_slice(s, &mut bytes)?; - Ok(HashType(bytes)) + Ok(Self(bytes)) } } @@ -302,37 +355,3 @@ pub enum BedrockStatus { Safe, Finalized, } - -mod base64 { - use base64::prelude::{BASE64_STANDARD, Engine as _}; - use serde::{Deserialize, Deserializer, Serialize, Serializer}; - - pub mod arr { - use super::*; - - pub fn serialize(v: &[u8], s: S) -> Result { - super::serialize(v, s) - } - - pub fn deserialize<'de, const N: usize, D: Deserializer<'de>>( - d: D, - ) -> Result<[u8; N], D::Error> { - let vec = super::deserialize(d)?; - vec.try_into().map_err(|_| { - serde::de::Error::custom(format!("Invalid length, expected {N} bytes")) - }) - } - } - - pub fn serialize(v: &[u8], s: S) -> Result { - let base64 = BASE64_STANDARD.encode(v); - String::serialize(&base64, s) - } - - pub fn deserialize<'de, D: Deserializer<'de>>(d: D) -> Result, D::Error> { - let base64 = String::deserialize(d)?; - BASE64_STANDARD - .decode(base64.as_bytes()) - .map_err(serde::de::Error::custom) - } -} diff --git a/indexer/service/rpc/Cargo.toml b/indexer/service/rpc/Cargo.toml index 0fa72635..e52191bd 100644 --- a/indexer/service/rpc/Cargo.toml +++ b/indexer/service/rpc/Cargo.toml @@ -4,6 +4,9 @@ version = "0.1.0" edition = "2024" license = { workspace = true } +[lints] +workspace = true + [dependencies] indexer_service_protocol.workspace = true diff --git a/indexer/service/rpc/src/lib.rs b/indexer/service/rpc/src/lib.rs 
index 2a67ac50..be0e45ca 100644 --- a/indexer/service/rpc/src/lib.rs +++ b/indexer/service/rpc/src/lib.rs @@ -44,16 +44,16 @@ pub trait Rpc { #[method(name = "getBlocks")] async fn get_blocks( &self, - before: Option, - limit: u32, + before: Option, + limit: u64, ) -> Result, ErrorObjectOwned>; #[method(name = "getTransactionsByAccount")] async fn get_transactions_by_account( &self, account_id: AccountId, - limit: u32, - offset: u32, + offset: u64, + limit: u64, ) -> Result, ErrorObjectOwned>; // ToDo: expand healthcheck response into some kind of report diff --git a/indexer/service/src/lib.rs b/indexer/service/src/lib.rs index 5741f003..1f87e929 100644 --- a/indexer/service/src/lib.rs +++ b/indexer/service/src/lib.rs @@ -16,14 +16,15 @@ pub struct IndexerHandle { server_handle: Option, } impl IndexerHandle { - fn new(addr: SocketAddr, server_handle: jsonrpsee::server::ServerHandle) -> Self { + const fn new(addr: SocketAddr, server_handle: jsonrpsee::server::ServerHandle) -> Self { Self { addr, server_handle: Some(server_handle), } } - pub fn addr(&self) -> SocketAddr { + #[must_use] + pub const fn addr(&self) -> SocketAddr { self.addr } @@ -33,9 +34,14 @@ impl IndexerHandle { .take() .expect("Indexer server handle is set"); - handle.stopped().await + handle.stopped().await; } + #[expect( + clippy::redundant_closure_for_method_calls, + reason = "Clippy suggested path jsonrpsee::jsonrpsee_server::ServerHandle is not accessible" + )] + #[must_use] pub fn is_stopped(&self) -> bool { self.server_handle .as_ref() diff --git a/indexer/service/src/main.rs b/indexer/service/src/main.rs index e4d18feb..b34777b4 100644 --- a/indexer/service/src/main.rs +++ b/indexer/service/src/main.rs @@ -15,6 +15,10 @@ struct Args { } #[tokio::main] +#[expect( + clippy::integer_division_remainder_used, + reason = "Generated by select! 
macro, can't be easily rewritten to avoid this lint" +)] async fn main() -> Result<()> { env_logger::init(); @@ -26,10 +30,10 @@ async fn main() -> Result<()> { let indexer_handle = indexer_service::run_server(config, port).await?; tokio::select! { - _ = cancellation_token.cancelled() => { + () = cancellation_token.cancelled() => { info!("Shutting down server..."); } - _ = indexer_handle.stopped() => { + () = indexer_handle.stopped() => { error!("Server stopped unexpectedly"); } } diff --git a/indexer/service/src/mock_service.rs b/indexer/service/src/mock_service.rs index 5f0cfbf2..bc131740 100644 --- a/indexer/service/src/mock_service.rs +++ b/indexer/service/src/mock_service.rs @@ -1,3 +1,11 @@ +#![expect( + clippy::as_conversions, + clippy::arithmetic_side_effects, + clippy::cast_possible_truncation, + clippy::cast_lossless, + clippy::integer_division_remainder_used, + reason = "Mock service uses intentional casts and format patterns for test data generation" +)] use std::collections::HashMap; use indexer_service_protocol::{ @@ -9,7 +17,7 @@ use indexer_service_protocol::{ }; use jsonrpsee::{core::SubscriptionResult, types::ErrorObjectOwned}; -/// A mock implementation of the IndexerService RPC for testing purposes. +/// A mock implementation of the `IndexerService` RPC for testing purposes. 
pub struct MockIndexerService { blocks: Vec, accounts: HashMap, @@ -17,6 +25,7 @@ pub struct MockIndexerService { } impl MockIndexerService { + #[must_use] pub fn new_with_mock_blocks() -> Self { let mut blocks = Vec::new(); let mut accounts = HashMap::new(); @@ -25,7 +34,7 @@ impl MockIndexerService { // Create some mock accounts let account_ids: Vec = (0..5) .map(|i| { - let mut value = [0u8; 32]; + let mut value = [0_u8; 32]; value[0] = i; AccountId { value } }) @@ -44,11 +53,11 @@ impl MockIndexerService { } // Create 100 blocks with transactions - let mut prev_hash = HashType([0u8; 32]); + let mut prev_hash = HashType([0_u8; 32]); for block_id in 1..=100 { let block_hash = { - let mut hash = [0u8; 32]; + let mut hash = [0_u8; 32]; hash[0] = block_id as u8; hash[1] = 0xff; HashType(hash) @@ -61,7 +70,7 @@ impl MockIndexerService { for tx_idx in 0..num_txs { let tx_hash = { - let mut hash = [0u8; 32]; + let mut hash = [0_u8; 32]; hash[0] = block_id as u8; hash[1] = tx_idx as u8; HashType(hash) @@ -73,7 +82,7 @@ impl MockIndexerService { 0 | 1 => Transaction::Public(PublicTransaction { hash: tx_hash, message: PublicMessage { - program_id: ProgramId([1u32; 8]), + program_id: ProgramId([1_u32; 8]), account_ids: vec![ account_ids[tx_idx as usize % account_ids.len()], account_ids[(tx_idx as usize + 1) % account_ids.len()], @@ -95,7 +104,7 @@ impl MockIndexerService { ], nonces: vec![block_id as u128], public_post_states: vec![Account { - program_owner: ProgramId([1u32; 8]), + program_owner: ProgramId([1_u32; 8]), balance: 500, data: Data(vec![0xdd, 0xee]), nonce: block_id as u128, @@ -136,8 +145,8 @@ impl MockIndexerService { block_id, prev_block_hash: prev_hash, hash: block_hash, - timestamp: 1704067200000 + (block_id * 12000), // ~12 seconds per block - signature: Signature([0u8; 64]), + timestamp: 1_704_067_200_000 + (block_id * 12_000), // ~12 seconds per block + signature: Signature([0_u8; 64]), }, body: BlockBody { transactions: block_transactions, @@ -185,7 
+194,7 @@ impl indexer_service_rpc::RpcServer for MockIndexerService { .last() .map(|bl| bl.header.block_id) .ok_or_else(|| { - ErrorObjectOwned::owned(-32001, "Last block not found".to_string(), None::<()>) + ErrorObjectOwned::owned(-32001, "Last block not found".to_owned(), None::<()>) }) } @@ -197,7 +206,7 @@ impl indexer_service_rpc::RpcServer for MockIndexerService { .ok_or_else(|| { ErrorObjectOwned::owned( -32001, - format!("Block with ID {} not found", block_id), + format!("Block with ID {block_id} not found"), None::<()>, ) }) @@ -227,15 +236,18 @@ impl indexer_service_rpc::RpcServer for MockIndexerService { async fn get_blocks( &self, - before: Option, - limit: u32, + before: Option, + limit: u64, ) -> Result, ErrorObjectOwned> { - let start_id = before.map_or_else(|| self.blocks.len() as u64, |id| id.saturating_sub(1)); + let start_id = before.map_or_else( + || self.blocks.len(), + |id| usize::try_from(id.saturating_sub(1)).expect("u64 should fit in usize"), + ); let result = (1..=start_id) .rev() .take(limit as usize) - .map_while(|block_id| self.blocks.get(block_id as usize - 1).cloned()) + .map_while(|block_id| self.blocks.get(block_id - 1).cloned()) .collect(); Ok(result) @@ -244,8 +256,8 @@ impl indexer_service_rpc::RpcServer for MockIndexerService { async fn get_transactions_by_account( &self, account_id: AccountId, - limit: u32, - offset: u32, + offset: u64, + limit: u64, ) -> Result, ErrorObjectOwned> { let mut account_txs: Vec<_> = self .transactions diff --git a/indexer/service/src/service.rs b/indexer/service/src/service.rs index da3e7cbd..256ef33d 100644 --- a/indexer/service/src/service.rs +++ b/indexer/service/src/service.rs @@ -74,7 +74,8 @@ impl indexer_service_rpc::RpcServer for IndexerService { Ok(self .indexer .store - .get_account_final(&account_id.into()) + .account_current_state(&account_id.into()) + .await .map_err(db_error)? 
.into()) } @@ -90,19 +91,19 @@ impl indexer_service_rpc::RpcServer for IndexerService { async fn get_blocks( &self, - before: Option, - limit: u32, + before: Option, + limit: u64, ) -> Result, ErrorObjectOwned> { let blocks = self .indexer .store - .get_block_batch(before, limit as u64) + .get_block_batch(before, limit) .map_err(db_error)?; let mut block_res = vec![]; for block in blocks { - block_res.push(block.into()) + block_res.push(block.into()); } Ok(block_res) @@ -111,19 +112,19 @@ impl indexer_service_rpc::RpcServer for IndexerService { async fn get_transactions_by_account( &self, account_id: AccountId, - limit: u32, - offset: u32, + offset: u64, + limit: u64, ) -> Result, ErrorObjectOwned> { let transactions = self .indexer .store - .get_transactions_by_account(account_id.value, offset as u64, limit as u64) + .get_transactions_by_account(account_id.value, offset, limit) .map_err(db_error)?; let mut tx_res = vec![]; for tx in transactions { - tx_res.push(tx.into()) + tx_res.push(tx.into()); } Ok(tx_res) @@ -131,7 +132,11 @@ impl indexer_service_rpc::RpcServer for IndexerService { async fn healthcheck(&self) -> Result<(), ErrorObjectOwned> { // Checking, that indexer can calculate last state - let _ = self.indexer.store.final_state().map_err(db_error)?; + let _ = self + .indexer + .store + .recalculate_final_state() + .map_err(db_error)?; Ok(()) } @@ -154,8 +159,10 @@ impl SubscriptionService { pub async fn add_subscription(&self, subscription: Subscription) -> Result<()> { let guard = self.parts.load(); - if let Err(err) = guard.new_subscription_sender.send(subscription) { - error!("Failed to send new subscription to subscription service with error: {err:#?}"); + if let Err(send_err) = guard.new_subscription_sender.send(subscription) { + error!( + "Failed to send new subscription to subscription service with error: {send_err:#?}" + ); // Respawn the subscription service loop if it has finished (either with error or panic) if guard.handle.is_finished() { @@ 
-177,8 +184,8 @@ impl SubscriptionService { } } - bail!(err); - }; + bail!(send_err) + } Ok(()) } @@ -190,8 +197,12 @@ impl SubscriptionService { let handle = tokio::spawn(async move { let mut subscribers = Vec::new(); - let mut block_stream = pin!(indexer.subscribe_parse_block_stream().await); + let mut block_stream = pin!(indexer.subscribe_parse_block_stream()); + #[expect( + clippy::integer_division_remainder_used, + reason = "Generated by select! macro, can't be easily rewritten to avoid this lint" + )] loop { tokio::select! { sub = sub_receiver.recv() => { @@ -246,7 +257,7 @@ struct Subscription { } impl Subscription { - fn new(sink: SubscriptionSink) -> Self { + const fn new(sink: SubscriptionSink) -> Self { Self { sink, _marker: std::marker::PhantomData, @@ -273,6 +284,7 @@ impl Drop for Subscription { } } +#[must_use] pub fn not_yet_implemented_error() -> ErrorObjectOwned { ErrorObject::owned( ErrorCode::InternalError.code(), @@ -281,10 +293,14 @@ pub fn not_yet_implemented_error() -> ErrorObjectOwned { ) } +#[expect( + clippy::needless_pass_by_value, + reason = "Error is consumed to extract details for error response" +)] fn db_error(err: anyhow::Error) -> ErrorObjectOwned { ErrorObjectOwned::owned( ErrorCode::InternalError.code(), - "DBError".to_string(), + "DBError".to_owned(), Some(format!("{err:#?}")), ) } diff --git a/integration_tests/Cargo.toml b/integration_tests/Cargo.toml index ac14d183..b18b782f 100644 --- a/integration_tests/Cargo.toml +++ b/integration_tests/Cargo.toml @@ -4,6 +4,9 @@ version = "0.1.0" edition = "2024" license = { workspace = true } +[lints] +workspace = true + [dependencies] nssa_core = { workspace = true, features = ["host"] } nssa.workspace = true diff --git a/integration_tests/src/lib.rs b/integration_tests/src/lib.rs index f0111735..6929ec92 100644 --- a/integration_tests/src/lib.rs +++ b/integration_tests/src/lib.rs @@ -2,15 +2,15 @@ use std::{net::SocketAddr, path::PathBuf, sync::LazyLock}; -use anyhow::{Context, 
Result, bail}; -use base64::{Engine, engine::general_purpose::STANDARD as BASE64}; +use anyhow::{Context as _, Result, bail}; +use base64::{Engine as _, engine::general_purpose::STANDARD as BASE64}; use common::{HashType, sequencer_client::SequencerClient, transaction::NSSATransaction}; use futures::FutureExt as _; use indexer_service::IndexerHandle; use log::{debug, error, warn}; use nssa::{AccountId, PrivacyPreservingTransaction}; use nssa_core::Commitment; -use sequencer_core::indexer_client::{IndexerClient, IndexerClientTrait}; +use sequencer_core::indexer_client::{IndexerClient, IndexerClientTrait as _}; use sequencer_runner::SequencerHandle; use tempfile::TempDir; use testcontainers::compose::DockerCompose; @@ -52,7 +52,8 @@ impl TestContext { Self::builder().build().await } - pub fn builder() -> TestContextBuilder { + #[must_use] + pub const fn builder() -> TestContextBuilder { TestContextBuilder::new() } @@ -120,6 +121,10 @@ impl TestContext { // Setting port to 0 to avoid conflicts between parallel tests, actual port will be retrieved after container is up .with_env("PORT", "0"); + #[expect( + clippy::items_after_statements, + reason = "This is more readable is this function used just after its definition" + )] async fn up_and_retrieve_port(compose: &mut DockerCompose) -> Result { compose .up() @@ -151,10 +156,12 @@ impl TestContext { } let mut port = None; - let mut attempt = 0; - let max_attempts = 5; + let mut attempt = 0_u32; + let max_attempts = 5_u32; while port.is_none() && attempt < max_attempts { - attempt += 1; + attempt = attempt + .checked_add(1) + .expect("We check that attempt < max_attempts, so this won't overflow"); match up_and_retrieve_port(&mut compose).await { Ok(p) => { port = Some(p); @@ -181,7 +188,10 @@ impl TestContext { let temp_indexer_dir = tempfile::tempdir().context("Failed to create temp dir for indexer home")?; - debug!("Using temp indexer home at {:?}", temp_indexer_dir.path()); + debug!( + "Using temp indexer home at {}", 
+ temp_indexer_dir.path().display() + ); let indexer_config = config::indexer_config( bedrock_addr, @@ -206,8 +216,8 @@ impl TestContext { tempfile::tempdir().context("Failed to create temp dir for sequencer home")?; debug!( - "Using temp sequencer home at {:?}", - temp_sequencer_dir.path() + "Using temp sequencer home at {}", + temp_sequencer_dir.path().display() ); let config = config::sequencer_config( @@ -260,30 +270,35 @@ impl TestContext { } /// Get reference to the wallet. - pub fn wallet(&self) -> &WalletCore { + #[must_use] + pub const fn wallet(&self) -> &WalletCore { &self.wallet } + #[must_use] pub fn wallet_password(&self) -> &str { &self.wallet_password } /// Get mutable reference to the wallet. - pub fn wallet_mut(&mut self) -> &mut WalletCore { + pub const fn wallet_mut(&mut self) -> &mut WalletCore { &mut self.wallet } /// Get reference to the sequencer client. - pub fn sequencer_client(&self) -> &SequencerClient { + #[must_use] + pub const fn sequencer_client(&self) -> &SequencerClient { &self.sequencer_client } /// Get reference to the indexer client. - pub fn indexer_client(&self) -> &IndexerClient { + #[must_use] + pub const fn indexer_client(&self) -> &IndexerClient { &self.indexer_client } /// Get existing public account IDs in the wallet. + #[must_use] pub fn existing_public_accounts(&self) -> Vec { self.wallet .storage() @@ -293,6 +308,7 @@ impl TestContext { } /// Get existing private account IDs in the wallet. + #[must_use] pub fn existing_private_accounts(&self) -> Vec { self.wallet .storage() @@ -352,7 +368,7 @@ impl Drop for TestContext { } } -/// A test context to be used in normal #[test] tests +/// A test context to be used in normal #[test] tests. 
pub struct BlockingTestContext { ctx: Option, runtime: tokio::runtime::Runtime, @@ -368,7 +384,7 @@ impl BlockingTestContext { }) } - pub fn ctx(&self) -> &TestContext { + pub const fn ctx(&self) -> &TestContext { self.ctx.as_ref().expect("TestContext is set") } } @@ -379,19 +395,21 @@ pub struct TestContextBuilder { } impl TestContextBuilder { - fn new() -> Self { + const fn new() -> Self { Self { initial_data: None, sequencer_partial_config: None, } } + #[must_use] pub fn with_initial_data(mut self, initial_data: config::InitialData) -> Self { self.initial_data = Some(initial_data); self } - pub fn with_sequencer_partial_config( + #[must_use] + pub const fn with_sequencer_partial_config( mut self, sequencer_partial_config: config::SequencerPartialConfig, ) -> Self { @@ -419,18 +437,24 @@ impl Drop for BlockingTestContext { if let Some(ctx) = ctx.take() { drop(ctx); } - }) + }); } } +#[must_use] pub fn format_public_account_id(account_id: AccountId) -> String { format!("Public/{account_id}") } +#[must_use] pub fn format_private_account_id(account_id: AccountId) -> String { format!("Private/{account_id}") } +#[expect( + clippy::wildcard_enum_match_arm, + reason = "We want the code to panic if the transaction type is not PrivacyPreserving" +)] pub async fn fetch_privacy_preserving_tx( seq_client: &SequencerClient, tx_hash: HashType, diff --git a/integration_tests/tests/account.rs b/integration_tests/tests/account.rs index 36dcca5e..3f1d0993 100644 --- a/integration_tests/tests/account.rs +++ b/integration_tests/tests/account.rs @@ -1,3 +1,8 @@ +#![expect( + clippy::tests_outside_test_module, + reason = "We don't care about these in tests" +)] + use anyhow::Result; use integration_tests::TestContext; use log::info; @@ -25,7 +30,7 @@ async fn get_existing_account() -> Result<()> { ); assert_eq!(account.balance, 10000); assert!(account.data.is_empty()); - assert_eq!(account.nonce, 0); + assert_eq!(account.nonce.0, 0); info!("Successfully retrieved account with correct 
details"); @@ -36,7 +41,7 @@ async fn get_existing_account() -> Result<()> { async fn new_public_account_with_label() -> Result<()> { let mut ctx = TestContext::new().await?; - let label = "my-test-public-account".to_string(); + let label = "my-test-public-account".to_owned(); let command = Command::Account(AccountSubcommand::New(NewSubcommand::Public { cci: None, label: Some(label.clone()), @@ -45,9 +50,8 @@ async fn new_public_account_with_label() -> Result<()> { let result = execute_subcommand(ctx.wallet_mut(), command).await?; // Extract the account_id from the result - let account_id = match result { - wallet::cli::SubcommandReturnValue::RegisterAccount { account_id } => account_id, - _ => panic!("Expected RegisterAccount return value"), + let wallet::cli::SubcommandReturnValue::RegisterAccount { account_id } = result else { + panic!("Expected RegisterAccount return value") }; // Verify the label was stored @@ -69,7 +73,7 @@ async fn new_public_account_with_label() -> Result<()> { async fn new_private_account_with_label() -> Result<()> { let mut ctx = TestContext::new().await?; - let label = "my-test-private-account".to_string(); + let label = "my-test-private-account".to_owned(); let command = Command::Account(AccountSubcommand::New(NewSubcommand::Private { cci: None, label: Some(label.clone()), @@ -78,9 +82,9 @@ async fn new_private_account_with_label() -> Result<()> { let result = execute_subcommand(ctx.wallet_mut(), command).await?; // Extract the account_id from the result - let account_id = match result { - wallet::cli::SubcommandReturnValue::RegisterAccount { account_id } => account_id, - _ => panic!("Expected RegisterAccount return value"), + + let wallet::cli::SubcommandReturnValue::RegisterAccount { account_id } = result else { + panic!("Expected RegisterAccount return value") }; // Verify the label was stored @@ -110,9 +114,9 @@ async fn new_public_account_without_label() -> Result<()> { let result = execute_subcommand(ctx.wallet_mut(), 
command).await?; // Extract the account_id from the result - let account_id = match result { - wallet::cli::SubcommandReturnValue::RegisterAccount { account_id } => account_id, - _ => panic!("Expected RegisterAccount return value"), + + let wallet::cli::SubcommandReturnValue::RegisterAccount { account_id } = result else { + panic!("Expected RegisterAccount return value") }; // Verify no label was stored diff --git a/integration_tests/tests/amm.rs b/integration_tests/tests/amm.rs index ecea91b3..bdb2da72 100644 --- a/integration_tests/tests/amm.rs +++ b/integration_tests/tests/amm.rs @@ -1,3 +1,9 @@ +#![expect( + clippy::shadow_unrelated, + clippy::tests_outside_test_module, + reason = "We don't care about these in tests" +)] + use std::time::Duration; use anyhow::Result; @@ -108,7 +114,7 @@ async fn amm_public() -> Result<()> { let subcommand = TokenProgramAgnosticSubcommand::New { definition_account_id: format_public_account_id(definition_account_id_1), supply_account_id: format_public_account_id(supply_account_id_1), - name: "A NAM1".to_string(), + name: "A NAM1".to_owned(), total_supply: 37, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; @@ -132,7 +138,7 @@ async fn amm_public() -> Result<()> { let subcommand = TokenProgramAgnosticSubcommand::New { definition_account_id: format_public_account_id(definition_account_id_2), supply_account_id: format_public_account_id(supply_account_id_2), - name: "A NAM2".to_string(), + name: "A NAM2".to_owned(), total_supply: 37, }; wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?; diff --git a/integration_tests/tests/auth_transfer/main.rs b/integration_tests/tests/auth_transfer/main.rs index c97008bd..62b8b836 100644 --- a/integration_tests/tests/auth_transfer/main.rs +++ b/integration_tests/tests/auth_transfer/main.rs @@ -1,2 +1,8 @@ +#![expect( + clippy::shadow_unrelated, + clippy::tests_outside_test_module, + reason = "We don't care about these in 
tests" +)] + mod private; mod public; diff --git a/integration_tests/tests/auth_transfer/private.rs b/integration_tests/tests/auth_transfer/private.rs index 6140dd7f..93e925d9 100644 --- a/integration_tests/tests/auth_transfer/private.rs +++ b/integration_tests/tests/auth_transfer/private.rs @@ -86,7 +86,7 @@ async fn private_transfer_to_foreign_account() -> Result<()> { assert_eq!(tx.message.new_commitments[0], new_commitment1); assert_eq!(tx.message.new_commitments.len(), 2); - for commitment in tx.message.new_commitments.into_iter() { + for commitment in tx.message.new_commitments { assert!(verify_commitment_is_in_state(commitment, ctx.sequencer_client()).await); } @@ -198,7 +198,7 @@ async fn private_transfer_to_owned_account_using_claiming_path() -> Result<()> { assert_eq!(tx.message.new_commitments[0], new_commitment1); assert_eq!(tx.message.new_commitments.len(), 2); - for commitment in tx.message.new_commitments.into_iter() { + for commitment in tx.message.new_commitments { assert!(verify_commitment_is_in_state(commitment, ctx.sequencer_client()).await); } @@ -353,7 +353,7 @@ async fn private_transfer_to_owned_account_continuous_run_path() -> Result<()> { // Verify commitments are in state assert_eq!(tx.message.new_commitments.len(), 2); - for commitment in tx.message.new_commitments.into_iter() { + for commitment in tx.message.new_commitments { assert!(verify_commitment_is_in_state(commitment, ctx.sequencer_client()).await); } diff --git a/integration_tests/tests/auth_transfer/public.rs b/integration_tests/tests/auth_transfer/public.rs index ed8296ec..ce73d62f 100644 --- a/integration_tests/tests/auth_transfer/public.rs +++ b/integration_tests/tests/auth_transfer/public.rs @@ -112,7 +112,7 @@ async fn failed_transfer_with_insufficient_balance() -> Result<()> { to: Some(format_public_account_id(ctx.existing_public_accounts()[1])), to_npk: None, to_vpk: None, - amount: 1000000, + amount: 1_000_000, }); let failed_send = 
wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await; @@ -241,7 +241,7 @@ async fn initialize_public_account() -> Result<()> { Program::authenticated_transfer_program().id() ); assert_eq!(account.balance, 0); - assert_eq!(account.nonce, 1); + assert_eq!(account.nonce.0, 1); assert!(account.data.is_empty()); info!("Successfully initialized public account"); diff --git a/integration_tests/tests/block_size_limit.rs b/integration_tests/tests/block_size_limit.rs index d8ee64dc..41c9fc76 100644 --- a/integration_tests/tests/block_size_limit.rs +++ b/integration_tests/tests/block_size_limit.rs @@ -1,3 +1,9 @@ +#![expect( + clippy::as_conversions, + clippy::tests_outside_test_module, + reason = "We don't care about these in tests" +)] + use std::time::Duration; use anyhow::Result; @@ -24,7 +30,7 @@ async fn reject_oversized_transaction() -> Result<()> { // Create a transaction that's definitely too large // Block size is 1 MiB (1,048,576 bytes), minus ~200 bytes for header = ~1,048,376 bytes max tx // Create a 1.1 MiB binary to ensure it exceeds the limit - let oversized_binary = vec![0u8; 1100 * 1024]; // 1.1 MiB binary + let oversized_binary = vec![0_u8; 1100 * 1024]; // 1.1 MiB binary let message = nssa::program_deployment_transaction::Message::new(oversized_binary); let tx = nssa::ProgramDeploymentTransaction::new(message); @@ -38,13 +44,12 @@ async fn reject_oversized_transaction() -> Result<()> { ); let err = result.unwrap_err(); - let err_str = format!("{:?}", err); + let err_str = format!("{err:?}"); // Check if the error contains information about transaction being too large assert!( err_str.contains("TransactionTooLarge") || err_str.contains("too large"), - "Expected TransactionTooLarge error, got: {}", - err_str + "Expected TransactionTooLarge error, got: {err_str}" ); Ok(()) @@ -63,7 +68,7 @@ async fn accept_transaction_within_limit() -> Result<()> { .await?; // Create a small program deployment that should fit - let small_binary = vec![0u8; 1024]; 
// 1 KiB binary + let small_binary = vec![0_u8; 1024]; // 1 KiB binary let message = nssa::program_deployment_transaction::Message::new(small_binary); let tx = nssa::ProgramDeploymentTransaction::new(message); diff --git a/integration_tests/tests/config.rs b/integration_tests/tests/config.rs index ed301616..09105833 100644 --- a/integration_tests/tests/config.rs +++ b/integration_tests/tests/config.rs @@ -1,3 +1,9 @@ +#![expect( + clippy::shadow_unrelated, + clippy::tests_outside_test_module, + reason = "We don't care about these in tests" +)] + use anyhow::Result; use integration_tests::TestContext; use log::info; @@ -12,8 +18,8 @@ async fn modify_config_field() -> Result<()> { // Change config field let command = Command::Config(ConfigSubcommand::Set { - key: "seq_poll_timeout".to_string(), - value: "1s".to_string(), + key: "seq_poll_timeout".to_owned(), + value: "1s".to_owned(), }); wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; @@ -22,8 +28,8 @@ async fn modify_config_field() -> Result<()> { // Return how it was at the beginning let command = Command::Config(ConfigSubcommand::Set { - key: "seq_poll_timeout".to_string(), - value: format!("{:?}", old_seq_poll_timeout), + key: "seq_poll_timeout".to_owned(), + value: format!("{old_seq_poll_timeout:?}"), }); wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; diff --git a/integration_tests/tests/indexer.rs b/integration_tests/tests/indexer.rs index ad169790..0b947135 100644 --- a/integration_tests/tests/indexer.rs +++ b/integration_tests/tests/indexer.rs @@ -1,18 +1,19 @@ +#![expect( + clippy::tests_outside_test_module, + reason = "We don't care about these in tests" +)] + use std::time::Duration; -use anyhow::{Context, Result}; -use indexer_service_rpc::RpcClient; -use integration_tests::{ - TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, format_private_account_id, - format_public_account_id, verify_commitment_is_in_state, -}; +use anyhow::Result; +use indexer_service_rpc::RpcClient 
as _; +use integration_tests::{TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, format_public_account_id}; use log::info; -use nssa::AccountId; use tokio::test; use wallet::cli::{Command, programs::native_token_transfer::AuthTransferSubcommand}; -/// Timeout in milliseconds to reliably await for block finalization -const L2_TO_L1_TIMEOUT_MILLIS: u64 = 600000; +/// Timeout in milliseconds to reliably await for block finalization. +const L2_TO_L1_TIMEOUT_MILLIS: u64 = 600_000; #[test] async fn indexer_test_run() -> Result<()> { @@ -114,36 +115,6 @@ async fn indexer_state_consistency() -> Result<()> { assert_eq!(acc_1_balance.balance, 9900); assert_eq!(acc_2_balance.balance, 20100); - let from: AccountId = ctx.existing_private_accounts()[0]; - let to: AccountId = ctx.existing_private_accounts()[1]; - - let command = Command::AuthTransfer(AuthTransferSubcommand::Send { - from: format_private_account_id(from), - to: Some(format_private_account_id(to)), - to_npk: None, - to_vpk: None, - amount: 100, - }); - - wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?; - - info!("Waiting for next block creation"); - tokio::time::sleep(Duration::from_secs(TIME_TO_WAIT_FOR_BLOCK_SECONDS)).await; - - let new_commitment1 = ctx - .wallet() - .get_private_account_commitment(from) - .context("Failed to get private account commitment for sender")?; - assert!(verify_commitment_is_in_state(new_commitment1, ctx.sequencer_client()).await); - - let new_commitment2 = ctx - .wallet() - .get_private_account_commitment(to) - .context("Failed to get private account commitment for receiver")?; - assert!(verify_commitment_is_in_state(new_commitment2, ctx.sequencer_client()).await); - - info!("Successfully transferred privately to owned account"); - // WAIT info!("Waiting for indexer to parse blocks"); tokio::time::sleep(std::time::Duration::from_millis(L2_TO_L1_TIMEOUT_MILLIS)).await; diff --git a/integration_tests/tests/keys_restoration.rs b/integration_tests/tests/keys_restoration.rs 
index 38b9c5b8..0d20f8b4 100644 --- a/integration_tests/tests/keys_restoration.rs +++ b/integration_tests/tests/keys_restoration.rs @@ -1,6 +1,12 @@ -use std::{str::FromStr, time::Duration}; +#![expect( + clippy::shadow_unrelated, + clippy::tests_outside_test_module, + reason = "We don't care about these in tests" +)] -use anyhow::{Context, Result}; +use std::{str::FromStr as _, time::Duration}; + +use anyhow::{Context as _, Result}; use integration_tests::{ TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, fetch_privacy_preserving_tx, format_private_account_id, format_public_account_id, verify_commitment_is_in_state, @@ -87,7 +93,7 @@ async fn sync_private_account_with_non_zero_chain_index() -> Result<()> { assert_eq!(tx.message.new_commitments[0], new_commitment1); assert_eq!(tx.message.new_commitments.len(), 2); - for commitment in tx.message.new_commitments.into_iter() { + for commitment in tx.message.new_commitments { assert!(verify_commitment_is_in_state(commitment, ctx.sequencer_client()).await); } diff --git a/integration_tests/tests/pinata.rs b/integration_tests/tests/pinata.rs index 002dd2c7..38cfeac3 100644 --- a/integration_tests/tests/pinata.rs +++ b/integration_tests/tests/pinata.rs @@ -1,3 +1,9 @@ +#![expect( + clippy::shadow_unrelated, + clippy::tests_outside_test_module, + reason = "We don't care about these in tests" +)] + use std::time::Duration; use anyhow::{Context as _, Result}; @@ -16,6 +22,118 @@ use wallet::cli::{ }, }; +#[test] +async fn claim_pinata_to_uninitialized_public_account_fails_fast() -> Result<()> { + let mut ctx = TestContext::new().await?; + + let result = wallet::cli::execute_subcommand( + ctx.wallet_mut(), + Command::Account(AccountSubcommand::New(NewSubcommand::Public { + cci: None, + label: None, + })), + ) + .await?; + let SubcommandReturnValue::RegisterAccount { + account_id: winner_account_id, + } = result + else { + anyhow::bail!("Expected RegisterAccount return value"); + }; + + let winner_account_id_formatted = 
format_public_account_id(winner_account_id); + + let pinata_balance_pre = ctx + .sequencer_client() + .get_account_balance(PINATA_BASE58.parse().unwrap()) + .await? + .balance; + + let claim_result = wallet::cli::execute_subcommand( + ctx.wallet_mut(), + Command::Pinata(PinataProgramAgnosticSubcommand::Claim { + to: winner_account_id_formatted, + }), + ) + .await; + + assert!( + claim_result.is_err(), + "Expected uninitialized account error" + ); + let err = claim_result.unwrap_err().to_string(); + assert!( + err.contains("wallet auth-transfer init --account-id Public/"), + "Expected init guidance, got: {err}", + ); + + let pinata_balance_post = ctx + .sequencer_client() + .get_account_balance(PINATA_BASE58.parse().unwrap()) + .await? + .balance; + + assert_eq!(pinata_balance_post, pinata_balance_pre); + + Ok(()) +} + +#[test] +async fn claim_pinata_to_uninitialized_private_account_fails_fast() -> Result<()> { + let mut ctx = TestContext::new().await?; + + let result = wallet::cli::execute_subcommand( + ctx.wallet_mut(), + Command::Account(AccountSubcommand::New(NewSubcommand::Private { + cci: None, + label: None, + })), + ) + .await?; + let SubcommandReturnValue::RegisterAccount { + account_id: winner_account_id, + } = result + else { + anyhow::bail!("Expected RegisterAccount return value"); + }; + + let winner_account_id_formatted = format_private_account_id(winner_account_id); + + let pinata_balance_pre = ctx + .sequencer_client() + .get_account_balance(PINATA_BASE58.parse().unwrap()) + .await? 
+ .balance; + + let claim_result = wallet::cli::execute_subcommand( + ctx.wallet_mut(), + Command::Pinata(PinataProgramAgnosticSubcommand::Claim { + to: winner_account_id_formatted, + }), + ) + .await; + + assert!( + claim_result.is_err(), + "Expected uninitialized account error" + ); + let err = claim_result.unwrap_err().to_string(); + assert!( + err.contains("wallet auth-transfer init --account-id Private/"), + "Expected init guidance, got: {err}", + ); + + let pinata_balance_post = ctx + .sequencer_client() + .get_account_balance(PINATA_BASE58.parse().unwrap()) + .await? + .balance; + + assert_eq!(pinata_balance_post, pinata_balance_pre); + + Ok(()) +} + #[test] async fn claim_pinata_to_existing_public_account() -> Result<()> { let mut ctx = TestContext::new().await?; diff --git a/integration_tests/tests/program_deployment.rs b/integration_tests/tests/program_deployment.rs index 098083d2..1feb7290 100644 --- a/integration_tests/tests/program_deployment.rs +++ b/integration_tests/tests/program_deployment.rs @@ -1,3 +1,8 @@ +#![expect( + clippy::tests_outside_test_module, + reason = "We don't care about these in tests" +)] + use std::{path::PathBuf, time::Duration}; use anyhow::Result; @@ -58,7 +63,7 @@ async fn deploy_and_execute_program() -> Result<()> { assert_eq!(post_state_account.program_owner, data_changer.id()); assert_eq!(post_state_account.balance, 0); assert_eq!(post_state_account.data.as_ref(), &[0]); - assert_eq!(post_state_account.nonce, 0); + assert_eq!(post_state_account.nonce.0, 0); info!("Successfully deployed and executed program"); diff --git a/integration_tests/tests/token.rs b/integration_tests/tests/token.rs index a058c94e..5efd69ef 100644 --- a/integration_tests/tests/token.rs +++ b/integration_tests/tests/token.rs @@ -1,3 +1,9 @@ +#![expect( + clippy::shadow_unrelated, + clippy::tests_outside_test_module, + reason = "We don't care about these in tests" +)] + use std::time::Duration; use anyhow::{Context as _, Result}; @@ -69,7 +75,7 @@ 
async fn create_and_transfer_public_token() -> Result<()> { }; // Create new token - let name = "A NAME".to_string(); + let name = "A NAME".to_owned(); let total_supply = 37; let subcommand = TokenProgramAgnosticSubcommand::New { definition_account_id: format_public_account_id(definition_account_id), @@ -317,7 +323,7 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> { }; // Create new token - let name = "A NAME".to_string(); + let name = "A NAME".to_owned(); let total_supply = 37; let subcommand = TokenProgramAgnosticSubcommand::New { definition_account_id: format_public_account_id(definition_account_id), @@ -475,7 +481,7 @@ async fn create_token_with_private_definition() -> Result<()> { }; // Create token with private definition - let name = "A NAME".to_string(); + let name = "A NAME".to_owned(); let total_supply = 37; let subcommand = TokenProgramAgnosticSubcommand::New { definition_account_id: format_private_account_id(definition_account_id), @@ -671,7 +677,7 @@ async fn create_token_with_private_definition_and_supply() -> Result<()> { }; // Create token with both private definition and supply - let name = "A NAME".to_string(); + let name = "A NAME".to_owned(); let total_supply = 37; let subcommand = TokenProgramAgnosticSubcommand::New { definition_account_id: format_private_account_id(definition_account_id), @@ -843,7 +849,7 @@ async fn shielded_token_transfer() -> Result<()> { }; // Create token - let name = "A NAME".to_string(); + let name = "A NAME".to_owned(); let total_supply = 37; let subcommand = TokenProgramAgnosticSubcommand::New { definition_account_id: format_public_account_id(definition_account_id), @@ -966,7 +972,7 @@ async fn deshielded_token_transfer() -> Result<()> { }; // Create token with private supply - let name = "A NAME".to_string(); + let name = "A NAME".to_owned(); let total_supply = 37; let subcommand = TokenProgramAgnosticSubcommand::New { definition_account_id: format_public_account_id(definition_account_id), 
@@ -1073,7 +1079,7 @@ async fn token_claiming_path_with_private_accounts() -> Result<()> { }; // Create token - let name = "A NAME".to_string(); + let name = "A NAME".to_owned(); let total_supply = 37; let subcommand = TokenProgramAgnosticSubcommand::New { definition_account_id: format_private_account_id(definition_account_id), diff --git a/integration_tests/tests/tps.rs b/integration_tests/tests/tps.rs index 12669f90..1dee3a85 100644 --- a/integration_tests/tests/tps.rs +++ b/integration_tests/tests/tps.rs @@ -1,3 +1,14 @@ +#![expect( + clippy::arithmetic_side_effects, + clippy::float_arithmetic, + clippy::missing_asserts_for_indexing, + clippy::as_conversions, + clippy::tests_outside_test_module, + clippy::integer_division, + clippy::integer_division_remainder_used, + reason = "We don't care about these in tests" +)] + use std::time::{Duration, Instant}; use anyhow::Result; @@ -16,11 +27,107 @@ use nssa::{ }; use nssa_core::{ MembershipProof, NullifierPublicKey, - account::{AccountWithMetadata, data::Data}, + account::{AccountWithMetadata, Nonce, data::Data}, encryption::ViewingPublicKey, }; use tokio::test; +pub(crate) struct TpsTestManager { + public_keypairs: Vec<(PrivateKey, AccountId)>, + target_tps: u64, +} + +impl TpsTestManager { + /// Generates public account keypairs. These are used to populate the config and to generate + /// valid public transactions for the tps test. 
+ pub(crate) fn new(target_tps: u64, number_transactions: usize) -> Self { + let public_keypairs = (1..(number_transactions + 2)) + .map(|i| { + let mut private_key_bytes = [0_u8; 32]; + private_key_bytes[..8].copy_from_slice(&i.to_le_bytes()); + let private_key = PrivateKey::try_new(private_key_bytes).unwrap(); + let public_key = PublicKey::new_from_private_key(&private_key); + let account_id = AccountId::from(&public_key); + (private_key, account_id) + }) + .collect(); + Self { + public_keypairs, + target_tps, + } + } + + #[expect( + clippy::cast_precision_loss, + reason = "This is just for testing purposes, we don't care about precision loss here" + )] + pub(crate) fn target_time(&self) -> Duration { + let number_transactions = (self.public_keypairs.len() - 1) as u64; + Duration::from_secs_f64(number_transactions as f64 / self.target_tps as f64) + } + + /// Build a batch of public transactions to submit to the node. + pub fn build_public_txs(&self) -> Vec { + // Create valid public transactions + let program = Program::authenticated_transfer_program(); + let public_txs: Vec = self + .public_keypairs + .windows(2) + .map(|pair| { + let amount: u128 = 1; + let message = putx::Message::try_new( + program.id(), + [pair[0].1, pair[1].1].to_vec(), + [Nonce(0_u128)].to_vec(), + amount, + ) + .unwrap(); + let witness_set = + nssa::public_transaction::WitnessSet::for_message(&message, &[&pair[0].0]); + PublicTransaction::new(message, witness_set) + }) + .collect(); + + public_txs + } + + /// Generates a sequencer configuration with initial balance in a number of public accounts. + /// The transactions generated with the function `build_public_txs` will be valid in a node + /// started with the config from this method. 
+ fn generate_initial_data(&self) -> InitialData { + // Create public public keypairs + let public_accounts = self + .public_keypairs + .iter() + .map(|(key, _)| (key.clone(), 10)) + .collect(); + + // Generate an initial commitment to be used with the privacy preserving transaction + // created with the `build_privacy_transaction` function. + let key_chain = KeyChain::new_os_random(); + let account = Account { + balance: 100, + nonce: Nonce(0xdead_beef), + program_owner: Program::authenticated_transfer_program().id(), + data: Data::default(), + }; + + InitialData { + public_accounts, + private_accounts: vec![(key_chain, account)], + } + } + + const fn generate_sequencer_partial_config() -> SequencerPartialConfig { + SequencerPartialConfig { + max_num_tx_in_block: 300, + max_block_size: ByteSize::mb(500), + mempool_max_size: 10_000, + block_create_timeout: Duration::from_secs(12), + } + } +} + // TODO: Make a proper benchmark instead of an ad-hoc test #[test] pub async fn tps_test() -> Result<()> { @@ -56,16 +163,17 @@ pub async fn tps_test() -> Result<()> { for (i, tx_hash) in tx_hashes.iter().enumerate() { loop { - if now.elapsed().as_millis() > target_time.as_millis() { - panic!("TPS test failed by timeout"); - } + assert!( + now.elapsed().as_millis() <= target_time.as_millis(), + "TPS test failed by timeout" + ); let tx_obj = ctx .sequencer_client() .get_transaction_by_hash(*tx_hash) .await .inspect_err(|err| { - log::warn!("Failed to get transaction by hash {tx_hash} with error: {err:#?}") + log::warn!("Failed to get transaction by hash {tx_hash} with error: {err:#?}"); }); if let Ok(tx_obj) = tx_obj @@ -94,98 +202,6 @@ pub async fn tps_test() -> Result<()> { Ok(()) } -pub(crate) struct TpsTestManager { - public_keypairs: Vec<(PrivateKey, AccountId)>, - target_tps: u64, -} - -impl TpsTestManager { - /// Generates public account keypairs. These are used to populate the config and to generate - /// valid public transactions for the tps test. 
- pub(crate) fn new(target_tps: u64, number_transactions: usize) -> Self { - let public_keypairs = (1..(number_transactions + 2)) - .map(|i| { - let mut private_key_bytes = [0u8; 32]; - private_key_bytes[..8].copy_from_slice(&i.to_le_bytes()); - let private_key = PrivateKey::try_new(private_key_bytes).unwrap(); - let public_key = PublicKey::new_from_private_key(&private_key); - let account_id = AccountId::from(&public_key); - (private_key, account_id) - }) - .collect(); - Self { - public_keypairs, - target_tps, - } - } - - pub(crate) fn target_time(&self) -> Duration { - let number_transactions = (self.public_keypairs.len() - 1) as u64; - Duration::from_secs_f64(number_transactions as f64 / self.target_tps as f64) - } - - /// Build a batch of public transactions to submit to the node. - pub fn build_public_txs(&self) -> Vec { - // Create valid public transactions - let program = Program::authenticated_transfer_program(); - let public_txs: Vec = self - .public_keypairs - .windows(2) - .map(|pair| { - let amount: u128 = 1; - let message = putx::Message::try_new( - program.id(), - [pair[0].1, pair[1].1].to_vec(), - [0u128].to_vec(), - amount, - ) - .unwrap(); - let witness_set = - nssa::public_transaction::WitnessSet::for_message(&message, &[&pair[0].0]); - PublicTransaction::new(message, witness_set) - }) - .collect(); - - public_txs - } - - /// Generates a sequencer configuration with initial balance in a number of public accounts. - /// The transactions generated with the function `build_public_txs` will be valid in a node - /// started with the config from this method. - fn generate_initial_data(&self) -> InitialData { - // Create public public keypairs - let public_accounts = self - .public_keypairs - .iter() - .map(|(key, _)| (key.clone(), 10)) - .collect(); - - // Generate an initial commitment to be used with the privacy preserving transaction - // created with the `build_privacy_transaction` function. 
- let key_chain = KeyChain::new_os_random(); - let account = Account { - balance: 100, - nonce: 0xdeadbeef, - program_owner: Program::authenticated_transfer_program().id(), - data: Data::default(), - }; - - InitialData { - public_accounts, - private_accounts: vec![(key_chain, account)], - } - } - - fn generate_sequencer_partial_config() -> SequencerPartialConfig { - SequencerPartialConfig { - max_num_tx_in_block: 300, - max_block_size: ByteSize::mb(500), - mempool_max_size: 10_000, - block_create_timeout: Duration::from_secs(12), - } - } -} - /// Builds a single privacy transaction to use in stress tests. This involves generating a proof so /// it may take a while to run. In normal execution of the node this transaction will be accepted /// only once. Disabling the node's nullifier uniqueness check allows to submit this transaction @@ -200,7 +216,7 @@ fn build_privacy_transaction() -> PrivacyPreservingTransaction { let sender_pre = AccountWithMetadata::new( Account { balance: 100, - nonce: 0xdeadbeef, + nonce: Nonce(0xdead_beef), program_owner: program.id(), data: Data::default(), }, @@ -234,7 +250,6 @@ fn build_privacy_transaction() -> PrivacyPreservingTransaction { vec![sender_pre, recipient_pre], Program::serialize_instruction(balance_to_move).unwrap(), vec![1, 2], - vec![0xdeadbeef1, 0xdeadbeef2], vec![ (sender_npk.clone(), sender_ss), (recipient_npk.clone(), recipient_ss), diff --git a/integration_tests/tests/wallet_ffi.rs b/integration_tests/tests/wallet_ffi.rs index 0b30f107..dad4c79e 100644 --- a/integration_tests/tests/wallet_ffi.rs +++ b/integration_tests/tests/wallet_ffi.rs @@ -1,7 +1,19 @@ +#![expect( + clippy::redundant_test_prefix, + reason = "Otherwise names interfere with ffi bindings" +)] +#![expect( + clippy::tests_outside_test_module, + clippy::undocumented_unsafe_blocks, + clippy::multiple_unsafe_ops_per_block, + clippy::shadow_unrelated, + reason = "We don't care about these in tests" +)] + use std::{ collections::HashSet, ffi::{CStr, CString, 
c_char}, - io::Write, + io::Write as _, path::Path, time::Duration, }; @@ -152,7 +164,7 @@ unsafe extern "C" { fn new_wallet_ffi_with_test_context_config( ctx: &BlockingTestContext, home: &Path, -) -> *mut WalletHandle { +) -> Result<*mut WalletHandle> { let config_path = home.join("wallet_config.json"); let storage_path = home.join("storage.json"); let mut config = ctx.ctx().wallet().config().to_owned(); @@ -163,75 +175,68 @@ fn new_wallet_ffi_with_test_context_config( .write(true) .create(true) .truncate(true) - .open(&config_path) - .unwrap(); + .open(&config_path)?; - let config_with_overrides_serialized = serde_json::to_vec_pretty(&config).unwrap(); + let config_with_overrides_serialized = serde_json::to_vec_pretty(&config)?; - file.write_all(&config_with_overrides_serialized).unwrap(); + file.write_all(&config_with_overrides_serialized)?; - let config_path = CString::new(config_path.to_str().unwrap()).unwrap(); - let storage_path = CString::new(storage_path.to_str().unwrap()).unwrap(); - let password = CString::new(ctx.ctx().wallet_password()).unwrap(); + let config_path = CString::new(config_path.to_str().unwrap())?; + let storage_path = CString::new(storage_path.to_str().unwrap())?; + let password = CString::new(ctx.ctx().wallet_password())?; - unsafe { + Ok(unsafe { wallet_ffi_create_new( config_path.as_ptr(), storage_path.as_ptr(), password.as_ptr(), ) - } + }) } -fn new_wallet_ffi_with_default_config(password: &str) -> *mut WalletHandle { - let tempdir = tempdir().unwrap(); +fn new_wallet_ffi_with_default_config(password: &str) -> Result<*mut WalletHandle> { + let tempdir = tempdir()?; let config_path = tempdir.path().join("wallet_config.json"); let storage_path = tempdir.path().join("storage.json"); - let config_path_c = CString::new(config_path.to_str().unwrap()).unwrap(); - let storage_path_c = CString::new(storage_path.to_str().unwrap()).unwrap(); - let password = CString::new(password).unwrap(); + let config_path_c = 
CString::new(config_path.to_str().unwrap())?; + let storage_path_c = CString::new(storage_path.to_str().unwrap())?; + let password = CString::new(password)?; - unsafe { + Ok(unsafe { wallet_ffi_create_new( config_path_c.as_ptr(), storage_path_c.as_ptr(), password.as_ptr(), ) - } + }) } -fn new_wallet_rust_with_default_config(password: &str) -> WalletCore { - let tempdir = tempdir().unwrap(); +fn new_wallet_rust_with_default_config(password: &str) -> Result { + let tempdir = tempdir()?; let config_path = tempdir.path().join("wallet_config.json"); let storage_path = tempdir.path().join("storage.json"); - WalletCore::new_init_storage( - config_path.to_path_buf(), - storage_path.to_path_buf(), - None, - password.to_string(), - ) - .unwrap() + WalletCore::new_init_storage(config_path, storage_path, None, password.to_owned()) } -fn load_existing_ffi_wallet(home: &Path) -> *mut WalletHandle { +fn load_existing_ffi_wallet(home: &Path) -> Result<*mut WalletHandle> { let config_path = home.join("wallet_config.json"); let storage_path = home.join("storage.json"); - let config_path = CString::new(config_path.to_str().unwrap()).unwrap(); - let storage_path = CString::new(storage_path.to_str().unwrap()).unwrap(); + let config_path = CString::new(config_path.to_str().unwrap())?; + let storage_path = CString::new(storage_path.to_str().unwrap())?; - unsafe { wallet_ffi_open(config_path.as_ptr(), storage_path.as_ptr()) } + Ok(unsafe { wallet_ffi_open(config_path.as_ptr(), storage_path.as_ptr()) }) } #[test] -fn test_wallet_ffi_create_public_accounts() { +fn wallet_ffi_create_public_accounts() -> Result<()> { let password = "password_for_tests"; let n_accounts = 10; // First `n_accounts` public accounts created with Rust wallet let new_public_account_ids_rust = { let mut account_ids = Vec::new(); - let mut wallet_rust = new_wallet_rust_with_default_config(password); + let mut wallet_rust = new_wallet_rust_with_default_config(password)?; for _ in 0..n_accounts { let account_id = 
wallet_rust.create_new_account_public(None).0; account_ids.push(*account_id.value()); @@ -243,13 +248,10 @@ fn test_wallet_ffi_create_public_accounts() { let new_public_account_ids_ffi = unsafe { let mut account_ids = Vec::new(); - let wallet_ffi_handle = new_wallet_ffi_with_default_config(password); + let wallet_ffi_handle = new_wallet_ffi_with_default_config(password)?; for _ in 0..n_accounts { let mut out_account_id = FfiBytes32::from_bytes([0; 32]); - wallet_ffi_create_account_public( - wallet_ffi_handle, - (&mut out_account_id) as *mut FfiBytes32, - ); + wallet_ffi_create_account_public(wallet_ffi_handle, &raw mut out_account_id); account_ids.push(out_account_id.data); } wallet_ffi_destroy(wallet_ffi_handle); @@ -257,17 +259,19 @@ fn test_wallet_ffi_create_public_accounts() { }; assert_eq!(new_public_account_ids_ffi, new_public_account_ids_rust); + + Ok(()) } #[test] -fn test_wallet_ffi_create_private_accounts() { +fn wallet_ffi_create_private_accounts() -> Result<()> { let password = "password_for_tests"; let n_accounts = 10; // First `n_accounts` private accounts created with Rust wallet let new_private_account_ids_rust = { let mut account_ids = Vec::new(); - let mut wallet_rust = new_wallet_rust_with_default_config(password); + let mut wallet_rust = new_wallet_rust_with_default_config(password)?; for _ in 0..n_accounts { let account_id = wallet_rust.create_new_account_private(None).0; account_ids.push(*account_id.value()); @@ -279,56 +283,52 @@ fn test_wallet_ffi_create_private_accounts() { let new_private_account_ids_ffi = unsafe { let mut account_ids = Vec::new(); - let wallet_ffi_handle = new_wallet_ffi_with_default_config(password); + let wallet_ffi_handle = new_wallet_ffi_with_default_config(password)?; for _ in 0..n_accounts { let mut out_account_id = FfiBytes32::from_bytes([0; 32]); - wallet_ffi_create_account_private( - wallet_ffi_handle, - (&mut out_account_id) as *mut FfiBytes32, - ); + wallet_ffi_create_account_private(wallet_ffi_handle, &raw mut 
out_account_id); account_ids.push(out_account_id.data); } wallet_ffi_destroy(wallet_ffi_handle); account_ids }; - assert_eq!(new_private_account_ids_ffi, new_private_account_ids_rust) + assert_eq!(new_private_account_ids_ffi, new_private_account_ids_rust); + + Ok(()) } #[test] -fn test_wallet_ffi_save_and_load_persistent_storage() -> Result<()> { +fn wallet_ffi_save_and_load_persistent_storage() -> Result<()> { let ctx = BlockingTestContext::new()?; let mut out_private_account_id = FfiBytes32::from_bytes([0; 32]); - let home = tempfile::tempdir().unwrap(); + let home = tempfile::tempdir()?; // Create a private account with the wallet FFI and save it unsafe { - let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path()); - wallet_ffi_create_account_private( - wallet_ffi_handle, - (&mut out_private_account_id) as *mut FfiBytes32, - ); + let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path())?; + wallet_ffi_create_account_private(wallet_ffi_handle, &raw mut out_private_account_id); wallet_ffi_save(wallet_ffi_handle); wallet_ffi_destroy(wallet_ffi_handle); } let private_account_keys = unsafe { - let wallet_ffi_handle = load_existing_ffi_wallet(home.path()); + let wallet_ffi_handle = load_existing_ffi_wallet(home.path())?; let mut private_account = FfiAccount::default(); let result = wallet_ffi_get_account_private( wallet_ffi_handle, - (&out_private_account_id) as *const FfiBytes32, - (&mut private_account) as *mut FfiAccount, + &raw const out_private_account_id, + &raw mut private_account, ); assert_eq!(result, error::WalletFfiError::Success); let mut out_keys = FfiPrivateAccountKeys::default(); let result = wallet_ffi_get_private_account_keys( wallet_ffi_handle, - (&out_private_account_id) as *const FfiBytes32, - (&mut out_keys) as *mut FfiPrivateAccountKeys, + &raw const out_private_account_id, + &raw mut out_keys, ); assert_eq!(result, error::WalletFfiError::Success); @@ -346,17 +346,17 @@ fn 
test_wallet_ffi_save_and_load_persistent_storage() -> Result<()> { } #[test] -fn test_wallet_ffi_list_accounts() { +fn test_wallet_ffi_list_accounts() -> Result<()> { let password = "password_for_tests"; // Create the wallet FFI let wallet_ffi_handle = unsafe { - let handle = new_wallet_ffi_with_default_config(password); + let handle = new_wallet_ffi_with_default_config(password)?; // Create 5 public accounts and 5 private accounts for _ in 0..5 { let mut out_account_id = FfiBytes32::from_bytes([0; 32]); - wallet_ffi_create_account_public(handle, (&mut out_account_id) as *mut FfiBytes32); - wallet_ffi_create_account_private(handle, (&mut out_account_id) as *mut FfiBytes32); + wallet_ffi_create_account_public(handle, &raw mut out_account_id); + wallet_ffi_create_account_private(handle, &raw mut out_account_id); } handle @@ -364,7 +364,7 @@ fn test_wallet_ffi_list_accounts() { // Create the wallet Rust let wallet_rust = { - let mut wallet = new_wallet_rust_with_default_config(password); + let mut wallet = new_wallet_rust_with_default_config(password)?; // Create 5 public accounts and 5 private accounts for _ in 0..5 { wallet.create_new_account_public(None); @@ -376,7 +376,7 @@ fn test_wallet_ffi_list_accounts() { // Get the account list with FFI method let mut wallet_ffi_account_list = unsafe { let mut out_list = FfiAccountList::default(); - wallet_ffi_list_accounts(wallet_ffi_handle, (&mut out_list) as *mut FfiAccountList); + wallet_ffi_list_accounts(wallet_ffi_handle, &raw mut out_list); out_list }; @@ -400,7 +400,7 @@ fn test_wallet_ffi_list_accounts() { assert_eq!( wallet_rust_account_ids .iter() - .map(|id| id.value()) + .map(nssa::AccountId::value) .collect::>(), wallet_ffi_account_list_slice .iter() @@ -409,7 +409,7 @@ fn test_wallet_ffi_list_accounts() { ); // Assert `is_pub` flag is correct in the FFI result - for entry in wallet_ffi_account_list_slice.iter() { + for entry in wallet_ffi_account_list_slice { let account_id = 
AccountId::new(entry.account_id.data); let is_pub_default_in_rust_wallet = wallet_rust .storage() @@ -429,27 +429,30 @@ fn test_wallet_ffi_list_accounts() { } unsafe { - wallet_ffi_free_account_list((&mut wallet_ffi_account_list) as *mut FfiAccountList); + wallet_ffi_free_account_list(&raw mut wallet_ffi_account_list); wallet_ffi_destroy(wallet_ffi_handle); } + + Ok(()) } #[test] fn test_wallet_ffi_get_balance_public() -> Result<()> { let ctx = BlockingTestContext::new()?; let account_id: AccountId = ctx.ctx().existing_public_accounts()[0]; - let home = tempfile::tempdir().unwrap(); - let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path()); + let home = tempfile::tempdir()?; + let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path())?; let balance = unsafe { let mut out_balance: [u8; 16] = [0; 16]; let ffi_account_id = FfiBytes32::from(&account_id); - let _result = wallet_ffi_get_balance( + wallet_ffi_get_balance( wallet_ffi_handle, - (&ffi_account_id) as *const FfiBytes32, + &raw const ffi_account_id, true, - (&mut out_balance) as *mut [u8; 16], - ); + &raw mut out_balance, + ) + .unwrap(); u128::from_le_bytes(out_balance) }; assert_eq!(balance, 10000); @@ -467,17 +470,18 @@ fn test_wallet_ffi_get_balance_public() -> Result<()> { fn test_wallet_ffi_get_account_public() -> Result<()> { let ctx = BlockingTestContext::new()?; let account_id: AccountId = ctx.ctx().existing_public_accounts()[0]; - let home = tempfile::tempdir().unwrap(); - let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path()); + let home = tempfile::tempdir()?; + let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path())?; let mut out_account = FfiAccount::default(); let account: Account = unsafe { let ffi_account_id = FfiBytes32::from(&account_id); - let _result = wallet_ffi_get_account_public( + wallet_ffi_get_account_public( wallet_ffi_handle, - (&ffi_account_id) as *const FfiBytes32, - (&mut 
out_account) as *mut FfiAccount, - ); + &raw const ffi_account_id, + &raw mut out_account, + ) + .unwrap(); (&out_account).try_into().unwrap() }; @@ -487,10 +491,10 @@ fn test_wallet_ffi_get_account_public() -> Result<()> { ); assert_eq!(account.balance, 10000); assert!(account.data.is_empty()); - assert_eq!(account.nonce, 0); + assert_eq!(account.nonce.0, 0); unsafe { - wallet_ffi_free_account_data((&mut out_account) as *mut FfiAccount); + wallet_ffi_free_account_data(&raw mut out_account); wallet_ffi_destroy(wallet_ffi_handle); } @@ -503,17 +507,18 @@ fn test_wallet_ffi_get_account_public() -> Result<()> { fn test_wallet_ffi_get_account_private() -> Result<()> { let ctx = BlockingTestContext::new()?; let account_id: AccountId = ctx.ctx().existing_private_accounts()[0]; - let home = tempfile::tempdir().unwrap(); - let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path()); + let home = tempfile::tempdir()?; + let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path())?; let mut out_account = FfiAccount::default(); let account: Account = unsafe { let ffi_account_id = FfiBytes32::from(&account_id); - let _result = wallet_ffi_get_account_private( + wallet_ffi_get_account_private( wallet_ffi_handle, - (&ffi_account_id) as *const FfiBytes32, - (&mut out_account) as *mut FfiAccount, - ); + &raw const ffi_account_id, + &raw mut out_account, + ) + .unwrap(); (&out_account).try_into().unwrap() }; @@ -523,10 +528,10 @@ fn test_wallet_ffi_get_account_private() -> Result<()> { ); assert_eq!(account.balance, 10000); assert!(account.data.is_empty()); - assert_eq!(account.nonce, 0); + assert_eq!(account.nonce, 0_u128.into()); unsafe { - wallet_ffi_free_account_data((&mut out_account) as *mut FfiAccount); + wallet_ffi_free_account_data(&raw mut out_account); wallet_ffi_destroy(wallet_ffi_handle); } @@ -539,17 +544,18 @@ fn test_wallet_ffi_get_account_private() -> Result<()> { fn test_wallet_ffi_get_public_account_keys() -> Result<()> 
{ let ctx = BlockingTestContext::new()?; let account_id: AccountId = ctx.ctx().existing_public_accounts()[0]; - let home = tempfile::tempdir().unwrap(); - let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path()); + let home = tempfile::tempdir()?; + let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path())?; let mut out_key = FfiPublicAccountKey::default(); let key: PublicKey = unsafe { let ffi_account_id = FfiBytes32::from(&account_id); - let _result = wallet_ffi_get_public_account_key( + wallet_ffi_get_public_account_key( wallet_ffi_handle, - (&ffi_account_id) as *const FfiBytes32, - (&mut out_key) as *mut FfiPublicAccountKey, - ); + &raw const ffi_account_id, + &raw mut out_key, + ) + .unwrap(); (&out_key).try_into().unwrap() }; @@ -577,17 +583,18 @@ fn test_wallet_ffi_get_public_account_keys() -> Result<()> { fn test_wallet_ffi_get_private_account_keys() -> Result<()> { let ctx = BlockingTestContext::new()?; let account_id: AccountId = ctx.ctx().existing_private_accounts()[0]; - let home = tempfile::tempdir().unwrap(); - let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path()); + let home = tempfile::tempdir()?; + let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path())?; let mut keys = FfiPrivateAccountKeys::default(); unsafe { let ffi_account_id = FfiBytes32::from(&account_id); - let _result = wallet_ffi_get_private_account_keys( + wallet_ffi_get_private_account_keys( wallet_ffi_handle, - (&ffi_account_id) as *const FfiBytes32, - (&mut keys) as *mut FfiPrivateAccountKeys, - ); + &raw const ffi_account_id, + &raw mut keys, + ) + .unwrap(); }; let key_chain = &ctx @@ -606,7 +613,7 @@ fn test_wallet_ffi_get_private_account_keys() -> Result<()> { assert_eq!(&keys.vpk().unwrap(), expected_vpk); unsafe { - wallet_ffi_free_private_account_keys((&mut keys) as *mut FfiPrivateAccountKeys); + wallet_ffi_free_private_account_keys(&raw mut keys); 
wallet_ffi_destroy(wallet_ffi_handle); } @@ -616,66 +623,65 @@ fn test_wallet_ffi_get_private_account_keys() -> Result<()> { } #[test] -fn test_wallet_ffi_account_id_to_base58() { +fn test_wallet_ffi_account_id_to_base58() -> Result<()> { let private_key = PrivateKey::new_os_random(); let public_key = PublicKey::new_from_private_key(&private_key); let account_id = AccountId::from(&public_key); let ffi_bytes: FfiBytes32 = (&account_id).into(); - let ptr = unsafe { wallet_ffi_account_id_to_base58((&ffi_bytes) as *const FfiBytes32) }; + let ptr = unsafe { wallet_ffi_account_id_to_base58(&raw const ffi_bytes) }; - let ffi_result = unsafe { CStr::from_ptr(ptr).to_str().unwrap() }; + let ffi_result = unsafe { CStr::from_ptr(ptr).to_str()? }; assert_eq!(account_id.to_string(), ffi_result); unsafe { wallet_ffi_free_string(ptr); } + + Ok(()) } #[test] -fn test_wallet_ffi_base58_to_account_id() { +fn wallet_ffi_base58_to_account_id() -> Result<()> { let private_key = PrivateKey::new_os_random(); let public_key = PublicKey::new_from_private_key(&private_key); let account_id = AccountId::from(&public_key); let account_id_str = account_id.to_string(); - let account_id_c_str = CString::new(account_id_str.clone()).unwrap(); + let account_id_c_str = CString::new(account_id_str.clone())?; let account_id: AccountId = unsafe { let mut out_account_id_bytes = FfiBytes32::default(); - wallet_ffi_account_id_from_base58( - account_id_c_str.as_ptr(), - (&mut out_account_id_bytes) as *mut FfiBytes32, - ); + wallet_ffi_account_id_from_base58(account_id_c_str.as_ptr(), &raw mut out_account_id_bytes); out_account_id_bytes.into() }; - let expected_account_id = account_id_str.parse().unwrap(); + let expected_account_id = account_id_str.parse()?; assert_eq!(account_id, expected_account_id); + + Ok(()) } #[test] -fn test_wallet_ffi_init_public_account_auth_transfer() -> Result<()> { - let ctx = BlockingTestContext::new().unwrap(); - let home = tempfile::tempdir().unwrap(); - let wallet_ffi_handle 
= new_wallet_ffi_with_test_context_config(&ctx, home.path()); +fn wallet_ffi_init_public_account_auth_transfer() -> Result<()> { + let ctx = BlockingTestContext::new()?; + let home = tempfile::tempdir()?; + let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path())?; // Create a new uninitialized public account let mut out_account_id = FfiBytes32::from_bytes([0; 32]); unsafe { - wallet_ffi_create_account_public( - wallet_ffi_handle, - (&mut out_account_id) as *mut FfiBytes32, - ); + wallet_ffi_create_account_public(wallet_ffi_handle, &raw mut out_account_id); } // Check its program owner is the default program id let account: Account = unsafe { let mut out_account = FfiAccount::default(); - let _result = wallet_ffi_get_account_public( + wallet_ffi_get_account_public( wallet_ffi_handle, - (&out_account_id) as *const FfiBytes32, - (&mut out_account) as *mut FfiAccount, - ); + &raw const out_account_id, + &raw mut out_account, + ) + .unwrap(); (&out_account).try_into().unwrap() }; assert_eq!(account.program_owner, DEFAULT_PROGRAM_ID); @@ -685,8 +691,8 @@ fn test_wallet_ffi_init_public_account_auth_transfer() -> Result<()> { unsafe { wallet_ffi_register_public_account( wallet_ffi_handle, - (&out_account_id) as *const FfiBytes32, - (&mut transfer_result) as *mut FfiTransferResult, + &raw const out_account_id, + &raw mut transfer_result, ); } @@ -696,11 +702,12 @@ fn test_wallet_ffi_init_public_account_auth_transfer() -> Result<()> { // Check that the program owner is now the authenticated transfer program let account: Account = unsafe { let mut out_account = FfiAccount::default(); - let _result = wallet_ffi_get_account_public( + wallet_ffi_get_account_public( wallet_ffi_handle, - (&out_account_id) as *const FfiBytes32, - (&mut out_account) as *mut FfiAccount, - ); + &raw const out_account_id, + &raw mut out_account, + ) + .unwrap(); (&out_account).try_into().unwrap() }; assert_eq!( @@ -709,7 +716,7 @@ fn 
test_wallet_ffi_init_public_account_auth_transfer() -> Result<()> { ); unsafe { - wallet_ffi_free_transfer_result((&mut transfer_result) as *mut FfiTransferResult); + wallet_ffi_free_transfer_result(&raw mut transfer_result); wallet_ffi_destroy(wallet_ffi_handle); } @@ -717,18 +724,15 @@ fn test_wallet_ffi_init_public_account_auth_transfer() -> Result<()> { } #[test] -fn test_wallet_ffi_init_private_account_auth_transfer() -> Result<()> { - let ctx = BlockingTestContext::new().unwrap(); - let home = tempfile::tempdir().unwrap(); - let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path()); +fn wallet_ffi_init_private_account_auth_transfer() -> Result<()> { + let ctx = BlockingTestContext::new()?; + let home = tempfile::tempdir()?; + let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path())?; // Create a new uninitialized public account let mut out_account_id = FfiBytes32::from_bytes([0; 32]); unsafe { - wallet_ffi_create_account_private( - wallet_ffi_handle, - (&mut out_account_id) as *mut FfiBytes32, - ); + wallet_ffi_create_account_private(wallet_ffi_handle, &raw mut out_account_id); } // Check its program owner is the default program id @@ -736,8 +740,8 @@ fn test_wallet_ffi_init_private_account_auth_transfer() -> Result<()> { let mut out_account = FfiAccount::default(); wallet_ffi_get_account_private( wallet_ffi_handle, - (&out_account_id) as *const FfiBytes32, - (&mut out_account) as *mut FfiAccount, + &raw const out_account_id, + &raw mut out_account, ); (&out_account).try_into().unwrap() }; @@ -748,8 +752,8 @@ fn test_wallet_ffi_init_private_account_auth_transfer() -> Result<()> { unsafe { wallet_ffi_register_private_account( wallet_ffi_handle, - (&out_account_id) as *const FfiBytes32, - (&mut transfer_result) as *mut FfiTransferResult, + &raw const out_account_id, + &raw mut transfer_result, ); } @@ -759,18 +763,19 @@ fn test_wallet_ffi_init_private_account_auth_transfer() -> Result<()> { // Sync private 
account local storage with onchain encrypted state unsafe { let mut current_height = 0; - wallet_ffi_get_current_block_height(wallet_ffi_handle, (&mut current_height) as *mut u64); + wallet_ffi_get_current_block_height(wallet_ffi_handle, &raw mut current_height); wallet_ffi_sync_to_block(wallet_ffi_handle, current_height); }; // Check that the program owner is now the authenticated transfer program let account: Account = unsafe { let mut out_account = FfiAccount::default(); - let _result = wallet_ffi_get_account_private( + wallet_ffi_get_account_private( wallet_ffi_handle, - (&out_account_id) as *const FfiBytes32, - (&mut out_account) as *mut FfiAccount, - ); + &raw const out_account_id, + &raw mut out_account, + ) + .unwrap(); (&out_account).try_into().unwrap() }; assert_eq!( @@ -779,7 +784,7 @@ fn test_wallet_ffi_init_private_account_auth_transfer() -> Result<()> { ); unsafe { - wallet_ffi_free_transfer_result((&mut transfer_result) as *mut FfiTransferResult); + wallet_ffi_free_transfer_result(&raw mut transfer_result); wallet_ffi_destroy(wallet_ffi_handle); } @@ -788,21 +793,21 @@ fn test_wallet_ffi_init_private_account_auth_transfer() -> Result<()> { #[test] fn test_wallet_ffi_transfer_public() -> Result<()> { - let ctx = BlockingTestContext::new().unwrap(); - let home = tempfile::tempdir().unwrap(); - let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path()); + let ctx = BlockingTestContext::new()?; + let home = tempfile::tempdir()?; + let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path())?; let from: FfiBytes32 = (&ctx.ctx().existing_public_accounts()[0]).into(); let to: FfiBytes32 = (&ctx.ctx().existing_public_accounts()[1]).into(); - let amount: [u8; 16] = 100u128.to_le_bytes(); + let amount: [u8; 16] = 100_u128.to_le_bytes(); let mut transfer_result = FfiTransferResult::default(); unsafe { wallet_ffi_transfer_public( wallet_ffi_handle, - (&from) as *const FfiBytes32, - (&to) as *const FfiBytes32, - 
(&amount) as *const [u8; 16], - (&mut transfer_result) as *mut FfiTransferResult, + &raw const from, + &raw const to, + &raw const amount, + &raw mut transfer_result, ); } @@ -811,23 +816,20 @@ fn test_wallet_ffi_transfer_public() -> Result<()> { let from_balance = unsafe { let mut out_balance: [u8; 16] = [0; 16]; - let _result = wallet_ffi_get_balance( + wallet_ffi_get_balance( wallet_ffi_handle, - (&from) as *const FfiBytes32, + &raw const from, true, - (&mut out_balance) as *mut [u8; 16], - ); + &raw mut out_balance, + ) + .unwrap(); u128::from_le_bytes(out_balance) }; let to_balance = unsafe { let mut out_balance: [u8; 16] = [0; 16]; - let _result = wallet_ffi_get_balance( - wallet_ffi_handle, - (&to) as *const FfiBytes32, - true, - (&mut out_balance) as *mut [u8; 16], - ); + wallet_ffi_get_balance(wallet_ffi_handle, &raw const to, true, &raw mut out_balance) + .unwrap(); u128::from_le_bytes(out_balance) }; @@ -835,7 +837,7 @@ fn test_wallet_ffi_transfer_public() -> Result<()> { assert_eq!(to_balance, 20100); unsafe { - wallet_ffi_free_transfer_result((&mut transfer_result) as *mut FfiTransferResult); + wallet_ffi_free_transfer_result(&raw mut transfer_result); wallet_ffi_destroy(wallet_ffi_handle); } @@ -844,34 +846,31 @@ fn test_wallet_ffi_transfer_public() -> Result<()> { #[test] fn test_wallet_ffi_transfer_shielded() -> Result<()> { - let ctx = BlockingTestContext::new().unwrap(); - let home = tempfile::tempdir().unwrap(); - let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path()); + let ctx = BlockingTestContext::new()?; + let home = tempfile::tempdir()?; + let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path())?; let from: FfiBytes32 = (&ctx.ctx().existing_public_accounts()[0]).into(); let (to, to_keys) = unsafe { let mut out_account_id = FfiBytes32::default(); let mut out_keys = FfiPrivateAccountKeys::default(); - wallet_ffi_create_account_private( - wallet_ffi_handle, - (&mut out_account_id) as *mut 
FfiBytes32, - ); + wallet_ffi_create_account_private(wallet_ffi_handle, &raw mut out_account_id); wallet_ffi_get_private_account_keys( wallet_ffi_handle, - (&out_account_id) as *const FfiBytes32, - (&mut out_keys) as *mut FfiPrivateAccountKeys, + &raw const out_account_id, + &raw mut out_keys, ); (out_account_id, out_keys) }; - let amount: [u8; 16] = 100u128.to_le_bytes(); + let amount: [u8; 16] = 100_u128.to_le_bytes(); let mut transfer_result = FfiTransferResult::default(); unsafe { wallet_ffi_transfer_shielded( wallet_ffi_handle, - (&from) as *const FfiBytes32, - (&to_keys) as *const FfiPrivateAccountKeys, - (&amount) as *const [u8; 16], - (&mut transfer_result) as *mut FfiTransferResult, + &raw const from, + &raw const to_keys, + &raw const amount, + &raw mut transfer_result, ); } @@ -881,18 +880,19 @@ fn test_wallet_ffi_transfer_shielded() -> Result<()> { // Sync private account local storage with onchain encrypted state unsafe { let mut current_height = 0; - wallet_ffi_get_current_block_height(wallet_ffi_handle, (&mut current_height) as *mut u64); + wallet_ffi_get_current_block_height(wallet_ffi_handle, &raw mut current_height); wallet_ffi_sync_to_block(wallet_ffi_handle, current_height); }; let from_balance = unsafe { let mut out_balance: [u8; 16] = [0; 16]; - let _result = wallet_ffi_get_balance( + wallet_ffi_get_balance( wallet_ffi_handle, - (&from) as *const FfiBytes32, + &raw const from, true, - (&mut out_balance) as *mut [u8; 16], - ); + &raw mut out_balance, + ) + .unwrap(); u128::from_le_bytes(out_balance) }; @@ -900,9 +900,9 @@ fn test_wallet_ffi_transfer_shielded() -> Result<()> { let mut out_balance: [u8; 16] = [0; 16]; let _result = wallet_ffi_get_balance( wallet_ffi_handle, - (&to) as *const FfiBytes32, + &raw const to, false, - (&mut out_balance) as *mut [u8; 16], + &raw mut out_balance, ); u128::from_le_bytes(out_balance) }; @@ -911,7 +911,7 @@ fn test_wallet_ffi_transfer_shielded() -> Result<()> { assert_eq!(to_balance, 100); unsafe { - 
wallet_ffi_free_transfer_result((&mut transfer_result) as *mut FfiTransferResult); + wallet_ffi_free_transfer_result(&raw mut transfer_result); wallet_ffi_destroy(wallet_ffi_handle); } @@ -920,21 +920,21 @@ fn test_wallet_ffi_transfer_shielded() -> Result<()> { #[test] fn test_wallet_ffi_transfer_deshielded() -> Result<()> { - let ctx = BlockingTestContext::new().unwrap(); - let home = tempfile::tempdir().unwrap(); - let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path()); + let ctx = BlockingTestContext::new()?; + let home = tempfile::tempdir()?; + let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path())?; let from: FfiBytes32 = (&ctx.ctx().existing_private_accounts()[0]).into(); let to = FfiBytes32::from_bytes([37; 32]); - let amount: [u8; 16] = 100u128.to_le_bytes(); + let amount: [u8; 16] = 100_u128.to_le_bytes(); let mut transfer_result = FfiTransferResult::default(); unsafe { wallet_ffi_transfer_deshielded( wallet_ffi_handle, - (&from) as *const FfiBytes32, - (&to) as *const FfiBytes32, - (&amount) as *const [u8; 16], - (&mut transfer_result) as *mut FfiTransferResult, + &raw const from, + &raw const to, + &raw const amount, + &raw mut transfer_result, ); } @@ -944,7 +944,7 @@ fn test_wallet_ffi_transfer_deshielded() -> Result<()> { // Sync private account local storage with onchain encrypted state unsafe { let mut current_height = 0; - wallet_ffi_get_current_block_height(wallet_ffi_handle, (&mut current_height) as *mut u64); + wallet_ffi_get_current_block_height(wallet_ffi_handle, &raw mut current_height); wallet_ffi_sync_to_block(wallet_ffi_handle, current_height); }; @@ -952,21 +952,17 @@ fn test_wallet_ffi_transfer_deshielded() -> Result<()> { let mut out_balance: [u8; 16] = [0; 16]; let _result = wallet_ffi_get_balance( wallet_ffi_handle, - (&from) as *const FfiBytes32, + &raw const from, false, - (&mut out_balance) as *mut [u8; 16], + &raw mut out_balance, ); u128::from_le_bytes(out_balance) }; let 
to_balance = unsafe { let mut out_balance: [u8; 16] = [0; 16]; - let _result = wallet_ffi_get_balance( - wallet_ffi_handle, - (&to) as *const FfiBytes32, - true, - (&mut out_balance) as *mut [u8; 16], - ); + let _result = + wallet_ffi_get_balance(wallet_ffi_handle, &raw const to, true, &raw mut out_balance); u128::from_le_bytes(out_balance) }; @@ -974,7 +970,7 @@ fn test_wallet_ffi_transfer_deshielded() -> Result<()> { assert_eq!(to_balance, 100); unsafe { - wallet_ffi_free_transfer_result((&mut transfer_result) as *mut FfiTransferResult); + wallet_ffi_free_transfer_result(&raw mut transfer_result); wallet_ffi_destroy(wallet_ffi_handle); } @@ -983,36 +979,33 @@ fn test_wallet_ffi_transfer_deshielded() -> Result<()> { #[test] fn test_wallet_ffi_transfer_private() -> Result<()> { - let ctx = BlockingTestContext::new().unwrap(); - let home = tempfile::tempdir().unwrap(); - let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path()); + let ctx = BlockingTestContext::new()?; + let home = tempfile::tempdir()?; + let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path())?; let from: FfiBytes32 = (&ctx.ctx().existing_private_accounts()[0]).into(); let (to, to_keys) = unsafe { let mut out_account_id = FfiBytes32::default(); let mut out_keys = FfiPrivateAccountKeys::default(); - wallet_ffi_create_account_private( - wallet_ffi_handle, - (&mut out_account_id) as *mut FfiBytes32, - ); + wallet_ffi_create_account_private(wallet_ffi_handle, &raw mut out_account_id); wallet_ffi_get_private_account_keys( wallet_ffi_handle, - (&out_account_id) as *const FfiBytes32, - (&mut out_keys) as *mut FfiPrivateAccountKeys, + &raw const out_account_id, + &raw mut out_keys, ); (out_account_id, out_keys) }; - let amount: [u8; 16] = 100u128.to_le_bytes(); + let amount: [u8; 16] = 100_u128.to_le_bytes(); let mut transfer_result = FfiTransferResult::default(); unsafe { wallet_ffi_transfer_private( wallet_ffi_handle, - (&from) as *const FfiBytes32, - 
(&to_keys) as *const FfiPrivateAccountKeys, - (&amount) as *const [u8; 16], - (&mut transfer_result) as *mut FfiTransferResult, + &raw const from, + &raw const to_keys, + &raw const amount, + &raw mut transfer_result, ); } @@ -1022,7 +1015,7 @@ fn test_wallet_ffi_transfer_private() -> Result<()> { // Sync private account local storage with onchain encrypted state unsafe { let mut current_height = 0; - wallet_ffi_get_current_block_height(wallet_ffi_handle, (&mut current_height) as *mut u64); + wallet_ffi_get_current_block_height(wallet_ffi_handle, &raw mut current_height); wallet_ffi_sync_to_block(wallet_ffi_handle, current_height); }; @@ -1030,9 +1023,9 @@ fn test_wallet_ffi_transfer_private() -> Result<()> { let mut out_balance: [u8; 16] = [0; 16]; let _result = wallet_ffi_get_balance( wallet_ffi_handle, - (&from) as *const FfiBytes32, + &raw const from, false, - (&mut out_balance) as *mut [u8; 16], + &raw mut out_balance, ); u128::from_le_bytes(out_balance) }; @@ -1041,9 +1034,9 @@ fn test_wallet_ffi_transfer_private() -> Result<()> { let mut out_balance: [u8; 16] = [0; 16]; let _result = wallet_ffi_get_balance( wallet_ffi_handle, - (&to) as *const FfiBytes32, + &raw const to, false, - (&mut out_balance) as *mut [u8; 16], + &raw mut out_balance, ); u128::from_le_bytes(out_balance) }; @@ -1052,7 +1045,7 @@ fn test_wallet_ffi_transfer_private() -> Result<()> { assert_eq!(to_balance, 100); unsafe { - wallet_ffi_free_transfer_result((&mut transfer_result) as *mut FfiTransferResult); + wallet_ffi_free_transfer_result(&raw mut transfer_result); wallet_ffi_destroy(wallet_ffi_handle); } diff --git a/key_protocol/Cargo.toml b/key_protocol/Cargo.toml index 0a5e9e9a..7a16b627 100644 --- a/key_protocol/Cargo.toml +++ b/key_protocol/Cargo.toml @@ -4,6 +4,9 @@ version = "0.1.0" edition = "2024" license = { workspace = true } +[lints] +workspace = true + [dependencies] secp256k1 = "0.31.1" diff --git a/key_protocol/src/key_management/ephemeral_key_holder.rs 
b/key_protocol/src/key_management/ephemeral_key_holder.rs index 4aaea58c..6ef9e305 100644 --- a/key_protocol/src/key_management/ephemeral_key_holder.rs +++ b/key_protocol/src/key_management/ephemeral_key_holder.rs @@ -2,8 +2,8 @@ use nssa_core::{ NullifierPublicKey, SharedSecretKey, encryption::{EphemeralPublicKey, EphemeralSecretKey, ViewingPublicKey}, }; -use rand::{RngCore, rngs::OsRng}; -use sha2::Digest; +use rand::{RngCore as _, rngs::OsRng}; +use sha2::Digest as _; #[derive(Debug)] /// Ephemeral secret key holder. Non-clonable as intended for one-time use. Produces ephemeral @@ -12,18 +12,8 @@ pub struct EphemeralKeyHolder { ephemeral_secret_key: EphemeralSecretKey, } -pub fn produce_one_sided_shared_secret_receiver( - vpk: &ViewingPublicKey, -) -> (SharedSecretKey, EphemeralPublicKey) { - let mut esk = [0; 32]; - OsRng.fill_bytes(&mut esk); - ( - SharedSecretKey::new(&esk, vpk), - EphemeralPublicKey::from_scalar(esk), - ) -} - impl EphemeralKeyHolder { + #[must_use] pub fn new(receiver_nullifier_public_key: &NullifierPublicKey) -> Self { let mut nonce_bytes = [0; 16]; OsRng.fill_bytes(&mut nonce_bytes); @@ -36,10 +26,12 @@ impl EphemeralKeyHolder { } } + #[must_use] pub fn generate_ephemeral_public_key(&self) -> EphemeralPublicKey { EphemeralPublicKey::from_scalar(self.ephemeral_secret_key) } + #[must_use] pub fn calculate_shared_secret_sender( &self, receiver_viewing_public_key: &ViewingPublicKey, @@ -47,3 +39,15 @@ impl EphemeralKeyHolder { SharedSecretKey::new(&self.ephemeral_secret_key, receiver_viewing_public_key) } } + +#[must_use] +pub fn produce_one_sided_shared_secret_receiver( + vpk: &ViewingPublicKey, +) -> (SharedSecretKey, EphemeralPublicKey) { + let mut esk = [0; 32]; + OsRng.fill_bytes(&mut esk); + ( + SharedSecretKey::new(&esk, vpk), + EphemeralPublicKey::from_scalar(esk), + ) +} diff --git a/key_protocol/src/key_management/key_tree/chain_index.rs b/key_protocol/src/key_management/key_tree/chain_index.rs index d5fbf401..b22dc779 100644 --- 
a/key_protocol/src/key_management/key_tree/chain_index.rs +++ b/key_protocol/src/key_management/key_tree/chain_index.rs @@ -1,6 +1,6 @@ use std::{fmt::Display, str::FromStr}; -use itertools::Itertools; +use itertools::Itertools as _; use serde::{Deserialize, Serialize}; #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Serialize, Deserialize, Hash)] @@ -23,12 +23,12 @@ impl FromStr for ChainIndex { } if s == "/" { - return Ok(ChainIndex(vec![])); + return Ok(Self(vec![])); } let uprooted_substring = s.strip_prefix("/").unwrap(); - let splitted_chain: Vec<&str> = uprooted_substring.split("/").collect(); + let splitted_chain: Vec<&str> = uprooted_substring.split('/').collect(); let mut res = vec![]; for split_ch in splitted_chain { @@ -47,7 +47,7 @@ impl Display for ChainIndex { write!(f, "{cci}/")?; } if let Some(last) = self.0.last() { - write!(f, "{}", last)?; + write!(f, "{last}")?; } Ok(()) } @@ -55,84 +55,96 @@ impl Display for ChainIndex { impl Default for ChainIndex { fn default() -> Self { - ChainIndex::from_str("/").expect("Root parsing failure") + Self::from_str("/").expect("Root parsing failure") } } impl ChainIndex { + #[must_use] pub fn root() -> Self { - ChainIndex::default() + Self::default() } + #[must_use] pub fn chain(&self) -> &[u32] { &self.0 } + #[must_use] pub fn index(&self) -> Option { self.chain().last().copied() } - pub fn next_in_line(&self) -> ChainIndex { + #[must_use] + pub fn next_in_line(&self) -> Option { let mut chain = self.0.clone(); // ToDo: Add overflow check if let Some(last_p) = chain.last_mut() { - *last_p += 1 + *last_p = last_p.checked_add(1)?; } - ChainIndex(chain) + Some(Self(chain)) } - pub fn previous_in_line(&self) -> Option { + #[must_use] + pub fn previous_in_line(&self) -> Option { let mut chain = self.0.clone(); if let Some(last_p) = chain.last_mut() { *last_p = last_p.checked_sub(1)?; } - Some(ChainIndex(chain)) + Some(Self(chain)) } - pub fn parent(&self) -> Option { + #[must_use] + pub fn parent(&self) -> 
Option { if self.0.is_empty() { None } else { - Some(ChainIndex(self.0[..(self.0.len() - 1)].to_vec())) + let last = self.0.len().checked_sub(1)?; + Some(Self(self.0[..last].to_vec())) } } - pub fn nth_child(&self, child_id: u32) -> ChainIndex { + #[must_use] + pub fn nth_child(&self, child_id: u32) -> Self { let mut chain = self.0.clone(); chain.push(child_id); - ChainIndex(chain) + Self(chain) } + #[must_use] pub fn depth(&self) -> u32 { - self.0.iter().map(|cci| cci + 1).sum() + self.0 + .iter() + .map(|cci| cci.checked_add(1).expect("Max cci reached")) + .sum() } fn collapse_back(&self) -> Option { let mut res = self.parent()?; let last_mut = res.0.last_mut()?; - *last_mut += *(self.0.last()?) + 1; + *last_mut = last_mut.checked_add(self.0.last()?.checked_add(1)?)?; Some(res) } - fn shuffle_iter(&self) -> impl Iterator { + fn shuffle_iter(&self) -> impl Iterator { self.0 .iter() .permutations(self.0.len()) .unique() - .map(|item| ChainIndex(item.into_iter().cloned().collect())) + .map(|item| Self(item.into_iter().copied().collect())) } - pub fn chain_ids_at_depth(depth: usize) -> impl Iterator { - let mut stack = vec![ChainIndex(vec![0; depth])]; - let mut cumulative_stack = vec![ChainIndex(vec![0; depth])]; + pub fn chain_ids_at_depth(depth: usize) -> impl Iterator { + let mut stack = vec![Self(vec![0; depth])]; + let mut cumulative_stack = vec![Self(vec![0; depth])]; - while let Some(id) = stack.pop() { - if let Some(collapsed_id) = id.collapse_back() { + while let Some(top_id) = stack.pop() { + if let Some(collapsed_id) = top_id.collapse_back() { for id in collapsed_id.shuffle_iter() { stack.push(id.clone()); cumulative_stack.push(id); @@ -143,12 +155,12 @@ impl ChainIndex { cumulative_stack.into_iter().unique() } - pub fn chain_ids_at_depth_rev(depth: usize) -> impl Iterator { - let mut stack = vec![ChainIndex(vec![0; depth])]; - let mut cumulative_stack = vec![ChainIndex(vec![0; depth])]; + pub fn chain_ids_at_depth_rev(depth: usize) -> impl Iterator { + 
let mut stack = vec![Self(vec![0; depth])]; + let mut cumulative_stack = vec![Self(vec![0; depth])]; - while let Some(id) = stack.pop() { - if let Some(collapsed_id) = id.collapse_back() { + while let Some(top_id) = stack.pop() { + if let Some(collapsed_id) = top_id.collapse_back() { for id in collapsed_id.shuffle_iter() { stack.push(id.clone()); cumulative_stack.push(id); @@ -165,7 +177,7 @@ mod tests { use super::*; #[test] - fn test_chain_id_root_correct() { + fn chain_id_root_correct() { let chain_id = ChainIndex::root(); let chain_id_2 = ChainIndex::from_str("/").unwrap(); @@ -173,21 +185,21 @@ mod tests { } #[test] - fn test_chain_id_deser_correct() { + fn chain_id_deser_correct() { let chain_id = ChainIndex::from_str("/257").unwrap(); assert_eq!(chain_id.chain(), &[257]); } #[test] - fn test_chain_id_deser_failure_no_root() { + fn chain_id_deser_failure_no_root() { let chain_index_error = ChainIndex::from_str("257").err().unwrap(); assert!(matches!(chain_index_error, ChainIndexError::NoRootFound)); } #[test] - fn test_chain_id_deser_failure_int_parsing_failure() { + fn chain_id_deser_failure_int_parsing_failure() { let chain_index_error = ChainIndex::from_str("/hello").err().unwrap(); assert!(matches!( @@ -197,15 +209,15 @@ mod tests { } #[test] - fn test_chain_id_next_in_line_correct() { + fn chain_id_next_in_line_correct() { let chain_id = ChainIndex::from_str("/257").unwrap(); - let next_in_line = chain_id.next_in_line(); + let next_in_line = chain_id.next_in_line().unwrap(); assert_eq!(next_in_line, ChainIndex::from_str("/258").unwrap()); } #[test] - fn test_chain_id_child_correct() { + fn chain_id_child_correct() { let chain_id = ChainIndex::from_str("/257").unwrap(); let child = chain_id.nth_child(3); @@ -213,88 +225,88 @@ mod tests { } #[test] - fn test_correct_display() { + fn correct_display() { let chainid = ChainIndex(vec![5, 7, 8]); let string_index = format!("{chainid}"); - assert_eq!(string_index, "/5/7/8".to_string()); + 
assert_eq!(string_index, "/5/7/8".to_owned()); } #[test] - fn test_prev_in_line() { + fn prev_in_line() { let chain_id = ChainIndex(vec![1, 7, 3]); let prev_chain_id = chain_id.previous_in_line().unwrap(); - assert_eq!(prev_chain_id, ChainIndex(vec![1, 7, 2])) + assert_eq!(prev_chain_id, ChainIndex(vec![1, 7, 2])); } #[test] - fn test_prev_in_line_no_prev() { + fn prev_in_line_no_prev() { let chain_id = ChainIndex(vec![1, 7, 0]); let prev_chain_id = chain_id.previous_in_line(); - assert_eq!(prev_chain_id, None) + assert_eq!(prev_chain_id, None); } #[test] - fn test_parent() { + fn parent() { let chain_id = ChainIndex(vec![1, 7, 3]); let parent_chain_id = chain_id.parent().unwrap(); - assert_eq!(parent_chain_id, ChainIndex(vec![1, 7])) + assert_eq!(parent_chain_id, ChainIndex(vec![1, 7])); } #[test] - fn test_parent_no_parent() { + fn parent_no_parent() { let chain_id = ChainIndex(vec![]); let parent_chain_id = chain_id.parent(); - assert_eq!(parent_chain_id, None) + assert_eq!(parent_chain_id, None); } #[test] - fn test_parent_root() { + fn parent_root() { let chain_id = ChainIndex(vec![1]); let parent_chain_id = chain_id.parent().unwrap(); - assert_eq!(parent_chain_id, ChainIndex::root()) + assert_eq!(parent_chain_id, ChainIndex::root()); } #[test] - fn test_collapse_back() { + fn collapse_back() { let chain_id = ChainIndex(vec![1, 1]); let collapsed = chain_id.collapse_back().unwrap(); - assert_eq!(collapsed, ChainIndex(vec![3])) + assert_eq!(collapsed, ChainIndex(vec![3])); } #[test] - fn test_collapse_back_one() { + fn collapse_back_one() { let chain_id = ChainIndex(vec![1]); let collapsed = chain_id.collapse_back(); - assert_eq!(collapsed, None) + assert_eq!(collapsed, None); } #[test] - fn test_collapse_back_root() { + fn collapse_back_root() { let chain_id = ChainIndex(vec![]); let collapsed = chain_id.collapse_back(); - assert_eq!(collapsed, None) + assert_eq!(collapsed, None); } #[test] - fn test_shuffle() { + fn shuffle() { for id in 
ChainIndex::chain_ids_at_depth(5) { println!("{id}"); } diff --git a/key_protocol/src/key_management/key_tree/mod.rs b/key_protocol/src/key_management/key_tree/mod.rs index ead60595..a94e8291 100644 --- a/key_protocol/src/key_management/key_tree/mod.rs +++ b/key_protocol/src/key_management/key_tree/mod.rs @@ -1,7 +1,4 @@ -use std::{ - collections::{BTreeMap, HashMap}, - sync::Arc, -}; +use std::{collections::BTreeMap, sync::Arc}; use anyhow::Result; use common::sequencer_client::SequencerClient; @@ -25,13 +22,14 @@ pub const DEPTH_SOFT_CAP: u32 = 20; #[derive(Debug, Serialize, Deserialize, Clone)] pub struct KeyTree { pub key_map: BTreeMap, - pub account_id_map: HashMap, + pub account_id_map: BTreeMap, } pub type KeyTreePublic = KeyTree; pub type KeyTreePrivate = KeyTree; impl KeyTree { + #[must_use] pub fn new(seed: &SeedHolder) -> Self { let seed_fit: [u8; 64] = seed .seed @@ -43,7 +41,7 @@ impl KeyTree { let account_id = root_keys.account_id(); let key_map = BTreeMap::from_iter([(ChainIndex::root(), root_keys)]); - let account_id_map = HashMap::from_iter([(account_id, ChainIndex::root())]); + let account_id_map = BTreeMap::from_iter([(account_id, ChainIndex::root())]); Self { key_map, @@ -52,7 +50,7 @@ impl KeyTree { } pub fn new_from_root(root: N) -> Self { - let account_id_map = HashMap::from_iter([(root.account_id(), ChainIndex::root())]); + let account_id_map = BTreeMap::from_iter([(root.account_id(), ChainIndex::root())]); let key_map = BTreeMap::from_iter([(ChainIndex::root(), root)]); Self { @@ -63,6 +61,7 @@ impl KeyTree { // ToDo: Add function to create a tree from list of nodes with consistency check. 
+ #[must_use] pub fn find_next_last_child_of_id(&self, parent_id: &ChainIndex) -> Option { if !self.key_map.contains_key(parent_id) { return None; @@ -82,19 +81,19 @@ impl KeyTree { let rightmost_child = parent_id.nth_child(right); let rightmost_ref = self.key_map.get(&rightmost_child); - let rightmost_ref_next = self.key_map.get(&rightmost_child.next_in_line()); + let rightmost_ref_next = self.key_map.get(&rightmost_child.next_in_line()?); match (&rightmost_ref, &rightmost_ref_next) { (Some(_), Some(_)) => { left_border = right; - right = (right + right_border) / 2; + right = u32::midpoint(right, right_border); } (Some(_), None) => { - break Some(right + 1); + break Some(right.checked_add(1)?); } (None, None) => { right_border = right; - right = (left_border + right) / 2; + right = u32::midpoint(left_border, right); } (None, Some(_)) => { unreachable!(); @@ -131,7 +130,7 @@ impl KeyTree { break 'outer chain_id; } } - depth += 1; + depth = depth.checked_add(1).expect("Max depth reached"); } } @@ -152,16 +151,15 @@ impl KeyTree { self.fill_node(&self.find_next_slot_layered()) } + #[must_use] pub fn get_node(&self, account_id: nssa::AccountId) -> Option<&N> { - self.account_id_map - .get(&account_id) - .and_then(|chain_id| self.key_map.get(chain_id)) + let chain_id = self.account_id_map.get(&account_id)?; + self.key_map.get(chain_id) } pub fn get_node_mut(&mut self, account_id: nssa::AccountId) -> Option<&mut N> { - self.account_id_map - .get(&account_id) - .and_then(|chain_id| self.key_map.get_mut(chain_id)) + let chain_id = self.account_id_map.get(&account_id)?; + self.key_map.get_mut(chain_id) } pub fn insert(&mut self, account_id: nssa::AccountId, chain_index: ChainIndex, node: N) { @@ -170,7 +168,7 @@ impl KeyTree { } pub fn remove(&mut self, addr: nssa::AccountId) -> Option { - let chain_index = self.account_id_map.remove(&addr).unwrap(); + let chain_index = self.account_id_map.remove(&addr)?; self.key_map.remove(&chain_index) } @@ -179,7 +177,7 @@ impl KeyTree 
{ /// For given `depth` adds children to a tree such that their `ChainIndex::depth(&self) < /// depth`. /// - /// Tree must be empty before start + /// Tree must be empty before start. pub fn generate_tree_for_depth(&mut self, depth: u32) { let mut id_stack = vec![ChainIndex::root()]; @@ -189,23 +187,26 @@ impl KeyTree { while (next_id.depth()) < depth { self.generate_new_node(&curr_id); id_stack.push(next_id.clone()); - next_id = next_id.next_in_line(); + next_id = match next_id.next_in_line() { + Some(id) => id, + None => break, + }; } } } } impl KeyTree { - /// Cleanup of all non-initialized accounts in a private tree + /// Cleanup of all non-initialized accounts in a private tree. /// /// For given `depth` checks children to a tree such that their `ChainIndex::depth(&self) < /// depth`. /// /// If account is default, removes them. /// - /// Chain must be parsed for accounts beforehand + /// Chain must be parsed for accounts beforehand. /// - /// Fast, leaves gaps between accounts + /// Fast, leaves gaps between accounts. pub fn cleanup_tree_remove_uninit_for_depth(&mut self, depth: u32) { let mut id_stack = vec![ChainIndex::root()]; @@ -222,22 +223,26 @@ impl KeyTree { while (next_id.depth()) < depth { id_stack.push(next_id.clone()); - next_id = next_id.next_in_line(); + next_id = match next_id.next_in_line() { + Some(id) => id, + None => break, + }; } } } - /// Cleanup of non-initialized accounts in a private tree + /// Cleanup of non-initialized accounts in a private tree. /// /// If account is default, removes them, stops at first non-default account. /// - /// Walks through tree in lairs of same depth using `ChainIndex::chain_ids_at_depth()` + /// Walks through tree in lairs of same depth using `ChainIndex::chain_ids_at_depth()`. /// - /// Chain must be parsed for accounts beforehand + /// Chain must be parsed for accounts beforehand. /// /// Slow, maintains tree consistency. 
pub fn cleanup_tree_remove_uninit_layered(&mut self, depth: u32) { - 'outer: for i in (1..(depth as usize)).rev() { + let depth = usize::try_from(depth).expect("Depth is expected to fit in usize"); + 'outer: for i in (1..depth).rev() { println!("Cleanup of tree at depth {i}"); for id in ChainIndex::chain_ids_at_depth(i) { if let Some(node) = self.key_map.get(&id) { @@ -254,14 +259,14 @@ impl KeyTree { } impl KeyTree { - /// Cleanup of all non-initialized accounts in a public tree + /// Cleanup of all non-initialized accounts in a public tree. /// /// For given `depth` checks children to a tree such that their `ChainIndex::depth(&self) < /// depth`. /// /// If account is default, removes them. /// - /// Fast, leaves gaps between accounts + /// Fast, leaves gaps between accounts. pub async fn cleanup_tree_remove_ininit_for_depth( &mut self, depth: u32, @@ -283,18 +288,21 @@ impl KeyTree { while (next_id.depth()) < depth { id_stack.push(next_id.clone()); - next_id = next_id.next_in_line(); + next_id = match next_id.next_in_line() { + Some(id) => id, + None => break, + }; } } Ok(()) } - /// Cleanup of non-initialized accounts in a public tree + /// Cleanup of non-initialized accounts in a public tree. /// /// If account is default, removes them, stops at first non-default account. /// - /// Walks through tree in lairs of same depth using `ChainIndex::chain_ids_at_depth()` + /// Walks through tree in lairs of same depth using `ChainIndex::chain_ids_at_depth()`. /// /// Slow, maintains tree consistency. 
pub async fn cleanup_tree_remove_uninit_layered( @@ -302,7 +310,8 @@ impl KeyTree { depth: u32, client: Arc, ) -> Result<()> { - 'outer: for i in (1..(depth as usize)).rev() { + let depth = usize::try_from(depth).expect("Depth is expected to fit in usize"); + 'outer: for i in (1..depth).rev() { println!("Cleanup of tree at depth {i}"); for id in ChainIndex::chain_ids_at_depth(i) { if let Some(node) = self.key_map.get(&id) { @@ -325,7 +334,9 @@ impl KeyTree { #[cfg(test)] mod tests { - use std::{collections::HashSet, str::FromStr}; + #![expect(clippy::shadow_unrelated, reason = "We don't care about this in tests")] + + use std::{collections::HashSet, str::FromStr as _}; use nssa::AccountId; @@ -338,7 +349,7 @@ mod tests { } #[test] - fn test_simple_key_tree() { + fn simple_key_tree() { let seed_holder = seed_holder_for_tests(); let tree = KeyTreePublic::new(&seed_holder); @@ -351,7 +362,7 @@ mod tests { } #[test] - fn test_small_key_tree() { + fn small_key_tree() { let seed_holder = seed_holder_for_tests(); let mut tree = KeyTreePrivate::new(&seed_holder); @@ -390,7 +401,7 @@ mod tests { } #[test] - fn test_key_tree_can_not_make_child_keys() { + fn key_tree_can_not_make_child_keys() { let seed_holder = seed_holder_for_tests(); let mut tree = KeyTreePrivate::new(&seed_holder); @@ -420,7 +431,7 @@ mod tests { } #[test] - fn test_key_tree_complex_structure() { + fn key_tree_complex_structure() { let seed_holder = seed_holder_for_tests(); let mut tree = KeyTreePublic::new(&seed_holder); @@ -515,7 +526,7 @@ mod tests { } #[test] - fn test_tree_balancing_automatic() { + fn tree_balancing_automatic() { let seed_holder = seed_holder_for_tests(); let mut tree = KeyTreePublic::new(&seed_holder); @@ -530,7 +541,7 @@ mod tests { } #[test] - fn test_cleanup() { + fn cleanup() { let seed_holder = seed_holder_for_tests(); let mut tree = KeyTreePrivate::new(&seed_holder); @@ -563,13 +574,13 @@ mod tests { tree.cleanup_tree_remove_uninit_layered(10); let mut key_set_res = 
HashSet::new(); - key_set_res.insert("/0".to_string()); - key_set_res.insert("/1".to_string()); - key_set_res.insert("/2".to_string()); - key_set_res.insert("/".to_string()); - key_set_res.insert("/0/0".to_string()); - key_set_res.insert("/0/1".to_string()); - key_set_res.insert("/1/0".to_string()); + key_set_res.insert("/0".to_owned()); + key_set_res.insert("/1".to_owned()); + key_set_res.insert("/2".to_owned()); + key_set_res.insert("/".to_owned()); + key_set_res.insert("/0/0".to_owned()); + key_set_res.insert("/0/1".to_owned()); + key_set_res.insert("/1/0".to_owned()); let mut key_set = HashSet::new(); @@ -579,28 +590,16 @@ mod tests { assert_eq!(key_set, key_set_res); - let acc = tree - .key_map - .get(&ChainIndex::from_str("/1").unwrap()) - .unwrap(); + let acc = &tree.key_map[&ChainIndex::from_str("/1").unwrap()]; assert_eq!(acc.value.1.balance, 2); - let acc = tree - .key_map - .get(&ChainIndex::from_str("/2").unwrap()) - .unwrap(); + let acc = &tree.key_map[&ChainIndex::from_str("/2").unwrap()]; assert_eq!(acc.value.1.balance, 3); - let acc = tree - .key_map - .get(&ChainIndex::from_str("/0/1").unwrap()) - .unwrap(); + let acc = &tree.key_map[&ChainIndex::from_str("/0/1").unwrap()]; assert_eq!(acc.value.1.balance, 5); - let acc = tree - .key_map - .get(&ChainIndex::from_str("/1/0").unwrap()) - .unwrap(); + let acc = &tree.key_map[&ChainIndex::from_str("/1/0").unwrap()]; assert_eq!(acc.value.1.balance, 6); } } diff --git a/key_protocol/src/key_management/key_tree/traits.rs b/key_protocol/src/key_management/key_tree/traits.rs index 5770c47d..65e8fae0 100644 --- a/key_protocol/src/key_management/key_tree/traits.rs +++ b/key_protocol/src/key_management/key_tree/traits.rs @@ -1,9 +1,10 @@ -/// Trait, that reperesents a Node in hierarchical key tree +/// Trait, that reperesents a Node in hierarchical key tree. pub trait KeyNode { - /// Tree root node + /// Tree root node. fn root(seed: [u8; 64]) -> Self; - /// `cci`'s child of node + /// `cci`'s child of node. 
+ #[must_use] fn nth_child(&self, cci: u32) -> Self; fn chain_code(&self) -> &[u8; 32]; diff --git a/key_protocol/src/key_management/mod.rs b/key_protocol/src/key_management/mod.rs index d5aacdf9..e29e5862 100644 --- a/key_protocol/src/key_management/mod.rs +++ b/key_protocol/src/key_management/mod.rs @@ -5,22 +5,23 @@ use nssa_core::{ use secret_holders::{PrivateKeyHolder, SecretSpendingKey, SeedHolder}; use serde::{Deserialize, Serialize}; -pub type PublicAccountSigningKey = [u8; 32]; - pub mod ephemeral_key_holder; pub mod key_tree; pub mod secret_holders; +pub type PublicAccountSigningKey = [u8; 32]; + #[derive(Serialize, Deserialize, Clone, Debug)] -/// Entrypoint to key management +/// Entrypoint to key management. pub struct KeyChain { pub secret_spending_key: SecretSpendingKey, pub private_key_holder: PrivateKeyHolder, - pub nullifier_public_key: NullifierPublicKey, + pub nullifer_public_key: NullifierPublicKey, pub viewing_public_key: ViewingPublicKey, } impl KeyChain { + #[must_use] pub fn new_os_random() -> Self { // Currently dropping SeedHolder at the end of initialization. // Now entirely sure if we need it in the future. @@ -29,17 +30,18 @@ impl KeyChain { let private_key_holder = secret_spending_key.produce_private_key_holder(None); - let nullifier_public_key = private_key_holder.generate_nullifier_public_key(); + let nullifer_public_key = private_key_holder.generate_nullifier_public_key(); let viewing_public_key = private_key_holder.generate_viewing_public_key(); Self { secret_spending_key, private_key_holder, - nullifier_public_key, + nullifer_public_key, viewing_public_key, } } + #[must_use] pub fn new_mnemonic(passphrase: String) -> Self { // Currently dropping SeedHolder at the end of initialization. // Not entirely sure if we need it in the future. 
@@ -48,25 +50,26 @@ impl KeyChain { let private_key_holder = secret_spending_key.produce_private_key_holder(None); - let nullifier_public_key = private_key_holder.generate_nullifier_public_key(); + let nullifer_public_key = private_key_holder.generate_nullifier_public_key(); let viewing_public_key = private_key_holder.generate_viewing_public_key(); Self { secret_spending_key, private_key_holder, - nullifier_public_key, + nullifer_public_key, viewing_public_key, } } + #[must_use] pub fn calculate_shared_secret_receiver( &self, - ephemeral_public_key_sender: EphemeralPublicKey, + ephemeral_public_key_sender: &EphemeralPublicKey, index: Option, ) -> SharedSecretKey { SharedSecretKey::new( &self.secret_spending_key.generate_viewing_secret_key(index), - &ephemeral_public_key_sender, + ephemeral_public_key_sender, ) } } @@ -74,9 +77,9 @@ impl KeyChain { #[cfg(test)] mod tests { use aes_gcm::aead::OsRng; - use base58::ToBase58; - use k256::{AffinePoint, elliptic_curve::group::GroupEncoding}; - use rand::RngCore; + use base58::ToBase58 as _; + use k256::{AffinePoint, elliptic_curve::group::GroupEncoding as _}; + use rand::RngCore as _; use super::*; use crate::key_management::{ @@ -84,19 +87,19 @@ mod tests { }; #[test] - fn test_new_os_random() { + fn new_os_random() { // Ensure that a new KeyChain instance can be created without errors. 
let account_id_key_holder = KeyChain::new_os_random(); // Check that key holder fields are initialized with expected types assert_ne!( - account_id_key_holder.nullifier_public_key.as_ref(), - &[0u8; 32] + account_id_key_holder.nullifer_public_key.as_ref(), + &[0_u8; 32] ); } #[test] - fn test_calculate_shared_secret_receiver() { + fn calculate_shared_secret_receiver() { let account_id_key_holder = KeyChain::new_os_random(); // Generate a random ephemeral public key sender @@ -106,7 +109,7 @@ mod tests { // Calculate shared secret let _shared_secret = account_id_key_holder - .calculate_shared_secret_receiver(ephemeral_public_key_sender, None); + .calculate_shared_secret_receiver(&ephemeral_public_key_sender, None); } #[test] @@ -116,7 +119,7 @@ mod tests { let utxo_secret_key_holder = top_secret_key_holder.produce_private_key_holder(None); - let nullifier_public_key = utxo_secret_key_holder.generate_nullifier_public_key(); + let nullifer_public_key = utxo_secret_key_holder.generate_nullifier_public_key(); let viewing_public_key = utxo_secret_key_holder.generate_viewing_public_key(); let pub_account_signing_key = nssa::PrivateKey::new_os_random(); @@ -147,7 +150,7 @@ mod tests { println!("Account {:?}", account.value().to_base58()); println!( "Nulifier public key {:?}", - hex::encode(nullifier_public_key.to_byte_array()) + hex::encode(nullifer_public_key.to_byte_array()) ); println!( "Viewing public key {:?}", @@ -177,14 +180,14 @@ mod tests { } #[test] - fn test_non_trivial_chain_index() { + fn non_trivial_chain_index() { let keys = account_with_chain_index_2_for_tests(); - let eph_key_holder = EphemeralKeyHolder::new(&keys.nullifier_public_key); + let eph_key_holder = EphemeralKeyHolder::new(&keys.nullifer_public_key); let key_sender = eph_key_holder.calculate_shared_secret_sender(&keys.viewing_public_key); let key_receiver = keys.calculate_shared_secret_receiver( - eph_key_holder.generate_ephemeral_public_key(), + &eph_key_holder.generate_ephemeral_public_key(), 
Some(2), ); diff --git a/key_protocol/src/key_management/secret_holders.rs b/key_protocol/src/key_management/secret_holders.rs index 316e6154..b504f0df 100644 --- a/key_protocol/src/key_management/secret_holders.rs +++ b/key_protocol/src/key_management/secret_holders.rs @@ -4,9 +4,9 @@ use nssa_core::{ NullifierPublicKey, NullifierSecretKey, encryption::{Scalar, ViewingPublicKey}, }; -use rand::{RngCore, rngs::OsRng}; +use rand::{RngCore as _, rngs::OsRng}; use serde::{Deserialize, Serialize}; -use sha2::{Digest, digest::FixedOutput}; +use sha2::{Digest as _, digest::FixedOutput as _}; const NSSA_ENTROPY_BYTES: [u8; 32] = [0; 32]; @@ -25,14 +25,16 @@ pub struct SecretSpendingKey(pub(crate) [u8; 32]); pub type ViewingSecretKey = Scalar; #[derive(Serialize, Deserialize, Debug, Clone)] -/// Private key holder. Produces public keys. Can produce account_id. Can produce shared secret for -/// recepient. +/// Private key holder. Produces public keys. Can produce `account_id`. Can produce shared secret +/// for recepient. 
+#[expect(clippy::partial_pub_fields, reason = "TODO: fix later")] pub struct PrivateKeyHolder { pub nullifier_secret_key: NullifierSecretKey, pub(crate) viewing_secret_key: ViewingSecretKey, } impl SeedHolder { + #[must_use] pub fn new_os_random() -> Self { let mut enthopy_bytes: [u8; 32] = [0; 32]; OsRng.fill_bytes(&mut enthopy_bytes); @@ -46,6 +48,7 @@ impl SeedHolder { } } + #[must_use] pub fn new_mnemonic(passphrase: String) -> Self { let mnemonic = Mnemonic::from_entropy(&NSSA_ENTROPY_BYTES) .expect("Enthropy must be a multiple of 32 bytes"); @@ -56,6 +59,7 @@ impl SeedHolder { } } + #[must_use] pub fn generate_secret_spending_key_hash(&self) -> HashType { let mut hash = hmac_sha512::HMAC::mac(&self.seed, "NSSA_seed"); @@ -67,22 +71,24 @@ impl SeedHolder { HashType(*hash.first_chunk::<32>().unwrap()) } + #[must_use] pub fn produce_top_secret_key_holder(&self) -> SecretSpendingKey { SecretSpendingKey(self.generate_secret_spending_key_hash().into()) } } impl SecretSpendingKey { + #[must_use] pub fn generate_nullifier_secret_key(&self, index: Option) -> NullifierSecretKey { - let index = match index { - None => 0u32, - _ => index.expect("Expect a valid u32"), - }; - const PREFIX: &[u8; 8] = b"LEE/keys"; const SUFFIX_1: &[u8; 1] = &[1]; const SUFFIX_2: &[u8; 19] = &[0; 19]; + let index = match index { + None => 0_u32, + _ => index.expect("Expect a valid u32"), + }; + let mut hasher = sha2::Sha256::new(); hasher.update(PREFIX); hasher.update(self.0); @@ -93,15 +99,17 @@ impl SecretSpendingKey { ::from(hasher.finalize_fixed()) } + #[must_use] pub fn generate_viewing_secret_key(&self, index: Option) -> ViewingSecretKey { - let index = match index { - None => 0u32, - _ => index.expect("Expect a valid u32"), - }; const PREFIX: &[u8; 8] = b"LEE/keys"; const SUFFIX_1: &[u8; 1] = &[2]; const SUFFIX_2: &[u8; 19] = &[0; 19]; + let index = match index { + None => 0_u32, + _ => index.expect("Expect a valid u32"), + }; + let mut hasher = sha2::Sha256::new(); 
hasher.update(PREFIX); hasher.update(self.0); @@ -112,6 +120,7 @@ impl SecretSpendingKey { hasher.finalize_fixed().into() } + #[must_use] pub fn produce_private_key_holder(&self, index: Option) -> PrivateKeyHolder { PrivateKeyHolder { nullifier_secret_key: self.generate_nullifier_secret_key(index), @@ -121,10 +130,12 @@ impl SecretSpendingKey { } impl PrivateKeyHolder { + #[must_use] pub fn generate_nullifier_public_key(&self) -> NullifierPublicKey { (&self.nullifier_secret_key).into() } + #[must_use] pub fn generate_viewing_public_key(&self) -> ViewingPublicKey { ViewingPublicKey::from_scalar(self.viewing_secret_key) } @@ -148,7 +159,7 @@ mod tests { assert_eq!(seed_holder.seed.len(), 64); - let _ = seed_holder.generate_secret_spending_key_hash(); + let _hash = seed_holder.generate_secret_spending_key_hash(); } #[test] @@ -159,15 +170,15 @@ mod tests { let top_secret_key_holder = seed_holder.produce_top_secret_key_holder(); - let _ = top_secret_key_holder.generate_viewing_secret_key(None); + let _vsk = top_secret_key_holder.generate_viewing_secret_key(None); } #[test] fn two_seeds_generated_same_from_same_mnemonic() { let mnemonic = "test_pass"; - let seed_holder1 = SeedHolder::new_mnemonic(mnemonic.to_string()); - let seed_holder2 = SeedHolder::new_mnemonic(mnemonic.to_string()); + let seed_holder1 = SeedHolder::new_mnemonic(mnemonic.to_owned()); + let seed_holder2 = SeedHolder::new_mnemonic(mnemonic.to_owned()); assert_eq!(seed_holder1.seed, seed_holder2.seed); } diff --git a/key_protocol/src/key_protocol_core/mod.rs b/key_protocol/src/key_protocol_core/mod.rs index 65f0aeb3..65c592e3 100644 --- a/key_protocol/src/key_protocol_core/mod.rs +++ b/key_protocol/src/key_protocol_core/mod.rs @@ -14,14 +14,14 @@ pub type PublicKey = AffinePoint; #[derive(Clone, Debug, Serialize, Deserialize)] pub struct NSSAUserData { - /// Default public accounts + /// Default public accounts. 
pub default_pub_account_signing_keys: BTreeMap, - /// Default private accounts + /// Default private accounts. pub default_user_private_accounts: BTreeMap, - /// Tree of public keys + /// Tree of public keys. pub public_key_tree: KeyTreePublic, - /// Tree of private keys + /// Tree of private keys. pub private_key_tree: KeyTreePrivate, } @@ -34,7 +34,7 @@ impl NSSAUserData { let expected_account_id = nssa::AccountId::from(&nssa::PublicKey::new_from_private_key(key)); if &expected_account_id != account_id { - println!("{}, {}", expected_account_id, account_id); + println!("{expected_account_id}, {account_id}"); check_res = false; } } @@ -48,7 +48,7 @@ impl NSSAUserData { for (account_id, (key, _)) in accounts_keys_map { let expected_account_id = nssa::AccountId::from(&key.nullifier_public_key); if expected_account_id != *account_id { - println!("{}, {}", expected_account_id, account_id); + println!("{expected_account_id}, {account_id}"); check_res = false; } } @@ -84,9 +84,9 @@ impl NSSAUserData { }) } - /// Generated new private key for public transaction signatures + /// Generated new private key for public transaction signatures. /// - /// Returns the account_id of new account + /// Returns the `account_id` of new account. pub fn generate_new_public_transaction_private_key( &mut self, parent_cci: Option, @@ -103,23 +103,20 @@ impl NSSAUserData { } } - /// Returns the signing key for public transaction signatures + /// Returns the signing key for public transaction signatures. 
+ #[must_use] pub fn get_pub_account_signing_key( &self, account_id: nssa::AccountId, ) -> Option<&nssa::PrivateKey> { - // First seek in defaults - if let Some(key) = self.default_pub_account_signing_keys.get(&account_id) { - Some(key) - // Then seek in tree - } else { - self.public_key_tree.get_node(account_id).map(Into::into) - } + self.default_pub_account_signing_keys + .get(&account_id) + .or_else(|| self.public_key_tree.get_node(account_id).map(Into::into)) } - /// Generated new private key for privacy preserving transactions + /// Generated new private key for privacy preserving transactions. /// - /// Returns the account_id of new account + /// Returns the `account_id` of new account. pub fn generate_new_privacy_preserving_transaction_key_chain( &mut self, parent_cci: Option, @@ -136,21 +133,18 @@ impl NSSAUserData { } } - /// Returns the signing key for public transaction signatures + /// Returns the signing key for public transaction signatures. + #[must_use] pub fn get_private_account( &self, account_id: nssa::AccountId, ) -> Option<&(KeyChain, nssa_core::account::Account)> { - // First seek in defaults - if let Some(key) = self.default_user_private_accounts.get(&account_id) { - Some(key) - // Then seek in tree - } else { - self.private_key_tree.get_node(account_id).map(Into::into) - } + self.default_user_private_accounts + .get(&account_id) + .or_else(|| self.private_key_tree.get_node(account_id).map(Into::into)) } - /// Returns the signing key for public transaction signatures + /// Returns the signing key for public transaction signatures. 
pub fn get_private_account_mut( &mut self, account_id: &nssa::AccountId, @@ -190,8 +184,8 @@ impl Default for NSSAUserData { Self::new_with_accounts( BTreeMap::new(), BTreeMap::new(), - KeyTreePublic::new(&SeedHolder::new_mnemonic("default".to_string())), - KeyTreePrivate::new(&SeedHolder::new_mnemonic("default".to_string())), + KeyTreePublic::new(&SeedHolder::new_mnemonic("default".to_owned())), + KeyTreePrivate::new(&SeedHolder::new_mnemonic("default".to_owned())), ) .unwrap() } @@ -202,7 +196,7 @@ mod tests { use super::*; #[test] - fn test_new_account() { + fn new_account() { let mut user_data = NSSAUserData::default(); let (account_id_private, _) = user_data diff --git a/key_protocol/src/lib.rs b/key_protocol/src/lib.rs index 1a52c202..e3fe31cf 100644 --- a/key_protocol/src/lib.rs +++ b/key_protocol/src/lib.rs @@ -1,2 +1,4 @@ +#![expect(clippy::print_stdout, reason = "TODO: fix later")] + pub mod key_management; pub mod key_protocol_core; diff --git a/mempool/Cargo.toml b/mempool/Cargo.toml index ee7e884c..a2f51bc0 100644 --- a/mempool/Cargo.toml +++ b/mempool/Cargo.toml @@ -4,6 +4,9 @@ version = "0.1.0" edition = "2024" license = { workspace = true } +[lints] +workspace = true + [dependencies] tokio = { workspace = true, features = ["sync"] } diff --git a/mempool/src/lib.rs b/mempool/src/lib.rs index fae52b3e..3bf4ac2a 100644 --- a/mempool/src/lib.rs +++ b/mempool/src/lib.rs @@ -6,6 +6,7 @@ pub struct MemPool { } impl MemPool { + #[must_use] pub fn new(max_size: usize) -> (Self, MemPoolHandle) { let (sender, receiver) = tokio::sync::mpsc::channel(max_size); @@ -17,6 +18,7 @@ impl MemPool { (mem_pool, sender) } + /// Pop an item from the mempool first checking the front buffer (LIFO) then the channel (FIFO). pub fn pop(&mut self) -> Option { use tokio::sync::mpsc::error::TryRecvError; @@ -36,7 +38,7 @@ impl MemPool { } } - /// Push an item to the front of the mempool (will be popped first) + /// Push an item to the front of the mempool (will be popped first). 
pub fn push_front(&mut self, item: T) { self.front_buffer.push(item); } @@ -47,11 +49,11 @@ pub struct MemPoolHandle { } impl MemPoolHandle { - fn new(sender: Sender) -> Self { + const fn new(sender: Sender) -> Self { Self { sender } } - /// Send an item to the mempool blocking if max size is reached + /// Send an item to the mempool blocking if max size is reached. pub async fn push(&self, item: T) -> Result<(), tokio::sync::mpsc::error::SendError> { self.sender.send(item).await } @@ -64,13 +66,13 @@ mod tests { use super::*; #[test] - async fn test_mempool_new() { + async fn mempool_new() { let (mut pool, _handle): (MemPool, _) = MemPool::new(10); assert_eq!(pool.pop(), None); } #[test] - async fn test_push_and_pop() { + async fn push_and_pop() { let (mut pool, handle) = MemPool::new(10); handle.push(1).await.unwrap(); @@ -81,7 +83,7 @@ mod tests { } #[test] - async fn test_multiple_push_pop() { + async fn multiple_push_pop() { let (mut pool, handle) = MemPool::new(10); handle.push(1).await.unwrap(); @@ -95,13 +97,13 @@ mod tests { } #[test] - async fn test_pop_empty() { + async fn pop_empty() { let (mut pool, _handle): (MemPool, _) = MemPool::new(10); assert_eq!(pool.pop(), None); } #[test] - async fn test_max_size() { + async fn max_size() { let (mut pool, handle) = MemPool::new(2); handle.push(1).await.unwrap(); @@ -114,7 +116,7 @@ mod tests { } #[test] - async fn test_push_front() { + async fn push_front() { let (mut pool, handle) = MemPool::new(10); handle.push(1).await.unwrap(); diff --git a/nssa/Cargo.toml b/nssa/Cargo.toml index fe74b7a3..e1b6805f 100644 --- a/nssa/Cargo.toml +++ b/nssa/Cargo.toml @@ -4,9 +4,13 @@ version = "0.1.0" edition = "2024" license = { workspace = true } +[lints] +workspace = true + [dependencies] nssa_core = { workspace = true, features = ["host"] } +anyhow.workspace = true thiserror.workspace = true risc0-zkvm.workspace = true serde.workspace = true @@ -24,7 +28,6 @@ risc0-binfmt = "3.0.2" [dev-dependencies] token_core.workspace 
= true -amm_core.workspace = true test_program_methods.workspace = true env_logger.workspace = true @@ -34,3 +37,4 @@ test-case = "3.3.1" [features] default = [] prove = ["risc0-zkvm/prove"] +test-utils = [] \ No newline at end of file diff --git a/nssa/build.rs b/nssa/build.rs index 020b838c..ce39df93 100644 --- a/nssa/build.rs +++ b/nssa/build.rs @@ -1,4 +1,4 @@ -use std::{env, fs, path::PathBuf}; +use std::{env, fmt::Write as _, fs, path::PathBuf}; fn main() -> Result<(), Box> { let manifest_dir = PathBuf::from(env::var("CARGO_MANIFEST_DIR")?); @@ -15,7 +15,7 @@ fn main() -> Result<(), Box> { .collect::>(); if bins.is_empty() { - return Err(format!("No .bin files found in {:?}", program_methods_dir).into()); + return Err(format!("No .bin files found in {}", program_methods_dir.display()).into()); } fs::create_dir_all(&mod_dir)?; @@ -25,14 +25,16 @@ fn main() -> Result<(), Box> { let name = path.file_stem().unwrap().to_string_lossy(); let bytecode = fs::read(&path)?; let image_id: [u32; 8] = risc0_binfmt::compute_image_id(&bytecode)?.into(); - src.push_str(&format!( + write!( + src, "pub const {}_ELF: &[u8] = include_bytes!(r#\"{}\"#);\n\ + #[expect(clippy::unreadable_literal, reason = \"Generated image IDs from risc0 are cryptographic hashes represented as u32 arrays\")]\n\ pub const {}_ID: [u32; 8] = {:?};\n", name.to_uppercase(), path.display(), name.to_uppercase(), image_id - )); + )?; } fs::write(&mod_file, src)?; println!("cargo:warning=Generated module at {}", mod_file.display()); diff --git a/nssa/core/Cargo.toml b/nssa/core/Cargo.toml index 3b6b430f..d9e80af4 100644 --- a/nssa/core/Cargo.toml +++ b/nssa/core/Cargo.toml @@ -4,6 +4,9 @@ version = "0.1.0" edition = "2024" license = { workspace = true } +[lints] +workspace = true + [dependencies] risc0-zkvm.workspace = true borsh.workspace = true @@ -14,7 +17,7 @@ bytemuck.workspace = true bytesize.workspace = true base58.workspace = true k256 = { workspace = true, optional = true } -chacha20 = { version = 
"0.9", default-features = false } +chacha20 = { version = "0.10" } [dev-dependencies] serde_json.workspace = true diff --git a/nssa/core/src/account.rs b/nssa/core/src/account.rs index 9bcdcd4b..0f9248e3 100644 --- a/nssa/core/src/account.rs +++ b/nssa/core/src/account.rs @@ -1,28 +1,126 @@ -use std::{fmt::Display, str::FromStr}; +use std::{ + fmt::{Display, Write as _}, + str::FromStr, +}; -use base58::{FromBase58, ToBase58}; +use base58::{FromBase58 as _, ToBase58 as _}; use borsh::{BorshDeserialize, BorshSerialize}; pub use data::Data; +use risc0_zkvm::sha::{Impl, Sha256 as _}; use serde::{Deserialize, Serialize}; use serde_with::{DeserializeFromStr, SerializeDisplay}; -use crate::program::ProgramId; +use crate::{NullifierPublicKey, NullifierSecretKey, program::ProgramId}; pub mod data; -pub type Nonce = u128; +#[derive(Copy, Debug, Default, Clone, Eq, PartialEq)] +pub struct Nonce(pub u128); -/// Account to be used both in public and private contexts +impl Nonce { + pub const fn public_account_nonce_increment(&mut self) { + self.0 = self + .0 + .checked_add(1) + .expect("Overflow when incrementing nonce"); + } + + #[must_use] + pub fn private_account_nonce_init(npk: &NullifierPublicKey) -> Self { + let mut bytes: [u8; 64] = [0_u8; 64]; + bytes[..32].copy_from_slice(&npk.0); + let result: [u8; 32] = Impl::hash_bytes(&bytes).as_bytes().try_into().unwrap(); + let result = result.first_chunk::<16>().unwrap(); + + Self(u128::from_le_bytes(*result)) + } + + #[must_use] + pub fn private_account_nonce_increment(self, nsk: &NullifierSecretKey) -> Self { + let mut bytes: [u8; 64] = [0_u8; 64]; + bytes[..32].copy_from_slice(nsk); + bytes[32..48].copy_from_slice(&self.0.to_le_bytes()); + let result: [u8; 32] = Impl::hash_bytes(&bytes).as_bytes().try_into().unwrap(); + let result = result.first_chunk::<16>().unwrap(); + + Self(u128::from_le_bytes(*result)) + } +} + +impl From for Nonce { + fn from(value: u128) -> Self { + Self(value) + } +} + +impl From for u128 { + fn 
from(value: Nonce) -> Self { + value.0 + } +} + +impl Serialize for Nonce { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + Serialize::serialize(&self.0, serializer) + } +} + +impl<'de> Deserialize<'de> for Nonce { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + Ok(::deserialize(deserializer)?.into()) + } +} + +impl BorshSerialize for Nonce { + fn serialize(&self, writer: &mut W) -> std::io::Result<()> { + BorshSerialize::serialize(&self.0, writer) + } +} + +impl BorshDeserialize for Nonce { + fn deserialize_reader(reader: &mut R) -> std::io::Result { + Ok(::deserialize_reader(reader)?.into()) + } +} + +pub type Balance = u128; + +/// Account to be used both in public and private contexts. #[derive( - Debug, Default, Clone, Eq, PartialEq, Serialize, Deserialize, BorshSerialize, BorshDeserialize, + Default, Clone, Eq, PartialEq, Serialize, Deserialize, BorshSerialize, BorshDeserialize, )] pub struct Account { pub program_owner: ProgramId, - pub balance: u128, + pub balance: Balance, pub data: Data, pub nonce: Nonce, } +impl std::fmt::Debug for Account { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let program_owner_hex = self + .program_owner + .iter() + .flat_map(|n| n.to_le_bytes()) + .fold(String::new(), |mut acc, bytes| { + write!(acc, "{bytes:02x}").expect("writing to string should not fail"); + acc + }); + f.debug_struct("Account") + .field("program_owner", &program_owner_hex) + .field("balance", &self.balance) + .field("data", &self.data) + .field("nonce", &self.nonce) + .finish() + } +} + #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)] pub struct AccountWithMetadata { pub account: Account, @@ -42,7 +140,6 @@ impl AccountWithMetadata { } #[derive( - Debug, Default, Copy, Clone, @@ -59,16 +156,25 @@ pub struct AccountId { value: [u8; 32], } +impl std::fmt::Debug for AccountId { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> 
std::fmt::Result { + write!(f, "{}", self.value.to_base58()) + } +} + impl AccountId { - pub fn new(value: [u8; 32]) -> Self { + #[must_use] + pub const fn new(value: [u8; 32]) -> Self { Self { value } } - pub fn value(&self) -> &[u8; 32] { + #[must_use] + pub const fn value(&self) -> &[u8; 32] { &self.value } - pub fn into_value(self) -> [u8; 32] { + #[must_use] + pub const fn into_value(self) -> [u8; 32] { self.value } } @@ -95,9 +201,9 @@ impl FromStr for AccountId { if bytes.len() != 32 { return Err(AccountIdError::InvalidLength(bytes.len())); } - let mut value = [0u8; 32]; + let mut value = [0_u8; 32]; value.copy_from_slice(&bytes); - Ok(AccountId { value }) + Ok(Self { value }) } } @@ -113,28 +219,28 @@ mod tests { use crate::program::DEFAULT_PROGRAM_ID; #[test] - fn test_zero_balance_account_data_creation() { + fn zero_balance_account_data_creation() { let new_acc = Account::default(); assert_eq!(new_acc.balance, 0); } #[test] - fn test_zero_nonce_account_data_creation() { + fn zero_nonce_account_data_creation() { let new_acc = Account::default(); - assert_eq!(new_acc.nonce, 0); + assert_eq!(new_acc.nonce.0, 0); } #[test] - fn test_empty_data_account_data_creation() { + fn empty_data_account_data_creation() { let new_acc = Account::default(); assert!(new_acc.data.is_empty()); } #[test] - fn test_default_program_owner_account_data_creation() { + fn default_program_owner_account_data_creation() { let new_acc = Account::default(); assert_eq!(new_acc.program_owner, DEFAULT_PROGRAM_ID); @@ -142,7 +248,7 @@ mod tests { #[cfg(feature = "host")] #[test] - fn test_account_with_metadata_constructor() { + fn account_with_metadata_constructor() { let account = Account { program_owner: [1, 2, 3, 4, 5, 6, 7, 8], balance: 1337, @@ -150,7 +256,7 @@ mod tests { .to_vec() .try_into() .unwrap(), - nonce: 0xdeadbeef, + nonce: Nonce(0xdead_beef), }; let fingerprint = AccountId::new([8; 32]); let new_acc_with_metadata = AccountWithMetadata::new(account.clone(), true, 
fingerprint); @@ -164,7 +270,7 @@ mod tests { fn parse_valid_account_id() { let base58_str = "11111111111111111111111111111111"; let account_id: AccountId = base58_str.parse().unwrap(); - assert_eq!(account_id.value, [0u8; 32]); + assert_eq!(account_id.value, [0_u8; 32]); } #[cfg(feature = "host")] @@ -197,4 +303,52 @@ mod tests { let expected_account_id = AccountId::new([0; 32]); assert!(default_account_id == expected_account_id); } + + #[test] + fn initialize_private_nonce() { + let npk = NullifierPublicKey([42; 32]); + let nonce = Nonce::private_account_nonce_init(&npk); + let expected_nonce = Nonce(37_937_661_125_547_691_021_612_781_941_709_513_486); + assert_eq!(nonce, expected_nonce); + } + + #[test] + fn increment_private_nonce() { + let nsk: NullifierSecretKey = [42_u8; 32]; + let nonce = Nonce(37_937_661_125_547_691_021_612_781_941_709_513_486) + .private_account_nonce_increment(&nsk); + let expected_nonce = Nonce(327_300_903_218_789_900_388_409_116_014_290_259_894); + assert_eq!(nonce, expected_nonce); + } + + #[test] + fn increment_public_nonce() { + let value = 42_u128; + let mut nonce = Nonce(value); + nonce.public_account_nonce_increment(); + let expected_nonce = Nonce(value + 1); + assert_eq!(nonce, expected_nonce); + } + + #[test] + fn serde_roundtrip_for_nonce() { + let nonce: Nonce = 7_u128.into(); + + let serde_serialized_nonce = serde_json::to_vec(&nonce).unwrap(); + + let nonce_restored = serde_json::from_slice(&serde_serialized_nonce).unwrap(); + + assert_eq!(nonce, nonce_restored); + } + + #[test] + fn borsh_roundtrip_for_nonce() { + let nonce: Nonce = 7_u128.into(); + + let borsh_serialized_nonce = borsh::to_vec(&nonce).unwrap(); + + let nonce_restored = borsh::from_slice(&borsh_serialized_nonce).unwrap(); + + assert_eq!(nonce, nonce_restored); + } } diff --git a/nssa/core/src/account/data.rs b/nssa/core/src/account/data.rs index 91c58516..36f82653 100644 --- a/nssa/core/src/account/data.rs +++ b/nssa/core/src/account/data.rs @@ -10,26 
+10,29 @@ pub const DATA_MAX_LENGTH: ByteSize = ByteSize::kib(100); pub struct Data(Vec); impl Data { + #[must_use] pub fn into_inner(self) -> Vec { self.0 } + /// Reads data from a cursor. #[cfg(feature = "host")] pub fn from_cursor( cursor: &mut std::io::Cursor<&[u8]>, ) -> Result { use std::io::Read as _; - let mut u32_bytes = [0u8; 4]; + let mut u32_bytes = [0_u8; 4]; cursor.read_exact(&mut u32_bytes)?; let data_length = u32::from_le_bytes(u32_bytes); - if data_length as usize > DATA_MAX_LENGTH.as_u64() as usize { + if u64::from(data_length) > DATA_MAX_LENGTH.as_u64() { return Err( std::io::Error::new(std::io::ErrorKind::InvalidData, DataTooBigError).into(), ); } - let mut data = vec![0; data_length as usize]; + let mut data = + vec![0; usize::try_from(data_length).expect("data length is expected to fit in usize")]; cursor.read_exact(&mut data)?; Ok(Self(data)) } @@ -49,7 +52,9 @@ impl TryFrom> for Data { type Error = DataTooBigError; fn try_from(value: Vec) -> Result { - if value.len() > DATA_MAX_LENGTH.as_u64() as usize { + if value.len() + > usize::try_from(DATA_MAX_LENGTH.as_u64()).expect("DATA_MAX_LENGTH fits in usize") + { Err(DataTooBigError) } else { Ok(Self(value)) @@ -98,13 +103,17 @@ impl<'de> Deserialize<'de> for Data { A: serde::de::SeqAccess<'de>, { let mut vec = Vec::with_capacity( - seq.size_hint() - .unwrap_or(0) - .min(DATA_MAX_LENGTH.as_u64() as usize), + seq.size_hint().unwrap_or(0).min( + usize::try_from(DATA_MAX_LENGTH.as_u64()) + .expect("DATA_MAX_LENGTH fits in usize"), + ), ); while let Some(value) = seq.next_element()? 
{ - if vec.len() >= DATA_MAX_LENGTH.as_u64() as usize { + if vec.len() + >= usize::try_from(DATA_MAX_LENGTH.as_u64()) + .expect("DATA_MAX_LENGTH fits in usize") + { return Err(serde::de::Error::custom(DataTooBigError)); } vec.push(value); @@ -125,7 +134,7 @@ impl BorshDeserialize for Data { let len = u32::deserialize_reader(reader)?; match len { 0 => Ok(Self::default()), - len if len as usize > DATA_MAX_LENGTH.as_u64() as usize => Err(std::io::Error::new( + len if u64::from(len) > DATA_MAX_LENGTH.as_u64() => Err(std::io::Error::new( std::io::ErrorKind::InvalidData, DataTooBigError, )), @@ -143,22 +152,36 @@ mod tests { use super::*; #[test] - fn test_data_max_length_allowed() { - let max_vec = vec![0u8; DATA_MAX_LENGTH.as_u64() as usize]; + fn data_max_length_allowed() { + let max_vec = vec![ + 0_u8; + usize::try_from(DATA_MAX_LENGTH.as_u64()) + .expect("DATA_MAX_LENGTH fits in usize") + ]; let result = Data::try_from(max_vec); assert!(result.is_ok()); } #[test] - fn test_data_too_big_error() { - let big_vec = vec![0u8; DATA_MAX_LENGTH.as_u64() as usize + 1]; + fn data_too_big_error() { + let big_vec = vec![ + 0_u8; + usize::try_from(DATA_MAX_LENGTH.as_u64()) + .expect("DATA_MAX_LENGTH fits in usize") + + 1 + ]; let result = Data::try_from(big_vec); assert!(matches!(result, Err(DataTooBigError))); } #[test] - fn test_borsh_deserialize_exceeding_limit_error() { - let too_big_data = vec![0u8; DATA_MAX_LENGTH.as_u64() as usize + 1]; + fn borsh_deserialize_exceeding_limit_error() { + let too_big_data = vec![ + 0_u8; + usize::try_from(DATA_MAX_LENGTH.as_u64()) + .expect("DATA_MAX_LENGTH fits in usize") + + 1 + ]; let mut serialized = Vec::new(); <_ as BorshSerialize>::serialize(&too_big_data, &mut serialized).unwrap(); @@ -167,8 +190,13 @@ mod tests { } #[test] - fn test_json_deserialize_exceeding_limit_error() { - let data = vec![0u8; DATA_MAX_LENGTH.as_u64() as usize + 1]; + fn json_deserialize_exceeding_limit_error() { + let data = vec![ + 0_u8; + 
usize::try_from(DATA_MAX_LENGTH.as_u64()) + .expect("DATA_MAX_LENGTH fits in usize") + + 1 + ]; let json = serde_json::to_string(&data).unwrap(); let result: Result = serde_json::from_str(&json); diff --git a/nssa/core/src/circuit_io.rs b/nssa/core/src/circuit_io.rs index dedcf780..56d63022 100644 --- a/nssa/core/src/circuit_io.rs +++ b/nssa/core/src/circuit_io.rs @@ -3,7 +3,7 @@ use serde::{Deserialize, Serialize}; use crate::{ Commitment, CommitmentSetDigest, MembershipProof, Nullifier, NullifierPublicKey, NullifierSecretKey, SharedSecretKey, - account::{Account, AccountWithMetadata, Nonce}, + account::{Account, AccountWithMetadata}, encryption::Ciphertext, program::{ProgramId, ProgramOutput}, }; @@ -18,8 +18,6 @@ pub struct PrivacyPreservingCircuitInput { /// - `1` - private account with authentication /// - `2` - private account without authentication pub visibility_mask: Vec, - /// Nonces of private accounts. - pub private_account_nonces: Vec, /// Public keys of private accounts. pub private_account_keys: Vec<(NullifierPublicKey, SharedSecretKey)>, /// Nullifier secret keys for authorized private accounts. @@ -42,6 +40,8 @@ pub struct PrivacyPreservingCircuitOutput { #[cfg(feature = "host")] impl PrivacyPreservingCircuitOutput { + /// Serializes the circuit output to a byte vector. 
+ #[must_use] pub fn to_bytes(&self) -> Vec { bytemuck::cast_slice(&risc0_zkvm::serde::to_vec(&self).unwrap()).to_vec() } @@ -55,19 +55,19 @@ mod tests { use super::*; use crate::{ Commitment, Nullifier, NullifierPublicKey, - account::{Account, AccountId, AccountWithMetadata}, + account::{Account, AccountId, AccountWithMetadata, Nonce}, }; #[test] - fn test_privacy_preserving_circuit_output_to_bytes_is_compatible_with_from_slice() { + fn privacy_preserving_circuit_output_to_bytes_is_compatible_with_from_slice() { let output = PrivacyPreservingCircuitOutput { public_pre_states: vec![ AccountWithMetadata::new( Account { program_owner: [1, 2, 3, 4, 5, 6, 7, 8], - balance: 12345678901234567890, + balance: 12_345_678_901_234_567_890, data: b"test data".to_vec().try_into().unwrap(), - nonce: 18446744073709551614, + nonce: Nonce(0xFFFF_FFFF_FFFF_FFFE), }, true, AccountId::new([0; 32]), @@ -75,9 +75,9 @@ mod tests { AccountWithMetadata::new( Account { program_owner: [9, 9, 9, 8, 8, 8, 7, 7], - balance: 123123123456456567112, + balance: 123_123_123_456_456_567_112, data: b"test data".to_vec().try_into().unwrap(), - nonce: 9999999999999999999999, + nonce: Nonce(9_999_999_999_999_999_999_999), }, false, AccountId::new([1; 32]), @@ -87,7 +87,7 @@ mod tests { program_owner: [1, 2, 3, 4, 5, 6, 7, 8], balance: 100, data: b"post state data".to_vec().try_into().unwrap(), - nonce: 18446744073709551615, + nonce: Nonce(0xFFFF_FFFF_FFFF_FFFF), }], ciphertexts: vec![Ciphertext(vec![255, 255, 1, 1, 2, 2])], new_commitments: vec![Commitment::new( diff --git a/nssa/core/src/commitment.rs b/nssa/core/src/commitment.rs index b08e3005..36730dd0 100644 --- a/nssa/core/src/commitment.rs +++ b/nssa/core/src/commitment.rs @@ -1,16 +1,9 @@ use borsh::{BorshDeserialize, BorshSerialize}; -use risc0_zkvm::sha::{Impl, Sha256}; +use risc0_zkvm::sha::{Impl, Sha256 as _}; use serde::{Deserialize, Serialize}; use crate::{NullifierPublicKey, account::Account}; -#[derive(Serialize, Deserialize, 
BorshSerialize, BorshDeserialize)] -#[cfg_attr( - any(feature = "host", test), - derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord) -)] -pub struct Commitment(pub(super) [u8; 32]); - /// A commitment to all zero data. /// ```python /// from hashlib import sha256 @@ -23,7 +16,7 @@ pub const DUMMY_COMMITMENT: Commitment = Commitment([ 165, 33, 34, 172, 227, 30, 215, 20, 85, 47, 230, 29, ]); -/// The hash of the dummy commitment +/// The hash of the dummy commitment. /// ```python /// from hashlib import sha256 /// hasher = sha256() @@ -35,9 +28,30 @@ pub const DUMMY_COMMITMENT_HASH: [u8; 32] = [ 194, 216, 67, 56, 251, 208, 226, 0, 117, 149, 39, ]; +#[derive(Serialize, Deserialize, BorshSerialize, BorshDeserialize)] +#[cfg_attr( + any(feature = "host", test), + derive(Clone, PartialEq, Eq, Hash, PartialOrd, Ord) +)] +pub struct Commitment(pub(super) [u8; 32]); + +#[cfg(any(feature = "host", test))] +impl std::fmt::Debug for Commitment { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + use std::fmt::Write as _; + + let hex: String = self.0.iter().fold(String::new(), |mut acc, b| { + write!(acc, "{b:02x}").expect("writing to string should not fail"); + acc + }); + write!(f, "Commitment({hex})") + } +} + impl Commitment { /// Generates the commitment to a private account owned by user for npk: - /// SHA256(npk || program_owner || balance || nonce || SHA256(data)) + /// SHA256(npk || `program_owner` || balance || nonce || SHA256(data)). 
+ #[must_use] pub fn new(npk: &NullifierPublicKey, account: &Account) -> Self { let mut bytes = Vec::new(); bytes.extend_from_slice(&npk.to_byte_array()); @@ -47,7 +61,7 @@ impl Commitment { this.extend_from_slice(&word.to_le_bytes()); } this.extend_from_slice(&account.balance.to_le_bytes()); - this.extend_from_slice(&account.nonce.to_le_bytes()); + this.extend_from_slice(&account.nonce.0.to_le_bytes()); let hashed_data: [u8; 32] = Impl::hash_bytes(&account.data) .as_bytes() .try_into() @@ -64,7 +78,8 @@ pub type CommitmentSetDigest = [u8; 32]; pub type MembershipProof = (usize, Vec<[u8; 32]>); -/// Computes the resulting digest for the given membership proof and corresponding commitment +/// Computes the resulting digest for the given membership proof and corresponding commitment. +#[must_use] pub fn compute_digest_for_path( commitment: &Commitment, proof: &MembershipProof, @@ -76,18 +91,16 @@ pub fn compute_digest_for_path( .unwrap(); let mut level_index = proof.0; for node in &proof.1 { + let mut bytes = [0_u8; 64]; let is_left_child = level_index & 1 == 0; if is_left_child { - let mut bytes = [0u8; 64]; bytes[..32].copy_from_slice(&result); bytes[32..].copy_from_slice(node); - result = Impl::hash_bytes(&bytes).as_bytes().try_into().unwrap(); } else { - let mut bytes = [0u8; 64]; bytes[..32].copy_from_slice(node); bytes[32..].copy_from_slice(&result); - result = Impl::hash_bytes(&bytes).as_bytes().try_into().unwrap(); } + result = Impl::hash_bytes(&bytes).as_bytes().try_into().unwrap(); level_index >>= 1; } result @@ -95,14 +108,14 @@ pub fn compute_digest_for_path( #[cfg(test)] mod tests { - use risc0_zkvm::sha::{Impl, Sha256}; + use risc0_zkvm::sha::{Impl, Sha256 as _}; use crate::{ Commitment, DUMMY_COMMITMENT, DUMMY_COMMITMENT_HASH, NullifierPublicKey, account::Account, }; #[test] - fn test_nothing_up_my_sleeve_dummy_commitment() { + fn nothing_up_my_sleeve_dummy_commitment() { let default_account = Account::default(); let npk_null = NullifierPublicKey([0; 
32]); let expected_dummy_commitment = Commitment::new(&npk_null, &default_account); @@ -110,7 +123,7 @@ mod tests { } #[test] - fn test_nothing_up_my_sleeve_dummy_commitment_hash() { + fn nothing_up_my_sleeve_dummy_commitment_hash() { let expected_dummy_commitment_hash: [u8; 32] = Impl::hash_bytes(&DUMMY_COMMITMENT.to_byte_array()) .as_bytes() diff --git a/nssa/core/src/encoding.rs b/nssa/core/src/encoding.rs index 34be3782..ac9317c2 100644 --- a/nssa/core/src/encoding.rs +++ b/nssa/core/src/encoding.rs @@ -2,7 +2,7 @@ #[cfg(feature = "host")] use std::io::Cursor; #[cfg(feature = "host")] -use std::io::Read; +use std::io::Read as _; #[cfg(feature = "host")] use crate::Nullifier; @@ -17,28 +17,31 @@ use crate::{ }; impl Account { + /// Serializes the account to bytes. + #[must_use] pub fn to_bytes(&self) -> Vec { let mut bytes = Vec::new(); for word in &self.program_owner { bytes.extend_from_slice(&word.to_le_bytes()); } bytes.extend_from_slice(&self.balance.to_le_bytes()); - bytes.extend_from_slice(&self.nonce.to_le_bytes()); - let data_length: u32 = self.data.len() as u32; + bytes.extend_from_slice(&self.nonce.0.to_le_bytes()); + let data_length: u32 = u32::try_from(self.data.len()).expect("Invalid u32"); bytes.extend_from_slice(&data_length.to_le_bytes()); bytes.extend_from_slice(self.data.as_ref()); bytes } + /// Deserializes an account from a cursor. 
#[cfg(feature = "host")] pub fn from_cursor(cursor: &mut Cursor<&[u8]>) -> Result { - use crate::account::data::Data; + use crate::account::{Nonce, data::Data}; - let mut u32_bytes = [0u8; 4]; - let mut u128_bytes = [0u8; 16]; + let mut u32_bytes = [0_u8; 4]; + let mut u128_bytes = [0_u8; 16]; // program owner - let mut program_owner = [0u32; 8]; + let mut program_owner = [0_u32; 8]; for word in &mut program_owner { cursor.read_exact(&mut u32_bytes)?; *word = u32::from_le_bytes(u32_bytes); @@ -50,7 +53,7 @@ impl Account { // nonce cursor.read_exact(&mut u128_bytes)?; - let nonce = u128::from_le_bytes(u128_bytes); + let nonce = Nonce(u128::from_le_bytes(u128_bytes)); // data let data = Data::from_cursor(cursor)?; @@ -65,51 +68,61 @@ impl Account { } impl Commitment { - pub fn to_byte_array(&self) -> [u8; 32] { + #[must_use] + pub const fn to_byte_array(&self) -> [u8; 32] { self.0 } #[cfg(feature = "host")] - pub fn from_byte_array(bytes: [u8; 32]) -> Self { + #[must_use] + pub const fn from_byte_array(bytes: [u8; 32]) -> Self { Self(bytes) } + /// Deserializes a commitment from a cursor. #[cfg(feature = "host")] pub fn from_cursor(cursor: &mut Cursor<&[u8]>) -> Result { - let mut bytes = [0u8; 32]; + let mut bytes = [0_u8; 32]; cursor.read_exact(&mut bytes)?; Ok(Self(bytes)) } } impl NullifierPublicKey { - pub fn to_byte_array(&self) -> [u8; 32] { + #[must_use] + pub const fn to_byte_array(&self) -> [u8; 32] { self.0 } } #[cfg(feature = "host")] impl Nullifier { - pub fn to_byte_array(&self) -> [u8; 32] { + #[must_use] + pub const fn to_byte_array(&self) -> [u8; 32] { self.0 } #[cfg(feature = "host")] - pub fn from_byte_array(bytes: [u8; 32]) -> Self { + #[must_use] + pub const fn from_byte_array(bytes: [u8; 32]) -> Self { Self(bytes) } + /// Deserializes a nullifier from a cursor. 
pub fn from_cursor(cursor: &mut Cursor<&[u8]>) -> Result { - let mut bytes = [0u8; 32]; + let mut bytes = [0_u8; 32]; cursor.read_exact(&mut bytes)?; Ok(Self(bytes)) } } impl Ciphertext { + /// Serializes the ciphertext to bytes. + #[must_use] pub fn to_bytes(&self) -> Vec { let mut bytes = Vec::new(); - let ciphertext_length: u32 = self.0.len() as u32; + let ciphertext_length: u32 = + u32::try_from(self.0.len()).expect("ciphertext length fits in u32"); bytes.extend_from_slice(&ciphertext_length.to_le_bytes()); bytes.extend_from_slice(&self.0); @@ -117,22 +130,27 @@ impl Ciphertext { } #[cfg(feature = "host")] + #[must_use] pub fn into_inner(self) -> Vec { self.0 } #[cfg(feature = "host")] - pub fn from_inner(inner: Vec) -> Self { + #[must_use] + pub const fn from_inner(inner: Vec) -> Self { Self(inner) } #[cfg(feature = "host")] + /// Deserializes ciphertext from a cursor. pub fn from_cursor(cursor: &mut Cursor<&[u8]>) -> Result { let mut u32_bytes = [0; 4]; cursor.read_exact(&mut u32_bytes)?; let ciphertext_lenght = u32::from_le_bytes(u32_bytes); - let mut ciphertext = vec![0; ciphertext_lenght as usize]; + let ciphertext_length = + usize::try_from(ciphertext_lenght).expect("ciphertext length fits in usize"); + let mut ciphertext = vec![0; ciphertext_length]; cursor.read_exact(&mut ciphertext)?; Ok(Self(ciphertext)) @@ -141,10 +159,13 @@ impl Ciphertext { #[cfg(feature = "host")] impl Secp256k1Point { + /// Converts the point to bytes. + #[must_use] pub fn to_bytes(&self) -> [u8; 33] { self.0.clone().try_into().unwrap() } + /// Deserializes a secp256k1 point from a cursor. 
pub fn from_cursor(cursor: &mut Cursor<&[u8]>) -> Result { let mut value = vec![0; 33]; cursor.read_exact(&mut value)?; @@ -153,7 +174,8 @@ impl Secp256k1Point { } impl AccountId { - pub fn to_bytes(&self) -> [u8; 32] { + #[must_use] + pub const fn to_bytes(&self) -> [u8; 32] { *self.value() } } @@ -163,11 +185,11 @@ mod tests { use super::*; #[test] - fn test_enconding() { + fn enconding() { let account = Account { program_owner: [1, 2, 3, 4, 5, 6, 7, 8], - balance: 123456789012345678901234567890123456, - nonce: 42, + balance: 123_456_789_012_345_678_901_234_567_890_123_456, + nonce: 42_u128.into(), data: b"hola mundo".to_vec().try_into().unwrap(), }; @@ -184,7 +206,7 @@ mod tests { } #[test] - fn test_commitment_to_bytes() { + fn commitment_to_bytes() { let commitment = Commitment((0..32).collect::>().try_into().unwrap()); let expected_bytes: [u8; 32] = (0..32).collect::>().try_into().unwrap(); @@ -194,7 +216,7 @@ mod tests { #[cfg(feature = "host")] #[test] - fn test_nullifier_to_bytes() { + fn nullifier_to_bytes() { let nullifier = Nullifier((0..32).collect::>().try_into().unwrap()); let expected_bytes: [u8; 32] = (0..32).collect::>().try_into().unwrap(); @@ -204,7 +226,7 @@ mod tests { #[cfg(feature = "host")] #[test] - fn test_commitment_to_bytes_roundtrip() { + fn commitment_to_bytes_roundtrip() { let commitment = Commitment((0..32).collect::>().try_into().unwrap()); let bytes = commitment.to_byte_array(); let mut cursor = Cursor::new(bytes.as_ref()); @@ -214,7 +236,7 @@ mod tests { #[cfg(feature = "host")] #[test] - fn test_nullifier_to_bytes_roundtrip() { + fn nullifier_to_bytes_roundtrip() { let nullifier = Nullifier((0..32).collect::>().try_into().unwrap()); let bytes = nullifier.to_byte_array(); let mut cursor = Cursor::new(bytes.as_ref()); @@ -224,11 +246,11 @@ mod tests { #[cfg(feature = "host")] #[test] - fn test_account_to_bytes_roundtrip() { + fn account_to_bytes_roundtrip() { let account = Account { program_owner: [1, 2, 3, 4, 5, 6, 7, 8], - 
balance: 123456789012345678901234567890123456, - nonce: 42, + balance: 123_456_789_012_345_678_901_234_567_890_123_456, + nonce: 42_u128.into(), data: b"hola mundo".to_vec().try_into().unwrap(), }; let bytes = account.to_bytes(); diff --git a/nssa/core/src/encryption/mod.rs b/nssa/core/src/encryption/mod.rs index 4817d3c8..400fb331 100644 --- a/nssa/core/src/encryption/mod.rs +++ b/nssa/core/src/encryption/mod.rs @@ -1,18 +1,16 @@ use borsh::{BorshDeserialize, BorshSerialize}; use chacha20::{ ChaCha20, - cipher::{KeyIvInit, StreamCipher}, + cipher::{KeyIvInit as _, StreamCipher as _}, }; -use risc0_zkvm::sha::{Impl, Sha256}; +use risc0_zkvm::sha::{Impl, Sha256 as _}; use serde::{Deserialize, Serialize}; - -#[cfg(feature = "host")] -pub mod shared_key_derivation; - #[cfg(feature = "host")] pub use shared_key_derivation::{EphemeralPublicKey, EphemeralSecretKey, ViewingPublicKey}; use crate::{Commitment, account::Account}; +#[cfg(feature = "host")] +pub mod shared_key_derivation; pub type Scalar = [u8; 32]; @@ -22,17 +20,31 @@ pub struct SharedSecretKey(pub [u8; 32]); pub struct EncryptionScheme; #[derive(Serialize, Deserialize, BorshSerialize, BorshDeserialize)] -#[cfg_attr(any(feature = "host", test), derive(Debug, Clone, PartialEq, Eq))] +#[cfg_attr(any(feature = "host", test), derive(Clone, PartialEq, Eq))] pub struct Ciphertext(pub(crate) Vec); +#[cfg(any(feature = "host", test))] +impl std::fmt::Debug for Ciphertext { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + use std::fmt::Write as _; + + let hex: String = self.0.iter().fold(String::new(), |mut acc, b| { + write!(acc, "{b:02x}").expect("writing to string should not fail"); + acc + }); + write!(f, "Ciphertext({hex})") + } +} + impl EncryptionScheme { + #[must_use] pub fn encrypt( account: &Account, shared_secret: &SharedSecretKey, commitment: &Commitment, output_index: u32, ) -> Ciphertext { - let mut buffer = account.to_bytes().to_vec(); + let mut buffer = account.to_bytes(); 
Self::symmetric_transform(&mut buffer, shared_secret, commitment, output_index); Ciphertext(buffer) } @@ -64,6 +76,11 @@ impl EncryptionScheme { } #[cfg(feature = "host")] + #[expect( + clippy::print_stdout, + reason = "This is the current way to debug things. TODO: fix later" + )] + #[must_use] pub fn decrypt( ciphertext: &Ciphertext, shared_secret: &SharedSecretKey, @@ -71,7 +88,7 @@ impl EncryptionScheme { output_index: u32, ) -> Option { use std::io::Cursor; - let mut buffer = ciphertext.0.to_owned(); + let mut buffer = ciphertext.0.clone(); Self::symmetric_transform(&mut buffer, shared_secret, commitment, output_index); let mut cursor = Cursor::new(buffer.as_slice()); @@ -79,12 +96,12 @@ impl EncryptionScheme { .inspect_err(|err| { println!( "Failed to decode {ciphertext:?} \n - with secret {:?} ,\n + with secret {:?} ,\n commitment {commitment:?} ,\n and output_index {output_index} ,\n with error {err:?}", shared_secret.0 - ) + ); }) .ok() } diff --git a/nssa/core/src/encryption/shared_key_derivation.rs b/nssa/core/src/encryption/shared_key_derivation.rs index e946d5e3..8169e8f9 100644 --- a/nssa/core/src/encryption/shared_key_derivation.rs +++ b/nssa/core/src/encryption/shared_key_derivation.rs @@ -1,20 +1,38 @@ +#![expect( + clippy::arithmetic_side_effects, + reason = "Multiplication of finite field elements can't overflow" +)] + +use std::fmt::Write as _; + use borsh::{BorshDeserialize, BorshSerialize}; use k256::{ AffinePoint, EncodedPoint, FieldBytes, ProjectivePoint, elliptic_curve::{ - PrimeField, - sec1::{FromEncodedPoint, ToEncodedPoint}, + PrimeField as _, + sec1::{FromEncodedPoint as _, ToEncodedPoint as _}, }, }; use serde::{Deserialize, Serialize}; use crate::{SharedSecretKey, encryption::Scalar}; -#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)] +#[derive(Serialize, Deserialize, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)] pub struct Secp256k1Point(pub Vec); +impl std::fmt::Debug for 
Secp256k1Point { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let hex: String = self.0.iter().fold(String::new(), |mut acc, b| { + write!(acc, "{b:02x}").expect("writing to string should not fail"); + acc + }); + write!(f, "Secp256k1Point({hex})") + } +} + impl Secp256k1Point { - pub fn from_scalar(value: Scalar) -> Secp256k1Point { + #[must_use] + pub fn from_scalar(value: Scalar) -> Self { let x_bytes: FieldBytes = value.into(); let x = k256::Scalar::from_repr(x_bytes).unwrap(); @@ -31,11 +49,13 @@ pub type EphemeralPublicKey = Secp256k1Point; pub type ViewingPublicKey = Secp256k1Point; impl From<&EphemeralSecretKey> for EphemeralPublicKey { fn from(value: &EphemeralSecretKey) -> Self { - Secp256k1Point::from_scalar(*value) + Self::from_scalar(*value) } } impl SharedSecretKey { + /// Creates a new shared secret key from a scalar and a point. + #[must_use] pub fn new(scalar: &Scalar, point: &Secp256k1Point) -> Self { let scalar = k256::Scalar::from_repr((*scalar).into()).unwrap(); let point: [u8; 33] = point.0.clone().try_into().unwrap(); @@ -46,9 +66,9 @@ impl SharedSecretKey { let shared = ProjectivePoint::from(pubkey_affine) * scalar; let shared_affine = shared.to_affine(); - let encoded = shared_affine.to_encoded_point(false); - let x_bytes_slice = encoded.x().unwrap(); - let mut x_bytes = [0u8; 32]; + let shared_affine_encoded = shared_affine.to_encoded_point(false); + let x_bytes_slice = shared_affine_encoded.x().unwrap(); + let mut x_bytes = [0_u8; 32]; x_bytes.copy_from_slice(x_bytes_slice); Self(x_bytes) diff --git a/nssa/core/src/lib.rs b/nssa/core/src/lib.rs index 8d4fce5f..8014c7ca 100644 --- a/nssa/core/src/lib.rs +++ b/nssa/core/src/lib.rs @@ -1,10 +1,7 @@ -pub mod account; -mod circuit_io; -mod commitment; -mod encoding; -pub mod encryption; -mod nullifier; -pub mod program; +#![expect( + clippy::multiple_inherent_impl, + reason = "We prefer to group methods by functionality rather than by type for encoding" +)] pub use 
circuit_io::{PrivacyPreservingCircuitInput, PrivacyPreservingCircuitOutput}; pub use commitment::{ @@ -14,5 +11,13 @@ pub use commitment::{ pub use encryption::{EncryptionScheme, SharedSecretKey}; pub use nullifier::{Nullifier, NullifierPublicKey, NullifierSecretKey}; +pub mod account; +mod circuit_io; +mod commitment; +mod encoding; +pub mod encryption; +mod nullifier; +pub mod program; + #[cfg(feature = "host")] pub mod error; diff --git a/nssa/core/src/nullifier.rs b/nssa/core/src/nullifier.rs index c019b185..6ba59860 100644 --- a/nssa/core/src/nullifier.rs +++ b/nssa/core/src/nullifier.rs @@ -1,5 +1,5 @@ use borsh::{BorshDeserialize, BorshSerialize}; -use risc0_zkvm::sha::{Impl, Sha256}; +use risc0_zkvm::sha::{Impl, Sha256 as _}; use serde::{Deserialize, Serialize}; use crate::{Commitment, account::AccountId}; @@ -16,7 +16,12 @@ impl From<&NullifierPublicKey> for AccountId { let mut bytes = [0; 64]; bytes[0..32].copy_from_slice(PRIVATE_ACCOUNT_ID_PREFIX); bytes[32..].copy_from_slice(&value.0); - AccountId::new(Impl::hash_bytes(&bytes).as_bytes().try_into().unwrap()) + Self::new( + Impl::hash_bytes(&bytes) + .as_bytes() + .try_into() + .expect("Conversion should not fail"), + ) } } @@ -28,15 +33,20 @@ impl AsRef<[u8]> for NullifierPublicKey { impl From<&NullifierSecretKey> for NullifierPublicKey { fn from(value: &NullifierSecretKey) -> Self { - let mut bytes = Vec::new(); const PREFIX: &[u8; 8] = b"LEE/keys"; const SUFFIX_1: &[u8; 1] = &[7]; const SUFFIX_2: &[u8; 23] = &[0; 23]; + let mut bytes = Vec::new(); bytes.extend_from_slice(PREFIX); bytes.extend_from_slice(value); bytes.extend_from_slice(SUFFIX_1); bytes.extend_from_slice(SUFFIX_2); - Self(Impl::hash_bytes(&bytes).as_bytes().try_into().unwrap()) + Self( + Impl::hash_bytes(&bytes) + .as_bytes() + .try_into() + .expect("hash should be exactly 32 bytes long"), + ) } } @@ -45,11 +55,26 @@ pub type NullifierSecretKey = [u8; 32]; #[derive(Serialize, Deserialize, BorshSerialize, BorshDeserialize)] #[cfg_attr( 
any(feature = "host", test), - derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash) + derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash) )] pub struct Nullifier(pub(super) [u8; 32]); +#[cfg(any(feature = "host", test))] +impl std::fmt::Debug for Nullifier { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + use std::fmt::Write as _; + + let hex: String = self.0.iter().fold(String::new(), |mut acc, b| { + write!(acc, "{b:02x}").expect("writing to string should not fail"); + acc + }); + write!(f, "Nullifier({hex})") + } +} + impl Nullifier { + /// Computes a nullifier for an account update. + #[must_use] pub fn for_account_update(commitment: &Commitment, nsk: &NullifierSecretKey) -> Self { const UPDATE_PREFIX: &[u8; 32] = b"/NSSA/v0.2/Nullifier/Update/\x00\x00\x00\x00"; let mut bytes = UPDATE_PREFIX.to_vec(); @@ -58,6 +83,8 @@ impl Nullifier { Self(Impl::hash_bytes(&bytes).as_bytes().try_into().unwrap()) } + /// Computes a nullifier for an account initialization. 
+ #[must_use] pub fn for_account_initialization(npk: &NullifierPublicKey) -> Self { const INIT_PREFIX: &[u8; 32] = b"/NSSA/v0.2/Nullifier/Initialize/"; let mut bytes = INIT_PREFIX.to_vec(); @@ -71,8 +98,8 @@ mod tests { use super::*; #[test] - fn test_constructor_for_account_update() { - let commitment = Commitment((0..32u8).collect::>().try_into().unwrap()); + fn constructor_for_account_update() { + let commitment = Commitment((0..32_u8).collect::>().try_into().unwrap()); let nsk = [0x42; 32]; let expected_nullifier = Nullifier([ 148, 243, 116, 209, 140, 231, 211, 61, 35, 62, 114, 110, 143, 224, 82, 201, 221, 34, @@ -83,7 +110,7 @@ mod tests { } #[test] - fn test_constructor_for_account_initialization() { + fn constructor_for_account_initialization() { let npk = NullifierPublicKey([ 112, 188, 193, 129, 150, 55, 228, 67, 88, 168, 29, 151, 5, 92, 23, 190, 17, 162, 164, 255, 29, 105, 42, 186, 43, 11, 157, 168, 132, 225, 17, 163, @@ -97,7 +124,7 @@ mod tests { } #[test] - fn test_from_secret_key() { + fn from_secret_key() { let nsk = [ 57, 5, 64, 115, 153, 56, 184, 51, 207, 238, 99, 165, 147, 214, 213, 151, 30, 251, 30, 196, 134, 22, 224, 211, 237, 120, 136, 225, 188, 220, 249, 28, @@ -111,7 +138,7 @@ mod tests { } #[test] - fn test_account_id_from_nullifier_public_key() { + fn account_id_from_nullifier_public_key() { let nsk = [ 57, 5, 64, 115, 153, 56, 184, 51, 207, 238, 99, 165, 147, 214, 213, 151, 30, 251, 30, 196, 134, 22, 224, 211, 237, 120, 136, 225, 188, 220, 249, 28, diff --git a/nssa/core/src/program.rs b/nssa/core/src/program.rs index a6a04425..31b76b0f 100644 --- a/nssa/core/src/program.rs +++ b/nssa/core/src/program.rs @@ -5,17 +5,17 @@ use serde::{Deserialize, Serialize}; use crate::account::{Account, AccountId, AccountWithMetadata}; -pub type ProgramId = [u32; 8]; -pub type InstructionData = Vec; pub const DEFAULT_PROGRAM_ID: ProgramId = [0; 8]; pub const MAX_NUMBER_CHAINED_CALLS: usize = 10; +pub type ProgramId = [u32; 8]; +pub type InstructionData = 
Vec; pub struct ProgramInput { pub pre_states: Vec, pub instruction: T, } -/// A 32-byte seed used to compute a *Program-Derived AccountId* (PDA). +/// A 32-byte seed used to compute a *Program-Derived `AccountId`* (PDA). /// /// Each program can derive up to `2^256` unique account IDs by choosing different /// seeds. PDAs allow programs to control namespaced account identifiers without @@ -24,28 +24,15 @@ pub struct ProgramInput { pub struct PdaSeed([u8; 32]); impl PdaSeed { + #[must_use] pub const fn new(value: [u8; 32]) -> Self { Self(value) } } -pub fn compute_authorized_pdas( - caller_program_id: Option, - pda_seeds: &[PdaSeed], -) -> HashSet { - caller_program_id - .map(|caller_program_id| { - pda_seeds - .iter() - .map(|pda_seed| AccountId::from((&caller_program_id, pda_seed))) - .collect() - }) - .unwrap_or_default() -} - impl From<(&ProgramId, &PdaSeed)> for AccountId { fn from(value: (&ProgramId, &PdaSeed)) -> Self { - use risc0_zkvm::sha::{Impl, Sha256}; + use risc0_zkvm::sha::{Impl, Sha256 as _}; const PROGRAM_DERIVED_ACCOUNT_ID_PREFIX: &[u8; 32] = b"/NSSA/v0.2/AccountId/PDA/\x00\x00\x00\x00\x00\x00\x00"; @@ -55,7 +42,7 @@ impl From<(&ProgramId, &PdaSeed)> for AccountId { bytemuck::try_cast_slice(value.0).expect("ProgramId should be castable to &[u8]"); bytes[32..64].copy_from_slice(program_id_bytes); bytes[64..].copy_from_slice(&value.1.0); - AccountId::new( + Self::new( Impl::hash_bytes(&bytes) .as_bytes() .try_into() @@ -66,10 +53,10 @@ impl From<(&ProgramId, &PdaSeed)> for AccountId { #[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)] pub struct ChainedCall { - /// The program ID of the program to execute + /// The program ID of the program to execute. pub program_id: ProgramId, pub pre_states: Vec, - /// The instruction data to pass + /// The instruction data to pass. 
pub instruction_data: InstructionData, pub pda_seeds: Vec, } @@ -90,6 +77,7 @@ impl ChainedCall { } } + #[must_use] pub fn with_pda_seeds(mut self, pda_seeds: Vec) -> Self { self.pda_seeds = pda_seeds; self @@ -97,6 +85,7 @@ impl ChainedCall { } /// Represents the final state of an `Account` after a program execution. +/// /// A post state may optionally request that the executing program /// becomes the owner of the account (a “claim”). This is used to signal /// that the program intends to take ownership of the account. @@ -110,7 +99,8 @@ pub struct AccountPostState { impl AccountPostState { /// Creates a post state without a claim request. /// The executing program is not requesting ownership of the account. - pub fn new(account: Account) -> Self { + #[must_use] + pub const fn new(account: Account) -> Self { Self { account, claim: false, @@ -120,7 +110,8 @@ impl AccountPostState { /// Creates a post state that requests ownership of the account. /// This indicates that the executing program intends to claim the /// account as its own and is allowed to mutate it. - pub fn new_claimed(account: Account) -> Self { + #[must_use] + pub const fn new_claimed(account: Account) -> Self { Self { account, claim: true, @@ -129,6 +120,7 @@ impl AccountPostState { /// Creates a post state that requests ownership of the account /// if the account's program owner is the default program ID. + #[must_use] pub fn new_claimed_if_default(account: Account) -> Self { let claim = account.program_owner == DEFAULT_PROGRAM_ID; Self { account, claim } @@ -136,21 +128,24 @@ impl AccountPostState { /// Returns `true` if this post state requests that the account /// be claimed (owned) by the executing program. - pub fn requires_claim(&self) -> bool { + #[must_use] + pub const fn requires_claim(&self) -> bool { self.claim } - /// Returns the underlying account - pub fn account(&self) -> &Account { + /// Returns the underlying account. 
+ #[must_use] + pub const fn account(&self) -> &Account { &self.account } - /// Returns the underlying account - pub fn account_mut(&mut self) -> &mut Account { + /// Returns the underlying account. + pub const fn account_mut(&mut self) -> &mut Account { &mut self.account } - /// Consumes the post state and returns the underlying account + /// Consumes the post state and returns the underlying account. + #[must_use] pub fn into_account(self) -> Account { self.account } @@ -159,14 +154,58 @@ impl AccountPostState { #[derive(Serialize, Deserialize, Clone)] #[cfg_attr(any(feature = "host", test), derive(Debug, PartialEq, Eq))] pub struct ProgramOutput { - /// The instruction data the program received to produce this output + /// The instruction data the program received to produce this output. pub instruction_data: InstructionData, - /// The account pre states the program received to produce this output + /// The account pre states the program received to produce this output. pub pre_states: Vec, pub post_states: Vec, pub chained_calls: Vec, } +/// Representation of a number as `lo + hi * 2^128`. +#[derive(PartialEq, Eq)] +struct WrappedBalanceSum { + lo: u128, + hi: u128, +} + +impl WrappedBalanceSum { + /// Constructs a [`WrappedBalanceSum`] from an iterator of balances. + /// + /// Returns [`None`] if balance sum overflows `lo + hi * 2^128` representation, which is not + /// expected in practical scenarios. 
+ fn from_balances(balances: impl Iterator) -> Option { + let mut wrapped = Self { lo: 0, hi: 0 }; + + for balance in balances { + let (new_sum, did_overflow) = wrapped.lo.overflowing_add(balance); + if did_overflow { + wrapped.hi = wrapped.hi.checked_add(1)?; + } + wrapped.lo = new_sum; + } + + Some(wrapped) + } +} + +#[must_use] +pub fn compute_authorized_pdas( + caller_program_id: Option, + pda_seeds: &[PdaSeed], +) -> HashSet { + caller_program_id + .map(|caller_program_id| { + pda_seeds + .iter() + .map(|pda_seed| AccountId::from((&caller_program_id, pda_seed))) + .collect() + }) + .unwrap_or_default() +} + +/// Reads the NSSA inputs from the guest environment. +#[must_use] pub fn read_nssa_inputs() -> (ProgramInput, InstructionData) { let pre_states: Vec = env::read(); let instruction_words: InstructionData = env::read(); @@ -209,12 +248,13 @@ pub fn write_nssa_outputs_with_chained_call( env::commit(&output); } -/// Validates well-behaved program execution +/// Validates well-behaved program execution. /// /// # Parameters /// - `pre_states`: The list of input accounts, each annotated with authorization metadata. /// - `post_states`: The list of resulting accounts after executing the program logic. /// - `executing_program_id`: The identifier of the program that was executed. +#[must_use] pub fn validate_execution( pre_states: &[AccountWithMetadata], post_states: &[AccountPostState], @@ -298,44 +338,17 @@ fn validate_uniqueness_of_account_ids(pre_states: &[AccountWithMetadata]) -> boo number_of_accounts == number_of_account_ids } -/// Representation of a number as `lo + hi * 2^128`. -#[derive(PartialEq, Eq)] -struct WrappedBalanceSum { - lo: u128, - hi: u128, -} - -impl WrappedBalanceSum { - /// Constructs a [`WrappedBalanceSum`] from an iterator of balances. - /// - /// Returns [`None`] if balance sum overflows `lo + hi * 2^128` representation, which is not - /// expected in practical scenarios. 
- fn from_balances(balances: impl Iterator) -> Option { - let mut wrapped = WrappedBalanceSum { lo: 0, hi: 0 }; - - for balance in balances { - let (new_sum, did_overflow) = wrapped.lo.overflowing_add(balance); - if did_overflow { - wrapped.hi = wrapped.hi.checked_add(1)?; - } - wrapped.lo = new_sum; - } - - Some(wrapped) - } -} - #[cfg(test)] mod tests { use super::*; #[test] - fn test_post_state_new_with_claim_constructor() { + fn post_state_new_with_claim_constructor() { let account = Account { program_owner: [1, 2, 3, 4, 5, 6, 7, 8], balance: 1337, data: vec![0xde, 0xad, 0xbe, 0xef].try_into().unwrap(), - nonce: 10, + nonce: 10_u128.into(), }; let account_post_state = AccountPostState::new_claimed(account.clone()); @@ -345,12 +358,12 @@ mod tests { } #[test] - fn test_post_state_new_without_claim_constructor() { + fn post_state_new_without_claim_constructor() { let account = Account { program_owner: [1, 2, 3, 4, 5, 6, 7, 8], balance: 1337, data: vec![0xde, 0xad, 0xbe, 0xef].try_into().unwrap(), - nonce: 10, + nonce: 10_u128.into(), }; let account_post_state = AccountPostState::new(account.clone()); @@ -360,12 +373,12 @@ mod tests { } #[test] - fn test_post_state_account_getter() { + fn post_state_account_getter() { let mut account = Account { program_owner: [1, 2, 3, 4, 5, 6, 7, 8], balance: 1337, data: vec![0xde, 0xad, 0xbe, 0xef].try_into().unwrap(), - nonce: 10, + nonce: 10_u128.into(), }; let mut account_post_state = AccountPostState::new(account.clone()); diff --git a/nssa/src/encoding/privacy_preserving_transaction.rs b/nssa/src/encoding/privacy_preserving_transaction.rs index fcb6c943..6fe5c443 100644 --- a/nssa/src/encoding/privacy_preserving_transaction.rs +++ b/nssa/src/encoding/privacy_preserving_transaction.rs @@ -4,6 +4,7 @@ use crate::{ }; impl Message { + #[must_use] pub fn to_bytes(&self) -> Vec { borsh::to_vec(&self).expect("Autoderived borsh serialization failure") } @@ -14,6 +15,7 @@ impl Message { } impl PrivacyPreservingTransaction { + 
#[must_use] pub fn to_bytes(&self) -> Vec { borsh::to_vec(&self).expect("Autoderived borsh serialization failure") } diff --git a/nssa/src/encoding/program_deployment_transaction.rs b/nssa/src/encoding/program_deployment_transaction.rs index ee66863d..fc1bf459 100644 --- a/nssa/src/encoding/program_deployment_transaction.rs +++ b/nssa/src/encoding/program_deployment_transaction.rs @@ -1,6 +1,7 @@ use crate::{ProgramDeploymentTransaction, error::NssaError}; impl ProgramDeploymentTransaction { + #[must_use] pub fn to_bytes(&self) -> Vec { borsh::to_vec(&self).expect("Autoderived borsh serialization failure") } @@ -15,7 +16,7 @@ mod tests { use crate::{ProgramDeploymentTransaction, program_deployment_transaction::Message}; #[test] - fn test_roundtrip() { + fn roundtrip() { let message = Message::new(vec![0xca, 0xfe, 0xca, 0xfe, 0x01, 0x02, 0x03]); let tx = ProgramDeploymentTransaction::new(message); let bytes = tx.to_bytes(); diff --git a/nssa/src/encoding/public_transaction.rs b/nssa/src/encoding/public_transaction.rs index ea0988c7..2549cf27 100644 --- a/nssa/src/encoding/public_transaction.rs +++ b/nssa/src/encoding/public_transaction.rs @@ -7,6 +7,7 @@ impl Message { } impl PublicTransaction { + #[must_use] pub fn to_bytes(&self) -> Vec { borsh::to_vec(&self).expect("Autoderived borsh serialization failure") } diff --git a/nssa/src/error.rs b/nssa/src/error.rs index 45d53108..3576b366 100644 --- a/nssa/src/error.rs +++ b/nssa/src/error.rs @@ -2,6 +2,15 @@ use std::io; use thiserror::Error; +#[macro_export] +macro_rules! 
ensure { + ($cond:expr, $err:expr) => { + if !$cond { + return Err($err); + } + }; +} + #[derive(Error, Debug)] pub enum NssaError { #[error("Invalid input: {0}")] @@ -20,7 +29,7 @@ pub enum NssaError { Io(#[from] io::Error), #[error("Invalid Public Key")] - InvalidPublicKey, + InvalidPublicKey(#[source] secp256k1::Error), #[error("Risc0 error: {0}")] ProgramWriteInputFailed(String), @@ -50,11 +59,35 @@ pub enum NssaError { CircuitProvingError(String), #[error("Invalid program bytecode")] - InvalidProgramBytecode, + InvalidProgramBytecode(#[source] anyhow::Error), #[error("Program already exists")] ProgramAlreadyExists, #[error("Chain of calls is too long")] MaxChainedCallsDepthExceeded, + + #[error("Max account nonce reached")] + MaxAccountNonceReached, +} + +#[cfg(test)] +mod tests { + + #[derive(Debug)] + enum TestError { + TestErr, + } + + fn test_function_ensure(cond: bool) -> Result<(), TestError> { + ensure!(cond, TestError::TestErr); + + Ok(()) + } + + #[test] + fn ensure_works() { + assert!(test_function_ensure(true).is_ok()); + assert!(test_function_ensure(false).is_err()); + } } diff --git a/nssa/src/lib.rs b/nssa/src/lib.rs index 47a0eadb..bc7cf121 100644 --- a/nssa/src/lib.rs +++ b/nssa/src/lib.rs @@ -1,16 +1,7 @@ -pub mod program_methods { - include!(concat!(env!("OUT_DIR"), "/program_methods/mod.rs")); -} - -pub mod encoding; -pub mod error; -mod merkle_tree; -pub mod privacy_preserving_transaction; -pub mod program; -pub mod program_deployment_transaction; -pub mod public_transaction; -mod signature; -mod state; +#![expect( + clippy::multiple_inherent_impl, + reason = "We prefer to group methods by functionality rather than by type for encoding" +)] pub use nssa_core::{ SharedSecretKey, @@ -26,3 +17,17 @@ pub use program_methods::PRIVACY_PRESERVING_CIRCUIT_ID; pub use public_transaction::PublicTransaction; pub use signature::{PrivateKey, PublicKey, Signature}; pub use state::V02State; + +pub mod encoding; +pub mod error; +mod merkle_tree; +pub mod 
privacy_preserving_transaction; +pub mod program; +pub mod program_deployment_transaction; +pub mod public_transaction; +mod signature; +mod state; + +pub mod program_methods { + include!(concat!(env!("OUT_DIR"), "/program_methods/mod.rs")); +} diff --git a/nssa/src/merkle_tree/default_values.rs b/nssa/src/merkle_tree/default_values.rs index 03166440..f4c8cbd1 100644 --- a/nssa/src/merkle_tree/default_values.rs +++ b/nssa/src/merkle_tree/default_values.rs @@ -1,4 +1,4 @@ -pub(crate) const DEFAULT_VALUES: [[u8; 32]; 32] = [ +pub const DEFAULT_VALUES: [[u8; 32]; 32] = [ [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, diff --git a/nssa/src/merkle_tree/mod.rs b/nssa/src/merkle_tree/mod.rs index 9c981b62..588f0f60 100644 --- a/nssa/src/merkle_tree/mod.rs +++ b/nssa/src/merkle_tree/mod.rs @@ -1,25 +1,13 @@ +#![expect(clippy::arithmetic_side_effects, reason = "TODO: fix later")] + use borsh::{BorshDeserialize, BorshSerialize}; -use sha2::{Digest, Sha256}; +use sha2::{Digest as _, Sha256}; mod default_values; type Value = [u8; 32]; type Node = [u8; 32]; -/// Compute parent as the hash of two child nodes -fn hash_two(left: &Node, right: &Node) -> Node { - let mut hasher = Sha256::new(); - hasher.update(left); - hasher.update(right); - hasher.finalize().into() -} - -fn hash_value(value: &Value) -> Node { - let mut hasher = Sha256::new(); - hasher.update(value); - hasher.finalize().into() -} - #[cfg_attr(test, derive(Debug, PartialEq, Eq))] #[derive(Clone, BorshSerialize, BorshDeserialize)] pub struct MerkleTree { @@ -36,7 +24,8 @@ impl MerkleTree { fn root_index(&self) -> usize { let tree_depth = self.depth(); - let capacity_depth = self.capacity.trailing_zeros() as usize; + let capacity_depth = + usize::try_from(self.capacity.trailing_zeros()).expect("u32 fits in usize"); if tree_depth == capacity_depth { 0 @@ -46,9 +35,10 @@ impl MerkleTree { } } - /// Number of levels required to hold all nodes + /// Number of levels 
required to hold all nodes. fn depth(&self) -> usize { - self.length.next_power_of_two().trailing_zeros() as usize + usize::try_from(self.length.next_power_of_two().trailing_zeros()) + .expect("u32 fits in usize") } fn get_node(&self, index: usize) -> &Node { @@ -62,14 +52,14 @@ impl MerkleTree { pub fn with_capacity(capacity: usize) -> Self { // Adjust capacity to ensure power of two let capacity = capacity.next_power_of_two(); - let total_depth = capacity.trailing_zeros() as usize; + let total_depth = usize::try_from(capacity.trailing_zeros()).expect("u32 fits in usize"); - let nodes = default_values::DEFAULT_VALUES[..(total_depth + 1)] + let nodes = default_values::DEFAULT_VALUES[..=total_depth] .iter() .rev() .enumerate() .flat_map(|(level, default_value)| std::iter::repeat_n(default_value, 1 << level)) - .cloned() + .copied() .collect(); Self { @@ -80,7 +70,7 @@ impl MerkleTree { } /// Reallocates storage of Merkle tree for double capacity. - /// The current tree is embedded into the new tree as a subtree + /// The current tree is embedded into the new tree as a subtree. fn reallocate_to_double_capacity(&mut self) { let old_capacity = self.capacity; let new_capacity = old_capacity << 1; @@ -152,30 +142,45 @@ impl MerkleTree { } } -fn prev_power_of_two(x: usize) -> usize { +/// Compute parent as the hash of two child nodes. 
+fn hash_two(left: &Node, right: &Node) -> Node { + let mut hasher = Sha256::new(); + hasher.update(left); + hasher.update(right); + hasher.finalize().into() +} + +fn hash_value(value: &Value) -> Node { + let mut hasher = Sha256::new(); + hasher.update(value); + hasher.finalize().into() +} + +const fn prev_power_of_two(x: usize) -> usize { if x == 0 { return 0; } - 1 << (usize::BITS as usize - x.leading_zeros() as usize - 1) + 1 << (usize::BITS - x.leading_zeros() - 1) } #[cfg(test)] mod tests { + use hex_literal::hex; + + use super::*; + impl MerkleTree { pub fn new(values: &[Value]) -> Self { let mut this = Self::with_capacity(values.len()); - for value in values.iter().cloned() { + for value in values.iter().copied() { this.insert(value); } this } } - use hex_literal::hex; - - use super::*; #[test] - fn test_empty_merkle_tree() { + fn empty_merkle_tree() { let tree = MerkleTree::with_capacity(4); let expected_root = hex!("0000000000000000000000000000000000000000000000000000000000000000"); @@ -185,7 +190,7 @@ mod tests { } #[test] - fn test_merkle_tree_0() { + fn merkle_tree_0() { let values = [[0; 32]]; let tree = MerkleTree::new(&values); assert_eq!(tree.root(), hash_value(&[0; 32])); @@ -194,18 +199,18 @@ mod tests { } #[test] - fn test_merkle_tree_1() { + fn merkle_tree_1() { let values = [[1; 32], [2; 32], [3; 32], [4; 32]]; let tree = MerkleTree::new(&values); let expected_root = hex!("48c73f7821a58a8d2a703e5b39c571c0aa20cf14abcd0af8f2b955bc202998de"); assert_eq!(tree.root(), expected_root); assert_eq!(tree.capacity, 4); - assert_eq!(tree.length, 4) + assert_eq!(tree.length, 4); } #[test] - fn test_merkle_tree_2() { + fn merkle_tree_2() { let values = [[1; 32], [2; 32], [3; 32], [0; 32]]; let tree = MerkleTree::new(&values); let expected_root = @@ -216,7 +221,7 @@ mod tests { } #[test] - fn test_merkle_tree_3() { + fn merkle_tree_3() { let values = [[1; 32], [2; 32], [3; 32]]; let tree = MerkleTree::new(&values); let expected_root = @@ -227,7 +232,7 @@ mod 
tests { } #[test] - fn test_merkle_tree_4() { + fn merkle_tree_4() { let values = [[11; 32], [12; 32], [13; 32], [14; 32], [15; 32]]; let tree = MerkleTree::new(&values); let expected_root = @@ -239,7 +244,7 @@ mod tests { } #[test] - fn test_merkle_tree_5() { + fn merkle_tree_5() { let values = [ [11; 32], [12; 32], [12; 32], [13; 32], [14; 32], [15; 32], [15; 32], [13; 32], [13; 32], [15; 32], [11; 32], @@ -253,7 +258,7 @@ mod tests { } #[test] - fn test_merkle_tree_6() { + fn merkle_tree_6() { let values = [[1; 32], [2; 32], [3; 32], [4; 32], [5; 32]]; let tree = MerkleTree::new(&values); let expected_root = @@ -262,7 +267,7 @@ mod tests { } #[test] - fn test_with_capacity_4() { + fn with_capacity_4() { let tree = MerkleTree::with_capacity(4); assert_eq!(tree.length, 0); @@ -277,25 +282,25 @@ mod tests { } #[test] - fn test_with_capacity_5() { + fn with_capacity_5() { let tree = MerkleTree::with_capacity(5); assert_eq!(tree.length, 0); assert_eq!(tree.nodes.len(), 15); for i in 7..15 { - assert_eq!(*tree.get_node(i), default_values::DEFAULT_VALUES[0]) + assert_eq!(*tree.get_node(i), default_values::DEFAULT_VALUES[0]); } for i in 3..7 { - assert_eq!(*tree.get_node(i), default_values::DEFAULT_VALUES[1]) + assert_eq!(*tree.get_node(i), default_values::DEFAULT_VALUES[1]); } for i in 1..3 { - assert_eq!(*tree.get_node(i), default_values::DEFAULT_VALUES[2]) + assert_eq!(*tree.get_node(i), default_values::DEFAULT_VALUES[2]); } - assert_eq!(*tree.get_node(0), default_values::DEFAULT_VALUES[3]) + assert_eq!(*tree.get_node(0), default_values::DEFAULT_VALUES[3]); } #[test] - fn test_with_capacity_6() { + fn with_capacity_6() { let mut tree = MerkleTree::with_capacity(100); let values = [[1; 32], [2; 32], [3; 32], [4; 32]]; @@ -312,7 +317,7 @@ mod tests { } #[test] - fn test_with_capacity_7() { + fn with_capacity_7() { let mut tree = MerkleTree::with_capacity(599); let values = [[1; 32], [2; 32], [3; 32]]; @@ -328,7 +333,7 @@ mod tests { } #[test] - fn 
test_with_capacity_8() { + fn with_capacity_8() { let mut tree = MerkleTree::with_capacity(1); let values = [[1; 32], [2; 32], [3; 32]]; @@ -344,7 +349,7 @@ mod tests { } #[test] - fn test_insert_value_1() { + fn insert_value_1() { let mut tree = MerkleTree::with_capacity(1); let values = [[1; 32], [2; 32], [3; 32]]; @@ -358,7 +363,7 @@ mod tests { } #[test] - fn test_insert_value_2() { + fn insert_value_2() { let mut tree = MerkleTree::with_capacity(1); let values = [[1; 32], [2; 32], [3; 32], [4; 32]]; @@ -373,7 +378,7 @@ mod tests { } #[test] - fn test_insert_value_3() { + fn insert_value_3() { let mut tree = MerkleTree::with_capacity(1); let values = [[11; 32], [12; 32], [13; 32], [14; 32], [15; 32]]; @@ -405,7 +410,7 @@ mod tests { } #[test] - fn test_authentication_path_1() { + fn authentication_path_1() { let values = [[1; 32], [2; 32], [3; 32], [4; 32]]; let tree = MerkleTree::new(&values); let expected_authentication_path = vec![ @@ -418,7 +423,7 @@ mod tests { } #[test] - fn test_authentication_path_2() { + fn authentication_path_2() { let values = [[1; 32], [2; 32], [3; 32]]; let tree = MerkleTree::new(&values); let expected_authentication_path = vec![ @@ -431,7 +436,7 @@ mod tests { } #[test] - fn test_authentication_path_3() { + fn authentication_path_3() { let values = [[1; 32], [2; 32], [3; 32], [4; 32], [5; 32]]; let tree = MerkleTree::new(&values); let expected_authentication_path = vec![ @@ -445,14 +450,14 @@ mod tests { } #[test] - fn test_authentication_path_4() { + fn authentication_path_4() { let values = [[1; 32], [2; 32], [3; 32], [4; 32], [5; 32]]; let tree = MerkleTree::new(&values); assert!(tree.get_authentication_path_for(5).is_none()); } #[test] - fn test_authentication_path_5() { + fn authentication_path_5() { let values = [[1; 32], [2; 32], [3; 32], [4; 32], [5; 32]]; let tree = MerkleTree::new(&values); let index = 4; @@ -467,7 +472,7 @@ mod tests { } #[test] - fn test_tree_with_63_insertions() { + fn tree_with_63_insertions() { let 
values = [ hex!("cd00acab0f45736e6c6311f1953becc0b69a062e7c2a7310875d28bdf9ef9c5b"), hex!("0df5a6afbcc7bf126caf7084acfc593593ab512e6ca433c61c1a922be40a04ea"), diff --git a/nssa/src/privacy_preserving_transaction/circuit.rs b/nssa/src/privacy_preserving_transaction/circuit.rs index 98e0be24..2ab141a3 100644 --- a/nssa/src/privacy_preserving_transaction/circuit.rs +++ b/nssa/src/privacy_preserving_transaction/circuit.rs @@ -16,18 +16,26 @@ use crate::{ state::MAX_NUMBER_CHAINED_CALLS, }; -/// Proof of the privacy preserving execution circuit +/// Proof of the privacy preserving execution circuit. #[derive(Debug, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)] pub struct Proof(pub(crate) Vec); impl Proof { + #[must_use] pub fn into_inner(self) -> Vec { self.0 } - pub fn from_inner(inner: Vec) -> Self { + #[must_use] + pub const fn from_inner(inner: Vec) -> Self { Self(inner) } + + pub(crate) fn is_valid_for(&self, circuit_output: &PrivacyPreservingCircuitOutput) -> bool { + let inner: InnerReceipt = borsh::from_slice(&self.0).unwrap(); + let receipt = Receipt::new(inner, circuit_output.to_bytes()); + receipt.verify(PRIVACY_PRESERVING_CIRCUIT_ID).is_ok() + } } #[derive(Clone)] @@ -38,7 +46,8 @@ pub struct ProgramWithDependencies { } impl ProgramWithDependencies { - pub fn new(program: Program, dependencies: HashMap) -> Self { + #[must_use] + pub const fn new(program: Program, dependencies: HashMap) -> Self { Self { program, dependencies, @@ -48,38 +57,37 @@ impl ProgramWithDependencies { impl From for ProgramWithDependencies { fn from(program: Program) -> Self { - ProgramWithDependencies::new(program, HashMap::new()) + Self::new(program, HashMap::new()) } } /// Generates a proof of the execution of a NSSA program inside the privacy preserving execution /// circuit. -#[expect(clippy::too_many_arguments, reason = "TODO: fix later")] +/// TODO: too many parameters. 
pub fn execute_and_prove( pre_states: Vec, instruction_data: InstructionData, visibility_mask: Vec, - private_account_nonces: Vec, private_account_keys: Vec<(NullifierPublicKey, SharedSecretKey)>, private_account_nsks: Vec, private_account_membership_proofs: Vec>, program_with_dependencies: &ProgramWithDependencies, ) -> Result<(PrivacyPreservingCircuitOutput, Proof), NssaError> { let ProgramWithDependencies { - program, + program: initial_program, dependencies, } = program_with_dependencies; let mut env_builder = ExecutorEnv::builder(); let mut program_outputs = Vec::new(); let initial_call = ChainedCall { - program_id: program.id(), - instruction_data: instruction_data.clone(), + program_id: initial_program.id(), + instruction_data, pre_states, pda_seeds: vec![], }; - let mut chained_calls = VecDeque::from_iter([(initial_call, program)]); + let mut chained_calls = VecDeque::from_iter([(initial_call, initial_program)]); let mut chain_calls_counter = 0; while let Some((chained_call, program)) = chained_calls.pop_front() { if chain_calls_counter >= MAX_NUMBER_CHAINED_CALLS { @@ -110,13 +118,14 @@ pub fn execute_and_prove( chained_calls.push_front((new_call, next_program)); } - chain_calls_counter += 1; + chain_calls_counter = chain_calls_counter + .checked_add(1) + .expect("we check the max depth at the beginning of the loop"); } let circuit_input = PrivacyPreservingCircuitInput { program_outputs, visibility_mask, - private_account_nonces, private_account_keys, private_account_nsks, private_account_membership_proofs, @@ -160,19 +169,13 @@ fn execute_and_prove_program( .receipt) } -impl Proof { - pub(crate) fn is_valid_for(&self, circuit_output: &PrivacyPreservingCircuitOutput) -> bool { - let inner: InnerReceipt = borsh::from_slice(&self.0).unwrap(); - let receipt = Receipt::new(inner, circuit_output.to_bytes()); - receipt.verify(PRIVACY_PRESERVING_CIRCUIT_ID).is_ok() - } -} - #[cfg(test)] mod tests { + #![expect(clippy::shadow_unrelated, reason = "We don't care 
about it in tests")] + use nssa_core::{ Commitment, DUMMY_COMMITMENT_HASH, EncryptionScheme, Nullifier, - account::{Account, AccountId, AccountWithMetadata, data::Data}, + account::{Account, AccountId, AccountWithMetadata, Nonce, data::Data}, }; use super::*; @@ -210,14 +213,14 @@ mod tests { let expected_sender_post = Account { program_owner: program.id(), balance: 100 - balance_to_move, - nonce: 0, + nonce: Nonce::default(), data: Data::default(), }; let expected_recipient_post = Account { program_owner: program.id(), balance: balance_to_move, - nonce: 0xdeadbeef, + nonce: Nonce::private_account_nonce_init(&recipient_keys.npk()), data: Data::default(), }; @@ -230,7 +233,6 @@ mod tests { vec![sender, recipient], Program::serialize_instruction(balance_to_move).unwrap(), vec![0, 2], - vec![0xdeadbeef], vec![(recipient_keys.npk(), shared_secret)], vec![], vec![None], @@ -264,10 +266,11 @@ mod tests { let sender_keys = test_private_account_keys_1(); let recipient_keys = test_private_account_keys_2(); + let sender_nonce = Nonce(0xdead_beef); let sender_pre = AccountWithMetadata::new( Account { balance: 100, - nonce: 0xdeadbeef, + nonce: sender_nonce, program_owner: program.id(), data: Data::default(), }, @@ -302,13 +305,13 @@ mod tests { let expected_private_account_1 = Account { program_owner: program.id(), balance: 100 - balance_to_move, - nonce: 0xdeadbeef1, + nonce: sender_nonce.private_account_nonce_increment(&sender_keys.nsk), ..Default::default() }; let expected_private_account_2 = Account { program_owner: program.id(), balance: balance_to_move, - nonce: 0xdeadbeef2, + nonce: Nonce::private_account_nonce_init(&recipient_keys.npk()), ..Default::default() }; let expected_new_commitments = vec![ @@ -323,10 +326,9 @@ mod tests { let shared_secret_2 = SharedSecretKey::new(&esk_2, &recipient_keys.vpk()); let (output, proof) = execute_and_prove( - vec![sender_pre.clone(), recipient], + vec![sender_pre, recipient], 
Program::serialize_instruction(balance_to_move).unwrap(), vec![1, 2], - vec![0xdeadbeef1, 0xdeadbeef2], vec![ (sender_keys.npk(), shared_secret_1), (recipient_keys.npk(), shared_secret_2), diff --git a/nssa/src/privacy_preserving_transaction/message.rs b/nssa/src/privacy_preserving_transaction/message.rs index 47b0aa42..4b93e820 100644 --- a/nssa/src/privacy_preserving_transaction/message.rs +++ b/nssa/src/privacy_preserving_transaction/message.rs @@ -4,7 +4,7 @@ use nssa_core::{ account::{Account, Nonce}, encryption::{Ciphertext, EphemeralPublicKey, ViewingPublicKey}, }; -use sha2::{Digest, Sha256}; +use sha2::{Digest as _, Sha256}; use crate::{AccountId, error::NssaError}; @@ -20,8 +20,8 @@ pub struct EncryptedAccountData { impl EncryptedAccountData { fn new( ciphertext: Ciphertext, - npk: NullifierPublicKey, - vpk: ViewingPublicKey, + npk: &NullifierPublicKey, + vpk: &ViewingPublicKey, epk: EphemeralPublicKey, ) -> Self { let view_tag = Self::compute_view_tag(npk, vpk); @@ -32,8 +32,9 @@ impl EncryptedAccountData { } } - /// Computes the tag as the first byte of SHA256("/NSSA/v0.2/ViewTag/" || Npk || vpk) - pub fn compute_view_tag(npk: NullifierPublicKey, vpk: ViewingPublicKey) -> ViewTag { + /// Computes the tag as the first byte of SHA256("/NSSA/v0.2/ViewTag/" || Npk || vpk). 
+ #[must_use] + pub fn compute_view_tag(npk: &NullifierPublicKey, vpk: &ViewingPublicKey) -> ViewTag { let mut hasher = Sha256::new(); hasher.update(b"/NSSA/v0.2/ViewTag/"); hasher.update(npk.to_byte_array()); @@ -43,7 +44,7 @@ impl EncryptedAccountData { } } -#[derive(Debug, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)] +#[derive(Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)] pub struct Message { pub public_account_ids: Vec, pub nonces: Vec, @@ -53,6 +54,33 @@ pub struct Message { pub new_nullifiers: Vec<(Nullifier, CommitmentSetDigest)>, } +impl std::fmt::Debug for Message { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + struct HexDigest<'arr>(&'arr [u8; 32]); + impl std::fmt::Debug for HexDigest<'_> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", hex::encode(self.0)) + } + } + let nullifiers: Vec<_> = self + .new_nullifiers + .iter() + .map(|(n, d)| (n, HexDigest(d))) + .collect(); + f.debug_struct("Message") + .field("public_account_ids", &self.public_account_ids) + .field("nonces", &self.nonces) + .field("public_post_states", &self.public_post_states) + .field( + "encrypted_private_post_states", + &self.encrypted_private_post_states, + ) + .field("new_commitments", &self.new_commitments) + .field("new_nullifiers", &nullifiers) + .finish() + } +} + impl Message { pub fn try_from_circuit_output( public_account_ids: Vec, @@ -71,7 +99,7 @@ impl Message { .into_iter() .zip(public_keys) .map(|(ciphertext, (npk, vpk, epk))| { - EncryptedAccountData::new(ciphertext, npk, vpk, epk) + EncryptedAccountData::new(ciphertext, &npk, &vpk, epk) }) .collect(); Ok(Self { @@ -92,13 +120,14 @@ pub mod tests { account::Account, encryption::{EphemeralPublicKey, ViewingPublicKey}, }; - use sha2::{Digest, Sha256}; + use sha2::{Digest as _, Sha256}; use crate::{ AccountId, privacy_preserving_transaction::message::{EncryptedAccountData, Message}, }; + #[must_use] pub fn message_for_tests() -> 
Message { let account1 = Account::default(); let account2 = Account::default(); @@ -111,7 +140,7 @@ pub mod tests { let public_account_ids = vec![AccountId::new([1; 32])]; - let nonces = vec![1, 2, 3]; + let nonces = vec![1_u128.into(), 2_u128.into(), 3_u128.into()]; let public_post_states = vec![Account::default()]; @@ -126,17 +155,17 @@ pub mod tests { )]; Message { - public_account_ids: public_account_ids.clone(), - nonces: nonces.clone(), - public_post_states: public_post_states.clone(), - encrypted_private_post_states: encrypted_private_post_states.clone(), - new_commitments: new_commitments.clone(), - new_nullifiers: new_nullifiers.clone(), + public_account_ids, + nonces, + public_post_states, + encrypted_private_post_states, + new_commitments, + new_nullifiers, } } #[test] - fn test_encrypted_account_data_constructor() { + fn encrypted_account_data_constructor() { let npk = NullifierPublicKey::from(&[1; 32]); let vpk = ViewingPublicKey::from_scalar([2; 32]); let account = Account::default(); @@ -146,7 +175,7 @@ pub mod tests { let epk = EphemeralPublicKey::from_scalar(esk); let ciphertext = EncryptionScheme::encrypt(&account, &shared_secret, &commitment, 2); let encrypted_account_data = - EncryptedAccountData::new(ciphertext.clone(), npk.clone(), vpk.clone(), epk.clone()); + EncryptedAccountData::new(ciphertext.clone(), &npk, &vpk, epk.clone()); let expected_view_tag = { let mut hasher = Sha256::new(); @@ -161,7 +190,7 @@ pub mod tests { assert_eq!(encrypted_account_data.epk, epk); assert_eq!( encrypted_account_data.view_tag, - EncryptedAccountData::compute_view_tag(npk, vpk) + EncryptedAccountData::compute_view_tag(&npk, &vpk) ); assert_eq!(encrypted_account_data.view_tag, expected_view_tag); } diff --git a/nssa/src/privacy_preserving_transaction/mod.rs b/nssa/src/privacy_preserving_transaction/mod.rs index 48d88181..aab09147 100644 --- a/nssa/src/privacy_preserving_transaction/mod.rs +++ b/nssa/src/privacy_preserving_transaction/mod.rs @@ -1,9 +1,9 @@ +pub 
use message::Message; +pub use transaction::PrivacyPreservingTransaction; +pub use witness_set::WitnessSet; + pub mod message; pub mod transaction; pub mod witness_set; pub mod circuit; - -pub use message::Message; -pub use transaction::PrivacyPreservingTransaction; -pub use witness_set::WitnessSet; diff --git a/nssa/src/privacy_preserving_transaction/transaction.rs b/nssa/src/privacy_preserving_transaction/transaction.rs index 9e0ca30a..2b268c07 100644 --- a/nssa/src/privacy_preserving_transaction/transaction.rs +++ b/nssa/src/privacy_preserving_transaction/transaction.rs @@ -1,4 +1,7 @@ -use std::collections::{HashMap, HashSet}; +use std::{ + collections::{HashMap, HashSet}, + hash::Hash, +}; use borsh::{BorshDeserialize, BorshSerialize}; use nssa_core::{ @@ -21,7 +24,8 @@ pub struct PrivacyPreservingTransaction { } impl PrivacyPreservingTransaction { - pub fn new(message: Message, witness_set: WitnessSet) -> Self { + #[must_use] + pub const fn new(message: Message, witness_set: WitnessSet) -> Self { Self { message, witness_set, @@ -119,19 +123,22 @@ impl PrivacyPreservingTransaction { Ok(message .public_account_ids .iter() - .cloned() + .copied() .zip(message.public_post_states.clone()) .collect()) } - pub fn message(&self) -> &Message { + #[must_use] + pub const fn message(&self) -> &Message { &self.message } - pub fn witness_set(&self) -> &WitnessSet { + #[must_use] + pub const fn witness_set(&self) -> &WitnessSet { &self.witness_set } + #[must_use] pub fn hash(&self) -> [u8; 32] { let bytes = self.to_bytes(); let mut hasher = sha2::Sha256::new(); @@ -147,6 +154,7 @@ impl PrivacyPreservingTransaction { .collect() } + #[must_use] pub fn affected_public_account_ids(&self) -> Vec { let mut acc_set = self .signer_account_ids() @@ -183,7 +191,6 @@ fn check_privacy_preserving_circuit_proof_is_valid( .ok_or(NssaError::InvalidPrivacyPreservingProof) } -use std::hash::Hash; fn n_unique(data: &[T]) -> usize { let set: HashSet<&T> = data.iter().collect(); set.len() @@ 
-220,7 +227,7 @@ mod tests { } #[test] - fn test_privacy_preserving_transaction_encoding_bytes_roundtrip() { + fn privacy_preserving_transaction_encoding_bytes_roundtrip() { let tx = transaction_for_tests(); let bytes = tx.to_bytes(); let tx_from_bytes = PrivacyPreservingTransaction::from_bytes(&bytes).unwrap(); diff --git a/nssa/src/privacy_preserving_transaction/witness_set.rs b/nssa/src/privacy_preserving_transaction/witness_set.rs index 365b61b9..373bbc9c 100644 --- a/nssa/src/privacy_preserving_transaction/witness_set.rs +++ b/nssa/src/privacy_preserving_transaction/witness_set.rs @@ -12,6 +12,7 @@ pub struct WitnessSet { } impl WitnessSet { + #[must_use] pub fn for_message(message: &Message, proof: Proof, private_keys: &[&PrivateKey]) -> Self { let message_bytes = message.to_bytes(); let signatures_and_public_keys = private_keys @@ -24,11 +25,12 @@ impl WitnessSet { }) .collect(); Self { - proof, signatures_and_public_keys, + proof, } } + #[must_use] pub fn signatures_are_valid_for(&self, message: &Message) -> bool { let message_bytes = message.to_bytes(); for (signature, public_key) in self.signatures_and_public_keys() { @@ -39,19 +41,23 @@ impl WitnessSet { true } + #[must_use] pub fn signatures_and_public_keys(&self) -> &[(Signature, PublicKey)] { &self.signatures_and_public_keys } - pub fn proof(&self) -> &Proof { + #[must_use] + pub const fn proof(&self) -> &Proof { &self.proof } + #[must_use] pub fn into_raw_parts(self) -> (Vec<(Signature, PublicKey)>, Proof) { (self.signatures_and_public_keys, self.proof) } - pub fn from_raw_parts( + #[must_use] + pub const fn from_raw_parts( signatures_and_public_keys: Vec<(Signature, PublicKey)>, proof: Proof, ) -> Self { diff --git a/nssa/src/program.rs b/nssa/src/program.rs index 06c7ad29..3b372a22 100644 --- a/nssa/src/program.rs +++ b/nssa/src/program.rs @@ -12,7 +12,7 @@ use crate::{ }; /// Maximum number of cycles for a public execution. 
-/// TODO: Make this variable when fees are implemented +/// TODO: Make this variable when fees are implemented. const MAX_NUM_CYCLES_PUBLIC_EXECUTION: u64 = 1024 * 1024 * 32; // 32M cycles #[derive(Clone, Debug, PartialEq, Eq, BorshSerialize, BorshDeserialize)] @@ -24,18 +24,20 @@ pub struct Program { impl Program { pub fn new(bytecode: Vec) -> Result { let binary = risc0_binfmt::ProgramBinary::decode(&bytecode) - .map_err(|_| NssaError::InvalidProgramBytecode)?; + .map_err(NssaError::InvalidProgramBytecode)?; let id = binary .compute_image_id() - .map_err(|_| NssaError::InvalidProgramBytecode)? + .map_err(NssaError::InvalidProgramBytecode)? .into(); Ok(Self { elf: bytecode, id }) } - pub fn id(&self) -> ProgramId { + #[must_use] + pub const fn id(&self) -> ProgramId { self.id } + #[must_use] pub fn elf(&self) -> &[u8] { &self.elf } @@ -72,7 +74,7 @@ impl Program { Ok(program_output) } - /// Writes inputs to `env_builder` in the order expected by the programs + /// Writes inputs to `env_builder` in the order expected by the programs. pub(crate) fn write_inputs( pre_states: &[AccountWithMetadata], instruction_data: &[u32], @@ -85,18 +87,21 @@ impl Program { Ok(()) } + #[must_use] pub fn authenticated_transfer_program() -> Self { // This unwrap won't panic since the `AUTHENTICATED_TRANSFER_ELF` comes from risc0 build of // `program_methods` Self::new(AUTHENTICATED_TRANSFER_ELF.to_vec()).unwrap() } + #[must_use] pub fn token() -> Self { // This unwrap won't panic since the `TOKEN_ELF` comes from risc0 build of // `program_methods` Self::new(TOKEN_ELF.to_vec()).unwrap() } + #[must_use] pub fn amm() -> Self { Self::new(AMM_ELF.to_vec()).expect("The AMM program must be a valid Risc0 program") } @@ -104,12 +109,15 @@ impl Program { // TODO: Testnet only. Refactor to prevent compilation on mainnet. 
impl Program { + #[must_use] pub fn pinata() -> Self { // This unwrap won't panic since the `PINATA_ELF` comes from risc0 build of // `program_methods` Self::new(PINATA_ELF.to_vec()).unwrap() } + #[must_use] + #[expect(clippy::non_ascii_literal, reason = "More readable")] pub fn pinata_token() -> Self { use crate::program_methods::PINATA_TOKEN_ELF; Self::new(PINATA_TOKEN_ELF.to_vec()).expect("Piñata program must be a valid R0BF file") @@ -129,133 +137,147 @@ mod tests { }; impl Program { - /// A program that changes the nonce of an account + /// A program that changes the nonce of an account. + #[must_use] pub fn nonce_changer_program() -> Self { use test_program_methods::{NONCE_CHANGER_ELF, NONCE_CHANGER_ID}; - Program { + Self { id: NONCE_CHANGER_ID, elf: NONCE_CHANGER_ELF.to_vec(), } } - /// A program that produces more output accounts than the inputs it received + /// A program that produces more output accounts than the inputs it received. + #[must_use] pub fn extra_output_program() -> Self { use test_program_methods::{EXTRA_OUTPUT_ELF, EXTRA_OUTPUT_ID}; - Program { + Self { id: EXTRA_OUTPUT_ID, elf: EXTRA_OUTPUT_ELF.to_vec(), } } - /// A program that produces less output accounts than the inputs it received + /// A program that produces less output accounts than the inputs it received. + #[must_use] pub fn missing_output_program() -> Self { use test_program_methods::{MISSING_OUTPUT_ELF, MISSING_OUTPUT_ID}; - Program { + Self { id: MISSING_OUTPUT_ID, elf: MISSING_OUTPUT_ELF.to_vec(), } } - /// A program that changes the program owner of an account to [0, 1, 2, 3, 4, 5, 6, 7] + /// A program that changes the program owner of an account to [0, 1, 2, 3, 4, 5, 6, 7]. 
+ #[must_use] pub fn program_owner_changer() -> Self { use test_program_methods::{PROGRAM_OWNER_CHANGER_ELF, PROGRAM_OWNER_CHANGER_ID}; - Program { + Self { id: PROGRAM_OWNER_CHANGER_ID, elf: PROGRAM_OWNER_CHANGER_ELF.to_vec(), } } - /// A program that transfers balance without caring about authorizations + /// A program that transfers balance without caring about authorizations. + #[must_use] pub fn simple_balance_transfer() -> Self { use test_program_methods::{SIMPLE_BALANCE_TRANSFER_ELF, SIMPLE_BALANCE_TRANSFER_ID}; - Program { + Self { id: SIMPLE_BALANCE_TRANSFER_ID, elf: SIMPLE_BALANCE_TRANSFER_ELF.to_vec(), } } - /// A program that modifies the data of an account + /// A program that modifies the data of an account. + #[must_use] pub fn data_changer() -> Self { use test_program_methods::{DATA_CHANGER_ELF, DATA_CHANGER_ID}; - Program { + Self { id: DATA_CHANGER_ID, elf: DATA_CHANGER_ELF.to_vec(), } } - /// A program that mints balance + /// A program that mints balance. + #[must_use] pub fn minter() -> Self { use test_program_methods::{MINTER_ELF, MINTER_ID}; - Program { + Self { id: MINTER_ID, elf: MINTER_ELF.to_vec(), } } - /// A program that burns balance + /// A program that burns balance. 
+ #[must_use] pub fn burner() -> Self { use test_program_methods::{BURNER_ELF, BURNER_ID}; - Program { + Self { id: BURNER_ID, elf: BURNER_ELF.to_vec(), } } + #[must_use] pub fn chain_caller() -> Self { use test_program_methods::{CHAIN_CALLER_ELF, CHAIN_CALLER_ID}; - Program { + Self { id: CHAIN_CALLER_ID, elf: CHAIN_CALLER_ELF.to_vec(), } } + #[must_use] pub fn claimer() -> Self { use test_program_methods::{CLAIMER_ELF, CLAIMER_ID}; - Program { + Self { id: CLAIMER_ID, elf: CLAIMER_ELF.to_vec(), } } + #[must_use] pub fn changer_claimer() -> Self { use test_program_methods::{CHANGER_CLAIMER_ELF, CHANGER_CLAIMER_ID}; - Program { + Self { id: CHANGER_CLAIMER_ID, elf: CHANGER_CLAIMER_ELF.to_vec(), } } + #[must_use] pub fn noop() -> Self { use test_program_methods::{NOOP_ELF, NOOP_ID}; - Program { + Self { id: NOOP_ID, elf: NOOP_ELF.to_vec(), } } + #[must_use] pub fn malicious_authorization_changer() -> Self { use test_program_methods::{ MALICIOUS_AUTHORIZATION_CHANGER_ELF, MALICIOUS_AUTHORIZATION_CHANGER_ID, }; - Program { + Self { id: MALICIOUS_AUTHORIZATION_CHANGER_ID, elf: MALICIOUS_AUTHORIZATION_CHANGER_ELF.to_vec(), } } + #[must_use] pub fn modified_transfer_program() -> Self { use test_program_methods::MODIFIED_TRANSFER_ELF; // This unwrap won't panic since the `MODIFIED_TRANSFER_ELF` comes from risc0 build of @@ -265,13 +287,13 @@ mod tests { } #[test] - fn test_program_execution() { + fn program_execution() { let program = Program::simple_balance_transfer(); - let balance_to_move: u128 = 11223344556677; + let balance_to_move: u128 = 11_223_344_556_677; let instruction_data = Program::serialize_instruction(balance_to_move).unwrap(); let sender = AccountWithMetadata::new( Account { - balance: 77665544332211, + balance: 77_665_544_332_211, ..Account::default() }, true, @@ -281,7 +303,7 @@ mod tests { AccountWithMetadata::new(Account::default(), false, AccountId::new([1; 32])); let expected_sender_post = Account { - balance: 77665544332211 - balance_to_move, + 
balance: 77_665_544_332_211 - balance_to_move, ..Account::default() }; let expected_recipient_post = Account { @@ -299,7 +321,7 @@ mod tests { } #[test] - fn test_builtin_programs() { + fn builtin_programs() { let auth_transfer_program = Program::authenticated_transfer_program(); let token_program = Program::token(); let pinata_program = Program::pinata(); diff --git a/nssa/src/program_deployment_transaction/message.rs b/nssa/src/program_deployment_transaction/message.rs index 41c4e10a..a51e4149 100644 --- a/nssa/src/program_deployment_transaction/message.rs +++ b/nssa/src/program_deployment_transaction/message.rs @@ -6,10 +6,12 @@ pub struct Message { } impl Message { - pub fn new(bytecode: Vec) -> Self { + #[must_use] + pub const fn new(bytecode: Vec) -> Self { Self { bytecode } } + #[must_use] pub fn into_bytecode(self) -> Vec { self.bytecode } diff --git a/nssa/src/program_deployment_transaction/mod.rs b/nssa/src/program_deployment_transaction/mod.rs index b498826e..d1fc7d0d 100644 --- a/nssa/src/program_deployment_transaction/mod.rs +++ b/nssa/src/program_deployment_transaction/mod.rs @@ -1,5 +1,5 @@ -mod message; -mod transaction; - pub use message::Message; pub use transaction::ProgramDeploymentTransaction; + +mod message; +mod transaction; diff --git a/nssa/src/program_deployment_transaction/transaction.rs b/nssa/src/program_deployment_transaction/transaction.rs index 8e77bfe0..1e53388d 100644 --- a/nssa/src/program_deployment_transaction/transaction.rs +++ b/nssa/src/program_deployment_transaction/transaction.rs @@ -12,10 +12,12 @@ pub struct ProgramDeploymentTransaction { } impl ProgramDeploymentTransaction { - pub fn new(message: Message) -> Self { + #[must_use] + pub const fn new(message: Message) -> Self { Self { message } } + #[must_use] pub fn into_message(self) -> Message { self.message } @@ -33,6 +35,7 @@ impl ProgramDeploymentTransaction { } } + #[must_use] pub fn hash(&self) -> [u8; 32] { let bytes = self.to_bytes(); let mut hasher = 
sha2::Sha256::new(); @@ -40,7 +43,8 @@ impl ProgramDeploymentTransaction { hasher.finalize_fixed().into() } - pub fn affected_public_account_ids(&self) -> Vec { + #[must_use] + pub const fn affected_public_account_ids(&self) -> Vec { vec![] } } diff --git a/nssa/src/public_transaction/message.rs b/nssa/src/public_transaction/message.rs index 36a20fbb..d4838b87 100644 --- a/nssa/src/public_transaction/message.rs +++ b/nssa/src/public_transaction/message.rs @@ -7,7 +7,7 @@ use serde::Serialize; use crate::{AccountId, error::NssaError, program::Program}; -#[derive(Debug, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)] +#[derive(Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)] pub struct Message { pub program_id: ProgramId, pub account_ids: Vec, @@ -15,6 +15,23 @@ pub struct Message { pub instruction_data: InstructionData, } +impl std::fmt::Debug for Message { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + let program_id_hex = hex::encode( + self.program_id + .iter() + .flat_map(|n| n.to_le_bytes()) + .collect::>(), + ); + f.debug_struct("Message") + .field("program_id", &program_id_hex) + .field("account_ids", &self.account_ids) + .field("nonces", &self.nonces) + .field("instruction_data", &self.instruction_data) + .finish() + } +} + impl Message { pub fn try_new( program_id: ProgramId, @@ -32,7 +49,8 @@ impl Message { }) } - pub fn new_preserialized( + #[must_use] + pub const fn new_preserialized( program_id: ProgramId, account_ids: Vec, nonces: Vec, diff --git a/nssa/src/public_transaction/mod.rs b/nssa/src/public_transaction/mod.rs index 278e9df7..1af61e10 100644 --- a/nssa/src/public_transaction/mod.rs +++ b/nssa/src/public_transaction/mod.rs @@ -1,7 +1,7 @@ -mod message; -mod transaction; -mod witness_set; - pub use message::Message; pub use transaction::PublicTransaction; pub use witness_set::WitnessSet; + +mod message; +mod transaction; +mod witness_set; diff --git a/nssa/src/public_transaction/transaction.rs 
b/nssa/src/public_transaction/transaction.rs index 07a87100..8c84d83c 100644 --- a/nssa/src/public_transaction/transaction.rs +++ b/nssa/src/public_transaction/transaction.rs @@ -6,10 +6,10 @@ use nssa_core::{ account::{Account, AccountId, AccountWithMetadata}, program::{ChainedCall, DEFAULT_PROGRAM_ID, validate_execution}, }; -use sha2::{Digest, digest::FixedOutput}; +use sha2::{Digest as _, digest::FixedOutput as _}; use crate::{ - V02State, + V02State, ensure, error::NssaError, public_transaction::{Message, WitnessSet}, state::MAX_NUMBER_CHAINED_CALLS, @@ -22,18 +22,21 @@ pub struct PublicTransaction { } impl PublicTransaction { - pub fn new(message: Message, witness_set: WitnessSet) -> Self { + #[must_use] + pub const fn new(message: Message, witness_set: WitnessSet) -> Self { Self { message, witness_set, } } - pub fn message(&self) -> &Message { + #[must_use] + pub const fn message(&self) -> &Message { &self.message } - pub fn witness_set(&self) -> &WitnessSet { + #[must_use] + pub const fn witness_set(&self) -> &WitnessSet { &self.witness_set } @@ -45,6 +48,7 @@ impl PublicTransaction { .collect() } + #[must_use] pub fn affected_public_account_ids(&self) -> Vec { let mut acc_set = self .signer_account_ids() @@ -55,6 +59,7 @@ impl PublicTransaction { acc_set.into_iter().collect() } + #[must_use] pub fn hash(&self) -> [u8; 32] { let bytes = self.to_bytes(); let mut hasher = sha2::Sha256::new(); @@ -70,33 +75,33 @@ impl PublicTransaction { let witness_set = self.witness_set(); // All account_ids must be different - if message.account_ids.iter().collect::>().len() != message.account_ids.len() { - return Err(NssaError::InvalidInput( - "Duplicate account_ids found in message".into(), - )); - } + ensure!( + message.account_ids.iter().collect::>().len() == message.account_ids.len(), + NssaError::InvalidInput("Duplicate account_ids found in message".into(),) + ); // Check exactly one nonce is provided for each signature - if message.nonces.len() != 
witness_set.signatures_and_public_keys.len() { - return Err(NssaError::InvalidInput( + ensure!( + message.nonces.len() == witness_set.signatures_and_public_keys.len(), + NssaError::InvalidInput( "Mismatch between number of nonces and signatures/public keys".into(), - )); - } + ) + ); // Check the signatures are valid - if !witness_set.is_valid_for(message) { - return Err(NssaError::InvalidInput( - "Invalid signature for given message and public key".into(), - )); - } + ensure!( + witness_set.is_valid_for(message), + NssaError::InvalidInput("Invalid signature for given message and public key".into()) + ); let signer_account_ids = self.signer_account_ids(); // Check nonces corresponds to the current nonces on the public state. for (account_id, nonce) in signer_account_ids.iter().zip(&message.nonces) { let current_nonce = state.get_account_by_id(*account_id).nonce; - if current_nonce != *nonce { - return Err(NssaError::InvalidInput("Nonce mismatch".into())); - } + ensure!( + current_nonce == *nonce, + NssaError::InvalidInput("Nonce mismatch".into()) + ); } // Build pre_states for execution @@ -125,9 +130,10 @@ impl PublicTransaction { let mut chain_calls_counter = 0; while let Some((chained_call, caller_program_id)) = chained_calls.pop_front() { - if chain_calls_counter > MAX_NUMBER_CHAINED_CALLS { - return Err(NssaError::MaxChainedCallsDepthExceeded); - } + ensure!( + chain_calls_counter <= MAX_NUMBER_CHAINED_CALLS, + NssaError::MaxChainedCallsDepthExceeded + ); // Check that the `program_id` corresponds to a deployed program let Some(program) = state.programs().get(&chained_call.program_id) else { @@ -158,28 +164,31 @@ impl PublicTransaction { .get(&account_id) .cloned() .unwrap_or_else(|| state.get_account_by_id(account_id)); - if pre.account != expected_pre { - return Err(NssaError::InvalidProgramBehavior); - } + ensure!( + pre.account == expected_pre, + NssaError::InvalidProgramBehavior + ); // Check that authorization flags are consistent with the provided ones 
or // authorized by program through the PDA mechanism let is_authorized = signer_account_ids.contains(&account_id) || authorized_pdas.contains(&account_id); - if pre.is_authorized != is_authorized { - return Err(NssaError::InvalidProgramBehavior); - } + ensure!( + pre.is_authorized == is_authorized, + NssaError::InvalidProgramBehavior + ); } // Verify execution corresponds to a well-behaved program. // See the # Programs section for the definition of the `validate_execution` method. - if !validate_execution( - &program_output.pre_states, - &program_output.post_states, - chained_call.program_id, - ) { - return Err(NssaError::InvalidProgramBehavior); - } + ensure!( + validate_execution( + &program_output.pre_states, + &program_output.post_states, + chained_call.program_id, + ), + NssaError::InvalidProgramBehavior + ); for post in program_output .post_states @@ -207,7 +216,9 @@ impl PublicTransaction { chained_calls.push_front((new_call, Some(chained_call.program_id))); } - chain_calls_counter += 1; + chain_calls_counter = chain_calls_counter + .checked_add(1) + .expect("we check the max depth at the beginning of the loop"); } // Check that all modified uninitialized accounts where claimed @@ -221,9 +232,10 @@ impl PublicTransaction { } Some(post) }) { - if post.program_owner == DEFAULT_PROGRAM_ID { - return Err(NssaError::InvalidProgramBehavior); - } + ensure!( + post.program_owner != DEFAULT_PROGRAM_ID, + NssaError::InvalidProgramBehavior + ); } Ok(state_diff) @@ -232,7 +244,7 @@ impl PublicTransaction { #[cfg(test)] pub mod tests { - use sha2::{Digest, digest::FixedOutput}; + use sha2::{Digest as _, digest::FixedOutput as _}; use crate::{ AccountId, PrivateKey, PublicKey, PublicTransaction, Signature, V02State, @@ -257,7 +269,7 @@ pub mod tests { fn transaction_for_tests() -> PublicTransaction { let (key1, key2, addr1, addr2) = keys_for_tests(); - let nonces = vec![0, 0]; + let nonces = vec![0_u128.into(), 0_u128.into()]; let instruction = 1337; let message = 
Message::try_new( Program::authenticated_transfer_program().id(), @@ -272,7 +284,7 @@ pub mod tests { } #[test] - fn test_new_constructor() { + fn new_constructor() { let tx = transaction_for_tests(); let message = tx.message().clone(); let witness_set = tx.witness_set().clone(); @@ -282,19 +294,19 @@ pub mod tests { } #[test] - fn test_message_getter() { + fn message_getter() { let tx = transaction_for_tests(); assert_eq!(&tx.message, tx.message()); } #[test] - fn test_witness_set_getter() { + fn witness_set_getter() { let tx = transaction_for_tests(); assert_eq!(&tx.witness_set, tx.witness_set()); } #[test] - fn test_signer_account_ids() { + fn signer_account_ids() { let tx = transaction_for_tests(); let expected_signer_account_ids = vec![ AccountId::new([ @@ -311,7 +323,7 @@ pub mod tests { } #[test] - fn test_public_transaction_encoding_bytes_roundtrip() { + fn public_transaction_encoding_bytes_roundtrip() { let tx = transaction_for_tests(); let bytes = tx.to_bytes(); let tx_from_bytes = PublicTransaction::from_bytes(&bytes).unwrap(); @@ -319,7 +331,7 @@ pub mod tests { } #[test] - fn test_hash_is_sha256_of_transaction_bytes() { + fn hash_is_sha256_of_transaction_bytes() { let tx = transaction_for_tests(); let hash = tx.hash(); let expected_hash: [u8; 32] = { @@ -332,10 +344,10 @@ pub mod tests { } #[test] - fn test_account_id_list_cant_have_duplicates() { + fn account_id_list_cant_have_duplicates() { let (key1, _, addr1, _) = keys_for_tests(); let state = state_for_tests(); - let nonces = vec![0, 0]; + let nonces = vec![0_u128.into(), 0_u128.into()]; let instruction = 1337; let message = Message::try_new( Program::authenticated_transfer_program().id(), @@ -348,14 +360,14 @@ pub mod tests { let witness_set = WitnessSet::for_message(&message, &[&key1, &key1]); let tx = PublicTransaction::new(message, witness_set); let result = tx.validate_and_produce_public_state_diff(&state); - assert!(matches!(result, Err(NssaError::InvalidInput(_)))) + 
assert!(matches!(result, Err(NssaError::InvalidInput(_)))); } #[test] - fn test_number_of_nonces_must_match_number_of_signatures() { + fn number_of_nonces_must_match_number_of_signatures() { let (key1, key2, addr1, addr2) = keys_for_tests(); let state = state_for_tests(); - let nonces = vec![0]; + let nonces = vec![0_u128.into()]; let instruction = 1337; let message = Message::try_new( Program::authenticated_transfer_program().id(), @@ -368,14 +380,14 @@ pub mod tests { let witness_set = WitnessSet::for_message(&message, &[&key1, &key2]); let tx = PublicTransaction::new(message, witness_set); let result = tx.validate_and_produce_public_state_diff(&state); - assert!(matches!(result, Err(NssaError::InvalidInput(_)))) + assert!(matches!(result, Err(NssaError::InvalidInput(_)))); } #[test] - fn test_all_signatures_must_be_valid() { + fn all_signatures_must_be_valid() { let (key1, key2, addr1, addr2) = keys_for_tests(); let state = state_for_tests(); - let nonces = vec![0, 0]; + let nonces = vec![0_u128.into(), 0_u128.into()]; let instruction = 1337; let message = Message::try_new( Program::authenticated_transfer_program().id(), @@ -389,14 +401,14 @@ pub mod tests { witness_set.signatures_and_public_keys[0].0 = Signature::new_for_tests([1; 64]); let tx = PublicTransaction::new(message, witness_set); let result = tx.validate_and_produce_public_state_diff(&state); - assert!(matches!(result, Err(NssaError::InvalidInput(_)))) + assert!(matches!(result, Err(NssaError::InvalidInput(_)))); } #[test] - fn test_nonces_must_match_the_state_current_nonces() { + fn nonces_must_match_the_state_current_nonces() { let (key1, key2, addr1, addr2) = keys_for_tests(); let state = state_for_tests(); - let nonces = vec![0, 1]; + let nonces = vec![0_u128.into(), 1_u128.into()]; let instruction = 1337; let message = Message::try_new( Program::authenticated_transfer_program().id(), @@ -409,22 +421,22 @@ pub mod tests { let witness_set = WitnessSet::for_message(&message, &[&key1, &key2]); let 
tx = PublicTransaction::new(message, witness_set); let result = tx.validate_and_produce_public_state_diff(&state); - assert!(matches!(result, Err(NssaError::InvalidInput(_)))) + assert!(matches!(result, Err(NssaError::InvalidInput(_)))); } #[test] - fn test_program_id_must_belong_to_bulitin_program_ids() { + fn program_id_must_belong_to_bulitin_program_ids() { let (key1, key2, addr1, addr2) = keys_for_tests(); let state = state_for_tests(); - let nonces = vec![0, 0]; + let nonces = vec![0_u128.into(), 0_u128.into()]; let instruction = 1337; - let unknown_program_id = [0xdeadbeef; 8]; + let unknown_program_id = [0xdead_beef; 8]; let message = Message::try_new(unknown_program_id, vec![addr1, addr2], nonces, instruction).unwrap(); let witness_set = WitnessSet::for_message(&message, &[&key1, &key2]); let tx = PublicTransaction::new(message, witness_set); let result = tx.validate_and_produce_public_state_diff(&state); - assert!(matches!(result, Err(NssaError::InvalidInput(_)))) + assert!(matches!(result, Err(NssaError::InvalidInput(_)))); } } diff --git a/nssa/src/public_transaction/witness_set.rs b/nssa/src/public_transaction/witness_set.rs index 9b9cd290..d6b32891 100644 --- a/nssa/src/public_transaction/witness_set.rs +++ b/nssa/src/public_transaction/witness_set.rs @@ -8,6 +8,7 @@ pub struct WitnessSet { } impl WitnessSet { + #[must_use] pub fn for_message(message: &Message, private_keys: &[&PrivateKey]) -> Self { let message_bytes = message.to_bytes(); let signatures_and_public_keys = private_keys @@ -24,6 +25,7 @@ impl WitnessSet { } } + #[must_use] pub fn is_valid_for(&self, message: &Message) -> bool { let message_bytes = message.to_bytes(); for (signature, public_key) in self.signatures_and_public_keys() { @@ -34,15 +36,18 @@ impl WitnessSet { true } + #[must_use] pub fn signatures_and_public_keys(&self) -> &[(Signature, PublicKey)] { &self.signatures_and_public_keys } + #[must_use] pub fn into_raw_parts(self) -> Vec<(Signature, PublicKey)> { 
self.signatures_and_public_keys } - pub fn from_raw_parts(signatures_and_public_keys: Vec<(Signature, PublicKey)>) -> Self { + #[must_use] + pub const fn from_raw_parts(signatures_and_public_keys: Vec<(Signature, PublicKey)>) -> Self { Self { signatures_and_public_keys, } @@ -55,14 +60,14 @@ mod tests { use crate::AccountId; #[test] - fn test_for_message_constructor() { + fn for_message_constructor() { let key1 = PrivateKey::try_new([1; 32]).unwrap(); let key2 = PrivateKey::try_new([2; 32]).unwrap(); let pubkey1 = PublicKey::new_from_private_key(&key1); let pubkey2 = PublicKey::new_from_private_key(&key2); let addr1 = AccountId::from(&pubkey1); let addr2 = AccountId::from(&pubkey2); - let nonces = vec![1, 2]; + let nonces = vec![1_u128.into(), 2_u128.into()]; let instruction = vec![1, 2, 3, 4]; let message = Message::try_new([0; 8], vec![addr1, addr2], nonces, instruction).unwrap(); diff --git a/nssa/src/signature/bip340_test_vectors.rs b/nssa/src/signature/bip340_test_vectors.rs index 0e91bcb4..e316db5e 100644 --- a/nssa/src/signature/bip340_test_vectors.rs +++ b/nssa/src/signature/bip340_test_vectors.rs @@ -1,9 +1,5 @@ use crate::{PrivateKey, PublicKey, Signature}; -fn hex_to_bytes(hex: &str) -> [u8; N] { - hex::decode(hex).unwrap().try_into().unwrap() -} - pub struct TestVector { pub seckey: Option, pub pubkey: PublicKey, @@ -14,7 +10,7 @@ pub struct TestVector { } /// Test vectors from -/// https://github.com/bitcoin/bips/blob/master/bip-0340/test-vectors.csv +/// . 
// pub fn test_vectors() -> Vec { vec![ @@ -365,3 +361,7 @@ pub fn test_vectors() -> Vec { }, ] } + +fn hex_to_bytes(hex: &str) -> [u8; N] { + hex::decode(hex).unwrap().try_into().unwrap() +} diff --git a/nssa/src/signature/mod.rs b/nssa/src/signature/mod.rs index f76c480a..63377f15 100644 --- a/nssa/src/signature/mod.rs +++ b/nssa/src/signature/mod.rs @@ -1,19 +1,26 @@ -mod private_key; -mod public_key; - use borsh::{BorshDeserialize, BorshSerialize}; pub use private_key::PrivateKey; pub use public_key::PublicKey; -use rand::{RngCore, rngs::OsRng}; +use rand::{RngCore as _, rngs::OsRng}; -#[derive(Debug, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)] +mod private_key; +mod public_key; + +#[derive(Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)] pub struct Signature { pub value: [u8; 64], } +impl std::fmt::Debug for Signature { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", hex::encode(self.value)) + } +} + impl Signature { + #[must_use] pub fn new(key: &PrivateKey, message: &[u8]) -> Self { - let mut aux_random = [0u8; 32]; + let mut aux_random = [0_u8; 32]; OsRng.fill_bytes(&mut aux_random); Self::new_with_aux_random(key, message, aux_random) } @@ -33,6 +40,7 @@ impl Signature { Self { value } } + #[must_use] pub fn is_valid_for(&self, bytes: &[u8], public_key: &PublicKey) -> bool { let pk = secp256k1::XOnlyPublicKey::from_byte_array(*public_key.value()).unwrap(); let secp = secp256k1::Secp256k1::new(); @@ -56,7 +64,7 @@ mod tests { } #[test] - fn test_signature_generation_from_bip340_test_vectors() { + fn signature_generation_from_bip340_test_vectors() { for (i, test_vector) in bip340_test_vectors::test_vectors().into_iter().enumerate() { let Some(private_key) = test_vector.seckey else { continue; @@ -79,7 +87,7 @@ mod tests { } #[test] - fn test_signature_verification_from_bip340_test_vectors() { + fn signature_verification_from_bip340_test_vectors() { for (i, test_vector) in 
bip340_test_vectors::test_vectors().into_iter().enumerate() { let message = test_vector.message.unwrap_or(vec![]); let expected_result = test_vector.verification_result; diff --git a/nssa/src/signature/private_key.rs b/nssa/src/signature/private_key.rs index 667fc306..d8ece0e0 100644 --- a/nssa/src/signature/private_key.rs +++ b/nssa/src/signature/private_key.rs @@ -1,4 +1,4 @@ -use rand::{Rng, rngs::OsRng}; +use rand::{Rng as _, rngs::OsRng}; use serde::{Deserialize, Serialize}; use crate::error::NssaError; @@ -9,14 +9,14 @@ use crate::error::NssaError; pub struct PrivateKey([u8; 32]); impl PrivateKey { + #[must_use] pub fn new_os_random() -> Self { let mut rng = OsRng; loop { - match Self::try_new(rng.r#gen()) { - Ok(key) => break key, - Err(_) => continue, - }; + if let Ok(key) = Self::try_new(rng.r#gen()) { + break key; + } } } @@ -32,7 +32,8 @@ impl PrivateKey { } } - pub fn value(&self) -> &[u8; 32] { + #[must_use] + pub const fn value(&self) -> &[u8; 32] { &self.0 } } @@ -41,13 +42,13 @@ impl PrivateKey { mod tests { use super::*; #[test] - fn test_value_getter() { + fn value_getter() { let key = PrivateKey::try_new([1; 32]).unwrap(); assert_eq!(key.value(), &key.0); } #[test] - fn test_produce_key() { + fn produce_key() { let _key = PrivateKey::new_os_random(); } } diff --git a/nssa/src/signature/public_key.rs b/nssa/src/signature/public_key.rs index 55e55b57..9cdac761 100644 --- a/nssa/src/signature/public_key.rs +++ b/nssa/src/signature/public_key.rs @@ -1,28 +1,30 @@ use borsh::{BorshDeserialize, BorshSerialize}; use nssa_core::account::AccountId; use serde::{Deserialize, Serialize}; -use sha2::{Digest, Sha256}; +use sha2::{Digest as _, Sha256}; use crate::{PrivateKey, error::NssaError}; -#[derive(Debug, Clone, PartialEq, Eq, BorshSerialize, Serialize, Deserialize)] +#[derive(Clone, PartialEq, Eq, BorshSerialize, Serialize, Deserialize)] pub struct PublicKey([u8; 32]); +impl std::fmt::Debug for PublicKey { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) 
-> std::fmt::Result { + write!(f, "{}", hex::encode(self.0)) + } +} + impl BorshDeserialize for PublicKey { fn deserialize_reader(reader: &mut R) -> std::io::Result { - let mut buf = [0u8; 32]; + let mut buf = [0_u8; 32]; reader.read_exact(&mut buf)?; - Self::try_new(buf).map_err(|_| { - std::io::Error::new( - std::io::ErrorKind::InvalidData, - "Invalid public key: not a valid point", - ) - }) + Self::try_new(buf).map_err(|err| std::io::Error::new(std::io::ErrorKind::InvalidData, err)) } } impl PublicKey { + #[must_use] pub fn new_from_private_key(key: &PrivateKey) -> Self { let value = { let secret_key = secp256k1::SecretKey::from_byte_array(*key.value()).unwrap(); @@ -37,11 +39,12 @@ impl PublicKey { pub fn try_new(value: [u8; 32]) -> Result { // Check point is valid let _ = secp256k1::XOnlyPublicKey::from_byte_array(value) - .map_err(|_| NssaError::InvalidPublicKey)?; + .map_err(NssaError::InvalidPublicKey)?; Ok(Self(value)) } - pub fn value(&self) -> &[u8; 32] { + #[must_use] + pub const fn value(&self) -> &[u8; 32] { &self.0 } } @@ -63,7 +66,7 @@ mod test { use crate::{PublicKey, error::NssaError, signature::bip340_test_vectors}; #[test] - fn test_try_new_invalid_public_key_from_bip340_test_vectors_5() { + fn try_new_invalid_public_key_from_bip340_test_vectors_5() { let value_invalid_key = [ 238, 253, 234, 76, 219, 103, 119, 80, 164, 32, 254, 232, 7, 234, 207, 33, 235, 152, 152, 174, 121, 185, 118, 135, 102, 228, 250, 160, 74, 45, 74, 52, @@ -71,11 +74,11 @@ mod test { let result = PublicKey::try_new(value_invalid_key); - assert!(matches!(result, Err(NssaError::InvalidPublicKey))); + assert!(matches!(result, Err(NssaError::InvalidPublicKey(_)))); } #[test] - fn test_try_new_invalid_public_key_from_bip340_test_vector_14() { + fn try_new_invalid_public_key_from_bip340_test_vector_14() { let value_invalid_key = [ 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 254, 255, 255, 
252, 48, @@ -83,11 +86,11 @@ mod test { let result = PublicKey::try_new(value_invalid_key); - assert!(matches!(result, Err(NssaError::InvalidPublicKey))); + assert!(matches!(result, Err(NssaError::InvalidPublicKey(_)))); } #[test] - fn test_try_new_valid_public_keys() { + fn try_new_valid_public_keys() { for (i, test_vector) in bip340_test_vectors::test_vectors().into_iter().enumerate() { let expected_public_key = test_vector.pubkey; let public_key = PublicKey::try_new(*expected_public_key.value()).unwrap(); @@ -96,7 +99,7 @@ mod test { } #[test] - fn test_public_key_generation_from_bip340_test_vectors() { + fn public_key_generation_from_bip340_test_vectors() { for (i, test_vector) in bip340_test_vectors::test_vectors().into_iter().enumerate() { let Some(private_key) = &test_vector.seckey else { continue; @@ -111,7 +114,7 @@ mod test { } #[test] - fn test_correct_ser_deser_roundtrip() { + fn correct_ser_deser_roundtrip() { let pub_key = PublicKey::try_new([42; 32]).unwrap(); let pub_key_borsh_ser = borsh::to_vec(&pub_key).unwrap(); diff --git a/nssa/src/state.rs b/nssa/src/state.rs index c8599d97..8ae26e74 100644 --- a/nssa/src/state.rs +++ b/nssa/src/state.rs @@ -3,7 +3,7 @@ use std::collections::{BTreeSet, HashMap, HashSet}; use borsh::{BorshDeserialize, BorshSerialize}; use nssa_core::{ Commitment, CommitmentSetDigest, DUMMY_COMMITMENT, MembershipProof, Nullifier, - account::{Account, AccountId}, + account::{Account, AccountId, Nonce}, program::ProgramId, }; @@ -18,7 +18,7 @@ pub const MAX_NUMBER_CHAINED_CALLS: usize = 10; #[derive(Clone, BorshSerialize, BorshDeserialize)] #[cfg_attr(test, derive(Debug, PartialEq, Eq))] -pub(crate) struct CommitmentSet { +pub struct CommitmentSet { merkle_tree: MerkleTree, commitments: HashMap, root_history: HashSet, @@ -29,7 +29,7 @@ impl CommitmentSet { self.merkle_tree.root() } - /// Queries the `CommitmentSet` for a membership proof of commitment + /// Queries the `CommitmentSet` for a membership proof of commitment. 
pub fn get_proof_for(&self, commitment: &Commitment) -> Option { let index = *self.commitments.get(commitment)?; @@ -52,9 +52,9 @@ impl CommitmentSet { } /// Initializes an empty `CommitmentSet` with a given capacity. - /// If the capacity is not a power_of_two, then capacity is taken - /// to be the next power_of_two. - pub(crate) fn with_capacity(capacity: usize) -> CommitmentSet { + /// If the capacity is not a `power_of_two`, then capacity is taken + /// to be the next `power_of_two`. + pub(crate) fn with_capacity(capacity: usize) -> Self { Self { merkle_tree: MerkleTree::with_capacity(capacity), commitments: HashMap::new(), @@ -68,7 +68,7 @@ impl CommitmentSet { struct NullifierSet(BTreeSet); impl NullifierSet { - fn new() -> Self { + const fn new() -> Self { Self(BTreeSet::new()) } @@ -114,6 +114,7 @@ pub struct V02State { } impl V02State { + #[must_use] pub fn new_with_genesis_accounts( initial_data: &[(AccountId, u128)], initial_commitments: &[nssa_core::Commitment], @@ -159,7 +160,11 @@ impl V02State { ) -> Result<(), NssaError> { let state_diff = tx.validate_and_produce_public_state_diff(self)?; - for (account_id, post) in state_diff.into_iter() { + #[expect( + clippy::iter_over_hash_type, + reason = "Iteration order doesn't matter here" + )] + for (account_id, post) in state_diff { let current_account = self.get_account_by_id_mut(account_id); *current_account = post; @@ -167,7 +172,7 @@ impl V02State { for account_id in tx.signer_account_ids() { let current_account = self.get_account_by_id_mut(account_id); - current_account.nonce += 1; + current_account.nonce.public_account_nonce_increment(); } Ok(()) @@ -195,7 +200,11 @@ impl V02State { self.private_state.1.extend(new_nullifiers); // 4. 
Update public accounts - for (account_id, post) in public_state_diff.into_iter() { + #[expect( + clippy::iter_over_hash_type, + reason = "Iteration order doesn't matter here" + )] + for (account_id, post) in public_state_diff { let current_account = self.get_account_by_id_mut(account_id); *current_account = post; } @@ -203,7 +212,7 @@ impl V02State { // 5. Increment nonces for public signers for account_id in tx.signer_account_ids() { let current_account = self.get_account_by_id_mut(account_id); - current_account.nonce += 1; + current_account.nonce.public_account_nonce_increment(); } Ok(()) @@ -222,21 +231,24 @@ impl V02State { self.public_state.entry(account_id).or_default() } + #[must_use] pub fn get_account_by_id(&self, account_id: AccountId) -> Account { self.public_state .get(&account_id) .cloned() - .unwrap_or(Account::default()) + .unwrap_or_else(Account::default) } + #[must_use] pub fn get_proof_for_commitment(&self, commitment: &Commitment) -> Option { self.private_state.0.get_proof_for(commitment) } - pub(crate) fn programs(&self) -> &HashMap { + pub(crate) const fn programs(&self) -> &HashMap { &self.programs } + #[must_use] pub fn commitment_set_digest(&self) -> CommitmentSetDigest { self.private_state.0.digest() } @@ -245,10 +257,10 @@ impl V02State { &self, new_commitments: &[Commitment], ) -> Result<(), NssaError> { - for commitment in new_commitments.iter() { + for commitment in new_commitments { if self.private_state.0.contains(commitment) { return Err(NssaError::InvalidInput( - "Commitment already seen".to_string(), + "Commitment already seen".to_owned(), )); } } @@ -259,15 +271,13 @@ impl V02State { &self, new_nullifiers: &[(Nullifier, CommitmentSetDigest)], ) -> Result<(), NssaError> { - for (nullifier, digest) in new_nullifiers.iter() { + for (nullifier, digest) in new_nullifiers { if self.private_state.1.contains(nullifier) { - return Err(NssaError::InvalidInput( - "Nullifier already seen".to_string(), - )); + return 
Err(NssaError::InvalidInput("Nullifier already seen".to_owned())); } if !self.private_state.0.root_history.contains(digest) { return Err(NssaError::InvalidInput( - "Unrecognized commitment set digest".to_string(), + "Unrecognized commitment set digest".to_owned(), )); } } @@ -284,10 +294,10 @@ impl V02State { account_id, Account { program_owner: Program::pinata().id(), - balance: 1500000, + balance: 1_500_000, // Difficulty: 3 data: vec![3; 33].try_into().expect("should fit"), - nonce: 0, + nonce: Nonce::default(), }, ); } @@ -307,19 +317,29 @@ impl V02State { } } +#[cfg(any(test, feature = "test-utils"))] +impl V02State { + pub fn force_insert_account(&mut self, account_id: AccountId, account: Account) { + self.public_state.insert(account_id, account); + } +} + #[cfg(test)] pub mod tests { + #![expect( + clippy::arithmetic_side_effects, + clippy::shadow_unrelated, + reason = "We don't care about it in tests" + )] use std::collections::HashMap; - use amm_core::PoolDefinition; use nssa_core::{ Commitment, Nullifier, NullifierPublicKey, NullifierSecretKey, SharedSecretKey, account::{Account, AccountId, AccountWithMetadata, Nonce, data::Data}, encryption::{EphemeralPublicKey, Scalar, ViewingPublicKey}, program::{PdaSeed, ProgramId}, }; - use token_core::{TokenDefinition, TokenHolding}; use crate::{ PublicKey, PublicTransaction, V02State, @@ -337,29 +357,121 @@ pub mod tests { state::MAX_NUMBER_CHAINED_CALLS, }; + impl V02State { + /// Include test programs in the builtin programs map. 
+ #[must_use] + pub fn with_test_programs(mut self) -> Self { + self.insert_program(Program::nonce_changer_program()); + self.insert_program(Program::extra_output_program()); + self.insert_program(Program::missing_output_program()); + self.insert_program(Program::program_owner_changer()); + self.insert_program(Program::simple_balance_transfer()); + self.insert_program(Program::data_changer()); + self.insert_program(Program::minter()); + self.insert_program(Program::burner()); + self.insert_program(Program::chain_caller()); + self.insert_program(Program::amm()); + self.insert_program(Program::claimer()); + self.insert_program(Program::changer_claimer()); + self + } + + #[must_use] + pub fn with_non_default_accounts_but_default_program_owners(mut self) -> Self { + let account_with_default_values_except_balance = Account { + balance: 100, + ..Account::default() + }; + let account_with_default_values_except_nonce = Account { + nonce: Nonce(37), + ..Account::default() + }; + let account_with_default_values_except_data = Account { + data: vec![0xca, 0xfe].try_into().unwrap(), + ..Account::default() + }; + self.force_insert_account( + AccountId::new([255; 32]), + account_with_default_values_except_balance, + ); + self.force_insert_account( + AccountId::new([254; 32]), + account_with_default_values_except_nonce, + ); + self.force_insert_account( + AccountId::new([253; 32]), + account_with_default_values_except_data, + ); + self + } + + #[must_use] + pub fn with_account_owned_by_burner_program(mut self) -> Self { + let account = Account { + program_owner: Program::burner().id(), + balance: 100, + ..Default::default() + }; + self.force_insert_account(AccountId::new([252; 32]), account); + self + } + + #[must_use] + pub fn with_private_account(mut self, keys: &TestPrivateKeys, account: &Account) -> Self { + let commitment = Commitment::new(&keys.npk(), account); + self.private_state.0.extend(&[commitment]); + self + } + } + + pub struct TestPublicKeys { + pub signing_key: 
PrivateKey, + } + + impl TestPublicKeys { + pub fn account_id(&self) -> AccountId { + AccountId::from(&PublicKey::new_from_private_key(&self.signing_key)) + } + } + + pub struct TestPrivateKeys { + pub nsk: NullifierSecretKey, + pub vsk: Scalar, + } + + impl TestPrivateKeys { + pub fn npk(&self) -> NullifierPublicKey { + NullifierPublicKey::from(&self.nsk) + } + + pub fn vpk(&self) -> ViewingPublicKey { + ViewingPublicKey::from_scalar(self.vsk) + } + } + fn transfer_transaction( from: AccountId, - from_key: PrivateKey, + from_key: &PrivateKey, nonce: u128, to: AccountId, balance: u128, ) -> PublicTransaction { let account_ids = vec![from, to]; - let nonces = vec![nonce]; + let nonces = vec![Nonce(nonce)]; let program_id = Program::authenticated_transfer_program().id(); let message = public_transaction::Message::try_new(program_id, account_ids, nonces, balance).unwrap(); - let witness_set = public_transaction::WitnessSet::for_message(&message, &[&from_key]); + let witness_set = public_transaction::WitnessSet::for_message(&message, &[from_key]); PublicTransaction::new(message, witness_set) } #[test] - fn test_new_with_genesis() { + fn new_with_genesis() { let key1 = PrivateKey::try_new([1; 32]).unwrap(); let key2 = PrivateKey::try_new([2; 32]).unwrap(); let addr1 = AccountId::from(&PublicKey::new_from_private_key(&key1)); let addr2 = AccountId::from(&PublicKey::new_from_private_key(&key2)); - let initial_data = [(addr1, 100u128), (addr2, 151u128)]; + let initial_data = [(addr1, 100_u128), (addr2, 151_u128)]; let authenticated_transfers_program = Program::authenticated_transfer_program(); let expected_public_state = { let mut this = HashMap::new(); @@ -399,7 +511,7 @@ pub mod tests { } #[test] - fn test_insert_program() { + fn insert_program() { let mut state = V02State::new_with_genesis_accounts(&[], &[]); let program_to_insert = Program::simple_balance_transfer(); let program_id = program_to_insert.id(); @@ -411,12 +523,12 @@ pub mod tests { } #[test] - fn 
test_get_account_by_account_id_non_default_account() { + fn get_account_by_account_id_non_default_account() { let key = PrivateKey::try_new([1; 32]).unwrap(); let account_id = AccountId::from(&PublicKey::new_from_private_key(&key)); - let initial_data = [(account_id, 100u128)]; + let initial_data = [(account_id, 100_u128)]; let state = V02State::new_with_genesis_accounts(&initial_data, &[]); - let expected_account = state.public_state.get(&account_id).unwrap(); + let expected_account = &state.public_state[&account_id]; let account = state.get_account_by_id(account_id); @@ -424,7 +536,7 @@ pub mod tests { } #[test] - fn test_get_account_by_account_id_default_account() { + fn get_account_by_account_id_default_account() { let addr2 = AccountId::new([0; 32]); let state = V02State::new_with_genesis_accounts(&[], &[]); let expected_account = Account::default(); @@ -435,7 +547,7 @@ pub mod tests { } #[test] - fn test_builtin_programs_getter() { + fn builtin_programs_getter() { let state = V02State::new_with_genesis_accounts(&[], &[]); let builtin_programs = state.programs(); @@ -454,13 +566,13 @@ pub mod tests { assert_eq!(state.get_account_by_id(to), Account::default()); let balance_to_move = 5; - let tx = transfer_transaction(from, key, 0, to, balance_to_move); + let tx = transfer_transaction(from, &key, 0, to, balance_to_move); state.transition_from_public_transaction(&tx).unwrap(); assert_eq!(state.get_account_by_id(from).balance, 95); assert_eq!(state.get_account_by_id(to).balance, 5); - assert_eq!(state.get_account_by_id(from).nonce, 1); - assert_eq!(state.get_account_by_id(to).nonce, 0); + assert_eq!(state.get_account_by_id(from).nonce, Nonce(1)); + assert_eq!(state.get_account_by_id(to).nonce, Nonce(0)); } #[test] @@ -475,14 +587,14 @@ pub mod tests { let balance_to_move = 101; assert!(state.get_account_by_id(from).balance < balance_to_move); - let tx = transfer_transaction(from, from_key, 0, to, balance_to_move); + let tx = transfer_transaction(from, &from_key, 
0, to, balance_to_move); let result = state.transition_from_public_transaction(&tx); assert!(matches!(result, Err(NssaError::ProgramExecutionFailed(_)))); assert_eq!(state.get_account_by_id(from).balance, 100); assert_eq!(state.get_account_by_id(to).balance, 0); - assert_eq!(state.get_account_by_id(from).nonce, 0); - assert_eq!(state.get_account_by_id(to).nonce, 0); + assert_eq!(state.get_account_by_id(from).nonce, Nonce(0)); + assert_eq!(state.get_account_by_id(to).nonce, Nonce(0)); } #[test] @@ -499,13 +611,13 @@ pub mod tests { assert_ne!(state.get_account_by_id(to), Account::default()); let balance_to_move = 8; - let tx = transfer_transaction(from, from_key, 0, to, balance_to_move); + let tx = transfer_transaction(from, &from_key, 0, to, balance_to_move); state.transition_from_public_transaction(&tx).unwrap(); assert_eq!(state.get_account_by_id(from).balance, 192); assert_eq!(state.get_account_by_id(to).balance, 108); - assert_eq!(state.get_account_by_id(from).nonce, 1); - assert_eq!(state.get_account_by_id(to).nonce, 0); + assert_eq!(state.get_account_by_id(from).nonce, Nonce(1)); + assert_eq!(state.get_account_by_id(to).nonce, Nonce(0)); } #[test] @@ -519,89 +631,22 @@ pub mod tests { let account_id3 = AccountId::new([3; 32]); let balance_to_move = 5; - let tx = transfer_transaction(account_id1, key1, 0, account_id2, balance_to_move); + let tx = transfer_transaction(account_id1, &key1, 0, account_id2, balance_to_move); state.transition_from_public_transaction(&tx).unwrap(); let balance_to_move = 3; - let tx = transfer_transaction(account_id2, key2, 0, account_id3, balance_to_move); + let tx = transfer_transaction(account_id2, &key2, 0, account_id3, balance_to_move); state.transition_from_public_transaction(&tx).unwrap(); assert_eq!(state.get_account_by_id(account_id1).balance, 95); assert_eq!(state.get_account_by_id(account_id2).balance, 2); assert_eq!(state.get_account_by_id(account_id3).balance, 3); - assert_eq!(state.get_account_by_id(account_id1).nonce, 
1); - assert_eq!(state.get_account_by_id(account_id2).nonce, 1); - assert_eq!(state.get_account_by_id(account_id3).nonce, 0); - } - - impl V02State { - pub fn force_insert_account(&mut self, account_id: AccountId, account: Account) { - self.public_state.insert(account_id, account); - } - - /// Include test programs in the builtin programs map - pub fn with_test_programs(mut self) -> Self { - self.insert_program(Program::nonce_changer_program()); - self.insert_program(Program::extra_output_program()); - self.insert_program(Program::missing_output_program()); - self.insert_program(Program::program_owner_changer()); - self.insert_program(Program::simple_balance_transfer()); - self.insert_program(Program::data_changer()); - self.insert_program(Program::minter()); - self.insert_program(Program::burner()); - self.insert_program(Program::chain_caller()); - self.insert_program(Program::amm()); - self.insert_program(Program::claimer()); - self.insert_program(Program::changer_claimer()); - self - } - - pub fn with_non_default_accounts_but_default_program_owners(mut self) -> Self { - let account_with_default_values_except_balance = Account { - balance: 100, - ..Account::default() - }; - let account_with_default_values_except_nonce = Account { - nonce: 37, - ..Account::default() - }; - let account_with_default_values_except_data = Account { - data: vec![0xca, 0xfe].try_into().unwrap(), - ..Account::default() - }; - self.force_insert_account( - AccountId::new([255; 32]), - account_with_default_values_except_balance, - ); - self.force_insert_account( - AccountId::new([254; 32]), - account_with_default_values_except_nonce, - ); - self.force_insert_account( - AccountId::new([253; 32]), - account_with_default_values_except_data, - ); - self - } - - pub fn with_account_owned_by_burner_program(mut self) -> Self { - let account = Account { - program_owner: Program::burner().id(), - balance: 100, - ..Default::default() - }; - self.force_insert_account(AccountId::new([252; 32]), 
account); - self - } - - pub fn with_private_account(mut self, keys: &TestPrivateKeys, account: &Account) -> Self { - let commitment = Commitment::new(&keys.npk(), account); - self.private_state.0.extend(&[commitment]); - self - } + assert_eq!(state.get_account_by_id(account_id1).nonce, Nonce(1)); + assert_eq!(state.get_account_by_id(account_id2).nonce, Nonce(1)); + assert_eq!(state.get_account_by_id(account_id3).nonce, Nonce(0)); } #[test] - fn test_program_should_fail_if_modifies_nonces() { + fn program_should_fail_if_modifies_nonces() { let initial_data = [(AccountId::new([1; 32]), 100)]; let mut state = V02State::new_with_genesis_accounts(&initial_data, &[]).with_test_programs(); @@ -618,7 +663,7 @@ pub mod tests { } #[test] - fn test_program_should_fail_if_output_accounts_exceed_inputs() { + fn program_should_fail_if_output_accounts_exceed_inputs() { let initial_data = [(AccountId::new([1; 32]), 100)]; let mut state = V02State::new_with_genesis_accounts(&initial_data, &[]).with_test_programs(); @@ -635,7 +680,7 @@ pub mod tests { } #[test] - fn test_program_should_fail_with_missing_output_accounts() { + fn program_should_fail_with_missing_output_accounts() { let initial_data = [(AccountId::new([1; 32]), 100)]; let mut state = V02State::new_with_genesis_accounts(&initial_data, &[]).with_test_programs(); @@ -652,7 +697,7 @@ pub mod tests { } #[test] - fn test_program_should_fail_if_modifies_program_owner_with_only_non_default_program_owner() { + fn program_should_fail_if_modifies_program_owner_with_only_non_default_program_owner() { let initial_data = [(AccountId::new([1; 32]), 0)]; let mut state = V02State::new_with_genesis_accounts(&initial_data, &[]).with_test_programs(); @@ -676,7 +721,7 @@ pub mod tests { } #[test] - fn test_program_should_fail_if_modifies_program_owner_with_only_non_default_balance() { + fn program_should_fail_if_modifies_program_owner_with_only_non_default_balance() { let initial_data = []; let mut state = 
V02State::new_with_genesis_accounts(&initial_data, &[]) .with_test_programs() @@ -700,7 +745,7 @@ pub mod tests { } #[test] - fn test_program_should_fail_if_modifies_program_owner_with_only_non_default_nonce() { + fn program_should_fail_if_modifies_program_owner_with_only_non_default_nonce() { let initial_data = []; let mut state = V02State::new_with_genesis_accounts(&initial_data, &[]) .with_test_programs() @@ -724,7 +769,7 @@ pub mod tests { } #[test] - fn test_program_should_fail_if_modifies_program_owner_with_only_non_default_data() { + fn program_should_fail_if_modifies_program_owner_with_only_non_default_data() { let initial_data = []; let mut state = V02State::new_with_genesis_accounts(&initial_data, &[]) .with_test_programs() @@ -748,7 +793,7 @@ pub mod tests { } #[test] - fn test_program_should_fail_if_transfers_balance_from_non_owned_account() { + fn program_should_fail_if_transfers_balance_from_non_owned_account() { let initial_data = [(AccountId::new([1; 32]), 100)]; let mut state = V02State::new_with_genesis_accounts(&initial_data, &[]).with_test_programs(); @@ -776,7 +821,7 @@ pub mod tests { } #[test] - fn test_program_should_fail_if_modifies_data_of_non_owned_account() { + fn program_should_fail_if_modifies_data_of_non_owned_account() { let initial_data = []; let mut state = V02State::new_with_genesis_accounts(&initial_data, &[]) .with_test_programs() @@ -801,7 +846,7 @@ pub mod tests { } #[test] - fn test_program_should_fail_if_does_not_preserve_total_balance_by_minting() { + fn program_should_fail_if_does_not_preserve_total_balance_by_minting() { let initial_data = []; let mut state = V02State::new_with_genesis_accounts(&initial_data, &[]).with_test_programs(); @@ -819,7 +864,7 @@ pub mod tests { } #[test] - fn test_program_should_fail_if_does_not_preserve_total_balance_by_burning() { + fn program_should_fail_if_does_not_preserve_total_balance_by_burning() { let initial_data = []; let mut state = V02State::new_with_genesis_accounts(&initial_data, 
&[]) .with_test_programs() @@ -847,37 +892,12 @@ pub mod tests { assert!(matches!(result, Err(NssaError::InvalidProgramBehavior))); } - pub struct TestPublicKeys { - pub signing_key: PrivateKey, - } - - impl TestPublicKeys { - pub fn account_id(&self) -> AccountId { - AccountId::from(&PublicKey::new_from_private_key(&self.signing_key)) - } - } - fn test_public_account_keys_1() -> TestPublicKeys { TestPublicKeys { signing_key: PrivateKey::try_new([37; 32]).unwrap(), } } - pub struct TestPrivateKeys { - pub nsk: NullifierSecretKey, - pub vsk: Scalar, - } - - impl TestPrivateKeys { - pub fn npk(&self) -> NullifierPublicKey { - NullifierPublicKey::from(&self.nsk) - } - - pub fn vpk(&self) -> ViewingPublicKey { - ViewingPublicKey::from_scalar(self.vsk) - } - } - pub fn test_private_account_keys_1() -> TestPrivateKeys { TestPrivateKeys { nsk: [13; 32], @@ -916,7 +936,6 @@ pub mod tests { vec![sender, recipient], Program::serialize_instruction(balance_to_move).unwrap(), vec![0, 2], - vec![0xdeadbeef], vec![(recipient_keys.npk(), shared_secret)], vec![], vec![None], @@ -941,7 +960,6 @@ pub mod tests { sender_private_account: &Account, recipient_keys: &TestPrivateKeys, balance_to_move: u128, - new_nonces: [Nonce; 2], state: &V02State, ) -> PrivacyPreservingTransaction { let program = Program::authenticated_transfer_program(); @@ -963,7 +981,6 @@ pub mod tests { vec![sender_pre, recipient_pre], Program::serialize_instruction(balance_to_move).unwrap(), vec![1, 2], - new_nonces.to_vec(), vec![ (sender_keys.npk(), shared_secret_1), (recipient_keys.npk(), shared_secret_2), @@ -995,7 +1012,6 @@ pub mod tests { sender_private_account: &Account, recipient_account_id: &AccountId, balance_to_move: u128, - new_nonce: Nonce, state: &V02State, ) -> PrivacyPreservingTransaction { let program = Program::authenticated_transfer_program(); @@ -1016,7 +1032,6 @@ pub mod tests { vec![sender_pre, recipient_pre], Program::serialize_instruction(balance_to_move).unwrap(), vec![1, 0], - 
vec![new_nonce], vec![(sender_keys.npk(), shared_secret)], vec![sender_keys.nsk], vec![state.get_proof_for_commitment(&sender_commitment)], @@ -1038,7 +1053,7 @@ pub mod tests { } #[test] - fn test_transition_from_privacy_preserving_transaction_shielded() { + fn transition_from_privacy_preserving_transaction_shielded() { let sender_keys = test_public_account_keys_1(); let recipient_keys = test_private_account_keys_1(); @@ -1057,7 +1072,7 @@ pub mod tests { let expected_sender_post = { let mut this = state.get_account_by_id(sender_keys.account_id()); this.balance -= balance_to_move; - this.nonce += 1; + this.nonce.public_account_nonce_increment(); this }; @@ -1079,12 +1094,14 @@ pub mod tests { } #[test] - fn test_transition_from_privacy_preserving_transaction_private() { + fn transition_from_privacy_preserving_transaction_private() { let sender_keys = test_private_account_keys_1(); + let sender_nonce = Nonce(0xdead_beef); + let sender_private_account = Account { program_owner: Program::authenticated_transfer_program().id(), balance: 100, - nonce: 0xdeadbeef, + nonce: sender_nonce, data: Data::default(), }; let recipient_keys = test_private_account_keys_2(); @@ -1099,7 +1116,6 @@ pub mod tests { &sender_private_account, &recipient_keys, balance_to_move, - [0xcafecafe, 0xfecafeca], &state, ); @@ -1107,7 +1123,7 @@ pub mod tests { &sender_keys.npk(), &Account { program_owner: Program::authenticated_transfer_program().id(), - nonce: 0xcafecafe, + nonce: sender_nonce.private_account_nonce_increment(&sender_keys.nsk), balance: sender_private_account.balance - balance_to_move, data: Data::default(), }, @@ -1121,7 +1137,7 @@ pub mod tests { &recipient_keys.npk(), &Account { program_owner: Program::authenticated_transfer_program().id(), - nonce: 0xfecafeca, + nonce: Nonce::private_account_nonce_init(&recipient_keys.npk()), balance: balance_to_move, ..Account::default() }, @@ -1145,12 +1161,14 @@ pub mod tests { } #[test] - fn 
test_transition_from_privacy_preserving_transaction_deshielded() { + fn transition_from_privacy_preserving_transaction_deshielded() { let sender_keys = test_private_account_keys_1(); + let sender_nonce = Nonce(0xdead_beef); + let sender_private_account = Account { program_owner: Program::authenticated_transfer_program().id(), balance: 100, - nonce: 0xdeadbeef, + nonce: sender_nonce, data: Data::default(), }; let recipient_keys = test_public_account_keys_1(); @@ -1174,7 +1192,6 @@ pub mod tests { &sender_private_account, &recipient_keys.account_id(), balance_to_move, - 0xcafecafe, &state, ); @@ -1182,7 +1199,7 @@ pub mod tests { &sender_keys.npk(), &Account { program_owner: Program::authenticated_transfer_program().id(), - nonce: 0xcafecafe, + nonce: sender_nonce.private_account_nonce_increment(&sender_keys.nsk), balance: sender_private_account.balance - balance_to_move, data: Data::default(), }, @@ -1212,7 +1229,7 @@ pub mod tests { } #[test] - fn test_burner_program_should_fail_in_privacy_preserving_circuit() { + fn burner_program_should_fail_in_privacy_preserving_circuit() { let program = Program::burner(); let public_account = AccountWithMetadata::new( Account { @@ -1226,12 +1243,11 @@ pub mod tests { let result = execute_and_prove( vec![public_account], - Program::serialize_instruction(10u128).unwrap(), + Program::serialize_instruction(10_u128).unwrap(), vec![0], vec![], vec![], vec![], - vec![], &program.into(), ); @@ -1239,7 +1255,7 @@ pub mod tests { } #[test] - fn test_minter_program_should_fail_in_privacy_preserving_circuit() { + fn minter_program_should_fail_in_privacy_preserving_circuit() { let program = Program::minter(); let public_account = AccountWithMetadata::new( Account { @@ -1253,12 +1269,11 @@ pub mod tests { let result = execute_and_prove( vec![public_account], - Program::serialize_instruction(10u128).unwrap(), + Program::serialize_instruction(10_u128).unwrap(), vec![0], vec![], vec![], vec![], - vec![], &program.into(), ); @@ -1266,7 +1281,7 
@@ pub mod tests { } #[test] - fn test_nonce_changer_program_should_fail_in_privacy_preserving_circuit() { + fn nonce_changer_program_should_fail_in_privacy_preserving_circuit() { let program = Program::nonce_changer_program(); let public_account = AccountWithMetadata::new( Account { @@ -1285,7 +1300,6 @@ pub mod tests { vec![], vec![], vec![], - vec![], &program.into(), ); @@ -1293,7 +1307,7 @@ pub mod tests { } #[test] - fn test_data_changer_program_should_fail_for_non_owned_account_in_privacy_preserving_circuit() { + fn data_changer_program_should_fail_for_non_owned_account_in_privacy_preserving_circuit() { let program = Program::data_changer(); let public_account = AccountWithMetadata::new( Account { @@ -1312,7 +1326,6 @@ pub mod tests { vec![], vec![], vec![], - vec![], &program.into(), ); @@ -1320,7 +1333,7 @@ pub mod tests { } #[test] - fn test_data_changer_program_should_fail_for_too_large_data_in_privacy_preserving_circuit() { + fn data_changer_program_should_fail_for_too_large_data_in_privacy_preserving_circuit() { let program = Program::data_changer(); let public_account = AccountWithMetadata::new( Account { @@ -1333,7 +1346,12 @@ pub mod tests { ); let large_data: Vec = - vec![0; nssa_core::account::data::DATA_MAX_LENGTH.as_u64() as usize + 1]; + vec![ + 0; + usize::try_from(nssa_core::account::data::DATA_MAX_LENGTH.as_u64()) + .expect("DATA_MAX_LENGTH fits in usize") + + 1 + ]; let result = execute_and_prove( vec![public_account], @@ -1342,15 +1360,14 @@ pub mod tests { vec![], vec![], vec![], - vec![], - &program.to_owned().into(), + &program.into(), ); assert!(matches!(result, Err(NssaError::ProgramProveFailed(_)))); } #[test] - fn test_extra_output_program_should_fail_in_privacy_preserving_circuit() { + fn extra_output_program_should_fail_in_privacy_preserving_circuit() { let program = Program::extra_output_program(); let public_account = AccountWithMetadata::new( Account { @@ -1369,7 +1386,6 @@ pub mod tests { vec![], vec![], vec![], - vec![], 
&program.into(), ); @@ -1377,7 +1393,7 @@ pub mod tests { } #[test] - fn test_missing_output_program_should_fail_in_privacy_preserving_circuit() { + fn missing_output_program_should_fail_in_privacy_preserving_circuit() { let program = Program::missing_output_program(); let public_account_1 = AccountWithMetadata::new( Account { @@ -1405,7 +1421,6 @@ pub mod tests { vec![], vec![], vec![], - vec![], &program.into(), ); @@ -1413,7 +1428,7 @@ pub mod tests { } #[test] - fn test_program_owner_changer_should_fail_in_privacy_preserving_circuit() { + fn program_owner_changer_should_fail_in_privacy_preserving_circuit() { let program = Program::program_owner_changer(); let public_account = AccountWithMetadata::new( Account { @@ -1432,7 +1447,6 @@ pub mod tests { vec![], vec![], vec![], - vec![], &program.into(), ); @@ -1440,7 +1454,7 @@ pub mod tests { } #[test] - fn test_transfer_from_non_owned_account_should_fail_in_privacy_preserving_circuit() { + fn transfer_from_non_owned_account_should_fail_in_privacy_preserving_circuit() { let program = Program::simple_balance_transfer(); let public_account_1 = AccountWithMetadata::new( Account { @@ -1463,12 +1477,11 @@ pub mod tests { let result = execute_and_prove( vec![public_account_1, public_account_2], - Program::serialize_instruction(10u128).unwrap(), + Program::serialize_instruction(10_u128).unwrap(), vec![0, 0], vec![], vec![], vec![], - vec![], &program.into(), ); @@ -1476,7 +1489,7 @@ pub mod tests { } #[test] - fn test_circuit_fails_if_visibility_masks_have_incorrect_lenght() { + fn circuit_fails_if_visibility_masks_have_incorrect_lenght() { let program = Program::simple_balance_transfer(); let public_account_1 = AccountWithMetadata::new( Account { @@ -1501,12 +1514,11 @@ pub mod tests { let visibility_mask = [0]; let result = execute_and_prove( vec![public_account_1, public_account_2], - Program::serialize_instruction(10u128).unwrap(), + Program::serialize_instruction(10_u128).unwrap(), visibility_mask.to_vec(), vec![], 
vec![], vec![], - vec![], &program.into(), ); @@ -1514,7 +1526,7 @@ pub mod tests { } #[test] - fn test_circuit_fails_if_insufficient_nonces_are_provided() { + fn circuit_fails_if_insufficient_nonces_are_provided() { let program = Program::simple_balance_transfer(); let sender_keys = test_private_account_keys_1(); let recipient_keys = test_private_account_keys_2(); @@ -1530,13 +1542,10 @@ pub mod tests { let private_account_2 = AccountWithMetadata::new(Account::default(), false, &recipient_keys.npk()); - // Setting only one nonce for an execution with two private accounts. - let private_account_nonces = [0xdeadbeef1]; let result = execute_and_prove( vec![private_account_1, private_account_2], - Program::serialize_instruction(10u128).unwrap(), + Program::serialize_instruction(10_u128).unwrap(), vec![1, 2], - private_account_nonces.to_vec(), vec![ ( sender_keys.npk(), @@ -1556,7 +1565,7 @@ pub mod tests { } #[test] - fn test_circuit_fails_if_insufficient_keys_are_provided() { + fn circuit_fails_if_insufficient_keys_are_provided() { let program = Program::simple_balance_transfer(); let sender_keys = test_private_account_keys_1(); let private_account_1 = AccountWithMetadata::new( @@ -1578,9 +1587,8 @@ pub mod tests { )]; let result = execute_and_prove( vec![private_account_1, private_account_2], - Program::serialize_instruction(10u128).unwrap(), + Program::serialize_instruction(10_u128).unwrap(), vec![1, 2], - vec![0xdeadbeef1, 0xdeadbeef2], private_account_keys.to_vec(), vec![sender_keys.nsk], vec![Some((0, vec![]))], @@ -1591,7 +1599,7 @@ pub mod tests { } #[test] - fn test_circuit_fails_if_insufficient_commitment_proofs_are_provided() { + fn circuit_fails_if_insufficient_commitment_proofs_are_provided() { let program = Program::simple_balance_transfer(); let sender_keys = test_private_account_keys_1(); let recipient_keys = test_private_account_keys_2(); @@ -1611,9 +1619,8 @@ pub mod tests { let private_account_membership_proofs = [Some((0, vec![]))]; let result = 
execute_and_prove( vec![private_account_1, private_account_2], - Program::serialize_instruction(10u128).unwrap(), + Program::serialize_instruction(10_u128).unwrap(), vec![1, 2], - vec![0xdeadbeef1, 0xdeadbeef2], vec![ ( sender_keys.npk(), @@ -1633,7 +1640,7 @@ pub mod tests { } #[test] - fn test_circuit_fails_if_insufficient_auth_keys_are_provided() { + fn circuit_fails_if_insufficient_auth_keys_are_provided() { let program = Program::simple_balance_transfer(); let sender_keys = test_private_account_keys_1(); let recipient_keys = test_private_account_keys_2(); @@ -1653,9 +1660,8 @@ pub mod tests { let private_account_nsks = []; let result = execute_and_prove( vec![private_account_1, private_account_2], - Program::serialize_instruction(10u128).unwrap(), + Program::serialize_instruction(10_u128).unwrap(), vec![1, 2], - vec![0xdeadbeef1, 0xdeadbeef2], vec![ ( sender_keys.npk(), @@ -1675,7 +1681,7 @@ pub mod tests { } #[test] - fn test_circuit_fails_if_invalid_auth_keys_are_provided() { + fn circuit_fails_if_invalid_auth_keys_are_provided() { let program = Program::simple_balance_transfer(); let sender_keys = test_private_account_keys_1(); let recipient_keys = test_private_account_keys_2(); @@ -1711,9 +1717,8 @@ pub mod tests { let private_account_membership_proofs = [Some((0, vec![]))]; let result = execute_and_prove( vec![private_account_1, private_account_2], - Program::serialize_instruction(10u128).unwrap(), + Program::serialize_instruction(10_u128).unwrap(), vec![1, 2], - vec![0xdeadbeef1, 0xdeadbeef2], private_account_keys.to_vec(), private_account_nsks.to_vec(), private_account_membership_proofs.to_vec(), @@ -1724,7 +1729,7 @@ pub mod tests { } #[test] - fn test_circuit_should_fail_if_new_private_account_with_non_default_balance_is_provided() { + fn circuit_should_fail_if_new_private_account_with_non_default_balance_is_provided() { let program = Program::simple_balance_transfer(); let sender_keys = test_private_account_keys_1(); let recipient_keys = 
test_private_account_keys_2(); @@ -1749,9 +1754,8 @@ pub mod tests { let result = execute_and_prove( vec![private_account_1, private_account_2], - Program::serialize_instruction(10u128).unwrap(), + Program::serialize_instruction(10_u128).unwrap(), vec![1, 2], - vec![0xdeadbeef1, 0xdeadbeef2], vec![ ( sender_keys.npk(), @@ -1771,8 +1775,7 @@ pub mod tests { } #[test] - fn test_circuit_should_fail_if_new_private_account_with_non_default_program_owner_is_provided() - { + fn circuit_should_fail_if_new_private_account_with_non_default_program_owner_is_provided() { let program = Program::simple_balance_transfer(); let sender_keys = test_private_account_keys_1(); let recipient_keys = test_private_account_keys_2(); @@ -1797,9 +1800,8 @@ pub mod tests { let result = execute_and_prove( vec![private_account_1, private_account_2], - Program::serialize_instruction(10u128).unwrap(), + Program::serialize_instruction(10_u128).unwrap(), vec![1, 2], - vec![0xdeadbeef1, 0xdeadbeef2], vec![ ( sender_keys.npk(), @@ -1819,7 +1821,7 @@ pub mod tests { } #[test] - fn test_circuit_should_fail_if_new_private_account_with_non_default_data_is_provided() { + fn circuit_should_fail_if_new_private_account_with_non_default_data_is_provided() { let program = Program::simple_balance_transfer(); let sender_keys = test_private_account_keys_1(); let recipient_keys = test_private_account_keys_2(); @@ -1844,9 +1846,8 @@ pub mod tests { let result = execute_and_prove( vec![private_account_1, private_account_2], - Program::serialize_instruction(10u128).unwrap(), + Program::serialize_instruction(10_u128).unwrap(), vec![1, 2], - vec![0xdeadbeef1, 0xdeadbeef2], vec![ ( sender_keys.npk(), @@ -1866,7 +1867,7 @@ pub mod tests { } #[test] - fn test_circuit_should_fail_if_new_private_account_with_non_default_nonce_is_provided() { + fn circuit_should_fail_if_new_private_account_with_non_default_nonce_is_provided() { let program = Program::simple_balance_transfer(); let sender_keys = test_private_account_keys_1(); 
let recipient_keys = test_private_account_keys_2(); @@ -1882,7 +1883,7 @@ pub mod tests { let private_account_2 = AccountWithMetadata::new( Account { // Non default nonce - nonce: 0xdeadbeef, + nonce: Nonce(0xdead_beef), ..Account::default() }, false, @@ -1891,9 +1892,8 @@ pub mod tests { let result = execute_and_prove( vec![private_account_1, private_account_2], - Program::serialize_instruction(10u128).unwrap(), + Program::serialize_instruction(10_u128).unwrap(), vec![1, 2], - vec![0xdeadbeef1, 0xdeadbeef2], vec![ ( sender_keys.npk(), @@ -1913,7 +1913,7 @@ pub mod tests { } #[test] - fn test_circuit_should_fail_if_new_private_account_is_provided_with_default_values_but_marked_as_authorized() + fn circuit_should_fail_if_new_private_account_is_provided_with_default_values_but_marked_as_authorized() { let program = Program::simple_balance_transfer(); let sender_keys = test_private_account_keys_1(); @@ -1936,9 +1936,8 @@ pub mod tests { let result = execute_and_prove( vec![private_account_1, private_account_2], - Program::serialize_instruction(10u128).unwrap(), + Program::serialize_instruction(10_u128).unwrap(), vec![1, 2], - vec![0xdeadbeef1, 0xdeadbeef2], vec![ ( sender_keys.npk(), @@ -1958,7 +1957,7 @@ pub mod tests { } #[test] - fn test_circuit_should_fail_with_invalid_visibility_mask_value() { + fn circuit_should_fail_with_invalid_visibility_mask_value() { let program = Program::simple_balance_transfer(); let public_account_1 = AccountWithMetadata::new( Account { @@ -1975,12 +1974,11 @@ pub mod tests { let visibility_mask = [0, 3]; let result = execute_and_prove( vec![public_account_1, public_account_2], - Program::serialize_instruction(10u128).unwrap(), + Program::serialize_instruction(10_u128).unwrap(), visibility_mask.to_vec(), vec![], vec![], vec![], - vec![], &program.into(), ); @@ -1988,7 +1986,7 @@ pub mod tests { } #[test] - fn test_circuit_should_fail_with_too_many_nonces() { + fn circuit_should_fail_with_too_many_nonces() { let program = 
Program::simple_balance_transfer(); let sender_keys = test_private_account_keys_1(); let recipient_keys = test_private_account_keys_2(); @@ -2004,14 +2002,10 @@ pub mod tests { let private_account_2 = AccountWithMetadata::new(Account::default(), false, &recipient_keys.npk()); - // Setting three new private account nonces for a circuit execution with only two private - // accounts. - let private_account_nonces = [0xdeadbeef1, 0xdeadbeef2, 0xdeadbeef3]; let result = execute_and_prove( vec![private_account_1, private_account_2], - Program::serialize_instruction(10u128).unwrap(), + Program::serialize_instruction(10_u128).unwrap(), vec![1, 2], - private_account_nonces.to_vec(), vec![ ( sender_keys.npk(), @@ -2031,7 +2025,7 @@ pub mod tests { } #[test] - fn test_circuit_should_fail_with_too_many_private_account_keys() { + fn circuit_should_fail_with_too_many_private_account_keys() { let program = Program::simple_balance_transfer(); let sender_keys = test_private_account_keys_1(); let recipient_keys = test_private_account_keys_2(); @@ -2065,9 +2059,8 @@ pub mod tests { ]; let result = execute_and_prove( vec![private_account_1, private_account_2], - Program::serialize_instruction(10u128).unwrap(), + Program::serialize_instruction(10_u128).unwrap(), vec![1, 2], - vec![0xdeadbeef1, 0xdeadbeef2], private_account_keys.to_vec(), vec![sender_keys.nsk], vec![Some((0, vec![]))], @@ -2078,7 +2071,7 @@ pub mod tests { } #[test] - fn test_circuit_should_fail_with_too_many_private_account_auth_keys() { + fn circuit_should_fail_with_too_many_private_account_auth_keys() { let program = Program::simple_balance_transfer(); let sender_keys = test_private_account_keys_1(); let recipient_keys = test_private_account_keys_2(); @@ -2101,9 +2094,8 @@ pub mod tests { let private_account_membership_proofs = [Some((0, vec![])), Some((1, vec![]))]; let result = execute_and_prove( vec![private_account_1, private_account_2], - Program::serialize_instruction(10u128).unwrap(), + 
Program::serialize_instruction(10_u128).unwrap(), visibility_mask.to_vec(), - vec![0xdeadbeef1, 0xdeadbeef2], vec![ ( sender_keys.npk(), @@ -2123,12 +2115,14 @@ pub mod tests { } #[test] - fn test_private_accounts_can_only_be_initialized_once() { + fn private_accounts_can_only_be_initialized_once() { let sender_keys = test_private_account_keys_1(); + let sender_nonce = Nonce(0xdead_beef); + let sender_private_account = Account { program_owner: Program::authenticated_transfer_program().id(), balance: 100, - nonce: 0xdeadbeef, + nonce: sender_nonce, data: Data::default(), }; let recipient_keys = test_private_account_keys_2(); @@ -2137,13 +2131,13 @@ pub mod tests { .with_private_account(&sender_keys, &sender_private_account); let balance_to_move = 37; + let balance_to_move_2 = 30; let tx = private_balance_transfer_for_tests( &sender_keys, &sender_private_account, &recipient_keys, balance_to_move, - [0xcafecafe, 0xfecafeca], &state, ); @@ -2153,8 +2147,8 @@ pub mod tests { let sender_private_account = Account { program_owner: Program::authenticated_transfer_program().id(), - balance: 100 - balance_to_move, - nonce: 0xcafecafe, + balance: 100, + nonce: sender_nonce, data: Data::default(), }; @@ -2162,8 +2156,7 @@ pub mod tests { &sender_keys, &sender_private_account, &recipient_keys, - balance_to_move, - [0x1234, 0x5678], + balance_to_move_2, &state, ); @@ -2173,12 +2166,12 @@ pub mod tests { let NssaError::InvalidInput(error_message) = result.err().unwrap() else { panic!("Incorrect message error"); }; - let expected_error_message = "Nullifier already seen".to_string(); + let expected_error_message = "Nullifier already seen".to_owned(); assert_eq!(error_message, expected_error_message); } #[test] - fn test_circuit_should_fail_if_there_are_repeated_ids() { + fn circuit_should_fail_if_there_are_repeated_ids() { let program = Program::simple_balance_transfer(); let sender_keys = test_private_account_keys_1(); let private_account_1 = AccountWithMetadata::new( @@ -2197,9 
+2190,8 @@ pub mod tests { let shared_secret = SharedSecretKey::new(&[55; 32], &sender_keys.vpk()); let result = execute_and_prove( vec![private_account_1.clone(), private_account_1], - Program::serialize_instruction(100u128).unwrap(), + Program::serialize_instruction(100_u128).unwrap(), visibility_mask.to_vec(), - vec![0xdeadbeef1, 0xdeadbeef2], vec![ (sender_keys.npk(), shared_secret), (sender_keys.npk(), shared_secret), @@ -2213,7 +2205,7 @@ pub mod tests { } #[test] - fn test_claiming_mechanism() { + fn claiming_mechanism() { let program = Program::authenticated_transfer_program(); let key = PrivateKey::try_new([1; 32]).unwrap(); let account_id = AccountId::from(&PublicKey::new_from_private_key(&key)); @@ -2235,9 +2227,13 @@ pub mod tests { ..Account::default() }; - let message = - public_transaction::Message::try_new(program.id(), vec![from, to], vec![0], amount) - .unwrap(); + let message = public_transaction::Message::try_new( + program.id(), + vec![from, to], + vec![Nonce(0)], + amount, + ) + .unwrap(); let witness_set = public_transaction::WitnessSet::for_message(&message, &[&from_key]); let tx = PublicTransaction::new(message, witness_set); @@ -2249,7 +2245,7 @@ pub mod tests { } #[test] - fn test_public_chained_call() { + fn public_chained_call() { let program = Program::chain_caller(); let key = PrivateKey::try_new([1; 32]).unwrap(); let from = AccountId::from(&PublicKey::new_from_private_key(&key)); @@ -2277,7 +2273,7 @@ pub mod tests { program.id(), vec![to, from], // The chain_caller program permutes the account order in the chain // call - vec![0], + vec![Nonce(0)], instruction, ) .unwrap(); @@ -2294,7 +2290,7 @@ pub mod tests { } #[test] - fn test_execution_fails_if_chained_calls_exceeds_depth() { + fn execution_fails_if_chained_calls_exceeds_depth() { let program = Program::chain_caller(); let key = PrivateKey::try_new([1; 32]).unwrap(); let from = AccountId::from(&PublicKey::new_from_private_key(&key)); @@ -2308,7 +2304,8 @@ pub mod tests { let 
instruction: (u128, ProgramId, u32, Option) = ( amount, Program::authenticated_transfer_program().id(), - MAX_NUMBER_CHAINED_CALLS as u32 + 1, + u32::try_from(MAX_NUMBER_CHAINED_CALLS).expect("MAX_NUMBER_CHAINED_CALLS fits in u32") + + 1, None, ); @@ -2316,7 +2313,7 @@ pub mod tests { program.id(), vec![to, from], // The chain_caller program permutes the account order in the chain // call - vec![0], + vec![Nonce(0)], instruction, ) .unwrap(); @@ -2330,1354 +2327,8 @@ pub mod tests { )); } - struct PrivateKeysForTests; - - impl PrivateKeysForTests { - fn user_token_a_key() -> PrivateKey { - PrivateKey::try_new([31; 32]).expect("Keys constructor expects valid private key") - } - - fn user_token_b_key() -> PrivateKey { - PrivateKey::try_new([32; 32]).expect("Keys constructor expects valid private key") - } - - fn user_token_lp_key() -> PrivateKey { - PrivateKey::try_new([33; 32]).expect("Keys constructor expects valid private key") - } - } - - struct BalanceForTests; - - impl BalanceForTests { - fn user_token_a_holding_init() -> u128 { - 10_000 - } - - fn user_token_b_holding_init() -> u128 { - 10_000 - } - - fn user_token_lp_holding_init() -> u128 { - 2_000 - } - - fn vault_a_balance_init() -> u128 { - 5_000 - } - - fn vault_b_balance_init() -> u128 { - 2_500 - } - - fn pool_lp_supply_init() -> u128 { - 5_000 - } - - fn token_a_supply() -> u128 { - 100_000 - } - - fn token_b_supply() -> u128 { - 100_000 - } - - fn token_lp_supply() -> u128 { - 5_000 - } - - fn remove_lp() -> u128 { - 1_000 - } - - fn remove_min_amount_a() -> u128 { - 500 - } - - fn remove_min_amount_b() -> u128 { - 500 - } - - fn add_min_amount_lp() -> u128 { - 1_000 - } - - fn add_max_amount_a() -> u128 { - 2_000 - } - - fn add_max_amount_b() -> u128 { - 1_000 - } - - fn swap_amount_in() -> u128 { - 1_000 - } - - fn swap_min_amount_out() -> u128 { - 200 - } - - fn vault_a_balance_swap_1() -> u128 { - 3_572 - } - - fn vault_b_balance_swap_1() -> u128 { - 3_500 - } - - fn user_token_a_holding_swap_1() 
-> u128 { - 11_428 - } - - fn user_token_b_holding_swap_1() -> u128 { - 9_000 - } - - fn vault_a_balance_swap_2() -> u128 { - 6_000 - } - - fn vault_b_balance_swap_2() -> u128 { - 2_084 - } - - fn user_token_a_holding_swap_2() -> u128 { - 9_000 - } - - fn user_token_b_holding_swap_2() -> u128 { - 10_416 - } - - fn vault_a_balance_add() -> u128 { - 7_000 - } - - fn vault_b_balance_add() -> u128 { - 3_500 - } - - fn user_token_a_holding_add() -> u128 { - 8_000 - } - - fn user_token_b_holding_add() -> u128 { - 9_000 - } - - fn user_token_lp_holding_add() -> u128 { - 4_000 - } - - fn token_lp_supply_add() -> u128 { - 7_000 - } - - fn vault_a_balance_remove() -> u128 { - 4_000 - } - - fn vault_b_balance_remove() -> u128 { - 2_000 - } - - fn user_token_a_holding_remove() -> u128 { - 11_000 - } - - fn user_token_b_holding_remove() -> u128 { - 10_500 - } - - fn user_token_lp_holding_remove() -> u128 { - 1_000 - } - - fn token_lp_supply_remove() -> u128 { - 4_000 - } - - fn user_token_a_holding_new_definition() -> u128 { - 5_000 - } - - fn user_token_b_holding_new_definition() -> u128 { - 7_500 - } - - fn lp_supply_init() -> u128 { - // isqrt(vault_a_balance_init * vault_b_balance_init) = isqrt(5_000 * 2_500) = 3535 - (BalanceForTests::vault_a_balance_init() * BalanceForTests::vault_b_balance_init()) - .isqrt() - } - } - - struct IdForTests; - - impl IdForTests { - fn pool_definition_id() -> AccountId { - amm_core::compute_pool_pda( - Program::amm().id(), - IdForTests::token_a_definition_id(), - IdForTests::token_b_definition_id(), - ) - } - - fn token_lp_definition_id() -> AccountId { - amm_core::compute_liquidity_token_pda( - Program::amm().id(), - IdForTests::pool_definition_id(), - ) - } - - fn token_a_definition_id() -> AccountId { - AccountId::new([3; 32]) - } - - fn token_b_definition_id() -> AccountId { - AccountId::new([4; 32]) - } - - fn user_token_a_id() -> AccountId { - AccountId::from(&PublicKey::new_from_private_key( - &PrivateKeysForTests::user_token_a_key(), 
- )) - } - - fn user_token_b_id() -> AccountId { - AccountId::from(&PublicKey::new_from_private_key( - &PrivateKeysForTests::user_token_b_key(), - )) - } - - fn user_token_lp_id() -> AccountId { - AccountId::from(&PublicKey::new_from_private_key( - &PrivateKeysForTests::user_token_lp_key(), - )) - } - - fn vault_a_id() -> AccountId { - amm_core::compute_vault_pda( - Program::amm().id(), - IdForTests::pool_definition_id(), - IdForTests::token_a_definition_id(), - ) - } - - fn vault_b_id() -> AccountId { - amm_core::compute_vault_pda( - Program::amm().id(), - IdForTests::pool_definition_id(), - IdForTests::token_b_definition_id(), - ) - } - } - - struct AccountForTests; - - impl AccountForTests { - fn user_token_a_holding() -> Account { - Account { - program_owner: Program::token().id(), - balance: 0u128, - data: Data::from(&TokenHolding::Fungible { - definition_id: IdForTests::token_a_definition_id(), - balance: BalanceForTests::user_token_a_holding_init(), - }), - nonce: 0, - } - } - - fn user_token_b_holding() -> Account { - Account { - program_owner: Program::token().id(), - balance: 0u128, - data: Data::from(&TokenHolding::Fungible { - definition_id: IdForTests::token_b_definition_id(), - balance: BalanceForTests::user_token_b_holding_init(), - }), - nonce: 0, - } - } - - fn pool_definition_init() -> Account { - Account { - program_owner: Program::amm().id(), - balance: 0u128, - data: Data::from(&PoolDefinition { - definition_token_a_id: IdForTests::token_a_definition_id(), - definition_token_b_id: IdForTests::token_b_definition_id(), - vault_a_id: IdForTests::vault_a_id(), - vault_b_id: IdForTests::vault_b_id(), - liquidity_pool_id: IdForTests::token_lp_definition_id(), - liquidity_pool_supply: BalanceForTests::pool_lp_supply_init(), - reserve_a: BalanceForTests::vault_a_balance_init(), - reserve_b: BalanceForTests::vault_b_balance_init(), - fees: 0u128, - active: true, - }), - nonce: 0, - } - } - - fn token_a_definition_account() -> Account { - Account { - 
program_owner: Program::token().id(), - balance: 0u128, - data: Data::from(&TokenDefinition::Fungible { - name: String::from("test"), - total_supply: BalanceForTests::token_a_supply(), - metadata_id: None, - }), - nonce: 0, - } - } - - fn token_b_definition_acc() -> Account { - Account { - program_owner: Program::token().id(), - balance: 0u128, - data: Data::from(&TokenDefinition::Fungible { - name: String::from("test"), - total_supply: BalanceForTests::token_b_supply(), - metadata_id: None, - }), - nonce: 0, - } - } - - fn token_lp_definition_acc() -> Account { - Account { - program_owner: Program::token().id(), - balance: 0u128, - data: Data::from(&TokenDefinition::Fungible { - name: String::from("LP Token"), - total_supply: BalanceForTests::token_lp_supply(), - metadata_id: None, - }), - nonce: 0, - } - } - - fn vault_a_init() -> Account { - Account { - program_owner: Program::token().id(), - balance: 0u128, - data: Data::from(&TokenHolding::Fungible { - definition_id: IdForTests::token_a_definition_id(), - balance: BalanceForTests::vault_a_balance_init(), - }), - nonce: 0, - } - } - - fn vault_b_init() -> Account { - Account { - program_owner: Program::token().id(), - balance: 0u128, - data: Data::from(&TokenHolding::Fungible { - definition_id: IdForTests::token_b_definition_id(), - balance: BalanceForTests::vault_b_balance_init(), - }), - nonce: 0, - } - } - - fn user_token_lp_holding() -> Account { - Account { - program_owner: Program::token().id(), - balance: 0u128, - data: Data::from(&TokenHolding::Fungible { - definition_id: IdForTests::token_lp_definition_id(), - balance: BalanceForTests::user_token_lp_holding_init(), - }), - nonce: 0, - } - } - - fn vault_a_swap_1() -> Account { - Account { - program_owner: Program::token().id(), - balance: 0u128, - data: Data::from(&TokenHolding::Fungible { - definition_id: IdForTests::token_a_definition_id(), - balance: BalanceForTests::vault_a_balance_swap_1(), - }), - nonce: 0, - } - } - - fn vault_b_swap_1() -> 
Account { - Account { - program_owner: Program::token().id(), - balance: 0u128, - data: Data::from(&TokenHolding::Fungible { - definition_id: IdForTests::token_b_definition_id(), - balance: BalanceForTests::vault_b_balance_swap_1(), - }), - nonce: 0, - } - } - - fn pool_definition_swap_1() -> Account { - Account { - program_owner: Program::amm().id(), - balance: 0u128, - data: Data::from(&PoolDefinition { - definition_token_a_id: IdForTests::token_a_definition_id(), - definition_token_b_id: IdForTests::token_b_definition_id(), - vault_a_id: IdForTests::vault_a_id(), - vault_b_id: IdForTests::vault_b_id(), - liquidity_pool_id: IdForTests::token_lp_definition_id(), - liquidity_pool_supply: BalanceForTests::pool_lp_supply_init(), - reserve_a: BalanceForTests::vault_a_balance_swap_1(), - reserve_b: BalanceForTests::vault_b_balance_swap_1(), - fees: 0u128, - active: true, - }), - nonce: 0, - } - } - - fn user_token_a_holding_swap_1() -> Account { - Account { - program_owner: Program::token().id(), - balance: 0u128, - data: Data::from(&TokenHolding::Fungible { - definition_id: IdForTests::token_a_definition_id(), - balance: BalanceForTests::user_token_a_holding_swap_1(), - }), - nonce: 0, - } - } - - fn user_token_b_holding_swap_1() -> Account { - Account { - program_owner: Program::token().id(), - balance: 0u128, - data: Data::from(&TokenHolding::Fungible { - definition_id: IdForTests::token_b_definition_id(), - balance: BalanceForTests::user_token_b_holding_swap_1(), - }), - nonce: 1, - } - } - - fn vault_a_swap_2() -> Account { - Account { - program_owner: Program::token().id(), - balance: 0u128, - data: Data::from(&TokenHolding::Fungible { - definition_id: IdForTests::token_a_definition_id(), - balance: BalanceForTests::vault_a_balance_swap_2(), - }), - nonce: 0, - } - } - - fn vault_b_swap_2() -> Account { - Account { - program_owner: Program::token().id(), - balance: 0u128, - data: Data::from(&TokenHolding::Fungible { - definition_id: 
IdForTests::token_b_definition_id(), - balance: BalanceForTests::vault_b_balance_swap_2(), - }), - nonce: 0, - } - } - - fn pool_definition_swap_2() -> Account { - Account { - program_owner: Program::amm().id(), - balance: 0u128, - data: Data::from(&PoolDefinition { - definition_token_a_id: IdForTests::token_a_definition_id(), - definition_token_b_id: IdForTests::token_b_definition_id(), - vault_a_id: IdForTests::vault_a_id(), - vault_b_id: IdForTests::vault_b_id(), - liquidity_pool_id: IdForTests::token_lp_definition_id(), - liquidity_pool_supply: BalanceForTests::pool_lp_supply_init(), - reserve_a: BalanceForTests::vault_a_balance_swap_2(), - reserve_b: BalanceForTests::vault_b_balance_swap_2(), - fees: 0u128, - active: true, - }), - nonce: 0, - } - } - - fn user_token_a_holding_swap_2() -> Account { - Account { - program_owner: Program::token().id(), - balance: 0u128, - data: Data::from(&TokenHolding::Fungible { - definition_id: IdForTests::token_a_definition_id(), - balance: BalanceForTests::user_token_a_holding_swap_2(), - }), - nonce: 1, - } - } - - fn user_token_b_holding_swap_2() -> Account { - Account { - program_owner: Program::token().id(), - balance: 0u128, - data: Data::from(&TokenHolding::Fungible { - definition_id: IdForTests::token_b_definition_id(), - balance: BalanceForTests::user_token_b_holding_swap_2(), - }), - nonce: 0, - } - } - - fn vault_a_add() -> Account { - Account { - program_owner: Program::token().id(), - balance: 0u128, - data: Data::from(&TokenHolding::Fungible { - definition_id: IdForTests::token_a_definition_id(), - balance: BalanceForTests::vault_a_balance_add(), - }), - nonce: 0, - } - } - - fn vault_b_add() -> Account { - Account { - program_owner: Program::token().id(), - balance: 0u128, - data: Data::from(&TokenHolding::Fungible { - definition_id: IdForTests::token_b_definition_id(), - balance: BalanceForTests::vault_b_balance_add(), - }), - nonce: 0, - } - } - - fn pool_definition_add() -> Account { - Account { - 
program_owner: Program::amm().id(), - balance: 0u128, - data: Data::from(&PoolDefinition { - definition_token_a_id: IdForTests::token_a_definition_id(), - definition_token_b_id: IdForTests::token_b_definition_id(), - vault_a_id: IdForTests::vault_a_id(), - vault_b_id: IdForTests::vault_b_id(), - liquidity_pool_id: IdForTests::token_lp_definition_id(), - liquidity_pool_supply: BalanceForTests::token_lp_supply_add(), - reserve_a: BalanceForTests::vault_a_balance_add(), - reserve_b: BalanceForTests::vault_b_balance_add(), - fees: 0u128, - active: true, - }), - nonce: 0, - } - } - - fn user_token_a_holding_add() -> Account { - Account { - program_owner: Program::token().id(), - balance: 0u128, - data: Data::from(&TokenHolding::Fungible { - definition_id: IdForTests::token_a_definition_id(), - balance: BalanceForTests::user_token_a_holding_add(), - }), - nonce: 1, - } - } - - fn user_token_b_holding_add() -> Account { - Account { - program_owner: Program::token().id(), - balance: 0u128, - data: Data::from(&TokenHolding::Fungible { - definition_id: IdForTests::token_b_definition_id(), - balance: BalanceForTests::user_token_b_holding_add(), - }), - nonce: 1, - } - } - - fn user_token_lp_holding_add() -> Account { - Account { - program_owner: Program::token().id(), - balance: 0u128, - data: Data::from(&TokenHolding::Fungible { - definition_id: IdForTests::token_lp_definition_id(), - balance: BalanceForTests::user_token_lp_holding_add(), - }), - nonce: 0, - } - } - - fn token_lp_definition_add() -> Account { - Account { - program_owner: Program::token().id(), - balance: 0u128, - data: Data::from(&TokenDefinition::Fungible { - name: String::from("LP Token"), - total_supply: BalanceForTests::token_lp_supply_add(), - metadata_id: None, - }), - nonce: 0, - } - } - - fn vault_a_remove() -> Account { - Account { - program_owner: Program::token().id(), - balance: 0u128, - data: Data::from(&TokenHolding::Fungible { - definition_id: IdForTests::token_a_definition_id(), - balance: 
BalanceForTests::vault_a_balance_remove(), - }), - nonce: 0, - } - } - - fn vault_b_remove() -> Account { - Account { - program_owner: Program::token().id(), - balance: 0u128, - data: Data::from(&TokenHolding::Fungible { - definition_id: IdForTests::token_b_definition_id(), - balance: BalanceForTests::vault_b_balance_remove(), - }), - nonce: 0, - } - } - - fn pool_definition_remove() -> Account { - Account { - program_owner: Program::amm().id(), - balance: 0u128, - data: Data::from(&PoolDefinition { - definition_token_a_id: IdForTests::token_a_definition_id(), - definition_token_b_id: IdForTests::token_b_definition_id(), - vault_a_id: IdForTests::vault_a_id(), - vault_b_id: IdForTests::vault_b_id(), - liquidity_pool_id: IdForTests::token_lp_definition_id(), - liquidity_pool_supply: BalanceForTests::token_lp_supply_remove(), - reserve_a: BalanceForTests::vault_a_balance_remove(), - reserve_b: BalanceForTests::vault_b_balance_remove(), - fees: 0u128, - active: true, - }), - nonce: 0, - } - } - - fn user_token_a_holding_remove() -> Account { - Account { - program_owner: Program::token().id(), - balance: 0u128, - data: Data::from(&TokenHolding::Fungible { - definition_id: IdForTests::token_a_definition_id(), - balance: BalanceForTests::user_token_a_holding_remove(), - }), - nonce: 0, - } - } - - fn user_token_b_holding_remove() -> Account { - Account { - program_owner: Program::token().id(), - balance: 0u128, - data: Data::from(&TokenHolding::Fungible { - definition_id: IdForTests::token_b_definition_id(), - balance: BalanceForTests::user_token_b_holding_remove(), - }), - nonce: 0, - } - } - - fn user_token_lp_holding_remove() -> Account { - Account { - program_owner: Program::token().id(), - balance: 0u128, - data: Data::from(&TokenHolding::Fungible { - definition_id: IdForTests::token_lp_definition_id(), - balance: BalanceForTests::user_token_lp_holding_remove(), - }), - nonce: 1, - } - } - - fn token_lp_definition_remove() -> Account { - Account { - program_owner: 
Program::token().id(), - balance: 0u128, - data: Data::from(&TokenDefinition::Fungible { - name: String::from("LP Token"), - total_supply: BalanceForTests::token_lp_supply_remove(), - metadata_id: None, - }), - nonce: 0, - } - } - - fn token_lp_definition_init_inactive() -> Account { - Account { - program_owner: Program::token().id(), - balance: 0u128, - data: Data::from(&TokenDefinition::Fungible { - name: String::from("LP Token"), - total_supply: 0, - metadata_id: None, - }), - nonce: 0, - } - } - - fn vault_a_init_inactive() -> Account { - Account { - program_owner: Program::token().id(), - balance: 0u128, - data: Data::from(&TokenHolding::Fungible { - definition_id: IdForTests::token_a_definition_id(), - balance: 0, - }), - nonce: 0, - } - } - - fn vault_b_init_inactive() -> Account { - Account { - program_owner: Program::token().id(), - balance: 0u128, - data: Data::from(&TokenHolding::Fungible { - definition_id: IdForTests::token_b_definition_id(), - balance: 0, - }), - nonce: 0, - } - } - - fn pool_definition_inactive() -> Account { - Account { - program_owner: Program::amm().id(), - balance: 0u128, - data: Data::from(&PoolDefinition { - definition_token_a_id: IdForTests::token_a_definition_id(), - definition_token_b_id: IdForTests::token_b_definition_id(), - vault_a_id: IdForTests::vault_a_id(), - vault_b_id: IdForTests::vault_b_id(), - liquidity_pool_id: IdForTests::token_lp_definition_id(), - liquidity_pool_supply: 0, - reserve_a: 0, - reserve_b: 0, - fees: 0u128, - active: false, - }), - nonce: 0, - } - } - - fn user_token_a_holding_new_init() -> Account { - Account { - program_owner: Program::token().id(), - balance: 0u128, - data: Data::from(&TokenHolding::Fungible { - definition_id: IdForTests::token_a_definition_id(), - balance: BalanceForTests::user_token_a_holding_new_definition(), - }), - nonce: 1, - } - } - - fn user_token_b_holding_new_init() -> Account { - Account { - program_owner: Program::token().id(), - balance: 0u128, - data: 
Data::from(&TokenHolding::Fungible { - definition_id: IdForTests::token_b_definition_id(), - balance: BalanceForTests::user_token_b_holding_new_definition(), - }), - nonce: 1, - } - } - - fn user_token_lp_holding_new_init() -> Account { - Account { - program_owner: Program::token().id(), - balance: 0u128, - data: Data::from(&TokenHolding::Fungible { - definition_id: IdForTests::token_lp_definition_id(), - balance: BalanceForTests::lp_supply_init(), - }), - nonce: 0, - } - } - - fn token_lp_definition_new_init() -> Account { - Account { - program_owner: Program::token().id(), - balance: 0u128, - data: Data::from(&TokenDefinition::Fungible { - name: String::from("LP Token"), - total_supply: BalanceForTests::lp_supply_init(), - metadata_id: None, - }), - nonce: 0, - } - } - - fn pool_definition_new_init() -> Account { - Account { - program_owner: Program::amm().id(), - balance: 0u128, - data: Data::from(&PoolDefinition { - definition_token_a_id: IdForTests::token_a_definition_id(), - definition_token_b_id: IdForTests::token_b_definition_id(), - vault_a_id: IdForTests::vault_a_id(), - vault_b_id: IdForTests::vault_b_id(), - liquidity_pool_id: IdForTests::token_lp_definition_id(), - liquidity_pool_supply: BalanceForTests::lp_supply_init(), - reserve_a: BalanceForTests::vault_a_balance_init(), - reserve_b: BalanceForTests::vault_b_balance_init(), - fees: 0u128, - active: true, - }), - nonce: 0, - } - } - - fn user_token_lp_holding_init_zero() -> Account { - Account { - program_owner: Program::token().id(), - balance: 0u128, - data: Data::from(&TokenHolding::Fungible { - definition_id: IdForTests::token_lp_definition_id(), - balance: 0, - }), - nonce: 0, - } - } - } - - fn state_for_amm_tests() -> V02State { - let initial_data = []; - let mut state = - V02State::new_with_genesis_accounts(&initial_data, &[]).with_test_programs(); - state.force_insert_account( - IdForTests::pool_definition_id(), - AccountForTests::pool_definition_init(), - ); - state.force_insert_account( - 
IdForTests::token_a_definition_id(), - AccountForTests::token_a_definition_account(), - ); - state.force_insert_account( - IdForTests::token_b_definition_id(), - AccountForTests::token_b_definition_acc(), - ); - state.force_insert_account( - IdForTests::token_lp_definition_id(), - AccountForTests::token_lp_definition_acc(), - ); - state.force_insert_account( - IdForTests::user_token_a_id(), - AccountForTests::user_token_a_holding(), - ); - state.force_insert_account( - IdForTests::user_token_b_id(), - AccountForTests::user_token_b_holding(), - ); - state.force_insert_account( - IdForTests::user_token_lp_id(), - AccountForTests::user_token_lp_holding(), - ); - state.force_insert_account(IdForTests::vault_a_id(), AccountForTests::vault_a_init()); - state.force_insert_account(IdForTests::vault_b_id(), AccountForTests::vault_b_init()); - - state - } - - fn state_for_amm_tests_with_new_def() -> V02State { - let initial_data = []; - let mut state = - V02State::new_with_genesis_accounts(&initial_data, &[]).with_test_programs(); - state.force_insert_account( - IdForTests::token_a_definition_id(), - AccountForTests::token_a_definition_account(), - ); - state.force_insert_account( - IdForTests::token_b_definition_id(), - AccountForTests::token_b_definition_acc(), - ); - state.force_insert_account( - IdForTests::user_token_a_id(), - AccountForTests::user_token_a_holding(), - ); - state.force_insert_account( - IdForTests::user_token_b_id(), - AccountForTests::user_token_b_holding(), - ); - state - } - #[test] - fn test_simple_amm_remove() { - let mut state = state_for_amm_tests(); - - let instruction = amm_core::Instruction::RemoveLiquidity { - remove_liquidity_amount: BalanceForTests::remove_lp(), - min_amount_to_remove_token_a: BalanceForTests::remove_min_amount_a(), - min_amount_to_remove_token_b: BalanceForTests::remove_min_amount_b(), - }; - - let message = public_transaction::Message::try_new( - Program::amm().id(), - vec![ - IdForTests::pool_definition_id(), - 
IdForTests::vault_a_id(), - IdForTests::vault_b_id(), - IdForTests::token_lp_definition_id(), - IdForTests::user_token_a_id(), - IdForTests::user_token_b_id(), - IdForTests::user_token_lp_id(), - ], - vec![0], - instruction, - ) - .unwrap(); - - let witness_set = public_transaction::WitnessSet::for_message( - &message, - &[&PrivateKeysForTests::user_token_lp_key()], - ); - - let tx = PublicTransaction::new(message, witness_set); - state.transition_from_public_transaction(&tx).unwrap(); - - let pool_post = state.get_account_by_id(IdForTests::pool_definition_id()); - let vault_a_post = state.get_account_by_id(IdForTests::vault_a_id()); - let vault_b_post = state.get_account_by_id(IdForTests::vault_b_id()); - let token_lp_post = state.get_account_by_id(IdForTests::token_lp_definition_id()); - let user_token_a_post = state.get_account_by_id(IdForTests::user_token_a_id()); - let user_token_b_post = state.get_account_by_id(IdForTests::user_token_b_id()); - let user_token_lp_post = state.get_account_by_id(IdForTests::user_token_lp_id()); - - let expected_pool = AccountForTests::pool_definition_remove(); - let expected_vault_a = AccountForTests::vault_a_remove(); - let expected_vault_b = AccountForTests::vault_b_remove(); - let expected_token_lp = AccountForTests::token_lp_definition_remove(); - let expected_user_token_a = AccountForTests::user_token_a_holding_remove(); - let expected_user_token_b = AccountForTests::user_token_b_holding_remove(); - let expected_user_token_lp = AccountForTests::user_token_lp_holding_remove(); - - assert_eq!(pool_post, expected_pool); - assert_eq!(vault_a_post, expected_vault_a); - assert_eq!(vault_b_post, expected_vault_b); - assert_eq!(token_lp_post, expected_token_lp); - assert_eq!(user_token_a_post, expected_user_token_a); - assert_eq!(user_token_b_post, expected_user_token_b); - assert_eq!(user_token_lp_post, expected_user_token_lp); - } - - #[test] - fn test_simple_amm_new_definition_inactive_initialized_pool_and_uninit_user_lp() { - 
let mut state = state_for_amm_tests_with_new_def(); - - // Uninitialized in constructor - state.force_insert_account( - IdForTests::vault_a_id(), - AccountForTests::vault_a_init_inactive(), - ); - state.force_insert_account( - IdForTests::vault_b_id(), - AccountForTests::vault_b_init_inactive(), - ); - state.force_insert_account( - IdForTests::pool_definition_id(), - AccountForTests::pool_definition_inactive(), - ); - state.force_insert_account( - IdForTests::token_lp_definition_id(), - AccountForTests::token_lp_definition_init_inactive(), - ); - - let instruction = amm_core::Instruction::NewDefinition { - token_a_amount: BalanceForTests::vault_a_balance_init(), - token_b_amount: BalanceForTests::vault_b_balance_init(), - amm_program_id: Program::amm().id(), - }; - - let message = public_transaction::Message::try_new( - Program::amm().id(), - vec![ - IdForTests::pool_definition_id(), - IdForTests::vault_a_id(), - IdForTests::vault_b_id(), - IdForTests::token_lp_definition_id(), - IdForTests::user_token_a_id(), - IdForTests::user_token_b_id(), - IdForTests::user_token_lp_id(), - ], - vec![0, 0], - instruction, - ) - .unwrap(); - - let witness_set = public_transaction::WitnessSet::for_message( - &message, - &[ - &PrivateKeysForTests::user_token_a_key(), - &PrivateKeysForTests::user_token_b_key(), - ], - ); - - let tx = PublicTransaction::new(message, witness_set); - state.transition_from_public_transaction(&tx).unwrap(); - - let pool_post = state.get_account_by_id(IdForTests::pool_definition_id()); - let vault_a_post = state.get_account_by_id(IdForTests::vault_a_id()); - let vault_b_post = state.get_account_by_id(IdForTests::vault_b_id()); - let token_lp_post = state.get_account_by_id(IdForTests::token_lp_definition_id()); - let user_token_a_post = state.get_account_by_id(IdForTests::user_token_a_id()); - let user_token_b_post = state.get_account_by_id(IdForTests::user_token_b_id()); - let user_token_lp_post = state.get_account_by_id(IdForTests::user_token_lp_id()); 
- - let expected_pool = AccountForTests::pool_definition_new_init(); - let expected_vault_a = AccountForTests::vault_a_init(); - let expected_vault_b = AccountForTests::vault_b_init(); - let expected_token_lp = AccountForTests::token_lp_definition_new_init(); - let expected_user_token_a = AccountForTests::user_token_a_holding_new_init(); - let expected_user_token_b = AccountForTests::user_token_b_holding_new_init(); - let expected_user_token_lp = AccountForTests::user_token_lp_holding_new_init(); - - assert_eq!(pool_post, expected_pool); - assert_eq!(vault_a_post, expected_vault_a); - assert_eq!(vault_b_post, expected_vault_b); - assert_eq!(token_lp_post, expected_token_lp); - assert_eq!(user_token_a_post, expected_user_token_a); - assert_eq!(user_token_b_post, expected_user_token_b); - assert_eq!(user_token_lp_post, expected_user_token_lp); - } - - #[test] - fn test_simple_amm_new_definition_inactive_initialized_pool_init_user_lp() { - let mut state = state_for_amm_tests_with_new_def(); - - // Uninitialized in constructor - state.force_insert_account( - IdForTests::vault_a_id(), - AccountForTests::vault_a_init_inactive(), - ); - state.force_insert_account( - IdForTests::vault_b_id(), - AccountForTests::vault_b_init_inactive(), - ); - state.force_insert_account( - IdForTests::pool_definition_id(), - AccountForTests::pool_definition_inactive(), - ); - state.force_insert_account( - IdForTests::token_lp_definition_id(), - AccountForTests::token_lp_definition_init_inactive(), - ); - state.force_insert_account( - IdForTests::user_token_lp_id(), - AccountForTests::user_token_lp_holding_init_zero(), - ); - - let instruction = amm_core::Instruction::NewDefinition { - token_a_amount: BalanceForTests::vault_a_balance_init(), - token_b_amount: BalanceForTests::vault_b_balance_init(), - amm_program_id: Program::amm().id(), - }; - - let message = public_transaction::Message::try_new( - Program::amm().id(), - vec![ - IdForTests::pool_definition_id(), - IdForTests::vault_a_id(), 
- IdForTests::vault_b_id(), - IdForTests::token_lp_definition_id(), - IdForTests::user_token_a_id(), - IdForTests::user_token_b_id(), - IdForTests::user_token_lp_id(), - ], - vec![0, 0], - instruction, - ) - .unwrap(); - - let witness_set = public_transaction::WitnessSet::for_message( - &message, - &[ - &PrivateKeysForTests::user_token_a_key(), - &PrivateKeysForTests::user_token_b_key(), - ], - ); - - let tx = PublicTransaction::new(message, witness_set); - state.transition_from_public_transaction(&tx).unwrap(); - - let pool_post = state.get_account_by_id(IdForTests::pool_definition_id()); - let vault_a_post = state.get_account_by_id(IdForTests::vault_a_id()); - let vault_b_post = state.get_account_by_id(IdForTests::vault_b_id()); - let token_lp_post = state.get_account_by_id(IdForTests::token_lp_definition_id()); - let user_token_a_post = state.get_account_by_id(IdForTests::user_token_a_id()); - let user_token_b_post = state.get_account_by_id(IdForTests::user_token_b_id()); - let user_token_lp_post = state.get_account_by_id(IdForTests::user_token_lp_id()); - - let expected_pool = AccountForTests::pool_definition_new_init(); - let expected_vault_a = AccountForTests::vault_a_init(); - let expected_vault_b = AccountForTests::vault_b_init(); - let expected_token_lp = AccountForTests::token_lp_definition_new_init(); - let expected_user_token_a = AccountForTests::user_token_a_holding_new_init(); - let expected_user_token_b = AccountForTests::user_token_b_holding_new_init(); - let expected_user_token_lp = AccountForTests::user_token_lp_holding_new_init(); - - assert_eq!(pool_post, expected_pool); - assert_eq!(vault_a_post, expected_vault_a); - assert_eq!(vault_b_post, expected_vault_b); - assert_eq!(token_lp_post, expected_token_lp); - assert_eq!(user_token_a_post, expected_user_token_a); - assert_eq!(user_token_b_post, expected_user_token_b); - assert_eq!(user_token_lp_post, expected_user_token_lp); - } - - #[test] - fn 
test_simple_amm_new_definition_uninitialized_pool() { - let mut state = state_for_amm_tests_with_new_def(); - - // Uninitialized in constructor - state.force_insert_account( - IdForTests::vault_a_id(), - AccountForTests::vault_a_init_inactive(), - ); - state.force_insert_account( - IdForTests::vault_b_id(), - AccountForTests::vault_b_init_inactive(), - ); - - let instruction = amm_core::Instruction::NewDefinition { - token_a_amount: BalanceForTests::vault_a_balance_init(), - token_b_amount: BalanceForTests::vault_b_balance_init(), - amm_program_id: Program::amm().id(), - }; - - let message = public_transaction::Message::try_new( - Program::amm().id(), - vec![ - IdForTests::pool_definition_id(), - IdForTests::vault_a_id(), - IdForTests::vault_b_id(), - IdForTests::token_lp_definition_id(), - IdForTests::user_token_a_id(), - IdForTests::user_token_b_id(), - IdForTests::user_token_lp_id(), - ], - vec![0, 0], - instruction, - ) - .unwrap(); - - let witness_set = public_transaction::WitnessSet::for_message( - &message, - &[ - &PrivateKeysForTests::user_token_a_key(), - &PrivateKeysForTests::user_token_b_key(), - ], - ); - - let tx = PublicTransaction::new(message, witness_set); - state.transition_from_public_transaction(&tx).unwrap(); - - let pool_post = state.get_account_by_id(IdForTests::pool_definition_id()); - let vault_a_post = state.get_account_by_id(IdForTests::vault_a_id()); - let vault_b_post = state.get_account_by_id(IdForTests::vault_b_id()); - let token_lp_post = state.get_account_by_id(IdForTests::token_lp_definition_id()); - let user_token_a_post = state.get_account_by_id(IdForTests::user_token_a_id()); - let user_token_b_post = state.get_account_by_id(IdForTests::user_token_b_id()); - let user_token_lp_post = state.get_account_by_id(IdForTests::user_token_lp_id()); - - let expected_pool = AccountForTests::pool_definition_new_init(); - let expected_vault_a = AccountForTests::vault_a_init(); - let expected_vault_b = AccountForTests::vault_b_init(); - let 
expected_token_lp = AccountForTests::token_lp_definition_new_init(); - let expected_user_token_a = AccountForTests::user_token_a_holding_new_init(); - let expected_user_token_b = AccountForTests::user_token_b_holding_new_init(); - let expected_user_token_lp = AccountForTests::user_token_lp_holding_new_init(); - - assert_eq!(pool_post, expected_pool); - assert_eq!(vault_a_post, expected_vault_a); - assert_eq!(vault_b_post, expected_vault_b); - assert_eq!(token_lp_post, expected_token_lp); - assert_eq!(user_token_a_post, expected_user_token_a); - assert_eq!(user_token_b_post, expected_user_token_b); - assert_eq!(user_token_lp_post, expected_user_token_lp); - } - - #[test] - fn test_simple_amm_add() { - env_logger::init(); - let mut state = state_for_amm_tests(); - - let instruction = amm_core::Instruction::AddLiquidity { - min_amount_liquidity: BalanceForTests::add_min_amount_lp(), - max_amount_to_add_token_a: BalanceForTests::add_max_amount_a(), - max_amount_to_add_token_b: BalanceForTests::add_max_amount_b(), - }; - - let message = public_transaction::Message::try_new( - Program::amm().id(), - vec![ - IdForTests::pool_definition_id(), - IdForTests::vault_a_id(), - IdForTests::vault_b_id(), - IdForTests::token_lp_definition_id(), - IdForTests::user_token_a_id(), - IdForTests::user_token_b_id(), - IdForTests::user_token_lp_id(), - ], - vec![0, 0], - instruction, - ) - .unwrap(); - - let witness_set = public_transaction::WitnessSet::for_message( - &message, - &[ - &PrivateKeysForTests::user_token_a_key(), - &PrivateKeysForTests::user_token_b_key(), - ], - ); - - let tx = PublicTransaction::new(message, witness_set); - state.transition_from_public_transaction(&tx).unwrap(); - - let pool_post = state.get_account_by_id(IdForTests::pool_definition_id()); - let vault_a_post = state.get_account_by_id(IdForTests::vault_a_id()); - let vault_b_post = state.get_account_by_id(IdForTests::vault_b_id()); - let token_lp_post = 
state.get_account_by_id(IdForTests::token_lp_definition_id()); - let user_token_a_post = state.get_account_by_id(IdForTests::user_token_a_id()); - let user_token_b_post = state.get_account_by_id(IdForTests::user_token_b_id()); - let user_token_lp_post = state.get_account_by_id(IdForTests::user_token_lp_id()); - - let expected_pool = AccountForTests::pool_definition_add(); - let expected_vault_a = AccountForTests::vault_a_add(); - let expected_vault_b = AccountForTests::vault_b_add(); - let expected_token_lp = AccountForTests::token_lp_definition_add(); - let expected_user_token_a = AccountForTests::user_token_a_holding_add(); - let expected_user_token_b = AccountForTests::user_token_b_holding_add(); - let expected_user_token_lp = AccountForTests::user_token_lp_holding_add(); - - assert_eq!(pool_post, expected_pool); - assert_eq!(vault_a_post, expected_vault_a); - assert_eq!(vault_b_post, expected_vault_b); - assert_eq!(token_lp_post, expected_token_lp); - assert_eq!(user_token_a_post, expected_user_token_a); - assert_eq!(user_token_b_post, expected_user_token_b); - assert_eq!(user_token_lp_post, expected_user_token_lp); - } - - #[test] - fn test_simple_amm_swap_1() { - let mut state = state_for_amm_tests(); - - let instruction = amm_core::Instruction::Swap { - swap_amount_in: BalanceForTests::swap_amount_in(), - min_amount_out: BalanceForTests::swap_min_amount_out(), - token_definition_id_in: IdForTests::token_b_definition_id(), - }; - - let message = public_transaction::Message::try_new( - Program::amm().id(), - vec![ - IdForTests::pool_definition_id(), - IdForTests::vault_a_id(), - IdForTests::vault_b_id(), - IdForTests::user_token_a_id(), - IdForTests::user_token_b_id(), - ], - vec![0], - instruction, - ) - .unwrap(); - - let witness_set = public_transaction::WitnessSet::for_message( - &message, - &[&PrivateKeysForTests::user_token_b_key()], - ); - - let tx = PublicTransaction::new(message, witness_set); - state.transition_from_public_transaction(&tx).unwrap(); 
- - let pool_post = state.get_account_by_id(IdForTests::pool_definition_id()); - let vault_a_post = state.get_account_by_id(IdForTests::vault_a_id()); - let vault_b_post = state.get_account_by_id(IdForTests::vault_b_id()); - let user_token_a_post = state.get_account_by_id(IdForTests::user_token_a_id()); - let user_token_b_post = state.get_account_by_id(IdForTests::user_token_b_id()); - - let expected_pool = AccountForTests::pool_definition_swap_1(); - let expected_vault_a = AccountForTests::vault_a_swap_1(); - let expected_vault_b = AccountForTests::vault_b_swap_1(); - let expected_user_token_a = AccountForTests::user_token_a_holding_swap_1(); - let expected_user_token_b = AccountForTests::user_token_b_holding_swap_1(); - - assert_eq!(pool_post, expected_pool); - assert_eq!(vault_a_post, expected_vault_a); - assert_eq!(vault_b_post, expected_vault_b); - assert_eq!(user_token_a_post, expected_user_token_a); - assert_eq!(user_token_b_post, expected_user_token_b); - } - - #[test] - fn test_simple_amm_swap_2() { - let mut state = state_for_amm_tests(); - - let instruction = amm_core::Instruction::Swap { - swap_amount_in: BalanceForTests::swap_amount_in(), - min_amount_out: BalanceForTests::swap_min_amount_out(), - token_definition_id_in: IdForTests::token_a_definition_id(), - }; - let message = public_transaction::Message::try_new( - Program::amm().id(), - vec![ - IdForTests::pool_definition_id(), - IdForTests::vault_a_id(), - IdForTests::vault_b_id(), - IdForTests::user_token_a_id(), - IdForTests::user_token_b_id(), - ], - vec![0], - instruction, - ) - .unwrap(); - - let witness_set = public_transaction::WitnessSet::for_message( - &message, - &[&PrivateKeysForTests::user_token_a_key()], - ); - - let tx = PublicTransaction::new(message, witness_set); - state.transition_from_public_transaction(&tx).unwrap(); - - let pool_post = state.get_account_by_id(IdForTests::pool_definition_id()); - let vault_a_post = state.get_account_by_id(IdForTests::vault_a_id()); - let 
vault_b_post = state.get_account_by_id(IdForTests::vault_b_id()); - let user_token_a_post = state.get_account_by_id(IdForTests::user_token_a_id()); - let user_token_b_post = state.get_account_by_id(IdForTests::user_token_b_id()); - - let expected_pool = AccountForTests::pool_definition_swap_2(); - let expected_vault_a = AccountForTests::vault_a_swap_2(); - let expected_vault_b = AccountForTests::vault_b_swap_2(); - let expected_user_token_a = AccountForTests::user_token_a_holding_swap_2(); - let expected_user_token_b = AccountForTests::user_token_b_holding_swap_2(); - - assert_eq!(pool_post, expected_pool); - assert_eq!(vault_a_post, expected_vault_a); - assert_eq!(vault_b_post, expected_vault_b); - assert_eq!(user_token_a_post, expected_user_token_a); - assert_eq!(user_token_b_post, expected_user_token_b); - } - - #[test] - fn test_execution_that_requires_authentication_of_a_program_derived_account_id_succeeds() { + fn execution_that_requires_authentication_of_a_program_derived_account_id_succeeds() { let chain_caller = Program::chain_caller(); let pda_seed = PdaSeed::new([37; 32]); let from = AccountId::from((&chain_caller.id(), &pda_seed)); @@ -3719,7 +2370,7 @@ pub mod tests { } #[test] - fn test_claiming_mechanism_within_chain_call() { + fn claiming_mechanism_within_chain_call() { // This test calls the authenticated transfer program through the chain_caller program. // The transfer is made from an initialized sender to an uninitialized recipient. 
And // it is expected that the recipient account is claimed by the authenticated transfer @@ -3759,7 +2410,7 @@ pub mod tests { chain_caller.id(), vec![to, from], // The chain_caller program permutes the account order in the chain // call - vec![0], + vec![Nonce(0)], instruction, ) .unwrap(); @@ -3776,7 +2427,7 @@ pub mod tests { #[test_case::test_case(1; "single call")] #[test_case::test_case(2; "two calls")] - fn test_private_chained_call(number_of_calls: u32) { + fn private_chained_call(number_of_calls: u32) { // Arrange let chain_caller = Program::chain_caller(); let auth_transfers = Program::authenticated_transfer_program(); @@ -3829,18 +2480,18 @@ pub mod tests { dependencies.insert(auth_transfers.id(), auth_transfers); let program_with_deps = ProgramWithDependencies::new(chain_caller, dependencies); - let from_new_nonce = 0xdeadbeef1; - let to_new_nonce = 0xdeadbeef2; + let from_new_nonce = Nonce::default().private_account_nonce_increment(&from_keys.nsk); + let to_new_nonce = Nonce::default().private_account_nonce_increment(&to_keys.nsk); let from_expected_post = Account { - balance: initial_balance - number_of_calls as u128 * amount, + balance: initial_balance - u128::from(number_of_calls) * amount, nonce: from_new_nonce, ..from_account.account.clone() }; let from_expected_commitment = Commitment::new(&from_keys.npk(), &from_expected_post); let to_expected_post = Account { - balance: number_of_calls as u128 * amount, + balance: u128::from(number_of_calls) * amount, nonce: to_new_nonce, ..to_account.account.clone() }; @@ -3851,7 +2502,6 @@ pub mod tests { vec![to_account, from_account], Program::serialize_instruction(instruction).unwrap(), vec![1, 1], - vec![from_new_nonce, to_new_nonce], vec![(from_keys.npk(), to_ss), (to_keys.npk(), from_ss)], vec![from_keys.nsk, to_keys.nsk], vec![ @@ -3893,7 +2543,7 @@ pub mod tests { } #[test] - fn test_pda_mechanism_with_pinata_token_program() { + fn pda_mechanism_with_pinata_token_program() { let pinata_token = 
Program::pinata_token(); let token = Program::token(); @@ -3948,7 +2598,7 @@ pub mod tests { state.transition_from_public_transaction(&tx).unwrap(); // Submit a solution to the pinata program to claim the prize - let solution: u128 = 989106; + let solution: u128 = 989_106; let message = public_transaction::Message::try_new( pinata_token.id(), vec![ @@ -3972,7 +2622,7 @@ pub mod tests { } #[test] - fn test_claiming_mechanism_cannot_claim_initialied_accounts() { + fn claiming_mechanism_cannot_claim_initialied_accounts() { let claimer = Program::claimer(); let mut state = V02State::new_with_genesis_accounts(&[], &[]).with_test_programs(); let account_id = AccountId::new([2; 32]); @@ -3994,13 +2644,13 @@ pub mod tests { let result = state.transition_from_public_transaction(&tx); - assert!(matches!(result, Err(NssaError::InvalidProgramBehavior))) + assert!(matches!(result, Err(NssaError::InvalidProgramBehavior))); } /// This test ensures that even if a malicious program tries to perform overflow of balances /// it will not be able to break the balance validation. 
#[test] - fn test_malicious_program_cannot_break_balance_validation() { + fn malicious_program_cannot_break_balance_validation() { let sender_key = PrivateKey::try_new([37; 32]).unwrap(); let sender_id = AccountId::from(&PublicKey::new_from_private_key(&sender_key)); let sender_init_balance: u128 = 10; @@ -4047,14 +2697,14 @@ pub mod tests { let expected_sender_post = { let mut this = state.get_account_by_id(sender_id); this.balance = sender_init_balance; - this.nonce = 0; + this.nonce = Nonce(0); this }; let expected_recipient_post = { let mut this = state.get_account_by_id(sender_id); this.balance = recipient_init_balance; - this.nonce = 0; + this.nonce = Nonce(0); this }; @@ -4063,7 +2713,7 @@ pub mod tests { } #[test] - fn test_private_authorized_uninitialized_account() { + fn private_authorized_uninitialized_account() { let mut state = V02State::new_with_genesis_accounts(&[], &[]); // Set up keys for the authorized private account @@ -4083,14 +2733,11 @@ pub mod tests { // Balance to initialize the account with (0 for a new account) let balance: u128 = 0; - let nonce = 0xdeadbeef1; - // Execute and prove the circuit with the authorized account but no commitment proof let (output, proof) = execute_and_prove( vec![authorized_account], Program::serialize_instruction(balance).unwrap(), vec![1], - vec![nonce], vec![(private_keys.npk(), shared_secret)], vec![private_keys.nsk], vec![None], @@ -4118,7 +2765,7 @@ pub mod tests { } #[test] - fn test_private_account_claimed_then_used_without_init_flag_should_fail() { + fn private_account_claimed_then_used_without_init_flag_should_fail() { let mut state = V02State::new_with_genesis_accounts(&[], &[]).with_test_programs(); // Set up keys for the private account @@ -4136,14 +2783,12 @@ pub mod tests { let epk = EphemeralPublicKey::from_scalar(esk); let balance: u128 = 0; - let nonce = 0xdeadbeef1; // Step 2: Execute claimer program to claim the account with authentication let (output, proof) = execute_and_prove( 
vec![authorized_account.clone()], Program::serialize_instruction(balance).unwrap(), vec![1], - vec![nonce], vec![(private_keys.npk(), shared_secret)], vec![private_keys.nsk], vec![None], @@ -4175,7 +2820,7 @@ pub mod tests { // Prepare new state of account let account_metadata = { - let mut acc = authorized_account.clone(); + let mut acc = authorized_account; acc.account.program_owner = Program::claimer().id(); acc }; @@ -4184,14 +2829,11 @@ pub mod tests { let esk2 = [4; 32]; let shared_secret2 = SharedSecretKey::new(&esk2, &private_keys.vpk()); - let nonce2 = 0xdeadbeef2; - // Step 3: Try to execute noop program with authentication but without initialization let res = execute_and_prove( vec![account_metadata], Program::serialize_instruction(()).unwrap(), vec![1], - vec![nonce2], vec![(private_keys.npk(), shared_secret2)], vec![private_keys.nsk], vec![None], @@ -4202,7 +2844,7 @@ pub mod tests { } #[test] - fn test_public_changer_claimer_no_data_change_no_claim_succeeds() { + fn public_changer_claimer_no_data_change_no_claim_succeeds() { let initial_data = []; let mut state = V02State::new_with_genesis_accounts(&initial_data, &[]).with_test_programs(); @@ -4226,7 +2868,7 @@ pub mod tests { } #[test] - fn test_public_changer_claimer_data_change_no_claim_fails() { + fn public_changer_claimer_data_change_no_claim_fails() { let initial_data = []; let mut state = V02State::new_with_genesis_accounts(&initial_data, &[]).with_test_programs(); @@ -4249,7 +2891,7 @@ pub mod tests { } #[test] - fn test_private_changer_claimer_no_data_change_no_claim_succeeds() { + fn private_changer_claimer_no_data_change_no_claim_succeeds() { let program = Program::changer_claimer(); let sender_keys = test_private_account_keys_1(); let private_account = @@ -4261,7 +2903,6 @@ pub mod tests { vec![private_account], Program::serialize_instruction(instruction).unwrap(), vec![1], - vec![2], vec![( sender_keys.npk(), SharedSecretKey::new(&[3; 32], &sender_keys.vpk()), @@ -4276,7 +2917,7 @@ pub 
mod tests { } #[test] - fn test_private_changer_claimer_data_change_no_claim_fails() { + fn private_changer_claimer_data_change_no_claim_fails() { let program = Program::changer_claimer(); let sender_keys = test_private_account_keys_1(); let private_account = @@ -4289,7 +2930,6 @@ pub mod tests { vec![private_account], Program::serialize_instruction(instruction).unwrap(), vec![1], - vec![2], vec![( sender_keys.npk(), SharedSecretKey::new(&[3; 32], &sender_keys.vpk()), @@ -4304,7 +2944,7 @@ pub mod tests { } #[test] - fn test_malicious_authorization_changer_should_fail_in_privacy_preserving_circuit() { + fn malicious_authorization_changer_should_fail_in_privacy_preserving_circuit() { // Arrange let malicious_program = Program::malicious_authorization_changer(); let auth_transfers = Program::authenticated_transfer_program(); @@ -4331,7 +2971,7 @@ pub mod tests { ) .with_test_programs(); - let balance_to_transfer = 10u128; + let balance_to_transfer = 10_u128; let instruction = (balance_to_transfer, auth_transfers.id()); let recipient_esk = [3; 32]; @@ -4341,14 +2981,11 @@ pub mod tests { dependencies.insert(auth_transfers.id(), auth_transfers); let program_with_deps = ProgramWithDependencies::new(malicious_program, dependencies); - let recipient_new_nonce = 0xdeadbeef1; - // Act - execute the malicious program - this should fail during proving let result = execute_and_prove( vec![sender_account, recipient_account], Program::serialize_instruction(instruction).unwrap(), vec![0, 1], - vec![recipient_new_nonce], vec![(recipient_keys.npk(), recipient)], vec![recipient_keys.nsk], vec![state.get_proof_for_commitment(&recipient_commitment)], @@ -4360,10 +2997,10 @@ pub mod tests { } #[test] - fn test_state_serialization_roundtrip() { + fn state_serialization_roundtrip() { let account_id_1 = AccountId::new([1; 32]); let account_id_2 = AccountId::new([2; 32]); - let initial_data = [(account_id_1, 100u128), (account_id_2, 151u128)]; + let initial_data = [(account_id_1, 
100_u128), (account_id_2, 151_u128)]; let state = V02State::new_with_genesis_accounts(&initial_data, &[]).with_test_programs(); let bytes = borsh::to_vec(&state).unwrap(); let state_from_bytes: V02State = borsh::from_slice(&bytes).unwrap(); diff --git a/program_methods/Cargo.toml b/program_methods/Cargo.toml index 999c1522..573fd4e6 100644 --- a/program_methods/Cargo.toml +++ b/program_methods/Cargo.toml @@ -4,6 +4,9 @@ version = "0.1.0" edition = "2024" license = { workspace = true } +[lints] +workspace = true + [build-dependencies] risc0-build.workspace = true diff --git a/program_methods/guest/Cargo.toml b/program_methods/guest/Cargo.toml index a4627b86..eabcffc8 100644 --- a/program_methods/guest/Cargo.toml +++ b/program_methods/guest/Cargo.toml @@ -4,6 +4,9 @@ version = "0.1.0" edition = "2024" license = { workspace = true } +[lints] +workspace = true + [dependencies] nssa_core.workspace = true token_core.workspace = true diff --git a/program_methods/guest/src/bin/authenticated_transfer.rs b/program_methods/guest/src/bin/authenticated_transfer.rs index 8a13173a..7835f733 100644 --- a/program_methods/guest/src/bin/authenticated_transfer.rs +++ b/program_methods/guest/src/bin/authenticated_transfer.rs @@ -7,18 +7,17 @@ use nssa_core::{ /// Initializes a default account under the ownership of this program. 
fn initialize_account(pre_state: AccountWithMetadata) -> AccountPostState { - let account_to_claim = AccountPostState::new_claimed(pre_state.account.clone()); + let account_to_claim = AccountPostState::new_claimed(pre_state.account); let is_authorized = pre_state.is_authorized; // Continue only if the account to claim has default values - if account_to_claim.account() != &Account::default() { - panic!("Account must be uninitialized"); - } + assert!( + account_to_claim.account() == &Account::default(), + "Account must be uninitialized" + ); // Continue only if the owner authorized this operation - if !is_authorized { - panic!("Account must be authorized"); - } + assert!(is_authorized, "Account must be authorized"); account_to_claim } @@ -30,27 +29,26 @@ fn transfer( balance_to_move: u128, ) -> Vec { // Continue only if the sender has authorized this operation - if !sender.is_authorized { - panic!("Sender must be authorized"); - } - - // Continue only if the sender has enough balance - if sender.account.balance < balance_to_move { - panic!("Sender has insufficient balance"); - } + assert!(sender.is_authorized, "Sender must be authorized"); // Create accounts post states, with updated balances let sender_post = { // Modify sender's balance - let mut sender_post_account = sender.account.clone(); - sender_post_account.balance -= balance_to_move; + let mut sender_post_account = sender.account; + sender_post_account.balance = sender_post_account + .balance + .checked_sub(balance_to_move) + .expect("Sender has insufficient balance"); AccountPostState::new(sender_post_account) }; let recipient_post = { // Modify recipient's balance - let mut recipient_post_account = recipient.account.clone(); - recipient_post_account.balance += balance_to_move; + let mut recipient_post_account = recipient.account; + recipient_post_account.balance = recipient_post_account + .balance + .checked_add(balance_to_move) + .expect("Recipient balance overflow"); // Claim recipient account if it has 
default program owner if recipient_post_account.program_owner == DEFAULT_PROGRAM_ID { diff --git a/program_methods/guest/src/bin/pinata.rs b/program_methods/guest/src/bin/pinata.rs index 0dc3c108..c9fc0735 100644 --- a/program_methods/guest/src/bin/pinata.rs +++ b/program_methods/guest/src/bin/pinata.rs @@ -1,5 +1,5 @@ use nssa_core::program::{AccountPostState, ProgramInput, read_nssa_inputs, write_nssa_outputs}; -use risc0_zkvm::sha::{Impl, Sha256}; +use risc0_zkvm::sha::{Impl, Sha256 as _}; const PRIZE: u128 = 150; @@ -28,7 +28,7 @@ impl Challenge { bytes[..32].copy_from_slice(&self.seed); bytes[32..].copy_from_slice(&solution.to_le_bytes()); let digest: [u8; 32] = Impl::hash_bytes(&bytes).as_bytes().try_into().unwrap(); - let difficulty = self.difficulty as usize; + let difficulty = usize::from(self.difficulty); digest[..difficulty].iter().all(|&b| b == 0) } @@ -40,7 +40,7 @@ impl Challenge { } } -/// A pinata program +/// A pinata program. fn main() { // Read input accounts. // It is expected to receive only two accounts: [pinata_account, winner_account] @@ -52,9 +52,8 @@ fn main() { instruction_words, ) = read_nssa_inputs::(); - let [pinata, winner] = match pre_states.try_into() { - Ok(array) => array, - Err(_) => return, + let Ok([pinata, winner]) = <[_; 2]>::try_from(pre_states) else { + return; }; let data = Challenge::new(&pinata.account.data); @@ -65,13 +64,19 @@ fn main() { let mut pinata_post = pinata.account.clone(); let mut winner_post = winner.account.clone(); - pinata_post.balance -= PRIZE; + pinata_post.balance = pinata_post + .balance + .checked_sub(PRIZE) + .expect("Not enough balance in the pinata"); pinata_post.data = data .next_data() .to_vec() .try_into() .expect("33 bytes should fit into Data"); - winner_post.balance += PRIZE; + winner_post.balance = winner_post + .balance + .checked_add(PRIZE) + .expect("Overflow when adding prize to winner"); write_nssa_outputs( instruction_words, diff --git a/program_methods/guest/src/bin/pinata_token.rs 
b/program_methods/guest/src/bin/pinata_token.rs index 188597cb..f1bbdc87 100644 --- a/program_methods/guest/src/bin/pinata_token.rs +++ b/program_methods/guest/src/bin/pinata_token.rs @@ -5,7 +5,7 @@ use nssa_core::{ write_nssa_outputs_with_chained_call, }, }; -use risc0_zkvm::sha::{Impl, Sha256}; +use risc0_zkvm::sha::{Impl, Sha256 as _}; const PRIZE: u128 = 150; @@ -34,7 +34,7 @@ impl Challenge { bytes[..32].copy_from_slice(&self.seed); bytes[32..].copy_from_slice(&solution.to_le_bytes()); let digest: [u8; 32] = Impl::hash_bytes(&bytes).as_bytes().try_into().unwrap(); - let difficulty = self.difficulty as usize; + let difficulty = usize::from(self.difficulty); digest[..difficulty].iter().all(|&b| b == 0) } @@ -46,7 +46,7 @@ impl Challenge { } } -/// A pinata program +/// A pinata program. fn main() { // Read input accounts. // It is expected to receive three accounts: [pinata_definition, pinata_token_holding, @@ -59,13 +59,15 @@ fn main() { instruction_words, ) = read_nssa_inputs::(); - let [ - pinata_definition, - pinata_token_holding, - winner_token_holding, - ] = match pre_states.try_into() { - Ok(array) => array, - Err(_) => return, + let Ok( + [ + pinata_definition, + pinata_token_holding, + winner_token_holding, + ], + ) = <[_; 3]>::try_from(pre_states) + else { + return; }; let data = Challenge::new(&pinata_definition.account.data); diff --git a/program_methods/guest/src/bin/privacy_preserving_circuit.rs b/program_methods/guest/src/bin/privacy_preserving_circuit.rs index 4bbd895f..99782d7f 100644 --- a/program_methods/guest/src/bin/privacy_preserving_circuit.rs +++ b/program_methods/guest/src/bin/privacy_preserving_circuit.rs @@ -16,31 +16,6 @@ use nssa_core::{ }; use risc0_zkvm::{guest::env, serde::to_vec}; -fn main() { - let PrivacyPreservingCircuitInput { - program_outputs, - visibility_mask, - private_account_nonces, - private_account_keys, - private_account_nsks, - private_account_membership_proofs, - program_id, - } = env::read(); - - let 
execution_state = ExecutionState::derive_from_outputs(program_id, program_outputs); - - let output = compute_circuit_output( - execution_state, - &visibility_mask, - &private_account_nonces, - &private_account_keys, - &private_account_nsks, - &private_account_membership_proofs, - ); - - env::commit(&output); -} - /// State of the involved accounts before and after program execution. struct ExecutionState { pre_states: Vec, @@ -62,7 +37,7 @@ impl ExecutionState { }; let mut chained_calls = VecDeque::from_iter([(initial_call, None)]); - let mut execution_state = ExecutionState { + let mut execution_state = Self { pre_states: Vec::new(), post_states: HashMap::new(), }; @@ -113,11 +88,13 @@ impl ExecutionState { ); execution_state.validate_and_sync_states( chained_call.program_id, - authorized_pdas, + &authorized_pdas, program_output.pre_states, program_output.post_states, ); - chain_calls_counter += 1; + chain_calls_counter = chain_calls_counter.checked_add(1).expect( + "Chain calls counter should not overflow as it checked before incrementing", + ); } assert!( @@ -153,7 +130,7 @@ impl ExecutionState { fn validate_and_sync_states( &mut self, program_id: ProgramId, - authorized_pdas: HashSet, + authorized_pdas: &HashSet, pre_states: Vec, post_states: Vec, ) { @@ -173,12 +150,12 @@ impl ExecutionState { .pre_states .iter() .find(|acc| acc.account_id == pre_account_id) - .map(|acc| acc.is_authorized) - .unwrap_or_else(|| { - panic!( + .map_or_else( + || panic!( "Pre state must exist in execution state for account {pre_account_id:?}", - ) - }); + ), + |acc| acc.is_authorized + ); let is_authorized = previous_is_authorized || authorized_pdas.contains(&pre_account_id); @@ -223,7 +200,6 @@ impl ExecutionState { fn compute_circuit_output( execution_state: ExecutionState, visibility_mask: &[u8], - private_account_nonces: &[Nonce], private_account_keys: &[(NullifierPublicKey, SharedSecretKey)], private_account_nsks: &[NullifierSecretKey], private_account_membership_proofs: 
&[Option], @@ -243,16 +219,15 @@ fn compute_circuit_output( "Invalid visibility mask length" ); - let mut private_nonces_iter = private_account_nonces.iter(); let mut private_keys_iter = private_account_keys.iter(); let mut private_nsks_iter = private_account_nsks.iter(); let mut private_membership_proofs_iter = private_account_membership_proofs.iter(); let mut output_index = 0; - for (visibility_mask, (pre_state, post_state)) in + for (account_visibility_mask, (pre_state, post_state)) in visibility_mask.iter().copied().zip(states_iter) { - match visibility_mask { + match account_visibility_mask { 0 => { // Public account output.public_pre_states.push(pre_state); @@ -269,7 +244,7 @@ fn compute_circuit_output( "AccountId mismatch" ); - let new_nullifier = if visibility_mask == 1 { + let (new_nullifier, new_nonce) = if account_visibility_mask == 1 { // Private account with authentication let Some(nsk) = private_nsks_iter.next() else { @@ -293,12 +268,16 @@ fn compute_circuit_output( panic!("Missing membership proof"); }; - compute_nullifier_and_set_digest( + let new_nullifier = compute_nullifier_and_set_digest( membership_proof_opt.as_ref(), &pre_state.account, npk, nsk, - ) + ); + + let new_nonce = pre_state.account.nonce.private_account_nonce_increment(nsk); + + (new_nullifier, new_nonce) } else { // Private account without authentication @@ -323,16 +302,16 @@ fn compute_circuit_output( ); let nullifier = Nullifier::for_account_initialization(npk); - (nullifier, DUMMY_COMMITMENT_HASH) + + let new_nonce = Nonce::private_account_nonce_init(npk); + + ((nullifier, DUMMY_COMMITMENT_HASH), new_nonce) }; output.new_nullifiers.push(new_nullifier); // Update post-state with new nonce let mut post_with_updated_nonce = post_state; - let Some(new_nonce) = private_nonces_iter.next() else { - panic!("Missing private account nonce"); - }; - post_with_updated_nonce.nonce = *new_nonce; + post_with_updated_nonce.nonce = new_nonce; // Compute commitment let commitment_post = 
Commitment::new(npk, &post_with_updated_nonce); @@ -347,14 +326,14 @@ fn compute_circuit_output( output.new_commitments.push(commitment_post); output.ciphertexts.push(encrypted_account); - output_index += 1; + output_index = output_index + .checked_add(1) + .unwrap_or_else(|| panic!("Too many private accounts, output index overflow")); } _ => panic!("Invalid visibility mask value"), } } - assert!(private_nonces_iter.next().is_none(), "Too many nonces"); - assert!( private_keys_iter.next().is_none(), "Too many private account keys" @@ -379,18 +358,8 @@ fn compute_nullifier_and_set_digest( npk: &NullifierPublicKey, nsk: &NullifierSecretKey, ) -> (Nullifier, CommitmentSetDigest) { - membership_proof_opt - .as_ref() - .map(|membership_proof| { - // Compute commitment set digest associated with provided auth path - let commitment_pre = Commitment::new(npk, pre_account); - let set_digest = compute_digest_for_path(&commitment_pre, membership_proof); - - // Compute update nullifier - let nullifier = Nullifier::for_account_update(&commitment_pre, nsk); - (nullifier, set_digest) - }) - .unwrap_or_else(|| { + membership_proof_opt.as_ref().map_or_else( + || { assert_eq!( *pre_account, Account::default(), @@ -400,5 +369,38 @@ fn compute_nullifier_and_set_digest( // Compute initialization nullifier let nullifier = Nullifier::for_account_initialization(npk); (nullifier, DUMMY_COMMITMENT_HASH) - }) + }, + |membership_proof| { + // Compute commitment set digest associated with provided auth path + let commitment_pre = Commitment::new(npk, pre_account); + let set_digest = compute_digest_for_path(&commitment_pre, membership_proof); + + // Compute update nullifier + let nullifier = Nullifier::for_account_update(&commitment_pre, nsk); + (nullifier, set_digest) + }, + ) +} + +fn main() { + let PrivacyPreservingCircuitInput { + program_outputs, + visibility_mask, + private_account_keys, + private_account_nsks, + private_account_membership_proofs, + program_id, + } = env::read(); + + let 
execution_state = ExecutionState::derive_from_outputs(program_id, program_outputs); + + let output = compute_circuit_output( + execution_state, + &visibility_mask, + &private_account_keys, + &private_account_nsks, + &private_account_membership_proofs, + ); + + env::commit(&output); } diff --git a/programs/amm/Cargo.toml b/programs/amm/Cargo.toml index 54df6763..449d5dcc 100644 --- a/programs/amm/Cargo.toml +++ b/programs/amm/Cargo.toml @@ -4,7 +4,14 @@ version = "0.1.0" edition = "2024" license = { workspace = true } +[lints] +workspace = true + [dependencies] +nssa = { workspace = true, optional = true, features = ["test-utils"], default-features = true } nssa_core.workspace = true token_core.workspace = true amm_core.workspace = true + +[features] +nssa = ["dep:nssa"] \ No newline at end of file diff --git a/programs/amm/core/Cargo.toml b/programs/amm/core/Cargo.toml index 935b60c7..b9c59dc4 100644 --- a/programs/amm/core/Cargo.toml +++ b/programs/amm/core/Cargo.toml @@ -4,6 +4,9 @@ version = "0.1.0" edition = "2024" license = { workspace = true } +[lints] +workspace = true + [dependencies] nssa_core.workspace = true serde.workspace = true diff --git a/programs/amm/core/src/lib.rs b/programs/amm/core/src/lib.rs index f9d20dd3..85efd00d 100644 --- a/programs/amm/core/src/lib.rs +++ b/programs/amm/core/src/lib.rs @@ -26,7 +26,7 @@ pub enum Instruction { amm_program_id: ProgramId, }, - /// Adds liquidity to the Pool + /// Adds liquidity to the Pool. /// /// Required accounts: /// - AMM Pool (initialized) @@ -42,7 +42,7 @@ pub enum Instruction { max_amount_to_add_token_b: u128, }, - /// Removes liquidity from the Pool + /// Removes liquidity from the Pool. /// /// Required accounts: /// - AMM Pool (initialized) @@ -85,11 +85,11 @@ pub struct PoolDefinition { pub liquidity_pool_supply: u128, pub reserve_a: u128, pub reserve_b: u128, - /// Fees are currently not used + /// Fees are currently not used. 
pub fees: u128, /// A pool becomes inactive (active = false) /// once all of its liquidity has been removed (e.g., reserves are emptied and - /// liquidity_pool_supply = 0) + /// `liquidity_pool_supply` = 0). pub active: bool, } @@ -97,7 +97,7 @@ impl TryFrom<&Data> for PoolDefinition { type Error = std::io::Error; fn try_from(data: &Data) -> Result { - PoolDefinition::try_from_slice(data.as_ref()) + Self::try_from_slice(data.as_ref()) } } @@ -109,10 +109,11 @@ impl From<&PoolDefinition> for Data { BorshSerialize::serialize(definition, &mut data) .expect("Serialization to Vec should not fail"); - Data::try_from(data).expect("Token definition encoded data should fit into Data") + Self::try_from(data).expect("Token definition encoded data should fit into Data") } } +#[must_use] pub fn compute_pool_pda( amm_program_id: ProgramId, definition_token_a_id: AccountId, @@ -124,11 +125,12 @@ pub fn compute_pool_pda( )) } +#[must_use] pub fn compute_pool_pda_seed( definition_token_a_id: AccountId, definition_token_b_id: AccountId, ) -> PdaSeed { - use risc0_zkvm::sha::{Impl, Sha256}; + use risc0_zkvm::sha::{Impl, Sha256 as _}; let (token_1, token_2) = match definition_token_a_id .value() @@ -151,6 +153,7 @@ pub fn compute_pool_pda_seed( ) } +#[must_use] pub fn compute_vault_pda( amm_program_id: ProgramId, pool_id: AccountId, @@ -162,8 +165,9 @@ pub fn compute_vault_pda( )) } +#[must_use] pub fn compute_vault_pda_seed(pool_id: AccountId, definition_token_id: AccountId) -> PdaSeed { - use risc0_zkvm::sha::{Impl, Sha256}; + use risc0_zkvm::sha::{Impl, Sha256 as _}; let mut bytes = [0; 64]; bytes[0..32].copy_from_slice(&pool_id.to_bytes()); @@ -177,12 +181,14 @@ pub fn compute_vault_pda_seed(pool_id: AccountId, definition_token_id: AccountId ) } +#[must_use] pub fn compute_liquidity_token_pda(amm_program_id: ProgramId, pool_id: AccountId) -> AccountId { AccountId::from((&amm_program_id, &compute_liquidity_token_pda_seed(pool_id))) } +#[must_use] pub fn 
compute_liquidity_token_pda_seed(pool_id: AccountId) -> PdaSeed { - use risc0_zkvm::sha::{Impl, Sha256}; + use risc0_zkvm::sha::{Impl, Sha256 as _}; let mut bytes = [0; 64]; bytes[0..32].copy_from_slice(&pool_id.to_bytes()); diff --git a/programs/amm/src/add.rs b/programs/amm/src/add.rs index 28b4661c..d057507b 100644 --- a/programs/amm/src/add.rs +++ b/programs/amm/src/add.rs @@ -7,6 +7,7 @@ use nssa_core::{ }; #[expect(clippy::too_many_arguments, reason = "TODO: Fix later")] +#[must_use] pub fn add_liquidity( pool: AccountWithMetadata, vault_a: AccountWithMetadata, @@ -123,7 +124,7 @@ pub fn add_liquidity( ); // 5. Update pool account - let mut pool_post = pool.account.clone(); + let mut pool_post = pool.account; let pool_post_definition = PoolDefinition { liquidity_pool_supply: pool_def_data.liquidity_pool_supply + delta_lp, reserve_a: pool_def_data.reserve_a + actual_amount_a, @@ -155,7 +156,7 @@ pub fn add_liquidity( pool_definition_lp_auth.is_authorized = true; let call_token_lp = ChainedCall::new( token_program_id, - vec![pool_definition_lp_auth.clone(), user_holding_lp.clone()], + vec![pool_definition_lp_auth, user_holding_lp.clone()], &token_core::Instruction::Mint { amount_to_mint: delta_lp, }, @@ -166,12 +167,12 @@ pub fn add_liquidity( let post_states = vec![ AccountPostState::new(pool_post), - AccountPostState::new(vault_a.account.clone()), - AccountPostState::new(vault_b.account.clone()), - AccountPostState::new(pool_definition_lp.account.clone()), - AccountPostState::new(user_holding_a.account.clone()), - AccountPostState::new(user_holding_b.account.clone()), - AccountPostState::new(user_holding_lp.account.clone()), + AccountPostState::new(vault_a.account), + AccountPostState::new(vault_b.account), + AccountPostState::new(pool_definition_lp.account), + AccountPostState::new(user_holding_a.account), + AccountPostState::new(user_holding_b.account), + AccountPostState::new(user_holding_lp.account), ]; (post_states, chained_calls) diff --git 
a/programs/amm/src/lib.rs b/programs/amm/src/lib.rs index e50c738e..744f947c 100644 --- a/programs/amm/src/lib.rs +++ b/programs/amm/src/lib.rs @@ -1,5 +1,12 @@ //! The AMM Program implementation. +#![expect( + clippy::arithmetic_side_effects, + clippy::integer_division, + clippy::integer_division_remainder_used, + reason = "TODO: Fix later" +)] + pub use amm_core as core; pub mod add; @@ -7,4 +14,5 @@ pub mod new_definition; pub mod remove; pub mod swap; +#[cfg(test)] mod tests; diff --git a/programs/amm/src/new_definition.rs b/programs/amm/src/new_definition.rs index af54adce..366eb747 100644 --- a/programs/amm/src/new_definition.rs +++ b/programs/amm/src/new_definition.rs @@ -10,6 +10,7 @@ use nssa_core::{ }; #[expect(clippy::too_many_arguments, reason = "TODO: Fix later")] +#[must_use] pub fn new_definition( pool: AccountWithMetadata, vault_a: AccountWithMetadata, @@ -79,8 +80,20 @@ pub fn new_definition( // LP Token minting calculation let initial_lp = (token_a_amount.get() * token_b_amount.get()).isqrt(); + // Chain call for liquidity token (TokenLP definition -> User LP Holding) + let instruction = if pool.account == Account::default() { + token_core::Instruction::NewFungibleDefinition { + name: String::from("LP Token"), + total_supply: initial_lp, + } + } else { + token_core::Instruction::Mint { + amount_to_mint: initial_lp, + } + }; + // Update pool account - let mut pool_post = pool.account.clone(); + let mut pool_post = pool.account; let pool_post_definition = PoolDefinition { definition_token_a_id, definition_token_b_id, @@ -90,16 +103,12 @@ pub fn new_definition( liquidity_pool_supply: initial_lp, reserve_a: token_a_amount.into(), reserve_b: token_b_amount.into(), - fees: 0u128, // TODO: we assume all fees are 0 for now. + fees: 0_u128, // TODO: we assume all fees are 0 for now. 
active: true, }; pool_post.data = Data::from(&pool_post_definition); - let pool_post: AccountPostState = if pool.account == Account::default() { - AccountPostState::new_claimed(pool_post.clone()) - } else { - AccountPostState::new(pool_post.clone()) - }; + let pool_post = AccountPostState::new_claimed_if_default(pool_post); let token_program_id = user_holding_a.account.program_owner; @@ -120,24 +129,12 @@ pub fn new_definition( }, ); - // Chain call for liquidity token (TokenLP definition -> User LP Holding) - let instruction = if pool.account == Account::default() { - token_core::Instruction::NewFungibleDefinition { - name: String::from("LP Token"), - total_supply: initial_lp, - } - } else { - token_core::Instruction::Mint { - amount_to_mint: initial_lp, - } - }; - let mut pool_lp_auth = pool_definition_lp.clone(); pool_lp_auth.is_authorized = true; let call_token_lp = ChainedCall::new( token_program_id, - vec![pool_lp_auth.clone(), user_holding_lp.clone()], + vec![pool_lp_auth, user_holding_lp.clone()], &instruction, ) .with_pda_seeds(vec![compute_liquidity_token_pda_seed(pool.account_id)]); @@ -145,14 +142,14 @@ pub fn new_definition( let chained_calls = vec![call_token_lp, call_token_b, call_token_a]; let post_states = vec![ - pool_post.clone(), - AccountPostState::new(vault_a.account.clone()), - AccountPostState::new(vault_b.account.clone()), - AccountPostState::new(pool_definition_lp.account.clone()), - AccountPostState::new(user_holding_a.account.clone()), - AccountPostState::new(user_holding_b.account.clone()), - AccountPostState::new(user_holding_lp.account.clone()), + pool_post, + AccountPostState::new(vault_a.account), + AccountPostState::new(vault_b.account), + AccountPostState::new(pool_definition_lp.account), + AccountPostState::new(user_holding_a.account), + AccountPostState::new(user_holding_b.account), + AccountPostState::new(user_holding_lp.account), ]; - (post_states.clone(), chained_calls) + (post_states, chained_calls) } diff --git 
a/programs/amm/src/remove.rs b/programs/amm/src/remove.rs index 370b3609..7844f4bd 100644 --- a/programs/amm/src/remove.rs +++ b/programs/amm/src/remove.rs @@ -7,6 +7,7 @@ use nssa_core::{ }; #[expect(clippy::too_many_arguments, reason = "TODO: Fix later")] +#[must_use] pub fn remove_liquidity( pool: AccountWithMetadata, vault_a: AccountWithMetadata, @@ -101,13 +102,13 @@ pub fn remove_liquidity( let active: bool = pool_def_data.liquidity_pool_supply - delta_lp != 0; // 5. Update pool account - let mut pool_post = pool.account.clone(); + let mut pool_post = pool.account; let pool_post_definition = PoolDefinition { liquidity_pool_supply: pool_def_data.liquidity_pool_supply - delta_lp, reserve_a: pool_def_data.reserve_a - withdraw_amount_a, reserve_b: pool_def_data.reserve_b - withdraw_amount_b, active, - ..pool_def_data.clone() + ..pool_def_data }; pool_post.data = Data::from(&pool_post_definition); @@ -153,13 +154,13 @@ pub fn remove_liquidity( let chained_calls = vec![call_token_lp, call_token_b, call_token_a]; let post_states = vec![ - AccountPostState::new(pool_post.clone()), - AccountPostState::new(vault_a.account.clone()), - AccountPostState::new(vault_b.account.clone()), - AccountPostState::new(pool_definition_lp.account.clone()), - AccountPostState::new(user_holding_a.account.clone()), - AccountPostState::new(user_holding_b.account.clone()), - AccountPostState::new(user_holding_lp.account.clone()), + AccountPostState::new(pool_post), + AccountPostState::new(vault_a.account), + AccountPostState::new(vault_b.account), + AccountPostState::new(pool_definition_lp.account), + AccountPostState::new(user_holding_a.account), + AccountPostState::new(user_holding_b.account), + AccountPostState::new(user_holding_lp.account), ]; (post_states, chained_calls) diff --git a/programs/amm/src/swap.rs b/programs/amm/src/swap.rs index aa02ac24..cb64f5eb 100644 --- a/programs/amm/src/swap.rs +++ b/programs/amm/src/swap.rs @@ -5,6 +5,7 @@ use nssa_core::{ }; 
#[expect(clippy::too_many_arguments, reason = "TODO: Fix later")] +#[must_use] pub fn swap( pool: AccountWithMetadata, vault_a: AccountWithMetadata, @@ -95,7 +96,7 @@ pub fn swap( }; // Update pool account - let mut pool_post = pool.account.clone(); + let mut pool_post = pool.account; let pool_post_definition = PoolDefinition { reserve_a: pool_def_data.reserve_a + deposit_a - withdraw_a, reserve_b: pool_def_data.reserve_b + deposit_b - withdraw_b, @@ -105,11 +106,11 @@ pub fn swap( pool_post.data = Data::from(&pool_post_definition); let post_states = vec![ - AccountPostState::new(pool_post.clone()), - AccountPostState::new(vault_a.account.clone()), - AccountPostState::new(vault_b.account.clone()), - AccountPostState::new(user_holding_a.account.clone()), - AccountPostState::new(user_holding_b.account.clone()), + AccountPostState::new(pool_post), + AccountPostState::new(vault_a.account), + AccountPostState::new(vault_b.account), + AccountPostState::new(user_holding_a.account), + AccountPostState::new(user_holding_b.account), ]; (post_states, chained_calls) @@ -151,7 +152,7 @@ fn swap_logic( }, )); - let mut vault_withdraw = vault_withdraw.clone(); + let mut vault_withdraw = vault_withdraw; vault_withdraw.is_authorized = true; let pda_seed = compute_vault_pda_seed( diff --git a/programs/amm/src/tests.rs b/programs/amm/src/tests.rs index 203e3284..e1e8698d 100644 --- a/programs/amm/src/tests.rs +++ b/programs/amm/src/tests.rs @@ -1,11 +1,13 @@ -#![cfg(test)] - use std::num::NonZero; use amm_core::{ PoolDefinition, compute_liquidity_token_pda, compute_liquidity_token_pda_seed, compute_pool_pda, compute_vault_pda, compute_vault_pda_seed, }; +#[cfg(feature = "nssa")] +use nssa::{ + PrivateKey, PublicKey, PublicTransaction, V02State, program::Program, public_transaction, +}; use nssa_core::{ account::{Account, AccountId, AccountWithMetadata, Data}, program::{ChainedCall, ProgramId}, @@ -22,7 +24,30 @@ const AMM_PROGRAM_ID: ProgramId = [42; 8]; struct BalanceForTests; 
struct ChainedCallForTests; struct IdForTests; -struct AccountForTests; +struct AccountWithMetadataForTests; +#[cfg(feature = "nssa")] +struct PrivateKeysForTests; +#[cfg(feature = "nssa")] +struct IdForExeTests; +#[cfg(feature = "nssa")] +struct BalanceForExeTests; +#[cfg(feature = "nssa")] +struct AccountsForExeTests; + +#[cfg(feature = "nssa")] +impl PrivateKeysForTests { + fn user_token_a_key() -> PrivateKey { + PrivateKey::try_new([31; 32]).expect("Keys constructor expects valid private key") + } + + fn user_token_b_key() -> PrivateKey { + PrivateKey::try_new([32; 32]).expect("Keys constructor expects valid private key") + } + + fn user_token_lp_key() -> PrivateKey { + PrivateKey::try_new([33; 32]).expect("Keys constructor expects valid private key") + } +} impl BalanceForTests { fn vault_a_reserve_init() -> u128 { @@ -107,7 +132,7 @@ impl BalanceForTests { fn lp_supply_init() -> u128 { // sqrt(vault_a_reserve_init * vault_b_reserve_init) = sqrt(1000 * 500) = 707 - (BalanceForTests::vault_a_reserve_init() * BalanceForTests::vault_b_reserve_init()).isqrt() + (Self::vault_a_reserve_init() * Self::vault_b_reserve_init()).isqrt() } fn vault_a_swap_test_1() -> u128 { @@ -160,8 +185,8 @@ impl ChainedCallForTests { ChainedCall::new( TOKEN_PROGRAM_ID, vec![ - AccountForTests::user_holding_a(), - AccountForTests::vault_a_init(), + AccountWithMetadataForTests::user_holding_a(), + AccountWithMetadataForTests::vault_a_init(), ], &token_core::Instruction::Transfer { amount_to_transfer: BalanceForTests::add_max_amount_a(), @@ -172,12 +197,12 @@ impl ChainedCallForTests { fn cc_swap_token_b_test_1() -> ChainedCall { let swap_amount: u128 = 166; - let mut vault_b_auth = AccountForTests::vault_b_init(); + let mut vault_b_auth = AccountWithMetadataForTests::vault_b_init(); vault_b_auth.is_authorized = true; ChainedCall::new( TOKEN_PROGRAM_ID, - vec![vault_b_auth, AccountForTests::user_holding_b()], + vec![vault_b_auth, AccountWithMetadataForTests::user_holding_b()], 
&token_core::Instruction::Transfer { amount_to_transfer: swap_amount, }, @@ -191,12 +216,12 @@ impl ChainedCallForTests { fn cc_swap_token_a_test_2() -> ChainedCall { let swap_amount: u128 = 285; - let mut vault_a_auth = AccountForTests::vault_a_init(); + let mut vault_a_auth = AccountWithMetadataForTests::vault_a_init(); vault_a_auth.is_authorized = true; ChainedCall::new( TOKEN_PROGRAM_ID, - vec![vault_a_auth, AccountForTests::user_holding_a()], + vec![vault_a_auth, AccountWithMetadataForTests::user_holding_a()], &token_core::Instruction::Transfer { amount_to_transfer: swap_amount, }, @@ -211,8 +236,8 @@ impl ChainedCallForTests { ChainedCall::new( TOKEN_PROGRAM_ID, vec![ - AccountForTests::user_holding_b(), - AccountForTests::vault_b_init(), + AccountWithMetadataForTests::user_holding_b(), + AccountWithMetadataForTests::vault_b_init(), ], &token_core::Instruction::Transfer { amount_to_transfer: BalanceForTests::add_max_amount_b(), @@ -224,8 +249,8 @@ impl ChainedCallForTests { ChainedCall::new( TOKEN_PROGRAM_ID, vec![ - AccountForTests::user_holding_a(), - AccountForTests::vault_a_init(), + AccountWithMetadataForTests::user_holding_a(), + AccountWithMetadataForTests::vault_a_init(), ], &token_core::Instruction::Transfer { amount_to_transfer: BalanceForTests::add_successful_amount_a(), @@ -237,8 +262,8 @@ impl ChainedCallForTests { ChainedCall::new( TOKEN_PROGRAM_ID, vec![ - AccountForTests::user_holding_b(), - AccountForTests::vault_b_init(), + AccountWithMetadataForTests::user_holding_b(), + AccountWithMetadataForTests::vault_b_init(), ], &token_core::Instruction::Transfer { amount_to_transfer: BalanceForTests::add_successful_amount_b(), @@ -247,12 +272,15 @@ impl ChainedCallForTests { } fn cc_add_pool_lp() -> ChainedCall { - let mut pool_lp_auth = AccountForTests::pool_lp_init(); + let mut pool_lp_auth = AccountWithMetadataForTests::pool_lp_init(); pool_lp_auth.is_authorized = true; ChainedCall::new( TOKEN_PROGRAM_ID, - vec![pool_lp_auth, 
AccountForTests::user_holding_lp_init()], + vec![ + pool_lp_auth, + AccountWithMetadataForTests::user_holding_lp_init(), + ], &token_core::Instruction::Mint { amount_to_mint: 282, }, @@ -263,12 +291,12 @@ impl ChainedCallForTests { } fn cc_remove_token_a() -> ChainedCall { - let mut vault_a_auth = AccountForTests::vault_a_init(); + let mut vault_a_auth = AccountWithMetadataForTests::vault_a_init(); vault_a_auth.is_authorized = true; ChainedCall::new( TOKEN_PROGRAM_ID, - vec![vault_a_auth, AccountForTests::user_holding_a()], + vec![vault_a_auth, AccountWithMetadataForTests::user_holding_a()], &token_core::Instruction::Transfer { amount_to_transfer: BalanceForTests::remove_actual_a_successful(), }, @@ -280,12 +308,12 @@ impl ChainedCallForTests { } fn cc_remove_token_b() -> ChainedCall { - let mut vault_b_auth = AccountForTests::vault_b_init(); + let mut vault_b_auth = AccountWithMetadataForTests::vault_b_init(); vault_b_auth.is_authorized = true; ChainedCall::new( TOKEN_PROGRAM_ID, - vec![vault_b_auth, AccountForTests::user_holding_b()], + vec![vault_b_auth, AccountWithMetadataForTests::user_holding_b()], &token_core::Instruction::Transfer { amount_to_transfer: 70, }, @@ -297,12 +325,15 @@ impl ChainedCallForTests { } fn cc_remove_pool_lp() -> ChainedCall { - let mut pool_lp_auth = AccountForTests::pool_lp_init(); + let mut pool_lp_auth = AccountWithMetadataForTests::pool_lp_init(); pool_lp_auth.is_authorized = true; ChainedCall::new( TOKEN_PROGRAM_ID, - vec![pool_lp_auth, AccountForTests::user_holding_lp_init()], + vec![ + pool_lp_auth, + AccountWithMetadataForTests::user_holding_lp_init(), + ], &token_core::Instruction::Burn { amount_to_burn: BalanceForTests::remove_amount_lp(), }, @@ -316,8 +347,8 @@ impl ChainedCallForTests { ChainedCall::new( TOKEN_PROGRAM_ID, vec![ - AccountForTests::user_holding_a(), - AccountForTests::vault_a_init(), + AccountWithMetadataForTests::user_holding_a(), + AccountWithMetadataForTests::vault_a_init(), ], 
&token_core::Instruction::Transfer { amount_to_transfer: BalanceForTests::add_successful_amount_a(), @@ -329,8 +360,8 @@ impl ChainedCallForTests { ChainedCall::new( TOKEN_PROGRAM_ID, vec![ - AccountForTests::user_holding_b(), - AccountForTests::vault_b_init(), + AccountWithMetadataForTests::user_holding_b(), + AccountWithMetadataForTests::vault_b_init(), ], &token_core::Instruction::Transfer { amount_to_transfer: BalanceForTests::add_successful_amount_b(), @@ -342,8 +373,8 @@ impl ChainedCallForTests { ChainedCall::new( TOKEN_PROGRAM_ID, vec![ - AccountForTests::pool_lp_init(), - AccountForTests::user_holding_lp_uninit(), + AccountWithMetadataForTests::pool_lp_init(), + AccountWithMetadataForTests::user_holding_lp_uninit(), ], &token_core::Instruction::Mint { amount_to_mint: BalanceForTests::lp_supply_init(), @@ -365,7 +396,7 @@ impl IdForTests { } fn token_lp_definition_id() -> AccountId { - compute_liquidity_token_pda(AMM_PROGRAM_ID, IdForTests::pool_definition_id()) + compute_liquidity_token_pda(AMM_PROGRAM_ID, Self::pool_definition_id()) } fn user_token_a_id() -> AccountId { @@ -383,39 +414,39 @@ impl IdForTests { fn pool_definition_id() -> AccountId { compute_pool_pda( AMM_PROGRAM_ID, - IdForTests::token_a_definition_id(), - IdForTests::token_b_definition_id(), + Self::token_a_definition_id(), + Self::token_b_definition_id(), ) } fn vault_a_id() -> AccountId { compute_vault_pda( AMM_PROGRAM_ID, - IdForTests::pool_definition_id(), - IdForTests::token_a_definition_id(), + Self::pool_definition_id(), + Self::token_a_definition_id(), ) } fn vault_b_id() -> AccountId { compute_vault_pda( AMM_PROGRAM_ID, - IdForTests::pool_definition_id(), - IdForTests::token_b_definition_id(), + Self::pool_definition_id(), + Self::token_b_definition_id(), ) } } -impl AccountForTests { +impl AccountWithMetadataForTests { fn user_holding_a() -> AccountWithMetadata { AccountWithMetadata { account: Account { program_owner: TOKEN_PROGRAM_ID, - balance: 0u128, + balance: 0_u128, data: 
Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_a_definition_id(), balance: BalanceForTests::user_token_a_balance(), }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: true, account_id: IdForTests::user_token_a_id(), @@ -426,12 +457,12 @@ impl AccountForTests { AccountWithMetadata { account: Account { program_owner: TOKEN_PROGRAM_ID, - balance: 0u128, + balance: 0_u128, data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_b_definition_id(), balance: BalanceForTests::user_token_b_balance(), }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: true, account_id: IdForTests::user_token_b_id(), @@ -442,12 +473,12 @@ impl AccountForTests { AccountWithMetadata { account: Account { program_owner: TOKEN_PROGRAM_ID, - balance: 0u128, + balance: 0_u128, data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_a_definition_id(), balance: BalanceForTests::vault_a_reserve_init(), }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: true, account_id: IdForTests::vault_a_id(), @@ -458,12 +489,12 @@ impl AccountForTests { AccountWithMetadata { account: Account { program_owner: TOKEN_PROGRAM_ID, - balance: 0u128, + balance: 0_u128, data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_b_definition_id(), balance: BalanceForTests::vault_b_reserve_init(), }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: true, account_id: IdForTests::vault_b_id(), @@ -474,12 +505,12 @@ impl AccountForTests { AccountWithMetadata { account: Account { program_owner: TOKEN_PROGRAM_ID, - balance: 0u128, + balance: 0_u128, data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_a_definition_id(), balance: BalanceForTests::vault_a_reserve_high(), }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: true, account_id: IdForTests::vault_a_id(), @@ -490,12 +521,12 @@ impl AccountForTests { AccountWithMetadata { account: Account { program_owner: TOKEN_PROGRAM_ID, - balance: 0u128, 
+ balance: 0_u128, data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_b_definition_id(), balance: BalanceForTests::vault_b_reserve_high(), }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: true, account_id: IdForTests::vault_b_id(), @@ -506,12 +537,12 @@ impl AccountForTests { AccountWithMetadata { account: Account { program_owner: TOKEN_PROGRAM_ID, - balance: 0u128, + balance: 0_u128, data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_a_definition_id(), balance: BalanceForTests::vault_a_reserve_low(), }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: true, account_id: IdForTests::vault_a_id(), @@ -522,12 +553,12 @@ impl AccountForTests { AccountWithMetadata { account: Account { program_owner: TOKEN_PROGRAM_ID, - balance: 0u128, + balance: 0_u128, data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_b_definition_id(), balance: BalanceForTests::vault_b_reserve_low(), }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: true, account_id: IdForTests::vault_b_id(), @@ -538,12 +569,12 @@ impl AccountForTests { AccountWithMetadata { account: Account { program_owner: TOKEN_PROGRAM_ID, - balance: 0u128, + balance: 0_u128, data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_a_definition_id(), balance: 0, }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: true, account_id: IdForTests::vault_a_id(), @@ -554,12 +585,12 @@ impl AccountForTests { AccountWithMetadata { account: Account { program_owner: TOKEN_PROGRAM_ID, - balance: 0u128, + balance: 0_u128, data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_b_definition_id(), balance: 0, }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: true, account_id: IdForTests::vault_b_id(), @@ -570,13 +601,13 @@ impl AccountForTests { AccountWithMetadata { account: Account { program_owner: TOKEN_PROGRAM_ID, - balance: 0u128, + balance: 0_u128, data: Data::from(&TokenDefinition::Fungible 
{ name: String::from("test"), total_supply: BalanceForTests::lp_supply_init(), metadata_id: None, }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: true, account_id: IdForTests::token_lp_definition_id(), @@ -587,13 +618,13 @@ impl AccountForTests { AccountWithMetadata { account: Account { program_owner: TOKEN_PROGRAM_ID, - balance: 0u128, + balance: 0_u128, data: Data::from(&TokenDefinition::Fungible { name: String::from("test"), total_supply: BalanceForTests::lp_supply_init(), metadata_id: None, }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: true, account_id: IdForTests::vault_a_id(), @@ -604,12 +635,12 @@ impl AccountForTests { AccountWithMetadata { account: Account { program_owner: TOKEN_PROGRAM_ID, - balance: 0u128, + balance: 0_u128, data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_lp_definition_id(), balance: 0, }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: true, account_id: IdForTests::user_token_lp_id(), @@ -620,12 +651,12 @@ impl AccountForTests { AccountWithMetadata { account: Account { program_owner: TOKEN_PROGRAM_ID, - balance: 0u128, + balance: 0_u128, data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_lp_definition_id(), balance: BalanceForTests::user_token_lp_balance(), }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: true, account_id: IdForTests::user_token_lp_id(), @@ -636,7 +667,7 @@ impl AccountForTests { AccountWithMetadata { account: Account { program_owner: ProgramId::default(), - balance: 0u128, + balance: 0_u128, data: Data::from(&PoolDefinition { definition_token_a_id: IdForTests::token_a_definition_id(), definition_token_b_id: IdForTests::token_b_definition_id(), @@ -646,10 +677,10 @@ impl AccountForTests { liquidity_pool_supply: BalanceForTests::lp_supply_init(), reserve_a: BalanceForTests::vault_a_reserve_init(), reserve_b: BalanceForTests::vault_b_reserve_init(), - fees: 0u128, + fees: 0_u128, active: true, }), - nonce: 0, + nonce: 0_u128.into(), 
}, is_authorized: true, account_id: IdForTests::pool_definition_id(), @@ -660,7 +691,7 @@ impl AccountForTests { AccountWithMetadata { account: Account { program_owner: ProgramId::default(), - balance: 0u128, + balance: 0_u128, data: Data::from(&PoolDefinition { definition_token_a_id: IdForTests::token_a_definition_id(), definition_token_b_id: IdForTests::token_b_definition_id(), @@ -670,10 +701,10 @@ impl AccountForTests { liquidity_pool_supply: BalanceForTests::lp_supply_init(), reserve_a: 0, reserve_b: BalanceForTests::vault_b_reserve_init(), - fees: 0u128, + fees: 0_u128, active: true, }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: true, account_id: IdForTests::pool_definition_id(), @@ -684,7 +715,7 @@ impl AccountForTests { AccountWithMetadata { account: Account { program_owner: ProgramId::default(), - balance: 0u128, + balance: 0_u128, data: Data::from(&PoolDefinition { definition_token_a_id: IdForTests::token_a_definition_id(), definition_token_b_id: IdForTests::token_b_definition_id(), @@ -694,10 +725,10 @@ impl AccountForTests { liquidity_pool_supply: BalanceForTests::lp_supply_init(), reserve_a: BalanceForTests::vault_a_reserve_init(), reserve_b: 0, - fees: 0u128, + fees: 0_u128, active: true, }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: true, account_id: IdForTests::pool_definition_id(), @@ -708,7 +739,7 @@ impl AccountForTests { AccountWithMetadata { account: Account { program_owner: ProgramId::default(), - balance: 0u128, + balance: 0_u128, data: Data::from(&PoolDefinition { definition_token_a_id: IdForTests::token_a_definition_id(), definition_token_b_id: IdForTests::token_b_definition_id(), @@ -718,10 +749,10 @@ impl AccountForTests { liquidity_pool_supply: BalanceForTests::vault_a_reserve_low(), reserve_a: BalanceForTests::vault_a_reserve_low(), reserve_b: BalanceForTests::vault_b_reserve_high(), - fees: 0u128, + fees: 0_u128, active: true, }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: true, account_id: 
IdForTests::pool_definition_id(), @@ -732,7 +763,7 @@ impl AccountForTests { AccountWithMetadata { account: Account { program_owner: ProgramId::default(), - balance: 0u128, + balance: 0_u128, data: Data::from(&PoolDefinition { definition_token_a_id: IdForTests::token_a_definition_id(), definition_token_b_id: IdForTests::token_b_definition_id(), @@ -742,10 +773,10 @@ impl AccountForTests { liquidity_pool_supply: BalanceForTests::vault_a_reserve_high(), reserve_a: BalanceForTests::vault_a_reserve_high(), reserve_b: BalanceForTests::vault_b_reserve_low(), - fees: 0u128, + fees: 0_u128, active: true, }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: true, account_id: IdForTests::pool_definition_id(), @@ -756,7 +787,7 @@ impl AccountForTests { AccountWithMetadata { account: Account { program_owner: ProgramId::default(), - balance: 0u128, + balance: 0_u128, data: Data::from(&PoolDefinition { definition_token_a_id: IdForTests::token_a_definition_id(), definition_token_b_id: IdForTests::token_b_definition_id(), @@ -766,10 +797,10 @@ impl AccountForTests { liquidity_pool_supply: BalanceForTests::lp_supply_init(), reserve_a: BalanceForTests::vault_a_swap_test_1(), reserve_b: BalanceForTests::vault_b_swap_test_1(), - fees: 0u128, + fees: 0_u128, active: true, }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: true, account_id: IdForTests::pool_definition_id(), @@ -780,7 +811,7 @@ impl AccountForTests { AccountWithMetadata { account: Account { program_owner: ProgramId::default(), - balance: 0u128, + balance: 0_u128, data: Data::from(&PoolDefinition { definition_token_a_id: IdForTests::token_a_definition_id(), definition_token_b_id: IdForTests::token_b_definition_id(), @@ -790,10 +821,10 @@ impl AccountForTests { liquidity_pool_supply: BalanceForTests::lp_supply_init(), reserve_a: BalanceForTests::vault_a_swap_test_2(), reserve_b: BalanceForTests::vault_b_swap_test_2(), - fees: 0u128, + fees: 0_u128, active: true, }), - nonce: 0, + nonce: 0_u128.into(), }, 
is_authorized: true, account_id: IdForTests::pool_definition_id(), @@ -804,7 +835,7 @@ impl AccountForTests { AccountWithMetadata { account: Account { program_owner: ProgramId::default(), - balance: 0u128, + balance: 0_u128, data: Data::from(&PoolDefinition { definition_token_a_id: IdForTests::token_a_definition_id(), definition_token_b_id: IdForTests::token_b_definition_id(), @@ -814,10 +845,10 @@ impl AccountForTests { liquidity_pool_supply: BalanceForTests::vault_a_reserve_low(), reserve_a: BalanceForTests::vault_a_reserve_init(), reserve_b: BalanceForTests::vault_b_reserve_init(), - fees: 0u128, + fees: 0_u128, active: true, }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: true, account_id: IdForTests::pool_definition_id(), @@ -828,7 +859,7 @@ impl AccountForTests { AccountWithMetadata { account: Account { program_owner: ProgramId::default(), - balance: 0u128, + balance: 0_u128, data: Data::from(&PoolDefinition { definition_token_a_id: IdForTests::token_a_definition_id(), definition_token_b_id: IdForTests::token_b_definition_id(), @@ -838,10 +869,10 @@ impl AccountForTests { liquidity_pool_supply: 989, reserve_a: BalanceForTests::vault_a_add_successful(), reserve_b: BalanceForTests::vault_b_add_successful(), - fees: 0u128, + fees: 0_u128, active: true, }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: true, account_id: IdForTests::pool_definition_id(), @@ -852,7 +883,7 @@ impl AccountForTests { AccountWithMetadata { account: Account { program_owner: ProgramId::default(), - balance: 0u128, + balance: 0_u128, data: Data::from(&PoolDefinition { definition_token_a_id: IdForTests::token_a_definition_id(), definition_token_b_id: IdForTests::token_b_definition_id(), @@ -862,10 +893,10 @@ impl AccountForTests { liquidity_pool_supply: 607, reserve_a: BalanceForTests::vault_a_remove_successful(), reserve_b: BalanceForTests::vault_b_remove_successful(), - fees: 0u128, + fees: 0_u128, active: true, }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: 
true, account_id: IdForTests::pool_definition_id(), @@ -876,7 +907,7 @@ impl AccountForTests { AccountWithMetadata { account: Account { program_owner: ProgramId::default(), - balance: 0u128, + balance: 0_u128, data: Data::from(&PoolDefinition { definition_token_a_id: IdForTests::token_a_definition_id(), definition_token_b_id: IdForTests::token_b_definition_id(), @@ -886,10 +917,10 @@ impl AccountForTests { liquidity_pool_supply: BalanceForTests::lp_supply_init(), reserve_a: BalanceForTests::vault_a_reserve_init(), reserve_b: BalanceForTests::vault_b_reserve_init(), - fees: 0u128, + fees: 0_u128, active: false, }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: true, account_id: IdForTests::pool_definition_id(), @@ -900,7 +931,7 @@ impl AccountForTests { AccountWithMetadata { account: Account { program_owner: ProgramId::default(), - balance: 0u128, + balance: 0_u128, data: Data::from(&PoolDefinition { definition_token_a_id: IdForTests::token_a_definition_id(), definition_token_b_id: IdForTests::token_b_definition_id(), @@ -910,10 +941,10 @@ impl AccountForTests { liquidity_pool_supply: BalanceForTests::lp_supply_init(), reserve_a: BalanceForTests::vault_a_reserve_init(), reserve_b: BalanceForTests::vault_b_reserve_init(), - fees: 0u128, + fees: 0_u128, active: false, }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: true, account_id: AccountId::new([4; 32]), @@ -924,12 +955,12 @@ impl AccountForTests { AccountWithMetadata { account: Account { program_owner: TOKEN_PROGRAM_ID, - balance: 0u128, + balance: 0_u128, data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::token_a_definition_id(), balance: BalanceForTests::vault_a_reserve_init(), }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: true, account_id: AccountId::new([4; 32]), @@ -940,12 +971,12 @@ impl AccountForTests { AccountWithMetadata { account: Account { program_owner: TOKEN_PROGRAM_ID, - balance: 0u128, + balance: 0_u128, data: Data::from(&TokenHolding::Fungible { 
definition_id: IdForTests::token_b_definition_id(), balance: BalanceForTests::vault_b_reserve_init(), }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: true, account_id: AccountId::new([4; 32]), @@ -956,7 +987,7 @@ impl AccountForTests { AccountWithMetadata { account: Account { program_owner: ProgramId::default(), - balance: 0u128, + balance: 0_u128, data: Data::from(&PoolDefinition { definition_token_a_id: IdForTests::token_a_definition_id(), definition_token_b_id: IdForTests::token_b_definition_id(), @@ -966,10 +997,10 @@ impl AccountForTests { liquidity_pool_supply: BalanceForTests::lp_supply_init(), reserve_a: BalanceForTests::vault_a_reserve_init(), reserve_b: BalanceForTests::vault_b_reserve_init(), - fees: 0u128, + fees: 0_u128, active: true, }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: true, account_id: IdForTests::pool_definition_id(), @@ -977,8 +1008,811 @@ impl AccountForTests { } } +#[cfg(feature = "nssa")] +impl BalanceForExeTests { + fn user_token_a_holding_init() -> u128 { + 10_000 + } + + fn user_token_b_holding_init() -> u128 { + 10_000 + } + + fn user_token_lp_holding_init() -> u128 { + 2_000 + } + + fn vault_a_balance_init() -> u128 { + 5_000 + } + + fn vault_b_balance_init() -> u128 { + 2_500 + } + + fn pool_lp_supply_init() -> u128 { + 5_000 + } + + fn token_a_supply() -> u128 { + 100_000 + } + + fn token_b_supply() -> u128 { + 100_000 + } + + fn token_lp_supply() -> u128 { + 5_000 + } + + fn remove_lp() -> u128 { + 1_000 + } + + fn remove_min_amount_a() -> u128 { + 500 + } + + fn remove_min_amount_b() -> u128 { + 500 + } + + fn add_min_amount_lp() -> u128 { + 1_000 + } + + fn add_max_amount_a() -> u128 { + 2_000 + } + + fn add_max_amount_b() -> u128 { + 1_000 + } + + fn swap_amount_in() -> u128 { + 1_000 + } + + fn swap_min_amount_out() -> u128 { + 200 + } + + fn vault_a_balance_swap_1() -> u128 { + 3_572 + } + + fn vault_b_balance_swap_1() -> u128 { + 3_500 + } + + fn user_token_a_holding_swap_1() -> u128 { + 11_428 + } + 
+ fn user_token_b_holding_swap_1() -> u128 { + 9_000 + } + + fn vault_a_balance_swap_2() -> u128 { + 6_000 + } + + fn vault_b_balance_swap_2() -> u128 { + 2_084 + } + + fn user_token_a_holding_swap_2() -> u128 { + 9_000 + } + + fn user_token_b_holding_swap_2() -> u128 { + 10_416 + } + + fn vault_a_balance_add() -> u128 { + 7_000 + } + + fn vault_b_balance_add() -> u128 { + 3_500 + } + + fn user_token_a_holding_add() -> u128 { + 8_000 + } + + fn user_token_b_holding_add() -> u128 { + 9_000 + } + + fn user_token_lp_holding_add() -> u128 { + 4_000 + } + + fn token_lp_supply_add() -> u128 { + 7_000 + } + + fn vault_a_balance_remove() -> u128 { + 4_000 + } + + fn vault_b_balance_remove() -> u128 { + 2_000 + } + + fn user_token_a_holding_remove() -> u128 { + 11_000 + } + + fn user_token_b_holding_remove() -> u128 { + 10_500 + } + + fn user_token_lp_holding_remove() -> u128 { + 1_000 + } + + fn token_lp_supply_remove() -> u128 { + 4_000 + } + + fn user_token_a_holding_new_definition() -> u128 { + 5_000 + } + + fn user_token_b_holding_new_definition() -> u128 { + 7_500 + } + + fn lp_supply_init() -> u128 { + // isqrt(vault_a_balance_init * vault_b_balance_init) = isqrt(5_000 * 2_500) = 3535 + (Self::vault_a_balance_init() * Self::vault_b_balance_init()).isqrt() + } +} + +#[cfg(feature = "nssa")] +impl IdForExeTests { + fn pool_definition_id() -> AccountId { + amm_core::compute_pool_pda( + Program::amm().id(), + Self::token_a_definition_id(), + Self::token_b_definition_id(), + ) + } + + fn token_lp_definition_id() -> AccountId { + amm_core::compute_liquidity_token_pda(Program::amm().id(), Self::pool_definition_id()) + } + + fn token_a_definition_id() -> AccountId { + AccountId::new([3; 32]) + } + + fn token_b_definition_id() -> AccountId { + AccountId::new([4; 32]) + } + + fn user_token_a_id() -> AccountId { + AccountId::from(&PublicKey::new_from_private_key( + &PrivateKeysForTests::user_token_a_key(), + )) + } + + fn user_token_b_id() -> AccountId { + 
AccountId::from(&PublicKey::new_from_private_key( + &PrivateKeysForTests::user_token_b_key(), + )) + } + + fn user_token_lp_id() -> AccountId { + AccountId::from(&PublicKey::new_from_private_key( + &PrivateKeysForTests::user_token_lp_key(), + )) + } + + fn vault_a_id() -> AccountId { + amm_core::compute_vault_pda( + Program::amm().id(), + Self::pool_definition_id(), + Self::token_a_definition_id(), + ) + } + + fn vault_b_id() -> AccountId { + amm_core::compute_vault_pda( + Program::amm().id(), + Self::pool_definition_id(), + Self::token_b_definition_id(), + ) + } +} + +#[cfg(feature = "nssa")] +impl AccountsForExeTests { + fn user_token_a_holding() -> Account { + Account { + program_owner: Program::token().id(), + balance: 0_u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForExeTests::token_a_definition_id(), + balance: BalanceForExeTests::user_token_a_holding_init(), + }), + nonce: 0_u128.into(), + } + } + + fn user_token_b_holding() -> Account { + Account { + program_owner: Program::token().id(), + balance: 0_u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForExeTests::token_b_definition_id(), + balance: BalanceForExeTests::user_token_b_holding_init(), + }), + nonce: 0_u128.into(), + } + } + + fn pool_definition_init() -> Account { + Account { + program_owner: Program::amm().id(), + balance: 0_u128, + data: Data::from(&PoolDefinition { + definition_token_a_id: IdForExeTests::token_a_definition_id(), + definition_token_b_id: IdForExeTests::token_b_definition_id(), + vault_a_id: IdForExeTests::vault_a_id(), + vault_b_id: IdForExeTests::vault_b_id(), + liquidity_pool_id: IdForExeTests::token_lp_definition_id(), + liquidity_pool_supply: BalanceForExeTests::pool_lp_supply_init(), + reserve_a: BalanceForExeTests::vault_a_balance_init(), + reserve_b: BalanceForExeTests::vault_b_balance_init(), + fees: 0_u128, + active: true, + }), + nonce: 0_u128.into(), + } + } + + fn token_a_definition_account() -> Account { + Account { + 
program_owner: Program::token().id(), + balance: 0_u128, + data: Data::from(&TokenDefinition::Fungible { + name: String::from("test"), + total_supply: BalanceForExeTests::token_a_supply(), + metadata_id: None, + }), + nonce: 0_u128.into(), + } + } + + fn token_b_definition_acc() -> Account { + Account { + program_owner: Program::token().id(), + balance: 0_u128, + data: Data::from(&TokenDefinition::Fungible { + name: String::from("test"), + total_supply: BalanceForExeTests::token_b_supply(), + metadata_id: None, + }), + nonce: 0_u128.into(), + } + } + + fn token_lp_definition_acc() -> Account { + Account { + program_owner: Program::token().id(), + balance: 0_u128, + data: Data::from(&TokenDefinition::Fungible { + name: String::from("LP Token"), + total_supply: BalanceForExeTests::token_lp_supply(), + metadata_id: None, + }), + nonce: 0_u128.into(), + } + } + + fn vault_a_init() -> Account { + Account { + program_owner: Program::token().id(), + balance: 0_u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForExeTests::token_a_definition_id(), + balance: BalanceForExeTests::vault_a_balance_init(), + }), + nonce: 0_u128.into(), + } + } + + fn vault_b_init() -> Account { + Account { + program_owner: Program::token().id(), + balance: 0_u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForExeTests::token_b_definition_id(), + balance: BalanceForExeTests::vault_b_balance_init(), + }), + nonce: 0_u128.into(), + } + } + + fn user_token_lp_holding() -> Account { + Account { + program_owner: Program::token().id(), + balance: 0_u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForExeTests::token_lp_definition_id(), + balance: BalanceForExeTests::user_token_lp_holding_init(), + }), + nonce: 0_u128.into(), + } + } + + fn vault_a_swap_1() -> Account { + Account { + program_owner: Program::token().id(), + balance: 0_u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForExeTests::token_a_definition_id(), + 
balance: BalanceForExeTests::vault_a_balance_swap_1(), + }), + nonce: 0_u128.into(), + } + } + + fn vault_b_swap_1() -> Account { + Account { + program_owner: Program::token().id(), + balance: 0_u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForExeTests::token_b_definition_id(), + balance: BalanceForExeTests::vault_b_balance_swap_1(), + }), + nonce: 0_u128.into(), + } + } + + fn pool_definition_swap_1() -> Account { + Account { + program_owner: Program::amm().id(), + balance: 0_u128, + data: Data::from(&PoolDefinition { + definition_token_a_id: IdForExeTests::token_a_definition_id(), + definition_token_b_id: IdForExeTests::token_b_definition_id(), + vault_a_id: IdForExeTests::vault_a_id(), + vault_b_id: IdForExeTests::vault_b_id(), + liquidity_pool_id: IdForExeTests::token_lp_definition_id(), + liquidity_pool_supply: BalanceForExeTests::pool_lp_supply_init(), + reserve_a: BalanceForExeTests::vault_a_balance_swap_1(), + reserve_b: BalanceForExeTests::vault_b_balance_swap_1(), + fees: 0_u128, + active: true, + }), + nonce: 0_u128.into(), + } + } + + fn user_token_a_holding_swap_1() -> Account { + Account { + program_owner: Program::token().id(), + balance: 0_u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForExeTests::token_a_definition_id(), + balance: BalanceForExeTests::user_token_a_holding_swap_1(), + }), + nonce: 0_u128.into(), + } + } + + fn user_token_b_holding_swap_1() -> Account { + Account { + program_owner: Program::token().id(), + balance: 0_u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForExeTests::token_b_definition_id(), + balance: BalanceForExeTests::user_token_b_holding_swap_1(), + }), + nonce: 1_u128.into(), + } + } + + fn vault_a_swap_2() -> Account { + Account { + program_owner: Program::token().id(), + balance: 0_u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForExeTests::token_a_definition_id(), + balance: BalanceForExeTests::vault_a_balance_swap_2(), + 
}), + nonce: 0_u128.into(), + } + } + + fn vault_b_swap_2() -> Account { + Account { + program_owner: Program::token().id(), + balance: 0_u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForExeTests::token_b_definition_id(), + balance: BalanceForExeTests::vault_b_balance_swap_2(), + }), + nonce: 0_u128.into(), + } + } + + fn pool_definition_swap_2() -> Account { + Account { + program_owner: Program::amm().id(), + balance: 0_u128, + data: Data::from(&PoolDefinition { + definition_token_a_id: IdForExeTests::token_a_definition_id(), + definition_token_b_id: IdForExeTests::token_b_definition_id(), + vault_a_id: IdForExeTests::vault_a_id(), + vault_b_id: IdForExeTests::vault_b_id(), + liquidity_pool_id: IdForExeTests::token_lp_definition_id(), + liquidity_pool_supply: BalanceForExeTests::pool_lp_supply_init(), + reserve_a: BalanceForExeTests::vault_a_balance_swap_2(), + reserve_b: BalanceForExeTests::vault_b_balance_swap_2(), + fees: 0_u128, + active: true, + }), + nonce: 0_u128.into(), + } + } + + fn user_token_a_holding_swap_2() -> Account { + Account { + program_owner: Program::token().id(), + balance: 0_u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForExeTests::token_a_definition_id(), + balance: BalanceForExeTests::user_token_a_holding_swap_2(), + }), + nonce: 1_u128.into(), + } + } + + fn user_token_b_holding_swap_2() -> Account { + Account { + program_owner: Program::token().id(), + balance: 0_u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForExeTests::token_b_definition_id(), + balance: BalanceForExeTests::user_token_b_holding_swap_2(), + }), + nonce: 0_u128.into(), + } + } + + fn vault_a_add() -> Account { + Account { + program_owner: Program::token().id(), + balance: 0_u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForExeTests::token_a_definition_id(), + balance: BalanceForExeTests::vault_a_balance_add(), + }), + nonce: 0_u128.into(), + } + } + + fn vault_b_add() -> 
Account { + Account { + program_owner: Program::token().id(), + balance: 0_u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForExeTests::token_b_definition_id(), + balance: BalanceForExeTests::vault_b_balance_add(), + }), + nonce: 0_u128.into(), + } + } + + fn pool_definition_add() -> Account { + Account { + program_owner: Program::amm().id(), + balance: 0_u128, + data: Data::from(&PoolDefinition { + definition_token_a_id: IdForExeTests::token_a_definition_id(), + definition_token_b_id: IdForExeTests::token_b_definition_id(), + vault_a_id: IdForExeTests::vault_a_id(), + vault_b_id: IdForExeTests::vault_b_id(), + liquidity_pool_id: IdForExeTests::token_lp_definition_id(), + liquidity_pool_supply: BalanceForExeTests::token_lp_supply_add(), + reserve_a: BalanceForExeTests::vault_a_balance_add(), + reserve_b: BalanceForExeTests::vault_b_balance_add(), + fees: 0_u128, + active: true, + }), + nonce: 0_u128.into(), + } + } + + fn user_token_a_holding_add() -> Account { + Account { + program_owner: Program::token().id(), + balance: 0_u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForExeTests::token_a_definition_id(), + balance: BalanceForExeTests::user_token_a_holding_add(), + }), + nonce: 1_u128.into(), + } + } + + fn user_token_b_holding_add() -> Account { + Account { + program_owner: Program::token().id(), + balance: 0_u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForExeTests::token_b_definition_id(), + balance: BalanceForExeTests::user_token_b_holding_add(), + }), + nonce: 1_u128.into(), + } + } + + fn user_token_lp_holding_add() -> Account { + Account { + program_owner: Program::token().id(), + balance: 0_u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForExeTests::token_lp_definition_id(), + balance: BalanceForExeTests::user_token_lp_holding_add(), + }), + nonce: 0_u128.into(), + } + } + + fn token_lp_definition_add() -> Account { + Account { + program_owner: 
Program::token().id(), + balance: 0_u128, + data: Data::from(&TokenDefinition::Fungible { + name: String::from("LP Token"), + total_supply: BalanceForExeTests::token_lp_supply_add(), + metadata_id: None, + }), + nonce: 0_u128.into(), + } + } + + fn vault_a_remove() -> Account { + Account { + program_owner: Program::token().id(), + balance: 0_u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForExeTests::token_a_definition_id(), + balance: BalanceForExeTests::vault_a_balance_remove(), + }), + nonce: 0_u128.into(), + } + } + + fn vault_b_remove() -> Account { + Account { + program_owner: Program::token().id(), + balance: 0_u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForExeTests::token_b_definition_id(), + balance: BalanceForExeTests::vault_b_balance_remove(), + }), + nonce: 0_u128.into(), + } + } + + fn pool_definition_remove() -> Account { + Account { + program_owner: Program::amm().id(), + balance: 0_u128, + data: Data::from(&PoolDefinition { + definition_token_a_id: IdForExeTests::token_a_definition_id(), + definition_token_b_id: IdForExeTests::token_b_definition_id(), + vault_a_id: IdForExeTests::vault_a_id(), + vault_b_id: IdForExeTests::vault_b_id(), + liquidity_pool_id: IdForExeTests::token_lp_definition_id(), + liquidity_pool_supply: BalanceForExeTests::token_lp_supply_remove(), + reserve_a: BalanceForExeTests::vault_a_balance_remove(), + reserve_b: BalanceForExeTests::vault_b_balance_remove(), + fees: 0_u128, + active: true, + }), + nonce: 0_u128.into(), + } + } + + fn user_token_a_holding_remove() -> Account { + Account { + program_owner: Program::token().id(), + balance: 0_u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForExeTests::token_a_definition_id(), + balance: BalanceForExeTests::user_token_a_holding_remove(), + }), + nonce: 0_u128.into(), + } + } + + fn user_token_b_holding_remove() -> Account { + Account { + program_owner: Program::token().id(), + balance: 0_u128, + data: 
Data::from(&TokenHolding::Fungible { + definition_id: IdForExeTests::token_b_definition_id(), + balance: BalanceForExeTests::user_token_b_holding_remove(), + }), + nonce: 0_u128.into(), + } + } + + fn user_token_lp_holding_remove() -> Account { + Account { + program_owner: Program::token().id(), + balance: 0_u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForExeTests::token_lp_definition_id(), + balance: BalanceForExeTests::user_token_lp_holding_remove(), + }), + nonce: 1_u128.into(), + } + } + + fn token_lp_definition_remove() -> Account { + Account { + program_owner: Program::token().id(), + balance: 0_u128, + data: Data::from(&TokenDefinition::Fungible { + name: String::from("LP Token"), + total_supply: BalanceForExeTests::token_lp_supply_remove(), + metadata_id: None, + }), + nonce: 0_u128.into(), + } + } + + fn token_lp_definition_init_inactive() -> Account { + Account { + program_owner: Program::token().id(), + balance: 0_u128, + data: Data::from(&TokenDefinition::Fungible { + name: String::from("LP Token"), + total_supply: 0, + metadata_id: None, + }), + nonce: 0_u128.into(), + } + } + + fn vault_a_init_inactive() -> Account { + Account { + program_owner: Program::token().id(), + balance: 0_u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForExeTests::token_a_definition_id(), + balance: 0, + }), + nonce: 0_u128.into(), + } + } + + fn vault_b_init_inactive() -> Account { + Account { + program_owner: Program::token().id(), + balance: 0_u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForExeTests::token_b_definition_id(), + balance: 0, + }), + nonce: 0_u128.into(), + } + } + + fn pool_definition_inactive() -> Account { + Account { + program_owner: Program::amm().id(), + balance: 0_u128, + data: Data::from(&PoolDefinition { + definition_token_a_id: IdForExeTests::token_a_definition_id(), + definition_token_b_id: IdForExeTests::token_b_definition_id(), + vault_a_id: IdForExeTests::vault_a_id(), + 
vault_b_id: IdForExeTests::vault_b_id(), + liquidity_pool_id: IdForExeTests::token_lp_definition_id(), + liquidity_pool_supply: 0, + reserve_a: 0, + reserve_b: 0, + fees: 0_u128, + active: false, + }), + nonce: 0_u128.into(), + } + } + + fn user_token_a_holding_new_init() -> Account { + Account { + program_owner: Program::token().id(), + balance: 0_u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForExeTests::token_a_definition_id(), + balance: BalanceForExeTests::user_token_a_holding_new_definition(), + }), + nonce: 1_u128.into(), + } + } + + fn user_token_b_holding_new_init() -> Account { + Account { + program_owner: Program::token().id(), + balance: 0_u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForExeTests::token_b_definition_id(), + balance: BalanceForExeTests::user_token_b_holding_new_definition(), + }), + nonce: 1_u128.into(), + } + } + + fn user_token_lp_holding_new_init() -> Account { + Account { + program_owner: Program::token().id(), + balance: 0_u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForExeTests::token_lp_definition_id(), + balance: BalanceForExeTests::lp_supply_init(), + }), + nonce: 0_u128.into(), + } + } + + fn token_lp_definition_new_init() -> Account { + Account { + program_owner: Program::token().id(), + balance: 0_u128, + data: Data::from(&TokenDefinition::Fungible { + name: String::from("LP Token"), + total_supply: BalanceForExeTests::lp_supply_init(), + metadata_id: None, + }), + nonce: 0_u128.into(), + } + } + + fn pool_definition_new_init() -> Account { + Account { + program_owner: Program::amm().id(), + balance: 0_u128, + data: Data::from(&PoolDefinition { + definition_token_a_id: IdForExeTests::token_a_definition_id(), + definition_token_b_id: IdForExeTests::token_b_definition_id(), + vault_a_id: IdForExeTests::vault_a_id(), + vault_b_id: IdForExeTests::vault_b_id(), + liquidity_pool_id: IdForExeTests::token_lp_definition_id(), + liquidity_pool_supply: 
BalanceForExeTests::lp_supply_init(), + reserve_a: BalanceForExeTests::vault_a_balance_init(), + reserve_b: BalanceForExeTests::vault_b_balance_init(), + fees: 0_u128, + active: true, + }), + nonce: 0_u128.into(), + } + } + + fn user_token_lp_holding_init_zero() -> Account { + Account { + program_owner: Program::token().id(), + balance: 0_u128, + data: Data::from(&TokenHolding::Fungible { + definition_id: IdForExeTests::token_lp_definition_id(), + balance: 0, + }), + nonce: 0_u128.into(), + } + } +} + #[test] -fn test_pool_pda_produces_unique_id_for_token_pair() { +fn pool_pda_produces_unique_id_for_token_pair() { assert!( amm_core::compute_pool_pda( AMM_PROGRAM_ID, @@ -994,15 +1828,15 @@ fn test_pool_pda_produces_unique_id_for_token_pair() { #[should_panic(expected = "Vault A was not provided")] #[test] -fn test_call_add_liquidity_vault_a_omitted() { +fn call_add_liquidity_vault_a_omitted() { let _post_states = add_liquidity( - AccountForTests::pool_definition_init(), - AccountForTests::vault_a_with_wrong_id(), - AccountForTests::vault_b_init(), - AccountForTests::pool_lp_init(), - AccountForTests::user_holding_a(), - AccountForTests::user_holding_b(), - AccountForTests::user_holding_lp_init(), + AccountWithMetadataForTests::pool_definition_init(), + AccountWithMetadataForTests::vault_a_with_wrong_id(), + AccountWithMetadataForTests::vault_b_init(), + AccountWithMetadataForTests::pool_lp_init(), + AccountWithMetadataForTests::user_holding_a(), + AccountWithMetadataForTests::user_holding_b(), + AccountWithMetadataForTests::user_holding_lp_init(), NonZero::new(BalanceForTests::add_min_amount_lp()).unwrap(), BalanceForTests::add_max_amount_a(), BalanceForTests::add_max_amount_b(), @@ -1011,15 +1845,15 @@ fn test_call_add_liquidity_vault_a_omitted() { #[should_panic(expected = "Vault B was not provided")] #[test] -fn test_call_add_liquidity_vault_b_omitted() { +fn call_add_liquidity_vault_b_omitted() { let _post_states = add_liquidity( - 
AccountForTests::pool_definition_init(), - AccountForTests::vault_a_init(), - AccountForTests::vault_b_with_wrong_id(), - AccountForTests::pool_lp_init(), - AccountForTests::user_holding_a(), - AccountForTests::user_holding_b(), - AccountForTests::user_holding_lp_init(), + AccountWithMetadataForTests::pool_definition_init(), + AccountWithMetadataForTests::vault_a_init(), + AccountWithMetadataForTests::vault_b_with_wrong_id(), + AccountWithMetadataForTests::pool_lp_init(), + AccountWithMetadataForTests::user_holding_a(), + AccountWithMetadataForTests::user_holding_b(), + AccountWithMetadataForTests::user_holding_lp_init(), NonZero::new(BalanceForTests::add_min_amount_lp()).unwrap(), BalanceForTests::add_max_amount_a(), BalanceForTests::add_max_amount_b(), @@ -1028,15 +1862,15 @@ fn test_call_add_liquidity_vault_b_omitted() { #[should_panic(expected = "LP definition mismatch")] #[test] -fn test_call_add_liquidity_lp_definition_mismatch() { +fn call_add_liquidity_lp_definition_mismatch() { let _post_states = add_liquidity( - AccountForTests::pool_definition_init(), - AccountForTests::vault_a_init(), - AccountForTests::vault_b_init(), - AccountForTests::pool_lp_with_wrong_id(), - AccountForTests::user_holding_a(), - AccountForTests::user_holding_b(), - AccountForTests::user_holding_lp_init(), + AccountWithMetadataForTests::pool_definition_init(), + AccountWithMetadataForTests::vault_a_init(), + AccountWithMetadataForTests::vault_b_init(), + AccountWithMetadataForTests::pool_lp_with_wrong_id(), + AccountWithMetadataForTests::user_holding_a(), + AccountWithMetadataForTests::user_holding_b(), + AccountWithMetadataForTests::user_holding_lp_init(), NonZero::new(BalanceForTests::add_min_amount_lp()).unwrap(), BalanceForTests::add_max_amount_a(), BalanceForTests::add_max_amount_b(), @@ -1045,15 +1879,15 @@ fn test_call_add_liquidity_lp_definition_mismatch() { #[should_panic(expected = "Both max-balances must be nonzero")] #[test] -fn test_call_add_liquidity_zero_balance_1() { 
+fn call_add_liquidity_zero_balance_1() { let _post_states = add_liquidity( - AccountForTests::pool_definition_init(), - AccountForTests::vault_a_init(), - AccountForTests::vault_b_init(), - AccountForTests::pool_lp_init(), - AccountForTests::user_holding_a(), - AccountForTests::user_holding_b(), - AccountForTests::user_holding_lp_init(), + AccountWithMetadataForTests::pool_definition_init(), + AccountWithMetadataForTests::vault_a_init(), + AccountWithMetadataForTests::vault_b_init(), + AccountWithMetadataForTests::pool_lp_init(), + AccountWithMetadataForTests::user_holding_a(), + AccountWithMetadataForTests::user_holding_b(), + AccountWithMetadataForTests::user_holding_lp_init(), NonZero::new(BalanceForTests::add_min_amount_lp()).unwrap(), 0, BalanceForTests::add_max_amount_b(), @@ -1062,15 +1896,15 @@ fn test_call_add_liquidity_zero_balance_1() { #[should_panic(expected = "Both max-balances must be nonzero")] #[test] -fn test_call_add_liquidity_zero_balance_2() { +fn call_add_liquidity_zero_balance_2() { let _post_states = add_liquidity( - AccountForTests::pool_definition_init(), - AccountForTests::vault_a_init(), - AccountForTests::vault_b_init(), - AccountForTests::pool_lp_init(), - AccountForTests::user_holding_a(), - AccountForTests::user_holding_b(), - AccountForTests::user_holding_lp_init(), + AccountWithMetadataForTests::pool_definition_init(), + AccountWithMetadataForTests::vault_a_init(), + AccountWithMetadataForTests::vault_b_init(), + AccountWithMetadataForTests::pool_lp_init(), + AccountWithMetadataForTests::user_holding_a(), + AccountWithMetadataForTests::user_holding_b(), + AccountWithMetadataForTests::user_holding_lp_init(), NonZero::new(BalanceForTests::add_min_amount_lp()).unwrap(), 0, BalanceForTests::add_max_amount_a(), @@ -1079,15 +1913,15 @@ fn test_call_add_liquidity_zero_balance_2() { #[should_panic(expected = "Vaults' balances must be at least the reserve amounts")] #[test] -fn test_call_add_liquidity_vault_insufficient_balance_1() { +fn 
call_add_liquidity_vault_insufficient_balance_1() { let _post_states = add_liquidity( - AccountForTests::pool_definition_init(), - AccountForTests::vault_a_init_zero(), - AccountForTests::vault_b_init(), - AccountForTests::pool_lp_init(), - AccountForTests::user_holding_a(), - AccountForTests::user_holding_b(), - AccountForTests::user_holding_lp_init(), + AccountWithMetadataForTests::pool_definition_init(), + AccountWithMetadataForTests::vault_a_init_zero(), + AccountWithMetadataForTests::vault_b_init(), + AccountWithMetadataForTests::pool_lp_init(), + AccountWithMetadataForTests::user_holding_a(), + AccountWithMetadataForTests::user_holding_b(), + AccountWithMetadataForTests::user_holding_lp_init(), NonZero::new(BalanceForTests::add_max_amount_a()).unwrap(), BalanceForTests::add_max_amount_b(), BalanceForTests::add_min_amount_lp(), @@ -1096,15 +1930,15 @@ fn test_call_add_liquidity_vault_insufficient_balance_1() { #[should_panic(expected = "Vaults' balances must be at least the reserve amounts")] #[test] -fn test_call_add_liquidity_vault_insufficient_balance_2() { +fn call_add_liquidity_vault_insufficient_balance_2() { let _post_states = add_liquidity( - AccountForTests::pool_definition_init(), - AccountForTests::vault_a_init(), - AccountForTests::vault_b_init_zero(), - AccountForTests::pool_lp_init(), - AccountForTests::user_holding_a(), - AccountForTests::user_holding_b(), - AccountForTests::user_holding_lp_init(), + AccountWithMetadataForTests::pool_definition_init(), + AccountWithMetadataForTests::vault_a_init(), + AccountWithMetadataForTests::vault_b_init_zero(), + AccountWithMetadataForTests::pool_lp_init(), + AccountWithMetadataForTests::user_holding_a(), + AccountWithMetadataForTests::user_holding_b(), + AccountWithMetadataForTests::user_holding_lp_init(), NonZero::new(BalanceForTests::add_max_amount_a()).unwrap(), BalanceForTests::add_max_amount_b(), BalanceForTests::add_min_amount_lp(), @@ -1113,15 +1947,15 @@ fn 
test_call_add_liquidity_vault_insufficient_balance_2() { #[should_panic(expected = "A trade amount is 0")] #[test] -fn test_call_add_liquidity_actual_amount_zero_1() { +fn call_add_liquidity_actual_amount_zero_1() { let _post_states = add_liquidity( - AccountForTests::pool_definition_init_reserve_a_low(), - AccountForTests::vault_a_init_low(), - AccountForTests::vault_b_init_high(), - AccountForTests::pool_lp_init(), - AccountForTests::user_holding_a(), - AccountForTests::user_holding_b(), - AccountForTests::user_holding_lp_init(), + AccountWithMetadataForTests::pool_definition_init_reserve_a_low(), + AccountWithMetadataForTests::vault_a_init_low(), + AccountWithMetadataForTests::vault_b_init_high(), + AccountWithMetadataForTests::pool_lp_init(), + AccountWithMetadataForTests::user_holding_a(), + AccountWithMetadataForTests::user_holding_b(), + AccountWithMetadataForTests::user_holding_lp_init(), NonZero::new(BalanceForTests::add_min_amount_lp()).unwrap(), BalanceForTests::add_max_amount_a(), BalanceForTests::add_max_amount_b(), @@ -1130,15 +1964,15 @@ fn test_call_add_liquidity_actual_amount_zero_1() { #[should_panic(expected = "A trade amount is 0")] #[test] -fn test_call_add_liquidity_actual_amount_zero_2() { +fn call_add_liquidity_actual_amount_zero_2() { let _post_states = add_liquidity( - AccountForTests::pool_definition_init_reserve_b_low(), - AccountForTests::vault_a_init_high(), - AccountForTests::vault_b_init_low(), - AccountForTests::pool_lp_init(), - AccountForTests::user_holding_a(), - AccountForTests::user_holding_b(), - AccountForTests::user_holding_lp_init(), + AccountWithMetadataForTests::pool_definition_init_reserve_b_low(), + AccountWithMetadataForTests::vault_a_init_high(), + AccountWithMetadataForTests::vault_b_init_low(), + AccountWithMetadataForTests::pool_lp_init(), + AccountWithMetadataForTests::user_holding_a(), + AccountWithMetadataForTests::user_holding_b(), + AccountWithMetadataForTests::user_holding_lp_init(), 
NonZero::new(BalanceForTests::add_min_amount_lp()).unwrap(), BalanceForTests::add_max_amount_a_low(), BalanceForTests::add_max_amount_b_low(), @@ -1147,15 +1981,15 @@ fn test_call_add_liquidity_actual_amount_zero_2() { #[should_panic(expected = "Reserves must be nonzero")] #[test] -fn test_call_add_liquidity_reserves_zero_1() { +fn call_add_liquidity_reserves_zero_1() { let _post_states = add_liquidity( - AccountForTests::pool_definition_init_reserve_a_zero(), - AccountForTests::vault_a_init(), - AccountForTests::vault_b_init(), - AccountForTests::pool_lp_init(), - AccountForTests::user_holding_a(), - AccountForTests::user_holding_b(), - AccountForTests::user_holding_lp_init(), + AccountWithMetadataForTests::pool_definition_init_reserve_a_zero(), + AccountWithMetadataForTests::vault_a_init(), + AccountWithMetadataForTests::vault_b_init(), + AccountWithMetadataForTests::pool_lp_init(), + AccountWithMetadataForTests::user_holding_a(), + AccountWithMetadataForTests::user_holding_b(), + AccountWithMetadataForTests::user_holding_lp_init(), NonZero::new(BalanceForTests::add_min_amount_lp()).unwrap(), BalanceForTests::add_max_amount_a(), BalanceForTests::add_max_amount_b(), @@ -1164,15 +1998,15 @@ fn test_call_add_liquidity_reserves_zero_1() { #[should_panic(expected = "Reserves must be nonzero")] #[test] -fn test_call_add_liquidity_reserves_zero_2() { +fn call_add_liquidity_reserves_zero_2() { let _post_states = add_liquidity( - AccountForTests::pool_definition_init_reserve_b_zero(), - AccountForTests::vault_a_init(), - AccountForTests::vault_b_init(), - AccountForTests::pool_lp_init(), - AccountForTests::user_holding_a(), - AccountForTests::user_holding_b(), - AccountForTests::user_holding_lp_init(), + AccountWithMetadataForTests::pool_definition_init_reserve_b_zero(), + AccountWithMetadataForTests::vault_a_init(), + AccountWithMetadataForTests::vault_b_init(), + AccountWithMetadataForTests::pool_lp_init(), + AccountWithMetadataForTests::user_holding_a(), + 
AccountWithMetadataForTests::user_holding_b(), + AccountWithMetadataForTests::user_holding_lp_init(), NonZero::new(BalanceForTests::add_min_amount_lp()).unwrap(), BalanceForTests::add_max_amount_a(), BalanceForTests::add_max_amount_b(), @@ -1181,15 +2015,15 @@ fn test_call_add_liquidity_reserves_zero_2() { #[should_panic(expected = "Payable LP must be nonzero")] #[test] -fn test_call_add_liquidity_payable_lp_zero() { +fn call_add_liquidity_payable_lp_zero() { let _post_states = add_liquidity( - AccountForTests::pool_definition_add_zero_lp(), - AccountForTests::vault_a_init(), - AccountForTests::vault_b_init(), - AccountForTests::pool_lp_init(), - AccountForTests::user_holding_a(), - AccountForTests::user_holding_b(), - AccountForTests::user_holding_lp_init(), + AccountWithMetadataForTests::pool_definition_add_zero_lp(), + AccountWithMetadataForTests::vault_a_init(), + AccountWithMetadataForTests::vault_b_init(), + AccountWithMetadataForTests::pool_lp_init(), + AccountWithMetadataForTests::user_holding_a(), + AccountWithMetadataForTests::user_holding_b(), + AccountWithMetadataForTests::user_holding_lp_init(), NonZero::new(BalanceForTests::add_min_amount_lp()).unwrap(), BalanceForTests::add_max_amount_a_low(), BalanceForTests::add_max_amount_b_low(), @@ -1197,15 +2031,15 @@ fn test_call_add_liquidity_payable_lp_zero() { } #[test] -fn test_call_add_liquidity_chained_call_successsful() { +fn call_add_liquidity_chained_call_successsful() { let (post_states, chained_calls) = add_liquidity( - AccountForTests::pool_definition_init(), - AccountForTests::vault_a_init(), - AccountForTests::vault_b_init(), - AccountForTests::pool_lp_init(), - AccountForTests::user_holding_a(), - AccountForTests::user_holding_b(), - AccountForTests::user_holding_lp_init(), + AccountWithMetadataForTests::pool_definition_init(), + AccountWithMetadataForTests::vault_a_init(), + AccountWithMetadataForTests::vault_b_init(), + AccountWithMetadataForTests::pool_lp_init(), + 
AccountWithMetadataForTests::user_holding_a(), + AccountWithMetadataForTests::user_holding_b(), + AccountWithMetadataForTests::user_holding_lp_init(), NonZero::new(BalanceForTests::add_min_amount_lp()).unwrap(), BalanceForTests::add_max_amount_a(), BalanceForTests::add_max_amount_b(), @@ -1213,7 +2047,10 @@ fn test_call_add_liquidity_chained_call_successsful() { let pool_post = post_states[0].clone(); - assert!(AccountForTests::pool_definition_add_successful().account == *pool_post.account()); + assert!( + AccountWithMetadataForTests::pool_definition_add_successful().account + == *pool_post.account() + ); let chained_call_lp = chained_calls[0].clone(); let chained_call_b = chained_calls[1].clone(); @@ -1226,15 +2063,15 @@ fn test_call_add_liquidity_chained_call_successsful() { #[should_panic(expected = "Vault A was not provided")] #[test] -fn test_call_remove_liquidity_vault_a_omitted() { +fn call_remove_liquidity_vault_a_omitted() { let _post_states = remove_liquidity( - AccountForTests::pool_definition_init(), - AccountForTests::vault_a_with_wrong_id(), - AccountForTests::vault_b_init(), - AccountForTests::pool_lp_init(), - AccountForTests::user_holding_a(), - AccountForTests::user_holding_b(), - AccountForTests::user_holding_lp_init(), + AccountWithMetadataForTests::pool_definition_init(), + AccountWithMetadataForTests::vault_a_with_wrong_id(), + AccountWithMetadataForTests::vault_b_init(), + AccountWithMetadataForTests::pool_lp_init(), + AccountWithMetadataForTests::user_holding_a(), + AccountWithMetadataForTests::user_holding_b(), + AccountWithMetadataForTests::user_holding_lp_init(), NonZero::new(BalanceForTests::remove_amount_lp()).unwrap(), BalanceForTests::remove_min_amount_a(), BalanceForTests::remove_min_amount_b(), @@ -1243,15 +2080,15 @@ fn test_call_remove_liquidity_vault_a_omitted() { #[should_panic(expected = "Vault B was not provided")] #[test] -fn test_call_remove_liquidity_vault_b_omitted() { +fn call_remove_liquidity_vault_b_omitted() { let 
_post_states = remove_liquidity( - AccountForTests::pool_definition_init(), - AccountForTests::vault_a_init(), - AccountForTests::vault_b_with_wrong_id(), - AccountForTests::pool_lp_init(), - AccountForTests::user_holding_a(), - AccountForTests::user_holding_b(), - AccountForTests::user_holding_lp_init(), + AccountWithMetadataForTests::pool_definition_init(), + AccountWithMetadataForTests::vault_a_init(), + AccountWithMetadataForTests::vault_b_with_wrong_id(), + AccountWithMetadataForTests::pool_lp_init(), + AccountWithMetadataForTests::user_holding_a(), + AccountWithMetadataForTests::user_holding_b(), + AccountWithMetadataForTests::user_holding_lp_init(), NonZero::new(BalanceForTests::remove_amount_lp()).unwrap(), BalanceForTests::remove_min_amount_a(), BalanceForTests::remove_min_amount_b(), @@ -1260,15 +2097,15 @@ fn test_call_remove_liquidity_vault_b_omitted() { #[should_panic(expected = "LP definition mismatch")] #[test] -fn test_call_remove_liquidity_lp_def_mismatch() { +fn call_remove_liquidity_lp_def_mismatch() { let _post_states = remove_liquidity( - AccountForTests::pool_definition_init(), - AccountForTests::vault_a_init(), - AccountForTests::vault_b_init(), - AccountForTests::pool_lp_with_wrong_id(), - AccountForTests::user_holding_a(), - AccountForTests::user_holding_b(), - AccountForTests::user_holding_lp_init(), + AccountWithMetadataForTests::pool_definition_init(), + AccountWithMetadataForTests::vault_a_init(), + AccountWithMetadataForTests::vault_b_init(), + AccountWithMetadataForTests::pool_lp_with_wrong_id(), + AccountWithMetadataForTests::user_holding_a(), + AccountWithMetadataForTests::user_holding_b(), + AccountWithMetadataForTests::user_holding_lp_init(), NonZero::new(BalanceForTests::remove_amount_lp()).unwrap(), BalanceForTests::remove_min_amount_a(), BalanceForTests::remove_min_amount_b(), @@ -1277,16 +2114,17 @@ fn test_call_remove_liquidity_lp_def_mismatch() { #[should_panic(expected = "Invalid liquidity account provided")] #[test] -fn 
test_call_remove_liquidity_insufficient_liquidity_amount() { +fn call_remove_liquidity_insufficient_liquidity_amount() { let _post_states = remove_liquidity( - AccountForTests::pool_definition_init(), - AccountForTests::vault_a_init(), - AccountForTests::vault_b_init(), - AccountForTests::pool_lp_init(), - AccountForTests::user_holding_a(), - AccountForTests::user_holding_b(), - AccountForTests::user_holding_a(), /* different token account than lp to create desired - * error */ + AccountWithMetadataForTests::pool_definition_init(), + AccountWithMetadataForTests::vault_a_init(), + AccountWithMetadataForTests::vault_b_init(), + AccountWithMetadataForTests::pool_lp_init(), + AccountWithMetadataForTests::user_holding_a(), + AccountWithMetadataForTests::user_holding_b(), + AccountWithMetadataForTests::user_holding_a(), /* different token account than lp to + * create desired + * error */ NonZero::new(BalanceForTests::remove_amount_lp()).unwrap(), BalanceForTests::remove_min_amount_a(), BalanceForTests::remove_min_amount_b(), @@ -1297,15 +2135,15 @@ fn test_call_remove_liquidity_insufficient_liquidity_amount() { expected = "Insufficient minimal withdraw amount (Token A) provided for liquidity amount" )] #[test] -fn test_call_remove_liquidity_insufficient_balance_1() { +fn call_remove_liquidity_insufficient_balance_1() { let _post_states = remove_liquidity( - AccountForTests::pool_definition_init(), - AccountForTests::vault_a_init(), - AccountForTests::vault_b_init(), - AccountForTests::pool_lp_init(), - AccountForTests::user_holding_a(), - AccountForTests::user_holding_b(), - AccountForTests::user_holding_lp_init(), + AccountWithMetadataForTests::pool_definition_init(), + AccountWithMetadataForTests::vault_a_init(), + AccountWithMetadataForTests::vault_b_init(), + AccountWithMetadataForTests::pool_lp_init(), + AccountWithMetadataForTests::user_holding_a(), + AccountWithMetadataForTests::user_holding_b(), + AccountWithMetadataForTests::user_holding_lp_init(), 
NonZero::new(BalanceForTests::remove_amount_lp_1()).unwrap(), BalanceForTests::remove_min_amount_a(), BalanceForTests::remove_min_amount_b(), @@ -1316,15 +2154,15 @@ fn test_call_remove_liquidity_insufficient_balance_1() { expected = "Insufficient minimal withdraw amount (Token B) provided for liquidity amount" )] #[test] -fn test_call_remove_liquidity_insufficient_balance_2() { +fn call_remove_liquidity_insufficient_balance_2() { let _post_states = remove_liquidity( - AccountForTests::pool_definition_init(), - AccountForTests::vault_a_init(), - AccountForTests::vault_b_init(), - AccountForTests::pool_lp_init(), - AccountForTests::user_holding_a(), - AccountForTests::user_holding_b(), - AccountForTests::user_holding_lp_init(), + AccountWithMetadataForTests::pool_definition_init(), + AccountWithMetadataForTests::vault_a_init(), + AccountWithMetadataForTests::vault_b_init(), + AccountWithMetadataForTests::pool_lp_init(), + AccountWithMetadataForTests::user_holding_a(), + AccountWithMetadataForTests::user_holding_b(), + AccountWithMetadataForTests::user_holding_lp_init(), NonZero::new(BalanceForTests::remove_amount_lp()).unwrap(), BalanceForTests::remove_min_amount_a(), BalanceForTests::remove_min_amount_b(), @@ -1333,15 +2171,15 @@ fn test_call_remove_liquidity_insufficient_balance_2() { #[should_panic(expected = "Minimum withdraw amount must be nonzero")] #[test] -fn test_call_remove_liquidity_min_bal_zero_1() { +fn call_remove_liquidity_min_bal_zero_1() { let _post_states = remove_liquidity( - AccountForTests::pool_definition_init(), - AccountForTests::vault_a_init(), - AccountForTests::vault_b_init(), - AccountForTests::pool_lp_init(), - AccountForTests::user_holding_a(), - AccountForTests::user_holding_b(), - AccountForTests::user_holding_lp_init(), + AccountWithMetadataForTests::pool_definition_init(), + AccountWithMetadataForTests::vault_a_init(), + AccountWithMetadataForTests::vault_b_init(), + AccountWithMetadataForTests::pool_lp_init(), + 
AccountWithMetadataForTests::user_holding_a(), + AccountWithMetadataForTests::user_holding_b(), + AccountWithMetadataForTests::user_holding_lp_init(), NonZero::new(BalanceForTests::remove_amount_lp()).unwrap(), 0, BalanceForTests::remove_min_amount_b(), @@ -1350,15 +2188,15 @@ fn test_call_remove_liquidity_min_bal_zero_1() { #[should_panic(expected = "Minimum withdraw amount must be nonzero")] #[test] -fn test_call_remove_liquidity_min_bal_zero_2() { +fn call_remove_liquidity_min_bal_zero_2() { let _post_states = remove_liquidity( - AccountForTests::pool_definition_init(), - AccountForTests::vault_a_init(), - AccountForTests::vault_b_init(), - AccountForTests::pool_lp_init(), - AccountForTests::user_holding_a(), - AccountForTests::user_holding_b(), - AccountForTests::user_holding_lp_init(), + AccountWithMetadataForTests::pool_definition_init(), + AccountWithMetadataForTests::vault_a_init(), + AccountWithMetadataForTests::vault_b_init(), + AccountWithMetadataForTests::pool_lp_init(), + AccountWithMetadataForTests::user_holding_a(), + AccountWithMetadataForTests::user_holding_b(), + AccountWithMetadataForTests::user_holding_lp_init(), NonZero::new(BalanceForTests::remove_amount_lp()).unwrap(), BalanceForTests::remove_min_amount_a(), 0, @@ -1366,15 +2204,15 @@ fn test_call_remove_liquidity_min_bal_zero_2() { } #[test] -fn test_call_remove_liquidity_chained_call_successful() { +fn call_remove_liquidity_chained_call_successful() { let (post_states, chained_calls) = remove_liquidity( - AccountForTests::pool_definition_init(), - AccountForTests::vault_a_init(), - AccountForTests::vault_b_init(), - AccountForTests::pool_lp_init(), - AccountForTests::user_holding_a(), - AccountForTests::user_holding_b(), - AccountForTests::user_holding_lp_init(), + AccountWithMetadataForTests::pool_definition_init(), + AccountWithMetadataForTests::vault_a_init(), + AccountWithMetadataForTests::vault_b_init(), + AccountWithMetadataForTests::pool_lp_init(), + 
AccountWithMetadataForTests::user_holding_a(), + AccountWithMetadataForTests::user_holding_b(), + AccountWithMetadataForTests::user_holding_lp_init(), NonZero::new(BalanceForTests::remove_amount_lp()).unwrap(), BalanceForTests::remove_min_amount_a(), BalanceForTests::remove_min_amount_b_low(), @@ -1382,7 +2220,10 @@ fn test_call_remove_liquidity_chained_call_successful() { let pool_post = post_states[0].clone(); - assert!(AccountForTests::pool_definition_remove_successful().account == *pool_post.account()); + assert!( + AccountWithMetadataForTests::pool_definition_remove_successful().account + == *pool_post.account() + ); let chained_call_lp = chained_calls[0].clone(); let chained_call_b = chained_calls[1].clone(); @@ -1395,15 +2236,15 @@ fn test_call_remove_liquidity_chained_call_successful() { #[should_panic(expected = "Balances must be nonzero")] #[test] -fn test_call_new_definition_with_zero_balance_1() { +fn call_new_definition_with_zero_balance_1() { let _post_states = new_definition( - AccountForTests::pool_definition_init(), - AccountForTests::vault_a_init(), - AccountForTests::vault_b_init(), - AccountForTests::pool_lp_init(), - AccountForTests::user_holding_a(), - AccountForTests::user_holding_b(), - AccountForTests::user_holding_lp_uninit(), + AccountWithMetadataForTests::pool_definition_init(), + AccountWithMetadataForTests::vault_a_init(), + AccountWithMetadataForTests::vault_b_init(), + AccountWithMetadataForTests::pool_lp_init(), + AccountWithMetadataForTests::user_holding_a(), + AccountWithMetadataForTests::user_holding_b(), + AccountWithMetadataForTests::user_holding_lp_uninit(), NonZero::new(0).expect("Balances must be nonzero"), NonZero::new(BalanceForTests::vault_b_reserve_init()).unwrap(), AMM_PROGRAM_ID, @@ -1412,15 +2253,15 @@ fn test_call_new_definition_with_zero_balance_1() { #[should_panic(expected = "Balances must be nonzero")] #[test] -fn test_call_new_definition_with_zero_balance_2() { +fn call_new_definition_with_zero_balance_2() { let 
_post_states = new_definition( - AccountForTests::pool_definition_init(), - AccountForTests::vault_a_init(), - AccountForTests::vault_b_init(), - AccountForTests::pool_lp_init(), - AccountForTests::user_holding_a(), - AccountForTests::user_holding_b(), - AccountForTests::user_holding_lp_uninit(), + AccountWithMetadataForTests::pool_definition_init(), + AccountWithMetadataForTests::vault_a_init(), + AccountWithMetadataForTests::vault_b_init(), + AccountWithMetadataForTests::pool_lp_init(), + AccountWithMetadataForTests::user_holding_a(), + AccountWithMetadataForTests::user_holding_b(), + AccountWithMetadataForTests::user_holding_lp_uninit(), NonZero::new(BalanceForTests::vault_a_reserve_init()).unwrap(), NonZero::new(0).expect("Balances must be nonzero"), AMM_PROGRAM_ID, @@ -1429,15 +2270,15 @@ fn test_call_new_definition_with_zero_balance_2() { #[should_panic(expected = "Cannot set up a swap for a token with itself")] #[test] -fn test_call_new_definition_same_token_definition() { +fn call_new_definition_same_token_definition() { let _post_states = new_definition( - AccountForTests::pool_definition_init(), - AccountForTests::vault_a_init(), - AccountForTests::vault_b_init(), - AccountForTests::pool_lp_init(), - AccountForTests::user_holding_a(), - AccountForTests::user_holding_a(), - AccountForTests::user_holding_lp_uninit(), + AccountWithMetadataForTests::pool_definition_init(), + AccountWithMetadataForTests::vault_a_init(), + AccountWithMetadataForTests::vault_b_init(), + AccountWithMetadataForTests::pool_lp_init(), + AccountWithMetadataForTests::user_holding_a(), + AccountWithMetadataForTests::user_holding_a(), + AccountWithMetadataForTests::user_holding_lp_uninit(), NonZero::new(BalanceForTests::vault_a_reserve_init()).unwrap(), NonZero::new(BalanceForTests::vault_b_reserve_init()).unwrap(), AMM_PROGRAM_ID, @@ -1446,15 +2287,15 @@ fn test_call_new_definition_same_token_definition() { #[should_panic(expected = "Liquidity pool Token Definition Account ID does not 
match PDA")] #[test] -fn test_call_new_definition_wrong_liquidity_id() { +fn call_new_definition_wrong_liquidity_id() { let _post_states = new_definition( - AccountForTests::pool_definition_init(), - AccountForTests::vault_a_init(), - AccountForTests::vault_b_init(), - AccountForTests::pool_lp_with_wrong_id(), - AccountForTests::user_holding_a(), - AccountForTests::user_holding_b(), - AccountForTests::user_holding_lp_uninit(), + AccountWithMetadataForTests::pool_definition_init(), + AccountWithMetadataForTests::vault_a_init(), + AccountWithMetadataForTests::vault_b_init(), + AccountWithMetadataForTests::pool_lp_with_wrong_id(), + AccountWithMetadataForTests::user_holding_a(), + AccountWithMetadataForTests::user_holding_b(), + AccountWithMetadataForTests::user_holding_lp_uninit(), NonZero::new(BalanceForTests::vault_a_reserve_init()).unwrap(), NonZero::new(BalanceForTests::vault_b_reserve_init()).unwrap(), AMM_PROGRAM_ID, @@ -1463,15 +2304,15 @@ fn test_call_new_definition_wrong_liquidity_id() { #[should_panic(expected = "Pool Definition Account ID does not match PDA")] #[test] -fn test_call_new_definition_wrong_pool_id() { +fn call_new_definition_wrong_pool_id() { let _post_states = new_definition( - AccountForTests::pool_definition_with_wrong_id(), - AccountForTests::vault_a_init(), - AccountForTests::vault_b_init(), - AccountForTests::pool_lp_init(), - AccountForTests::user_holding_a(), - AccountForTests::user_holding_b(), - AccountForTests::user_holding_lp_uninit(), + AccountWithMetadataForTests::pool_definition_with_wrong_id(), + AccountWithMetadataForTests::vault_a_init(), + AccountWithMetadataForTests::vault_b_init(), + AccountWithMetadataForTests::pool_lp_init(), + AccountWithMetadataForTests::user_holding_a(), + AccountWithMetadataForTests::user_holding_b(), + AccountWithMetadataForTests::user_holding_lp_uninit(), NonZero::new(BalanceForTests::vault_a_reserve_init()).unwrap(), NonZero::new(BalanceForTests::vault_b_reserve_init()).unwrap(), AMM_PROGRAM_ID, 
@@ -1480,15 +2321,15 @@ fn test_call_new_definition_wrong_pool_id() { #[should_panic(expected = "Vault ID does not match PDA")] #[test] -fn test_call_new_definition_wrong_vault_id_1() { +fn call_new_definition_wrong_vault_id_1() { let _post_states = new_definition( - AccountForTests::pool_definition_init(), - AccountForTests::vault_a_with_wrong_id(), - AccountForTests::vault_b_init(), - AccountForTests::pool_lp_init(), - AccountForTests::user_holding_a(), - AccountForTests::user_holding_b(), - AccountForTests::user_holding_lp_uninit(), + AccountWithMetadataForTests::pool_definition_init(), + AccountWithMetadataForTests::vault_a_with_wrong_id(), + AccountWithMetadataForTests::vault_b_init(), + AccountWithMetadataForTests::pool_lp_init(), + AccountWithMetadataForTests::user_holding_a(), + AccountWithMetadataForTests::user_holding_b(), + AccountWithMetadataForTests::user_holding_lp_uninit(), NonZero::new(BalanceForTests::vault_a_reserve_init()).unwrap(), NonZero::new(BalanceForTests::vault_b_reserve_init()).unwrap(), AMM_PROGRAM_ID, @@ -1497,15 +2338,15 @@ fn test_call_new_definition_wrong_vault_id_1() { #[should_panic(expected = "Vault ID does not match PDA")] #[test] -fn test_call_new_definition_wrong_vault_id_2() { +fn call_new_definition_wrong_vault_id_2() { let _post_states = new_definition( - AccountForTests::pool_definition_init(), - AccountForTests::vault_a_init(), - AccountForTests::vault_b_with_wrong_id(), - AccountForTests::pool_lp_init(), - AccountForTests::user_holding_a(), - AccountForTests::user_holding_b(), - AccountForTests::user_holding_lp_uninit(), + AccountWithMetadataForTests::pool_definition_init(), + AccountWithMetadataForTests::vault_a_init(), + AccountWithMetadataForTests::vault_b_with_wrong_id(), + AccountWithMetadataForTests::pool_lp_init(), + AccountWithMetadataForTests::user_holding_a(), + AccountWithMetadataForTests::user_holding_b(), + AccountWithMetadataForTests::user_holding_lp_uninit(), 
NonZero::new(BalanceForTests::vault_a_reserve_init()).unwrap(), NonZero::new(BalanceForTests::vault_b_reserve_init()).unwrap(), AMM_PROGRAM_ID, @@ -1514,15 +2355,15 @@ fn test_call_new_definition_wrong_vault_id_2() { #[should_panic(expected = "Cannot initialize an active Pool Definition")] #[test] -fn test_call_new_definition_cannot_initialize_active_pool() { +fn call_new_definition_cannot_initialize_active_pool() { let _post_states = new_definition( - AccountForTests::pool_definition_active(), - AccountForTests::vault_a_init(), - AccountForTests::vault_b_init(), - AccountForTests::pool_lp_init(), - AccountForTests::user_holding_a(), - AccountForTests::user_holding_b(), - AccountForTests::user_holding_lp_uninit(), + AccountWithMetadataForTests::pool_definition_active(), + AccountWithMetadataForTests::vault_a_init(), + AccountWithMetadataForTests::vault_b_init(), + AccountWithMetadataForTests::pool_lp_init(), + AccountWithMetadataForTests::user_holding_a(), + AccountWithMetadataForTests::user_holding_b(), + AccountWithMetadataForTests::user_holding_lp_uninit(), NonZero::new(BalanceForTests::vault_a_reserve_init()).unwrap(), NonZero::new(BalanceForTests::vault_b_reserve_init()).unwrap(), AMM_PROGRAM_ID, @@ -1531,15 +2372,15 @@ fn test_call_new_definition_cannot_initialize_active_pool() { #[should_panic(expected = "Cannot initialize an active Pool Definition")] #[test] -fn test_call_new_definition_chained_call_successful() { +fn call_new_definition_chained_call_successful() { let (post_states, chained_calls) = new_definition( - AccountForTests::pool_definition_active(), - AccountForTests::vault_a_init(), - AccountForTests::vault_b_init(), - AccountForTests::pool_lp_init(), - AccountForTests::user_holding_a(), - AccountForTests::user_holding_b(), - AccountForTests::user_holding_lp_uninit(), + AccountWithMetadataForTests::pool_definition_active(), + AccountWithMetadataForTests::vault_a_init(), + AccountWithMetadataForTests::vault_b_init(), + 
AccountWithMetadataForTests::pool_lp_init(), + AccountWithMetadataForTests::user_holding_a(), + AccountWithMetadataForTests::user_holding_b(), + AccountWithMetadataForTests::user_holding_lp_uninit(), NonZero::new(BalanceForTests::vault_a_reserve_init()).unwrap(), NonZero::new(BalanceForTests::vault_b_reserve_init()).unwrap(), AMM_PROGRAM_ID, @@ -1547,7 +2388,10 @@ fn test_call_new_definition_chained_call_successful() { let pool_post = post_states[0].clone(); - assert!(AccountForTests::pool_definition_add_successful().account == *pool_post.account()); + assert!( + AccountWithMetadataForTests::pool_definition_add_successful().account + == *pool_post.account() + ); let chained_call_lp = chained_calls[0].clone(); let chained_call_b = chained_calls[1].clone(); @@ -1560,13 +2404,13 @@ fn test_call_new_definition_chained_call_successful() { #[should_panic(expected = "AccountId is not a token type for the pool")] #[test] -fn test_call_swap_incorrect_token_type() { +fn call_swap_incorrect_token_type() { let _post_states = swap( - AccountForTests::pool_definition_init(), - AccountForTests::vault_a_init(), - AccountForTests::vault_b_init(), - AccountForTests::user_holding_a(), - AccountForTests::user_holding_b(), + AccountWithMetadataForTests::pool_definition_init(), + AccountWithMetadataForTests::vault_a_init(), + AccountWithMetadataForTests::vault_b_init(), + AccountWithMetadataForTests::user_holding_a(), + AccountWithMetadataForTests::user_holding_b(), BalanceForTests::add_max_amount_a(), BalanceForTests::min_amount_out(), IdForTests::token_lp_definition_id(), @@ -1575,13 +2419,13 @@ fn test_call_swap_incorrect_token_type() { #[should_panic(expected = "Vault A was not provided")] #[test] -fn test_call_swap_vault_a_omitted() { +fn call_swap_vault_a_omitted() { let _post_states = swap( - AccountForTests::pool_definition_init(), - AccountForTests::vault_a_with_wrong_id(), - AccountForTests::vault_b_init(), - AccountForTests::user_holding_a(), - 
AccountForTests::user_holding_b(), + AccountWithMetadataForTests::pool_definition_init(), + AccountWithMetadataForTests::vault_a_with_wrong_id(), + AccountWithMetadataForTests::vault_b_init(), + AccountWithMetadataForTests::user_holding_a(), + AccountWithMetadataForTests::user_holding_b(), BalanceForTests::add_max_amount_a(), BalanceForTests::min_amount_out(), IdForTests::token_a_definition_id(), @@ -1590,13 +2434,13 @@ fn test_call_swap_vault_a_omitted() { #[should_panic(expected = "Vault B was not provided")] #[test] -fn test_call_swap_vault_b_omitted() { +fn call_swap_vault_b_omitted() { let _post_states = swap( - AccountForTests::pool_definition_init(), - AccountForTests::vault_a_init(), - AccountForTests::vault_b_with_wrong_id(), - AccountForTests::user_holding_a(), - AccountForTests::user_holding_b(), + AccountWithMetadataForTests::pool_definition_init(), + AccountWithMetadataForTests::vault_a_init(), + AccountWithMetadataForTests::vault_b_with_wrong_id(), + AccountWithMetadataForTests::user_holding_a(), + AccountWithMetadataForTests::user_holding_b(), BalanceForTests::add_max_amount_a(), BalanceForTests::min_amount_out(), IdForTests::token_a_definition_id(), @@ -1605,13 +2449,13 @@ fn test_call_swap_vault_b_omitted() { #[should_panic(expected = "Reserve for Token A exceeds vault balance")] #[test] -fn test_call_swap_reserves_vault_mismatch_1() { +fn call_swap_reserves_vault_mismatch_1() { let _post_states = swap( - AccountForTests::pool_definition_init(), - AccountForTests::vault_a_init_low(), - AccountForTests::vault_b_init(), - AccountForTests::user_holding_a(), - AccountForTests::user_holding_b(), + AccountWithMetadataForTests::pool_definition_init(), + AccountWithMetadataForTests::vault_a_init_low(), + AccountWithMetadataForTests::vault_b_init(), + AccountWithMetadataForTests::user_holding_a(), + AccountWithMetadataForTests::user_holding_b(), BalanceForTests::add_max_amount_a(), BalanceForTests::min_amount_out(), IdForTests::token_a_definition_id(), @@ 
-1620,13 +2464,13 @@ fn test_call_swap_reserves_vault_mismatch_1() { #[should_panic(expected = "Reserve for Token B exceeds vault balance")] #[test] -fn test_call_swap_reserves_vault_mismatch_2() { +fn call_swap_reserves_vault_mismatch_2() { let _post_states = swap( - AccountForTests::pool_definition_init(), - AccountForTests::vault_a_init(), - AccountForTests::vault_b_init_low(), - AccountForTests::user_holding_a(), - AccountForTests::user_holding_b(), + AccountWithMetadataForTests::pool_definition_init(), + AccountWithMetadataForTests::vault_a_init(), + AccountWithMetadataForTests::vault_b_init_low(), + AccountWithMetadataForTests::user_holding_a(), + AccountWithMetadataForTests::user_holding_b(), BalanceForTests::add_max_amount_a(), BalanceForTests::min_amount_out(), IdForTests::token_a_definition_id(), @@ -1635,13 +2479,13 @@ fn test_call_swap_reserves_vault_mismatch_2() { #[should_panic(expected = "Pool is inactive")] #[test] -fn test_call_swap_ianctive() { +fn call_swap_ianctive() { let _post_states = swap( - AccountForTests::pool_definition_inactive(), - AccountForTests::vault_a_init(), - AccountForTests::vault_b_init(), - AccountForTests::user_holding_a(), - AccountForTests::user_holding_b(), + AccountWithMetadataForTests::pool_definition_inactive(), + AccountWithMetadataForTests::vault_a_init(), + AccountWithMetadataForTests::vault_b_init(), + AccountWithMetadataForTests::user_holding_a(), + AccountWithMetadataForTests::user_holding_b(), BalanceForTests::add_max_amount_a(), BalanceForTests::min_amount_out(), IdForTests::token_a_definition_id(), @@ -1650,13 +2494,13 @@ fn test_call_swap_ianctive() { #[should_panic(expected = "Withdraw amount is less than minimal amount out")] #[test] -fn test_call_swap_below_min_out() { +fn call_swap_below_min_out() { let _post_states = swap( - AccountForTests::pool_definition_init(), - AccountForTests::vault_a_init(), - AccountForTests::vault_b_init(), - AccountForTests::user_holding_a(), - 
AccountForTests::user_holding_b(), + AccountWithMetadataForTests::pool_definition_init(), + AccountWithMetadataForTests::vault_a_init(), + AccountWithMetadataForTests::vault_b_init(), + AccountWithMetadataForTests::user_holding_a(), + AccountWithMetadataForTests::user_holding_b(), BalanceForTests::add_max_amount_a(), BalanceForTests::min_amount_out(), IdForTests::token_a_definition_id(), @@ -1664,13 +2508,13 @@ fn test_call_swap_below_min_out() { } #[test] -fn test_call_swap_chained_call_successful_1() { +fn call_swap_chained_call_successful_1() { let (post_states, chained_calls) = swap( - AccountForTests::pool_definition_init(), - AccountForTests::vault_a_init(), - AccountForTests::vault_b_init(), - AccountForTests::user_holding_a(), - AccountForTests::user_holding_b(), + AccountWithMetadataForTests::pool_definition_init(), + AccountWithMetadataForTests::vault_a_init(), + AccountWithMetadataForTests::vault_b_init(), + AccountWithMetadataForTests::user_holding_a(), + AccountWithMetadataForTests::user_holding_b(), BalanceForTests::add_max_amount_a(), BalanceForTests::add_max_amount_a_low(), IdForTests::token_a_definition_id(), @@ -1678,7 +2522,9 @@ fn test_call_swap_chained_call_successful_1() { let pool_post = post_states[0].clone(); - assert!(AccountForTests::pool_definition_swap_test_1().account == *pool_post.account()); + assert!( + AccountWithMetadataForTests::pool_definition_swap_test_1().account == *pool_post.account() + ); let chained_call_a = chained_calls[0].clone(); let chained_call_b = chained_calls[1].clone(); @@ -1694,13 +2540,13 @@ fn test_call_swap_chained_call_successful_1() { } #[test] -fn test_call_swap_chained_call_successful_2() { +fn call_swap_chained_call_successful_2() { let (post_states, chained_calls) = swap( - AccountForTests::pool_definition_init(), - AccountForTests::vault_a_init(), - AccountForTests::vault_b_init(), - AccountForTests::user_holding_a(), - AccountForTests::user_holding_b(), + 
AccountWithMetadataForTests::pool_definition_init(), + AccountWithMetadataForTests::vault_a_init(), + AccountWithMetadataForTests::vault_b_init(), + AccountWithMetadataForTests::user_holding_a(), + AccountWithMetadataForTests::user_holding_b(), BalanceForTests::add_max_amount_b(), BalanceForTests::min_amount_out(), IdForTests::token_b_definition_id(), @@ -1708,7 +2554,9 @@ fn test_call_swap_chained_call_successful_2() { let pool_post = post_states[0].clone(); - assert!(AccountForTests::pool_definition_swap_test_2().account == *pool_post.account()); + assert!( + AccountWithMetadataForTests::pool_definition_swap_test_2().account == *pool_post.account() + ); let chained_call_a = chained_calls[1].clone(); let chained_call_b = chained_calls[0].clone(); @@ -1724,15 +2572,15 @@ fn test_call_swap_chained_call_successful_2() { } #[test] -fn test_new_definition_lp_asymmetric_amounts() { +fn new_definition_lp_asymmetric_amounts() { let (post_states, chained_calls) = new_definition( - AccountForTests::pool_definition_inactive(), - AccountForTests::vault_a_init(), - AccountForTests::vault_b_init(), - AccountForTests::pool_lp_init(), - AccountForTests::user_holding_a(), - AccountForTests::user_holding_b(), - AccountForTests::user_holding_lp_uninit(), + AccountWithMetadataForTests::pool_definition_inactive(), + AccountWithMetadataForTests::vault_a_init(), + AccountWithMetadataForTests::vault_b_init(), + AccountWithMetadataForTests::pool_lp_init(), + AccountWithMetadataForTests::user_holding_a(), + AccountWithMetadataForTests::user_holding_b(), + AccountWithMetadataForTests::user_holding_lp_uninit(), NonZero::new(BalanceForTests::vault_a_reserve_init()).unwrap(), NonZero::new(BalanceForTests::vault_b_reserve_init()).unwrap(), AMM_PROGRAM_ID, @@ -1751,21 +2599,21 @@ fn test_new_definition_lp_asymmetric_amounts() { } #[test] -fn test_new_definition_lp_symmetric_amounts() { +fn new_definition_lp_symmetric_amounts() { // token_a=100, token_b=100 → LP=sqrt(10_000)=100 - let 
token_a_amount = 100u128; - let token_b_amount = 100u128; + let token_a_amount = 100_u128; + let token_b_amount = 100_u128; let expected_lp = (token_a_amount * token_b_amount).isqrt(); assert_eq!(expected_lp, 100); let (post_states, chained_calls) = new_definition( - AccountForTests::pool_definition_inactive(), - AccountForTests::vault_a_init(), - AccountForTests::vault_b_init(), - AccountForTests::pool_lp_init(), - AccountForTests::user_holding_a(), - AccountForTests::user_holding_b(), - AccountForTests::user_holding_lp_uninit(), + AccountWithMetadataForTests::pool_definition_inactive(), + AccountWithMetadataForTests::vault_a_init(), + AccountWithMetadataForTests::vault_b_init(), + AccountWithMetadataForTests::pool_lp_init(), + AccountWithMetadataForTests::user_holding_a(), + AccountWithMetadataForTests::user_holding_b(), + AccountWithMetadataForTests::user_holding_lp_uninit(), NonZero::new(token_a_amount).unwrap(), NonZero::new(token_b_amount).unwrap(), AMM_PROGRAM_ID, @@ -1779,8 +2627,8 @@ fn test_new_definition_lp_symmetric_amounts() { let expected_lp_call = ChainedCall::new( TOKEN_PROGRAM_ID, vec![ - AccountForTests::pool_lp_init(), - AccountForTests::user_holding_lp_uninit(), + AccountWithMetadataForTests::pool_lp_init(), + AccountWithMetadataForTests::user_holding_lp_uninit(), ], &token_core::Instruction::Mint { amount_to_mint: expected_lp, @@ -1792,3 +2640,535 @@ fn test_new_definition_lp_symmetric_amounts() { assert_eq!(chained_call_lp, expected_lp_call); } + +#[cfg(feature = "nssa")] +fn state_for_amm_tests() -> V02State { + let initial_data = []; + let mut state = V02State::new_with_genesis_accounts(&initial_data, &[]); + state.force_insert_account( + IdForExeTests::pool_definition_id(), + AccountsForExeTests::pool_definition_init(), + ); + state.force_insert_account( + IdForExeTests::token_a_definition_id(), + AccountsForExeTests::token_a_definition_account(), + ); + state.force_insert_account( + IdForExeTests::token_b_definition_id(), + 
AccountsForExeTests::token_b_definition_acc(), + ); + state.force_insert_account( + IdForExeTests::token_lp_definition_id(), + AccountsForExeTests::token_lp_definition_acc(), + ); + state.force_insert_account( + IdForExeTests::user_token_a_id(), + AccountsForExeTests::user_token_a_holding(), + ); + state.force_insert_account( + IdForExeTests::user_token_b_id(), + AccountsForExeTests::user_token_b_holding(), + ); + state.force_insert_account( + IdForExeTests::user_token_lp_id(), + AccountsForExeTests::user_token_lp_holding(), + ); + state.force_insert_account( + IdForExeTests::vault_a_id(), + AccountsForExeTests::vault_a_init(), + ); + state.force_insert_account( + IdForExeTests::vault_b_id(), + AccountsForExeTests::vault_b_init(), + ); + + state +} + +#[cfg(feature = "nssa")] +fn state_for_amm_tests_with_new_def() -> V02State { + let initial_data = []; + let mut state = V02State::new_with_genesis_accounts(&initial_data, &[]); + state.force_insert_account( + IdForExeTests::token_a_definition_id(), + AccountsForExeTests::token_a_definition_account(), + ); + state.force_insert_account( + IdForExeTests::token_b_definition_id(), + AccountsForExeTests::token_b_definition_acc(), + ); + state.force_insert_account( + IdForExeTests::user_token_a_id(), + AccountsForExeTests::user_token_a_holding(), + ); + state.force_insert_account( + IdForExeTests::user_token_b_id(), + AccountsForExeTests::user_token_b_holding(), + ); + state +} + +#[cfg(feature = "nssa")] +#[test] +fn simple_amm_remove() { + let mut state = state_for_amm_tests(); + + let instruction = amm_core::Instruction::RemoveLiquidity { + remove_liquidity_amount: BalanceForExeTests::remove_lp(), + min_amount_to_remove_token_a: BalanceForExeTests::remove_min_amount_a(), + min_amount_to_remove_token_b: BalanceForExeTests::remove_min_amount_b(), + }; + + let message = public_transaction::Message::try_new( + Program::amm().id(), + vec![ + IdForExeTests::pool_definition_id(), + IdForExeTests::vault_a_id(), + 
IdForExeTests::vault_b_id(), + IdForExeTests::token_lp_definition_id(), + IdForExeTests::user_token_a_id(), + IdForExeTests::user_token_b_id(), + IdForExeTests::user_token_lp_id(), + ], + vec![0_u128.into()], + instruction, + ) + .unwrap(); + + let witness_set = public_transaction::WitnessSet::for_message( + &message, + &[&PrivateKeysForTests::user_token_lp_key()], + ); + + let tx = PublicTransaction::new(message, witness_set); + state.transition_from_public_transaction(&tx).unwrap(); + + let pool_post = state.get_account_by_id(IdForExeTests::pool_definition_id()); + let vault_a_post = state.get_account_by_id(IdForExeTests::vault_a_id()); + let vault_b_post = state.get_account_by_id(IdForExeTests::vault_b_id()); + let token_lp_post = state.get_account_by_id(IdForExeTests::token_lp_definition_id()); + let user_token_a_post = state.get_account_by_id(IdForExeTests::user_token_a_id()); + let user_token_b_post = state.get_account_by_id(IdForExeTests::user_token_b_id()); + let user_token_lp_post = state.get_account_by_id(IdForExeTests::user_token_lp_id()); + + let expected_pool = AccountsForExeTests::pool_definition_remove(); + let expected_vault_a = AccountsForExeTests::vault_a_remove(); + let expected_vault_b = AccountsForExeTests::vault_b_remove(); + let expected_token_lp = AccountsForExeTests::token_lp_definition_remove(); + let expected_user_token_a = AccountsForExeTests::user_token_a_holding_remove(); + let expected_user_token_b = AccountsForExeTests::user_token_b_holding_remove(); + let expected_user_token_lp = AccountsForExeTests::user_token_lp_holding_remove(); + + assert_eq!(pool_post, expected_pool); + assert_eq!(vault_a_post, expected_vault_a); + assert_eq!(vault_b_post, expected_vault_b); + assert_eq!(token_lp_post, expected_token_lp); + assert_eq!(user_token_a_post, expected_user_token_a); + assert_eq!(user_token_b_post, expected_user_token_b); + assert_eq!(user_token_lp_post, expected_user_token_lp); +} + +#[cfg(feature = "nssa")] +#[test] +fn 
simple_amm_new_definition_inactive_initialized_pool_and_uninit_user_lp() { + let mut state = state_for_amm_tests_with_new_def(); + + // Uninitialized in constructor + state.force_insert_account( + IdForExeTests::vault_a_id(), + AccountsForExeTests::vault_a_init_inactive(), + ); + state.force_insert_account( + IdForExeTests::vault_b_id(), + AccountsForExeTests::vault_b_init_inactive(), + ); + state.force_insert_account( + IdForExeTests::pool_definition_id(), + AccountsForExeTests::pool_definition_inactive(), + ); + state.force_insert_account( + IdForExeTests::token_lp_definition_id(), + AccountsForExeTests::token_lp_definition_init_inactive(), + ); + + let instruction = amm_core::Instruction::NewDefinition { + token_a_amount: BalanceForExeTests::vault_a_balance_init(), + token_b_amount: BalanceForExeTests::vault_b_balance_init(), + amm_program_id: Program::amm().id(), + }; + + let message = public_transaction::Message::try_new( + Program::amm().id(), + vec![ + IdForExeTests::pool_definition_id(), + IdForExeTests::vault_a_id(), + IdForExeTests::vault_b_id(), + IdForExeTests::token_lp_definition_id(), + IdForExeTests::user_token_a_id(), + IdForExeTests::user_token_b_id(), + IdForExeTests::user_token_lp_id(), + ], + vec![0_u128.into(), 0_u128.into()], + instruction, + ) + .unwrap(); + + let witness_set = public_transaction::WitnessSet::for_message( + &message, + &[ + &PrivateKeysForTests::user_token_a_key(), + &PrivateKeysForTests::user_token_b_key(), + ], + ); + + let tx = PublicTransaction::new(message, witness_set); + state.transition_from_public_transaction(&tx).unwrap(); + + let pool_post = state.get_account_by_id(IdForExeTests::pool_definition_id()); + let vault_a_post = state.get_account_by_id(IdForExeTests::vault_a_id()); + let vault_b_post = state.get_account_by_id(IdForExeTests::vault_b_id()); + let token_lp_post = state.get_account_by_id(IdForExeTests::token_lp_definition_id()); + let user_token_a_post = 
state.get_account_by_id(IdForExeTests::user_token_a_id()); + let user_token_b_post = state.get_account_by_id(IdForExeTests::user_token_b_id()); + let user_token_lp_post = state.get_account_by_id(IdForExeTests::user_token_lp_id()); + + let expected_pool = AccountsForExeTests::pool_definition_new_init(); + let expected_vault_a = AccountsForExeTests::vault_a_init(); + let expected_vault_b = AccountsForExeTests::vault_b_init(); + let expected_token_lp = AccountsForExeTests::token_lp_definition_new_init(); + let expected_user_token_a = AccountsForExeTests::user_token_a_holding_new_init(); + let expected_user_token_b = AccountsForExeTests::user_token_b_holding_new_init(); + let expected_user_token_lp = AccountsForExeTests::user_token_lp_holding_new_init(); + + assert_eq!(pool_post, expected_pool); + assert_eq!(vault_a_post, expected_vault_a); + assert_eq!(vault_b_post, expected_vault_b); + assert_eq!(token_lp_post, expected_token_lp); + assert_eq!(user_token_a_post, expected_user_token_a); + assert_eq!(user_token_b_post, expected_user_token_b); + assert_eq!(user_token_lp_post, expected_user_token_lp); +} + +#[cfg(feature = "nssa")] +#[test] +fn simple_amm_new_definition_inactive_initialized_pool_init_user_lp() { + let mut state = state_for_amm_tests_with_new_def(); + + // Uninitialized in constructor + state.force_insert_account( + IdForExeTests::vault_a_id(), + AccountsForExeTests::vault_a_init_inactive(), + ); + state.force_insert_account( + IdForExeTests::vault_b_id(), + AccountsForExeTests::vault_b_init_inactive(), + ); + state.force_insert_account( + IdForExeTests::pool_definition_id(), + AccountsForExeTests::pool_definition_inactive(), + ); + state.force_insert_account( + IdForExeTests::token_lp_definition_id(), + AccountsForExeTests::token_lp_definition_init_inactive(), + ); + state.force_insert_account( + IdForExeTests::user_token_lp_id(), + AccountsForExeTests::user_token_lp_holding_init_zero(), + ); + + let instruction = amm_core::Instruction::NewDefinition { + 
token_a_amount: BalanceForExeTests::vault_a_balance_init(), + token_b_amount: BalanceForExeTests::vault_b_balance_init(), + amm_program_id: Program::amm().id(), + }; + + let message = public_transaction::Message::try_new( + Program::amm().id(), + vec![ + IdForExeTests::pool_definition_id(), + IdForExeTests::vault_a_id(), + IdForExeTests::vault_b_id(), + IdForExeTests::token_lp_definition_id(), + IdForExeTests::user_token_a_id(), + IdForExeTests::user_token_b_id(), + IdForExeTests::user_token_lp_id(), + ], + vec![0_u128.into(), 0_u128.into()], + instruction, + ) + .unwrap(); + + let witness_set = public_transaction::WitnessSet::for_message( + &message, + &[ + &PrivateKeysForTests::user_token_a_key(), + &PrivateKeysForTests::user_token_b_key(), + ], + ); + + let tx = PublicTransaction::new(message, witness_set); + state.transition_from_public_transaction(&tx).unwrap(); + + let pool_post = state.get_account_by_id(IdForExeTests::pool_definition_id()); + let vault_a_post = state.get_account_by_id(IdForExeTests::vault_a_id()); + let vault_b_post = state.get_account_by_id(IdForExeTests::vault_b_id()); + let token_lp_post = state.get_account_by_id(IdForExeTests::token_lp_definition_id()); + let user_token_a_post = state.get_account_by_id(IdForExeTests::user_token_a_id()); + let user_token_b_post = state.get_account_by_id(IdForExeTests::user_token_b_id()); + let user_token_lp_post = state.get_account_by_id(IdForExeTests::user_token_lp_id()); + + let expected_pool = AccountsForExeTests::pool_definition_new_init(); + let expected_vault_a = AccountsForExeTests::vault_a_init(); + let expected_vault_b = AccountsForExeTests::vault_b_init(); + let expected_token_lp = AccountsForExeTests::token_lp_definition_new_init(); + let expected_user_token_a = AccountsForExeTests::user_token_a_holding_new_init(); + let expected_user_token_b = AccountsForExeTests::user_token_b_holding_new_init(); + let expected_user_token_lp = AccountsForExeTests::user_token_lp_holding_new_init(); + + 
assert_eq!(pool_post, expected_pool); + assert_eq!(vault_a_post, expected_vault_a); + assert_eq!(vault_b_post, expected_vault_b); + assert_eq!(token_lp_post, expected_token_lp); + assert_eq!(user_token_a_post, expected_user_token_a); + assert_eq!(user_token_b_post, expected_user_token_b); + assert_eq!(user_token_lp_post, expected_user_token_lp); +} + +#[cfg(feature = "nssa")] +#[test] +fn simple_amm_new_definition_uninitialized_pool() { + let mut state = state_for_amm_tests_with_new_def(); + + // Uninitialized in constructor + state.force_insert_account( + IdForExeTests::vault_a_id(), + AccountsForExeTests::vault_a_init_inactive(), + ); + state.force_insert_account( + IdForExeTests::vault_b_id(), + AccountsForExeTests::vault_b_init_inactive(), + ); + + let instruction = amm_core::Instruction::NewDefinition { + token_a_amount: BalanceForExeTests::vault_a_balance_init(), + token_b_amount: BalanceForExeTests::vault_b_balance_init(), + amm_program_id: Program::amm().id(), + }; + + let message = public_transaction::Message::try_new( + Program::amm().id(), + vec![ + IdForExeTests::pool_definition_id(), + IdForExeTests::vault_a_id(), + IdForExeTests::vault_b_id(), + IdForExeTests::token_lp_definition_id(), + IdForExeTests::user_token_a_id(), + IdForExeTests::user_token_b_id(), + IdForExeTests::user_token_lp_id(), + ], + vec![0_u128.into(), 0_u128.into()], + instruction, + ) + .unwrap(); + + let witness_set = public_transaction::WitnessSet::for_message( + &message, + &[ + &PrivateKeysForTests::user_token_a_key(), + &PrivateKeysForTests::user_token_b_key(), + ], + ); + + let tx = PublicTransaction::new(message, witness_set); + state.transition_from_public_transaction(&tx).unwrap(); + + let pool_post = state.get_account_by_id(IdForExeTests::pool_definition_id()); + let vault_a_post = state.get_account_by_id(IdForExeTests::vault_a_id()); + let vault_b_post = state.get_account_by_id(IdForExeTests::vault_b_id()); + let token_lp_post = 
state.get_account_by_id(IdForExeTests::token_lp_definition_id()); + let user_token_a_post = state.get_account_by_id(IdForExeTests::user_token_a_id()); + let user_token_b_post = state.get_account_by_id(IdForExeTests::user_token_b_id()); + let user_token_lp_post = state.get_account_by_id(IdForExeTests::user_token_lp_id()); + + let expected_pool = AccountsForExeTests::pool_definition_new_init(); + let expected_vault_a = AccountsForExeTests::vault_a_init(); + let expected_vault_b = AccountsForExeTests::vault_b_init(); + let expected_token_lp = AccountsForExeTests::token_lp_definition_new_init(); + let expected_user_token_a = AccountsForExeTests::user_token_a_holding_new_init(); + let expected_user_token_b = AccountsForExeTests::user_token_b_holding_new_init(); + let expected_user_token_lp = AccountsForExeTests::user_token_lp_holding_new_init(); + + assert_eq!(pool_post, expected_pool); + assert_eq!(vault_a_post, expected_vault_a); + assert_eq!(vault_b_post, expected_vault_b); + assert_eq!(token_lp_post, expected_token_lp); + assert_eq!(user_token_a_post, expected_user_token_a); + assert_eq!(user_token_b_post, expected_user_token_b); + assert_eq!(user_token_lp_post, expected_user_token_lp); +} + +#[cfg(feature = "nssa")] +#[test] +fn simple_amm_add() { + let mut state = state_for_amm_tests(); + + let instruction = amm_core::Instruction::AddLiquidity { + min_amount_liquidity: BalanceForExeTests::add_min_amount_lp(), + max_amount_to_add_token_a: BalanceForExeTests::add_max_amount_a(), + max_amount_to_add_token_b: BalanceForExeTests::add_max_amount_b(), + }; + + let message = public_transaction::Message::try_new( + Program::amm().id(), + vec![ + IdForExeTests::pool_definition_id(), + IdForExeTests::vault_a_id(), + IdForExeTests::vault_b_id(), + IdForExeTests::token_lp_definition_id(), + IdForExeTests::user_token_a_id(), + IdForExeTests::user_token_b_id(), + IdForExeTests::user_token_lp_id(), + ], + vec![0_u128.into(), 0_u128.into()], + instruction, + ) + .unwrap(); + + let 
witness_set = public_transaction::WitnessSet::for_message( + &message, + &[ + &PrivateKeysForTests::user_token_a_key(), + &PrivateKeysForTests::user_token_b_key(), + ], + ); + + let tx = PublicTransaction::new(message, witness_set); + state.transition_from_public_transaction(&tx).unwrap(); + + let pool_post = state.get_account_by_id(IdForExeTests::pool_definition_id()); + let vault_a_post = state.get_account_by_id(IdForExeTests::vault_a_id()); + let vault_b_post = state.get_account_by_id(IdForExeTests::vault_b_id()); + let token_lp_post = state.get_account_by_id(IdForExeTests::token_lp_definition_id()); + let user_token_a_post = state.get_account_by_id(IdForExeTests::user_token_a_id()); + let user_token_b_post = state.get_account_by_id(IdForExeTests::user_token_b_id()); + let user_token_lp_post = state.get_account_by_id(IdForExeTests::user_token_lp_id()); + + let expected_pool = AccountsForExeTests::pool_definition_add(); + let expected_vault_a = AccountsForExeTests::vault_a_add(); + let expected_vault_b = AccountsForExeTests::vault_b_add(); + let expected_token_lp = AccountsForExeTests::token_lp_definition_add(); + let expected_user_token_a = AccountsForExeTests::user_token_a_holding_add(); + let expected_user_token_b = AccountsForExeTests::user_token_b_holding_add(); + let expected_user_token_lp = AccountsForExeTests::user_token_lp_holding_add(); + + assert_eq!(pool_post, expected_pool); + assert_eq!(vault_a_post, expected_vault_a); + assert_eq!(vault_b_post, expected_vault_b); + assert_eq!(token_lp_post, expected_token_lp); + assert_eq!(user_token_a_post, expected_user_token_a); + assert_eq!(user_token_b_post, expected_user_token_b); + assert_eq!(user_token_lp_post, expected_user_token_lp); +} + +#[cfg(feature = "nssa")] +#[test] +fn simple_amm_swap_1() { + let mut state = state_for_amm_tests(); + + let instruction = amm_core::Instruction::Swap { + swap_amount_in: BalanceForExeTests::swap_amount_in(), + min_amount_out: BalanceForExeTests::swap_min_amount_out(), 
+ token_definition_id_in: IdForExeTests::token_b_definition_id(), + }; + + let message = public_transaction::Message::try_new( + Program::amm().id(), + vec![ + IdForExeTests::pool_definition_id(), + IdForExeTests::vault_a_id(), + IdForExeTests::vault_b_id(), + IdForExeTests::user_token_a_id(), + IdForExeTests::user_token_b_id(), + ], + vec![0_u128.into()], + instruction, + ) + .unwrap(); + + let witness_set = public_transaction::WitnessSet::for_message( + &message, + &[&PrivateKeysForTests::user_token_b_key()], + ); + + let tx = PublicTransaction::new(message, witness_set); + state.transition_from_public_transaction(&tx).unwrap(); + + let pool_post = state.get_account_by_id(IdForExeTests::pool_definition_id()); + let vault_a_post = state.get_account_by_id(IdForExeTests::vault_a_id()); + let vault_b_post = state.get_account_by_id(IdForExeTests::vault_b_id()); + let user_token_a_post = state.get_account_by_id(IdForExeTests::user_token_a_id()); + let user_token_b_post = state.get_account_by_id(IdForExeTests::user_token_b_id()); + + let expected_pool = AccountsForExeTests::pool_definition_swap_1(); + let expected_vault_a = AccountsForExeTests::vault_a_swap_1(); + let expected_vault_b = AccountsForExeTests::vault_b_swap_1(); + let expected_user_token_a = AccountsForExeTests::user_token_a_holding_swap_1(); + let expected_user_token_b = AccountsForExeTests::user_token_b_holding_swap_1(); + + assert_eq!(pool_post, expected_pool); + assert_eq!(vault_a_post, expected_vault_a); + assert_eq!(vault_b_post, expected_vault_b); + assert_eq!(user_token_a_post, expected_user_token_a); + assert_eq!(user_token_b_post, expected_user_token_b); +} + +#[cfg(feature = "nssa")] +#[test] +fn simple_amm_swap_2() { + let mut state = state_for_amm_tests(); + + let instruction = amm_core::Instruction::Swap { + swap_amount_in: BalanceForExeTests::swap_amount_in(), + min_amount_out: BalanceForExeTests::swap_min_amount_out(), + token_definition_id_in: IdForExeTests::token_a_definition_id(), + }; + 
let message = public_transaction::Message::try_new( + Program::amm().id(), + vec![ + IdForExeTests::pool_definition_id(), + IdForExeTests::vault_a_id(), + IdForExeTests::vault_b_id(), + IdForExeTests::user_token_a_id(), + IdForExeTests::user_token_b_id(), + ], + vec![0_u128.into()], + instruction, + ) + .unwrap(); + + let witness_set = public_transaction::WitnessSet::for_message( + &message, + &[&PrivateKeysForTests::user_token_a_key()], + ); + + let tx = PublicTransaction::new(message, witness_set); + state.transition_from_public_transaction(&tx).unwrap(); + + let pool_post = state.get_account_by_id(IdForExeTests::pool_definition_id()); + let vault_a_post = state.get_account_by_id(IdForExeTests::vault_a_id()); + let vault_b_post = state.get_account_by_id(IdForExeTests::vault_b_id()); + let user_token_a_post = state.get_account_by_id(IdForExeTests::user_token_a_id()); + let user_token_b_post = state.get_account_by_id(IdForExeTests::user_token_b_id()); + + let expected_pool = AccountsForExeTests::pool_definition_swap_2(); + let expected_vault_a = AccountsForExeTests::vault_a_swap_2(); + let expected_vault_b = AccountsForExeTests::vault_b_swap_2(); + let expected_user_token_a = AccountsForExeTests::user_token_a_holding_swap_2(); + let expected_user_token_b = AccountsForExeTests::user_token_b_holding_swap_2(); + + assert_eq!(pool_post, expected_pool); + assert_eq!(vault_a_post, expected_vault_a); + assert_eq!(vault_b_post, expected_vault_b); + assert_eq!(user_token_a_post, expected_user_token_a); + assert_eq!(user_token_b_post, expected_user_token_b); +} diff --git a/programs/token/Cargo.toml b/programs/token/Cargo.toml index 39beb96a..57c88b7b 100644 --- a/programs/token/Cargo.toml +++ b/programs/token/Cargo.toml @@ -4,6 +4,9 @@ version = "0.1.0" edition = "2024" license = { workspace = true } +[lints] +workspace = true + [dependencies] nssa_core.workspace = true token_core.workspace = true diff --git a/programs/token/core/Cargo.toml b/programs/token/core/Cargo.toml 
index cf61a35f..90df286f 100644 --- a/programs/token/core/Cargo.toml +++ b/programs/token/core/Cargo.toml @@ -4,6 +4,9 @@ version = "0.1.0" edition = "2024" license = { workspace = true } +[lints] +workspace = true + [dependencies] nssa_core.workspace = true serde.workspace = true diff --git a/programs/token/core/src/lib.rs b/programs/token/core/src/lib.rs index 140ae38f..1edbc895 100644 --- a/programs/token/core/src/lib.rs +++ b/programs/token/core/src/lib.rs @@ -29,7 +29,7 @@ pub enum Instruction { /// - Token Metadata account (uninitialized). NewDefinitionWithMetadata { new_definition: NewTokenDefinition, - /// Boxed to avoid large enum variant size + /// Boxed to avoid large enum variant size. metadata: Box, }, @@ -92,7 +92,7 @@ impl TryFrom<&Data> for TokenDefinition { type Error = std::io::Error; fn try_from(data: &Data) -> Result { - TokenDefinition::try_from_slice(data.as_ref()) + Self::try_from_slice(data.as_ref()) } } @@ -104,7 +104,7 @@ impl From<&TokenDefinition> for Data { BorshSerialize::serialize(definition, &mut data) .expect("Serialization to Vec should not fail"); - Data::try_from(data).expect("Token definition encoded data should fit into Data") + Self::try_from(data).expect("Token definition encoded data should fit into Data") } } @@ -127,44 +127,47 @@ pub enum TokenHolding { } impl TokenHolding { - pub fn zeroized_clone_from(other: &Self) -> Self { + #[must_use] + pub const fn zeroized_clone_from(other: &Self) -> Self { match other { - TokenHolding::Fungible { definition_id, .. } => TokenHolding::Fungible { + Self::Fungible { definition_id, .. } => Self::Fungible { definition_id: *definition_id, balance: 0, }, - TokenHolding::NftMaster { definition_id, .. } => TokenHolding::NftMaster { + Self::NftMaster { definition_id, .. } => Self::NftMaster { definition_id: *definition_id, print_balance: 0, }, - TokenHolding::NftPrintedCopy { definition_id, .. } => TokenHolding::NftPrintedCopy { + Self::NftPrintedCopy { definition_id, .. 
} => Self::NftPrintedCopy { definition_id: *definition_id, owned: false, }, } } - pub fn zeroized_from_definition( + #[must_use] + pub const fn zeroized_from_definition( definition_id: AccountId, definition: &TokenDefinition, ) -> Self { match definition { - TokenDefinition::Fungible { .. } => TokenHolding::Fungible { + TokenDefinition::Fungible { .. } => Self::Fungible { definition_id, balance: 0, }, - TokenDefinition::NonFungible { .. } => TokenHolding::NftPrintedCopy { + TokenDefinition::NonFungible { .. } => Self::NftPrintedCopy { definition_id, owned: false, }, } } - pub fn definition_id(&self) -> AccountId { + #[must_use] + pub const fn definition_id(&self) -> AccountId { match self { - TokenHolding::Fungible { definition_id, .. } => *definition_id, - TokenHolding::NftMaster { definition_id, .. } => *definition_id, - TokenHolding::NftPrintedCopy { definition_id, .. } => *definition_id, + Self::Fungible { definition_id, .. } + | Self::NftMaster { definition_id, .. } + | Self::NftPrintedCopy { definition_id, .. } => *definition_id, } } } @@ -173,7 +176,7 @@ impl TryFrom<&Data> for TokenHolding { type Error = std::io::Error; fn try_from(data: &Data) -> Result { - TokenHolding::try_from_slice(data.as_ref()) + Self::try_from_slice(data.as_ref()) } } @@ -185,7 +188,7 @@ impl From<&TokenHolding> for Data { BorshSerialize::serialize(holding, &mut data) .expect("Serialization to Vec should not fail"); - Data::try_from(data).expect("Token holding encoded data should fit into Data") + Self::try_from(data).expect("Token holding encoded data should fit into Data") } } @@ -193,7 +196,7 @@ impl From<&TokenHolding> for Data { pub struct NewTokenMetadata { /// Metadata standard. pub standard: MetadataStandard, - /// Pointer to off-chain metadata + /// Pointer to off-chain metadata. pub uri: String, /// Creators of the token. 
pub creators: String, @@ -224,7 +227,7 @@ impl TryFrom<&Data> for TokenMetadata { type Error = std::io::Error; fn try_from(data: &Data) -> Result { - TokenMetadata::try_from_slice(data.as_ref()) + Self::try_from_slice(data.as_ref()) } } @@ -236,6 +239,6 @@ impl From<&TokenMetadata> for Data { BorshSerialize::serialize(metadata, &mut data) .expect("Serialization to Vec should not fail"); - Data::try_from(data).expect("Token metadata encoded data should fit into Data") + Self::try_from(data).expect("Token metadata encoded data should fit into Data") } } diff --git a/programs/token/src/burn.rs b/programs/token/src/burn.rs index 94637d92..a2468055 100644 --- a/programs/token/src/burn.rs +++ b/programs/token/src/burn.rs @@ -4,6 +4,7 @@ use nssa_core::{ }; use token_core::{TokenDefinition, TokenHolding}; +#[must_use] pub fn burn( definition_account: AccountWithMetadata, user_holding_account: AccountWithMetadata, diff --git a/programs/token/src/initialize.rs b/programs/token/src/initialize.rs index 744fdb64..dc0b612a 100644 --- a/programs/token/src/initialize.rs +++ b/programs/token/src/initialize.rs @@ -4,6 +4,7 @@ use nssa_core::{ }; use token_core::{TokenDefinition, TokenHolding}; +#[must_use] pub fn initialize_account( definition_account: AccountWithMetadata, account_to_initialize: AccountWithMetadata, diff --git a/programs/token/src/mint.rs b/programs/token/src/mint.rs index 2f17cc62..8b157340 100644 --- a/programs/token/src/mint.rs +++ b/programs/token/src/mint.rs @@ -4,6 +4,7 @@ use nssa_core::{ }; use token_core::{TokenDefinition, TokenHolding}; +#[must_use] pub fn mint( definition_account: AccountWithMetadata, user_holding_account: AccountWithMetadata, diff --git a/programs/token/src/new_definition.rs b/programs/token/src/new_definition.rs index b2a9ae9f..8da55dc1 100644 --- a/programs/token/src/new_definition.rs +++ b/programs/token/src/new_definition.rs @@ -6,6 +6,7 @@ use token_core::{ NewTokenDefinition, NewTokenMetadata, TokenDefinition, TokenHolding, 
TokenMetadata, }; +#[must_use] pub fn new_fungible_definition( definition_target_account: AccountWithMetadata, holding_target_account: AccountWithMetadata, @@ -46,6 +47,7 @@ pub fn new_fungible_definition( ] } +#[must_use] pub fn new_definition_with_metadata( definition_target_account: AccountWithMetadata, holding_target_account: AccountWithMetadata, @@ -104,16 +106,16 @@ pub fn new_definition_with_metadata( standard: metadata.standard, uri: metadata.uri, creators: metadata.creators, - primary_sale_date: 0u64, // TODO #261: future works to implement this + primary_sale_date: 0_u64, // TODO #261: future works to implement this }; - let mut definition_target_account_post = definition_target_account.account.clone(); + let mut definition_target_account_post = definition_target_account.account; definition_target_account_post.data = Data::from(&token_definition); - let mut holding_target_account_post = holding_target_account.account.clone(); + let mut holding_target_account_post = holding_target_account.account; holding_target_account_post.data = Data::from(&token_holding); - let mut metadata_target_account_post = metadata_target_account.account.clone(); + let mut metadata_target_account_post = metadata_target_account.account; metadata_target_account_post.data = Data::from(&token_metadata); vec![ diff --git a/programs/token/src/print_nft.rs b/programs/token/src/print_nft.rs index d10533c1..c7177a43 100644 --- a/programs/token/src/print_nft.rs +++ b/programs/token/src/print_nft.rs @@ -4,6 +4,7 @@ use nssa_core::{ }; use token_core::TokenHolding; +#[must_use] pub fn print_nft( master_account: AccountWithMetadata, printed_account: AccountWithMetadata, @@ -36,7 +37,7 @@ pub fn print_nft( *print_balance > 1, "Insufficient balance to print another NFT copy" ); - *print_balance -= 1; + *print_balance = print_balance.checked_sub(1).expect("Checked above"); let mut master_account_post = master_account.account; master_account_post.data = Data::from(&master_account_data); diff 
--git a/programs/token/src/tests.rs b/programs/token/src/tests.rs index cf95c4d4..640d6d76 100644 --- a/programs/token/src/tests.rs +++ b/programs/token/src/tests.rs @@ -1,4 +1,9 @@ #![cfg(test)] +#![expect( + clippy::shadow_unrelated, + clippy::arithmetic_side_effects, + reason = "We don't care about it in tests" +)] use nssa_core::account::{Account, AccountId, AccountWithMetadata, Data}; use token_core::{ @@ -25,14 +30,14 @@ impl AccountForTests { fn definition_account_auth() -> AccountWithMetadata { AccountWithMetadata { account: Account { - program_owner: [5u32; 8], - balance: 0u128, + program_owner: [5_u32; 8], + balance: 0_u128, data: Data::from(&TokenDefinition::Fungible { name: String::from("test"), total_supply: BalanceForTests::init_supply(), metadata_id: None, }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: true, account_id: IdForTests::pool_definition_id(), @@ -42,14 +47,14 @@ impl AccountForTests { fn definition_account_without_auth() -> AccountWithMetadata { AccountWithMetadata { account: Account { - program_owner: [5u32; 8], - balance: 0u128, + program_owner: [5_u32; 8], + balance: 0_u128, data: Data::from(&TokenDefinition::Fungible { name: String::from("test"), total_supply: BalanceForTests::init_supply(), metadata_id: None, }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: false, account_id: IdForTests::pool_definition_id(), @@ -59,13 +64,13 @@ impl AccountForTests { fn holding_different_definition() -> AccountWithMetadata { AccountWithMetadata { account: Account { - program_owner: [5u32; 8], - balance: 0u128, + program_owner: [5_u32; 8], + balance: 0_u128, data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::pool_definition_id_diff(), balance: BalanceForTests::holding_balance(), }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: true, account_id: IdForTests::holding_id(), @@ -75,13 +80,13 @@ impl AccountForTests { fn holding_same_definition_with_authorization() -> AccountWithMetadata { AccountWithMetadata 
{ account: Account { - program_owner: [5u32; 8], - balance: 0u128, + program_owner: [5_u32; 8], + balance: 0_u128, data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::pool_definition_id(), balance: BalanceForTests::holding_balance(), }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: true, account_id: IdForTests::holding_id(), @@ -91,13 +96,13 @@ impl AccountForTests { fn holding_same_definition_without_authorization() -> AccountWithMetadata { AccountWithMetadata { account: Account { - program_owner: [5u32; 8], - balance: 0u128, + program_owner: [5_u32; 8], + balance: 0_u128, data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::pool_definition_id(), balance: BalanceForTests::holding_balance(), }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: false, account_id: IdForTests::holding_id(), @@ -107,13 +112,13 @@ impl AccountForTests { fn holding_same_definition_without_authorization_overflow() -> AccountWithMetadata { AccountWithMetadata { account: Account { - program_owner: [5u32; 8], - balance: 0u128, + program_owner: [5_u32; 8], + balance: 0_u128, data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::pool_definition_id(), balance: BalanceForTests::init_supply(), }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: false, account_id: IdForTests::holding_id(), @@ -123,14 +128,14 @@ impl AccountForTests { fn definition_account_post_burn() -> AccountWithMetadata { AccountWithMetadata { account: Account { - program_owner: [5u32; 8], - balance: 0u128, + program_owner: [5_u32; 8], + balance: 0_u128, data: Data::from(&TokenDefinition::Fungible { name: String::from("test"), total_supply: BalanceForTests::init_supply_burned(), metadata_id: None, }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: true, account_id: IdForTests::pool_definition_id(), @@ -140,13 +145,13 @@ impl AccountForTests { fn holding_account_post_burn() -> AccountWithMetadata { AccountWithMetadata { account: Account { - 
program_owner: [5u32; 8], - balance: 0u128, + program_owner: [5_u32; 8], + balance: 0_u128, data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::pool_definition_id(), balance: BalanceForTests::holding_balance_burned(), }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: false, account_id: IdForTests::holding_id(), @@ -164,13 +169,13 @@ impl AccountForTests { fn init_mint() -> AccountWithMetadata { AccountWithMetadata { account: Account { - program_owner: [0u32; 8], - balance: 0u128, + program_owner: [0_u32; 8], + balance: 0_u128, data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::pool_definition_id(), balance: BalanceForTests::mint_success(), }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: false, account_id: IdForTests::holding_id(), @@ -180,13 +185,13 @@ impl AccountForTests { fn holding_account_same_definition_mint() -> AccountWithMetadata { AccountWithMetadata { account: Account { - program_owner: [5u32; 8], - balance: 0u128, + program_owner: [5_u32; 8], + balance: 0_u128, data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::pool_definition_id(), balance: BalanceForTests::holding_balance_mint(), }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: true, account_id: IdForTests::pool_definition_id(), @@ -196,14 +201,14 @@ impl AccountForTests { fn definition_account_mint() -> AccountWithMetadata { AccountWithMetadata { account: Account { - program_owner: [5u32; 8], - balance: 0u128, + program_owner: [5_u32; 8], + balance: 0_u128, data: Data::from(&TokenDefinition::Fungible { name: String::from("test"), total_supply: BalanceForTests::init_supply_mint(), metadata_id: None, }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: true, account_id: IdForTests::pool_definition_id(), @@ -213,13 +218,13 @@ impl AccountForTests { fn holding_same_definition_with_authorization_and_large_balance() -> AccountWithMetadata { AccountWithMetadata { account: Account { - program_owner: [5u32; 8], - balance: 
0u128, + program_owner: [5_u32; 8], + balance: 0_u128, data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::pool_definition_id(), balance: BalanceForTests::mint_overflow(), }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: true, account_id: IdForTests::pool_definition_id(), @@ -229,14 +234,14 @@ impl AccountForTests { fn definition_account_with_authorization_nonfungible() -> AccountWithMetadata { AccountWithMetadata { account: Account { - program_owner: [5u32; 8], - balance: 0u128, + program_owner: [5_u32; 8], + balance: 0_u128, data: Data::from(&TokenDefinition::NonFungible { name: String::from("test"), printable_supply: BalanceForTests::printable_copies(), metadata_id: AccountId::new([0; 32]), }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: true, account_id: IdForTests::pool_definition_id(), @@ -254,13 +259,13 @@ impl AccountForTests { fn holding_account_init() -> AccountWithMetadata { AccountWithMetadata { account: Account { - program_owner: [5u32; 8], - balance: 0u128, + program_owner: [5_u32; 8], + balance: 0_u128, data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::pool_definition_id(), balance: BalanceForTests::init_supply(), }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: true, account_id: IdForTests::holding_id(), @@ -270,14 +275,14 @@ impl AccountForTests { fn definition_account_unclaimed() -> AccountWithMetadata { AccountWithMetadata { account: Account { - program_owner: [0u32; 8], - balance: 0u128, + program_owner: [0_u32; 8], + balance: 0_u128, data: Data::from(&TokenDefinition::Fungible { name: String::from("test"), total_supply: BalanceForTests::init_supply(), metadata_id: None, }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: true, account_id: IdForTests::pool_definition_id(), @@ -287,13 +292,13 @@ impl AccountForTests { fn holding_account_unclaimed() -> AccountWithMetadata { AccountWithMetadata { account: Account { - program_owner: [0u32; 8], - balance: 0u128, + program_owner: 
[0_u32; 8], + balance: 0_u128, data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::pool_definition_id(), balance: BalanceForTests::init_supply(), }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: true, account_id: IdForTests::holding_id(), @@ -303,13 +308,13 @@ impl AccountForTests { fn holding_account2_init() -> AccountWithMetadata { AccountWithMetadata { account: Account { - program_owner: [5u32; 8], - balance: 0u128, + program_owner: [5_u32; 8], + balance: 0_u128, data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::pool_definition_id(), balance: BalanceForTests::init_supply(), }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: true, account_id: IdForTests::holding_id_2(), @@ -319,13 +324,13 @@ impl AccountForTests { fn holding_account2_init_post_transfer() -> AccountWithMetadata { AccountWithMetadata { account: Account { - program_owner: [5u32; 8], - balance: 0u128, + program_owner: [5_u32; 8], + balance: 0_u128, data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::pool_definition_id(), balance: BalanceForTests::recipient_post_transfer(), }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: true, account_id: IdForTests::holding_id_2(), @@ -335,13 +340,13 @@ impl AccountForTests { fn holding_account_init_post_transfer() -> AccountWithMetadata { AccountWithMetadata { account: Account { - program_owner: [5u32; 8], - balance: 0u128, + program_owner: [5_u32; 8], + balance: 0_u128, data: Data::from(&TokenHolding::Fungible { definition_id: IdForTests::pool_definition_id(), balance: BalanceForTests::sender_post_transfer(), }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: true, account_id: IdForTests::holding_id(), @@ -351,13 +356,13 @@ impl AccountForTests { fn holding_account_master_nft() -> AccountWithMetadata { AccountWithMetadata { account: Account { - program_owner: [5u32; 8], - balance: 0u128, + program_owner: [5_u32; 8], + balance: 0_u128, data: Data::from(&TokenHolding::NftMaster { 
definition_id: IdForTests::pool_definition_id(), print_balance: BalanceForTests::printable_copies(), }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: true, account_id: IdForTests::holding_id(), @@ -367,13 +372,13 @@ impl AccountForTests { fn holding_account_master_nft_insufficient_balance() -> AccountWithMetadata { AccountWithMetadata { account: Account { - program_owner: [5u32; 8], - balance: 0u128, + program_owner: [5_u32; 8], + balance: 0_u128, data: Data::from(&TokenHolding::NftMaster { definition_id: IdForTests::pool_definition_id(), print_balance: 1, }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: true, account_id: IdForTests::holding_id(), @@ -383,13 +388,13 @@ impl AccountForTests { fn holding_account_master_nft_after_print() -> AccountWithMetadata { AccountWithMetadata { account: Account { - program_owner: [5u32; 8], - balance: 0u128, + program_owner: [5_u32; 8], + balance: 0_u128, data: Data::from(&TokenHolding::NftMaster { definition_id: IdForTests::pool_definition_id(), print_balance: BalanceForTests::printable_copies() - 1, }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: true, account_id: IdForTests::holding_id(), @@ -399,13 +404,13 @@ impl AccountForTests { fn holding_account_printed_nft() -> AccountWithMetadata { AccountWithMetadata { account: Account { - program_owner: [0u32; 8], - balance: 0u128, + program_owner: [0_u32; 8], + balance: 0_u128, data: Data::from(&TokenHolding::NftPrintedCopy { definition_id: IdForTests::pool_definition_id(), owned: true, }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: false, account_id: IdForTests::holding_id(), @@ -415,13 +420,13 @@ impl AccountForTests { fn holding_account_with_master_nft_transferred_to() -> AccountWithMetadata { AccountWithMetadata { account: Account { - program_owner: [0u32; 8], - balance: 0u128, + program_owner: [0_u32; 8], + balance: 0_u128, data: Data::from(&TokenHolding::NftMaster { definition_id: IdForTests::pool_definition_id(), print_balance: 
BalanceForTests::printable_copies(), }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: true, account_id: IdForTests::holding_id_2(), @@ -431,13 +436,13 @@ impl AccountForTests { fn holding_account_master_nft_post_transfer() -> AccountWithMetadata { AccountWithMetadata { account: Account { - program_owner: [5u32; 8], - balance: 0u128, + program_owner: [5_u32; 8], + balance: 0_u128, data: Data::from(&TokenHolding::NftMaster { definition_id: IdForTests::pool_definition_id(), print_balance: 0, }), - nonce: 0, + nonce: 0_u128.into(), }, is_authorized: true, account_id: IdForTests::holding_id(), @@ -523,7 +528,7 @@ impl IdForTests { #[should_panic(expected = "Definition target account must have default values")] #[test] -fn test_new_definition_non_default_first_account_should_fail() { +fn new_definition_non_default_first_account_should_fail() { let definition_account = AccountWithMetadata { account: Account { program_owner: [1, 2, 3, 4, 5, 6, 7, 8], @@ -547,7 +552,7 @@ fn test_new_definition_non_default_first_account_should_fail() { #[should_panic(expected = "Holding target account must have default values")] #[test] -fn test_new_definition_non_default_second_account_should_fail() { +fn new_definition_non_default_second_account_should_fail() { let definition_account = AccountWithMetadata { account: Account::default(), is_authorized: true, @@ -570,7 +575,7 @@ fn test_new_definition_non_default_second_account_should_fail() { } #[test] -fn test_new_definition_with_valid_inputs_succeeds() { +fn new_definition_with_valid_inputs_succeeds() { let definition_account = AccountForTests::definition_account_uninit(); let holding_account = AccountForTests::holding_account_uninit(); @@ -595,7 +600,7 @@ fn test_new_definition_with_valid_inputs_succeeds() { #[should_panic(expected = "Sender and recipient definition id mismatch")] #[test] -fn test_transfer_with_different_definition_ids_should_fail() { +fn transfer_with_different_definition_ids_should_fail() { let sender = 
AccountForTests::holding_same_definition_with_authorization(); let recipient = AccountForTests::holding_different_definition(); let _post_states = transfer(sender, recipient, 10); @@ -603,7 +608,7 @@ fn test_transfer_with_different_definition_ids_should_fail() { #[should_panic(expected = "Insufficient balance")] #[test] -fn test_transfer_with_insufficient_balance_should_fail() { +fn transfer_with_insufficient_balance_should_fail() { let sender = AccountForTests::holding_same_definition_with_authorization(); let recipient = AccountForTests::holding_account_same_definition_mint(); // Attempt to transfer more than balance @@ -612,14 +617,14 @@ fn test_transfer_with_insufficient_balance_should_fail() { #[should_panic(expected = "Sender authorization is missing")] #[test] -fn test_transfer_without_sender_authorization_should_fail() { +fn transfer_without_sender_authorization_should_fail() { let sender = AccountForTests::holding_same_definition_without_authorization(); let recipient = AccountForTests::holding_account_uninit(); let _post_states = transfer(sender, recipient, 37); } #[test] -fn test_transfer_with_valid_inputs_succeeds() { +fn transfer_with_valid_inputs_succeeds() { let sender = AccountForTests::holding_account_init(); let recipient = AccountForTests::holding_account2_init(); let post_states = transfer(sender, recipient, BalanceForTests::transfer_amount()); @@ -637,7 +642,7 @@ fn test_transfer_with_valid_inputs_succeeds() { #[should_panic(expected = "Invalid balance for NFT Master transfer")] #[test] -fn test_transfer_with_master_nft_invalid_balance() { +fn transfer_with_master_nft_invalid_balance() { let sender = AccountForTests::holding_account_master_nft(); let recipient = AccountForTests::holding_account_uninit(); let _post_states = transfer(sender, recipient, BalanceForTests::transfer_amount()); @@ -645,14 +650,14 @@ fn test_transfer_with_master_nft_invalid_balance() { #[should_panic(expected = "Invalid balance in recipient account for NFT transfer")] 
#[test] -fn test_transfer_with_master_nft_invalid_recipient_balance() { +fn transfer_with_master_nft_invalid_recipient_balance() { let sender = AccountForTests::holding_account_master_nft(); let recipient = AccountForTests::holding_account_with_master_nft_transferred_to(); let _post_states = transfer(sender, recipient, BalanceForTests::printable_copies()); } #[test] -fn test_transfer_with_master_nft_success() { +fn transfer_with_master_nft_success() { let sender = AccountForTests::holding_account_master_nft(); let recipient = AccountForTests::holding_account_uninit(); let post_states = transfer(sender, recipient, BalanceForTests::printable_copies()); @@ -669,7 +674,7 @@ fn test_transfer_with_master_nft_success() { } #[test] -fn test_token_initialize_account_succeeds() { +fn token_initialize_account_succeeds() { let sender = AccountForTests::holding_account_init(); let recipient = AccountForTests::holding_account2_init(); let post_states = transfer(sender, recipient, BalanceForTests::transfer_amount()); @@ -687,7 +692,7 @@ fn test_token_initialize_account_succeeds() { #[test] #[should_panic(expected = "Mismatch Token Definition and Token Holding")] -fn test_burn_mismatch_def() { +fn burn_mismatch_def() { let definition_account = AccountForTests::definition_account_auth(); let holding_account = AccountForTests::holding_different_definition(); let _post_states = burn( @@ -699,7 +704,7 @@ fn test_burn_mismatch_def() { #[test] #[should_panic(expected = "Authorization is missing")] -fn test_burn_missing_authorization() { +fn burn_missing_authorization() { let definition_account = AccountForTests::definition_account_auth(); let holding_account = AccountForTests::holding_same_definition_without_authorization(); let _post_states = burn( @@ -711,7 +716,7 @@ fn test_burn_missing_authorization() { #[test] #[should_panic(expected = "Insufficient balance to burn")] -fn test_burn_insufficient_balance() { +fn burn_insufficient_balance() { let definition_account = 
AccountForTests::definition_account_auth(); let holding_account = AccountForTests::holding_same_definition_with_authorization(); let _post_states = burn( @@ -723,7 +728,7 @@ fn test_burn_insufficient_balance() { #[test] #[should_panic(expected = "Total supply underflow")] -fn test_burn_total_supply_underflow() { +fn burn_total_supply_underflow() { let definition_account = AccountForTests::definition_account_auth(); let holding_account = AccountForTests::holding_same_definition_with_authorization_and_large_balance(); @@ -735,7 +740,7 @@ fn test_burn_total_supply_underflow() { } #[test] -fn test_burn_success() { +fn burn_success() { let definition_account = AccountForTests::definition_account_auth(); let holding_account = AccountForTests::holding_same_definition_with_authorization(); let post_states = burn( @@ -758,7 +763,7 @@ fn test_burn_success() { #[test] #[should_panic(expected = "Holding account must be valid")] -fn test_mint_not_valid_holding_account() { +fn mint_not_valid_holding_account() { let definition_account = AccountForTests::definition_account_auth(); let holding_account = AccountForTests::definition_account_without_auth(); let _post_states = mint( @@ -770,7 +775,7 @@ fn test_mint_not_valid_holding_account() { #[test] #[should_panic(expected = "Definition account must be valid")] -fn test_mint_not_valid_definition_account() { +fn mint_not_valid_definition_account() { let definition_account = AccountForTests::holding_same_definition_with_authorization(); let holding_account = AccountForTests::holding_same_definition_without_authorization(); let _post_states = mint( @@ -782,7 +787,7 @@ fn test_mint_not_valid_definition_account() { #[test] #[should_panic(expected = "Definition authorization is missing")] -fn test_mint_missing_authorization() { +fn mint_missing_authorization() { let definition_account = AccountForTests::definition_account_without_auth(); let holding_account = AccountForTests::holding_same_definition_without_authorization(); let 
_post_states = mint( @@ -794,7 +799,7 @@ fn test_mint_missing_authorization() { #[test] #[should_panic(expected = "Mismatch Token Definition and Token Holding")] -fn test_mint_mismatched_token_definition() { +fn mint_mismatched_token_definition() { let definition_account = AccountForTests::definition_account_auth(); let holding_account = AccountForTests::holding_different_definition(); let _post_states = mint( @@ -805,7 +810,7 @@ fn test_mint_mismatched_token_definition() { } #[test] -fn test_mint_success() { +fn mint_success() { let definition_account = AccountForTests::definition_account_auth(); let holding_account = AccountForTests::holding_same_definition_without_authorization(); let post_states = mint( @@ -827,7 +832,7 @@ fn test_mint_success() { } #[test] -fn test_mint_uninit_holding_success() { +fn mint_uninit_holding_success() { let definition_account = AccountForTests::definition_account_auth(); let holding_account = AccountForTests::holding_account_uninit(); let post_states = mint( @@ -851,7 +856,7 @@ fn test_mint_uninit_holding_success() { #[test] #[should_panic(expected = "Total supply overflow")] -fn test_mint_total_supply_overflow() { +fn mint_total_supply_overflow() { let definition_account = AccountForTests::definition_account_auth(); let holding_account = AccountForTests::holding_same_definition_without_authorization(); let _post_states = mint( @@ -863,7 +868,7 @@ fn test_mint_total_supply_overflow() { #[test] #[should_panic(expected = "Balance overflow on minting")] -fn test_mint_holding_account_overflow() { +fn mint_holding_account_overflow() { let definition_account = AccountForTests::definition_account_auth(); let holding_account = AccountForTests::holding_same_definition_without_authorization_overflow(); let _post_states = mint( @@ -875,7 +880,7 @@ fn test_mint_holding_account_overflow() { #[test] #[should_panic(expected = "Cannot mint additional supply for Non-Fungible Tokens")] -fn test_mint_cannot_mint_unmintable_tokens() { +fn 
mint_cannot_mint_unmintable_tokens() { let definition_account = AccountForTests::definition_account_with_authorization_nonfungible(); let holding_account = AccountForTests::holding_account_master_nft(); let _post_states = mint( @@ -887,7 +892,7 @@ fn test_mint_cannot_mint_unmintable_tokens() { #[should_panic(expected = "Definition target account must have default values")] #[test] -fn test_call_new_definition_metadata_with_init_definition() { +fn call_new_definition_metadata_with_init_definition() { let definition_account = AccountForTests::definition_account_auth(); let metadata_account = AccountWithMetadata { account: Account::default(), @@ -901,12 +906,12 @@ fn test_call_new_definition_metadata_with_init_definition() { }; let new_definition = NewTokenDefinition::Fungible { name: String::from("test"), - total_supply: 15u128, + total_supply: 15_u128, }; let metadata = NewTokenMetadata { standard: MetadataStandard::Simple, - uri: "test_uri".to_string(), - creators: "test_creators".to_string(), + uri: "test_uri".to_owned(), + creators: "test_creators".to_owned(), }; let _post_states = new_definition_with_metadata( definition_account, @@ -919,7 +924,7 @@ fn test_call_new_definition_metadata_with_init_definition() { #[should_panic(expected = "Metadata target account must have default values")] #[test] -fn test_call_new_definition_metadata_with_init_metadata() { +fn call_new_definition_metadata_with_init_metadata() { let definition_account = AccountWithMetadata { account: Account::default(), is_authorized: true, @@ -933,12 +938,12 @@ fn test_call_new_definition_metadata_with_init_metadata() { let metadata_account = AccountForTests::holding_account_same_definition_mint(); let new_definition = NewTokenDefinition::Fungible { name: String::from("test"), - total_supply: 15u128, + total_supply: 15_u128, }; let metadata = NewTokenMetadata { standard: MetadataStandard::Simple, - uri: "test_uri".to_string(), - creators: "test_creators".to_string(), + uri: "test_uri".to_owned(), 
+ creators: "test_creators".to_owned(), }; let _post_states = new_definition_with_metadata( definition_account, @@ -951,7 +956,7 @@ fn test_call_new_definition_metadata_with_init_metadata() { #[should_panic(expected = "Holding target account must have default values")] #[test] -fn test_call_new_definition_metadata_with_init_holding() { +fn call_new_definition_metadata_with_init_holding() { let definition_account = AccountWithMetadata { account: Account::default(), is_authorized: true, @@ -965,12 +970,12 @@ fn test_call_new_definition_metadata_with_init_holding() { let holding_account = AccountForTests::holding_account_same_definition_mint(); let new_definition = NewTokenDefinition::Fungible { name: String::from("test"), - total_supply: 15u128, + total_supply: 15_u128, }; let metadata = NewTokenMetadata { standard: MetadataStandard::Simple, - uri: "test_uri".to_string(), - creators: "test_creators".to_string(), + uri: "test_uri".to_owned(), + creators: "test_creators".to_owned(), }; let _post_states = new_definition_with_metadata( definition_account, @@ -983,7 +988,7 @@ fn test_call_new_definition_metadata_with_init_holding() { #[should_panic(expected = "Master NFT Account must be authorized")] #[test] -fn test_print_nft_master_account_must_be_authorized() { +fn print_nft_master_account_must_be_authorized() { let master_account = AccountForTests::holding_account_uninit(); let printed_account = AccountForTests::holding_account_uninit(); let _post_states = print_nft(master_account, printed_account); @@ -991,7 +996,7 @@ fn test_print_nft_master_account_must_be_authorized() { #[should_panic(expected = "Printed Account must be uninitialized")] #[test] -fn test_print_nft_print_account_initialized() { +fn print_nft_print_account_initialized() { let master_account = AccountForTests::holding_account_master_nft(); let printed_account = AccountForTests::holding_account_init(); let _post_states = print_nft(master_account, printed_account); @@ -999,7 +1004,7 @@ fn 
test_print_nft_print_account_initialized() { #[should_panic(expected = "Invalid Token Holding data")] #[test] -fn test_print_nft_master_nft_invalid_token_holding() { +fn print_nft_master_nft_invalid_token_holding() { let master_account = AccountForTests::definition_account_auth(); let printed_account = AccountForTests::holding_account_uninit(); let _post_states = print_nft(master_account, printed_account); @@ -1007,7 +1012,7 @@ fn test_print_nft_master_nft_invalid_token_holding() { #[should_panic(expected = "Invalid Token Holding provided as NFT Master Account")] #[test] -fn test_print_nft_master_nft_not_nft_master_account() { +fn print_nft_master_nft_not_nft_master_account() { let master_account = AccountForTests::holding_account_init(); let printed_account = AccountForTests::holding_account_uninit(); let _post_states = print_nft(master_account, printed_account); @@ -1015,14 +1020,14 @@ fn test_print_nft_master_nft_not_nft_master_account() { #[should_panic(expected = "Insufficient balance to print another NFT copy")] #[test] -fn test_print_nft_master_nft_insufficient_balance() { +fn print_nft_master_nft_insufficient_balance() { let master_account = AccountForTests::holding_account_master_nft_insufficient_balance(); let printed_account = AccountForTests::holding_account_uninit(); let _post_states = print_nft(master_account, printed_account); } #[test] -fn test_print_nft_success() { +fn print_nft_success() { let master_account = AccountForTests::holding_account_master_nft(); let printed_account = AccountForTests::holding_account_uninit(); let post_states = print_nft(master_account, printed_account); diff --git a/programs/token/src/transfer.rs b/programs/token/src/transfer.rs index a1087bb1..392f630e 100644 --- a/programs/token/src/transfer.rs +++ b/programs/token/src/transfer.rs @@ -4,6 +4,7 @@ use nssa_core::{ }; use token_core::TokenHolding; +#[must_use] pub fn transfer( sender: AccountWithMetadata, recipient: AccountWithMetadata, @@ -95,7 +96,7 @@ pub fn 
transfer( _ => { panic!("Mismatched token holding types for transfer"); } - }; + } let mut sender_post = sender.account; sender_post.data = Data::from(&sender_holding); diff --git a/rust-toolchain.toml b/rust-toolchain.toml index bf951337..8a0f7d2f 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,5 +1,5 @@ # Should be kept in sync with Dockerfiles [toolchain] -channel = "1.91.1" +channel = "1.94.0" profile = "default" diff --git a/sequencer_core/Cargo.toml b/sequencer_core/Cargo.toml index e939c7ae..334f093c 100644 --- a/sequencer_core/Cargo.toml +++ b/sequencer_core/Cargo.toml @@ -4,6 +4,9 @@ version = "0.1.0" edition = "2024" license = { workspace = true } +[lints] +workspace = true + [dependencies] nssa.workspace = true nssa_core.workspace = true diff --git a/sequencer_core/src/block_settlement_client.rs b/sequencer_core/src/block_settlement_client.rs index 15612835..2f036b98 100644 --- a/sequencer_core/src/block_settlement_client.rs +++ b/sequencer_core/src/block_settlement_client.rs @@ -1,9 +1,9 @@ -use anyhow::{Context, Result}; +use anyhow::{Context as _, Result}; use bedrock_client::BedrockClient; pub use common::block::Block; pub use logos_blockchain_core::mantle::{MantleTx, SignedMantleTx, ops::channel::MsgId}; use logos_blockchain_core::mantle::{ - Op, OpProof, Transaction, TxHash, ledger, + Op, OpProof, Transaction as _, TxHash, ledger, ops::channel::{ChannelId, inscribe::InscriptionOp}, }; pub use logos_blockchain_key_management_system_service::keys::Ed25519Key; @@ -14,7 +14,7 @@ use crate::config::BedrockConfig; #[expect(async_fn_in_trait, reason = "We don't care about Send/Sync here")] pub trait BlockSettlementClientTrait: Clone { //// Create a new client. - fn new(config: &BedrockConfig, bedrock_signing_key: Ed25519Key) -> Result; + fn new(config: &BedrockConfig, signing_key: Ed25519Key) -> Result; /// Get the bedrock channel ID used by this client. 
fn bedrock_channel_id(&self) -> ChannelId; @@ -74,23 +74,23 @@ pub trait BlockSettlementClientTrait: Clone { } } -/// A component that posts block data to logos blockchain +/// A component that posts block data to logos blockchain. #[derive(Clone)] pub struct BlockSettlementClient { - bedrock_client: BedrockClient, - bedrock_signing_key: Ed25519Key, - bedrock_channel_id: ChannelId, + client: BedrockClient, + signing_key: Ed25519Key, + channel_id: ChannelId, } impl BlockSettlementClientTrait for BlockSettlementClient { - fn new(config: &BedrockConfig, bedrock_signing_key: Ed25519Key) -> Result { - let bedrock_client = + fn new(config: &BedrockConfig, signing_key: Ed25519Key) -> Result { + let client = BedrockClient::new(config.backoff, config.node_url.clone(), config.auth.clone()) .context("Failed to initialize bedrock client")?; Ok(Self { - bedrock_client, - bedrock_signing_key, - bedrock_channel_id: config.channel_id, + client, + signing_key, + channel_id: config.channel_id, }) } @@ -99,10 +99,11 @@ impl BlockSettlementClientTrait for BlockSettlementClient { Some(Op::ChannelInscribe(inscribe)) => (inscribe.parent, inscribe.id()), _ => panic!("Expected ChannelInscribe op"), }; - self.bedrock_client + self.client .post_transaction(tx) .await - .context("Failed to post transaction to Bedrock")?; + .context("Failed to post transaction to Bedrock after retries")? 
+ .context("Failed to post transaction to Bedrock with non-retryable error")?; log::debug!("Posted block to Bedrock with parent id {parent_id:?} and msg id: {msg_id:?}"); @@ -110,11 +111,11 @@ impl BlockSettlementClientTrait for BlockSettlementClient { } fn bedrock_channel_id(&self) -> ChannelId { - self.bedrock_channel_id + self.channel_id } fn bedrock_signing_key(&self) -> &Ed25519Key { - &self.bedrock_signing_key + &self.signing_key } } diff --git a/sequencer_core/src/block_store.rs b/sequencer_core/src/block_store.rs index bfa3be99..eb541188 100644 --- a/sequencer_core/src/block_store.rs +++ b/sequencer_core/src/block_store.rs @@ -24,32 +24,24 @@ impl SequencerStore { /// ATTENTION: Will overwrite genesis block. pub fn open_db_with_genesis( location: &Path, - genesis_block: Option<(&Block, MantleMsgId)>, + genesis_block: &Block, + genesis_msg_id: MantleMsgId, signing_key: nssa::PrivateKey, ) -> Result { - let tx_hash_to_block_map = if let Some((block, _msg_id)) = &genesis_block { - block_to_transactions_map(block) - } else { - HashMap::new() - }; + let tx_hash_to_block_map = block_to_transactions_map(genesis_block); - let dbio = RocksDBIO::open_or_create(location, genesis_block)?; + let dbio = RocksDBIO::open_or_create(location, genesis_block, genesis_msg_id)?; let genesis_id = dbio.get_meta_first_block_in_db()?; Ok(Self { dbio, - genesis_id, tx_hash_to_block_map, + genesis_id, signing_key, }) } - /// Reopening existing database - pub fn open_db_restart(location: &Path, signing_key: nssa::PrivateKey) -> Result { - SequencerStore::open_db_with_genesis(location, None, signing_key) - } - pub fn get_block_at_id(&self, id: u64) -> Result { Ok(self.dbio.get_block(id)?) 
} @@ -67,7 +59,7 @@ impl SequencerStore { let block_id = self.tx_hash_to_block_map.get(&hash); let block = block_id.map(|&id| self.get_block_at_id(id)); if let Some(Ok(block)) = block { - for transaction in block.body.transactions.into_iter() { + for transaction in block.body.transactions { if transaction.hash() == hash { return Some(transaction); } @@ -80,11 +72,11 @@ impl SequencerStore { Ok(self.dbio.latest_block_meta()?) } - pub fn genesis_id(&self) -> u64 { + pub const fn genesis_id(&self) -> u64 { self.genesis_id } - pub fn signing_key(&self) -> &nssa::PrivateKey { + pub const fn signing_key(&self) -> &nssa::PrivateKey { &self.signing_key } @@ -120,13 +112,15 @@ pub(crate) fn block_to_transactions_map(block: &Block) -> HashMap #[cfg(test)] mod tests { + #![expect(clippy::shadow_unrelated, reason = "We don't care about it in tests")] + use common::{block::HashableBlockData, test_utils::sequencer_sign_key_for_testing}; use tempfile::tempdir; use super::*; #[test] - fn test_get_transaction_by_hash() { + fn get_transaction_by_hash() { let temp_dir = tempdir().unwrap(); let path = temp_dir.path(); @@ -141,12 +135,9 @@ mod tests { let genesis_block = genesis_block_hashable_data.into_pending_block(&signing_key, [0; 32]); // Start an empty node store - let mut node_store = SequencerStore::open_db_with_genesis( - path, - Some((&genesis_block, [0; 32])), - signing_key, - ) - .unwrap(); + let mut node_store = + SequencerStore::open_db_with_genesis(path, &genesis_block, [0; 32], signing_key) + .unwrap(); let tx = common::test_utils::produce_dummy_empty_transaction(); let block = common::test_utils::produce_dummy_block(1, None, vec![tx.clone()]); @@ -163,7 +154,7 @@ mod tests { } #[test] - fn test_latest_block_meta_returns_genesis_meta_initially() { + fn latest_block_meta_returns_genesis_meta_initially() { let temp_dir = tempdir().unwrap(); let path = temp_dir.path(); @@ -179,12 +170,9 @@ mod tests { let genesis_block = 
genesis_block_hashable_data.into_pending_block(&signing_key, [0; 32]); let genesis_hash = genesis_block.header.hash; - let node_store = SequencerStore::open_db_with_genesis( - path, - Some((&genesis_block, [0; 32])), - signing_key, - ) - .unwrap(); + let node_store = + SequencerStore::open_db_with_genesis(path, &genesis_block, [0; 32], signing_key) + .unwrap(); // Verify that initially the latest block hash equals genesis hash let latest_meta = node_store.latest_block_meta().unwrap(); @@ -193,7 +181,7 @@ mod tests { } #[test] - fn test_latest_block_meta_updates_after_new_block() { + fn latest_block_meta_updates_after_new_block() { let temp_dir = tempdir().unwrap(); let path = temp_dir.path(); @@ -207,16 +195,13 @@ mod tests { }; let genesis_block = genesis_block_hashable_data.into_pending_block(&signing_key, [0; 32]); - let mut node_store = SequencerStore::open_db_with_genesis( - path, - Some((&genesis_block, [0; 32])), - signing_key, - ) - .unwrap(); + let mut node_store = + SequencerStore::open_db_with_genesis(path, &genesis_block, [0; 32], signing_key) + .unwrap(); // Add a new block let tx = common::test_utils::produce_dummy_empty_transaction(); - let block = common::test_utils::produce_dummy_block(1, None, vec![tx.clone()]); + let block = common::test_utils::produce_dummy_block(1, None, vec![tx]); let block_hash = block.header.hash; let block_msg_id = [1; 32]; @@ -232,7 +217,7 @@ mod tests { } #[test] - fn test_mark_block_finalized() { + fn mark_block_finalized() { let temp_dir = tempdir().unwrap(); let path = temp_dir.path(); @@ -246,16 +231,13 @@ mod tests { }; let genesis_block = genesis_block_hashable_data.into_pending_block(&signing_key, [0; 32]); - let mut node_store = SequencerStore::open_db_with_genesis( - path, - Some((&genesis_block, [0; 32])), - signing_key, - ) - .unwrap(); + let mut node_store = + SequencerStore::open_db_with_genesis(path, &genesis_block, [0; 32], signing_key) + .unwrap(); // Add a new block with Pending status let tx = 
common::test_utils::produce_dummy_empty_transaction(); - let block = common::test_utils::produce_dummy_block(1, None, vec![tx.clone()]); + let block = common::test_utils::produce_dummy_block(1, None, vec![tx]); let block_id = block.header.block_id; let dummy_state = V02State::new_with_genesis_accounts(&[], &[]); diff --git a/sequencer_core/src/config.rs b/sequencer_core/src/config.rs index 003b82e8..097d1391 100644 --- a/sequencer_core/src/config.rs +++ b/sequencer_core/src/config.rs @@ -20,56 +20,56 @@ use url::Url; // TODO: Provide default values #[derive(Clone, Serialize, Deserialize)] pub struct SequencerConfig { - /// Home dir of sequencer storage + /// Home dir of sequencer storage. pub home: PathBuf, - /// Override rust log (env var logging level) + /// Override rust log (env var logging level). pub override_rust_log: Option, - /// Genesis id + /// Genesis id. pub genesis_id: u64, - /// If `True`, then adds random sequence of bytes to genesis block + /// If `True`, then adds random sequence of bytes to genesis block. pub is_genesis_random: bool, - /// Maximum number of transactions in block + /// Maximum number of transactions in block. pub max_num_tx_in_block: usize, - /// Maximum block size (includes header and transactions) + /// Maximum block size (includes header and transactions). #[serde(default = "default_max_block_size")] pub max_block_size: ByteSize, - /// Mempool maximum size + /// Mempool maximum size. pub mempool_max_size: usize, - /// Interval in which blocks produced + /// Interval in which blocks produced. #[serde(with = "humantime_serde")] pub block_create_timeout: Duration, - /// Interval in which pending blocks are retried + /// Interval in which pending blocks are retried. #[serde(with = "humantime_serde")] pub retry_pending_blocks_timeout: Duration, - /// Port to listen + /// Port to listen. pub port: u16, - /// List of initial accounts data + /// List of initial accounts data. 
pub initial_accounts: Vec, - /// List of initial commitments + /// List of initial commitments. pub initial_commitments: Vec, - /// Sequencer own signing key + /// Sequencer own signing key. pub signing_key: [u8; 32], - /// Bedrock configuration options + /// Bedrock configuration options. pub bedrock_config: BedrockConfig, - /// Indexer RPC URL + /// Indexer RPC URL. pub indexer_rpc_url: Url, } #[derive(Clone, Serialize, Deserialize)] pub struct BedrockConfig { - /// Fibonacci backoff retry strategy configuration + /// Fibonacci backoff retry strategy configuration. #[serde(default)] pub backoff: BackoffConfig, - /// Bedrock channel ID + /// Bedrock channel ID. pub channel_id: ChannelId, - /// Bedrock Url + /// Bedrock Url. pub node_url: Url, - /// Bedrock auth + /// Bedrock auth. pub auth: Option, } impl SequencerConfig { - pub fn from_path(config_home: &Path) -> Result { + pub fn from_path(config_home: &Path) -> Result { let file = File::open(config_home)?; let reader = BufReader::new(file); @@ -77,6 +77,6 @@ impl SequencerConfig { } } -fn default_max_block_size() -> ByteSize { +const fn default_max_block_size() -> ByteSize { ByteSize::mib(1) } diff --git a/sequencer_core/src/lib.rs b/sequencer_core/src/lib.rs index 083728bf..c844c193 100644 --- a/sequencer_core/src/lib.rs +++ b/sequencer_core/src/lib.rs @@ -13,6 +13,8 @@ use config::SequencerConfig; use log::{error, info, warn}; use logos_blockchain_key_management_system_service::keys::{ED25519_SECRET_KEY_SIZE, Ed25519Key}; use mempool::{MemPool, MemPoolHandle}; +#[cfg(feature = "mock")] +pub use mock::SequencerCoreWithMockClients; use crate::{ block_settlement_client::{BlockSettlementClient, BlockSettlementClientTrait, MsgId}, @@ -24,11 +26,9 @@ pub mod block_settlement_client; pub mod block_store; pub mod config; pub mod indexer_client; -#[cfg(feature = "mock")] -pub mod mock; #[cfg(feature = "mock")] -pub use mock::SequencerCoreWithMockClients; +pub mod mock; pub struct SequencerCore< BC: 
BlockSettlementClientTrait = BlockSettlementClient, @@ -82,7 +82,8 @@ impl SequencerCore SequencerCore { - info!("Found local database. Loading state and pending blocks from it."); - state - } - None => { - info!( - "No database found when starting the sequencer. Creating a fresh new with the initial data in config" - ); - let initial_commitments: Vec = config - .initial_commitments - .iter() - .map(|init_comm_data| { - let npk = &init_comm_data.npk; + let mut state = if let Some(state) = store.get_nssa_state() { + info!("Found local database. Loading state and pending blocks from it."); + state + } else { + info!( + "No database found when starting the sequencer. Creating a fresh new with the initial data in config" + ); + let initial_commitments: Vec = config + .initial_commitments + .iter() + .map(|init_comm_data| { + let npk = &init_comm_data.npk; - let mut acc = init_comm_data.account.clone(); + let mut acc = init_comm_data.account.clone(); - acc.program_owner = - nssa::program::Program::authenticated_transfer_program().id(); + acc.program_owner = + nssa::program::Program::authenticated_transfer_program().id(); - nssa_core::Commitment::new(npk, &acc) - }) - .collect(); + nssa_core::Commitment::new(npk, &acc) + }) + .collect(); - let init_accs: Vec<(nssa::AccountId, u128)> = config - .initial_accounts - .iter() - .map(|acc_data| (acc_data.account_id, acc_data.balance)) - .collect(); + let init_accs: Vec<(nssa::AccountId, u128)> = config + .initial_accounts + .iter() + .map(|acc_data| (acc_data.account_id, acc_data.balance)) + .collect(); - nssa::V02State::new_with_genesis_accounts(&init_accs, &initial_commitments) - } + nssa::V02State::new_with_genesis_accounts(&init_accs, &initial_commitments) }; #[cfg(feature = "testnet")] @@ -179,24 +177,29 @@ impl SequencerCore Result<(SignedMantleTx, MsgId)> { let now = Instant::now(); - let new_block_height = self.chain_height + 1; + let new_block_height = self + .chain_height + .checked_add(1) + .with_context(|| 
format!("Max block height reached: {}", self.chain_height))?; let mut valid_transactions = vec![]; - let max_block_size = self.sequencer_config.max_block_size.as_u64() as usize; + let max_block_size = usize::try_from(self.sequencer_config.max_block_size.as_u64()) + .expect("`max_block_size` should fit into usize"); let latest_block_meta = self .store .latest_block_meta() .context("Failed to get latest block meta from store")?; - let curr_time = chrono::Utc::now().timestamp_millis() as u64; + let curr_time = u64::try_from(chrono::Utc::now().timestamp_millis()) + .expect("Timestamp must be positive"); while let Some(tx) = self.mempool.pop() { let tx_hash = tx.hash(); @@ -278,19 +281,19 @@ impl SequencerCore &nssa::V02State { + pub const fn state(&self) -> &nssa::V02State { &self.state } - pub fn block_store(&self) -> &SequencerStore { + pub const fn block_store(&self) -> &SequencerStore { &self.store } - pub fn chain_height(&self) -> u64 { + pub const fn chain_height(&self) -> u64 { self.chain_height } - pub fn sequencer_config(&self) -> &SequencerConfig { + pub const fn sequencer_config(&self) -> &SequencerConfig { &self.sequencer_config } @@ -299,23 +302,17 @@ impl SequencerCore Result<()> { - if let Some(first_pending_block_id) = self - .get_pending_blocks()? + self.get_pending_blocks()? .iter() .map(|block| block.header.block_id) .min() - { - info!( - "Clearing pending blocks up to id: {}", - last_finalized_block_id - ); - // TODO: Delete blocks instead of marking them as finalized. - // Current approach is used because we still have `GetBlockDataRequest`. - (first_pending_block_id..=last_finalized_block_id) - .try_for_each(|id| self.store.mark_block_as_finalized(id)) - } else { - Ok(()) - } + .map_or(Ok(()), |first_pending_block_id| { + info!("Clearing pending blocks up to id: {last_finalized_block_id}"); + // TODO: Delete blocks instead of marking them as finalized. + // Current approach is used because we still have `GetBlockDataRequest`. 
+ (first_pending_block_id..=last_finalized_block_id) + .try_for_each(|id| self.store.mark_block_as_finalized(id)) + }) } /// Returns the list of stored pending blocks. @@ -338,16 +335,18 @@ impl SequencerCore Result { if path.exists() { let key_bytes = std::fs::read(path)?; + let key_array: [u8; ED25519_SECRET_KEY_SIZE] = key_bytes .try_into() - .map_err(|_| anyhow!("Found key with incorrect length"))?; + .map_err(|_bytes| anyhow!("Found key with incorrect length"))?; + Ok(Ed25519Key::from_bytes(&key_array)) } else { - let mut key_bytes = [0u8; ED25519_SECRET_KEY_SIZE]; + let mut key_bytes = [0_u8; ED25519_SECRET_KEY_SIZE]; rand::RngCore::fill_bytes(&mut rand::thread_rng(), &mut key_bytes); // Create parent directory if it doesn't exist if let Some(parent) = path.parent() { @@ -358,11 +357,14 @@ fn load_or_create_signing_key(path: &Path) -> Result { } } -#[cfg(all(test, feature = "mock"))] +#[cfg(test)] +#[cfg(feature = "mock")] mod tests { + #![expect(clippy::shadow_unrelated, reason = "We don't care about it in tests")] + use std::{pin::pin, str::FromStr as _, time::Duration}; - use base58::ToBase58; + use base58::ToBase58 as _; use bedrock_client::BackoffConfig; use common::{ block::AccountInitialData, test_utils::sequencer_sign_key_for_testing, @@ -385,7 +387,7 @@ mod tests { SequencerConfig { home, - override_rust_log: Some("info".to_string()), + override_rust_log: Some("info".to_owned()), genesis_id: 1, is_genesis_random: false, max_num_tx_in_block: 10, @@ -466,7 +468,7 @@ mod tests { } #[tokio::test] - async fn test_start_from_config() { + async fn start_from_config() { let config = setup_sequencer_config(); let (sequencer, _mempool_handle) = SequencerCoreWithMockClients::start_from_config(config.clone()).await; @@ -486,7 +488,7 @@ mod tests { } #[tokio::test] - async fn test_start_different_intial_accounts_balances() { + async fn start_different_intial_accounts_balances() { let acc1_account_id: Vec = vec![ 27, 132, 197, 86, 123, 18, 100, 64, 153, 93, 62, 
213, 170, 186, 5, 101, 215, 30, 24, 52, 96, 72, 25, 255, 156, 23, 245, 233, 213, 221, 7, 143, @@ -527,7 +529,7 @@ mod tests { } #[test] - fn test_transaction_pre_check_pass() { + fn transaction_pre_check_pass() { let tx = common::test_utils::produce_dummy_empty_transaction(); let result = tx.transaction_stateless_check(); @@ -535,7 +537,7 @@ mod tests { } #[tokio::test] - async fn test_transaction_pre_check_native_transfer_valid() { + async fn transaction_pre_check_native_transfer_valid() { let (sequencer, _mempool_handle) = common_setup().await; let acc1 = sequencer.sequencer_config.initial_accounts[0].account_id; @@ -544,7 +546,7 @@ mod tests { let sign_key1 = create_signing_key_for_account1(); let tx = common::test_utils::create_transaction_native_token_transfer( - acc1, 0, acc2, 10, sign_key1, + acc1, 0, acc2, 10, &sign_key1, ); let result = tx.transaction_stateless_check(); @@ -552,7 +554,7 @@ mod tests { } #[tokio::test] - async fn test_transaction_pre_check_native_transfer_other_signature() { + async fn transaction_pre_check_native_transfer_other_signature() { let (mut sequencer, _mempool_handle) = common_setup().await; let acc1 = sequencer.sequencer_config.initial_accounts[0].account_id; @@ -561,7 +563,7 @@ mod tests { let sign_key2 = create_signing_key_for_account2(); let tx = common::test_utils::create_transaction_native_token_transfer( - acc1, 0, acc2, 10, sign_key2, + acc1, 0, acc2, 10, &sign_key2, ); // Signature is valid, stateless check pass @@ -577,7 +579,7 @@ mod tests { } #[tokio::test] - async fn test_transaction_pre_check_native_transfer_sent_too_much() { + async fn transaction_pre_check_native_transfer_sent_too_much() { let (mut sequencer, _mempool_handle) = common_setup().await; let acc1 = sequencer.sequencer_config.initial_accounts[0].account_id; @@ -586,7 +588,7 @@ mod tests { let sign_key1 = create_signing_key_for_account1(); let tx = common::test_utils::create_transaction_native_token_transfer( - acc1, 0, acc2, 10000000, sign_key1, + acc1, 
0, acc2, 10_000_000, &sign_key1, ); let result = tx.transaction_stateless_check(); @@ -604,7 +606,7 @@ mod tests { } #[tokio::test] - async fn test_transaction_execute_native_transfer() { + async fn transaction_execute_native_transfer() { let (mut sequencer, _mempool_handle) = common_setup().await; let acc1 = sequencer.sequencer_config.initial_accounts[0].account_id; @@ -613,7 +615,7 @@ mod tests { let sign_key1 = create_signing_key_for_account1(); let tx = common::test_utils::create_transaction_native_token_transfer( - acc1, 0, acc2, 100, sign_key1, + acc1, 0, acc2, 100, &sign_key1, ); sequencer.execute_check_transaction_on_state(tx).unwrap(); @@ -626,7 +628,7 @@ mod tests { } #[tokio::test] - async fn test_push_tx_into_mempool_blocks_until_mempool_is_full() { + async fn push_tx_into_mempool_blocks_until_mempool_is_full() { let config = SequencerConfig { mempool_max_size: 1, ..setup_sequencer_config() @@ -653,7 +655,7 @@ mod tests { } #[tokio::test] - async fn test_produce_new_block_with_mempool_transactions() { + async fn produce_new_block_with_mempool_transactions() { let (mut sequencer, mempool_handle) = common_setup().await; let genesis_height = sequencer.chain_height; @@ -666,7 +668,7 @@ mod tests { } #[tokio::test] - async fn test_replay_transactions_are_rejected_in_the_same_block() { + async fn replay_transactions_are_rejected_in_the_same_block() { let (mut sequencer, mempool_handle) = common_setup().await; let acc1 = sequencer.sequencer_config.initial_accounts[0].account_id; @@ -675,7 +677,7 @@ mod tests { let sign_key1 = create_signing_key_for_account1(); let tx = common::test_utils::create_transaction_native_token_transfer( - acc1, 0, acc2, 100, sign_key1, + acc1, 0, acc2, 100, &sign_key1, ); let tx_original = tx.clone(); @@ -698,7 +700,7 @@ mod tests { } #[tokio::test] - async fn test_replay_transactions_are_rejected_in_different_blocks() { + async fn replay_transactions_are_rejected_in_different_blocks() { let (mut sequencer, mempool_handle) = 
common_setup().await; let acc1 = sequencer.sequencer_config.initial_accounts[0].account_id; @@ -707,7 +709,7 @@ mod tests { let sign_key1 = create_signing_key_for_account1(); let tx = common::test_utils::create_transaction_native_token_transfer( - acc1, 0, acc2, 100, sign_key1, + acc1, 0, acc2, 100, &sign_key1, ); // The transaction should be included the first time @@ -734,7 +736,7 @@ mod tests { } #[tokio::test] - async fn test_restart_from_storage() { + async fn restart_from_storage() { let config = setup_sequencer_config(); let acc1_account_id = config.initial_accounts[0].account_id; let acc2_account_id = config.initial_accounts[1].account_id; @@ -753,7 +755,7 @@ mod tests { 0, acc2_account_id, balance_to_move, - signing_key, + &signing_key, ); mempool_handle.push(tx.clone()).await.unwrap(); @@ -786,7 +788,7 @@ mod tests { } #[tokio::test] - async fn test_get_pending_blocks() { + async fn get_pending_blocks() { let config = setup_sequencer_config(); let (mut sequencer, _mempool_handle) = SequencerCoreWithMockClients::start_from_config(config).await; @@ -803,7 +805,7 @@ mod tests { } #[tokio::test] - async fn test_delete_blocks() { + async fn delete_blocks() { let config = setup_sequencer_config(); let (mut sequencer, _mempool_handle) = SequencerCoreWithMockClients::start_from_config(config).await; @@ -826,7 +828,7 @@ mod tests { } #[tokio::test] - async fn test_produce_block_with_correct_prev_meta_after_restart() { + async fn produce_block_with_correct_prev_meta_after_restart() { let config = setup_sequencer_config(); let acc1_account_id = config.initial_accounts[0].account_id; let acc2_account_id = config.initial_accounts[1].account_id; @@ -844,7 +846,7 @@ mod tests { 0, acc2_account_id, 100, - signing_key, + &signing_key, ); mempool_handle.push(tx).await.unwrap(); @@ -867,7 +869,7 @@ mod tests { 1, // Next nonce acc2_account_id, 50, - signing_key, + &signing_key, ); mempool_handle.push(tx.clone()).await.unwrap(); @@ -899,7 +901,7 @@ mod tests { } 
#[tokio::test] - async fn test_start_from_config_uses_db_height_not_config_genesis() { + async fn start_from_config_uses_db_height_not_config_genesis() { let mut config = setup_sequencer_config(); let original_genesis_id = config.genesis_id; diff --git a/sequencer_core/src/mock.rs b/sequencer_core/src/mock.rs index 930ff946..45a682e2 100644 --- a/sequencer_core/src/mock.rs +++ b/sequencer_core/src/mock.rs @@ -19,10 +19,10 @@ pub struct MockBlockSettlementClient { } impl BlockSettlementClientTrait for MockBlockSettlementClient { - fn new(config: &BedrockConfig, bedrock_signing_key: Ed25519Key) -> Result { + fn new(config: &BedrockConfig, signing_key: Ed25519Key) -> Result { Ok(Self { bedrock_channel_id: config.channel_id, - bedrock_signing_key, + bedrock_signing_key: signing_key, }) } @@ -46,10 +46,10 @@ pub struct MockBlockSettlementClientWithError { } impl BlockSettlementClientTrait for MockBlockSettlementClientWithError { - fn new(config: &BedrockConfig, bedrock_signing_key: Ed25519Key) -> Result { + fn new(config: &BedrockConfig, signing_key: Ed25519Key) -> Result { Ok(Self { bedrock_channel_id: config.channel_id, - bedrock_signing_key, + bedrock_signing_key: signing_key, }) } diff --git a/sequencer_rpc/Cargo.toml b/sequencer_rpc/Cargo.toml index 42aa978f..5c76ba34 100644 --- a/sequencer_rpc/Cargo.toml +++ b/sequencer_rpc/Cargo.toml @@ -4,6 +4,9 @@ version = "0.1.0" edition = "2024" license = { workspace = true } +[lints] +workspace = true + [dependencies] nssa.workspace = true common.workspace = true diff --git a/sequencer_rpc/src/lib.rs b/sequencer_rpc/src/lib.rs index 074ea284..47e4fa75 100644 --- a/sequencer_rpc/src/lib.rs +++ b/sequencer_rpc/src/lib.rs @@ -1,7 +1,3 @@ -pub mod net_utils; -pub mod process; -pub mod types; - use std::sync::Arc; use common::{ @@ -10,6 +6,8 @@ use common::{ }; use mempool::MemPoolHandle; pub use net_utils::*; +#[cfg(feature = "standalone")] +use sequencer_core::mock::{MockBlockSettlementClient, MockIndexerClient}; use 
sequencer_core::{ SequencerCore, block_settlement_client::{BlockSettlementClient, BlockSettlementClientTrait}, @@ -21,6 +19,13 @@ use tokio::sync::Mutex; use self::types::err_rpc::RpcErr; +pub mod net_utils; +pub mod process; +pub mod types; + +#[cfg(feature = "standalone")] +pub type JsonHandlerWithMockClients = JsonHandler; + // ToDo: Add necessary fields pub struct JsonHandler< BC: BlockSettlementClientTrait = BlockSettlementClient, @@ -35,14 +40,12 @@ fn respond(val: T) -> Result { Ok(serde_json::to_value(val)?) } +#[must_use] pub fn rpc_error_responce_inverter(err: RpcError) -> RpcError { - let mut content: Option = None; - if err.error_struct.is_some() { - content = match err.error_struct.clone().unwrap() { - RpcErrorKind::HandlerError(val) | RpcErrorKind::InternalError(val) => Some(val), - RpcErrorKind::RequestValidationError(vall) => Some(serde_json::to_value(vall).unwrap()), - }; - } + let content = err.error_struct.map(|error| match error { + RpcErrorKind::HandlerError(val) | RpcErrorKind::InternalError(val) => val, + RpcErrorKind::RequestValidationError(vall) => serde_json::to_value(vall).unwrap(), + }); RpcError { error_struct: None, code: err.code, @@ -50,9 +53,3 @@ pub fn rpc_error_responce_inverter(err: RpcError) -> RpcError { data: content, } } - -#[cfg(feature = "standalone")] -use sequencer_core::mock::{MockBlockSettlementClient, MockIndexerClient}; - -#[cfg(feature = "standalone")] -pub type JsonHandlerWithMockClients = JsonHandler; diff --git a/sequencer_rpc/src/net_utils.rs b/sequencer_rpc/src/net_utils.rs index a15cabec..e306ec0e 100644 --- a/sequencer_rpc/src/net_utils.rs +++ b/sequencer_rpc/src/net_utils.rs @@ -6,28 +6,26 @@ use common::{ rpc_primitives::{RpcConfig, message::Message}, transaction::NSSATransaction, }; -use futures::{Future, FutureExt}; +use futures::{Future, FutureExt as _}; use log::info; use mempool::MemPoolHandle; #[cfg(not(feature = "standalone"))] use sequencer_core::SequencerCore; #[cfg(feature = "standalone")] use 
sequencer_core::SequencerCoreWithMockClients as SequencerCore; +use tokio::sync::Mutex; #[cfg(not(feature = "standalone"))] use super::JsonHandler; - -#[cfg(feature = "standalone")] -type JsonHandler = super::JsonHandlerWithMockClients; - -use tokio::sync::Mutex; - use crate::process::Process; pub const SHUTDOWN_TIMEOUT_SECS: u64 = 10; pub const NETWORK: &str = "network"; +#[cfg(feature = "standalone")] +type JsonHandler = super::JsonHandlerWithMockClients; + pub(crate) fn rpc_handler( message: web::Json, handler: web::Data

, @@ -41,7 +39,7 @@ pub(crate) fn rpc_handler( fn get_cors(cors_allowed_origins: &[String]) -> Cors { let mut cors = Cors::permissive(); - if cors_allowed_origins != ["*".to_string()] { + if cors_allowed_origins != ["*".to_owned()] { for origin in cors_allowed_origins { cors = cors.allowed_origin(origin); } @@ -68,22 +66,26 @@ pub async fn new_http_server( .await .sequencer_config() .max_block_size - .as_u64() as usize; + .as_u64() + .try_into() + .expect("`max_block_size` is expected to fit into usize"); let handler = web::Data::new(JsonHandler { - sequencer_state: seuquencer_core.clone(), + sequencer_state: Arc::clone(&seuquencer_core), mempool_handle, max_block_size, }); // HTTP server let http_server = HttpServer::new(move || { + let json_limit = limits_config + .json_payload_max_size + .as_u64() + .try_into() + .expect("`json_payload_max_size` is expected to fit into usize"); App::new() .wrap(get_cors(&cors_allowed_origins)) .app_data(handler.clone()) - .app_data( - web::JsonConfig::default() - .limit(limits_config.json_payload_max_size.as_u64() as usize), - ) + .app_data(web::JsonConfig::default().limit(json_limit)) .wrap(middleware::Logger::default()) .service(web::resource("/").route(web::post().to(rpc_handler::))) }) @@ -91,12 +93,12 @@ pub async fn new_http_server( .shutdown_timeout(SHUTDOWN_TIMEOUT_SECS) .disable_signals(); - let [addr] = http_server + let [final_addr] = http_server .addrs() .try_into() .expect("Exactly one address bound is expected for sequencer HTTP server"); - info!(target:NETWORK, "HTTP server started at {addr}"); + info!(target:NETWORK, "HTTP server started at {final_addr}"); - Ok((http_server.run(), addr)) + Ok((http_server.run(), final_addr)) } diff --git a/sequencer_rpc/src/process.rs b/sequencer_rpc/src/process.rs index b3dca691..17c46f03 100644 --- a/sequencer_rpc/src/process.rs +++ b/sequencer_rpc/src/process.rs @@ -1,13 +1,13 @@ use std::collections::HashMap; use actix_web::Error as HttpError; -use base64::{Engine, 
engine::general_purpose}; +use base64::{Engine as _, engine::general_purpose}; use common::{ block::{AccountInitialData, HashableBlockData}, rpc_primitives::{ errors::RpcError, message::{Message, Request}, - parser::RpcRequest, + parser::RpcRequest as _, requests::{ GetAccountBalanceRequest, GetAccountBalanceResponse, GetAccountRequest, GetAccountResponse, GetAccountsNoncesRequest, GetAccountsNoncesResponse, @@ -77,29 +77,28 @@ impl< } impl JsonHandler { - /// Example of request processing - #[allow(clippy::unused_async)] - async fn process_temp_hello(&self, request: Request) -> Result { + /// Example of request processing. + fn process_temp_hello(request: Request) -> Result { let _hello_request = HelloRequest::parse(Some(request.params))?; let response = HelloResponse { - greeting: HELLO_FROM_SEQUENCER.to_string(), + greeting: HELLO_FROM_SEQUENCER.to_owned(), }; respond(response) } async fn process_send_tx(&self, request: Request) -> Result { - let send_tx_req = SendTxRequest::parse(Some(request.params))?; - let tx = borsh::from_slice::(&send_tx_req.transaction).unwrap(); - let tx_hash = tx.hash(); - // Check transaction size against block size limit // Reserve ~200 bytes for block header overhead const BLOCK_HEADER_OVERHEAD: usize = 200; - let tx_size = borsh::to_vec(&tx) - .map_err(|_| TransactionMalformationError::FailedToDecode { tx: tx_hash })? 
- .len(); + + let send_tx_req = SendTxRequest::parse(Some(request.params))?; + let tx = borsh::from_slice::(&send_tx_req.transaction).unwrap(); + + let tx_hash = tx.hash(); + + let tx_size = send_tx_req.transaction.len(); let max_tx_size = self.max_block_size.saturating_sub(BLOCK_HEADER_OVERHEAD); @@ -123,7 +122,7 @@ impl JsonHandler .expect("Mempool is closed, this is a bug"); let response = SendTxResponse { - status: TRANSACTION_SUBMITTED.to_string(), + status: TRANSACTION_SUBMITTED.to_owned(), tx_hash, }; @@ -195,8 +194,8 @@ impl JsonHandler respond(response) } - /// Returns the initial accounts for testnet - /// ToDo: Useful only for testnet and needs to be removed later + /// Returns the initial accounts for testnet. + /// `ToDo`: Useful only for testnet and needs to be removed later. async fn get_initial_testnet_accounts(&self, request: Request) -> Result { let _get_initial_testnet_accounts_request = GetInitialTestnetAccountsRequest::parse(Some(request.params))?; @@ -210,8 +209,8 @@ impl JsonHandler respond(initial_accounts) } - /// Returns the balance of the account at the given account_id. - /// The account_id must be a valid hex string of the correct length. + /// Returns the balance of the account at the given `account_id`. + /// The `account_id` must be a valid hex string of the correct length. async fn process_get_account_balance(&self, request: Request) -> Result { let get_account_req = GetAccountBalanceRequest::parse(Some(request.params))?; let account_id = get_account_req.account_id; @@ -227,8 +226,8 @@ impl JsonHandler respond(response) } - /// Returns the nonces of the accounts at the given account_ids. - /// Each account_id must be a valid hex string of the correct length. + /// Returns the nonces of the accounts at the given `account_ids`. + /// Each `account_id` must be a valid hex string of the correct length. 
async fn process_get_accounts_nonces(&self, request: Request) -> Result { let get_account_nonces_req = GetAccountsNoncesRequest::parse(Some(request.params))?; let account_ids = get_account_nonces_req.account_ids; @@ -238,7 +237,7 @@ impl JsonHandler account_ids .into_iter() - .map(|account_id| state.state().get_account_by_id(account_id).nonce) + .map(|account_id| state.state().get_account_by_id(account_id).nonce.0) .collect() }; @@ -247,8 +246,8 @@ impl JsonHandler respond(response) } - /// Returns account struct for given account_id. - /// AccountId must be a valid hex string of the correct length. + /// Returns account struct for given `account_id`. + /// `AccountId` must be a valid hex string of the correct length. async fn process_get_account(&self, request: Request) -> Result { let get_account_nonces_req = GetAccountRequest::parse(Some(request.params))?; @@ -285,7 +284,7 @@ impl JsonHandler respond(response) } - /// Returns the commitment proof, corresponding to commitment + /// Returns the commitment proof, corresponding to commitment. 
async fn process_get_proof_by_commitment(&self, request: Request) -> Result { let get_proof_req = GetProofForCommitmentRequest::parse(Some(request.params))?; @@ -299,19 +298,19 @@ impl JsonHandler respond(response) } - async fn process_get_program_ids(&self, request: Request) -> Result { + fn process_get_program_ids(request: Request) -> Result { let _get_proof_req = GetProgramIdsRequest::parse(Some(request.params))?; let mut program_ids = HashMap::new(); program_ids.insert( - "authenticated_transfer".to_string(), + "authenticated_transfer".to_owned(), Program::authenticated_transfer_program().id(), ); - program_ids.insert("token".to_string(), Program::token().id()); - program_ids.insert("pinata".to_string(), Program::pinata().id()); - program_ids.insert("amm".to_string(), Program::amm().id()); + program_ids.insert("token".to_owned(), Program::token().id()); + program_ids.insert("pinata".to_owned(), Program::pinata().id()); + program_ids.insert("amm".to_owned(), Program::amm().id()); program_ids.insert( - "privacy_preserving_circuit".to_string(), + "privacy_preserving_circuit".to_owned(), nssa::PRIVACY_PRESERVING_CIRCUIT_ID, ); let response = GetProgramIdsResponse { program_ids }; @@ -320,7 +319,7 @@ impl JsonHandler pub async fn process_request_internal(&self, request: Request) -> Result { match request.method.as_ref() { - HELLO => self.process_temp_hello(request).await, + HELLO => Self::process_temp_hello(request), SEND_TX => self.process_send_tx(request).await, GET_BLOCK => self.process_get_block_data(request).await, GET_BLOCK_RANGE => self.process_get_block_range_data(request).await, @@ -332,7 +331,7 @@ impl JsonHandler GET_ACCOUNT => self.process_get_account(request).await, GET_TRANSACTION_BY_HASH => self.process_get_transaction_by_hash(request).await, GET_PROOF_FOR_COMMITMENT => self.process_get_proof_by_commitment(request).await, - GET_PROGRAM_IDS => self.process_get_program_ids(request).await, + GET_PROGRAM_IDS => Self::process_get_program_ids(request), _ => 
Err(RpcErr(RpcError::method_not_found(request.method))), } } @@ -342,8 +341,8 @@ impl JsonHandler mod tests { use std::{str::FromStr as _, sync::Arc, time::Duration}; - use base58::ToBase58; - use base64::{Engine, engine::general_purpose}; + use base58::ToBase58 as _; + use base64::{Engine as _, engine::general_purpose}; use bedrock_client::BackoffConfig; use common::{ block::AccountInitialData, config::BasicAuth, test_utils::sequencer_sign_key_for_testing, @@ -390,7 +389,7 @@ mod tests { SequencerConfig { home, - override_rust_log: Some("info".to_string()), + override_rust_log: Some("info".to_owned()), genesis_id: 1, is_genesis_random: false, max_num_tx_in_block: 10, @@ -410,7 +409,7 @@ mod tests { channel_id: [42; 32].into(), node_url: "http://localhost:8080".parse().unwrap(), auth: Some(BasicAuth { - username: "user".to_string(), + username: "user".to_owned(), password: None, }), }, @@ -443,7 +442,7 @@ mod tests { 0, AccountId::from_str(&[2; 32].to_base58()).unwrap(), balance_to_move, - signing_key, + &signing_key, ); mempool_handle @@ -455,7 +454,9 @@ mod tests { .produce_new_block_with_mempool_transactions() .unwrap(); - let max_block_size = sequencer_core.sequencer_config().max_block_size.as_u64() as usize; + let max_block_size = + usize::try_from(sequencer_core.sequencer_config().max_block_size.as_u64()) + .expect("`max_block_size` is expected to fit in usize"); let sequencer_core = Arc::new(Mutex::new(sequencer_core)); ( @@ -493,7 +494,7 @@ mod tests { } #[actix_web::test] - async fn test_get_account_balance_for_non_existent_account() { + async fn get_account_balance_for_non_existent_account() { let (json_handler, _, _) = components_for_tests().await; let request = serde_json::json!({ "jsonrpc": "2.0", @@ -515,7 +516,7 @@ mod tests { } #[actix_web::test] - async fn test_get_account_balance_for_invalid_base58() { + async fn get_account_balance_for_invalid_base58() { let (json_handler, _, _) = components_for_tests().await; let request = serde_json::json!({ 
"jsonrpc": "2.0", @@ -545,7 +546,7 @@ mod tests { } #[actix_web::test] - async fn test_get_account_balance_for_invalid_length() { + async fn get_account_balance_for_invalid_length() { let (json_handler, _, _) = components_for_tests().await; let request = serde_json::json!({ "jsonrpc": "2.0", @@ -575,7 +576,7 @@ mod tests { } #[actix_web::test] - async fn test_get_account_balance_for_existing_account() { + async fn get_account_balance_for_existing_account() { let (json_handler, initial_accounts, _) = components_for_tests().await; let acc1_id = initial_accounts[0].account_id; @@ -600,7 +601,7 @@ mod tests { } #[actix_web::test] - async fn test_get_accounts_nonces_for_non_existent_account() { + async fn get_accounts_nonces_for_non_existent_account() { let (json_handler, _, _) = components_for_tests().await; let request = serde_json::json!({ "jsonrpc": "2.0", @@ -622,7 +623,7 @@ mod tests { } #[actix_web::test] - async fn test_get_accounts_nonces_for_existent_account() { + async fn get_accounts_nonces_for_existent_account() { let (json_handler, initial_accounts, _) = components_for_tests().await; let acc1_id = initial_accounts[0].account_id; @@ -648,7 +649,7 @@ mod tests { } #[actix_web::test] - async fn test_get_account_data_for_non_existent_account() { + async fn get_account_data_for_non_existent_account() { let (json_handler, _, _) = components_for_tests().await; let request = serde_json::json!({ "jsonrpc": "2.0", @@ -675,7 +676,7 @@ mod tests { } #[actix_web::test] - async fn test_get_transaction_by_hash_for_non_existent_hash() { + async fn get_transaction_by_hash_for_non_existent_hash() { let (json_handler, _, _) = components_for_tests().await; let request = serde_json::json!({ "jsonrpc": "2.0", @@ -697,7 +698,7 @@ mod tests { } #[actix_web::test] - async fn test_get_transaction_by_hash_for_invalid_hex() { + async fn get_transaction_by_hash_for_invalid_hex() { let (json_handler, _, _) = components_for_tests().await; let request = serde_json::json!({ "jsonrpc": 
"2.0", @@ -728,7 +729,7 @@ mod tests { } #[actix_web::test] - async fn test_get_transaction_by_hash_for_invalid_length() { + async fn get_transaction_by_hash_for_invalid_length() { let (json_handler, _, _) = components_for_tests().await; let request = serde_json::json!({ "jsonrpc": "2.0", @@ -759,7 +760,7 @@ mod tests { } #[actix_web::test] - async fn test_get_transaction_by_hash_for_existing_transaction() { + async fn get_transaction_by_hash_for_existing_transaction() { let (json_handler, _, tx) = components_for_tests().await; let tx_hash_hex = hex::encode(tx.hash()); let expected_base64_encoded = general_purpose::STANDARD.encode(borsh::to_vec(&tx).unwrap()); diff --git a/sequencer_rpc/src/types/err_rpc.rs b/sequencer_rpc/src/types/err_rpc.rs index 92214c54..4cb75606 100644 --- a/sequencer_rpc/src/types/err_rpc.rs +++ b/sequencer_rpc/src/types/err_rpc.rs @@ -2,7 +2,16 @@ use common::{ rpc_primitives::errors::{RpcError, RpcParseError}, transaction::TransactionMalformationError, }; -use log::debug; + +macro_rules! standard_rpc_err_kind { + ($type_name:path) => { + impl RpcErrKind for $type_name { + fn into_rpc_err(self) -> RpcError { + self.into() + } + } + }; +} pub struct RpcErr(pub RpcError); @@ -18,15 +27,6 @@ impl From for RpcErr { } } -macro_rules! 
standard_rpc_err_kind { - ($type_name:path) => { - impl RpcErrKind for $type_name { - fn into_rpc_err(self) -> RpcError { - self.into() - } - } - }; -} standard_rpc_err_kind!(RpcError); standard_rpc_err_kind!(RpcParseError); @@ -47,9 +47,3 @@ impl RpcErrKind for TransactionMalformationError { RpcError::invalid_params(Some(serde_json::to_value(self).unwrap())) } } - -#[allow(clippy::needless_pass_by_value)] -pub fn from_rpc_err_into_anyhow_err(rpc_err: RpcError) -> anyhow::Error { - debug!("Rpc error cast to anyhow error : err {rpc_err:?}"); - anyhow::anyhow!(format!("{rpc_err:#?}")) -} diff --git a/sequencer_runner/Cargo.toml b/sequencer_runner/Cargo.toml index 5e627ed2..71404d13 100644 --- a/sequencer_runner/Cargo.toml +++ b/sequencer_runner/Cargo.toml @@ -4,6 +4,9 @@ version = "0.1.0" edition = "2024" license = { workspace = true } +[lints] +workspace = true + [dependencies] common.workspace = true sequencer_core = { workspace = true, features = ["testnet"] } diff --git a/sequencer_runner/Dockerfile b/sequencer_runner/Dockerfile index acdea432..0efdf561 100644 --- a/sequencer_runner/Dockerfile +++ b/sequencer_runner/Dockerfile @@ -1,5 +1,5 @@ # Chef stage - uses pre-built cargo-chef image -FROM lukemathwalker/cargo-chef:latest-rust-1.91.1-slim-trixie AS chef +FROM lukemathwalker/cargo-chef:latest-rust-1.94.0-slim-trixie AS chef # Install dependencies RUN apt-get update && apt-get install -y \ @@ -26,7 +26,7 @@ RUN ARCH=$(uname -m); \ else \ echo "Using manual build for $ARCH"; \ git clone --depth 1 --branch release-3.0 https://github.com/risc0/risc0.git; \ - git clone --depth 1 --branch r0.1.91.1 https://github.com/risc0/rust.git; \ + git clone --depth 1 --branch r0.1.94.0 https://github.com/risc0/rust.git; \ cd /risc0; \ cargo install --path rzup; \ rzup build --path /rust rust --verbose; \ diff --git a/sequencer_runner/src/lib.rs b/sequencer_runner/src/lib.rs index 944a6402..a17ecbf9 100644 --- a/sequencer_runner/src/lib.rs +++ b/sequencer_runner/src/lib.rs @@ 
-21,7 +21,7 @@ pub const RUST_LOG: &str = "RUST_LOG"; #[derive(Parser, Debug)] #[clap(version)] struct Args { - /// Path to configs + /// Path to configs. home_dir: PathBuf, } @@ -40,6 +40,10 @@ impl SequencerHandle { /// Runs the sequencer indefinitely, monitoring its tasks. /// /// If no error occurs, this function will never return. + #[expect( + clippy::integer_division_remainder_used, + reason = "Generated by select! macro, can't be easily rewritten to avoid this lint" + )] pub async fn run_forever(&mut self) -> Result { let Self { addr: _, @@ -68,13 +72,15 @@ impl SequencerHandle { } } + #[must_use] pub fn is_finished(&self) -> bool { self.main_loop_handle.is_finished() || self.retry_pending_blocks_loop_handle.is_finished() || self.listen_for_bedrock_blocks_loop_handle.is_finished() } - pub fn addr(&self) -> SocketAddr { + #[must_use] + pub const fn addr(&self) -> SocketAddr { self.addr } } @@ -192,7 +198,7 @@ async fn retry_pending_blocks(seq_core: &Arc>) -> Result<() ); } - for block in pending_blocks.iter() { + for block in &pending_blocks { debug!( "Resubmitting pending block with id {}", block.header.block_id @@ -290,9 +296,10 @@ pub async fn main_runner() -> Result<()> { let app_config = SequencerConfig::from_path(&home_dir.join("sequencer_config.json"))?; - if let Some(ref rust_log) = app_config.override_rust_log { + if let Some(rust_log) = &app_config.override_rust_log { info!("RUST_LOG env var set to {rust_log:?}"); + // SAFETY: there is no other threads running at this point unsafe { std::env::set_var(RUST_LOG, rust_log); } diff --git a/storage/Cargo.toml b/storage/Cargo.toml index b6374249..f18625cb 100644 --- a/storage/Cargo.toml +++ b/storage/Cargo.toml @@ -4,6 +4,9 @@ version = "0.1.0" edition = "2024" license = { workspace = true } +[lints] +workspace = true + [dependencies] common.workspace = true nssa.workspace = true diff --git a/storage/src/error.rs b/storage/src/error.rs index fa8a3265..3056e09b 100644 --- a/storage/src/error.rs +++ 
b/storage/src/error.rs @@ -17,21 +17,24 @@ pub enum DbError { } impl DbError { - pub fn rocksdb_cast_message(rerr: rocksdb::Error, message: Option) -> Self { + #[must_use] + pub const fn rocksdb_cast_message(rerr: rocksdb::Error, message: Option) -> Self { Self::RocksDbError { error: rerr, additional_info: message, } } - pub fn borsh_cast_message(berr: borsh::io::Error, message: Option) -> Self { + #[must_use] + pub const fn borsh_cast_message(berr: borsh::io::Error, message: Option) -> Self { Self::SerializationError { error: berr, additional_info: message, } } - pub fn db_interaction_error(message: String) -> Self { + #[must_use] + pub const fn db_interaction_error(message: String) -> Self { Self::DbInteractionError { additional_info: message, } diff --git a/storage/src/indexer.rs b/storage/src/indexer.rs index 2c37ab0f..534a1c0b 100644 --- a/storage/src/indexer.rs +++ b/storage/src/indexer.rs @@ -1,6 +1,9 @@ -use std::{collections::HashMap, ops::Div, path::Path, sync::Arc}; +use std::{collections::HashMap, path::Path, sync::Arc}; -use common::{block::Block, transaction::NSSATransaction}; +use common::{ + block::{Block, BlockId}, + transaction::NSSATransaction, +}; use nssa::V02State; use rocksdb::{ BoundColumnFamily, ColumnFamilyDescriptor, DBWithThreadMode, MultiThreaded, Options, WriteBatch, @@ -8,60 +11,60 @@ use rocksdb::{ use crate::error::DbError; -/// Maximal size of stored blocks in base +/// Maximal size of stored blocks in base. /// -/// Used to control db size +/// Used to control db size. /// /// Currently effectively unbounded. pub const BUFF_SIZE_ROCKSDB: usize = usize::MAX; -/// Size of stored blocks cache in memory +/// Size of stored blocks cache in memory. /// -/// Keeping small to not run out of memory +/// Keeping small to not run out of memory. pub const CACHE_SIZE: usize = 1000; -/// Key base for storing metainformation about id of first block in db +/// Key base for storing metainformation about id of first block in db. 
pub const DB_META_FIRST_BLOCK_IN_DB_KEY: &str = "first_block_in_db"; -/// Key base for storing metainformation about id of last current block in db +/// Key base for storing metainformation about id of last current block in db. pub const DB_META_LAST_BLOCK_IN_DB_KEY: &str = "last_block_in_db"; -/// Key base for storing metainformation about id of last observed L1 lib header in db +/// Key base for storing metainformation about id of last observed L1 lib header in db. pub const DB_META_LAST_OBSERVED_L1_LIB_HEADER_ID_IN_DB_KEY: &str = "last_observed_l1_lib_header_in_db"; -/// Key base for storing metainformation which describe if first block has been set +/// Key base for storing metainformation which describe if first block has been set. pub const DB_META_FIRST_BLOCK_SET_KEY: &str = "first_block_set"; -/// Key base for storing metainformation about the last breakpoint +/// Key base for storing metainformation about the last breakpoint. pub const DB_META_LAST_BREAKPOINT_ID: &str = "last_breakpoint_id"; -/// Interval between state breakpoints -pub const BREAKPOINT_INTERVAL: u64 = 100; +/// Interval between state breakpoints. +pub const BREAKPOINT_INTERVAL: u8 = 100; -/// Name of block column family +/// Name of block column family. pub const CF_BLOCK_NAME: &str = "cf_block"; -/// Name of meta column family +/// Name of meta column family. pub const CF_META_NAME: &str = "cf_meta"; -/// Name of breakpoint column family +/// Name of breakpoint column family. pub const CF_BREAKPOINT_NAME: &str = "cf_breakpoint"; -/// Name of hash to id map column family +/// Name of hash to id map column family. pub const CF_HASH_TO_ID: &str = "cf_hash_to_id"; -/// Name of tx hash to id map column family +/// Name of tx hash to id map column family. pub const CF_TX_TO_ID: &str = "cf_tx_to_id"; -/// Name of account meta column family +/// Name of account meta column family. 
pub const CF_ACC_META: &str = "cf_acc_meta"; -/// Name of account id to tx hash map column family +/// Name of account id to tx hash map column family. pub const CF_ACC_TO_TX: &str = "cf_acc_to_tx"; pub type DbResult = Result; -fn closest_breakpoint_id(block_id: u64) -> u64 { - block_id.saturating_sub(1).div(BREAKPOINT_INTERVAL) -} - pub struct RocksDBIO { pub db: DBWithThreadMode, } impl RocksDBIO { - pub fn open_or_create(path: &Path, start_data: Option<(Block, V02State)>) -> DbResult { + pub fn open_or_create( + path: &Path, + genesis_block: &Block, + initial_state: &V02State, + ) -> DbResult { let mut cf_opts = Options::default(); cf_opts.set_max_write_buffer_number(16); // ToDo: Add more column families for different data @@ -80,32 +83,27 @@ impl RocksDBIO { &db_opts, path, vec![cfb, cfmeta, cfbreakpoint, cfhti, cftti, cfameta, cfatt], - ); + ) + .map_err(|err| DbError::RocksDbError { + error: err, + additional_info: Some("Failed to open or create DB".to_owned()), + })?; - let dbio = Self { - // There is no point in handling this from runner code - db: db.unwrap(), - }; + let dbio = Self { db }; let is_start_set = dbio.get_meta_is_first_block_set()?; - - if is_start_set { - Ok(dbio) - } else if let Some((block, initial_state)) = start_data { - let block_id = block.header.block_id; + if !is_start_set { + let block_id = genesis_block.header.block_id; dbio.put_meta_last_block_in_db(block_id)?; - dbio.put_meta_first_block_in_db(block)?; + dbio.put_meta_first_block_in_db(genesis_block)?; dbio.put_meta_is_first_block_set()?; // First breakpoint setup dbio.put_breakpoint(0, initial_state)?; dbio.put_meta_last_breakpoint_id(0)?; - - Ok(dbio) - } else { - // Here we are trying to start a DB without a block, one should not do it. 
- unreachable!() } + + Ok(dbio) } pub fn destroy(path: &Path) -> DbResult<()> { @@ -168,7 +166,7 @@ impl RocksDBIO { borsh::to_vec(&DB_META_FIRST_BLOCK_IN_DB_KEY).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize DB_META_FIRST_BLOCK_IN_DB_KEY".to_string()), + Some("Failed to serialize DB_META_FIRST_BLOCK_IN_DB_KEY".to_owned()), ) })?, ) @@ -178,12 +176,12 @@ impl RocksDBIO { Ok(borsh::from_slice::(&data).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to deserialize first block".to_string()), + Some("Failed to deserialize first block".to_owned()), ) })?) } else { Err(DbError::db_interaction_error( - "First block not found".to_string(), + "First block not found".to_owned(), )) } } @@ -197,7 +195,7 @@ impl RocksDBIO { borsh::to_vec(&DB_META_LAST_BLOCK_IN_DB_KEY).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize DB_META_LAST_BLOCK_IN_DB_KEY".to_string()), + Some("Failed to serialize DB_META_LAST_BLOCK_IN_DB_KEY".to_owned()), ) })?, ) @@ -207,12 +205,12 @@ impl RocksDBIO { Ok(borsh::from_slice::(&data).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to deserialize last block".to_string()), + Some("Failed to deserialize last block".to_owned()), ) })?) 
} else { Err(DbError::db_interaction_error( - "Last block not found".to_string(), + "Last block not found".to_owned(), )) } } @@ -228,8 +226,7 @@ impl RocksDBIO { DbError::borsh_cast_message( err, Some( - "Failed to serialize DB_META_LAST_OBSERVED_L1_LIB_HEADER_ID_IN_DB_KEY" - .to_string(), + "Failed to serialize DB_META_LAST_OBSERVED_L1_LIB_HEADER_ID_IN_DB_KEY".to_owned(), ), ) }, @@ -241,7 +238,7 @@ impl RocksDBIO { borsh::from_slice::<[u8; 32]>(&data).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to deserialize last l1 lib header".to_string()), + Some("Failed to deserialize last l1 lib header".to_owned()), ) }) }) @@ -257,7 +254,7 @@ impl RocksDBIO { borsh::to_vec(&DB_META_FIRST_BLOCK_SET_KEY).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize DB_META_FIRST_BLOCK_SET_KEY".to_string()), + Some("Failed to serialize DB_META_FIRST_BLOCK_SET_KEY".to_owned()), ) })?, ) @@ -275,7 +272,7 @@ impl RocksDBIO { borsh::to_vec(&DB_META_LAST_BREAKPOINT_ID).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize DB_META_LAST_BREAKPOINT_ID".to_string()), + Some("Failed to serialize DB_META_LAST_BREAKPOINT_ID".to_owned()), ) })?, ) @@ -285,17 +282,17 @@ impl RocksDBIO { Ok(borsh::from_slice::(&data).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to deserialize last breakpoint id".to_string()), + Some("Failed to deserialize last breakpoint id".to_owned()), ) })?) 
} else { Err(DbError::db_interaction_error( - "Last breakpoint id not found".to_string(), + "Last breakpoint id not found".to_owned(), )) } } - pub fn put_meta_first_block_in_db(&self, block: Block) -> DbResult<()> { + pub fn put_meta_first_block_in_db(&self, block: &Block) -> DbResult<()> { let cf_meta = self.meta_column(); self.db .put_cf( @@ -303,13 +300,13 @@ impl RocksDBIO { borsh::to_vec(&DB_META_FIRST_BLOCK_IN_DB_KEY).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize DB_META_FIRST_BLOCK_IN_DB_KEY".to_string()), + Some("Failed to serialize DB_META_FIRST_BLOCK_IN_DB_KEY".to_owned()), ) })?, borsh::to_vec(&block.header.block_id).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize first block id".to_string()), + Some("Failed to serialize first block id".to_owned()), ) })?, ) @@ -327,13 +324,13 @@ impl RocksDBIO { borsh::to_vec(&DB_META_LAST_BLOCK_IN_DB_KEY).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize DB_META_LAST_BLOCK_IN_DB_KEY".to_string()), + Some("Failed to serialize DB_META_LAST_BLOCK_IN_DB_KEY".to_owned()), ) })?, borsh::to_vec(&block_id).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize last block id".to_string()), + Some("Failed to serialize last block id".to_owned()), ) })?, ) @@ -354,8 +351,7 @@ impl RocksDBIO { DbError::borsh_cast_message( err, Some( - "Failed to serialize DB_META_LAST_OBSERVED_L1_LIB_HEADER_ID_IN_DB_KEY" - .to_string(), + "Failed to serialize DB_META_LAST_OBSERVED_L1_LIB_HEADER_ID_IN_DB_KEY".to_owned(), ), ) }, @@ -363,7 +359,7 @@ impl RocksDBIO { borsh::to_vec(&l1_lib_header).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize last l1 block header".to_string()), + Some("Failed to serialize last l1 block header".to_owned()), ) })?, ) @@ -379,13 +375,13 @@ impl RocksDBIO { borsh::to_vec(&DB_META_LAST_BREAKPOINT_ID).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize 
DB_META_LAST_BREAKPOINT_ID".to_string()), + Some("Failed to serialize DB_META_LAST_BREAKPOINT_ID".to_owned()), ) })?, borsh::to_vec(&br_id).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize last block id".to_string()), + Some("Failed to serialize last block id".to_owned()), ) })?, ) @@ -401,10 +397,10 @@ impl RocksDBIO { borsh::to_vec(&DB_META_FIRST_BLOCK_SET_KEY).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize DB_META_FIRST_BLOCK_SET_KEY".to_string()), + Some("Failed to serialize DB_META_FIRST_BLOCK_SET_KEY".to_owned()), ) })?, - [1u8; 1], + [1_u8; 1], ) .map_err(|rerr| DbError::rocksdb_cast_message(rerr, None))?; Ok(()) @@ -412,7 +408,7 @@ impl RocksDBIO { // Block - pub fn put_block(&self, block: Block, l1_lib_header: [u8; 32]) -> DbResult<()> { + pub fn put_block(&self, block: &Block, l1_lib_header: [u8; 32]) -> DbResult<()> { let cf_block = self.block_column(); let cf_hti = self.hash_to_id_column(); let cf_tti: Arc> = self.tx_hash_to_id_column(); @@ -425,13 +421,13 @@ impl RocksDBIO { borsh::to_vec(&block.header.block_id).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize block id".to_string()), + Some("Failed to serialize block id".to_owned()), ) })?, borsh::to_vec(&block).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize block data".to_string()), + Some("Failed to serialize block data".to_owned()), ) })?, ) @@ -450,13 +446,13 @@ impl RocksDBIO { borsh::to_vec(&block.header.hash).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize block hash".to_string()), + Some("Failed to serialize block hash".to_owned()), ) })?, borsh::to_vec(&block.header.block_id).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize block id".to_string()), + Some("Failed to serialize block id".to_owned()), ) })?, ) @@ -464,7 +460,7 @@ impl RocksDBIO { let mut acc_to_tx_map: HashMap<[u8; 32], Vec<[u8; 32]>> = HashMap::new(); - 
for tx in block.body.transactions { + for tx in &block.body.transactions { let tx_hash = tx.hash(); self.db @@ -473,13 +469,13 @@ impl RocksDBIO { borsh::to_vec(&tx_hash).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize tx hash".to_string()), + Some("Failed to serialize tx hash".to_owned()), ) })?, borsh::to_vec(&block.header.block_id).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize block id".to_string()), + Some("Failed to serialize block id".to_owned()), ) })?, ) @@ -488,22 +484,30 @@ impl RocksDBIO { let acc_ids = tx .affected_public_account_ids() .into_iter() - .map(|account_id| account_id.into_value()) + .map(nssa::AccountId::into_value) .collect::>(); for acc_id in acc_ids { acc_to_tx_map .entry(acc_id) .and_modify(|tx_hashes| tx_hashes.push(tx_hash.into())) - .or_insert(vec![tx_hash.into()]); + .or_insert_with(|| vec![tx_hash.into()]); } } + #[expect( + clippy::iter_over_hash_type, + reason = "RocksDB will keep ordering persistent" + )] for (acc_id, tx_hashes) in acc_to_tx_map { - self.put_account_transactions(acc_id, tx_hashes)?; + self.put_account_transactions(acc_id, &tx_hashes)?; } - if block.header.block_id.is_multiple_of(BREAKPOINT_INTERVAL) { + if block + .header + .block_id + .is_multiple_of(u64::from(BREAKPOINT_INTERVAL)) + { self.put_next_breakpoint()?; } @@ -519,7 +523,7 @@ impl RocksDBIO { borsh::to_vec(&block_id).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize block id".to_string()), + Some("Failed to serialize block id".to_owned()), ) })?, ) @@ -529,17 +533,17 @@ impl RocksDBIO { Ok(borsh::from_slice::(&data).map_err(|serr| { DbError::borsh_cast_message( serr, - Some("Failed to deserialize block data".to_string()), + Some("Failed to deserialize block data".to_owned()), ) })?) 
} else { Err(DbError::db_interaction_error( - "Block on this id not found".to_string(), + "Block on this id not found".to_owned(), )) } } - pub fn get_block_batch(&self, before: Option, limit: u64) -> DbResult> { + pub fn get_block_batch(&self, before: Option, limit: u64) -> DbResult> { let cf_block = self.block_column(); let mut block_batch = vec![]; @@ -566,7 +570,7 @@ impl RocksDBIO { borsh::to_vec(&block_id).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize block id".to_string()), + Some("Failed to serialize block id".to_owned()), ) })?, ) @@ -576,7 +580,7 @@ impl RocksDBIO { Ok(borsh::from_slice::(&data).map_err(|serr| { DbError::borsh_cast_message( serr, - Some("Failed to deserialize block data".to_string()), + Some("Failed to deserialize block data".to_owned()), ) })?) } else { @@ -592,7 +596,7 @@ impl RocksDBIO { // State - pub fn put_breakpoint(&self, br_id: u64, breakpoint: V02State) -> DbResult<()> { + pub fn put_breakpoint(&self, br_id: u64, breakpoint: &V02State) -> DbResult<()> { let cf_br = self.breakpoint_column(); self.db @@ -601,13 +605,13 @@ impl RocksDBIO { borsh::to_vec(&br_id).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize breakpoint id".to_string()), + Some("Failed to serialize breakpoint id".to_owned()), ) })?, borsh::to_vec(&breakpoint).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize breakpoint data".to_string()), + Some("Failed to serialize breakpoint data".to_owned()), ) })?, ) @@ -623,7 +627,7 @@ impl RocksDBIO { borsh::to_vec(&br_id).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize breakpoint id".to_string()), + Some("Failed to serialize breakpoint id".to_owned()), ) })?, ) @@ -633,12 +637,12 @@ impl RocksDBIO { Ok(borsh::from_slice::(&data).map_err(|serr| { DbError::borsh_cast_message( serr, - Some("Failed to deserialize breakpoint data".to_string()), + Some("Failed to deserialize breakpoint data".to_owned()), ) })?) 
} else { Err(DbError::db_interaction_error( - "Breakpoint on this id not found".to_string(), + "Breakpoint on this id not found".to_owned(), )) } } @@ -653,7 +657,9 @@ impl RocksDBIO { // ToDo: update it to handle any genesis id // right now works correctly only if genesis_id < BREAKPOINT_INTERVAL let start = if br_id != 0 { - BREAKPOINT_INTERVAL * br_id + u64::from(BREAKPOINT_INTERVAL) + .checked_mul(br_id) + .expect("Reached maximum breakpoint id") } else { self.get_meta_first_block_in_db()? }; @@ -681,7 +687,7 @@ impl RocksDBIO { Ok(breakpoint) } else { Err(DbError::db_interaction_error( - "Block on this id not found".to_string(), + "Block on this id not found".to_owned(), )) } } @@ -692,17 +698,22 @@ impl RocksDBIO { pub fn put_next_breakpoint(&self) -> DbResult<()> { let last_block = self.get_meta_last_block_in_db()?; - let next_breakpoint_id = self.get_meta_last_breakpoint_id()? + 1; - let block_to_break_id = next_breakpoint_id * BREAKPOINT_INTERVAL; + let next_breakpoint_id = self + .get_meta_last_breakpoint_id()? 
+ .checked_add(1) + .expect("Reached maximum breakpoint id"); + let block_to_break_id = next_breakpoint_id + .checked_mul(u64::from(BREAKPOINT_INTERVAL)) + .expect("Reached maximum breakpoint id"); if block_to_break_id <= last_block { let next_breakpoint = self.calculate_state_for_id(block_to_break_id)?; - self.put_breakpoint(next_breakpoint_id, next_breakpoint)?; + self.put_breakpoint(next_breakpoint_id, &next_breakpoint)?; self.put_meta_last_breakpoint_id(next_breakpoint_id) } else { Err(DbError::db_interaction_error( - "Breakpoint not yet achieved".to_string(), + "Breakpoint not yet achieved".to_owned(), )) } } @@ -718,7 +729,7 @@ impl RocksDBIO { borsh::to_vec(&hash).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize block hash".to_string()), + Some("Failed to serialize block hash".to_owned()), ) })?, ) @@ -726,14 +737,11 @@ impl RocksDBIO { if let Some(data) = res { Ok(borsh::from_slice::(&data).map_err(|serr| { - DbError::borsh_cast_message( - serr, - Some("Failed to deserialize block id".to_string()), - ) + DbError::borsh_cast_message(serr, Some("Failed to deserialize block id".to_owned())) })?) } else { Err(DbError::db_interaction_error( - "Block on this hash not found".to_string(), + "Block on this hash not found".to_owned(), )) } } @@ -747,7 +755,7 @@ impl RocksDBIO { borsh::to_vec(&tx_hash).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize transaction hash".to_string()), + Some("Failed to serialize transaction hash".to_owned()), ) })?, ) @@ -755,14 +763,11 @@ impl RocksDBIO { if let Some(data) = res { Ok(borsh::from_slice::(&data).map_err(|serr| { - DbError::borsh_cast_message( - serr, - Some("Failed to deserialize block id".to_string()), - ) + DbError::borsh_cast_message(serr, Some("Failed to deserialize block id".to_owned())) })?) 
} else { Err(DbError::db_interaction_error( - "Block for this tx hash not found".to_string(), + "Block for this tx hash not found".to_owned(), )) } } @@ -780,12 +785,12 @@ impl RocksDBIO { write_batch.put_cf( &cf_ameta, borsh::to_vec(&acc_id).map_err(|err| { - DbError::borsh_cast_message(err, Some("Failed to serialize account id".to_string())) + DbError::borsh_cast_message(err, Some("Failed to serialize account id".to_owned())) })?, borsh::to_vec(&num_tx).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize acc metadata".to_string()), + Some("Failed to serialize acc metadata".to_owned()), ) })?, ); @@ -796,12 +801,12 @@ impl RocksDBIO { fn get_acc_meta_num_tx(&self, acc_id: [u8; 32]) -> DbResult> { let cf_ameta = self.account_meta_column(); let res = self.db.get_cf(&cf_ameta, acc_id).map_err(|rerr| { - DbError::rocksdb_cast_message(rerr, Some("Failed to read from acc meta cf".to_string())) + DbError::rocksdb_cast_message(rerr, Some("Failed to read from acc meta cf".to_owned())) })?; res.map(|data| { borsh::from_slice::(&data).map_err(|serr| { - DbError::borsh_cast_message(serr, Some("Failed to deserialize num tx".to_string())) + DbError::borsh_cast_message(serr, Some("Failed to deserialize num tx".to_owned())) }) }) .transpose() @@ -812,23 +817,25 @@ impl RocksDBIO { pub fn put_account_transactions( &self, acc_id: [u8; 32], - tx_hashes: Vec<[u8; 32]>, + tx_hashes: &[[u8; 32]], ) -> DbResult<()> { let acc_num_tx = self.get_acc_meta_num_tx(acc_id)?.unwrap_or(0); let cf_att = self.account_id_to_tx_hash_column(); let mut write_batch = WriteBatch::new(); for (tx_id, tx_hash) in tx_hashes.iter().enumerate() { - let put_id = acc_num_tx + tx_id as u64; + let put_id = acc_num_tx + .checked_add( + u64::try_from(tx_id) + .expect("Transaction number for account expected to fit in u64"), + ) + .expect("Reached maximum number of transactions for account"); let mut prefix = borsh::to_vec(&acc_id).map_err(|berr| { - DbError::borsh_cast_message( - berr, - 
Some("Failed to serialize account id".to_string()), - ) + DbError::borsh_cast_message(berr, Some("Failed to serialize account id".to_owned())) })?; let suffix = borsh::to_vec(&put_id).map_err(|berr| { - DbError::borsh_cast_message(berr, Some("Failed to serialize tx id".to_string())) + DbError::borsh_cast_message(berr, Some("Failed to serialize tx id".to_owned())) })?; prefix.extend_from_slice(&suffix); @@ -839,7 +846,7 @@ impl RocksDBIO { borsh::to_vec(tx_hash).map_err(|berr| { DbError::borsh_cast_message( berr, - Some("Failed to serialize tx hash".to_string()), + Some("Failed to serialize tx hash".to_owned()), ) })?, ); @@ -847,12 +854,17 @@ impl RocksDBIO { self.update_acc_meta_batch( acc_id, - acc_num_tx + (tx_hashes.len() as u64), + acc_num_tx + .checked_add( + u64::try_from(tx_hashes.len()) + .expect("Number of transactions expected to fit in u64"), + ) + .expect("Reached maximum number of transactions for account"), &mut write_batch, )?; self.db.write(write_batch).map_err(|rerr| { - DbError::rocksdb_cast_message(rerr, Some("Failed to write batch".to_string())) + DbError::rocksdb_cast_message(rerr, Some("Failed to write batch".to_owned())) }) } @@ -867,15 +879,12 @@ impl RocksDBIO { // ToDo: Multi get this - for tx_id in offset..(offset + limit) { + for tx_id in offset..(offset.saturating_add(limit)) { let mut prefix = borsh::to_vec(&acc_id).map_err(|berr| { - DbError::borsh_cast_message( - berr, - Some("Failed to serialize account id".to_string()), - ) + DbError::borsh_cast_message(berr, Some("Failed to serialize account id".to_owned())) })?; let suffix = borsh::to_vec(&tx_id).map_err(|berr| { - DbError::borsh_cast_message(berr, Some("Failed to serialize tx id".to_string())) + DbError::borsh_cast_message(berr, Some("Failed to serialize tx id".to_owned())) })?; prefix.extend_from_slice(&suffix); @@ -889,7 +898,7 @@ impl RocksDBIO { Ok(borsh::from_slice::<[u8; 32]>(&data).map_err(|serr| { DbError::borsh_cast_message( serr, - Some("Failed to deserialize 
tx_hash".to_string()), + Some("Failed to deserialize tx_hash".to_owned()), ) })?) } else { @@ -920,10 +929,12 @@ impl RocksDBIO { .transactions .iter() .find(|tx| tx.hash().0 == tx_hash) - .ok_or(DbError::db_interaction_error(format!( - "Missing transaction in block {} with hash {:#?}", - block.header.block_id, tx_hash - )))?; + .ok_or_else(|| { + DbError::db_interaction_error(format!( + "Missing transaction in block {} with hash {:#?}", + block.header.block_id, tx_hash + )) + })?; tx_batch.push(transaction.clone()); } @@ -932,8 +943,17 @@ impl RocksDBIO { } } +fn closest_breakpoint_id(block_id: u64) -> u64 { + block_id + .saturating_sub(1) + .checked_div(u64::from(BREAKPOINT_INTERVAL)) + .expect("Breakpoint interval is not zero") +} + #[cfg(test)] mod tests { + #![expect(clippy::shadow_unrelated, reason = "We don't care about it in tests")] + use nssa::AccountId; use tempfile::tempdir; @@ -972,30 +992,28 @@ mod tests { fn transfer(amount: u128, nonce: u128, direction: bool) -> NSSATransaction { let from; let to; - let sign_key; - - if direction { + let sign_key = if direction { from = acc1(); to = acc2(); - sign_key = acc1_sign_key(); + acc1_sign_key() } else { from = acc2(); to = acc1(); - sign_key = acc2_sign_key(); - } + acc2_sign_key() + }; common::test_utils::create_transaction_native_token_transfer( - from, nonce, to, amount, sign_key, + from, nonce, to, amount, &sign_key, ) } #[test] - fn test_start_db() { + fn start_db() { let temp_dir = tempdir().unwrap(); let temdir_path = temp_dir.path(); - let dbio = RocksDBIO::open_or_create(temdir_path, Some((genesis_block(), initial_state()))) - .unwrap(); + let dbio = + RocksDBIO::open_or_create(temdir_path, &genesis_block(), &initial_state()).unwrap(); let last_id = dbio.get_meta_last_block_in_db().unwrap(); let first_id = dbio.get_meta_first_block_in_db().unwrap(); @@ -1021,18 +1039,18 @@ mod tests { } #[test] - fn test_one_block_insertion() { + fn one_block_insertion() { let temp_dir = tempdir().unwrap(); let 
temdir_path = temp_dir.path(); - let dbio = RocksDBIO::open_or_create(temdir_path, Some((genesis_block(), initial_state()))) - .unwrap(); + let dbio = + RocksDBIO::open_or_create(temdir_path, &genesis_block(), &initial_state()).unwrap(); let prev_hash = genesis_block().header.hash; let transfer_tx = transfer(1, 0, true); let block = common::test_utils::produce_dummy_block(2, Some(prev_hash), vec![transfer_tx]); - dbio.put_block(block, [1; 32]).unwrap(); + dbio.put_block(&block, [1; 32]).unwrap(); let last_id = dbio.get_meta_last_block_in_db().unwrap(); let first_id = dbio.get_meta_first_block_in_db().unwrap(); @@ -1060,22 +1078,25 @@ mod tests { } #[test] - fn test_new_breakpoint() { + fn new_breakpoint() { let temp_dir = tempdir().unwrap(); let temdir_path = temp_dir.path(); - let dbio = RocksDBIO::open_or_create(temdir_path, Some((genesis_block(), initial_state()))) - .unwrap(); + let dbio = + RocksDBIO::open_or_create(temdir_path, &genesis_block(), &initial_state()).unwrap(); for i in 1..BREAKPOINT_INTERVAL { let last_id = dbio.get_meta_last_block_in_db().unwrap(); let last_block = dbio.get_block(last_id).unwrap(); let prev_hash = last_block.header.hash; - let transfer_tx = transfer(1, (i - 1) as u128, true); - let block = - common::test_utils::produce_dummy_block(i + 1, Some(prev_hash), vec![transfer_tx]); - dbio.put_block(block, [i as u8; 32]).unwrap(); + let transfer_tx = transfer(1, u128::from(i - 1), true); + let block = common::test_utils::produce_dummy_block( + u64::from(i + 1), + Some(prev_hash), + vec![transfer_tx], + ); + dbio.put_block(&block, [i; 32]).unwrap(); } let last_id = dbio.get_meta_last_block_in_db().unwrap(); @@ -1113,12 +1134,12 @@ mod tests { } #[test] - fn test_simple_maps() { + fn simple_maps() { let temp_dir = tempdir().unwrap(); let temdir_path = temp_dir.path(); - let dbio = RocksDBIO::open_or_create(temdir_path, Some((genesis_block(), initial_state()))) - .unwrap(); + let dbio = + RocksDBIO::open_or_create(temdir_path, 
&genesis_block(), &initial_state()).unwrap(); let last_id = dbio.get_meta_last_block_in_db().unwrap(); let last_block = dbio.get_block(last_id).unwrap(); @@ -1129,7 +1150,7 @@ mod tests { let control_hash1 = block.header.hash; - dbio.put_block(block, [1; 32]).unwrap(); + dbio.put_block(&block, [1; 32]).unwrap(); let last_id = dbio.get_meta_last_block_in_db().unwrap(); let last_block = dbio.get_block(last_id).unwrap(); @@ -1140,7 +1161,7 @@ mod tests { let control_hash2 = block.header.hash; - dbio.put_block(block, [2; 32]).unwrap(); + dbio.put_block(&block, [2; 32]).unwrap(); let last_id = dbio.get_meta_last_block_in_db().unwrap(); let last_block = dbio.get_block(last_id).unwrap(); @@ -1151,7 +1172,7 @@ mod tests { let control_tx_hash1 = transfer_tx.hash(); let block = common::test_utils::produce_dummy_block(4, Some(prev_hash), vec![transfer_tx]); - dbio.put_block(block, [3; 32]).unwrap(); + dbio.put_block(&block, [3; 32]).unwrap(); let last_id = dbio.get_meta_last_block_in_db().unwrap(); let last_block = dbio.get_block(last_id).unwrap(); @@ -1162,7 +1183,7 @@ mod tests { let control_tx_hash2 = transfer_tx.hash(); let block = common::test_utils::produce_dummy_block(5, Some(prev_hash), vec![transfer_tx]); - dbio.put_block(block, [4; 32]).unwrap(); + dbio.put_block(&block, [4; 32]).unwrap(); let control_block_id1 = dbio.get_block_id_by_hash(control_hash1.0).unwrap(); let control_block_id2 = dbio.get_block_id_by_hash(control_hash2.0).unwrap(); @@ -1176,14 +1197,14 @@ mod tests { } #[test] - fn test_block_batch() { + fn block_batch() { let temp_dir = tempdir().unwrap(); let temdir_path = temp_dir.path(); let mut block_res = vec![]; - let dbio = RocksDBIO::open_or_create(temdir_path, Some((genesis_block(), initial_state()))) - .unwrap(); + let dbio = + RocksDBIO::open_or_create(temdir_path, &genesis_block(), &initial_state()).unwrap(); let last_id = dbio.get_meta_last_block_in_db().unwrap(); let last_block = dbio.get_block(last_id).unwrap(); @@ -1193,7 +1214,7 @@ mod 
tests { let block = common::test_utils::produce_dummy_block(2, Some(prev_hash), vec![transfer_tx]); block_res.push(block.clone()); - dbio.put_block(block, [1; 32]).unwrap(); + dbio.put_block(&block, [1; 32]).unwrap(); let last_id = dbio.get_meta_last_block_in_db().unwrap(); let last_block = dbio.get_block(last_id).unwrap(); @@ -1203,7 +1224,7 @@ mod tests { let block = common::test_utils::produce_dummy_block(3, Some(prev_hash), vec![transfer_tx]); block_res.push(block.clone()); - dbio.put_block(block, [2; 32]).unwrap(); + dbio.put_block(&block, [2; 32]).unwrap(); let last_id = dbio.get_meta_last_block_in_db().unwrap(); let last_block = dbio.get_block(last_id).unwrap(); @@ -1213,7 +1234,7 @@ mod tests { let block = common::test_utils::produce_dummy_block(4, Some(prev_hash), vec![transfer_tx]); block_res.push(block.clone()); - dbio.put_block(block, [3; 32]).unwrap(); + dbio.put_block(&block, [3; 32]).unwrap(); let last_id = dbio.get_meta_last_block_in_db().unwrap(); let last_block = dbio.get_block(last_id).unwrap(); @@ -1223,7 +1244,7 @@ mod tests { let block = common::test_utils::produce_dummy_block(5, Some(prev_hash), vec![transfer_tx]); block_res.push(block.clone()); - dbio.put_block(block, [4; 32]).unwrap(); + dbio.put_block(&block, [4; 32]).unwrap(); let block_hashes_mem: Vec<[u8; 32]> = block_res.into_iter().map(|bl| bl.header.hash.0).collect(); @@ -1254,14 +1275,14 @@ mod tests { } #[test] - fn test_account_map() { + fn account_map() { let temp_dir = tempdir().unwrap(); let temdir_path = temp_dir.path(); let mut tx_hash_res = vec![]; - let dbio = RocksDBIO::open_or_create(temdir_path, Some((genesis_block(), initial_state()))) - .unwrap(); + let dbio = + RocksDBIO::open_or_create(temdir_path, &genesis_block(), &initial_state()).unwrap(); let last_id = dbio.get_meta_last_block_in_db().unwrap(); let last_block = dbio.get_block(last_id).unwrap(); @@ -1273,7 +1294,7 @@ mod tests { let block = common::test_utils::produce_dummy_block(2, Some(prev_hash), 
vec![transfer_tx]); - dbio.put_block(block, [1; 32]).unwrap(); + dbio.put_block(&block, [1; 32]).unwrap(); let last_id = dbio.get_meta_last_block_in_db().unwrap(); let last_block = dbio.get_block(last_id).unwrap(); @@ -1285,7 +1306,7 @@ mod tests { let block = common::test_utils::produce_dummy_block(3, Some(prev_hash), vec![transfer_tx]); - dbio.put_block(block, [2; 32]).unwrap(); + dbio.put_block(&block, [2; 32]).unwrap(); let last_id = dbio.get_meta_last_block_in_db().unwrap(); let last_block = dbio.get_block(last_id).unwrap(); @@ -1297,7 +1318,7 @@ mod tests { let block = common::test_utils::produce_dummy_block(4, Some(prev_hash), vec![transfer_tx]); - dbio.put_block(block, [3; 32]).unwrap(); + dbio.put_block(&block, [3; 32]).unwrap(); let last_id = dbio.get_meta_last_block_in_db().unwrap(); let last_block = dbio.get_block(last_id).unwrap(); @@ -1309,7 +1330,7 @@ mod tests { let block = common::test_utils::produce_dummy_block(5, Some(prev_hash), vec![transfer_tx]); - dbio.put_block(block, [4; 32]).unwrap(); + dbio.put_block(&block, [4; 32]).unwrap(); let acc1_tx = dbio.get_acc_transactions(*acc1().value(), 0, 4).unwrap(); let acc1_tx_hashes: Vec<[u8; 32]> = acc1_tx.into_iter().map(|tx| tx.hash().0).collect(); @@ -1320,6 +1341,6 @@ mod tests { let acc1_tx_limited_hashes: Vec<[u8; 32]> = acc1_tx_limited.into_iter().map(|tx| tx.hash().0).collect(); - assert_eq!(acc1_tx_limited_hashes.as_slice(), &tx_hash_res[1..]) + assert_eq!(acc1_tx_limited_hashes.as_slice(), &tx_hash_res[1..]); } } diff --git a/storage/src/sequencer.rs b/storage/src/sequencer.rs index 2de123dc..8d072a52 100644 --- a/storage/src/sequencer.rs +++ b/storage/src/sequencer.rs @@ -8,37 +8,37 @@ use rocksdb::{ use crate::error::DbError; -/// Maximal size of stored blocks in base +/// Maximal size of stored blocks in base. /// -/// Used to control db size +/// Used to control db size. /// /// Currently effectively unbounded. 
pub const BUFF_SIZE_ROCKSDB: usize = usize::MAX; -/// Size of stored blocks cache in memory +/// Size of stored blocks cache in memory. /// -/// Keeping small to not run out of memory +/// Keeping small to not run out of memory. pub const CACHE_SIZE: usize = 1000; -/// Key base for storing metainformation about id of first block in db +/// Key base for storing metainformation about id of first block in db. pub const DB_META_FIRST_BLOCK_IN_DB_KEY: &str = "first_block_in_db"; -/// Key base for storing metainformation about id of last current block in db +/// Key base for storing metainformation about id of last current block in db. pub const DB_META_LAST_BLOCK_IN_DB_KEY: &str = "last_block_in_db"; -/// Key base for storing metainformation which describe if first block has been set +/// Key base for storing metainformation which describe if first block has been set. pub const DB_META_FIRST_BLOCK_SET_KEY: &str = "first_block_set"; -/// Key base for storing metainformation about the last finalized block on Bedrock +/// Key base for storing metainformation about the last finalized block on Bedrock. pub const DB_META_LAST_FINALIZED_BLOCK_ID: &str = "last_finalized_block_id"; -/// Key base for storing metainformation about the latest block meta +/// Key base for storing metainformation about the latest block meta. pub const DB_META_LATEST_BLOCK_META_KEY: &str = "latest_block_meta"; -/// Key base for storing the NSSA state +/// Key base for storing the NSSA state. pub const DB_NSSA_STATE_KEY: &str = "nssa_state"; -/// Name of block column family +/// Name of block column family. pub const CF_BLOCK_NAME: &str = "cf_block"; -/// Name of meta column family +/// Name of meta column family. pub const CF_META_NAME: &str = "cf_meta"; -/// Name of state column family +/// Name of state column family. 
pub const CF_NSSA_STATE_NAME: &str = "cf_nssa_state"; pub type DbResult = Result; @@ -50,7 +50,8 @@ pub struct RocksDBIO { impl RocksDBIO { pub fn open_or_create( path: &Path, - start_block: Option<(&Block, MantleMsgId)>, + genesis_block: &Block, + genesis_msg_id: MantleMsgId, ) -> DbResult { let mut cf_opts = Options::default(); cf_opts.set_max_write_buffer_number(16); @@ -66,34 +67,29 @@ impl RocksDBIO { &db_opts, path, vec![cfb, cfmeta, cfstate], - ); + ) + .map_err(|err| DbError::RocksDbError { + error: err, + additional_info: Some("Failed to open or create DB".to_owned()), + })?; - let dbio = Self { - // There is no point in handling this from runner code - db: db.unwrap(), - }; + let dbio = Self { db }; let is_start_set = dbio.get_meta_is_first_block_set()?; - - if is_start_set { - Ok(dbio) - } else if let Some((block, msg_id)) = start_block { - let block_id = block.header.block_id; - dbio.put_meta_first_block_in_db(block, msg_id)?; + if !is_start_set { + let block_id = genesis_block.header.block_id; + dbio.put_meta_first_block_in_db(genesis_block, genesis_msg_id)?; dbio.put_meta_is_first_block_set()?; dbio.put_meta_last_block_in_db(block_id)?; dbio.put_meta_last_finalized_block_id(None)?; dbio.put_meta_latest_block_meta(&BlockMeta { - id: block.header.block_id, - hash: block.header.hash, - msg_id, + id: genesis_block.header.block_id, + hash: genesis_block.header.hash, + msg_id: genesis_msg_id, })?; - - Ok(dbio) - } else { - // Here we are trying to start a DB without a block, one should not do it. 
- unreachable!() } + + Ok(dbio) } pub fn destroy(path: &Path) -> DbResult<()> { @@ -132,7 +128,7 @@ impl RocksDBIO { borsh::to_vec(&DB_META_FIRST_BLOCK_IN_DB_KEY).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize DB_META_FIRST_BLOCK_IN_DB_KEY".to_string()), + Some("Failed to serialize DB_META_FIRST_BLOCK_IN_DB_KEY".to_owned()), ) })?, ) @@ -142,12 +138,12 @@ impl RocksDBIO { Ok(borsh::from_slice::(&data).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to deserialize first block".to_string()), + Some("Failed to deserialize first block".to_owned()), ) })?) } else { Err(DbError::db_interaction_error( - "First block not found".to_string(), + "First block not found".to_owned(), )) } } @@ -161,7 +157,7 @@ impl RocksDBIO { borsh::to_vec(&DB_META_LAST_BLOCK_IN_DB_KEY).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize DB_META_LAST_BLOCK_IN_DB_KEY".to_string()), + Some("Failed to serialize DB_META_LAST_BLOCK_IN_DB_KEY".to_owned()), ) })?, ) @@ -171,12 +167,12 @@ impl RocksDBIO { Ok(borsh::from_slice::(&data).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to deserialize last block".to_string()), + Some("Failed to deserialize last block".to_owned()), ) })?) 
} else { Err(DbError::db_interaction_error( - "Last block not found".to_string(), + "Last block not found".to_owned(), )) } } @@ -190,7 +186,7 @@ impl RocksDBIO { borsh::to_vec(&DB_META_FIRST_BLOCK_SET_KEY).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize DB_META_FIRST_BLOCK_SET_KEY".to_string()), + Some("Failed to serialize DB_META_FIRST_BLOCK_SET_KEY".to_owned()), ) })?, ) @@ -206,11 +202,11 @@ impl RocksDBIO { borsh::to_vec(&DB_NSSA_STATE_KEY).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize DB_NSSA_STATE_KEY".to_string()), + Some("Failed to serialize DB_NSSA_STATE_KEY".to_owned()), ) })?, borsh::to_vec(state).map_err(|err| { - DbError::borsh_cast_message(err, Some("Failed to serialize NSSA state".to_string())) + DbError::borsh_cast_message(err, Some("Failed to serialize NSSA state".to_owned())) })?, ); @@ -225,13 +221,13 @@ impl RocksDBIO { borsh::to_vec(&DB_META_FIRST_BLOCK_IN_DB_KEY).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize DB_META_FIRST_BLOCK_IN_DB_KEY".to_string()), + Some("Failed to serialize DB_META_FIRST_BLOCK_IN_DB_KEY".to_owned()), ) })?, borsh::to_vec(&block.header.block_id).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize first block id".to_string()), + Some("Failed to serialize first block id".to_owned()), ) })?, ) @@ -242,7 +238,7 @@ impl RocksDBIO { self.db.write(batch).map_err(|rerr| { DbError::rocksdb_cast_message( rerr, - Some("Failed to write first block in db".to_string()), + Some("Failed to write first block in db".to_owned()), ) })?; @@ -257,13 +253,13 @@ impl RocksDBIO { borsh::to_vec(&DB_META_LAST_BLOCK_IN_DB_KEY).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize DB_META_LAST_BLOCK_IN_DB_KEY".to_string()), + Some("Failed to serialize DB_META_LAST_BLOCK_IN_DB_KEY".to_owned()), ) })?, borsh::to_vec(&block_id).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize last 
block id".to_string()), + Some("Failed to serialize last block id".to_owned()), ) })?, ) @@ -282,13 +278,13 @@ impl RocksDBIO { borsh::to_vec(&DB_META_LAST_BLOCK_IN_DB_KEY).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize DB_META_LAST_BLOCK_IN_DB_KEY".to_string()), + Some("Failed to serialize DB_META_LAST_BLOCK_IN_DB_KEY".to_owned()), ) })?, borsh::to_vec(&block_id).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize last block id".to_string()), + Some("Failed to serialize last block id".to_owned()), ) })?, ); @@ -303,13 +299,13 @@ impl RocksDBIO { borsh::to_vec(&DB_META_LAST_FINALIZED_BLOCK_ID).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize DB_META_LAST_FINALIZED_BLOCK_ID".to_string()), + Some("Failed to serialize DB_META_LAST_FINALIZED_BLOCK_ID".to_owned()), ) })?, borsh::to_vec(&block_id).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize last block id".to_string()), + Some("Failed to serialize last block id".to_owned()), ) })?, ) @@ -325,10 +321,10 @@ impl RocksDBIO { borsh::to_vec(&DB_META_FIRST_BLOCK_SET_KEY).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize DB_META_FIRST_BLOCK_SET_KEY".to_string()), + Some("Failed to serialize DB_META_FIRST_BLOCK_SET_KEY".to_owned()), ) })?, - [1u8; 1], + [1_u8; 1], ) .map_err(|rerr| DbError::rocksdb_cast_message(rerr, None))?; Ok(()) @@ -342,13 +338,13 @@ impl RocksDBIO { borsh::to_vec(&DB_META_LATEST_BLOCK_META_KEY).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize DB_META_LATEST_BLOCK_META_KEY".to_string()), + Some("Failed to serialize DB_META_LATEST_BLOCK_META_KEY".to_owned()), ) })?, borsh::to_vec(&block_meta).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize latest block meta".to_string()), + Some("Failed to serialize latest block meta".to_owned()), ) })?, ) @@ -367,13 +363,13 @@ impl RocksDBIO { 
borsh::to_vec(&DB_META_LATEST_BLOCK_META_KEY).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize DB_META_LATEST_BLOCK_META_KEY".to_string()), + Some("Failed to serialize DB_META_LATEST_BLOCK_META_KEY".to_owned()), ) })?, borsh::to_vec(&block_meta).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize latest block meta".to_string()), + Some("Failed to serialize latest block meta".to_owned()), ) })?, ); @@ -389,7 +385,7 @@ impl RocksDBIO { borsh::to_vec(&DB_META_LATEST_BLOCK_META_KEY).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize DB_META_LATEST_BLOCK_META_KEY".to_string()), + Some("Failed to serialize DB_META_LATEST_BLOCK_META_KEY".to_owned()), ) })?, ) @@ -399,12 +395,12 @@ impl RocksDBIO { Ok(borsh::from_slice::(&data).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to deserialize latest block meta".to_string()), + Some("Failed to deserialize latest block meta".to_owned()), ) })?) } else { Err(DbError::db_interaction_error( - "Latest block meta not found".to_string(), + "Latest block meta not found".to_owned(), )) } } @@ -437,10 +433,10 @@ impl RocksDBIO { batch.put_cf( &cf_block, borsh::to_vec(&block.header.block_id).map_err(|err| { - DbError::borsh_cast_message(err, Some("Failed to serialize block id".to_string())) + DbError::borsh_cast_message(err, Some("Failed to serialize block id".to_owned())) })?, borsh::to_vec(block).map_err(|err| { - DbError::borsh_cast_message(err, Some("Failed to serialize block data".to_string())) + DbError::borsh_cast_message(err, Some("Failed to serialize block data".to_owned())) })?, ); Ok(()) @@ -455,7 +451,7 @@ impl RocksDBIO { borsh::to_vec(&block_id).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize block id".to_string()), + Some("Failed to serialize block id".to_owned()), ) })?, ) @@ -465,12 +461,12 @@ impl RocksDBIO { Ok(borsh::from_slice::(&data).map_err(|serr| { DbError::borsh_cast_message( serr, - 
Some("Failed to deserialize block data".to_string()), + Some("Failed to deserialize block data".to_owned()), ) })?) } else { Err(DbError::db_interaction_error( - "Block on this id not found".to_string(), + "Block on this id not found".to_owned(), )) } } @@ -484,7 +480,7 @@ impl RocksDBIO { borsh::to_vec(&DB_NSSA_STATE_KEY).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize block id".to_string()), + Some("Failed to serialize block id".to_owned()), ) })?, ) @@ -494,12 +490,12 @@ impl RocksDBIO { Ok(borsh::from_slice::(&data).map_err(|serr| { DbError::borsh_cast_message( serr, - Some("Failed to deserialize block data".to_string()), + Some("Failed to deserialize block data".to_owned()), ) })?) } else { Err(DbError::db_interaction_error( - "Block on this id not found".to_string(), + "Block on this id not found".to_owned(), )) } } @@ -507,7 +503,7 @@ impl RocksDBIO { pub fn delete_block(&self, block_id: u64) -> DbResult<()> { let cf_block = self.block_column(); let key = borsh::to_vec(&block_id).map_err(|err| { - DbError::borsh_cast_message(err, Some("Failed to serialize block id".to_string())) + DbError::borsh_cast_message(err, Some("Failed to serialize block id".to_owned())) })?; if self @@ -517,7 +513,7 @@ impl RocksDBIO { .is_none() { return Err(DbError::db_interaction_error( - "Block on this id not found".to_string(), + "Block on this id not found".to_owned(), )); } @@ -539,13 +535,13 @@ impl RocksDBIO { borsh::to_vec(&block_id).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize block id".to_string()), + Some("Failed to serialize block id".to_owned()), ) })?, borsh::to_vec(&block).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to serialize block data".to_string()), + Some("Failed to serialize block data".to_owned()), ) })?, ) @@ -567,14 +563,14 @@ impl RocksDBIO { let (_key, value) = res.map_err(|rerr| { DbError::rocksdb_cast_message( rerr, - Some("Failed to get key value pair".to_string()), + 
Some("Failed to get key value pair".to_owned()), ) })?; borsh::from_slice::(&value).map_err(|err| { DbError::borsh_cast_message( err, - Some("Failed to deserialize block data".to_string()), + Some("Failed to deserialize block data".to_owned()), ) }) }) diff --git a/test_program_methods/Cargo.toml b/test_program_methods/Cargo.toml index 1c3368c7..9b4934e2 100644 --- a/test_program_methods/Cargo.toml +++ b/test_program_methods/Cargo.toml @@ -4,6 +4,9 @@ version = "0.1.0" edition = "2024" license = { workspace = true } +[lints] +workspace = true + [build-dependencies] risc0-build.workspace = true diff --git a/test_program_methods/guest/Cargo.toml b/test_program_methods/guest/Cargo.toml index 21c4fdc7..1ca958b3 100644 --- a/test_program_methods/guest/Cargo.toml +++ b/test_program_methods/guest/Cargo.toml @@ -4,6 +4,9 @@ version = "0.1.0" edition = "2024" license = { workspace = true } +[lints] +workspace = true + [dependencies] nssa_core.workspace = true diff --git a/test_program_methods/guest/src/bin/burner.rs b/test_program_methods/guest/src/bin/burner.rs index 3002e39c..a2256aa3 100644 --- a/test_program_methods/guest/src/bin/burner.rs +++ b/test_program_methods/guest/src/bin/burner.rs @@ -11,14 +11,13 @@ fn main() { instruction_words, ) = read_nssa_inputs::(); - let [pre] = match pre_states.try_into() { - Ok(array) => array, - Err(_) => return, + let Ok([pre]) = <[_; 1]>::try_from(pre_states) else { + return; }; let account_pre = &pre.account; let mut account_post = account_pre.clone(); - account_post.balance -= balance_to_burn; + account_post.balance = account_post.balance.saturating_sub(balance_to_burn); write_nssa_outputs( instruction_words, diff --git a/test_program_methods/guest/src/bin/chain_caller.rs b/test_program_methods/guest/src/bin/chain_caller.rs index 0cdac8d6..7e67fa9b 100644 --- a/test_program_methods/guest/src/bin/chain_caller.rs +++ b/test_program_methods/guest/src/bin/chain_caller.rs @@ -8,7 +8,8 @@ type Instruction = (u128, ProgramId, u32, 
Option); /// A program that calls another program `num_chain_calls` times. /// It permutes the order of the input accounts on the subsequent call -/// The `ProgramId` in the instruction must be the program_id of the authenticated transfers program +/// The `ProgramId` in the instruction must be the `program_id` of the authenticated transfers +/// program. fn main() { let ( ProgramInput { @@ -18,9 +19,8 @@ fn main() { instruction_words, ) = read_nssa_inputs::(); - let [recipient_pre, sender_pre] = match pre_states.try_into() { - Ok(array) => array, - Err(_) => return, + let Ok([recipient_pre, sender_pre]) = <[_; 2]>::try_from(pre_states) else { + return; }; let instruction_data = to_vec(&balance).unwrap(); @@ -42,8 +42,16 @@ fn main() { }; chained_calls.push(new_chained_call); - running_sender_pre.account.balance -= balance; - running_recipient_pre.account.balance += balance; + running_sender_pre.account.balance = + match running_sender_pre.account.balance.checked_sub(balance) { + Some(new_balance) => new_balance, + None => return, + }; + running_recipient_pre.account.balance = + match running_recipient_pre.account.balance.checked_add(balance) { + Some(new_balance) => new_balance, + None => return, + }; } write_nssa_outputs_with_chained_call( diff --git a/test_program_methods/guest/src/bin/changer_claimer.rs b/test_program_methods/guest/src/bin/changer_claimer.rs index 8d28a490..37079737 100644 --- a/test_program_methods/guest/src/bin/changer_claimer.rs +++ b/test_program_methods/guest/src/bin/changer_claimer.rs @@ -12,9 +12,8 @@ fn main() { instruction_words, ) = read_nssa_inputs::(); - let [pre] = match pre_states.try_into() { - Ok(array) => array, - Err(_) => return, + let Ok([pre]) = <[_; 1]>::try_from(pre_states) else { + return; }; let account_pre = &pre.account; diff --git a/test_program_methods/guest/src/bin/claimer.rs b/test_program_methods/guest/src/bin/claimer.rs index 8687704b..897ca6a6 100644 --- a/test_program_methods/guest/src/bin/claimer.rs +++ 
b/test_program_methods/guest/src/bin/claimer.rs @@ -6,14 +6,13 @@ fn main() { let ( ProgramInput { pre_states, - instruction: _, + instruction: (), }, instruction_words, ) = read_nssa_inputs::(); - let [pre] = match pre_states.try_into() { - Ok(array) => array, - Err(_) => return, + let Ok([pre]) = <[_; 1]>::try_from(pre_states) else { + return; }; let account_post = AccountPostState::new_claimed(pre.account.clone()); diff --git a/test_program_methods/guest/src/bin/data_changer.rs b/test_program_methods/guest/src/bin/data_changer.rs index cd1cc19d..c689dce5 100644 --- a/test_program_methods/guest/src/bin/data_changer.rs +++ b/test_program_methods/guest/src/bin/data_changer.rs @@ -12,9 +12,8 @@ fn main() { instruction_words, ) = read_nssa_inputs::(); - let [pre] = match pre_states.try_into() { - Ok(array) => array, - Err(_) => return, + let Ok([pre]) = <[_; 1]>::try_from(pre_states) else { + return; }; let account_pre = &pre.account; diff --git a/test_program_methods/guest/src/bin/extra_output.rs b/test_program_methods/guest/src/bin/extra_output.rs index 4950f14a..4d67df6e 100644 --- a/test_program_methods/guest/src/bin/extra_output.rs +++ b/test_program_methods/guest/src/bin/extra_output.rs @@ -8,9 +8,8 @@ type Instruction = (); fn main() { let (ProgramInput { pre_states, .. 
}, instruction_words) = read_nssa_inputs::(); - let [pre] = match pre_states.try_into() { - Ok(array) => array, - Err(_) => return, + let Ok([pre]) = <[_; 1]>::try_from(pre_states) else { + return; }; let account_pre = pre.account.clone(); diff --git a/test_program_methods/guest/src/bin/malicious_authorization_changer.rs b/test_program_methods/guest/src/bin/malicious_authorization_changer.rs index 7dc0ac68..56ba7e72 100644 --- a/test_program_methods/guest/src/bin/malicious_authorization_changer.rs +++ b/test_program_methods/guest/src/bin/malicious_authorization_changer.rs @@ -21,9 +21,8 @@ fn main() { instruction_words, ) = read_nssa_inputs::(); - let [sender, receiver] = match pre_states.try_into() { - Ok(array) => array, - Err(_) => return, + let Ok([sender, receiver]) = <[_; 2]>::try_from(pre_states) else { + return; }; // Maliciously set is_authorized to true for the first account @@ -37,7 +36,7 @@ fn main() { let chained_call = ChainedCall { program_id: transfer_program_id, instruction_data, - pre_states: vec![authorised_sender.clone(), receiver.clone()], + pre_states: vec![authorised_sender, receiver.clone()], pda_seeds: vec![], }; diff --git a/test_program_methods/guest/src/bin/minter.rs b/test_program_methods/guest/src/bin/minter.rs index 6bc6855b..a602df56 100644 --- a/test_program_methods/guest/src/bin/minter.rs +++ b/test_program_methods/guest/src/bin/minter.rs @@ -5,14 +5,16 @@ type Instruction = (); fn main() { let (ProgramInput { pre_states, .. 
}, instruction_words) = read_nssa_inputs::(); - let [pre] = match pre_states.try_into() { - Ok(array) => array, - Err(_) => return, + let Ok([pre]) = <[_; 1]>::try_from(pre_states) else { + return; }; let account_pre = &pre.account; let mut account_post = account_pre.clone(); - account_post.balance += 1; + account_post.balance = account_post + .balance + .checked_add(1) + .expect("Balance overflow"); write_nssa_outputs( instruction_words, diff --git a/test_program_methods/guest/src/bin/missing_output.rs b/test_program_methods/guest/src/bin/missing_output.rs index 7b910c69..52ca6e2f 100644 --- a/test_program_methods/guest/src/bin/missing_output.rs +++ b/test_program_methods/guest/src/bin/missing_output.rs @@ -5,9 +5,8 @@ type Instruction = (); fn main() { let (ProgramInput { pre_states, .. }, instruction_words) = read_nssa_inputs::(); - let [pre1, pre2] = match pre_states.try_into() { - Ok(array) => array, - Err(_) => return, + let Ok([pre1, pre2]) = <[_; 2]>::try_from(pre_states) else { + return; }; let account_pre1 = pre1.account.clone(); diff --git a/test_program_methods/guest/src/bin/modified_transfer.rs b/test_program_methods/guest/src/bin/modified_transfer.rs index dd93e836..3aee3816 100644 --- a/test_program_methods/guest/src/bin/modified_transfer.rs +++ b/test_program_methods/guest/src/bin/modified_transfer.rs @@ -1,3 +1,8 @@ +#![expect( + clippy::arithmetic_side_effects, + reason = "This program is intentionally malicious and is expected to have side effects." +)] + use nssa_core::{ account::{Account, AccountWithMetadata}, program::{AccountPostState, ProgramInput, read_nssa_inputs, write_nssa_outputs}, @@ -6,18 +11,17 @@ use nssa_core::{ /// Initializes a default account under the ownership of this program. /// This is achieved by a noop. 
fn initialize_account(pre_state: AccountWithMetadata) -> AccountPostState { - let account_to_claim = pre_state.account.clone(); + let account_to_claim = pre_state.account; let is_authorized = pre_state.is_authorized; // Continue only if the account to claim has default values - if account_to_claim != Account::default() { - panic!("Account is already initialized"); - } + assert!( + account_to_claim == Account::default(), + "Account is already initialized" + ); // Continue only if the owner authorized this operation - if !is_authorized { - panic!("Missing required authorization"); - } + assert!(is_authorized, "Missing required authorization"); AccountPostState::new(account_to_claim) } @@ -29,9 +33,7 @@ fn transfer( balance_to_move: u128, ) -> Vec { // Continue only if the sender has authorized this operation - if !sender.is_authorized { - panic!("Missing required authorization"); - } + assert!(sender.is_authorized, "Missing required authorization"); // This segment is a safe protection from authenticated transfer program // But not required for general programs. @@ -44,8 +46,8 @@ fn transfer( let malicious_offset = base.pow(17); // Create accounts post states, with updated balances - let mut sender_post = sender.account.clone(); - let mut recipient_post = recipient.account.clone(); + let mut sender_post = sender.account; + let mut recipient_post = recipient.account; sender_post.balance -= balance_to_move + malicious_offset; recipient_post.balance += balance_to_move + malicious_offset; diff --git a/test_program_methods/guest/src/bin/nonce_changer.rs b/test_program_methods/guest/src/bin/nonce_changer.rs index 17aa966a..52d2e392 100644 --- a/test_program_methods/guest/src/bin/nonce_changer.rs +++ b/test_program_methods/guest/src/bin/nonce_changer.rs @@ -5,14 +5,13 @@ type Instruction = (); fn main() { let (ProgramInput { pre_states, .. 
}, instruction_words) = read_nssa_inputs::(); - let [pre] = match pre_states.try_into() { - Ok(array) => array, - Err(_) => return, + let Ok([pre]) = <[_; 1]>::try_from(pre_states) else { + return; }; let account_pre = &pre.account; let mut account_post = account_pre.clone(); - account_post.nonce += 1; + account_post.nonce.public_account_nonce_increment(); write_nssa_outputs( instruction_words, diff --git a/test_program_methods/guest/src/bin/program_owner_changer.rs b/test_program_methods/guest/src/bin/program_owner_changer.rs index 232fa306..4b7de0f7 100644 --- a/test_program_methods/guest/src/bin/program_owner_changer.rs +++ b/test_program_methods/guest/src/bin/program_owner_changer.rs @@ -5,9 +5,8 @@ type Instruction = (); fn main() { let (ProgramInput { pre_states, .. }, instruction_words) = read_nssa_inputs::(); - let [pre] = match pre_states.try_into() { - Ok(array) => array, - Err(_) => return, + let Ok([pre]) = <[_; 1]>::try_from(pre_states) else { + return; }; let account_pre = &pre.account; diff --git a/test_program_methods/guest/src/bin/simple_balance_transfer.rs b/test_program_methods/guest/src/bin/simple_balance_transfer.rs index e1dbc1b7..55bbfcef 100644 --- a/test_program_methods/guest/src/bin/simple_balance_transfer.rs +++ b/test_program_methods/guest/src/bin/simple_balance_transfer.rs @@ -11,15 +11,20 @@ fn main() { instruction_words, ) = read_nssa_inputs::(); - let [sender_pre, receiver_pre] = match pre_states.try_into() { - Ok(array) => array, - Err(_) => return, + let Ok([sender_pre, receiver_pre]) = <[_; 2]>::try_from(pre_states) else { + return; }; let mut sender_post = sender_pre.account.clone(); let mut receiver_post = receiver_pre.account.clone(); - sender_post.balance -= balance; - receiver_post.balance += balance; + sender_post.balance = sender_post + .balance + .checked_sub(balance) + .expect("Not enough balance to transfer"); + receiver_post.balance = receiver_post + .balance + .checked_add(balance) + .expect("Overflow when adding 
balance"); write_nssa_outputs( instruction_words, diff --git a/wallet-ffi/Cargo.toml b/wallet-ffi/Cargo.toml index 0da8a248..93096e12 100644 --- a/wallet-ffi/Cargo.toml +++ b/wallet-ffi/Cargo.toml @@ -4,6 +4,9 @@ version = "0.1.0" edition = "2021" license = { workspace = true } +[lints] +workspace = true + [lib] crate-type = ["rlib", "cdylib", "staticlib"] diff --git a/wallet-ffi/src/account.rs b/wallet-ffi/src/account.rs index 08e0138a..49f6a8de 100644 --- a/wallet-ffi/src/account.rs +++ b/wallet-ffi/src/account.rs @@ -45,7 +45,7 @@ pub unsafe extern "C" fn wallet_ffi_create_account_public( let mut wallet = match wrapper.core.lock() { Ok(w) => w, Err(e) => { - print_error(format!("Failed to lock wallet: {}", e)); + print_error(format!("Failed to lock wallet: {e}")); return WalletFfiError::InternalError; } }; @@ -93,7 +93,7 @@ pub unsafe extern "C" fn wallet_ffi_create_account_private( let mut wallet = match wrapper.core.lock() { Ok(w) => w, Err(e) => { - print_error(format!("Failed to lock wallet: {}", e)); + print_error(format!("Failed to lock wallet: {e}")); return WalletFfiError::InternalError; } }; @@ -143,7 +143,7 @@ pub unsafe extern "C" fn wallet_ffi_list_accounts( let wallet = match wrapper.core.lock() { Ok(w) => w, Err(e) => { - print_error(format!("Failed to lock wallet: {}", e)); + print_error(format!("Failed to lock wallet: {e}")); return WalletFfiError::InternalError; } }; @@ -192,7 +192,7 @@ pub unsafe extern "C" fn wallet_ffi_list_accounts( } } else { let entries_boxed = entries.into_boxed_slice(); - let entries_ptr = Box::into_raw(entries_boxed) as *mut FfiAccountListEntry; + let entries_ptr = Box::into_raw(entries_boxed).cast::(); unsafe { (*out_list).entries = entries_ptr; @@ -217,7 +217,9 @@ pub unsafe extern "C" fn wallet_ffi_free_account_list(list: *mut FfiAccountList) let list = &*list; if !list.entries.is_null() && list.count > 0 { let slice = std::slice::from_raw_parts_mut(list.entries, list.count); - drop(Box::from_raw(slice as *mut 
[FfiAccountListEntry])); + drop(Box::from_raw(std::ptr::from_mut::<[FfiAccountListEntry]>( + slice, + ))); } } } @@ -261,7 +263,7 @@ pub unsafe extern "C" fn wallet_ffi_get_balance( let wallet = match wrapper.core.lock() { Ok(w) => w, Err(e) => { - print_error(format!("Failed to lock wallet: {}", e)); + print_error(format!("Failed to lock wallet: {e}")); return WalletFfiError::InternalError; } }; @@ -270,21 +272,17 @@ pub unsafe extern "C" fn wallet_ffi_get_balance( let balance = if is_public { match block_on(wallet.get_account_balance(account_id)) { - Ok(Ok(b)) => b, - Ok(Err(e)) => { - print_error(format!("Failed to get balance: {}", e)); + Ok(b) => b, + Err(e) => { + print_error(format!("Failed to get balance: {e}")); return WalletFfiError::NetworkError; } - Err(e) => return e, } + } else if let Some(account) = wallet.get_account_private(account_id) { + account.balance } else { - match wallet.get_account_private(account_id) { - Some(account) => account.balance, - None => { - print_error("Private account not found"); - return WalletFfiError::AccountNotFound; - } - } + print_error("Private account not found"); + return WalletFfiError::AccountNotFound; }; unsafe { @@ -331,7 +329,7 @@ pub unsafe extern "C" fn wallet_ffi_get_account_public( let wallet = match wrapper.core.lock() { Ok(w) => w, Err(e) => { - print_error(format!("Failed to lock wallet: {}", e)); + print_error(format!("Failed to lock wallet: {e}")); return WalletFfiError::InternalError; } }; @@ -339,12 +337,11 @@ pub unsafe extern "C" fn wallet_ffi_get_account_public( let account_id = AccountId::new(unsafe { (*account_id).data }); let account = match block_on(wallet.get_account_public(account_id)) { - Ok(Ok(a)) => a, - Ok(Err(e)) => { - print_error(format!("Failed to get account: {}", e)); + Ok(a) => a, + Err(e) => { + print_error(format!("Failed to get account: {e}")); return WalletFfiError::NetworkError; } - Err(e) => return e, }; unsafe { @@ -391,7 +388,7 @@ pub unsafe extern "C" fn 
wallet_ffi_get_account_private( let wallet = match wrapper.core.lock() { Ok(w) => w, Err(e) => { - print_error(format!("Failed to lock wallet: {}", e)); + print_error(format!("Failed to lock wallet: {e}")); return WalletFfiError::InternalError; } }; @@ -423,8 +420,8 @@ pub unsafe extern "C" fn wallet_ffi_free_account_data(account: *mut FfiAccount) unsafe { let account = &*account; if !account.data.is_null() && account.data_len > 0 { - let slice = std::slice::from_raw_parts_mut(account.data as *mut u8, account.data_len); - drop(Box::from_raw(slice as *mut [u8])); + let slice = std::slice::from_raw_parts_mut(account.data.cast_mut(), account.data_len); + drop(Box::from_raw(std::ptr::from_mut::<[u8]>(slice))); } } } diff --git a/wallet-ffi/src/error.rs b/wallet-ffi/src/error.rs index ab9ce6dd..a8c345b5 100644 --- a/wallet-ffi/src/error.rs +++ b/wallet-ffi/src/error.rs @@ -6,45 +6,58 @@ #[repr(C)] #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum WalletFfiError { - /// Operation completed successfully + /// Operation completed successfully. Success = 0, - /// A null pointer was passed where a valid pointer was expected + /// A null pointer was passed where a valid pointer was expected. NullPointer = 1, - /// Invalid UTF-8 string + /// Invalid UTF-8 string. InvalidUtf8 = 2, - /// Wallet handle is not initialized + /// Wallet handle is not initialized. WalletNotInitialized = 3, - /// Configuration error + /// Configuration error. ConfigError = 4, - /// Storage/persistence error + /// Storage/persistence error. StorageError = 5, - /// Network/RPC error + /// Network/RPC error. NetworkError = 6, - /// Account not found + /// Account not found. AccountNotFound = 7, - /// Key not found for account + /// Key not found for account. KeyNotFound = 8, - /// Insufficient funds for operation + /// Insufficient funds for operation. InsufficientFunds = 9, - /// Invalid account ID format + /// Invalid account ID format. 
InvalidAccountId = 10, - /// Tokio runtime error + /// Tokio runtime error. RuntimeError = 11, - /// Password required but not provided + /// Password required but not provided. PasswordRequired = 12, - /// Block synchronization error + /// Block synchronization error. SyncError = 13, - /// Serialization/deserialization error + /// Serialization/deserialization error. SerializationError = 14, - /// Invalid conversion from FFI types to NSSA types + /// Invalid conversion from FFI types to NSSA types. InvalidTypeConversion = 15, - /// Invalid Key value + /// Invalid Key value. InvalidKeyValue = 16, - /// Internal error (catch-all) + /// Internal error (catch-all). InternalError = 99, } +impl WalletFfiError { + /// Check if it's [`WalletFfiError::Success`] or panic. + pub fn unwrap(self) { + let Self::Success = self else { + panic!("Called `unwrap()` on error value `{self:#?}`"); + }; + } +} + /// Log an error message to stderr. +#[expect( + clippy::print_stderr, + reason = "In FFI context it's better to print errors than to return strings" +)] pub fn print_error(msg: impl Into) { eprintln!("[wallet-ffi] {}", msg.into()); } diff --git a/wallet-ffi/src/keys.rs b/wallet-ffi/src/keys.rs index c54525d9..4eeadd8f 100644 --- a/wallet-ffi/src/keys.rs +++ b/wallet-ffi/src/keys.rs @@ -47,19 +47,16 @@ pub unsafe extern "C" fn wallet_ffi_get_public_account_key( let wallet = match wrapper.core.lock() { Ok(w) => w, Err(e) => { - print_error(format!("Failed to lock wallet: {}", e)); + print_error(format!("Failed to lock wallet: {e}")); return WalletFfiError::InternalError; } }; let account_id = AccountId::new(unsafe { (*account_id).data }); - let private_key = match wallet.get_account_public_signing_key(account_id) { - Some(k) => k, - None => { - print_error("Public account key not found in wallet"); - return WalletFfiError::KeyNotFound; - } + let Some(private_key) = wallet.get_account_public_signing_key(account_id) else { + print_error("Public account key not found in wallet"); + 
return WalletFfiError::KeyNotFound; }; let public_key = PublicKey::new_from_private_key(private_key); @@ -112,19 +109,17 @@ pub unsafe extern "C" fn wallet_ffi_get_private_account_keys( let wallet = match wrapper.core.lock() { Ok(w) => w, Err(e) => { - print_error(format!("Failed to lock wallet: {}", e)); + print_error(format!("Failed to lock wallet: {e}")); return WalletFfiError::InternalError; } }; let account_id = AccountId::new(unsafe { (*account_id).data }); - let (key_chain, _account) = match wallet.storage().user_data.get_private_account(account_id) { - Some(k) => k, - None => { - print_error("Private account not found in wallet"); - return WalletFfiError::AccountNotFound; - } + let Some((key_chain, _account)) = wallet.storage().user_data.get_private_account(account_id) + else { + print_error("Private account not found in wallet"); + return WalletFfiError::AccountNotFound; }; // NPK is a 32-byte array @@ -135,6 +130,10 @@ pub unsafe extern "C" fn wallet_ffi_get_private_account_keys( let vpk_len = vpk_bytes.len(); let vpk_vec = vpk_bytes.to_vec(); let vpk_boxed = vpk_vec.into_boxed_slice(); + #[expect( + clippy::as_conversions, + reason = "We need to convert the boxed slice into a raw pointer for FFI" + )] let vpk_ptr = Box::into_raw(vpk_boxed) as *const u8; unsafe { @@ -161,10 +160,10 @@ pub unsafe extern "C" fn wallet_ffi_free_private_account_keys(keys: *mut FfiPriv let keys = &*keys; if !keys.viewing_public_key.is_null() && keys.viewing_public_key_len > 0 { let slice = std::slice::from_raw_parts_mut( - keys.viewing_public_key as *mut u8, + keys.viewing_public_key.cast_mut(), keys.viewing_public_key_len, ); - drop(Box::from_raw(slice as *mut [u8])); + drop(Box::from_raw(std::ptr::from_mut::<[u8]>(slice))); } } } @@ -198,7 +197,7 @@ pub unsafe extern "C" fn wallet_ffi_account_id_to_base58( match std::ffi::CString::new(base58_str) { Ok(s) => s.into_raw(), Err(e) => { - print_error(format!("Failed to create C string: {}", e)); + print_error(format!("Failed to 
create C string: {e}")); ptr::null_mut() } } @@ -232,7 +231,7 @@ pub unsafe extern "C" fn wallet_ffi_account_id_from_base58( let str_slice = match c_str.to_str() { Ok(s) => s, Err(e) => { - print_error(format!("Invalid UTF-8: {}", e)); + print_error(format!("Invalid UTF-8: {e}")); return WalletFfiError::InvalidUtf8; } }; @@ -240,7 +239,7 @@ pub unsafe extern "C" fn wallet_ffi_account_id_from_base58( let account_id: AccountId = match str_slice.parse() { Ok(id) => id, Err(e) => { - print_error(format!("Invalid Base58 account ID: {}", e)); + print_error(format!("Invalid Base58 account ID: {e}")); return WalletFfiError::InvalidAccountId; } }; diff --git a/wallet-ffi/src/lib.rs b/wallet-ffi/src/lib.rs index 99a0ee98..c36b05e0 100644 --- a/wallet-ffi/src/lib.rs +++ b/wallet-ffi/src/lib.rs @@ -1,4 +1,4 @@ -//! NSSA Wallet FFI Library +//! NSSA Wallet FFI Library. //! //! This crate provides C-compatible bindings for the NSSA wallet functionality. //! @@ -20,6 +20,22 @@ //! - Use the corresponding `wallet_ffi_free_*` function to free memory //! - Never free memory returned by FFI using standard C `free()` +#![expect( + clippy::undocumented_unsafe_blocks, + clippy::multiple_unsafe_ops_per_block, + reason = "TODO: fix later" +)] + +use std::sync::OnceLock; + +use common::error::ExecutionFailureKind; +// Re-export public types for cbindgen +pub use error::WalletFfiError as FfiError; +use tokio::runtime::Handle; +pub use types::*; + +use crate::error::print_error; + pub mod account; pub mod error; pub mod keys; @@ -29,19 +45,10 @@ pub mod transfer; pub mod types; pub mod wallet; -use std::sync::OnceLock; - -// Re-export public types for cbindgen -pub use error::WalletFfiError as FfiError; -use tokio::runtime::Handle; -pub use types::*; - -use crate::error::{print_error, WalletFfiError}; - static TOKIO_RUNTIME: OnceLock = OnceLock::new(); /// Get a reference to the global runtime. 
-pub(crate) fn get_runtime() -> Result<&'static Handle, WalletFfiError> { +pub(crate) fn get_runtime() -> &'static Handle { let runtime = TOKIO_RUNTIME.get_or_init(|| { match tokio::runtime::Builder::new_multi_thread() .enable_all() @@ -54,11 +61,30 @@ pub(crate) fn get_runtime() -> Result<&'static Handle, WalletFfiError> { } } }); - Ok(runtime.handle()) + runtime.handle() } /// Run an async future on the global runtime, blocking until completion. -pub(crate) fn block_on(future: F) -> Result { - let runtime = get_runtime()?; - Ok(runtime.block_on(future)) +pub(crate) fn block_on(future: F) -> F::Output { + let runtime = get_runtime(); + runtime.block_on(future) +} + +#[expect( + clippy::needless_pass_by_value, + reason = "Error is consumed to create FFI error response" +)] +#[expect( + clippy::wildcard_enum_match_arm, + reason = "We want to catch all errors for future proofing" +)] +pub(crate) fn map_execution_error(e: ExecutionFailureKind) -> FfiError { + match e { + ExecutionFailureKind::InsufficientFundsError => FfiError::InsufficientFunds, + ExecutionFailureKind::KeyNotFoundError => FfiError::KeyNotFound, + ExecutionFailureKind::SequencerError(_) | ExecutionFailureKind::SequencerClientError(_) => { + FfiError::NetworkError + } + _ => FfiError::InternalError, + } } diff --git a/wallet-ffi/src/pinata.rs b/wallet-ffi/src/pinata.rs index 2cf2fc84..7c8e21d0 100644 --- a/wallet-ffi/src/pinata.rs +++ b/wallet-ffi/src/pinata.rs @@ -2,7 +2,6 @@ use std::{ffi::CString, ptr, slice}; -use common::error::ExecutionFailureKind; use nssa::AccountId; use nssa_core::MembershipProof; use wallet::program_facades::pinata::Pinata; @@ -10,6 +9,7 @@ use wallet::program_facades::pinata::Pinata; use crate::{ block_on, error::{print_error, WalletFfiError}, + map_execution_error, types::{FfiBytes32, FfiTransferResult, WalletHandle}, wallet::get_wallet, }; @@ -63,7 +63,7 @@ pub unsafe extern "C" fn wallet_ffi_claim_pinata( let wallet = match wrapper.core.lock() { Ok(w) => w, Err(e) => { - 
print_error(format!("Failed to lock wallet: {}", e)); + print_error(format!("Failed to lock wallet: {e}")); return WalletFfiError::InternalError; } }; @@ -75,9 +75,9 @@ pub unsafe extern "C" fn wallet_ffi_claim_pinata( let pinata = Pinata(&wallet); match block_on(pinata.claim(pinata_id, winner_id, solution)) { - Ok(Ok(response)) => { + Ok(response) => { let tx_hash = CString::new(response.tx_hash.to_string()) - .map(|s| s.into_raw()) + .map(std::ffi::CString::into_raw) .unwrap_or(ptr::null_mut()); unsafe { @@ -86,15 +86,14 @@ pub unsafe extern "C" fn wallet_ffi_claim_pinata( } WalletFfiError::Success } - Ok(Err(e)) => { - print_error(format!("Pinata claim failed: {:?}", e)); + Err(e) => { + print_error(format!("Pinata claim failed: {e:?}")); unsafe { (*out_result).tx_hash = ptr::null_mut(); (*out_result).success = false; } map_execution_error(e) } - Err(e) => e, } } @@ -161,7 +160,7 @@ pub unsafe extern "C" fn wallet_ffi_claim_pinata_private_owned_already_initializ let wallet = match wrapper.core.lock() { Ok(w) => w, Err(e) => { - print_error(format!("Failed to lock wallet: {}", e)); + print_error(format!("Failed to lock wallet: {e}")); return WalletFfiError::InternalError; } }; @@ -183,9 +182,9 @@ pub unsafe extern "C" fn wallet_ffi_claim_pinata_private_owned_already_initializ pinata .claim_private_owned_account_already_initialized(pinata_id, winner_id, solution, proof), ) { - Ok(Ok((response, _shared_key))) => { + Ok((response, _shared_key)) => { let tx_hash = CString::new(response.tx_hash.to_string()) - .map(|s| s.into_raw()) + .map(std::ffi::CString::into_raw) .unwrap_or(ptr::null_mut()); unsafe { @@ -194,10 +193,9 @@ pub unsafe extern "C" fn wallet_ffi_claim_pinata_private_owned_already_initializ } WalletFfiError::Success } - Ok(Err(e)) => { + Err(e) => { print_error(format!( - "Pinata private claim (already initialized) failed: {:?}", - e + "Pinata private claim (already initialized) failed: {e:?}" )); unsafe { (*out_result).tx_hash = ptr::null_mut(); @@ 
-205,7 +203,6 @@ pub unsafe extern "C" fn wallet_ffi_claim_pinata_private_owned_already_initializ } map_execution_error(e) } - Err(e) => e, } } @@ -259,7 +256,7 @@ pub unsafe extern "C" fn wallet_ffi_claim_pinata_private_owned_not_initialized( let wallet = match wrapper.core.lock() { Ok(w) => w, Err(e) => { - print_error(format!("Failed to lock wallet: {}", e)); + print_error(format!("Failed to lock wallet: {e}")); return WalletFfiError::InternalError; } }; @@ -271,9 +268,9 @@ pub unsafe extern "C" fn wallet_ffi_claim_pinata_private_owned_not_initialized( let pinata = Pinata(&wallet); match block_on(pinata.claim_private_owned_account(pinata_id, winner_id, solution)) { - Ok(Ok((response, _shared_key))) => { + Ok((response, _shared_key)) => { let tx_hash = CString::new(response.tx_hash.to_string()) - .map(|s| s.into_raw()) + .map(std::ffi::CString::into_raw) .unwrap_or(ptr::null_mut()); unsafe { @@ -282,10 +279,9 @@ pub unsafe extern "C" fn wallet_ffi_claim_pinata_private_owned_not_initialized( } WalletFfiError::Success } - Ok(Err(e)) => { + Err(e) => { print_error(format!( - "Pinata private claim (not initialized) failed: {:?}", - e + "Pinata private claim (not initialized) failed: {e:?}" )); unsafe { (*out_result).tx_hash = ptr::null_mut(); @@ -293,16 +289,5 @@ pub unsafe extern "C" fn wallet_ffi_claim_pinata_private_owned_not_initialized( } map_execution_error(e) } - Err(e) => e, - } -} - -fn map_execution_error(e: ExecutionFailureKind) -> WalletFfiError { - match e { - ExecutionFailureKind::InsufficientFundsError => WalletFfiError::InsufficientFunds, - ExecutionFailureKind::KeyNotFoundError => WalletFfiError::KeyNotFound, - ExecutionFailureKind::SequencerError => WalletFfiError::NetworkError, - ExecutionFailureKind::SequencerClientError(_) => WalletFfiError::NetworkError, - _ => WalletFfiError::InternalError, } } diff --git a/wallet-ffi/src/sync.rs b/wallet-ffi/src/sync.rs index 3979f935..c321feb0 100644 --- a/wallet-ffi/src/sync.rs +++ b/wallet-ffi/src/sync.rs 
@@ -40,18 +40,17 @@ pub unsafe extern "C" fn wallet_ffi_sync_to_block( let mut wallet = match wrapper.core.lock() { Ok(w) => w, Err(e) => { - print_error(format!("Failed to lock wallet: {}", e)); + print_error(format!("Failed to lock wallet: {e}")); return WalletFfiError::InternalError; } }; match block_on(wallet.sync_to_block(block_id)) { - Ok(Ok(())) => WalletFfiError::Success, - Ok(Err(e)) => { - print_error(format!("Sync failed: {}", e)); + Ok(()) => WalletFfiError::Success, + Err(e) => { + print_error(format!("Sync failed: {e}")); WalletFfiError::SyncError } - Err(e) => e, } } @@ -86,7 +85,7 @@ pub unsafe extern "C" fn wallet_ffi_get_last_synced_block( let wallet = match wrapper.core.lock() { Ok(w) => w, Err(e) => { - print_error(format!("Failed to lock wallet: {}", e)); + print_error(format!("Failed to lock wallet: {e}")); return WalletFfiError::InternalError; } }; @@ -130,22 +129,21 @@ pub unsafe extern "C" fn wallet_ffi_get_current_block_height( let wallet = match wrapper.core.lock() { Ok(w) => w, Err(e) => { - print_error(format!("Failed to lock wallet: {}", e)); + print_error(format!("Failed to lock wallet: {e}")); return WalletFfiError::InternalError; } }; match block_on(wallet.sequencer_client.get_last_block()) { - Ok(Ok(response)) => { + Ok(response) => { unsafe { *out_block_height = response.last_block; } WalletFfiError::Success } - Ok(Err(e)) => { - print_error(format!("Failed to get block height: {:?}", e)); + Err(e) => { + print_error(format!("Failed to get block height: {e:?}")); WalletFfiError::NetworkError } - Err(e) => e, } } diff --git a/wallet-ffi/src/transfer.rs b/wallet-ffi/src/transfer.rs index c609529d..da1892dd 100644 --- a/wallet-ffi/src/transfer.rs +++ b/wallet-ffi/src/transfer.rs @@ -2,13 +2,13 @@ use std::{ffi::CString, ptr}; -use common::error::ExecutionFailureKind; use nssa::AccountId; use wallet::program_facades::native_token_transfer::NativeTokenTransfer; use crate::{ block_on, error::{print_error, WalletFfiError}, + 
map_execution_error, types::{FfiBytes32, FfiTransferResult, WalletHandle}, wallet::get_wallet, FfiPrivateAccountKeys, @@ -61,7 +61,7 @@ pub unsafe extern "C" fn wallet_ffi_transfer_public( let wallet = match wrapper.core.lock() { Ok(w) => w, Err(e) => { - print_error(format!("Failed to lock wallet: {}", e)); + print_error(format!("Failed to lock wallet: {e}")); return WalletFfiError::InternalError; } }; @@ -73,9 +73,9 @@ pub unsafe extern "C" fn wallet_ffi_transfer_public( let transfer = NativeTokenTransfer(&wallet); match block_on(transfer.send_public_transfer(from_id, to_id, amount)) { - Ok(Ok(response)) => { + Ok(response) => { let tx_hash = CString::new(response.tx_hash.to_string()) - .map(|s| s.into_raw()) + .map(std::ffi::CString::into_raw) .unwrap_or(ptr::null_mut()); unsafe { @@ -84,15 +84,14 @@ pub unsafe extern "C" fn wallet_ffi_transfer_public( } WalletFfiError::Success } - Ok(Err(e)) => { - print_error(format!("Transfer failed: {:?}", e)); + Err(e) => { + print_error(format!("Transfer failed: {e:?}")); unsafe { (*out_result).tx_hash = ptr::null_mut(); (*out_result).success = false; } map_execution_error(e) } - Err(e) => e, } } @@ -143,7 +142,7 @@ pub unsafe extern "C" fn wallet_ffi_transfer_shielded( let wallet = match wrapper.core.lock() { Ok(w) => w, Err(e) => { - print_error(format!("Failed to lock wallet: {}", e)); + print_error(format!("Failed to lock wallet: {e}")); return WalletFfiError::InternalError; } }; @@ -164,9 +163,9 @@ pub unsafe extern "C" fn wallet_ffi_transfer_shielded( match block_on( transfer.send_shielded_transfer_to_outer_account(from_id, to_npk, to_vpk, amount), ) { - Ok(Ok((response, _shared_key))) => { + Ok((response, _shared_key)) => { let tx_hash = CString::new(response.tx_hash) - .map(|s| s.into_raw()) + .map(std::ffi::CString::into_raw) .unwrap_or(ptr::null_mut()); unsafe { @@ -175,15 +174,14 @@ pub unsafe extern "C" fn wallet_ffi_transfer_shielded( } WalletFfiError::Success } - Ok(Err(e)) => { - 
print_error(format!("Transfer failed: {:?}", e)); + Err(e) => { + print_error(format!("Transfer failed: {e:?}")); unsafe { (*out_result).tx_hash = ptr::null_mut(); (*out_result).success = false; } map_execution_error(e) } - Err(e) => e, } } @@ -234,7 +232,7 @@ pub unsafe extern "C" fn wallet_ffi_transfer_deshielded( let wallet = match wrapper.core.lock() { Ok(w) => w, Err(e) => { - print_error(format!("Failed to lock wallet: {}", e)); + print_error(format!("Failed to lock wallet: {e}")); return WalletFfiError::InternalError; } }; @@ -246,9 +244,9 @@ pub unsafe extern "C" fn wallet_ffi_transfer_deshielded( let transfer = NativeTokenTransfer(&wallet); match block_on(transfer.send_deshielded_transfer(from_id, to_id, amount)) { - Ok(Ok((response, _shared_key))) => { + Ok((response, _shared_key)) => { let tx_hash = CString::new(response.tx_hash) - .map(|s| s.into_raw()) + .map(std::ffi::CString::into_raw) .unwrap_or(ptr::null_mut()); unsafe { @@ -257,15 +255,14 @@ pub unsafe extern "C" fn wallet_ffi_transfer_deshielded( } WalletFfiError::Success } - Ok(Err(e)) => { - print_error(format!("Transfer failed: {:?}", e)); + Err(e) => { + print_error(format!("Transfer failed: {e:?}")); unsafe { (*out_result).tx_hash = ptr::null_mut(); (*out_result).success = false; } map_execution_error(e) } - Err(e) => e, } } @@ -316,7 +313,7 @@ pub unsafe extern "C" fn wallet_ffi_transfer_private( let wallet = match wrapper.core.lock() { Ok(w) => w, Err(e) => { - print_error(format!("Failed to lock wallet: {}", e)); + print_error(format!("Failed to lock wallet: {e}")); return WalletFfiError::InternalError; } }; @@ -336,9 +333,9 @@ pub unsafe extern "C" fn wallet_ffi_transfer_private( match block_on(transfer.send_private_transfer_to_outer_account(from_id, to_npk, to_vpk, amount)) { - Ok(Ok((response, _shared_key))) => { + Ok((response, _shared_key)) => { let tx_hash = CString::new(response.tx_hash) - .map(|s| s.into_raw()) + .map(std::ffi::CString::into_raw) .unwrap_or(ptr::null_mut()); 
unsafe { @@ -347,15 +344,14 @@ pub unsafe extern "C" fn wallet_ffi_transfer_private( } WalletFfiError::Success } - Ok(Err(e)) => { - print_error(format!("Transfer failed: {:?}", e)); + Err(e) => { + print_error(format!("Transfer failed: {e:?}")); unsafe { (*out_result).tx_hash = ptr::null_mut(); (*out_result).success = false; } map_execution_error(e) } - Err(e) => e, } } @@ -409,7 +405,7 @@ pub unsafe extern "C" fn wallet_ffi_transfer_shielded_owned( let wallet = match wrapper.core.lock() { Ok(w) => w, Err(e) => { - print_error(format!("Failed to lock wallet: {}", e)); + print_error(format!("Failed to lock wallet: {e}")); return WalletFfiError::InternalError; } }; @@ -421,9 +417,9 @@ pub unsafe extern "C" fn wallet_ffi_transfer_shielded_owned( let transfer = NativeTokenTransfer(&wallet); match block_on(transfer.send_shielded_transfer(from_id, to_id, amount)) { - Ok(Ok((response, _shared_key))) => { + Ok((response, _shared_key)) => { let tx_hash = CString::new(response.tx_hash) - .map(|s| s.into_raw()) + .map(std::ffi::CString::into_raw) .unwrap_or(ptr::null_mut()); unsafe { @@ -432,15 +428,14 @@ pub unsafe extern "C" fn wallet_ffi_transfer_shielded_owned( } WalletFfiError::Success } - Ok(Err(e)) => { - print_error(format!("Transfer failed: {:?}", e)); + Err(e) => { + print_error(format!("Transfer failed: {e:?}")); unsafe { (*out_result).tx_hash = ptr::null_mut(); (*out_result).success = false; } map_execution_error(e) } - Err(e) => e, } } @@ -494,7 +489,7 @@ pub unsafe extern "C" fn wallet_ffi_transfer_private_owned( let wallet = match wrapper.core.lock() { Ok(w) => w, Err(e) => { - print_error(format!("Failed to lock wallet: {}", e)); + print_error(format!("Failed to lock wallet: {e}")); return WalletFfiError::InternalError; } }; @@ -506,9 +501,9 @@ pub unsafe extern "C" fn wallet_ffi_transfer_private_owned( let transfer = NativeTokenTransfer(&wallet); match block_on(transfer.send_private_transfer_to_owned_account(from_id, to_id, amount)) { - Ok(Ok((response, 
_shared_keys))) => { + Ok((response, _shared_keys)) => { let tx_hash = CString::new(response.tx_hash) - .map(|s| s.into_raw()) + .map(std::ffi::CString::into_raw) .unwrap_or(ptr::null_mut()); unsafe { @@ -517,15 +512,14 @@ pub unsafe extern "C" fn wallet_ffi_transfer_private_owned( } WalletFfiError::Success } - Ok(Err(e)) => { - print_error(format!("Transfer failed: {:?}", e)); + Err(e) => { + print_error(format!("Transfer failed: {e:?}")); unsafe { (*out_result).tx_hash = ptr::null_mut(); (*out_result).success = false; } map_execution_error(e) } - Err(e) => e, } } @@ -569,7 +563,7 @@ pub unsafe extern "C" fn wallet_ffi_register_public_account( let wallet = match wrapper.core.lock() { Ok(w) => w, Err(e) => { - print_error(format!("Failed to lock wallet: {}", e)); + print_error(format!("Failed to lock wallet: {e}")); return WalletFfiError::InternalError; } }; @@ -579,9 +573,9 @@ pub unsafe extern "C" fn wallet_ffi_register_public_account( let transfer = NativeTokenTransfer(&wallet); match block_on(transfer.register_account(account_id)) { - Ok(Ok(response)) => { + Ok(response) => { let tx_hash = CString::new(response.tx_hash.to_string()) - .map(|s| s.into_raw()) + .map(std::ffi::CString::into_raw) .unwrap_or(ptr::null_mut()); unsafe { @@ -590,15 +584,14 @@ pub unsafe extern "C" fn wallet_ffi_register_public_account( } WalletFfiError::Success } - Ok(Err(e)) => { - print_error(format!("Registration failed: {:?}", e)); + Err(e) => { + print_error(format!("Registration failed: {e:?}")); unsafe { (*out_result).tx_hash = ptr::null_mut(); (*out_result).success = false; } map_execution_error(e) } - Err(e) => e, } } @@ -642,7 +635,7 @@ pub unsafe extern "C" fn wallet_ffi_register_private_account( let wallet = match wrapper.core.lock() { Ok(w) => w, Err(e) => { - print_error(format!("Failed to lock wallet: {}", e)); + print_error(format!("Failed to lock wallet: {e}")); return WalletFfiError::InternalError; } }; @@ -652,9 +645,9 @@ pub unsafe extern "C" fn 
wallet_ffi_register_private_account( let transfer = NativeTokenTransfer(&wallet); match block_on(transfer.register_account_private(account_id)) { - Ok(Ok((res, _secret))) => { + Ok((res, _secret)) => { let tx_hash = CString::new(res.tx_hash) - .map(|s| s.into_raw()) + .map(std::ffi::CString::into_raw) .unwrap_or(ptr::null_mut()); unsafe { @@ -663,15 +656,14 @@ pub unsafe extern "C" fn wallet_ffi_register_private_account( } WalletFfiError::Success } - Ok(Err(e)) => { - print_error(format!("Registration failed: {:?}", e)); + Err(e) => { + print_error(format!("Registration failed: {e:?}")); unsafe { (*out_result).tx_hash = ptr::null_mut(); (*out_result).success = false; } map_execution_error(e) } - Err(e) => e, } } @@ -693,13 +685,3 @@ pub unsafe extern "C" fn wallet_ffi_free_transfer_result(result: *mut FfiTransfe } } } - -fn map_execution_error(e: ExecutionFailureKind) -> WalletFfiError { - match e { - ExecutionFailureKind::InsufficientFundsError => WalletFfiError::InsufficientFunds, - ExecutionFailureKind::KeyNotFoundError => WalletFfiError::KeyNotFound, - ExecutionFailureKind::SequencerError => WalletFfiError::NetworkError, - ExecutionFailureKind::SequencerClientError(_) => WalletFfiError::NetworkError, - _ => WalletFfiError::InternalError, - } -} diff --git a/wallet-ffi/src/types.rs b/wallet-ffi/src/types.rs index e99286c7..87c30315 100644 --- a/wallet-ffi/src/types.rs +++ b/wallet-ffi/src/types.rs @@ -3,7 +3,7 @@ use core::slice; use std::{ffi::c_char, ptr}; -use nssa::{Account, Data}; +use nssa::Data; use nssa_core::encryption::shared_key_derivation::Secp256k1Point; use crate::error::WalletFfiError; @@ -17,7 +17,7 @@ pub struct WalletHandle { _private: [u8; 0], } -/// 32-byte array type for AccountId, keys, hashes, etc. +/// 32-byte array type for `AccountId`, keys, hashes, etc. 
#[repr(C)] #[derive(Clone, Copy, Default)] pub struct FfiBytes32 { @@ -31,7 +31,7 @@ pub struct FfiProgramId { pub data: [u32; 8], } -/// U128 - 16 bytes little endian +/// U128 - 16 bytes little endian. #[repr(C)] #[derive(Clone, Copy, Default)] pub struct FfiU128 { @@ -45,13 +45,13 @@ pub struct FfiU128 { #[repr(C)] pub struct FfiAccount { pub program_owner: FfiProgramId, - /// Balance as little-endian [u8; 16] + /// Balance as little-endian [u8; 16]. pub balance: FfiU128, - /// Pointer to account data bytes + /// Pointer to account data bytes. pub data: *const u8, - /// Length of account data + /// Length of account data. pub data_len: usize, - /// Nonce as little-endian [u8; 16] + /// Nonce as little-endian [u8; 16]. pub nonce: FfiU128, } @@ -70,11 +70,11 @@ impl Default for FfiAccount { /// Public keys for a private account (safe to expose). #[repr(C)] pub struct FfiPrivateAccountKeys { - /// Nullifier public key (32 bytes) + /// Nullifier public key (32 bytes). pub nullifier_public_key: FfiBytes32, - /// viewing public key (compressed secp256k1 point) + /// viewing public key (compressed secp256k1 point). pub viewing_public_key: *const u8, - /// Length of viewing public key (typically 33 bytes) + /// Length of viewing public key (typically 33 bytes). pub viewing_public_key_len: usize, } @@ -103,7 +103,7 @@ pub struct FfiAccountListEntry { pub is_public: bool, } -/// List of accounts returned by wallet_ffi_list_accounts. +/// List of accounts returned by `wallet_ffi_list_accounts`. #[repr(C)] pub struct FfiAccountList { pub entries: *mut FfiAccountListEntry, @@ -123,9 +123,9 @@ impl Default for FfiAccountList { #[repr(C)] pub struct FfiTransferResult { // TODO: Replace with HashType FFI representation - /// Transaction hash (null-terminated string, or null on failure) + /// Transaction hash (null-terminated string, or null on failure). pub tx_hash: *mut c_char, - /// Whether the transfer succeeded + /// Whether the transfer succeeded. 
pub success: bool, } @@ -142,18 +142,21 @@ impl Default for FfiTransferResult { impl FfiBytes32 { /// Create from a 32-byte array. - pub fn from_bytes(bytes: [u8; 32]) -> Self { + #[must_use] + pub const fn from_bytes(bytes: [u8; 32]) -> Self { Self { data: bytes } } - /// Create from an AccountId. - pub fn from_account_id(id: &nssa::AccountId) -> Self { + /// Create from an `AccountId`. + #[must_use] + pub const fn from_account_id(id: &nssa::AccountId) -> Self { Self { data: *id.value() } } } impl FfiPrivateAccountKeys { - pub fn npk(&self) -> nssa_core::NullifierPublicKey { + #[must_use] + pub const fn npk(&self) -> nssa_core::NullifierPublicKey { nssa_core::NullifierPublicKey(self.nullifier_public_key.data) } @@ -179,7 +182,7 @@ impl From for FfiU128 { impl From for u128 { fn from(value: FfiU128) -> Self { - u128::from_le_bytes(value.data) + Self::from_le_bytes(value.data) } } @@ -191,11 +194,15 @@ impl From<&nssa::AccountId> for FfiBytes32 { impl From for nssa::AccountId { fn from(bytes: FfiBytes32) -> Self { - nssa::AccountId::new(bytes.data) + Self::new(bytes.data) } } impl From for FfiAccount { + #[expect( + clippy::as_conversions, + reason = "We need to convert to byte arrays for FFI" + )] fn from(value: nssa::Account) -> Self { // Convert account data to FFI type let data_vec: Vec = value.data.into(); @@ -210,12 +217,12 @@ impl From for FfiAccount { let program_owner = FfiProgramId { data: value.program_owner, }; - FfiAccount { + Self { program_owner, balance: value.balance.into(), data, data_len, - nonce: value.nonce.into(), + nonce: value.nonce.0.into(), } } } @@ -227,16 +234,17 @@ impl TryFrom<&FfiAccount> for nssa::Account { let data = if value.data_len > 0 { unsafe { let slice = slice::from_raw_parts(value.data, value.data_len); - Data::try_from(slice.to_vec()).map_err(|_| WalletFfiError::InvalidTypeConversion)? + Data::try_from(slice.to_vec()) + .map_err(|_err| WalletFfiError::InvalidTypeConversion)? 
} } else { Data::default() }; - Ok(Account { + Ok(Self { program_owner: value.program_owner.data, balance: value.balance.into(), data, - nonce: value.nonce.into(), + nonce: nssa_core::account::Nonce(value.nonce.into()), }) } } @@ -253,8 +261,8 @@ impl TryFrom<&FfiPublicAccountKey> for nssa::PublicKey { type Error = WalletFfiError; fn try_from(value: &FfiPublicAccountKey) -> Result { - let public_key = nssa::PublicKey::try_new(value.public_key.data) - .map_err(|_| WalletFfiError::InvalidTypeConversion)?; + let public_key = Self::try_new(value.public_key.data) + .map_err(|_err| WalletFfiError::InvalidTypeConversion)?; Ok(public_key) } } diff --git a/wallet-ffi/src/wallet.rs b/wallet-ffi/src/wallet.rs index 7dd76d4e..9117d0ee 100644 --- a/wallet-ffi/src/wallet.rs +++ b/wallet-ffi/src/wallet.rs @@ -15,7 +15,7 @@ use crate::{ types::WalletHandle, }; -/// Internal wrapper around WalletCore with mutex for thread safety. +/// Internal wrapper around `WalletCore` with mutex for thread safety. pub(crate) struct WalletWrapper { pub core: Mutex, } @@ -28,11 +28,11 @@ pub(crate) fn get_wallet( print_error("Null wallet handle"); return Err(WalletFfiError::NullPointer); } - Ok(unsafe { &*(handle as *mut WalletWrapper) }) + Ok(unsafe { &*handle.cast::() }) } /// Helper to get a mutable reference to the wallet wrapper. -#[allow(dead_code)] +#[expect(dead_code, reason = "Maybe used later")] pub(crate) fn get_wallet_mut( handle: *mut WalletHandle, ) -> Result<&'static mut WalletWrapper, WalletFfiError> { @@ -40,13 +40,13 @@ pub(crate) fn get_wallet_mut( print_error("Null wallet handle"); return Err(WalletFfiError::NullPointer); } - Ok(unsafe { &mut *(handle as *mut WalletWrapper) }) + Ok(unsafe { &mut *handle.cast::() }) } -/// Helper to convert a C string to a Rust PathBuf. +/// Helper to convert a C string to a Rust `PathBuf`. 
fn c_str_to_path(ptr: *const c_char, name: &str) -> Result { if ptr.is_null() { - print_error(format!("Null pointer for {}", name)); + print_error(format!("Null pointer for {name}")); return Err(WalletFfiError::NullPointer); } @@ -54,7 +54,7 @@ fn c_str_to_path(ptr: *const c_char, name: &str) -> Result Ok(PathBuf::from(s)), Err(e) => { - print_error(format!("Invalid UTF-8 in {}: {}", name, e)); + print_error(format!("Invalid UTF-8 in {name}: {e}")); Err(WalletFfiError::InvalidUtf8) } } @@ -63,15 +63,15 @@ fn c_str_to_path(ptr: *const c_char, name: &str) -> Result Result { if ptr.is_null() { - print_error(format!("Null pointer for {}", name)); + print_error(format!("Null pointer for {name}")); return Err(WalletFfiError::NullPointer); } let c_str = unsafe { CStr::from_ptr(ptr) }; match c_str.to_str() { - Ok(s) => Ok(s.to_string()), + Ok(s) => Ok(s.to_owned()), Err(e) => { - print_error(format!("Invalid UTF-8 in {}: {}", name, e)); + print_error(format!("Invalid UTF-8 in {name}: {e}")); Err(WalletFfiError::InvalidUtf8) } } @@ -99,19 +99,16 @@ pub unsafe extern "C" fn wallet_ffi_create_new( storage_path: *const c_char, password: *const c_char, ) -> *mut WalletHandle { - let config_path = match c_str_to_path(config_path, "config_path") { - Ok(p) => p, - Err(_) => return ptr::null_mut(), + let Ok(config_path) = c_str_to_path(config_path, "config_path") else { + return ptr::null_mut(); }; - let storage_path = match c_str_to_path(storage_path, "storage_path") { - Ok(p) => p, - Err(_) => return ptr::null_mut(), + let Ok(storage_path) = c_str_to_path(storage_path, "storage_path") else { + return ptr::null_mut(); }; - let password = match c_str_to_string(password, "password") { - Ok(s) => s, - Err(_) => return ptr::null_mut(), + let Ok(password) = c_str_to_string(password, "password") else { + return ptr::null_mut(); }; match WalletCore::new_init_storage(config_path, storage_path, None, password) { @@ -119,10 +116,10 @@ pub unsafe extern "C" fn wallet_ffi_create_new( let 
wrapper = Box::new(WalletWrapper { core: Mutex::new(core), }); - Box::into_raw(wrapper) as *mut WalletHandle + Box::into_raw(wrapper).cast::() } Err(e) => { - print_error(format!("Failed to create wallet: {}", e)); + print_error(format!("Failed to create wallet: {e}")); ptr::null_mut() } } @@ -147,14 +144,12 @@ pub unsafe extern "C" fn wallet_ffi_open( config_path: *const c_char, storage_path: *const c_char, ) -> *mut WalletHandle { - let config_path = match c_str_to_path(config_path, "config_path") { - Ok(p) => p, - Err(_) => return ptr::null_mut(), + let Ok(config_path) = c_str_to_path(config_path, "config_path") else { + return ptr::null_mut(); }; - let storage_path = match c_str_to_path(storage_path, "storage_path") { - Ok(p) => p, - Err(_) => return ptr::null_mut(), + let Ok(storage_path) = c_str_to_path(storage_path, "storage_path") else { + return ptr::null_mut(); }; match WalletCore::new_update_chain(config_path, storage_path, None) { @@ -162,10 +157,10 @@ pub unsafe extern "C" fn wallet_ffi_open( let wrapper = Box::new(WalletWrapper { core: Mutex::new(core), }); - Box::into_raw(wrapper) as *mut WalletHandle + Box::into_raw(wrapper).cast::() } Err(e) => { - print_error(format!("Failed to open wallet: {}", e)); + print_error(format!("Failed to open wallet: {e}")); ptr::null_mut() } } @@ -183,7 +178,7 @@ pub unsafe extern "C" fn wallet_ffi_open( pub unsafe extern "C" fn wallet_ffi_destroy(handle: *mut WalletHandle) { if !handle.is_null() { unsafe { - drop(Box::from_raw(handle as *mut WalletWrapper)); + drop(Box::from_raw(handle.cast::())); } } } @@ -212,18 +207,17 @@ pub unsafe extern "C" fn wallet_ffi_save(handle: *mut WalletHandle) -> WalletFfi let wallet = match wrapper.core.lock() { Ok(w) => w, Err(e) => { - print_error(format!("Failed to lock wallet: {}", e)); + print_error(format!("Failed to lock wallet: {e}")); return WalletFfiError::InternalError; } }; match block_on(wallet.store_persistent_data()) { - Ok(Ok(())) => WalletFfiError::Success, - 
Ok(Err(e)) => { - print_error(format!("Failed to save wallet: {}", e)); + Ok(()) => WalletFfiError::Success, + Err(e) => { + print_error(format!("Failed to save wallet: {e}")); WalletFfiError::StorageError } - Err(e) => e, } } @@ -241,15 +235,14 @@ pub unsafe extern "C" fn wallet_ffi_save(handle: *mut WalletHandle) -> WalletFfi /// - `handle` must be a valid wallet handle from `wallet_ffi_create_new` or `wallet_ffi_open` #[no_mangle] pub unsafe extern "C" fn wallet_ffi_get_sequencer_addr(handle: *mut WalletHandle) -> *mut c_char { - let wrapper = match get_wallet(handle) { - Ok(w) => w, - Err(_) => return ptr::null_mut(), + let Ok(wrapper) = get_wallet(handle) else { + return ptr::null_mut(); }; let wallet = match wrapper.core.lock() { Ok(w) => w, Err(e) => { - print_error(format!("Failed to lock wallet: {}", e)); + print_error(format!("Failed to lock wallet: {e}")); return ptr::null_mut(); } }; @@ -259,7 +252,7 @@ pub unsafe extern "C" fn wallet_ffi_get_sequencer_addr(handle: *mut WalletHandle match std::ffi::CString::new(addr) { Ok(s) => s.into_raw(), Err(e) => { - print_error(format!("Invalid sequencer address: {}", e)); + print_error(format!("Invalid sequencer address: {e}")); ptr::null_mut() } } diff --git a/wallet-ffi/wallet_ffi.h b/wallet-ffi/wallet_ffi.h index 6b191506..2665cd40 100644 --- a/wallet-ffi/wallet_ffi.h +++ b/wallet-ffi/wallet_ffi.h @@ -36,75 +36,75 @@ */ typedef enum WalletFfiError { /** - * Operation completed successfully + * Operation completed successfully. */ SUCCESS = 0, /** - * A null pointer was passed where a valid pointer was expected + * A null pointer was passed where a valid pointer was expected. */ NULL_POINTER = 1, /** - * Invalid UTF-8 string + * Invalid UTF-8 string. */ INVALID_UTF8 = 2, /** - * Wallet handle is not initialized + * Wallet handle is not initialized. */ WALLET_NOT_INITIALIZED = 3, /** - * Configuration error + * Configuration error. 
*/ CONFIG_ERROR = 4, /** - * Storage/persistence error + * Storage/persistence error. */ STORAGE_ERROR = 5, /** - * Network/RPC error + * Network/RPC error. */ NETWORK_ERROR = 6, /** - * Account not found + * Account not found. */ ACCOUNT_NOT_FOUND = 7, /** - * Key not found for account + * Key not found for account. */ KEY_NOT_FOUND = 8, /** - * Insufficient funds for operation + * Insufficient funds for operation. */ INSUFFICIENT_FUNDS = 9, /** - * Invalid account ID format + * Invalid account ID format. */ INVALID_ACCOUNT_ID = 10, /** - * Tokio runtime error + * Tokio runtime error. */ RUNTIME_ERROR = 11, /** - * Password required but not provided + * Password required but not provided. */ PASSWORD_REQUIRED = 12, /** - * Block synchronization error + * Block synchronization error. */ SYNC_ERROR = 13, /** - * Serialization/deserialization error + * Serialization/deserialization error. */ SERIALIZATION_ERROR = 14, /** - * Invalid conversion from FFI types to NSSA types + * Invalid conversion from FFI types to NSSA types. */ INVALID_TYPE_CONVERSION = 15, /** - * Invalid Key value + * Invalid Key value. */ INVALID_KEY_VALUE = 16, /** - * Internal error (catch-all) + * Internal error (catch-all). */ INTERNAL_ERROR = 99, } WalletFfiError; @@ -120,7 +120,7 @@ typedef struct WalletHandle { } WalletHandle; /** - * 32-byte array type for AccountId, keys, hashes, etc. + * 32-byte array type for `AccountId`, keys, hashes, etc. */ typedef struct FfiBytes32 { uint8_t data[32]; @@ -135,7 +135,7 @@ typedef struct FfiAccountListEntry { } FfiAccountListEntry; /** - * List of accounts returned by wallet_ffi_list_accounts. + * List of accounts returned by `wallet_ffi_list_accounts`. */ typedef struct FfiAccountList { struct FfiAccountListEntry *entries; @@ -150,7 +150,7 @@ typedef struct FfiProgramId { } FfiProgramId; /** - * U128 - 16 bytes little endian + * U128 - 16 bytes little endian. 
*/ typedef struct FfiU128 { uint8_t data[16]; @@ -165,19 +165,19 @@ typedef struct FfiU128 { typedef struct FfiAccount { struct FfiProgramId program_owner; /** - * Balance as little-endian [u8; 16] + * Balance as little-endian [u8; 16]. */ struct FfiU128 balance; /** - * Pointer to account data bytes + * Pointer to account data bytes. */ const uint8_t *data; /** - * Length of account data + * Length of account data. */ uintptr_t data_len; /** - * Nonce as little-endian [u8; 16] + * Nonce as little-endian [u8; 16]. */ struct FfiU128 nonce; } FfiAccount; @@ -194,15 +194,15 @@ typedef struct FfiPublicAccountKey { */ typedef struct FfiPrivateAccountKeys { /** - * Nullifier public key (32 bytes) + * Nullifier public key (32 bytes). */ struct FfiBytes32 nullifier_public_key; /** - * viewing public key (compressed secp256k1 point) + * viewing public key (compressed secp256k1 point). */ const uint8_t *viewing_public_key; /** - * Length of viewing public key (typically 33 bytes) + * Length of viewing public key (typically 33 bytes). */ uintptr_t viewing_public_key_len; } FfiPrivateAccountKeys; @@ -212,11 +212,11 @@ typedef struct FfiPrivateAccountKeys { */ typedef struct FfiTransferResult { /** - * Transaction hash (null-terminated string, or null on failure) + * Transaction hash (null-terminated string, or null on failure). */ char *tx_hash; /** - * Whether the transfer succeeded + * Whether the transfer succeeded. 
*/ bool success; } FfiTransferResult; diff --git a/wallet/Cargo.toml b/wallet/Cargo.toml index 511c20e1..63e14bb6 100644 --- a/wallet/Cargo.toml +++ b/wallet/Cargo.toml @@ -4,6 +4,9 @@ version = "0.1.0" edition = "2024" license = { workspace = true } +[lints] +workspace = true + [dependencies] nssa_core.workspace = true nssa.workspace = true @@ -21,8 +24,8 @@ humantime-serde.workspace = true humantime.workspace = true tokio = { workspace = true, features = ["macros"] } clap.workspace = true +base58.workspace = true base64.workspace = true -bytemuck.workspace = true borsh.workspace = true hex.workspace = true rand.workspace = true diff --git a/wallet/src/chain_storage.rs b/wallet/src/chain_storage.rs index 0188568b..49d31371 100644 --- a/wallet/src/chain_storage.rs +++ b/wallet/src/chain_storage.rs @@ -20,6 +20,10 @@ pub struct WalletChainStore { } impl WalletChainStore { + #[expect( + clippy::wildcard_enum_match_arm, + reason = "We perform search for specific variants only" + )] pub fn new( config: WalletConfig, persistent_accounts: Vec, @@ -158,7 +162,7 @@ impl WalletChainStore { #[cfg(test)] mod tests { use key_protocol::key_management::key_tree::{ - keys_private::ChildKeysPrivate, keys_public::ChildKeysPublic, traits::KeyNode, + keys_private::ChildKeysPrivate, keys_public::ChildKeysPublic, traits::KeyNode as _, }; use super::*; @@ -281,19 +285,19 @@ mod tests { chain_index: ChainIndex::root(), data: public_data, }), - PersistentAccountData::Private(PersistentAccountDataPrivate { + PersistentAccountData::Private(Box::new(PersistentAccountDataPrivate { account_id: private_data.account_id(), chain_index: ChainIndex::root(), data: private_data, - }), + })), ] } #[test] - fn test_new_initializes_correctly() { + fn new_initializes_correctly() { let config = create_sample_wallet_config(); let accs = create_sample_persistent_accounts(); - let _ = WalletChainStore::new(config.clone(), accs, HashMap::new()).unwrap(); + let _ = WalletChainStore::new(config, accs, 
HashMap::new()).unwrap(); } } diff --git a/wallet/src/cli/account.rs b/wallet/src/cli/account.rs index 109e6a1d..c7d76f24 100644 --- a/wallet/src/cli/account.rs +++ b/wallet/src/cli/account.rs @@ -1,4 +1,4 @@ -use anyhow::Result; +use anyhow::{Context as _, Result}; use clap::Subcommand; use itertools::Itertools as _; use key_protocol::key_management::key_tree::chain_index::ChainIndex; @@ -12,63 +12,63 @@ use crate::{ helperfunctions::{AccountPrivacyKind, HumanReadableAccount, parse_addr_with_privacy_prefix}, }; -/// Represents generic chain CLI subcommand +/// Represents generic chain CLI subcommand. #[derive(Subcommand, Debug, Clone)] pub enum AccountSubcommand { - /// Get account data + /// Get account data. Get { - /// Flag to get raw account data + /// Flag to get raw account data. #[arg(short, long)] raw: bool, - /// Display keys (pk for public accounts, npk/vpk for private accounts) + /// Display keys (pk for public accounts, npk/vpk for private accounts). #[arg(short, long)] keys: bool, - /// Valid 32 byte base58 string with privacy prefix + /// Valid 32 byte base58 string with privacy prefix. #[arg(short, long)] account_id: String, }, - /// Produce new public or private account + /// Produce new public or private account. #[command(subcommand)] New(NewSubcommand), - /// Sync private accounts - SyncPrivate {}, - /// List all accounts owned by the wallet + /// Sync private accounts. + SyncPrivate, + /// List all accounts owned by the wallet. #[command(visible_alias = "ls")] List { - /// Show detailed account information (like `account get`) + /// Show detailed account information (like `account get`). #[arg(short, long)] long: bool, }, - /// Set a label for an account + /// Set a label for an account. Label { - /// Valid 32 byte base58 string with privacy prefix + /// Valid 32 byte base58 string with privacy prefix. #[arg(short, long)] account_id: String, - /// The label to assign to the account + /// The label to assign to the account. 
#[arg(short, long)] label: String, }, } -/// Represents generic register CLI subcommand +/// Represents generic register CLI subcommand. #[derive(Subcommand, Debug, Clone)] pub enum NewSubcommand { - /// Register new public account + /// Register new public account. Public { #[arg(long)] - /// Chain index of a parent node + /// Chain index of a parent node. cci: Option, #[arg(short, long)] - /// Label to assign to the new account + /// Label to assign to the new account. label: Option, }, - /// Register new private account + /// Register new private account. Private { #[arg(long)] - /// Chain index of a parent node + /// Chain index of a parent node. cci: Option, #[arg(short, long)] - /// Label to assign to the new account + /// Label to assign to the new account. label: Option, }, } @@ -79,8 +79,8 @@ impl WalletSubcommand for NewSubcommand { wallet_core: &mut WalletCore, ) -> Result { match self { - NewSubcommand::Public { cci, label } => { - if let Some(ref label) = label + Self::Public { cci, label } => { + if let Some(label) = &label && wallet_core .storage .labels @@ -116,8 +116,8 @@ impl WalletSubcommand for NewSubcommand { Ok(SubcommandReturnValue::RegisterAccount { account_id }) } - NewSubcommand::Private { cci, label } => { - if let Some(ref label) = label + Self::Private { cci, label } => { + if let Some(label) = &label && wallet_core .storage .labels @@ -159,52 +159,14 @@ impl WalletSubcommand for NewSubcommand { } } -/// Formats account details for display, returning (description, json_view) -fn format_account_details(account: &Account) -> (String, String) { - let auth_tr_prog_id = Program::authenticated_transfer_program().id(); - let token_prog_id = Program::token().id(); - - match &account.program_owner { - o if *o == auth_tr_prog_id => ( - "Account owned by authenticated transfer program".to_string(), - serde_json::to_string(&account).unwrap(), - ), - o if *o == token_prog_id => { - if let Ok(token_def) = TokenDefinition::try_from(&account.data) { - 
( - "Definition account owned by token program".to_string(), - serde_json::to_string(&token_def).unwrap(), - ) - } else if let Ok(token_hold) = TokenHolding::try_from(&account.data) { - ( - "Holding account owned by token program".to_string(), - serde_json::to_string(&token_hold).unwrap(), - ) - } else { - let account_hr: HumanReadableAccount = account.clone().into(); - ( - "Unknown token program account".to_string(), - serde_json::to_string(&account_hr).unwrap(), - ) - } - } - _ => { - let account_hr: HumanReadableAccount = account.clone().into(); - ( - "Account".to_string(), - serde_json::to_string(&account_hr).unwrap(), - ) - } - } -} - impl WalletSubcommand for AccountSubcommand { + #[expect(clippy::cognitive_complexity, reason = "TODO: fix later")] async fn handle_subcommand( self, wallet_core: &mut WalletCore, ) -> Result { match self { - AccountSubcommand::Get { + Self::Get { raw, keys, account_id, @@ -223,7 +185,7 @@ impl WalletSubcommand for AccountSubcommand { } AccountPrivacyKind::Private => wallet_core .get_account_private(account_id) - .ok_or(anyhow::anyhow!("Private account not found in storage"))?, + .context("Private account not found in storage")?, }; // Helper closure to display keys for the account @@ -234,7 +196,7 @@ impl WalletSubcommand for AccountSubcommand { .storage .user_data .get_pub_account_signing_key(account_id) - .ok_or(anyhow::anyhow!("Public account not found in storage"))?; + .context("Public account not found in storage")?; let public_key = PublicKey::new_from_private_key(private_key); println!("pk {}", hex::encode(public_key.value())); @@ -244,7 +206,7 @@ impl WalletSubcommand for AccountSubcommand { .storage .user_data .get_private_account(account_id) - .ok_or(anyhow::anyhow!("Private account not found in storage"))?; + .context("Private account not found in storage")?; println!("npk {}", hex::encode(key.nullifier_public_key.0)); println!("vpk {}", hex::encode(key.viewing_public_key.to_bytes())); @@ -264,7 +226,7 @@ impl 
WalletSubcommand for AccountSubcommand { } if raw { - let account_hr: HumanReadableAccount = account.clone().into(); + let account_hr: HumanReadableAccount = account.into(); println!("{}", serde_json::to_string(&account_hr).unwrap()); return Ok(SubcommandReturnValue::Empty); @@ -280,10 +242,8 @@ impl WalletSubcommand for AccountSubcommand { Ok(SubcommandReturnValue::Empty) } - AccountSubcommand::New(new_subcommand) => { - new_subcommand.handle_subcommand(wallet_core).await - } - AccountSubcommand::SyncPrivate {} => { + Self::New(new_subcommand) => new_subcommand.handle_subcommand(wallet_core).await, + Self::SyncPrivate => { let curr_last_block = wallet_core .sequencer_client .get_last_block() @@ -306,17 +266,15 @@ impl WalletSubcommand for AccountSubcommand { Ok(SubcommandReturnValue::SyncedToBlock(curr_last_block)) } - AccountSubcommand::List { long } => { + Self::List { long } => { let user_data = &wallet_core.storage.user_data; let labels = &wallet_core.storage.labels; let format_with_label = |prefix: &str, id: nssa::AccountId| { let id_str = id.to_string(); - if let Some(label) = labels.get(&id_str) { - format!("{prefix} [{label}]") - } else { - prefix.to_string() - } + labels + .get(&id_str) + .map_or_else(|| prefix.to_owned(), |label| format!("{prefix} [{label}]")) }; if !long { @@ -381,7 +339,7 @@ impl WalletSubcommand for AccountSubcommand { } // Public key tree accounts - for (id, chain_index) in user_data.public_key_tree.account_id_map.iter() { + for (id, chain_index) in &user_data.public_key_tree.account_id_map { println!( "{}", format_with_label(&format!("{chain_index} Public/{id}"), *id) @@ -398,7 +356,7 @@ impl WalletSubcommand for AccountSubcommand { } // Private key tree accounts - for (id, chain_index) in user_data.private_key_tree.account_id_map.iter() { + for (id, chain_index) in &user_data.private_key_tree.account_id_map { println!( "{}", format_with_label(&format!("{chain_index} Private/{id}"), *id) @@ -416,7 +374,7 @@ impl WalletSubcommand for 
AccountSubcommand { Ok(SubcommandReturnValue::Empty) } - AccountSubcommand::Label { account_id, label } => { + Self::Label { account_id, label } => { let (account_id_str, _) = parse_addr_with_privacy_prefix(&account_id)?; // Check if label is already used by a different account @@ -450,3 +408,48 @@ impl WalletSubcommand for AccountSubcommand { } } } + +/// Formats account details for display, returning (description, `json_view`). +fn format_account_details(account: &Account) -> (String, String) { + let auth_tr_prog_id = Program::authenticated_transfer_program().id(); + let token_prog_id = Program::token().id(); + + match &account.program_owner { + o if *o == auth_tr_prog_id => { + let account_hr: HumanReadableAccount = account.clone().into(); + ( + "Account owned by authenticated transfer program".to_owned(), + serde_json::to_string(&account_hr).unwrap(), + ) + } + o if *o == token_prog_id => TokenDefinition::try_from(&account.data) + .map(|token_def| { + ( + "Definition account owned by token program".to_owned(), + serde_json::to_string(&token_def).unwrap(), + ) + }) + .or_else(|_| { + TokenHolding::try_from(&account.data).map(|token_hold| { + ( + "Holding account owned by token program".to_owned(), + serde_json::to_string(&token_hold).unwrap(), + ) + }) + }) + .unwrap_or_else(|_| { + let account_hr: HumanReadableAccount = account.clone().into(); + ( + "Unknown token program account".to_owned(), + serde_json::to_string(&account_hr).unwrap(), + ) + }), + _ => { + let account_hr: HumanReadableAccount = account.clone().into(); + ( + "Account".to_owned(), + serde_json::to_string(&account_hr).unwrap(), + ) + } + } +} diff --git a/wallet/src/cli/chain.rs b/wallet/src/cli/chain.rs index 1e3ec029..4beadbbc 100644 --- a/wallet/src/cli/chain.rs +++ b/wallet/src/cli/chain.rs @@ -7,19 +7,19 @@ use crate::{ cli::{SubcommandReturnValue, WalletSubcommand}, }; -/// Represents generic chain CLI subcommand +/// Represents generic chain CLI subcommand. 
#[derive(Subcommand, Debug, Clone)] pub enum ChainSubcommand { - /// Get current block id from sequencer - CurrentBlockId {}, - /// Get block at id from sequencer + /// Get current block id from sequencer. + CurrentBlockId, + /// Get block at id from sequencer. Block { #[arg(short, long)] id: u64, }, - /// Get transaction at hash from sequencer + /// Get transaction at hash from sequencer. Transaction { - /// hash - valid 32 byte hex string + /// hash - valid 32 byte hex string. #[arg(short = 't', long)] hash: HashType, }, @@ -31,17 +31,17 @@ impl WalletSubcommand for ChainSubcommand { wallet_core: &mut WalletCore, ) -> Result { match self { - ChainSubcommand::CurrentBlockId {} => { + Self::CurrentBlockId => { let latest_block_res = wallet_core.sequencer_client.get_last_block().await?; println!("Last block id is {}", latest_block_res.last_block); } - ChainSubcommand::Block { id } => { + Self::Block { id } => { let block_res = wallet_core.sequencer_client.get_block(id).await?; println!("Last block id is {:#?}", block_res.block); } - ChainSubcommand::Transaction { hash } => { + Self::Transaction { hash } => { let tx_res = wallet_core .sequencer_client .get_transaction_by_hash(hash) diff --git a/wallet/src/cli/config.rs b/wallet/src/cli/config.rs index bc0e3662..ac94a1b7 100644 --- a/wallet/src/cli/config.rs +++ b/wallet/src/cli/config.rs @@ -6,20 +6,20 @@ use crate::{ cli::{SubcommandReturnValue, WalletSubcommand}, }; -/// Represents generic config CLI subcommand +/// Represents generic config CLI subcommand. #[derive(Subcommand, Debug, Clone)] pub enum ConfigSubcommand { - /// Getter of config fields + /// Getter of config fields. Get { - /// Print all config fields + /// Print all config fields. #[arg(short, long)] all: bool, - /// Config field key to get + /// Config field key to get. key: Option, }, - /// Setter of config fields + /// Setter of config fields. 
Set { key: String, value: String }, - /// Prints description of corresponding field + /// Prints description of corresponding field. Description { key: String }, } @@ -29,7 +29,7 @@ impl WalletSubcommand for ConfigSubcommand { wallet_core: &mut WalletCore, ) -> Result { match self { - ConfigSubcommand::Get { all, key } => { + Self::Get { all, key } => { if all { let config_str = serde_json::to_string_pretty(&wallet_core.storage.wallet_config)?; @@ -86,7 +86,7 @@ impl WalletSubcommand for ConfigSubcommand { println!("Please provide a key or use --all flag"); } } - ConfigSubcommand::Set { key, value } => { + Self::Set { key, value } => { match key.as_str() { "override_rust_log" => { wallet_core.storage.wallet_config.override_rust_log = Some(value); @@ -97,7 +97,7 @@ impl WalletSubcommand for ConfigSubcommand { "seq_poll_timeout" => { wallet_core.storage.wallet_config.seq_poll_timeout = humantime::parse_duration(&value) - .map_err(|e| anyhow::anyhow!("Invalid duration: {}", e))?; + .map_err(|e| anyhow::anyhow!("Invalid duration: {e}"))?; } "seq_tx_poll_max_blocks" => { wallet_core.storage.wallet_config.seq_tx_poll_max_blocks = value.parse()?; @@ -120,9 +120,9 @@ impl WalletSubcommand for ConfigSubcommand { } } - wallet_core.store_config_changes().await? 
+ wallet_core.store_config_changes().await?; } - ConfigSubcommand::Description { key } => match key.as_str() { + Self::Description { key } => match key.as_str() { "override_rust_log" => { println!("Value of variable RUST_LOG to override, affects logging"); } diff --git a/wallet/src/cli/mod.rs b/wallet/src/cli/mod.rs index 87c2bb31..58d77d6a 100644 --- a/wallet/src/cli/mod.rs +++ b/wallet/src/cli/mod.rs @@ -1,6 +1,6 @@ -use std::{io::Write, path::PathBuf}; +use std::{io::Write as _, path::PathBuf, sync::Arc}; -use anyhow::{Context, Result}; +use anyhow::{Context as _, Result}; use clap::{Parser, Subcommand}; use common::HashType; use nssa::{ProgramDeploymentTransaction, program::Program}; @@ -28,62 +28,62 @@ pub(crate) trait WalletSubcommand { -> Result; } -/// Represents CLI command for a wallet +/// Represents CLI command for a wallet. #[derive(Subcommand, Debug, Clone)] #[clap(about)] pub enum Command { - /// Authenticated transfer subcommand + /// Authenticated transfer subcommand. #[command(subcommand)] AuthTransfer(AuthTransferSubcommand), - /// Generic chain info subcommand + /// Generic chain info subcommand. #[command(subcommand)] ChainInfo(ChainSubcommand), - /// Account view and sync subcommand + /// Account view and sync subcommand. #[command(subcommand)] Account(AccountSubcommand), - /// Pinata program interaction subcommand + /// Pinata program interaction subcommand. #[command(subcommand)] Pinata(PinataProgramAgnosticSubcommand), - /// Token program interaction subcommand + /// Token program interaction subcommand. #[command(subcommand)] Token(TokenProgramAgnosticSubcommand), - /// AMM program interaction subcommand + /// AMM program interaction subcommand. #[command(subcommand)] AMM(AmmProgramAgnosticSubcommand), /// Check the wallet can connect to the node and builtin local programs - /// match the remote versions - CheckHealth {}, - /// Command to setup config, get and set config fields + /// match the remote versions. 
+ CheckHealth, + /// Command to setup config, get and set config fields. #[command(subcommand)] Config(ConfigSubcommand), - /// Restoring keys from given password at given `depth` + /// Restoring keys from given password at given `depth`. /// - /// !!!WARNING!!! will rewrite current storage + /// !!!WARNING!!! will rewrite current storage. RestoreKeys { #[arg(short, long)] /// Indicates, how deep in tree accounts may be. Affects command complexity. depth: u32, }, - /// Deploy a program + /// Deploy a program. DeployProgram { binary_filepath: PathBuf }, } -/// To execute commands, env var NSSA_WALLET_HOME_DIR must be set into directory with config +/// To execute commands, env var `NSSA_WALLET_HOME_DIR` must be set into directory with config. /// /// All account addresses must be valid 32 byte base58 strings. /// -/// All account account_ids must be provided as {privacy_prefix}/{account_id}, -/// where valid options for `privacy_prefix` is `Public` and `Private` +/// All account `account_ids` must be provided as {`privacy_prefix}/{account_id`}, +/// where valid options for `privacy_prefix` is `Public` and `Private`. #[derive(Parser, Debug)] #[clap(version, about)] pub struct Args { - /// Continious run flag + /// Continious run flag. #[arg(short, long)] pub continuous_run: bool, - /// Basic authentication in the format `user` or `user:password` + /// Basic authentication in the format `user` or `user:password`. #[arg(long)] pub auth: Option, - /// Wallet command + /// Wallet command. #[command(subcommand)] pub command: Option, } @@ -114,7 +114,7 @@ pub async fn execute_subcommand( Command::Pinata(pinata_subcommand) => { pinata_subcommand.handle_subcommand(wallet_core).await? 
} - Command::CheckHealth {} => { + Command::CheckHealth => { let remote_program_ids = wallet_core .sequencer_client .get_program_ids() @@ -124,29 +124,33 @@ pub async fn execute_subcommand( else { panic!("Missing authenticated transfer ID from remote"); }; - if authenticated_transfer_id != &Program::authenticated_transfer_program().id() { - panic!("Local ID for authenticated transfer program is different from remote"); - } + assert!( + authenticated_transfer_id == &Program::authenticated_transfer_program().id(), + "Local ID for authenticated transfer program is different from remote" + ); let Some(token_id) = remote_program_ids.get("token") else { panic!("Missing token program ID from remote"); }; - if token_id != &Program::token().id() { - panic!("Local ID for token program is different from remote"); - } + assert!( + token_id == &Program::token().id(), + "Local ID for token program is different from remote" + ); let Some(circuit_id) = remote_program_ids.get("privacy_preserving_circuit") else { panic!("Missing privacy preserving circuit ID from remote"); }; - if circuit_id != &nssa::PRIVACY_PRESERVING_CIRCUIT_ID { - panic!("Local ID for privacy preserving circuit is different from remote"); - } + assert!( + circuit_id == &nssa::PRIVACY_PRESERVING_CIRCUIT_ID, + "Local ID for privacy preserving circuit is different from remote" + ); let Some(amm_id) = remote_program_ids.get("amm") else { panic!("Missing AMM program ID from remote"); }; - if amm_id != &Program::amm().id() { - panic!("Local ID for AMM program is different from remote"); - } + assert!( + amm_id == &Program::amm().id(), + "Local ID for AMM program is different from remote" + ); - println!("✅All looks good!"); + println!("\u{2705}All looks good!"); SubcommandReturnValue::Empty } @@ -202,7 +206,7 @@ pub fn read_password_from_stdin() -> Result { std::io::stdout().flush()?; std::io::stdin().read_line(&mut password)?; - Ok(password.trim().to_string()) + Ok(password.trim().to_owned()) } pub async fn 
execute_keys_restoration(wallet_core: &mut WalletCore, depth: u32) -> Result<()> { @@ -226,7 +230,7 @@ pub async fn execute_keys_restoration(wallet_core: &mut WalletCore, depth: u32) .storage .user_data .public_key_tree - .cleanup_tree_remove_uninit_layered(depth, wallet_core.sequencer_client.clone()) + .cleanup_tree_remove_uninit_layered(depth, Arc::clone(&wallet_core.sequencer_client)) .await?; println!("Public tree cleaned up"); diff --git a/wallet/src/cli/programs/amm.rs b/wallet/src/cli/programs/amm.rs index ce919b7c..7307569d 100644 --- a/wallet/src/cli/programs/amm.rs +++ b/wallet/src/cli/programs/amm.rs @@ -9,22 +9,22 @@ use crate::{ program_facades::amm::Amm, }; -/// Represents generic CLI subcommand for a wallet working with amm program +/// Represents generic CLI subcommand for a wallet working with amm program. #[derive(Subcommand, Debug, Clone)] pub enum AmmProgramAgnosticSubcommand { - /// Produce a new pool + /// Produce a new pool. /// - /// user_holding_a and user_holding_b must be owned. + /// `user_holding_a` and `user_holding_b` must be owned. /// - /// Only public execution allowed + /// Only public execution allowed. New { - /// user_holding_a - valid 32 byte base58 string with privacy prefix + /// `user_holding_a` - valid 32 byte base58 string with privacy prefix. #[arg(long)] user_holding_a: String, - /// user_holding_b - valid 32 byte base58 string with privacy prefix + /// `user_holding_b` - valid 32 byte base58 string with privacy prefix. #[arg(long)] user_holding_b: String, - /// user_holding_lp - valid 32 byte base58 string with privacy prefix + /// `user_holding_lp` - valid 32 byte base58 string with privacy prefix. #[arg(long)] user_holding_lp: String, #[arg(long)] @@ -32,39 +32,39 @@ pub enum AmmProgramAgnosticSubcommand { #[arg(long)] balance_b: u128, }, - /// Swap + /// Swap. /// - /// The account associated with swapping token must be owned + /// The account associated with swapping token must be owned. 
/// - /// Only public execution allowed + /// Only public execution allowed. Swap { - /// user_holding_a - valid 32 byte base58 string with privacy prefix + /// `user_holding_a` - valid 32 byte base58 string with privacy prefix. #[arg(long)] user_holding_a: String, - /// user_holding_b - valid 32 byte base58 string with privacy prefix + /// `user_holding_b` - valid 32 byte base58 string with privacy prefix. #[arg(long)] user_holding_b: String, #[arg(long)] amount_in: u128, #[arg(long)] min_amount_out: u128, - /// token_definition - valid 32 byte base58 string WITHOUT privacy prefix + /// `token_definition` - valid 32 byte base58 string WITHOUT privacy prefix. #[arg(long)] token_definition: String, }, - /// Add liquidity + /// Add liquidity. /// - /// user_holding_a and user_holding_b must be owned. + /// `user_holding_a` and `user_holding_b` must be owned. /// - /// Only public execution allowed + /// Only public execution allowed. AddLiquidity { - /// user_holding_a - valid 32 byte base58 string with privacy prefix + /// `user_holding_a` - valid 32 byte base58 string with privacy prefix. #[arg(long)] user_holding_a: String, - /// user_holding_b - valid 32 byte base58 string with privacy prefix + /// `user_holding_b` - valid 32 byte base58 string with privacy prefix. #[arg(long)] user_holding_b: String, - /// user_holding_lp - valid 32 byte base58 string with privacy prefix + /// `user_holding_lp` - valid 32 byte base58 string with privacy prefix. #[arg(long)] user_holding_lp: String, #[arg(long)] @@ -74,19 +74,19 @@ pub enum AmmProgramAgnosticSubcommand { #[arg(long)] max_amount_b: u128, }, - /// Remove liquidity + /// Remove liquidity. /// - /// user_holding_lp must be owned. + /// `user_holding_lp` must be owned. /// - /// Only public execution allowed + /// Only public execution allowed. RemoveLiquidity { - /// user_holding_a - valid 32 byte base58 string with privacy prefix + /// `user_holding_a` - valid 32 byte base58 string with privacy prefix. 
#[arg(long)] user_holding_a: String, - /// user_holding_b - valid 32 byte base58 string with privacy prefix + /// `user_holding_b` - valid 32 byte base58 string with privacy prefix. #[arg(long)] user_holding_b: String, - /// user_holding_lp - valid 32 byte base58 string with privacy prefix + /// `user_holding_lp` - valid 32 byte base58 string with privacy prefix. #[arg(long)] user_holding_lp: String, #[arg(long)] @@ -104,7 +104,7 @@ impl WalletSubcommand for AmmProgramAgnosticSubcommand { wallet_core: &mut WalletCore, ) -> Result { match self { - AmmProgramAgnosticSubcommand::New { + Self::New { user_holding_a, user_holding_b, user_holding_lp, @@ -150,7 +150,7 @@ impl WalletSubcommand for AmmProgramAgnosticSubcommand { } } } - AmmProgramAgnosticSubcommand::Swap { + Self::Swap { user_holding_a, user_holding_b, amount_in, @@ -185,7 +185,7 @@ impl WalletSubcommand for AmmProgramAgnosticSubcommand { } } } - AmmProgramAgnosticSubcommand::AddLiquidity { + Self::AddLiquidity { user_holding_a, user_holding_b, user_holding_lp, @@ -233,7 +233,7 @@ impl WalletSubcommand for AmmProgramAgnosticSubcommand { } } } - AmmProgramAgnosticSubcommand::RemoveLiquidity { + Self::RemoveLiquidity { user_holding_a, user_holding_b, user_holding_lp, diff --git a/wallet/src/cli/programs/native_token_transfer.rs b/wallet/src/cli/programs/native_token_transfer.rs index 9dd4fca6..314f78ba 100644 --- a/wallet/src/cli/programs/native_token_transfer.rs +++ b/wallet/src/cli/programs/native_token_transfer.rs @@ -11,35 +11,35 @@ use crate::{ program_facades::native_token_transfer::NativeTokenTransfer, }; -/// Represents generic CLI subcommand for a wallet working with native token transfer program +/// Represents generic CLI subcommand for a wallet working with native token transfer program. #[derive(Subcommand, Debug, Clone)] pub enum AuthTransferSubcommand { - /// Initialize account under authenticated transfer program + /// Initialize account under authenticated transfer program. 
Init { - /// account_id - valid 32 byte base58 string with privacy prefix + /// `account_id` - valid 32 byte base58 string with privacy prefix. #[arg(long)] account_id: String, }, - /// Send native tokens from one account to another with variable privacy + /// Send native tokens from one account to another with variable privacy. /// /// If receiver is private, then `to` and (`to_npk` , `to_vpk`) is a mutually exclusive /// patterns. /// /// First is used for owned accounts, second otherwise. Send { - /// from - valid 32 byte base58 string with privacy prefix + /// from - valid 32 byte base58 string with privacy prefix. #[arg(long)] from: String, - /// to - valid 32 byte base58 string with privacy prefix + /// to - valid 32 byte base58 string with privacy prefix. #[arg(long)] to: Option, - /// to_npk - valid 32 byte hex string + /// `to_npk` - valid 32 byte hex string. #[arg(long)] to_npk: Option, - /// to_vpk - valid 33 byte hex string + /// `to_vpk` - valid 33 byte hex string. #[arg(long)] to_vpk: Option, - /// amount - amount of balance to move + /// amount - amount of balance to move. 
#[arg(long)] amount: u128, }, @@ -51,7 +51,7 @@ impl WalletSubcommand for AuthTransferSubcommand { wallet_core: &mut WalletCore, ) -> Result { match self { - AuthTransferSubcommand::Init { account_id } => { + Self::Init { account_id } => { let (account_id, addr_privacy) = parse_addr_with_privacy_prefix(&account_id)?; match addr_privacy { @@ -87,7 +87,7 @@ impl WalletSubcommand for AuthTransferSubcommand { let acc_decode_data = vec![Decode(secret, account_id)]; wallet_core.decode_insert_privacy_preserving_transaction_results( - tx, + &tx, &acc_decode_data, )?; } @@ -98,7 +98,7 @@ impl WalletSubcommand for AuthTransferSubcommand { Ok(SubcommandReturnValue::Empty) } - AuthTransferSubcommand::Send { + Self::Send { from, to, to_npk, @@ -188,114 +188,114 @@ impl WalletSubcommand for AuthTransferSubcommand { } } -/// Represents generic CLI subcommand for a wallet working with native token transfer program +/// Represents generic CLI subcommand for a wallet working with native token transfer program. #[derive(Subcommand, Debug, Clone)] pub enum NativeTokenTransferProgramSubcommand { - /// Send native token transfer from `from` to `to` for `amount` + /// Send native token transfer from `from` to `to` for `amount`. /// - /// Public operation + /// Public operation. Public { - /// from - valid 32 byte hex string + /// from - valid 32 byte hex string. #[arg(long)] from: String, - /// to - valid 32 byte hex string + /// to - valid 32 byte hex string. #[arg(long)] to: String, - /// amount - amount of balance to move + /// amount - amount of balance to move. #[arg(long)] amount: u128, }, - /// Private execution + /// Private execution. #[command(subcommand)] Private(NativeTokenTransferProgramSubcommandPrivate), - /// Send native token transfer from `from` to `to` for `amount` + /// Send native token transfer from `from` to `to` for `amount`. /// - /// Deshielded operation + /// Deshielded operation. 
Deshielded { - /// from - valid 32 byte hex string + /// from - valid 32 byte hex string. #[arg(long)] from: String, - /// to - valid 32 byte hex string + /// to - valid 32 byte hex string. #[arg(long)] to: String, - /// amount - amount of balance to move + /// amount - amount of balance to move. #[arg(long)] amount: u128, }, - /// Shielded execution + /// Shielded execution. #[command(subcommand)] Shielded(NativeTokenTransferProgramSubcommandShielded), } /// Represents generic shielded CLI subcommand for a wallet working with native token transfer -/// program +/// program. #[derive(Subcommand, Debug, Clone)] pub enum NativeTokenTransferProgramSubcommandShielded { - /// Send native token transfer from `from` to `to` for `amount` + /// Send native token transfer from `from` to `to` for `amount`. /// - /// Shielded operation + /// Shielded operation. ShieldedOwned { - /// from - valid 32 byte hex string + /// from - valid 32 byte hex string. #[arg(long)] from: String, - /// to - valid 32 byte hex string + /// to - valid 32 byte hex string. #[arg(long)] to: String, - /// amount - amount of balance to move + /// amount - amount of balance to move. #[arg(long)] amount: u128, }, - /// Send native token transfer from `from` to `to` for `amount` + /// Send native token transfer from `from` to `to` for `amount`. /// - /// Shielded operation + /// Shielded operation. ShieldedForeign { - /// from - valid 32 byte hex string + /// from - valid 32 byte hex string. #[arg(long)] from: String, - /// to_npk - valid 32 byte hex string + /// `to_npk` - valid 32 byte hex string. #[arg(long)] to_npk: String, - /// to_vpk - valid 33 byte hex string + /// `to_vpk` - valid 33 byte hex string. #[arg(long)] to_vpk: String, - /// amount - amount of balance to move + /// amount - amount of balance to move. #[arg(long)] amount: u128, }, } /// Represents generic private CLI subcommand for a wallet working with native token transfer -/// program +/// program. 
#[derive(Subcommand, Debug, Clone)] pub enum NativeTokenTransferProgramSubcommandPrivate { - /// Send native token transfer from `from` to `to` for `amount` + /// Send native token transfer from `from` to `to` for `amount`. /// - /// Private operation + /// Private operation. PrivateOwned { - /// from - valid 32 byte hex string + /// from - valid 32 byte hex string. #[arg(long)] from: String, - /// to - valid 32 byte hex string + /// to - valid 32 byte hex string. #[arg(long)] to: String, - /// amount - amount of balance to move + /// amount - amount of balance to move. #[arg(long)] amount: u128, }, - /// Send native token transfer from `from` to `to` for `amount` + /// Send native token transfer from `from` to `to` for `amount`. /// - /// Private operation + /// Private operation. PrivateForeign { - /// from - valid 32 byte hex string + /// from - valid 32 byte hex string. #[arg(long)] from: String, - /// to_npk - valid 32 byte hex string + /// `to_npk` - valid 32 byte hex string. #[arg(long)] to_npk: String, - /// to_vpk - valid 33 byte hex string + /// `to_vpk` - valid 33 byte hex string. #[arg(long)] to_vpk: String, - /// amount - amount of balance to move + /// amount - amount of balance to move. 
#[arg(long)] amount: u128, }, @@ -307,7 +307,7 @@ impl WalletSubcommand for NativeTokenTransferProgramSubcommandPrivate { wallet_core: &mut WalletCore, ) -> Result { match self { - NativeTokenTransferProgramSubcommandPrivate::PrivateOwned { from, to, amount } => { + Self::PrivateOwned { from, to, amount } => { let from: AccountId = from.parse().unwrap(); let to: AccountId = to.parse().unwrap(); @@ -324,7 +324,7 @@ impl WalletSubcommand for NativeTokenTransferProgramSubcommandPrivate { let acc_decode_data = vec![Decode(secret_from, from), Decode(secret_to, to)]; wallet_core.decode_insert_privacy_preserving_transaction_results( - tx, + &tx, &acc_decode_data, )?; } @@ -333,7 +333,7 @@ impl WalletSubcommand for NativeTokenTransferProgramSubcommandPrivate { Ok(SubcommandReturnValue::PrivacyPreservingTransfer { tx_hash }) } - NativeTokenTransferProgramSubcommandPrivate::PrivateForeign { + Self::PrivateForeign { from, to_npk, to_vpk, @@ -346,7 +346,7 @@ impl WalletSubcommand for NativeTokenTransferProgramSubcommandPrivate { let to_npk = nssa_core::NullifierPublicKey(to_npk); let to_vpk_res = hex::decode(to_vpk)?; - let mut to_vpk = [0u8; 33]; + let mut to_vpk = [0_u8; 33]; to_vpk.copy_from_slice(&to_vpk_res); let to_vpk = nssa_core::encryption::shared_key_derivation::Secp256k1Point(to_vpk.to_vec()); @@ -364,7 +364,7 @@ impl WalletSubcommand for NativeTokenTransferProgramSubcommandPrivate { let acc_decode_data = vec![Decode(secret_from, from)]; wallet_core.decode_insert_privacy_preserving_transaction_results( - tx, + &tx, &acc_decode_data, )?; } @@ -383,7 +383,7 @@ impl WalletSubcommand for NativeTokenTransferProgramSubcommandShielded { wallet_core: &mut WalletCore, ) -> Result { match self { - NativeTokenTransferProgramSubcommandShielded::ShieldedOwned { from, to, amount } => { + Self::ShieldedOwned { from, to, amount } => { let from: AccountId = from.parse().unwrap(); let to: AccountId = to.parse().unwrap(); @@ -400,7 +400,7 @@ impl WalletSubcommand for 
NativeTokenTransferProgramSubcommandShielded { let acc_decode_data = vec![Decode(secret, to)]; wallet_core.decode_insert_privacy_preserving_transaction_results( - tx, + &tx, &acc_decode_data, )?; } @@ -409,7 +409,7 @@ impl WalletSubcommand for NativeTokenTransferProgramSubcommandShielded { Ok(SubcommandReturnValue::PrivacyPreservingTransfer { tx_hash }) } - NativeTokenTransferProgramSubcommandShielded::ShieldedForeign { + Self::ShieldedForeign { from, to_npk, to_vpk, @@ -423,7 +423,7 @@ impl WalletSubcommand for NativeTokenTransferProgramSubcommandShielded { let to_npk = nssa_core::NullifierPublicKey(to_npk); let to_vpk_res = hex::decode(to_vpk)?; - let mut to_vpk = [0u8; 33]; + let mut to_vpk = [0_u8; 33]; to_vpk.copy_from_slice(&to_vpk_res); let to_vpk = nssa_core::encryption::shared_key_derivation::Secp256k1Point(to_vpk.to_vec()); @@ -450,13 +450,13 @@ impl WalletSubcommand for NativeTokenTransferProgramSubcommand { wallet_core: &mut WalletCore, ) -> Result { match self { - NativeTokenTransferProgramSubcommand::Private(private_subcommand) => { + Self::Private(private_subcommand) => { private_subcommand.handle_subcommand(wallet_core).await } - NativeTokenTransferProgramSubcommand::Shielded(shielded_subcommand) => { + Self::Shielded(shielded_subcommand) => { shielded_subcommand.handle_subcommand(wallet_core).await } - NativeTokenTransferProgramSubcommand::Deshielded { from, to, amount } => { + Self::Deshielded { from, to, amount } => { let from: AccountId = from.parse().unwrap(); let to: AccountId = to.parse().unwrap(); @@ -473,7 +473,7 @@ impl WalletSubcommand for NativeTokenTransferProgramSubcommand { let acc_decode_data = vec![Decode(secret, from)]; wallet_core.decode_insert_privacy_preserving_transaction_results( - tx, + &tx, &acc_decode_data, )?; } @@ -482,7 +482,7 @@ impl WalletSubcommand for NativeTokenTransferProgramSubcommand { Ok(SubcommandReturnValue::PrivacyPreservingTransfer { tx_hash }) } - NativeTokenTransferProgramSubcommand::Public { from, to, 
amount } => { + Self::Public { from, to, amount } => { let from: AccountId = from.parse().unwrap(); let to: AccountId = to.parse().unwrap(); diff --git a/wallet/src/cli/programs/pinata.rs b/wallet/src/cli/programs/pinata.rs index b117c4c1..948da9c2 100644 --- a/wallet/src/cli/programs/pinata.rs +++ b/wallet/src/cli/programs/pinata.rs @@ -1,6 +1,7 @@ -use anyhow::{Context, Result}; +use anyhow::{Context as _, Result}; use clap::Subcommand; use common::{PINATA_BASE58, transaction::NSSATransaction}; +use nssa::{Account, AccountId}; use crate::{ AccDecodeData::Decode, @@ -10,12 +11,12 @@ use crate::{ program_facades::pinata::Pinata, }; -/// Represents generic CLI subcommand for a wallet working with pinata program +/// Represents generic CLI subcommand for a wallet working with pinata program. #[derive(Subcommand, Debug, Clone)] pub enum PinataProgramAgnosticSubcommand { - /// Claim pinata + /// Claim pinata. Claim { - /// to - valid 32 byte base58 string with privacy prefix + /// to - valid 32 byte base58 string with privacy prefix. 
#[arg(long)] to: String, }, @@ -27,19 +28,19 @@ impl WalletSubcommand for PinataProgramAgnosticSubcommand { wallet_core: &mut WalletCore, ) -> Result { let underlying_subcommand = match self { - PinataProgramAgnosticSubcommand::Claim { to } => { + Self::Claim { to } => { let (to, to_addr_privacy) = parse_addr_with_privacy_prefix(&to)?; match to_addr_privacy { AccountPrivacyKind::Public => { PinataProgramSubcommand::Public(PinataProgramSubcommandPublic::Claim { - pinata_account_id: PINATA_BASE58.to_string(), + pinata_account_id: PINATA_BASE58.to_owned(), winner_account_id: to, }) } AccountPrivacyKind::Private => PinataProgramSubcommand::Private( PinataProgramSubcommandPrivate::ClaimPrivateOwned { - pinata_account_id: PINATA_BASE58.to_string(), + pinata_account_id: PINATA_BASE58.to_owned(), winner_account_id: to, }, ), @@ -51,42 +52,42 @@ impl WalletSubcommand for PinataProgramAgnosticSubcommand { } } -/// Represents generic CLI subcommand for a wallet working with pinata program +/// Represents generic CLI subcommand for a wallet working with pinata program. #[derive(Subcommand, Debug, Clone)] pub enum PinataProgramSubcommand { - /// Public execution + /// Public execution. #[command(subcommand)] Public(PinataProgramSubcommandPublic), - /// Private execution + /// Private execution. #[command(subcommand)] Private(PinataProgramSubcommandPrivate), } -/// Represents generic public CLI subcommand for a wallet working with pinata program +/// Represents generic public CLI subcommand for a wallet working with pinata program. #[derive(Subcommand, Debug, Clone)] pub enum PinataProgramSubcommandPublic { // TODO: Testnet only. Refactor to prevent compilation on mainnet. // Claim piñata prize Claim { - /// pinata_account_id - valid 32 byte hex string + /// `pinata_account_id` - valid 32 byte hex string. #[arg(long)] pinata_account_id: String, - /// winner_account_id - valid 32 byte hex string + /// `winner_account_id` - valid 32 byte hex string. 
#[arg(long)] winner_account_id: String, }, } -/// Represents generic private CLI subcommand for a wallet working with pinata program +/// Represents generic private CLI subcommand for a wallet working with pinata program. #[derive(Subcommand, Debug, Clone)] pub enum PinataProgramSubcommandPrivate { // TODO: Testnet only. Refactor to prevent compilation on mainnet. // Claim piñata prize ClaimPrivateOwned { - /// pinata_account_id - valid 32 byte hex string + /// `pinata_account_id` - valid 32 byte hex string. #[arg(long)] pinata_account_id: String, - /// winner_account_id - valid 32 byte hex string + /// `winner_account_id` - valid 32 byte hex string. #[arg(long)] winner_account_id: String, }, @@ -98,21 +99,21 @@ impl WalletSubcommand for PinataProgramSubcommandPublic { wallet_core: &mut WalletCore, ) -> Result { match self { - PinataProgramSubcommandPublic::Claim { + Self::Claim { pinata_account_id, winner_account_id, } => { - let pinata_account_id = pinata_account_id.parse().unwrap(); + let pinata_account_id = pinata_account_id.parse()?; + let winner_account_id: AccountId = winner_account_id.parse()?; + + ensure_public_recipient_initialized(wallet_core, winner_account_id).await?; + let solution = find_solution(wallet_core, pinata_account_id) .await .context("failed to compute solution")?; let res = Pinata(wallet_core) - .claim( - pinata_account_id, - winner_account_id.parse().unwrap(), - solution, - ) + .claim(pinata_account_id, winner_account_id, solution) .await?; println!("Results of tx send are {res:#?}"); @@ -134,12 +135,15 @@ impl WalletSubcommand for PinataProgramSubcommandPrivate { wallet_core: &mut WalletCore, ) -> Result { match self { - PinataProgramSubcommandPrivate::ClaimPrivateOwned { + Self::ClaimPrivateOwned { pinata_account_id, winner_account_id, } => { - let pinata_account_id = pinata_account_id.parse().unwrap(); - let winner_account_id = winner_account_id.parse().unwrap(); + let pinata_account_id = pinata_account_id.parse()?; + let 
winner_account_id: AccountId = winner_account_id.parse()?; + + ensure_private_owned_recipient_initialized(wallet_core, winner_account_id)?; + let solution = find_solution(wallet_core, pinata_account_id) .await .context("failed to compute solution")?; @@ -159,7 +163,7 @@ impl WalletSubcommand for PinataProgramSubcommandPrivate { let acc_decode_data = vec![Decode(secret_winner, winner_account_id)]; wallet_core.decode_insert_privacy_preserving_transaction_results( - tx, + &tx, &acc_decode_data, )?; } @@ -178,23 +182,67 @@ impl WalletSubcommand for PinataProgramSubcommand { wallet_core: &mut WalletCore, ) -> Result { match self { - PinataProgramSubcommand::Private(private_subcommand) => { + Self::Private(private_subcommand) => { private_subcommand.handle_subcommand(wallet_core).await } - PinataProgramSubcommand::Public(public_subcommand) => { + Self::Public(public_subcommand) => { public_subcommand.handle_subcommand(wallet_core).await } } } } -async fn find_solution(wallet: &WalletCore, pinata_account_id: nssa::AccountId) -> Result { +async fn ensure_public_recipient_initialized( + wallet_core: &WalletCore, + winner_account_id: AccountId, +) -> Result<()> { + let account = wallet_core + .get_account_public(winner_account_id) + .await + .with_context(|| format!("failed to fetch recipient account Public/{winner_account_id}"))?; + + if account == Account::default() { + anyhow::bail!( + "Recipient account Public/{winner_account_id} is uninitialized.\n\ + Initialize it first:\n \ + wallet auth-transfer init --account-id Public/{winner_account_id}" + ); + } + + Ok(()) +} + +fn ensure_private_owned_recipient_initialized( + wallet_core: &WalletCore, + winner_account_id: AccountId, +) -> Result<()> { + let Some(account) = wallet_core.get_account_private(winner_account_id) else { + anyhow::bail!( + "Recipient account Private/{winner_account_id} is not found in this wallet.\n\ + `wallet pinata claim --to Private/...` supports owned private accounts only." 
+ ); + }; + + if account == Account::default() { + anyhow::bail!( + "Recipient account Private/{winner_account_id} is uninitialized.\n\ + Initialize it first:\n \ + wallet auth-transfer init --account-id Private/{winner_account_id}\n\ + Then sync private state:\n \ + wallet account sync-private" + ); + } + + Ok(()) +} + +async fn find_solution(wallet: &WalletCore, pinata_account_id: AccountId) -> Result { let account = wallet.get_account_public(pinata_account_id).await?; let data: [u8; 33] = account .data .as_ref() .try_into() - .map_err(|_| anyhow::Error::msg("invalid pinata account data"))?; + .map_err(|_err| anyhow::Error::msg("invalid pinata account data"))?; println!("Computing solution for pinata..."); let now = std::time::Instant::now(); @@ -209,7 +257,7 @@ fn compute_solution(data: [u8; 33]) -> u128 { let difficulty = data[0]; let seed = &data[1..]; - let mut solution = 0u128; + let mut solution = 0_u128; while !validate_solution(difficulty, seed, solution) { solution = solution.checked_add(1).expect("solution overflowed u128"); } @@ -228,6 +276,6 @@ fn validate_solution(difficulty: u8, seed: &[u8], solution: u128) -> bool { hasher.update(bytes); let digest: [u8; 32] = hasher.finalize_fixed().into(); - let difficulty = difficulty as usize; + let difficulty = usize::from(difficulty); digest[..difficulty].iter().all(|&b| b == 0) } diff --git a/wallet/src/cli/programs/token.rs b/wallet/src/cli/programs/token.rs index 2f83d4ce..65a283dd 100644 --- a/wallet/src/cli/programs/token.rs +++ b/wallet/src/cli/programs/token.rs @@ -11,15 +11,15 @@ use crate::{ program_facades::token::Token, }; -/// Represents generic CLI subcommand for a wallet working with token program +/// Represents generic CLI subcommand for a wallet working with token program. #[derive(Subcommand, Debug, Clone)] pub enum TokenProgramAgnosticSubcommand { - /// Produce a new token + /// Produce a new token. 
New { - /// definition_account_id - valid 32 byte base58 string with privacy prefix + /// `definition_account_id` - valid 32 byte base58 string with privacy prefix. #[arg(long)] definition_account_id: String, - /// supply_account_id - valid 32 byte base58 string with privacy prefix + /// `supply_account_id` - valid 32 byte base58 string with privacy prefix. #[arg(long)] supply_account_id: String, #[arg(short, long)] @@ -27,68 +27,68 @@ pub enum TokenProgramAgnosticSubcommand { #[arg(short, long)] total_supply: u128, }, - /// Send tokens from one account to another with variable privacy + /// Send tokens from one account to another with variable privacy. /// /// If receiver is private, then `to` and (`to_npk` , `to_vpk`) is a mutually exclusive /// patterns. /// /// First is used for owned accounts, second otherwise. Send { - /// from - valid 32 byte base58 string with privacy prefix + /// from - valid 32 byte base58 string with privacy prefix. #[arg(long)] from: String, - /// to - valid 32 byte base58 string with privacy prefix + /// to - valid 32 byte base58 string with privacy prefix. #[arg(long)] to: Option, - /// to_npk - valid 32 byte hex string + /// `to_npk` - valid 32 byte hex string. #[arg(long)] to_npk: Option, - /// to_vpk - valid 33 byte hex string + /// `to_vpk` - valid 33 byte hex string. #[arg(long)] to_vpk: Option, - /// amount - amount of balance to move + /// amount - amount of balance to move. #[arg(long)] amount: u128, }, /// Burn tokens on `holder`, modify `definition`. /// - /// `holder` is owned + /// `holder` is owned. /// /// Also if `definition` is private then it is owned, because /// we can not modify foreign accounts. Burn { - /// definition - valid 32 byte base58 string with privacy prefix + /// definition - valid 32 byte base58 string with privacy prefix. #[arg(long)] definition: String, - /// holder - valid 32 byte base58 string with privacy prefix + /// holder - valid 32 byte base58 string with privacy prefix. 
#[arg(long)] holder: String, - /// amount - amount of balance to burn + /// amount - amount of balance to burn. #[arg(long)] amount: u128, }, /// Mint tokens on `holder`, modify `definition`. /// - /// `definition` is owned + /// `definition` is owned. /// /// If `holder` is private, then `holder` and (`holder_npk` , `holder_vpk`) is a mutually /// exclusive patterns. /// /// First is used for owned accounts, second otherwise. Mint { - /// definition - valid 32 byte base58 string with privacy prefix + /// definition - valid 32 byte base58 string with privacy prefix. #[arg(long)] definition: String, - /// holder - valid 32 byte base58 string with privacy prefix + /// holder - valid 32 byte base58 string with privacy prefix. #[arg(long)] holder: Option, - /// holder_npk - valid 32 byte hex string + /// `holder_npk` - valid 32 byte hex string. #[arg(long)] holder_npk: Option, - /// to_vpk - valid 33 byte hex string + /// `to_vpk` - valid 33 byte hex string. #[arg(long)] holder_vpk: Option, - /// amount - amount of balance to mint + /// amount - amount of balance to mint. 
#[arg(long)] amount: u128, }, @@ -100,7 +100,7 @@ impl WalletSubcommand for TokenProgramAgnosticSubcommand { wallet_core: &mut WalletCore, ) -> Result { match self { - TokenProgramAgnosticSubcommand::New { + Self::New { definition_account_id, supply_account_id, name, @@ -156,7 +156,7 @@ impl WalletSubcommand for TokenProgramAgnosticSubcommand { underlying_subcommand.handle_subcommand(wallet_core).await } - TokenProgramAgnosticSubcommand::Send { + Self::Send { from, to, to_npk, @@ -246,7 +246,7 @@ impl WalletSubcommand for TokenProgramAgnosticSubcommand { underlying_subcommand.handle_subcommand(wallet_core).await } - TokenProgramAgnosticSubcommand::Burn { + Self::Burn { definition, holder, amount, @@ -298,7 +298,7 @@ impl WalletSubcommand for TokenProgramAgnosticSubcommand { underlying_subcommand.handle_subcommand(wallet_core).await } - TokenProgramAgnosticSubcommand::Mint { + Self::Mint { definition, holder, holder_npk, @@ -394,27 +394,27 @@ impl WalletSubcommand for TokenProgramAgnosticSubcommand { } } -/// Represents generic CLI subcommand for a wallet working with token_program +/// Represents generic CLI subcommand for a wallet working with `token_program`. #[derive(Subcommand, Debug, Clone)] pub enum TokenProgramSubcommand { - /// Creation of new token + /// Creation of new token. #[command(subcommand)] Create(CreateNewTokenProgramSubcommand), - /// Public execution + /// Public execution. #[command(subcommand)] Public(TokenProgramSubcommandPublic), - /// Private execution + /// Private execution. #[command(subcommand)] Private(TokenProgramSubcommandPrivate), - /// Deshielded execution + /// Deshielded execution. #[command(subcommand)] Deshielded(TokenProgramSubcommandDeshielded), - /// Shielded execution + /// Shielded execution. 
#[command(subcommand)] Shielded(TokenProgramSubcommandShielded), } -/// Represents generic public CLI subcommand for a wallet working with token_program +/// Represents generic public CLI subcommand for a wallet working with `token_program`. #[derive(Subcommand, Debug, Clone)] pub enum TokenProgramSubcommandPublic { // Transfer tokens using the token program @@ -446,7 +446,7 @@ pub enum TokenProgramSubcommandPublic { }, } -/// Represents generic private CLI subcommand for a wallet working with token_program +/// Represents generic private CLI subcommand for a wallet working with `token_program`. #[derive(Subcommand, Debug, Clone)] pub enum TokenProgramSubcommandPrivate { // Transfer tokens using the token program @@ -462,10 +462,10 @@ pub enum TokenProgramSubcommandPrivate { TransferTokenPrivateForeign { #[arg(short, long)] sender_account_id: String, - /// recipient_npk - valid 32 byte hex string + /// `recipient_npk` - valid 32 byte hex string. #[arg(long)] recipient_npk: String, - /// recipient_vpk - valid 33 byte hex string + /// `recipient_vpk` - valid 33 byte hex string. #[arg(long)] recipient_vpk: String, #[arg(short, long)] @@ -502,7 +502,7 @@ pub enum TokenProgramSubcommandPrivate { }, } -/// Represents deshielded public CLI subcommand for a wallet working with token_program +/// Represents deshielded public CLI subcommand for a wallet working with `token_program`. #[derive(Subcommand, Debug, Clone)] pub enum TokenProgramSubcommandDeshielded { // Transfer tokens using the token program @@ -534,7 +534,7 @@ pub enum TokenProgramSubcommandDeshielded { }, } -/// Represents generic shielded CLI subcommand for a wallet working with token_program +/// Represents generic shielded CLI subcommand for a wallet working with `token_program`. 
#[derive(Subcommand, Debug, Clone)] pub enum TokenProgramSubcommandShielded { // Transfer tokens using the token program @@ -550,10 +550,10 @@ pub enum TokenProgramSubcommandShielded { TransferTokenShieldedForeign { #[arg(short, long)] sender_account_id: String, - /// recipient_npk - valid 32 byte hex string + /// `recipient_npk` - valid 32 byte hex string. #[arg(long)] recipient_npk: String, - /// recipient_vpk - valid 33 byte hex string + /// `recipient_vpk` - valid 33 byte hex string. #[arg(long)] recipient_vpk: String, #[arg(short, long)] @@ -590,12 +590,12 @@ pub enum TokenProgramSubcommandShielded { }, } -/// Represents generic initialization subcommand for a wallet working with token_program +/// Represents generic initialization subcommand for a wallet working with `token_program`. #[derive(Subcommand, Debug, Clone)] pub enum CreateNewTokenProgramSubcommand { - /// Create a new token using the token program + /// Create a new token using the token program. /// - /// Definition - public, supply - public + /// Definition - public, supply - public. NewPublicDefPublicSupp { #[arg(short, long)] definition_account_id: String, @@ -606,9 +606,9 @@ pub enum CreateNewTokenProgramSubcommand { #[arg(short, long)] total_supply: u128, }, - /// Create a new token using the token program + /// Create a new token using the token program. /// - /// Definition - public, supply - private + /// Definition - public, supply - private. NewPublicDefPrivateSupp { #[arg(short, long)] definition_account_id: String, @@ -619,9 +619,9 @@ pub enum CreateNewTokenProgramSubcommand { #[arg(short, long)] total_supply: u128, }, - /// Create a new token using the token program + /// Create a new token using the token program. /// - /// Definition - private, supply - public + /// Definition - private, supply - public. 
NewPrivateDefPublicSupp { #[arg(short, long)] definition_account_id: String, @@ -632,9 +632,9 @@ pub enum CreateNewTokenProgramSubcommand { #[arg(short, long)] total_supply: u128, }, - /// Create a new token using the token program + /// Create a new token using the token program. /// - /// Definition - private, supply - private + /// Definition - private, supply - private. NewPrivateDefPrivateSupp { #[arg(short, long)] definition_account_id: String, @@ -653,7 +653,7 @@ impl WalletSubcommand for TokenProgramSubcommandPublic { wallet_core: &mut WalletCore, ) -> Result { match self { - TokenProgramSubcommandPublic::TransferToken { + Self::TransferToken { sender_account_id, recipient_account_id, balance_to_move, @@ -667,7 +667,7 @@ impl WalletSubcommand for TokenProgramSubcommandPublic { .await?; Ok(SubcommandReturnValue::Empty) } - TokenProgramSubcommandPublic::BurnToken { + Self::BurnToken { definition_account_id, holder_account_id, amount, @@ -681,7 +681,7 @@ impl WalletSubcommand for TokenProgramSubcommandPublic { .await?; Ok(SubcommandReturnValue::Empty) } - TokenProgramSubcommandPublic::MintToken { + Self::MintToken { definition_account_id, holder_account_id, amount, @@ -705,7 +705,7 @@ impl WalletSubcommand for TokenProgramSubcommandPrivate { wallet_core: &mut WalletCore, ) -> Result { match self { - TokenProgramSubcommandPrivate::TransferTokenPrivateOwned { + Self::TransferTokenPrivateOwned { sender_account_id, recipient_account_id, balance_to_move, @@ -733,7 +733,7 @@ impl WalletSubcommand for TokenProgramSubcommandPrivate { ]; wallet_core.decode_insert_privacy_preserving_transaction_results( - tx, + &tx, &acc_decode_data, )?; } @@ -742,7 +742,7 @@ impl WalletSubcommand for TokenProgramSubcommandPrivate { Ok(SubcommandReturnValue::PrivacyPreservingTransfer { tx_hash }) } - TokenProgramSubcommandPrivate::TransferTokenPrivateForeign { + Self::TransferTokenPrivateForeign { sender_account_id, recipient_npk, recipient_vpk, @@ -755,7 +755,7 @@ impl WalletSubcommand 
for TokenProgramSubcommandPrivate { let recipient_npk = nssa_core::NullifierPublicKey(recipient_npk); let recipient_vpk_res = hex::decode(recipient_vpk)?; - let mut recipient_vpk = [0u8; 33]; + let mut recipient_vpk = [0_u8; 33]; recipient_vpk.copy_from_slice(&recipient_vpk_res); let recipient_vpk = nssa_core::encryption::shared_key_derivation::Secp256k1Point( recipient_vpk.to_vec(), @@ -779,7 +779,7 @@ impl WalletSubcommand for TokenProgramSubcommandPrivate { let acc_decode_data = vec![Decode(secret_sender, sender_account_id)]; wallet_core.decode_insert_privacy_preserving_transaction_results( - tx, + &tx, &acc_decode_data, )?; } @@ -788,7 +788,7 @@ impl WalletSubcommand for TokenProgramSubcommandPrivate { Ok(SubcommandReturnValue::PrivacyPreservingTransfer { tx_hash }) } - TokenProgramSubcommandPrivate::BurnTokenPrivateOwned { + Self::BurnTokenPrivateOwned { definition_account_id, holder_account_id, amount, @@ -816,7 +816,7 @@ impl WalletSubcommand for TokenProgramSubcommandPrivate { ]; wallet_core.decode_insert_privacy_preserving_transaction_results( - tx, + &tx, &acc_decode_data, )?; } @@ -825,7 +825,7 @@ impl WalletSubcommand for TokenProgramSubcommandPrivate { Ok(SubcommandReturnValue::PrivacyPreservingTransfer { tx_hash }) } - TokenProgramSubcommandPrivate::MintTokenPrivateOwned { + Self::MintTokenPrivateOwned { definition_account_id, holder_account_id, amount, @@ -853,7 +853,7 @@ impl WalletSubcommand for TokenProgramSubcommandPrivate { ]; wallet_core.decode_insert_privacy_preserving_transaction_results( - tx, + &tx, &acc_decode_data, )?; } @@ -862,7 +862,7 @@ impl WalletSubcommand for TokenProgramSubcommandPrivate { Ok(SubcommandReturnValue::PrivacyPreservingTransfer { tx_hash }) } - TokenProgramSubcommandPrivate::MintTokenPrivateForeign { + Self::MintTokenPrivateForeign { definition_account_id, holder_npk, holder_vpk, @@ -876,7 +876,7 @@ impl WalletSubcommand for TokenProgramSubcommandPrivate { let holder_npk = nssa_core::NullifierPublicKey(holder_npk); 
let holder_vpk_res = hex::decode(holder_vpk)?; - let mut holder_vpk = [0u8; 33]; + let mut holder_vpk = [0_u8; 33]; holder_vpk.copy_from_slice(&holder_vpk_res); let holder_vpk = nssa_core::encryption::shared_key_derivation::Secp256k1Point( holder_vpk.to_vec(), @@ -900,7 +900,7 @@ impl WalletSubcommand for TokenProgramSubcommandPrivate { let acc_decode_data = vec![Decode(secret_definition, definition_account_id)]; wallet_core.decode_insert_privacy_preserving_transaction_results( - tx, + &tx, &acc_decode_data, )?; } @@ -919,7 +919,7 @@ impl WalletSubcommand for TokenProgramSubcommandDeshielded { wallet_core: &mut WalletCore, ) -> Result { match self { - TokenProgramSubcommandDeshielded::TransferTokenDeshielded { + Self::TransferTokenDeshielded { sender_account_id, recipient_account_id, balance_to_move, @@ -944,7 +944,7 @@ impl WalletSubcommand for TokenProgramSubcommandDeshielded { let acc_decode_data = vec![Decode(secret_sender, sender_account_id)]; wallet_core.decode_insert_privacy_preserving_transaction_results( - tx, + &tx, &acc_decode_data, )?; } @@ -953,7 +953,7 @@ impl WalletSubcommand for TokenProgramSubcommandDeshielded { Ok(SubcommandReturnValue::PrivacyPreservingTransfer { tx_hash }) } - TokenProgramSubcommandDeshielded::BurnTokenDeshieldedOwned { + Self::BurnTokenDeshieldedOwned { definition_account_id, holder_account_id, amount, @@ -978,7 +978,7 @@ impl WalletSubcommand for TokenProgramSubcommandDeshielded { let acc_decode_data = vec![Decode(secret_definition, definition_account_id)]; wallet_core.decode_insert_privacy_preserving_transaction_results( - tx, + &tx, &acc_decode_data, )?; } @@ -987,7 +987,7 @@ impl WalletSubcommand for TokenProgramSubcommandDeshielded { Ok(SubcommandReturnValue::PrivacyPreservingTransfer { tx_hash }) } - TokenProgramSubcommandDeshielded::MintTokenDeshielded { + Self::MintTokenDeshielded { definition_account_id, holder_account_id, amount, @@ -1012,7 +1012,7 @@ impl WalletSubcommand for TokenProgramSubcommandDeshielded { let 
acc_decode_data = vec![Decode(secret_definition, definition_account_id)]; wallet_core.decode_insert_privacy_preserving_transaction_results( - tx, + &tx, &acc_decode_data, )?; } @@ -1031,7 +1031,7 @@ impl WalletSubcommand for TokenProgramSubcommandShielded { wallet_core: &mut WalletCore, ) -> Result { match self { - TokenProgramSubcommandShielded::TransferTokenShieldedForeign { + Self::TransferTokenShieldedForeign { sender_account_id, recipient_npk, recipient_vpk, @@ -1044,7 +1044,7 @@ impl WalletSubcommand for TokenProgramSubcommandShielded { let recipient_npk = nssa_core::NullifierPublicKey(recipient_npk); let recipient_vpk_res = hex::decode(recipient_vpk)?; - let mut recipient_vpk = [0u8; 33]; + let mut recipient_vpk = [0_u8; 33]; recipient_vpk.copy_from_slice(&recipient_vpk_res); let recipient_vpk = nssa_core::encryption::shared_key_derivation::Secp256k1Point( recipient_vpk.to_vec(), @@ -1072,7 +1072,7 @@ impl WalletSubcommand for TokenProgramSubcommandShielded { Ok(SubcommandReturnValue::PrivacyPreservingTransfer { tx_hash }) } - TokenProgramSubcommandShielded::TransferTokenShieldedOwned { + Self::TransferTokenShieldedOwned { sender_account_id, recipient_account_id, balance_to_move, @@ -1097,7 +1097,7 @@ impl WalletSubcommand for TokenProgramSubcommandShielded { let acc_decode_data = vec![Decode(secret_recipient, recipient_account_id)]; wallet_core.decode_insert_privacy_preserving_transaction_results( - tx, + &tx, &acc_decode_data, )?; } @@ -1106,7 +1106,7 @@ impl WalletSubcommand for TokenProgramSubcommandShielded { Ok(SubcommandReturnValue::PrivacyPreservingTransfer { tx_hash }) } - TokenProgramSubcommandShielded::BurnTokenShielded { + Self::BurnTokenShielded { definition_account_id, holder_account_id, amount, @@ -1131,7 +1131,7 @@ impl WalletSubcommand for TokenProgramSubcommandShielded { let acc_decode_data = vec![Decode(secret_holder, holder_account_id)]; wallet_core.decode_insert_privacy_preserving_transaction_results( - tx, + &tx, &acc_decode_data, )?; } 
@@ -1140,7 +1140,7 @@ impl WalletSubcommand for TokenProgramSubcommandShielded { Ok(SubcommandReturnValue::PrivacyPreservingTransfer { tx_hash }) } - TokenProgramSubcommandShielded::MintTokenShieldedOwned { + Self::MintTokenShieldedOwned { definition_account_id, holder_account_id, amount, @@ -1165,7 +1165,7 @@ impl WalletSubcommand for TokenProgramSubcommandShielded { let acc_decode_data = vec![Decode(secret_holder, holder_account_id)]; wallet_core.decode_insert_privacy_preserving_transaction_results( - tx, + &tx, &acc_decode_data, )?; } @@ -1174,7 +1174,7 @@ impl WalletSubcommand for TokenProgramSubcommandShielded { Ok(SubcommandReturnValue::PrivacyPreservingTransfer { tx_hash }) } - TokenProgramSubcommandShielded::MintTokenShieldedForeign { + Self::MintTokenShieldedForeign { definition_account_id, holder_npk, holder_vpk, @@ -1188,7 +1188,7 @@ impl WalletSubcommand for TokenProgramSubcommandShielded { let holder_npk = nssa_core::NullifierPublicKey(holder_npk); let holder_vpk_res = hex::decode(holder_vpk)?; - let mut holder_vpk = [0u8; 33]; + let mut holder_vpk = [0_u8; 33]; holder_vpk.copy_from_slice(&holder_vpk_res); let holder_vpk = nssa_core::encryption::shared_key_derivation::Secp256k1Point( holder_vpk.to_vec(), @@ -1226,7 +1226,7 @@ impl WalletSubcommand for CreateNewTokenProgramSubcommand { wallet_core: &mut WalletCore, ) -> Result { match self { - CreateNewTokenProgramSubcommand::NewPrivateDefPrivateSupp { + Self::NewPrivateDefPrivateSupp { definition_account_id, supply_account_id, name, @@ -1256,7 +1256,7 @@ impl WalletSubcommand for CreateNewTokenProgramSubcommand { ]; wallet_core.decode_insert_privacy_preserving_transaction_results( - tx, + &tx, &acc_decode_data, )?; } @@ -1265,7 +1265,7 @@ impl WalletSubcommand for CreateNewTokenProgramSubcommand { Ok(SubcommandReturnValue::PrivacyPreservingTransfer { tx_hash }) } - CreateNewTokenProgramSubcommand::NewPrivateDefPublicSupp { + Self::NewPrivateDefPublicSupp { definition_account_id, supply_account_id, 
name, @@ -1292,7 +1292,7 @@ impl WalletSubcommand for CreateNewTokenProgramSubcommand { let acc_decode_data = vec![Decode(secret_definition, definition_account_id)]; wallet_core.decode_insert_privacy_preserving_transaction_results( - tx, + &tx, &acc_decode_data, )?; } @@ -1301,7 +1301,7 @@ impl WalletSubcommand for CreateNewTokenProgramSubcommand { Ok(SubcommandReturnValue::PrivacyPreservingTransfer { tx_hash }) } - CreateNewTokenProgramSubcommand::NewPublicDefPrivateSupp { + Self::NewPublicDefPrivateSupp { definition_account_id, supply_account_id, name, @@ -1328,7 +1328,7 @@ impl WalletSubcommand for CreateNewTokenProgramSubcommand { let acc_decode_data = vec![Decode(secret_supply, supply_account_id)]; wallet_core.decode_insert_privacy_preserving_transaction_results( - tx, + &tx, &acc_decode_data, )?; } @@ -1337,7 +1337,7 @@ impl WalletSubcommand for CreateNewTokenProgramSubcommand { Ok(SubcommandReturnValue::PrivacyPreservingTransfer { tx_hash }) } - CreateNewTokenProgramSubcommand::NewPublicDefPublicSupp { + Self::NewPublicDefPublicSupp { definition_account_id, supply_account_id, name, @@ -1363,19 +1363,19 @@ impl WalletSubcommand for TokenProgramSubcommand { wallet_core: &mut WalletCore, ) -> Result { match self { - TokenProgramSubcommand::Create(creation_subcommand) => { + Self::Create(creation_subcommand) => { creation_subcommand.handle_subcommand(wallet_core).await } - TokenProgramSubcommand::Private(private_subcommand) => { + Self::Private(private_subcommand) => { private_subcommand.handle_subcommand(wallet_core).await } - TokenProgramSubcommand::Public(public_subcommand) => { + Self::Public(public_subcommand) => { public_subcommand.handle_subcommand(wallet_core).await } - TokenProgramSubcommand::Deshielded(deshielded_subcommand) => { + Self::Deshielded(deshielded_subcommand) => { deshielded_subcommand.handle_subcommand(wallet_core).await } - TokenProgramSubcommand::Shielded(shielded_subcommand) => { + Self::Shielded(shielded_subcommand) => { 
shielded_subcommand.handle_subcommand(wallet_core).await } } diff --git a/wallet/src/config.rs b/wallet/src/config.rs index a1b7bfe2..7419762c 100644 --- a/wallet/src/config.rs +++ b/wallet/src/config.rs @@ -48,21 +48,19 @@ pub struct PersistentAccountDataPrivate { // Big difference in enum variants sizes // however it is improbable, that we will have that much accounts, that it will substantialy affect // memory -#[allow(clippy::large_enum_variant)] #[derive(Debug, Clone, Serialize, Deserialize)] pub enum InitialAccountData { Public(InitialAccountDataPublic), - Private(InitialAccountDataPrivate), + Private(Box), } // Big difference in enum variants sizes // however it is improbable, that we will have that much accounts, that it will substantialy affect // memory -#[allow(clippy::large_enum_variant)] #[derive(Debug, Clone, Serialize, Deserialize)] pub enum PersistentAccountData { Public(PersistentAccountDataPublic), - Private(PersistentAccountDataPrivate), + Private(Box), Preconfigured(InitialAccountData), } @@ -71,7 +69,8 @@ pub enum PersistentAccountData { pub struct Label(String); impl Label { - pub fn new(label: String) -> Self { + #[must_use] + pub const fn new(label: String) -> Self { Self(label) } } @@ -87,13 +86,17 @@ pub struct PersistentStorage { pub accounts: Vec, pub last_synced_block: u64, /// Account labels keyed by account ID string (e.g., - /// "2rnKprXqWGWJTkDZKsQbFXa4ctKRbapsdoTKQFnaVGG8") + /// "2rnKprXqWGWJTkDZKsQbFXa4ctKRbapsdoTKQFnaVGG8"). 
#[serde(default)] pub labels: HashMap, } impl PersistentStorage { pub fn from_path(path: &Path) -> Result { + #[expect( + clippy::wildcard_enum_match_arm, + reason = "We want to provide a specific error message for not found case" + )] match std::fs::File::open(path) { Ok(file) => { let storage_content = BufReader::new(file); @@ -112,6 +115,7 @@ impl PersistentStorage { } impl InitialAccountData { + #[must_use] pub fn account_id(&self) -> nssa::AccountId { match &self { Self::Public(acc) => acc.account_id, @@ -121,6 +125,7 @@ impl InitialAccountData { } impl PersistentAccountData { + #[must_use] pub fn account_id(&self) -> nssa::AccountId { match &self { Self::Public(acc) => acc.account_id, @@ -138,7 +143,7 @@ impl From for InitialAccountData { impl From for InitialAccountData { fn from(value: InitialAccountDataPrivate) -> Self { - Self::Private(value) + Self::Private(Box::new(value)) } } @@ -150,7 +155,7 @@ impl From for PersistentAccountData { impl From for PersistentAccountData { fn from(value: PersistentAccountDataPrivate) -> Self { - Self::Private(value) + Self::Private(Box::new(value)) } } @@ -162,42 +167,42 @@ impl From for PersistentAccountData { #[derive(Debug, Clone, Serialize, Deserialize)] pub struct GasConfig { - /// Gas spent per deploying one byte of data + /// Gas spent per deploying one byte of data. pub gas_fee_per_byte_deploy: u64, - /// Gas spent per reading one byte of data in VM + /// Gas spent per reading one byte of data in VM. pub gas_fee_per_input_buffer_runtime: u64, - /// Gas spent per one byte of contract data in runtime + /// Gas spent per one byte of contract data in runtime. pub gas_fee_per_byte_runtime: u64, - /// Cost of one gas of runtime in public balance + /// Cost of one gas of runtime in public balance. pub gas_cost_runtime: u64, - /// Cost of one gas of deployment in public balance + /// Cost of one gas of deployment in public balance. pub gas_cost_deploy: u64, - /// Gas limit for deployment + /// Gas limit for deployment. 
pub gas_limit_deploy: u64, - /// Gas limit for runtime + /// Gas limit for runtime. pub gas_limit_runtime: u64, } #[optfield::optfield(pub WalletConfigOverrides, rewrap, attrs = (derive(Debug, Default, Clone)))] #[derive(Debug, Clone, Serialize, Deserialize)] pub struct WalletConfig { - /// Override rust log (env var logging level) + /// Override rust log (env var logging level). #[serde(skip_serializing_if = "Option::is_none")] pub override_rust_log: Option, - /// Sequencer URL + /// Sequencer URL. pub sequencer_addr: Url, - /// Sequencer polling duration for new blocks + /// Sequencer polling duration for new blocks. #[serde(with = "humantime_serde")] pub seq_poll_timeout: Duration, - /// Sequencer polling max number of blocks to find transaction + /// Sequencer polling max number of blocks to find transaction. pub seq_tx_poll_max_blocks: usize, - /// Sequencer polling max number error retries + /// Sequencer polling max number error retries. pub seq_poll_max_retries: u64, - /// Max amount of blocks to poll in one request + /// Max amount of blocks to poll in one request. pub seq_block_poll_max_amount: u64, - /// Initial accounts for wallet + /// Initial accounts for wallet. pub initial_accounts: Vec, - /// Basic authentication credentials + /// Basic authentication credentials. 
#[serde(skip_serializing_if = "Option::is_none")] pub basic_auth: Option, } @@ -692,7 +697,7 @@ impl Default for WalletConfig { } impl WalletConfig { - pub fn from_path_or_initialize_default(config_path: &Path) -> Result { + pub fn from_path_or_initialize_default(config_path: &Path) -> Result { match std::fs::File::open(config_path) { Ok(file) => { let reader = std::io::BufReader::new(file); @@ -703,12 +708,13 @@ impl WalletConfig { let config_home = config_path.parent().ok_or_else(|| { anyhow::anyhow!( - "Could not get parent directory of config file at {config_path:#?}" + "Could not get parent directory of config file at {}", + config_path.display() ) })?; std::fs::create_dir_all(config_home)?; - println!("Created configs dir at path {config_home:#?}"); + println!("Created configs dir at path {}", config_home.display()); let mut file = std::fs::OpenOptions::new() .write(true) @@ -716,7 +722,7 @@ impl WalletConfig { .truncate(true) .open(config_path)?; - let config = WalletConfig::default(); + let config = Self::default(); let default_config_serialized = serde_json::to_vec_pretty(&config).unwrap(); file.write_all(&default_config_serialized)?; @@ -729,7 +735,7 @@ impl WalletConfig { } pub fn apply_overrides(&mut self, overrides: WalletConfigOverrides) { - let WalletConfig { + let Self { override_rust_log, sequencer_addr, seq_poll_timeout, diff --git a/wallet/src/helperfunctions.rs b/wallet/src/helperfunctions.rs index 20c04968..74f7bab3 100644 --- a/wallet/src/helperfunctions.rs +++ b/wallet/src/helperfunctions.rs @@ -1,11 +1,11 @@ -use std::{collections::HashMap, path::PathBuf, str::FromStr}; +use std::{collections::HashMap, path::PathBuf, str::FromStr as _}; -use anyhow::Result; -use base64::{Engine, engine::general_purpose::STANDARD as BASE64}; +use anyhow::{Context as _, Result}; +use base58::ToBase58 as _; use key_protocol::key_protocol_core::NSSAUserData; use nssa::Account; use nssa_core::account::Nonce; -use rand::{RngCore, rngs::OsRng}; +use rand::{RngCore 
as _, rngs::OsRng}; use serde::Serialize; use crate::{ @@ -16,6 +16,39 @@ use crate::{ }, }; +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum AccountPrivacyKind { + Public, + Private, +} + +/// Human-readable representation of an account. +#[derive(Serialize)] +pub(crate) struct HumanReadableAccount { + balance: u128, + program_owner: String, + data: String, + nonce: u128, +} + +impl From for HumanReadableAccount { + fn from(account: Account) -> Self { + let program_owner = account + .program_owner + .iter() + .flat_map(|n| n.to_le_bytes()) + .collect::>() + .to_base58(); + let data = hex::encode(account.data); + Self { + balance: account.balance, + program_owner, + data, + nonce: account.nonce.0, + } + } +} + /// Get home dir for wallet. Env var `NSSA_WALLET_HOME_DIR` must be set before execution to succeed. fn get_home_nssa_var() -> Result { Ok(PathBuf::from_str(&std::env::var(HOME_DIR_ENV_VAR)?)?) @@ -25,26 +58,22 @@ fn get_home_nssa_var() -> Result { fn get_home_default_path() -> Result { std::env::home_dir() .map(|path| path.join(".nssa").join("wallet")) - .ok_or(anyhow::anyhow!("Failed to get HOME")) + .context("Failed to get HOME") } /// Get home dir for wallet. pub fn get_home() -> Result { - if let Ok(home) = get_home_nssa_var() { - Ok(home) - } else { - get_home_default_path() - } + get_home_nssa_var().or_else(|_| get_home_default_path()) } -/// Fetch config path from default home +/// Fetch config path from default home. pub fn fetch_config_path() -> Result { let home = get_home()?; let config_path = home.join("wallet_config.json"); Ok(config_path) } -/// Fetch path to data storage from default home +/// Fetch path to data storage from default home. /// /// File must be created through setup beforehand. pub fn fetch_persistent_storage_path() -> Result { @@ -53,7 +82,8 @@ pub fn fetch_persistent_storage_path() -> Result { Ok(accs_path) } -/// Produces data for storage +/// Produces data for storage. 
+#[must_use] pub fn produce_data_for_storage( user_data: &NSSAUserData, last_synced_block: u64, @@ -94,18 +124,18 @@ pub fn produce_data_for_storage( pub_sign_key: key.clone(), }) .into(), - ) + ); } for (account_id, (key_chain, account)) in &user_data.default_user_private_accounts { vec_for_storage.push( - InitialAccountData::Private(InitialAccountDataPrivate { + InitialAccountData::Private(Box::new(InitialAccountDataPrivate { account_id: *account_id, account: account.clone(), key_chain: key_chain.clone(), - }) + })) .into(), - ) + ); } PersistentStorage { @@ -115,16 +145,16 @@ pub fn produce_data_for_storage( } } +#[expect(dead_code, reason = "Maybe used later")] pub(crate) fn produce_random_nonces(size: usize) -> Vec { let mut result = vec![[0; 16]; size]; - result.iter_mut().for_each(|bytes| OsRng.fill_bytes(bytes)); - result.into_iter().map(Nonce::from_le_bytes).collect() -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub enum AccountPrivacyKind { - Public, - Private, + for bytes in &mut result { + OsRng.fill_bytes(bytes); + } + result + .into_iter() + .map(|x| Nonce(u128::from_le_bytes(x))) + .collect() } pub(crate) fn parse_addr_with_privacy_prefix( @@ -132,12 +162,12 @@ pub(crate) fn parse_addr_with_privacy_prefix( ) -> Result<(String, AccountPrivacyKind)> { if account_base58.starts_with("Public/") { Ok(( - account_base58.strip_prefix("Public/").unwrap().to_string(), + account_base58.strip_prefix("Public/").unwrap().to_owned(), AccountPrivacyKind::Public, )) } else if account_base58.starts_with("Private/") { Ok(( - account_base58.strip_prefix("Private/").unwrap().to_string(), + account_base58.strip_prefix("Private/").unwrap().to_owned(), AccountPrivacyKind::Private, )) } else { @@ -145,49 +175,12 @@ pub(crate) fn parse_addr_with_privacy_prefix( } } -/// Human-readable representation of an account. 
-#[derive(Serialize)] -pub(crate) struct HumanReadableAccount { - balance: u128, - program_owner_b64: String, - data_b64: String, - nonce: u128, -} - -impl From for HumanReadableAccount { - fn from(account: Account) -> Self { - let program_owner_b64 = BASE64.encode(bytemuck::cast_slice(&account.program_owner)); - let data_b64 = BASE64.encode(account.data); - Self { - balance: account.balance, - program_owner_b64, - data_b64, - nonce: account.nonce, - } - } -} - #[cfg(test)] mod tests { use super::*; #[test] - fn test_get_home_get_env_var() { - unsafe { - std::env::set_var(HOME_DIR_ENV_VAR, "/path/to/configs"); - } - - let home = get_home().unwrap(); - - assert_eq!(PathBuf::from_str("/path/to/configs").unwrap(), home); - - unsafe { - std::env::remove_var(HOME_DIR_ENV_VAR); - } - } - - #[test] - fn test_addr_parse_with_privacy() { + fn addr_parse_with_privacy() { let addr_base58 = "Public/BLgCRDXYdQPMMWVHYRFGQZbgeHx9frkipa8GtpG2Syqy"; let (_, addr_kind) = parse_addr_with_privacy_prefix(addr_base58).unwrap(); diff --git a/wallet/src/lib.rs b/wallet/src/lib.rs index 8b78db57..a7a8daca 100644 --- a/wallet/src/lib.rs +++ b/wallet/src/lib.rs @@ -1,7 +1,17 @@ +#![expect( + clippy::print_stdout, + clippy::print_stderr, + reason = "This is a CLI application, printing to stdout and stderr is expected and convenient" +)] +#![expect( + clippy::shadow_unrelated, + reason = "Most of the shadows come from args parsing which is ok" +)] + use std::{path::PathBuf, sync::Arc}; -use anyhow::{Context, Result}; -use base64::{Engine, engine::general_purpose::STANDARD as BASE64}; +use anyhow::{Context as _, Result}; +use base64::{Engine as _, engine::general_purpose::STANDARD as BASE64}; use chain_storage::WalletChainStore; use common::{ HashType, error::ExecutionFailureKind, rpc_primitives::requests::SendTxResponse, @@ -18,16 +28,14 @@ use nssa::{ }; use nssa_core::{Commitment, MembershipProof, SharedSecretKey, program::InstructionData}; pub use 
privacy_preserving_tx::PrivacyPreservingAccount; -use tokio::io::AsyncWriteExt; +use tokio::io::AsyncWriteExt as _; use crate::{ config::{PersistentStorage, WalletConfigOverrides}, - helperfunctions::{produce_data_for_storage, produce_random_nonces}, + helperfunctions::produce_data_for_storage, poller::TxPoller, }; -pub const HOME_DIR_ENV_VAR: &str = "NSSA_WALLET_HOME_DIR"; - pub mod chain_storage; pub mod cli; pub mod config; @@ -36,18 +44,20 @@ pub mod poller; mod privacy_preserving_tx; pub mod program_facades; +pub const HOME_DIR_ENV_VAR: &str = "NSSA_WALLET_HOME_DIR"; + pub enum AccDecodeData { Skip, Decode(nssa_core::SharedSecretKey, AccountId), } +#[expect(clippy::partial_pub_fields, reason = "TODO: make all fields private")] pub struct WalletCore { config_path: PathBuf, config_overrides: Option, storage: WalletChainStore, storage_path: PathBuf, poller: TxPoller, - // TODO: Make all fields private pub sequencer_client: Arc, pub last_synced_block: u64, } @@ -70,8 +80,12 @@ impl WalletCore { accounts: persistent_accounts, last_synced_block, labels, - } = PersistentStorage::from_path(&storage_path) - .with_context(|| format!("Failed to read persistent storage at {storage_path:#?}"))?; + } = PersistentStorage::from_path(&storage_path).with_context(|| { + format!( + "Failed to read persistent storage at {}", + storage_path.display() + ) + })?; Self::new( config_path, @@ -104,8 +118,13 @@ impl WalletCore { storage_ctor: impl FnOnce(WalletConfig) -> Result, last_synced_block: u64, ) -> Result { - let mut config = WalletConfig::from_path_or_initialize_default(&config_path) - .with_context(|| format!("Failed to deserialize wallet config at {config_path:#?}"))?; + let mut config = + WalletConfig::from_path_or_initialize_default(&config_path).with_context(|| { + format!( + "Failed to deserialize wallet config at {}", + config_path.display() + ) + })?; if let Some(config_overrides) = config_overrides.clone() { config.apply_overrides(config_overrides); } @@ -114,7 +133,7 
@@ impl WalletCore { config.sequencer_addr.clone(), config.basic_auth.clone(), )?); - let tx_poller = TxPoller::new(config.clone(), Arc::clone(&sequencer_client)); + let tx_poller = TxPoller::new(&config, Arc::clone(&sequencer_client)); let storage = storage_ctor(config)?; @@ -129,23 +148,25 @@ impl WalletCore { }) } - /// Get configuration with applied overrides - pub fn config(&self) -> &WalletConfig { + /// Get configuration with applied overrides. + #[must_use] + pub const fn config(&self) -> &WalletConfig { &self.storage.wallet_config } - /// Get storage - pub fn storage(&self) -> &WalletChainStore { + /// Get storage. + #[must_use] + pub const fn storage(&self) -> &WalletChainStore { &self.storage } - /// Reset storage + /// Reset storage. pub fn reset_storage(&mut self, password: String) -> Result<()> { self.storage = WalletChainStore::new_storage(self.storage.wallet_config.clone(), password)?; Ok(()) } - /// Store persistent data at home + /// Store persistent data at home. pub async fn store_persistent_data(&self) -> Result<()> { let data = produce_data_for_storage( &self.storage.user_data, @@ -159,12 +180,15 @@ impl WalletCore { // Ensure data is flushed to disk before returning to prevent race conditions storage_file.sync_all().await?; - println!("Stored persistent accounts at {:#?}", self.storage_path); + println!( + "Stored persistent accounts at {}", + self.storage_path.display() + ); Ok(()) } - /// Store persistent data at home + /// Store persistent data at home. 
pub async fn store_config_changes(&self) -> Result<()> { let config = serde_json::to_vec_pretty(&self.storage.wallet_config)?; @@ -173,7 +197,7 @@ impl WalletCore { // Ensure data is flushed to disk before returning to prevent race conditions config_file.sync_all().await?; - info!("Stored data at {:#?}", self.config_path); + info!("Stored data at {}", self.config_path.display()); Ok(()) } @@ -196,7 +220,7 @@ impl WalletCore { .generate_new_privacy_preserving_transaction_key_chain(chain_index) } - /// Get account balance + /// Get account balance. pub async fn get_account_balance(&self, acc: AccountId) -> Result { Ok(self .sequencer_client @@ -205,7 +229,7 @@ impl WalletCore { .balance) } - /// Get accounts nonces + /// Get accounts nonces. pub async fn get_accounts_nonces(&self, accs: Vec) -> Result> { Ok(self .sequencer_client @@ -214,12 +238,13 @@ impl WalletCore { .nonces) } - /// Get account + /// Get account. pub async fn get_account_public(&self, account_id: AccountId) -> Result { let response = self.sequencer_client.get_account(account_id).await?; Ok(response.account) } + #[must_use] pub fn get_account_public_signing_key( &self, account_id: AccountId, @@ -229,6 +254,7 @@ impl WalletCore { .get_pub_account_signing_key(account_id) } + #[must_use] pub fn get_account_private(&self, account_id: AccountId) -> Option { self.storage .user_data @@ -236,12 +262,13 @@ impl WalletCore { .map(|value| value.1.clone()) } + #[must_use] pub fn get_private_account_commitment(&self, account_id: AccountId) -> Option { let (keys, account) = self.storage.user_data.get_private_account(account_id)?; Some(Commitment::new(&keys.nullifier_public_key, account)) } - /// Poll transactions + /// Poll transactions. 
pub async fn poll_native_token_transfer(&self, hash: HashType) -> Result { let transaction_encoded = self.poller.poll_tx(hash).await?; let tx_base64_decode = BASE64.decode(transaction_encoded)?; @@ -266,7 +293,7 @@ impl WalletCore { pub fn decode_insert_privacy_preserving_transaction_results( &mut self, - tx: nssa::privacy_preserving_transaction::PrivacyPreservingTransaction, + tx: &nssa::privacy_preserving_transaction::PrivacyPreservingTransaction, acc_decode_mask: &[AccDecodeData], ) -> Result<()> { for (output_index, acc_decode_data) in acc_decode_mask.iter().enumerate() { @@ -279,7 +306,9 @@ impl WalletCore { &acc_ead.ciphertext, secret, &acc_comm, - output_index as u32, + output_index + .try_into() + .expect("Output index is expected to fit in u32"), ) .unwrap(); @@ -296,14 +325,17 @@ impl WalletCore { Ok(()) } + // TODO: handle large Err-variant properly + #[expect( + clippy::result_large_err, + reason = "ExecutionFailureKind is large, tracked by TODO" + )] pub async fn send_privacy_preserving_tx( &self, accounts: Vec, instruction_data: InstructionData, program: &ProgramWithDependencies, ) -> Result<(SendTxResponse, Vec), ExecutionFailureKind> { - // TODO: handle large Err-variant properly - #[allow(clippy::result_large_err)] self.send_privacy_preserving_tx_with_pre_check(accounts, instruction_data, program, |_| { Ok(()) }) @@ -332,7 +364,6 @@ impl WalletCore { pre_states, instruction_data, acc_manager.visibility_mask().to_vec(), - produce_random_nonces(private_account_keys.len()), private_account_keys .iter() .map(|keys| (keys.npk.clone(), keys.ssk)) @@ -382,12 +413,17 @@ impl WalletCore { } let before_polling = std::time::Instant::now(); - let num_of_blocks = block_id - self.last_synced_block; + let num_of_blocks = block_id.saturating_sub(self.last_synced_block); + if num_of_blocks == 0 { + return Ok(()); + } + println!("Syncing to block {block_id}. 
Blocks to sync: {num_of_blocks}"); let poller = self.poller.clone(); - let mut blocks = - std::pin::pin!(poller.poll_block_range(self.last_synced_block + 1..=block_id)); + let mut blocks = std::pin::pin!( + poller.poll_block_range(self.last_synced_block.saturating_add(1)..=block_id) + ); let bar = indicatif::ProgressBar::new(num_of_blocks); while let Some(block) = blocks.try_next().await? { @@ -433,8 +469,8 @@ impl WalletCore { let affected_accounts = private_account_key_chains .flat_map(|(acc_account_id, key_chain, index)| { let view_tag = EncryptedAccountData::compute_view_tag( - key_chain.nullifier_public_key.clone(), - key_chain.viewing_public_key.clone(), + &key_chain.nullifer_public_key, + &key_chain.viewing_public_key, ); tx.message() @@ -445,14 +481,16 @@ impl WalletCore { .filter_map(|(ciph_id, encrypted_data)| { let ciphertext = &encrypted_data.ciphertext; let commitment = &tx.message.new_commitments[ciph_id]; - let shared_secret = key_chain - .calculate_shared_secret_receiver(encrypted_data.epk.clone(), index); + let shared_secret = + key_chain.calculate_shared_secret_receiver(&encrypted_data.epk, index); nssa_core::EncryptionScheme::decrypt( ciphertext, &shared_secret, commitment, - ciph_id as u32, + ciph_id + .try_into() + .expect("Ciphertext ID is expected to fit in u32"), ) }) .map(move |res_acc| (acc_account_id, res_acc)) @@ -469,15 +507,18 @@ impl WalletCore { } } - pub fn config_path(&self) -> &PathBuf { + #[must_use] + pub const fn config_path(&self) -> &PathBuf { &self.config_path } - pub fn storage_path(&self) -> &PathBuf { + #[must_use] + pub const fn storage_path(&self) -> &PathBuf { &self.storage_path } - pub fn config_overrides(&self) -> &Option { + #[must_use] + pub const fn config_overrides(&self) -> &Option { &self.config_overrides } } diff --git a/wallet/src/main.rs b/wallet/src/main.rs index 045b1b83..4704675b 100644 --- a/wallet/src/main.rs +++ b/wallet/src/main.rs @@ -1,3 +1,8 @@ +#![expect( + clippy::print_stdout, + reason = "This 
is a CLI application, printing to stdout is expected and convenient" +)] + use anyhow::{Context as _, Result}; use clap::{CommandFactory as _, Parser as _}; use wallet::{ @@ -33,7 +38,9 @@ async fn main() -> Result<()> { }; if let Some(command) = command { - let mut wallet = if !storage_path.exists() { + let mut wallet = if storage_path.exists() { + WalletCore::new_update_chain(config_path, storage_path, Some(config_overrides))? + } else { // TODO: Maybe move to `WalletCore::from_env()` or similar? println!("Persistent storage not found, need to execute setup"); @@ -48,8 +55,6 @@ async fn main() -> Result<()> { wallet.store_persistent_data().await?; wallet - } else { - WalletCore::new_update_chain(config_path, storage_path, Some(config_overrides))? }; let _output = execute_subcommand(&mut wallet, command).await?; Ok(()) diff --git a/wallet/src/poller.rs b/wallet/src/poller.rs index c037a36a..113f42ee 100644 --- a/wallet/src/poller.rs +++ b/wallet/src/poller.rs @@ -7,7 +7,7 @@ use log::{info, warn}; use crate::config::WalletConfig; #[derive(Clone)] -/// Helperstruct to poll transactions +/// Helperstruct to poll transactions. 
pub struct TxPoller { polling_max_blocks_to_query: usize, polling_max_error_attempts: u64, @@ -17,13 +17,14 @@ pub struct TxPoller { } impl TxPoller { - pub fn new(config: WalletConfig, client: Arc) -> Self { + #[must_use] + pub const fn new(config: &WalletConfig, client: Arc) -> Self { Self { polling_delay: config.seq_poll_timeout, polling_max_blocks_to_query: config.seq_tx_poll_max_blocks, polling_max_error_attempts: config.seq_poll_max_retries, block_poll_max_amount: config.seq_block_poll_max_amount, - client: client.clone(), + client, } } @@ -35,7 +36,7 @@ impl TxPoller { for poll_id in 1..max_blocks_to_query { info!("Poll {poll_id}"); - let mut try_error_counter = 0; + let mut try_error_counter = 0_u64; let tx_obj = loop { let tx_obj = self @@ -43,14 +44,15 @@ impl TxPoller { .get_transaction_by_hash(tx_hash) .await .inspect_err(|err| { - warn!("Failed to get transaction by hash {tx_hash} with error: {err:#?}") + warn!("Failed to get transaction by hash {tx_hash} with error: {err:#?}"); }); if let Ok(tx_obj) = tx_obj { break tx_obj; - } else { - try_error_counter += 1; } + try_error_counter = try_error_counter + .checked_add(1) + .expect("We check error counter in this loop"); if try_error_counter > self.polling_max_error_attempts { anyhow::bail!("Number of retries exceeded"); @@ -75,7 +77,7 @@ impl TxPoller { let mut chunk_start = *range.start(); loop { - let chunk_end = std::cmp::min(chunk_start + self.block_poll_max_amount - 1, *range.end()); + let chunk_end = std::cmp::min(chunk_start.saturating_add(self.block_poll_max_amount).saturating_sub(1), *range.end()); let blocks = self.client.get_block_range(chunk_start..=chunk_end).await?.blocks; for block in blocks { @@ -83,7 +85,7 @@ impl TxPoller { yield Ok(block); } - chunk_start = chunk_end + 1; + chunk_start = chunk_end.saturating_add(1); if chunk_start > *range.end() { break; } diff --git a/wallet/src/privacy_preserving_tx.rs b/wallet/src/privacy_preserving_tx.rs index 43928cb3..0aaffa9a 100644 --- 
a/wallet/src/privacy_preserving_tx.rs +++ b/wallet/src/privacy_preserving_tx.rs @@ -21,11 +21,13 @@ pub enum PrivacyPreservingAccount { } impl PrivacyPreservingAccount { - pub fn is_public(&self) -> bool { + #[must_use] + pub const fn is_public(&self) -> bool { matches!(&self, Self::Public(_)) } - pub fn is_private(&self) -> bool { + #[must_use] + pub const fn is_private(&self) -> bool { matches!( &self, Self::PrivateOwned(_) | Self::PrivateForeign { npk: _, vpk: _ } @@ -67,7 +69,7 @@ impl AccountManager { let acc = wallet .get_account_public(account_id) .await - .map_err(|_| ExecutionFailureKind::KeyNotFoundError)?; + .map_err(ExecutionFailureKind::SequencerError)?; let sk = wallet.get_account_public_signing_key(account_id).cloned(); let account = AccountWithMetadata::new(acc.clone(), sk.is_some(), account_id); @@ -124,7 +126,7 @@ impl AccountManager { .iter() .filter_map(|state| match state { State::Public { account, sk } => sk.as_ref().map(|_| account.account.nonce), - _ => None, + State::Private(_) => None, }) .collect() } @@ -143,7 +145,7 @@ impl AccountManager { epk: eph_holder.generate_ephemeral_public_key(), }) } - _ => None, + State::Public { .. } => None, }) .collect() } @@ -153,7 +155,7 @@ impl AccountManager { .iter() .filter_map(|state| match state { State::Private(pre) => pre.nsk, - _ => None, + State::Public { .. } => None, }) .collect() } @@ -163,7 +165,7 @@ impl AccountManager { .iter() .filter_map(|state| match state { State::Private(pre) => Some(pre.proof.clone()), - _ => None, + State::Public { .. } => None, }) .collect() } @@ -173,7 +175,7 @@ impl AccountManager { .iter() .filter_map(|state| match state { State::Public { account, .. } => Some(account.account_id), - _ => None, + State::Private(_) => None, }) .collect() } @@ -183,7 +185,7 @@ impl AccountManager { .iter() .filter_map(|state| match state { State::Public { sk, .. 
} => sk.as_ref(), - _ => None, + State::Private(_) => None, }) .collect() } diff --git a/wallet/src/program_facades/amm.rs b/wallet/src/program_facades/amm.rs index 0722a769..19a51f29 100644 --- a/wallet/src/program_facades/amm.rs +++ b/wallet/src/program_facades/amm.rs @@ -4,7 +4,7 @@ use nssa::{AccountId, program::Program}; use token_core::TokenHolding; use crate::WalletCore; -pub struct Amm<'w>(pub &'w WalletCore); +pub struct Amm<'wallet>(pub &'wallet WalletCore); impl Amm<'_> { pub async fn send_new_definition( @@ -27,18 +27,18 @@ impl Amm<'_> { .0 .get_account_public(user_holding_a) .await - .map_err(|_| ExecutionFailureKind::SequencerError)?; + .map_err(ExecutionFailureKind::SequencerError)?; let user_b_acc = self .0 .get_account_public(user_holding_b) .await - .map_err(|_| ExecutionFailureKind::SequencerError)?; + .map_err(ExecutionFailureKind::SequencerError)?; let definition_token_a_id = TokenHolding::try_from(&user_a_acc.data) - .map_err(|_| ExecutionFailureKind::AccountDataError(user_holding_a))? + .map_err(|_err| ExecutionFailureKind::AccountDataError(user_holding_a))? .definition_id(); let definition_token_b_id = TokenHolding::try_from(&user_b_acc.data) - .map_err(|_| ExecutionFailureKind::AccountDataError(user_holding_b))? + .map_err(|_err| ExecutionFailureKind::AccountDataError(user_holding_b))? 
.definition_id(); let amm_pool = @@ -61,7 +61,7 @@ impl Amm<'_> { .0 .get_accounts_nonces(vec![user_holding_a, user_holding_b]) .await - .map_err(|_| ExecutionFailureKind::SequencerError)?; + .map_err(ExecutionFailureKind::SequencerError)?; let signing_key_a = self .0 @@ -80,7 +80,10 @@ impl Amm<'_> { let message = nssa::public_transaction::Message::try_new( program.id(), account_ids, - nonces, + nonces + .iter() + .map(|x| nssa_core::account::Nonce(*x)) + .collect(), instruction, ) .unwrap(); @@ -115,18 +118,18 @@ impl Amm<'_> { .0 .get_account_public(user_holding_a) .await - .map_err(|_| ExecutionFailureKind::SequencerError)?; + .map_err(ExecutionFailureKind::SequencerError)?; let user_b_acc = self .0 .get_account_public(user_holding_b) .await - .map_err(|_| ExecutionFailureKind::SequencerError)?; + .map_err(ExecutionFailureKind::SequencerError)?; let definition_token_a_id = TokenHolding::try_from(&user_a_acc.data) - .map_err(|_| ExecutionFailureKind::AccountDataError(user_holding_a))? + .map_err(|_err| ExecutionFailureKind::AccountDataError(user_holding_a))? .definition_id(); let definition_token_b_id = TokenHolding::try_from(&user_b_acc.data) - .map_err(|_| ExecutionFailureKind::AccountDataError(user_holding_b))? + .map_err(|_err| ExecutionFailureKind::AccountDataError(user_holding_b))? 
.definition_id(); let amm_pool = @@ -149,17 +152,17 @@ impl Amm<'_> { .0 .get_account_public(user_holding_a) .await - .map_err(|_| ExecutionFailureKind::SequencerError)?; + .map_err(ExecutionFailureKind::SequencerError)?; let token_holder_acc_b = self .0 .get_account_public(user_holding_b) .await - .map_err(|_| ExecutionFailureKind::SequencerError)?; + .map_err(ExecutionFailureKind::SequencerError)?; let token_holder_a = TokenHolding::try_from(&token_holder_acc_a.data) - .map_err(|_| ExecutionFailureKind::AccountDataError(user_holding_a))?; + .map_err(|_err| ExecutionFailureKind::AccountDataError(user_holding_a))?; let token_holder_b = TokenHolding::try_from(&token_holder_acc_b.data) - .map_err(|_| ExecutionFailureKind::AccountDataError(user_holding_b))?; + .map_err(|_err| ExecutionFailureKind::AccountDataError(user_holding_b))?; if token_holder_a.definition_id() == token_definition_id_in { account_id_auth = user_holding_a; @@ -175,7 +178,7 @@ impl Amm<'_> { .0 .get_accounts_nonces(vec![account_id_auth]) .await - .map_err(|_| ExecutionFailureKind::SequencerError)?; + .map_err(ExecutionFailureKind::SequencerError)?; let signing_key = self .0 @@ -187,7 +190,10 @@ impl Amm<'_> { let message = nssa::public_transaction::Message::try_new( program.id(), account_ids, - nonces, + nonces + .iter() + .map(|x| nssa_core::account::Nonce(*x)) + .collect(), instruction, ) .unwrap(); @@ -221,18 +227,18 @@ impl Amm<'_> { .0 .get_account_public(user_holding_a) .await - .map_err(|_| ExecutionFailureKind::SequencerError)?; + .map_err(ExecutionFailureKind::SequencerError)?; let user_b_acc = self .0 .get_account_public(user_holding_b) .await - .map_err(|_| ExecutionFailureKind::SequencerError)?; + .map_err(ExecutionFailureKind::SequencerError)?; let definition_token_a_id = TokenHolding::try_from(&user_a_acc.data) - .map_err(|_| ExecutionFailureKind::AccountDataError(user_holding_a))? + .map_err(|_err| ExecutionFailureKind::AccountDataError(user_holding_a))? 
.definition_id(); let definition_token_b_id = TokenHolding::try_from(&user_b_acc.data) - .map_err(|_| ExecutionFailureKind::AccountDataError(user_holding_b))? + .map_err(|_err| ExecutionFailureKind::AccountDataError(user_holding_b))? .definition_id(); let amm_pool = @@ -255,7 +261,7 @@ impl Amm<'_> { .0 .get_accounts_nonces(vec![user_holding_a, user_holding_b]) .await - .map_err(|_| ExecutionFailureKind::SequencerError)?; + .map_err(ExecutionFailureKind::SequencerError)?; let signing_key_a = self .0 @@ -274,7 +280,10 @@ impl Amm<'_> { let message = nssa::public_transaction::Message::try_new( program.id(), account_ids, - nonces, + nonces + .iter() + .map(|x| nssa_core::account::Nonce(*x)) + .collect(), instruction, ) .unwrap(); @@ -310,18 +319,18 @@ impl Amm<'_> { .0 .get_account_public(user_holding_a) .await - .map_err(|_| ExecutionFailureKind::SequencerError)?; + .map_err(ExecutionFailureKind::SequencerError)?; let user_b_acc = self .0 .get_account_public(user_holding_b) .await - .map_err(|_| ExecutionFailureKind::SequencerError)?; + .map_err(ExecutionFailureKind::SequencerError)?; let definition_token_a_id = TokenHolding::try_from(&user_a_acc.data) - .map_err(|_| ExecutionFailureKind::AccountDataError(user_holding_a))? + .map_err(|_err| ExecutionFailureKind::AccountDataError(user_holding_a))? .definition_id(); let definition_token_b_id = TokenHolding::try_from(&user_b_acc.data) - .map_err(|_| ExecutionFailureKind::AccountDataError(user_holding_b))? + .map_err(|_err| ExecutionFailureKind::AccountDataError(user_holding_b))? 
.definition_id(); let amm_pool = @@ -344,7 +353,7 @@ impl Amm<'_> { .0 .get_accounts_nonces(vec![user_holding_lp]) .await - .map_err(|_| ExecutionFailureKind::SequencerError)?; + .map_err(ExecutionFailureKind::SequencerError)?; let signing_key_lp = self .0 @@ -356,7 +365,10 @@ impl Amm<'_> { let message = nssa::public_transaction::Message::try_new( program.id(), account_ids, - nonces, + nonces + .iter() + .map(|x| nssa_core::account::Nonce(*x)) + .collect(), instruction, ) .unwrap(); diff --git a/wallet/src/program_facades/native_token_transfer/mod.rs b/wallet/src/program_facades/native_token_transfer/mod.rs index 6d55ac0f..1db864f6 100644 --- a/wallet/src/program_facades/native_token_transfer/mod.rs +++ b/wallet/src/program_facades/native_token_transfer/mod.rs @@ -9,8 +9,17 @@ pub mod private; pub mod public; pub mod shielded; -pub struct NativeTokenTransfer<'w>(pub &'w WalletCore); +#[expect( + clippy::multiple_inherent_impl, + reason = "impl blocks split across multiple files for organization" +)] +pub struct NativeTokenTransfer<'wallet>(pub &'wallet WalletCore); +// TODO: handle large Err-variant properly +#[expect( + clippy::result_large_err, + reason = "ExecutionFailureKind is large, tracked by TODO" +)] fn auth_transfer_preparation( balance_to_move: u128, ) -> ( @@ -22,7 +31,6 @@ fn auth_transfer_preparation( let program = Program::authenticated_transfer_program(); // TODO: handle large Err-variant properly - #[allow(clippy::result_large_err)] let tx_pre_check = move |accounts: &[&Account]| { let from = accounts[0]; if from.balance >= balance_to_move { diff --git a/wallet/src/program_facades/native_token_transfer/public.rs b/wallet/src/program_facades/native_token_transfer/public.rs index 3e4815a1..eefaa1fe 100644 --- a/wallet/src/program_facades/native_token_transfer/public.rs +++ b/wallet/src/program_facades/native_token_transfer/public.rs @@ -14,19 +14,31 @@ impl NativeTokenTransfer<'_> { to: AccountId, balance_to_move: u128, ) -> Result { - let 
Ok(balance) = self.0.get_account_balance(from).await else { - return Err(ExecutionFailureKind::SequencerError); - }; + let balance = self + .0 + .get_account_balance(from) + .await + .map_err(ExecutionFailureKind::SequencerError)?; if balance >= balance_to_move { - let Ok(nonces) = self.0.get_accounts_nonces(vec![from]).await else { - return Err(ExecutionFailureKind::SequencerError); - }; + let nonces = self + .0 + .get_accounts_nonces(vec![from]) + .await + .map_err(ExecutionFailureKind::SequencerError)?; let account_ids = vec![from, to]; let program_id = Program::authenticated_transfer_program().id(); - let message = - Message::try_new(program_id, account_ids, nonces, balance_to_move).unwrap(); + let message = Message::try_new( + program_id, + account_ids, + nonces + .iter() + .map(|x| nssa_core::account::Nonce(*x)) + .collect(), + balance_to_move, + ) + .unwrap(); let signing_key = self.0.storage.user_data.get_pub_account_signing_key(from); @@ -48,14 +60,25 @@ impl NativeTokenTransfer<'_> { &self, from: AccountId, ) -> Result { - let Ok(nonces) = self.0.get_accounts_nonces(vec![from]).await else { - return Err(ExecutionFailureKind::SequencerError); - }; + let nonces = self + .0 + .get_accounts_nonces(vec![from]) + .await + .map_err(ExecutionFailureKind::SequencerError)?; let instruction: u128 = 0; let account_ids = vec![from]; let program_id = Program::authenticated_transfer_program().id(); - let message = Message::try_new(program_id, account_ids, nonces, instruction).unwrap(); + let message = Message::try_new( + program_id, + account_ids, + nonces + .iter() + .map(|x| nssa_core::account::Nonce(*x)) + .collect(), + instruction, + ) + .unwrap(); let signing_key = self.0.storage.user_data.get_pub_account_signing_key(from); diff --git a/wallet/src/program_facades/pinata.rs b/wallet/src/program_facades/pinata.rs index 68891ff5..c68fa658 100644 --- a/wallet/src/program_facades/pinata.rs +++ b/wallet/src/program_facades/pinata.rs @@ -4,7 +4,7 @@ use 
nssa_core::{MembershipProof, SharedSecretKey}; use crate::{PrivacyPreservingAccount, WalletCore}; -pub struct Pinata<'w>(pub &'w WalletCore); +pub struct Pinata<'wallet>(pub &'wallet WalletCore); impl Pinata<'_> { pub async fn claim( diff --git a/wallet/src/program_facades/token.rs b/wallet/src/program_facades/token.rs index 9543f593..bdacae37 100644 --- a/wallet/src/program_facades/token.rs +++ b/wallet/src/program_facades/token.rs @@ -5,7 +5,7 @@ use token_core::Instruction; use crate::{PrivacyPreservingAccount, WalletCore}; -pub struct Token<'w>(pub &'w WalletCore); +pub struct Token<'wallet>(pub &'wallet WalletCore); impl Token<'_> { pub async fn send_new_definition( @@ -133,13 +133,18 @@ impl Token<'_> { let instruction = Instruction::Transfer { amount_to_transfer: amount, }; - let Ok(nonces) = self.0.get_accounts_nonces(vec![sender_account_id]).await else { - return Err(ExecutionFailureKind::SequencerError); - }; + let nonces = self + .0 + .get_accounts_nonces(vec![sender_account_id]) + .await + .map_err(ExecutionFailureKind::SequencerError)?; let message = nssa::public_transaction::Message::try_new( program_id, account_ids, - nonces, + nonces + .iter() + .map(|x| nssa_core::account::Nonce(*x)) + .collect(), instruction, ) .unwrap(); @@ -332,13 +337,18 @@ impl Token<'_> { amount_to_burn: amount, }; - let Ok(nonces) = self.0.get_accounts_nonces(vec![holder_account_id]).await else { - return Err(ExecutionFailureKind::SequencerError); - }; + let nonces = self + .0 + .get_accounts_nonces(vec![holder_account_id]) + .await + .map_err(ExecutionFailureKind::SequencerError)?; let message = nssa::public_transaction::Message::try_new( Program::token().id(), account_ids, - nonces, + nonces + .iter() + .map(|x| nssa_core::account::Nonce(*x)) + .collect(), instruction, ) .expect("Instruction should serialize"); @@ -460,17 +470,18 @@ impl Token<'_> { amount_to_mint: amount, }; - let Ok(nonces) = self + let nonces = self .0 .get_accounts_nonces(vec![definition_account_id]) 
.await - else { - return Err(ExecutionFailureKind::SequencerError); - }; + .map_err(ExecutionFailureKind::SequencerError)?; let message = nssa::public_transaction::Message::try_new( Program::token().id(), account_ids, - nonces, + nonces + .iter() + .map(|x| nssa_core::account::Nonce(*x)) + .collect(), instruction, ) .unwrap();