Merge branch 'main' into marvin/issue-258

This commit is contained in:
jonesmarvin8 2026-03-23 18:35:30 -04:00
commit f40400e478
160 changed files with 2293 additions and 4853 deletions

View File

@ -26,11 +26,20 @@ Thumbs.db
ci_scripts/
# Documentation
docs/
*.md
!README.md
# Configs (copy selectively if needed)
# Non-build project files
completions/
configs/
# License
Justfile
clippy.toml
rustfmt.toml
flake.nix
flake.lock
LICENSE
# Docker compose files (not needed inside build)
docker-compose*.yml
**/docker-compose*.yml

View File

@ -12,12 +12,12 @@ jobs:
strategy:
matrix:
include:
- name: sequencer_runner
dockerfile: ./sequencer_runner/Dockerfile
- name: sequencer_service
dockerfile: ./sequencer/service/Dockerfile
build_args: |
STANDALONE=false
- name: sequencer_runner-standalone
dockerfile: ./sequencer_runner/Dockerfile
- name: sequencer_service-standalone
dockerfile: ./sequencer/service/Dockerfile
build_args: |
STANDALONE=true
- name: indexer_service

2
.gitignore vendored
View File

@ -6,7 +6,7 @@ data/
.idea/
.vscode/
rocksdb
sequencer_runner/data/
sequencer/service/data/
storage.json
result
wallet-ffi/wallet_ffi.h

432
Cargo.lock generated
View File

@ -2,229 +2,6 @@
# It is not intended for manual editing.
version = 4
[[package]]
name = "actix"
version = "0.13.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "de7fa236829ba0841304542f7614c42b80fca007455315c45c785ccfa873a85b"
dependencies = [
"actix-macros",
"actix-rt",
"actix_derive",
"bitflags 2.11.0",
"bytes",
"crossbeam-channel",
"futures-core",
"futures-sink",
"futures-task",
"futures-util",
"log",
"once_cell",
"parking_lot",
"pin-project-lite",
"smallvec",
"tokio",
"tokio-util",
]
[[package]]
name = "actix-codec"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f7b0a21988c1bf877cf4759ef5ddaac04c1c9fe808c9142ecb78ba97d97a28a"
dependencies = [
"bitflags 2.11.0",
"bytes",
"futures-core",
"futures-sink",
"memchr",
"pin-project-lite",
"tokio",
"tokio-util",
"tracing",
]
[[package]]
name = "actix-cors"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "daa239b93927be1ff123eebada5a3ff23e89f0124ccb8609234e5103d5a5ae6d"
dependencies = [
"actix-utils",
"actix-web",
"derive_more",
"futures-util",
"log",
"once_cell",
"smallvec",
]
[[package]]
name = "actix-http"
version = "3.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f860ee6746d0c5b682147b2f7f8ef036d4f92fe518251a3a35ffa3650eafdf0e"
dependencies = [
"actix-codec",
"actix-rt",
"actix-service",
"actix-utils",
"bitflags 2.11.0",
"bytes",
"bytestring",
"derive_more",
"encoding_rs",
"foldhash",
"futures-core",
"http 0.2.12",
"httparse",
"httpdate",
"itoa",
"language-tags",
"mime",
"percent-encoding",
"pin-project-lite",
"smallvec",
"tokio",
"tokio-util",
"tracing",
]
[[package]]
name = "actix-macros"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e01ed3140b2f8d422c68afa1ed2e85d996ea619c988ac834d255db32138655cb"
dependencies = [
"quote",
"syn 2.0.117",
]
[[package]]
name = "actix-router"
version = "0.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "14f8c75c51892f18d9c46150c5ac7beb81c95f78c8b83a634d49f4ca32551fe7"
dependencies = [
"bytestring",
"cfg-if",
"http 0.2.12",
"regex-lite",
"serde",
"tracing",
]
[[package]]
name = "actix-rt"
version = "2.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "92589714878ca59a7626ea19734f0e07a6a875197eec751bb5d3f99e64998c63"
dependencies = [
"futures-core",
"tokio",
]
[[package]]
name = "actix-server"
version = "2.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a65064ea4a457eaf07f2fba30b4c695bf43b721790e9530d26cb6f9019ff7502"
dependencies = [
"actix-rt",
"actix-service",
"actix-utils",
"futures-core",
"futures-util",
"mio",
"socket2 0.5.10",
"tokio",
"tracing",
]
[[package]]
name = "actix-service"
version = "2.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9e46f36bf0e5af44bdc4bdb36fbbd421aa98c79a9bce724e1edeb3894e10dc7f"
dependencies = [
"futures-core",
"pin-project-lite",
]
[[package]]
name = "actix-utils"
version = "3.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "88a1dcdff1466e3c2488e1cb5c36a71822750ad43839937f85d2f4d9f8b705d8"
dependencies = [
"local-waker",
"pin-project-lite",
]
[[package]]
name = "actix-web"
version = "4.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ff87453bc3b56e9b2b23c1cc0b1be8797184accf51d2abe0f8a33ec275d316bf"
dependencies = [
"actix-codec",
"actix-http",
"actix-macros",
"actix-router",
"actix-rt",
"actix-server",
"actix-service",
"actix-utils",
"actix-web-codegen",
"bytes",
"bytestring",
"cfg-if",
"derive_more",
"encoding_rs",
"foldhash",
"futures-core",
"futures-util",
"impl-more",
"itoa",
"language-tags",
"log",
"mime",
"once_cell",
"pin-project-lite",
"regex-lite",
"serde",
"serde_json",
"serde_urlencoded",
"smallvec",
"socket2 0.6.3",
"time",
"tracing",
"url",
]
[[package]]
name = "actix-web-codegen"
version = "4.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f591380e2e68490b5dfaf1dd1aa0ebe78d84ba7067078512b4ea6e4492d622b8"
dependencies = [
"actix-router",
"proc-macro2",
"quote",
"syn 2.0.117",
]
[[package]]
name = "actix_derive"
version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b6ac1e58cded18cb28ddc17143c4dea5345b3ad575e14f32f66e4054a56eb271"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.117",
]
[[package]]
name = "addchain"
version = "0.2.1"
@ -1011,7 +788,7 @@ dependencies = [
"axum-core 0.4.5",
"bytes",
"futures-util",
"http 1.4.0",
"http",
"http-body",
"http-body-util",
"hyper",
@ -1045,7 +822,7 @@ dependencies = [
"bytes",
"form_urlencoded",
"futures-util",
"http 1.4.0",
"http",
"http-body",
"http-body-util",
"hyper",
@ -1080,7 +857,7 @@ dependencies = [
"async-trait",
"bytes",
"futures-util",
"http 1.4.0",
"http",
"http-body",
"http-body-util",
"mime",
@ -1099,7 +876,7 @@ checksum = "08c78f31d7b1291f7ee735c1c6780ccde7785daae9a9206026862dab7d8792d1"
dependencies = [
"bytes",
"futures-core",
"http 1.4.0",
"http",
"http-body",
"http-body-util",
"mime",
@ -1313,7 +1090,7 @@ dependencies = [
"futures-util",
"hex",
"home",
"http 1.4.0",
"http",
"http-body-util",
"hyper",
"hyper-named-pipe",
@ -1466,15 +1243,6 @@ dependencies = [
"serde_core",
]
[[package]]
name = "bytestring"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "113b4343b5f6617e7ad401ced8de3cc8b012e73a594347c307b90db3e9271289"
dependencies = [
"bytes",
]
[[package]]
name = "bzip2-sys"
version = "0.1.13+1.0.8"
@ -1732,20 +1500,15 @@ dependencies = [
"anyhow",
"base64 0.22.1",
"borsh",
"bytesize",
"hex",
"log",
"logos-blockchain-common-http-client",
"nssa",
"nssa_core",
"reqwest",
"serde",
"serde_json",
"serde_with",
"sha2",
"thiserror 2.0.18",
"tokio-retry",
"url",
]
[[package]]
@ -1877,15 +1640,6 @@ dependencies = [
"unicode-segmentation",
]
[[package]]
name = "convert_case"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "633458d4ef8c78b72454de2d54fd6ab2e60f9e02be22f3c6104cdc8a4e0fceb9"
dependencies = [
"unicode-segmentation",
]
[[package]]
name = "convert_case"
version = "0.11.0"
@ -1992,15 +1746,6 @@ version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "790eea4361631c5e7d22598ecd5723ff611904e3344ce8720784c93e3d83d40b"
[[package]]
name = "crossbeam-channel"
version = "0.5.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2"
dependencies = [
"crossbeam-utils",
]
[[package]]
name = "crossbeam-deque"
version = "0.8.6"
@ -2297,7 +2042,6 @@ version = "2.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "799a97264921d8623a957f6c3b9011f3b5492f557bbb7a5a19b7fa6d06ba8dcb"
dependencies = [
"convert_case 0.10.0",
"proc-macro2",
"quote",
"rustc_version",
@ -3099,7 +2843,7 @@ dependencies = [
"futures-core",
"futures-sink",
"gloo-utils",
"http 1.4.0",
"http",
"js-sys",
"pin-project",
"serde",
@ -3163,7 +2907,7 @@ dependencies = [
"fnv",
"futures-core",
"futures-sink",
"http 1.4.0",
"http",
"indexmap 2.13.0",
"slab",
"tokio",
@ -3318,17 +3062,6 @@ dependencies = [
"utf8-width",
]
[[package]]
name = "http"
version = "0.2.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1"
dependencies = [
"bytes",
"fnv",
"itoa",
]
[[package]]
name = "http"
version = "1.4.0"
@ -3346,7 +3079,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184"
dependencies = [
"bytes",
"http 1.4.0",
"http",
]
[[package]]
@ -3357,7 +3090,7 @@ checksum = "b021d93e26becf5dc7e1b75b1bed1fd93124b374ceb73f43d4d4eafec896a64a"
dependencies = [
"bytes",
"futures-core",
"http 1.4.0",
"http",
"http-body",
"pin-project-lite",
]
@ -3432,7 +3165,7 @@ dependencies = [
"futures-channel",
"futures-core",
"h2",
"http 1.4.0",
"http",
"http-body",
"httparse",
"httpdate",
@ -3465,7 +3198,7 @@ version = "0.27.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3c93eb611681b207e1fe55d5a71ecf91572ec8a6705cdb6857f7d8d5242cf58"
dependencies = [
"http 1.4.0",
"http",
"hyper",
"hyper-util",
"log",
@ -3516,14 +3249,14 @@ dependencies = [
"bytes",
"futures-channel",
"futures-util",
"http 1.4.0",
"http",
"http-body",
"hyper",
"ipnet",
"libc",
"percent-encoding",
"pin-project-lite",
"socket2 0.6.3",
"socket2",
"system-configuration",
"tokio",
"tower-service",
@ -3684,12 +3417,6 @@ dependencies = [
"icu_properties",
]
[[package]]
name = "impl-more"
version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e8a5a9a0ff0086c7a148acb942baaabeadf9504d10400b5a05645853729b9cd2"
[[package]]
name = "include_bytes_aligned"
version = "0.1.4"
@ -3725,7 +3452,6 @@ version = "0.1.0"
dependencies = [
"anyhow",
"arc-swap",
"async-trait",
"clap",
"env_logger",
"futures",
@ -3825,8 +3551,6 @@ name = "integration_tests"
version = "0.1.0"
dependencies = [
"anyhow",
"base64 0.22.1",
"borsh",
"bytesize",
"common",
"env_logger",
@ -3839,7 +3563,8 @@ dependencies = [
"nssa",
"nssa_core",
"sequencer_core",
"sequencer_runner",
"sequencer_service",
"sequencer_service_rpc",
"serde_json",
"tempfile",
"testcontainers",
@ -4048,7 +3773,7 @@ dependencies = [
"futures-channel",
"futures-util",
"gloo-net",
"http 1.4.0",
"http",
"jsonrpsee-core",
"pin-project",
"rustls",
@ -4073,7 +3798,7 @@ dependencies = [
"bytes",
"futures-timer",
"futures-util",
"http 1.4.0",
"http",
"http-body",
"http-body-util",
"jsonrpsee-types",
@ -4134,7 +3859,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4c51b7c290bb68ce3af2d029648148403863b982f138484a73f02a9dd52dbd7f"
dependencies = [
"futures-util",
"http 1.4.0",
"http",
"http-body",
"http-body-util",
"hyper",
@ -4160,7 +3885,7 @@ version = "0.26.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bc88ff4688e43cc3fa9883a8a95c6fa27aa2e76c96e610b737b6554d650d7fd5"
dependencies = [
"http 1.4.0",
"http",
"serde",
"serde_json",
"thiserror 2.0.18",
@ -4184,7 +3909,7 @@ version = "0.26.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b6fceceeb05301cc4c065ab3bd2fa990d41ff4eb44e4ca1b30fa99c057c3e79"
dependencies = [
"http 1.4.0",
"http",
"jsonrpsee-client-transport",
"jsonrpsee-core",
"jsonrpsee-types",
@ -4238,12 +3963,6 @@ dependencies = [
"thiserror 2.0.18",
]
[[package]]
name = "language-tags"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d4345964bb142484797b161f473a503a434de77149dd8c7427788c6e13379388"
[[package]]
name = "lazy-regex"
version = "3.6.0"
@ -4620,12 +4339,6 @@ version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6373607a59f0be73a39b6fe456b8192fcc3585f602af20751600e974dd455e77"
[[package]]
name = "local-waker"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4d873d7c67ce09b42110d801813efbc9364414e356be9935700d368351657487"
[[package]]
name = "lock_api"
version = "0.4.14"
@ -5384,7 +5097,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a69bcab0ad47271a0234d9422b131806bf3968021e5dc9328caf2d4cd58557fc"
dependencies = [
"libc",
"log",
"wasi",
"windows-sys 0.61.2",
]
@ -5398,7 +5110,7 @@ dependencies = [
"bytes",
"encoding_rs",
"futures-util",
"http 1.4.0",
"http",
"httparse",
"memchr",
"mime",
@ -5545,6 +5257,7 @@ dependencies = [
"risc0-zkvm",
"secp256k1",
"serde",
"serde_with",
"sha2",
"test-case",
"test_program_methods",
@ -6148,8 +5861,10 @@ name = "program_deployment"
version = "0.1.0"
dependencies = [
"clap",
"common",
"nssa",
"nssa_core",
"sequencer_service_rpc",
"tokio",
"wallet",
]
@ -6270,7 +5985,7 @@ dependencies = [
"quinn-udp",
"rustc-hash",
"rustls",
"socket2 0.6.3",
"socket2",
"thiserror 2.0.18",
"tokio",
"tracing",
@ -6307,7 +6022,7 @@ dependencies = [
"cfg_aliases",
"libc",
"once_cell",
"socket2 0.6.3",
"socket2",
"tracing",
"windows-sys 0.60.2",
]
@ -6581,12 +6296,6 @@ dependencies = [
"regex-syntax",
]
[[package]]
name = "regex-lite"
version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cab834c73d247e67f4fae452806d17d3c7501756d98c8808d7c9c7aa7d18f973"
[[package]]
name = "regex-syntax"
version = "0.8.10"
@ -6606,7 +6315,7 @@ dependencies = [
"futures-core",
"futures-util",
"h2",
"http 1.4.0",
"http",
"http-body",
"http-body-util",
"hyper",
@ -7453,47 +7162,43 @@ dependencies = [
]
[[package]]
name = "sequencer_rpc"
name = "sequencer_service"
version = "0.1.0"
dependencies = [
"actix-cors",
"actix-web",
"anyhow",
"base58",
"base64 0.22.1",
"bedrock_client",
"borsh",
"bytesize",
"common",
"futures",
"hex",
"itertools 0.14.0",
"log",
"mempool",
"nssa",
"sequencer_core",
"serde",
"serde_json",
"tempfile",
"tokio",
]
[[package]]
name = "sequencer_runner"
version = "0.1.0"
dependencies = [
"actix",
"actix-web",
"anyhow",
"clap",
"common",
"env_logger",
"futures",
"indexer_service_rpc",
"jsonrpsee",
"log",
"mempool",
"nssa",
"sequencer_core",
"sequencer_rpc",
"sequencer_service_protocol",
"sequencer_service_rpc",
"tokio",
"tokio-util",
]
[[package]]
name = "sequencer_service_protocol"
version = "0.1.0"
dependencies = [
"common",
"nssa",
"nssa_core",
]
[[package]]
name = "sequencer_service_rpc"
version = "0.1.0"
dependencies = [
"jsonrpsee",
"sequencer_service_protocol",
]
[[package]]
@ -7689,7 +7394,7 @@ dependencies = [
"const_format",
"futures",
"gloo-net",
"http 1.4.0",
"http",
"http-body-util",
"hyper",
"inventory",
@ -7826,16 +7531,6 @@ dependencies = [
"tokio",
]
[[package]]
name = "socket2"
version = "0.5.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e22376abed350d73dd1cd119b57ffccad95b4e585a7cda43e286245ce23c0678"
dependencies = [
"libc",
"windows-sys 0.52.0",
]
[[package]]
name = "socket2"
version = "0.6.3"
@ -7855,7 +7550,7 @@ dependencies = [
"base64 0.22.1",
"bytes",
"futures",
"http 1.4.0",
"http",
"httparse",
"log",
"rand 0.8.5",
@ -8161,7 +7856,7 @@ dependencies = [
"etcetera",
"ferroid",
"futures",
"http 1.4.0",
"http",
"itertools 0.14.0",
"log",
"memchr",
@ -8321,7 +8016,7 @@ dependencies = [
"parking_lot",
"pin-project-lite",
"signal-hook-registry",
"socket2 0.6.3",
"socket2",
"tokio-macros",
"windows-sys 0.61.2",
]
@ -8518,7 +8213,7 @@ dependencies = [
"base64 0.22.1",
"bytes",
"h2",
"http 1.4.0",
"http",
"http-body",
"http-body-util",
"hyper",
@ -8526,7 +8221,7 @@ dependencies = [
"hyper-util",
"percent-encoding",
"pin-project",
"socket2 0.6.3",
"socket2",
"sync_wrapper",
"tokio",
"tokio-stream",
@ -8576,7 +8271,7 @@ dependencies = [
"bytes",
"futures-core",
"futures-util",
"http 1.4.0",
"http",
"http-body",
"http-body-util",
"http-range-header",
@ -8678,7 +8373,7 @@ checksum = "8628dcc84e5a09eb3d8423d6cb682965dea9133204e8fb3efee74c2a0c259442"
dependencies = [
"bytes",
"data-encoding",
"http 1.4.0",
"http",
"httparse",
"log",
"rand 0.9.2",
@ -8860,7 +8555,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d81f9efa9df032be5934a46a068815a10a042b494b6a58cb0a1a97bb5467ed6f"
dependencies = [
"base64 0.22.1",
"http 1.4.0",
"http",
"httparse",
"log",
]
@ -8955,8 +8650,6 @@ dependencies = [
"anyhow",
"async-stream",
"base58",
"base64 0.22.1",
"borsh",
"clap",
"common",
"env_logger",
@ -8971,9 +8664,12 @@ dependencies = [
"nssa",
"nssa_core",
"optfield",
"rand 0.8.5",
"sequencer_service_rpc",
"serde",
"serde_json",
"sha2",
"thiserror 2.0.18",
"token_core",
"tokio",
"url",
@ -8984,9 +8680,9 @@ name = "wallet-ffi"
version = "0.1.0"
dependencies = [
"cbindgen",
"common",
"nssa",
"nssa_core",
"sequencer_service_rpc",
"tempfile",
"tokio",
"wallet",

View File

@ -17,9 +17,10 @@ members = [
"programs/amm",
"programs/token/core",
"programs/token",
"sequencer_core",
"sequencer_rpc",
"sequencer_runner",
"sequencer/core",
"sequencer/service",
"sequencer/service/protocol",
"sequencer/service/rpc",
"indexer/core",
"indexer/service",
"indexer/service/protocol",
@ -42,9 +43,10 @@ common = { path = "common" }
mempool = { path = "mempool" }
storage = { path = "storage" }
key_protocol = { path = "key_protocol" }
sequencer_core = { path = "sequencer_core" }
sequencer_rpc = { path = "sequencer_rpc" }
sequencer_runner = { path = "sequencer_runner" }
sequencer_core = { path = "sequencer/core" }
sequencer_service_protocol = { path = "sequencer/service/protocol" }
sequencer_service_rpc = { path = "sequencer/service/rpc" }
sequencer_service = { path = "sequencer/service" }
indexer_core = { path = "indexer/core" }
indexer_service = { path = "indexer/service" }
indexer_service_protocol = { path = "indexer/service/protocol" }

View File

@ -30,10 +30,10 @@ run-bedrock:
docker compose up
# Run Sequencer
[working-directory: 'sequencer_runner']
[working-directory: 'sequencer/service']
run-sequencer:
@echo "🧠 Running sequencer"
RUST_LOG=info RISC0_DEV_MODE=1 cargo run --release -p sequencer_runner configs/debug
RUST_LOG=info RISC0_DEV_MODE=1 cargo run --release -p sequencer_service configs/debug/sequencer_config.json
# Run Indexer
[working-directory: 'indexer/service']
@ -62,8 +62,8 @@ run-wallet +args:
# Clean runtime data
clean:
@echo "🧹 Cleaning run artifacts"
rm -rf sequencer_runner/bedrock_signing_key
rm -rf sequencer_runner/rocksdb
rm -rf sequencer/service/bedrock_signing_key
rm -rf sequencer/service/rocksdb
rm -rf indexer/service/rocksdb
rm -rf wallet/configs/debug/storage.json
rm -rf rocksdb

View File

@ -161,7 +161,7 @@ The sequencer and logos blockchain node can be run locally:
- `RUST_LOG=info cargo run -p indexer_service indexer/service/configs/indexer_config.json`
3. On another terminal go to the `logos-blockchain/lssa` repo and run the sequencer:
- `RUST_LOG=info cargo run -p sequencer_runner sequencer_runner/configs/debug`
- `RUST_LOG=info cargo run -p sequencer_service sequencer/service/configs/debug/sequencer_config.json`
4. (To run the explorer): on another terminal go to `logos-blockchain/lssa/explorer_service` and run the following:
- `cargo install cargo-leptos`
- `cargo leptos build --release`
@ -171,8 +171,8 @@ The sequencer and logos blockchain node can be run locally:
After stopping services above you need to remove 3 folders to start cleanly:
1. In the `logos-blockchain/logos-blockchain` folder `state` (not needed in case of docker setup)
2. In the `lssa` folder `sequencer_runner/rocksdb`
3. In the `lssa` file `sequencer_runner/bedrock_signing_key`
2. In the `lssa` folder `sequencer/service/rocksdb`
3. In the `lssa` file `sequencer/service/bedrock_signing_key`
4. In the `lssa` folder `indexer/service/rocksdb`
### Normal mode (`just` commands)
@ -220,7 +220,7 @@ This will use a wallet binary built from this repo and not the one installed in
### Standalone mode
The sequencer can be run in standalone mode with:
```bash
RUST_LOG=info cargo run --features standalone -p sequencer_runner sequencer_runner/configs/debug
RUST_LOG=info cargo run --features standalone -p sequencer_service sequencer/service/configs/debug
```
## Running with Docker

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@ -13,16 +13,11 @@ nssa_core.workspace = true
anyhow.workspace = true
thiserror.workspace = true
serde_json.workspace = true
serde.workspace = true
serde_with.workspace = true
reqwest.workspace = true
base64.workspace = true
sha2.workspace = true
log.workspace = true
hex.workspace = true
borsh.workspace = true
bytesize.workspace = true
base64.workspace = true
url.workspace = true
logos-blockchain-common-http-client.workspace = true
tokio-retry.workspace = true

View File

@ -60,6 +60,18 @@ pub struct Block {
pub bedrock_parent_id: MantleMsgId,
}
impl Serialize for Block {
fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
crate::borsh_base64::serialize(self, serializer)
}
}
impl<'de> Deserialize<'de> for Block {
fn deserialize<D: serde::Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
crate::borsh_base64::deserialize(deserializer)
}
}
#[derive(Debug, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)]
pub struct HashableBlockData {
pub block_id: BlockId,

View File

@ -0,0 +1,25 @@
//! This module provides utilities for serializing and deserializing data by combining Borsh and
//! Base64 encodings.
use base64::{Engine as _, engine::general_purpose::STANDARD};
use borsh::{BorshDeserialize, BorshSerialize};
use serde::{Deserialize, Serialize};
pub fn serialize<T: BorshSerialize, S: serde::Serializer>(
value: &T,
serializer: S,
) -> Result<S::Ok, S::Error> {
let borsh_encoded = borsh::to_vec(value).map_err(serde::ser::Error::custom)?;
let base64_encoded = STANDARD.encode(&borsh_encoded);
Serialize::serialize(&base64_encoded, serializer)
}
pub fn deserialize<'de, T: BorshDeserialize, D: serde::Deserializer<'de>>(
deserializer: D,
) -> Result<T, D::Error> {
let base64_encoded = <String as Deserialize>::deserialize(deserializer)?;
let borsh_encoded = STANDARD
.decode(base64_encoded.as_bytes())
.map_err(serde::de::Error::custom)?;
borsh::from_slice(&borsh_encoded).map_err(serde::de::Error::custom)
}

View File

@ -1,43 +0,0 @@
use nssa::AccountId;
use serde::Deserialize;
use crate::rpc_primitives::errors::RpcError;
#[derive(Debug, Clone, Deserialize)]
pub struct SequencerRpcError {
pub jsonrpc: String,
pub error: RpcError,
pub id: u64,
}
#[derive(thiserror::Error, Debug)]
pub enum SequencerClientError {
#[error("HTTP error")]
HTTPError(#[from] reqwest::Error),
#[error("Serde error")]
SerdeError(#[from] serde_json::Error),
#[error("Internal error: {0:?}")]
InternalError(SequencerRpcError),
}
impl From<SequencerRpcError> for SequencerClientError {
fn from(value: SequencerRpcError) -> Self {
Self::InternalError(value)
}
}
#[derive(Debug, thiserror::Error)]
pub enum ExecutionFailureKind {
#[error("Failed to get data from sequencer")]
SequencerError(#[source] anyhow::Error),
#[error("Inputs amounts does not match outputs")]
AmountMismatchError,
#[error("Accounts key not found")]
KeyNotFoundError,
#[error("Sequencer client error: {0:?}")]
SequencerClientError(#[from] SequencerClientError),
#[error("Can not pay for operation")]
InsufficientFundsError,
#[error("Account {0} data is invalid")]
AccountDataError(AccountId),
}

View File

@ -4,10 +4,8 @@ use borsh::{BorshDeserialize, BorshSerialize};
use serde_with::{DeserializeFromStr, SerializeDisplay};
pub mod block;
mod borsh_base64;
pub mod config;
pub mod error;
pub mod rpc_primitives;
pub mod sequencer_client;
pub mod transaction;
// Module for tests utility functions

View File

@ -1,194 +0,0 @@
use std::fmt;
use serde_json::{Value, to_value};
#[derive(serde::Serialize)]
pub struct RpcParseError(pub String);
/// This struct may be returned from JSON RPC server in case of error.
///
/// It is expected that that this struct has impls From<_> all other RPC errors
/// like [`RpcBlockError`](crate::types::blocks::RpcBlockError).
#[derive(Debug, serde::Serialize, serde::Deserialize, Clone, PartialEq, Eq)]
#[serde(deny_unknown_fields)]
pub struct RpcError {
#[serde(flatten)]
pub error_struct: Option<RpcErrorKind>,
/// Deprecated please use the `error_struct` instead.
pub code: i64,
/// Deprecated please use the `error_struct` instead.
pub message: String,
/// Deprecated please use the `error_struct` instead.
#[serde(skip_serializing_if = "Option::is_none")]
pub data: Option<Value>,
}
#[derive(Debug, serde::Serialize, serde::Deserialize, Clone, PartialEq, Eq)]
#[serde(tag = "name", content = "cause", rename_all = "SCREAMING_SNAKE_CASE")]
pub enum RpcErrorKind {
RequestValidationError(RpcRequestValidationErrorKind),
HandlerError(Value),
InternalError(Value),
}
#[derive(Debug, serde::Serialize, serde::Deserialize, Clone, PartialEq, Eq)]
#[serde(tag = "name", content = "info", rename_all = "SCREAMING_SNAKE_CASE")]
pub enum RpcRequestValidationErrorKind {
MethodNotFound { method_name: String },
ParseError { error_message: String },
}
/// A general Server Error.
#[derive(serde::Serialize, serde::Deserialize, Debug, PartialEq, Eq, Clone)]
pub enum ServerError {
Timeout,
Closed,
}
impl RpcError {
/// A generic constructor.
///
/// Mostly for completeness, doesn't do anything but filling in the corresponding fields.
#[must_use]
pub const fn new(code: i64, message: String, data: Option<Value>) -> Self {
Self {
code,
message,
data,
error_struct: None,
}
}
/// Create an Invalid Param error.
pub fn invalid_params(data: impl serde::Serialize) -> Self {
let value = match to_value(data) {
Ok(value) => value,
Err(err) => {
return Self::server_error(Some(format!(
"Failed to serialize invalid parameters error: {:?}",
err.to_string()
)));
}
};
Self::new(-32_602, "Invalid params".to_owned(), Some(value))
}
/// Create a server error.
pub fn server_error<E: serde::Serialize>(e: Option<E>) -> Self {
Self::new(
-32_000,
"Server error".to_owned(),
e.map(|v| to_value(v).expect("Must be representable in JSON")),
)
}
/// Create a parse error.
#[must_use]
pub fn parse_error(e: String) -> Self {
Self {
code: -32_700,
message: "Parse error".to_owned(),
data: Some(Value::String(e.clone())),
error_struct: Some(RpcErrorKind::RequestValidationError(
RpcRequestValidationErrorKind::ParseError { error_message: e },
)),
}
}
#[must_use]
pub fn serialization_error(e: &str) -> Self {
Self::new_internal_error(Some(Value::String(e.to_owned())), e)
}
/// Helper method to define extract `INTERNAL_ERROR` in separate `RpcErrorKind`
/// Returns `HANDLER_ERROR` if the error is not internal one.
#[must_use]
pub fn new_internal_or_handler_error(error_data: Option<Value>, error_struct: Value) -> Self {
if error_struct["name"] == "INTERNAL_ERROR" {
let error_message = match error_struct["info"].get("error_message") {
Some(Value::String(error_message)) => error_message.as_str(),
_ => "InternalError happened during serializing InternalError",
};
Self::new_internal_error(error_data, error_message)
} else {
Self::new_handler_error(error_data, error_struct)
}
}
#[must_use]
pub fn new_internal_error(error_data: Option<Value>, info: &str) -> Self {
Self {
code: -32_000,
message: "Server error".to_owned(),
data: error_data,
error_struct: Some(RpcErrorKind::InternalError(serde_json::json!({
"name": "INTERNAL_ERROR",
"info": serde_json::json!({"error_message": info})
}))),
}
}
fn new_handler_error(error_data: Option<Value>, error_struct: Value) -> Self {
Self {
code: -32_000,
message: "Server error".to_owned(),
data: error_data,
error_struct: Some(RpcErrorKind::HandlerError(error_struct)),
}
}
/// Create a method not found error.
#[must_use]
pub fn method_not_found(method: String) -> Self {
Self {
code: -32_601,
message: "Method not found".to_owned(),
data: Some(Value::String(method.clone())),
error_struct: Some(RpcErrorKind::RequestValidationError(
RpcRequestValidationErrorKind::MethodNotFound {
method_name: method,
},
)),
}
}
}
impl fmt::Display for RpcError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{self:?}")
}
}
impl From<RpcParseError> for RpcError {
fn from(parse_error: RpcParseError) -> Self {
Self::parse_error(parse_error.0)
}
}
impl From<std::convert::Infallible> for RpcError {
fn from(_: std::convert::Infallible) -> Self {
// SAFETY: Infallible error can never be constructed, so this code can never be reached.
unsafe { core::hint::unreachable_unchecked() }
}
}
impl fmt::Display for ServerError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::Timeout => write!(f, "ServerError: Timeout"),
Self::Closed => write!(f, "ServerError: Closed"),
}
}
}
impl From<ServerError> for RpcError {
fn from(e: ServerError) -> Self {
let error_data = match to_value(&e) {
Ok(value) => value,
Err(_err) => {
return Self::new_internal_error(None, "Failed to serialize ServerError");
}
};
Self::new_internal_error(Some(error_data), e.to_string().as_str())
}
}

View File

@ -1,588 +0,0 @@
// Copyright 2017 tokio-jsonrpc Developers
//
// Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
// http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
// http://opensource.org/licenses/MIT>, at your option. This file may not be
// copied, modified, or distributed except according to those terms.
//! JSON-RPC 2.0 messages.
//!
//! The main entrypoint here is the [Message](enum.Message.html). The others are just building
//! blocks and you should generally work with `Message` instead.
use std::fmt::{Formatter, Result as FmtResult};
use serde::{
de::{Deserializer, Error, Unexpected, Visitor},
ser::{SerializeStruct as _, Serializer},
};
use serde_json::{Result as JsonResult, Value};
use super::errors::RpcError;
pub type Parsed = Result<Message, Broken>;
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
struct Version;
impl serde::Serialize for Version {
fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
serializer.serialize_str("2.0")
}
}
impl<'de> serde::Deserialize<'de> for Version {
#[expect(
clippy::renamed_function_params,
reason = "More readable than original serde parameter names"
)]
fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
struct VersionVisitor;
impl Visitor<'_> for VersionVisitor {
type Value = Version;
fn expecting(&self, formatter: &mut Formatter<'_>) -> FmtResult {
formatter.write_str("a version string")
}
fn visit_str<E: Error>(self, value: &str) -> Result<Version, E> {
match value {
"2.0" => Ok(Version),
_ => Err(E::invalid_value(Unexpected::Str(value), &"value 2.0")),
}
}
}
deserializer.deserialize_str(VersionVisitor)
}
}
/// An RPC request.
#[derive(Debug, serde::Serialize, serde::Deserialize, Clone, PartialEq, Eq)]
#[serde(deny_unknown_fields)]
#[expect(
clippy::partial_pub_fields,
reason = "We don't want to allow access to the version, but the others are public for ease of use"
)]
pub struct Request {
jsonrpc: Version,
pub method: String,
#[serde(default, skip_serializing_if = "Value::is_null")]
pub params: Value,
pub id: Value,
}
impl Request {
#[must_use]
pub fn from_payload_version_2_0(method: String, payload: serde_json::Value) -> Self {
Self {
jsonrpc: Version,
method,
params: payload,
// ToDo: Correct checking of id
id: 1.into(),
}
}
/// Answer the request with a (positive) reply.
///
/// The ID is taken from the request.
#[must_use]
pub fn reply(&self, reply: Value) -> Message {
Message::Response(Response {
jsonrpc: Version,
result: Ok(reply),
id: self.id.clone(),
})
}
/// Answer the request with an error.
#[must_use]
pub fn error(&self, error: RpcError) -> Message {
Message::Response(Response {
jsonrpc: Version,
result: Err(error),
id: self.id.clone(),
})
}
}
/// A response to an RPC.
///
/// It is created by the methods on [Request](struct.Request.html).
#[expect(
    clippy::partial_pub_fields,
    reason = "We don't want to allow access to the version, but the others are public for ease of use"
)]
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Response {
    // Private: always serialized as "2.0".
    jsonrpc: Version,
    /// `Ok` is emitted as the `result` member, `Err` as the `error` member.
    pub result: Result<Value, RpcError>,
    /// Id of the request this response answers (`Value::Null` for top-level errors).
    pub id: Value,
}
impl serde::Serialize for Response {
    /// Serialize as a three-field struct, emitting either `result` or `error`
    /// depending on the outcome stored in `self.result`.
    fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        let mut state = serializer.serialize_struct("Response", 3)?;
        state.serialize_field("jsonrpc", &self.jsonrpc)?;
        match self.result {
            Ok(ref value) => state.serialize_field("result", value)?,
            Err(ref err) => state.serialize_field("error", err)?,
        }
        state.serialize_field("id", &self.id)?;
        state.end()
    }
}
/// A helper trick for deserialization.
#[derive(serde::Deserialize)]
#[serde(deny_unknown_fields)]
struct WireResponse {
    // It is actually used to eat and sanity check the deserialized text
    #[serde(rename = "jsonrpc")]
    _jsonrpc: Version,
    // Make sure we accept null as Some(Value::Null), instead of going to None
    #[serde(default, deserialize_with = "some_value")]
    result: Option<Value>,
    // The JSON-RPC "error" member, when present.
    error: Option<RpcError>,
    id: Value,
}
// Implementing deserialize is hard. We sidestep the difficulty by deserializing a similar
// structure that directly corresponds to whatever is on the wire and then convert it to our more
// convenient representation.
impl<'de> serde::Deserialize<'de> for Response {
    fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
        let wire: WireResponse = serde::Deserialize::deserialize(deserializer)?;
        // Exactly one of `result` / `error` must be present on the wire.
        let result = match (wire.result, wire.error) {
            (Some(value), None) => Ok(value),
            (None, Some(err)) => Err(err),
            _ => {
                return Err(D::Error::custom(
                    "Either 'error' or 'result' is expected, but not both",
                ));
            }
        };
        Ok(Self {
            id: wire.id,
            result,
            jsonrpc: Version,
        })
    }
}
/// A notification (doesn't expect an answer).
#[expect(
    clippy::partial_pub_fields,
    reason = "We don't want to allow access to the version, but the others are public for ease of use"
)]
#[derive(Debug, serde::Serialize, serde::Deserialize, Clone, PartialEq, Eq)]
#[serde(deny_unknown_fields)]
pub struct Notification {
    // Private: always "2.0" on the wire.
    jsonrpc: Version,
    /// Name of the notified method.
    pub method: String,
    /// Parameters; `Value::Null` when absent (omitted during serialization).
    #[serde(default, skip_serializing_if = "Value::is_null")]
    pub params: Value,
}
/// One message of the JSON RPC protocol.
///
/// One message, directly mapped from the structures of the protocol. See the
/// [specification](http://www.jsonrpc.org/specification) for more details.
///
/// Since the protocol allows one endpoint to be both client and server at the same time, the
/// message can decode and encode both directions of the protocol.
///
/// The `Batch` variant is supposed to be created directly, without a constructor.
///
/// The `UnmatchedSub` variant is used when a request is an array and some of the subrequests
/// aren't recognized as valid json rpc 2.0 messages. This is never returned as a top-level
/// element, it is returned as `Err(Broken::Unmatched)`.
// Untagged: the variant is picked by which structure the JSON matches.
#[derive(Debug, Clone, PartialEq, serde::Serialize, serde::Deserialize)]
#[serde(untagged)]
pub enum Message {
    /// An RPC request.
    Request(Request),
    /// A response to a Request.
    Response(Response),
    /// A notification.
    Notification(Notification),
    /// A batch of more requests or responses.
    ///
    /// The protocol allows bundling multiple requests, notifications or responses to a single
    /// message.
    ///
    /// This variant has no direct constructor and is expected to be constructed manually.
    Batch(Vec<Self>),
    /// An unmatched sub entry in a `Batch`.
    ///
    /// When there's a `Batch` and an element doesn't comform to the JSONRPC 2.0 format, that one
    /// is represented by this. This is never produced as a top-level value when parsing, the
    /// `Err(Broken::Unmatched)` is used instead. It is not possible to serialize.
    #[serde(skip_serializing)]
    UnmatchedSub(Value),
}
impl Message {
/// A constructor for a request.
///
/// The ID is auto-set to dontcare.
#[must_use]
pub fn request(method: String, params: Value) -> Self {
let id = Value::from("dontcare");
Self::Request(Request {
jsonrpc: Version,
method,
params,
id,
})
}
/// Create a top-level error (without an ID).
#[must_use]
pub const fn error(error: RpcError) -> Self {
Self::Response(Response {
jsonrpc: Version,
result: Err(error),
id: Value::Null,
})
}
/// A constructor for a notification.
#[must_use]
pub const fn notification(method: String, params: Value) -> Self {
Self::Notification(Notification {
jsonrpc: Version,
method,
params,
})
}
/// A constructor for a response.
#[must_use]
pub const fn response(id: Value, result: Result<Value, RpcError>) -> Self {
Self::Response(Response {
jsonrpc: Version,
result,
id,
})
}
/// Returns id or Null if there is no id.
#[must_use]
pub fn id(&self) -> Value {
match self {
Self::Request(req) => req.id.clone(),
Self::Response(response) => response.id.clone(),
Self::Notification(_) | Self::Batch(_) | Self::UnmatchedSub(_) => Value::Null,
}
}
}
impl From<Message> for String {
fn from(val: Message) -> Self {
::serde_json::ser::to_string(&val).expect("message serialization to json should not fail")
}
}
impl From<Message> for Vec<u8> {
fn from(val: Message) -> Self {
::serde_json::ser::to_vec(&val)
.expect("message serialization to json bytes should not fail")
}
}
/// A broken message.
///
/// Protocol-level errors.
#[derive(Debug, Clone, PartialEq, Eq, serde::Deserialize)]
#[serde(untagged)]
pub enum Broken {
    /// It was valid JSON, but doesn't match the form of a JSONRPC 2.0 message.
    Unmatched(Value),
    /// Invalid JSON.
    // Never produced by deserialization itself; constructed from the serde_json error text.
    #[serde(skip_deserializing)]
    SyntaxError(String),
}
impl Broken {
/// Generate an appropriate error message.
///
/// The error message for these things are specified in the RFC, so this just creates an error
/// with the right values.
#[must_use]
pub fn reply(&self) -> Message {
match self {
Self::Unmatched(_) => Message::error(RpcError::parse_error(
"JSON RPC Request format was expected".to_owned(),
)),
Self::SyntaxError(e) => Message::error(RpcError::parse_error(e.clone())),
}
}
}
/// A trick to easily deserialize and detect valid JSON, but invalid Message.
#[derive(serde::Deserialize)]
#[serde(untagged)]
pub enum WireMessage {
    // A structurally valid JSONRPC message.
    Message(Message),
    // Valid JSON that is not a valid message.
    Broken(Broken),
}
/// Convert a raw serde_json decoding result into the public [`Parsed`] form.
pub fn decoded_to_parsed(res: JsonResult<WireMessage>) -> Parsed {
    // JSON syntax errors short-circuit into Broken::SyntaxError.
    let wire = match res {
        Ok(wire) => wire,
        Err(e) => return Err(Broken::SyntaxError(e.to_string())),
    };
    match wire {
        // A top-level UnmatchedSub is reported as a Broken message instead.
        WireMessage::Message(Message::UnmatchedSub(value)) => Err(Broken::Unmatched(value)),
        WireMessage::Message(message) => Ok(message),
        WireMessage::Broken(broken) => Err(broken),
    }
}
/// Read a [Message](enum.Message.html) from a slice.
///
/// Invalid JSON or JSONRPC messages are reported as [Broken](enum.Broken.html).
pub fn from_slice(s: &[u8]) -> Parsed {
decoded_to_parsed(::serde_json::de::from_slice(s))
}
/// Read a [Message](enum.Message.html) from a string.
///
/// Invalid JSON or JSONRPC messages are reported as [Broken](enum.Broken.html).
pub fn from_str(s: &str) -> Parsed {
    let bytes = s.as_bytes();
    from_slice(bytes)
}
/// Deserializer for `Option<Value>` that produces `Some(Value::Null)`.
///
/// The usual one produces None in that case. But we need to know the difference between
/// `{x: null}` and `{}`.
fn some_value<'de, D: Deserializer<'de>>(deserializer: D) -> Result<Option<Value>, D::Error> {
    let value: Value = serde::Deserialize::deserialize(deserializer)?;
    Ok(Some(value))
}
#[cfg(test)]
mod tests {
    use serde_json::{Value, de::from_slice, json, ser::to_vec};
    use super::*;
    /// Test serialization and deserialization of the Message.
    ///
    /// We first deserialize it from a string. That way we check deserialization works.
    /// But since serialization doesn't have to produce the exact same result (order, spaces, …),
    /// we then serialize and deserialize the thing again and check it matches.
    #[test]
    fn message_serde() {
        // A helper for running one message test
        fn one(input: &str, expected: &Message) {
            let parsed: Message = from_str(input).unwrap();
            assert_eq!(*expected, parsed);
            // Round-trip: serialize the parsed value and deserialize it again.
            let serialized = to_vec(&parsed).unwrap();
            let deserialized: Message = from_slice(&serialized).unwrap();
            assert_eq!(parsed, deserialized);
        }
        // A request without parameters
        one(
            r#"{"jsonrpc": "2.0", "method": "call", "id": 1}"#,
            &Message::Request(Request {
                jsonrpc: Version,
                method: "call".to_owned(),
                params: Value::Null,
                id: json!(1),
            }),
        );
        // A request with parameters
        one(
            r#"{"jsonrpc": "2.0", "method": "call", "params": [1, 2, 3], "id": 2}"#,
            &Message::Request(Request {
                jsonrpc: Version,
                method: "call".to_owned(),
                params: json!([1, 2, 3]),
                id: json!(2),
            }),
        );
        // A notification (with parameters)
        one(
            r#"{"jsonrpc": "2.0", "method": "notif", "params": {"x": "y"}}"#,
            &Message::Notification(Notification {
                jsonrpc: Version,
                method: "notif".to_owned(),
                params: json!({"x": "y"}),
            }),
        );
        // A successful response
        one(
            r#"{"jsonrpc": "2.0", "result": 42, "id": 3}"#,
            &Message::Response(Response {
                jsonrpc: Version,
                result: Ok(json!(42)),
                id: json!(3),
            }),
        );
        // A successful response carrying an explicit null result
        one(
            r#"{"jsonrpc": "2.0", "result": null, "id": 3}"#,
            &Message::Response(Response {
                jsonrpc: Version,
                result: Ok(Value::Null),
                id: json!(3),
            }),
        );
        // An error
        one(
            r#"{"jsonrpc": "2.0", "error": {"code": 42, "message": "Wrong!"}, "id": null}"#,
            &Message::Response(Response {
                jsonrpc: Version,
                result: Err(RpcError::new(42, "Wrong!".to_owned(), None)),
                id: Value::Null,
            }),
        );
        // A batch
        one(
            r#"[
            {"jsonrpc": "2.0", "method": "notif"},
            {"jsonrpc": "2.0", "method": "call", "id": 42}
        ]"#,
            &Message::Batch(vec![
                Message::Notification(Notification {
                    jsonrpc: Version,
                    method: "notif".to_owned(),
                    params: Value::Null,
                }),
                Message::Request(Request {
                    jsonrpc: Version,
                    method: "call".to_owned(),
                    params: Value::Null,
                    id: json!(42),
                }),
            ]),
        );
        // Some handling of broken messages inside a batch
        let parsed = from_str(
            r#"[
            {"jsonrpc": "2.0", "method": "notif"},
            {"jsonrpc": "2.0", "method": "call", "id": 42},
            true
        ]"#,
        )
        .unwrap();
        assert_eq!(
            Message::Batch(vec![
                Message::Notification(Notification {
                    jsonrpc: Version,
                    method: "notif".to_owned(),
                    params: Value::Null,
                }),
                Message::Request(Request {
                    jsonrpc: Version,
                    method: "call".to_owned(),
                    params: Value::Null,
                    id: json!(42),
                }),
                Message::UnmatchedSub(Value::Bool(true)),
            ]),
            parsed
        );
        // UnmatchedSub must refuse to serialize.
        to_vec(&Message::UnmatchedSub(Value::Null)).unwrap_err();
    }
    /// Test things that are almost but not entirely JSONRPC are rejected.
    ///
    /// Check that the given JSON string parses, but is not recognized as a valid RPC message.
    ///
    /// The reject is done by returning it as Unmatched.
    #[test]
    fn broken() {
        // A helper with one test
        fn one(input: &str) {
            let msg = from_str(input);
            match msg {
                Err(Broken::Unmatched(_)) => (),
                _ => panic!("{input} recognized as an RPC message: {msg:?}!"),
            }
        }
        // Missing the version
        one(r#"{"method": "notif"}"#);
        // Wrong version
        one(r#"{"jsonrpc": 2.0, "method": "notif"}"#);
        // A response with both result and error
        one(r#"{"jsonrpc": "2.0", "result": 42, "error": {"code": 42, "message": "!"}, "id": 1}"#);
        // A response without an id
        one(r#"{"jsonrpc": "2.0", "result": 42}"#);
        // An extra field
        one(r#"{"jsonrpc": "2.0", "method": "weird", "params": 42, "others": 43, "id": 2}"#);
        // Something completely different
        one(r#"{"x": [1, 2, 3]}"#);
        // Invalid JSON is a syntax error, not an Unmatched message.
        match from_str("{]") {
            Err(Broken::SyntaxError(_)) => (),
            other => panic!("Something unexpected: {other:?}"),
        }
    }
    /// Test some non-trivial aspects of the constructors.
    ///
    /// This doesn't have a full coverage, because there's not much to actually test there.
    /// Most of it is related to the ids.
    #[test]
    #[ignore = "Not a full coverage test"]
    fn constructors() {
        let msg1 = Message::request("call".to_owned(), json!([1, 2, 3]));
        let msg2 = Message::request("call".to_owned(), json!([1, 2, 3]));
        // They differ, even when created with the same parameters
        assert_ne!(msg1, msg2);
        // And, specifically, they differ in the ID's
        let (req1, req2) = if let (Message::Request(req1), Message::Request(req2)) = (msg1, msg2) {
            assert_ne!(req1.id, req2.id);
            assert!(req1.id.is_string());
            assert!(req2.id.is_string());
            (req1, req2)
        } else {
            panic!("Non-request received");
        };
        let id1 = req1.id.clone();
        // When we answer a message, we get the same ID
        if let Message::Response(resp) = req1.reply(json!([1, 2, 3])) {
            assert_eq!(
                resp,
                Response {
                    jsonrpc: Version,
                    result: Ok(json!([1, 2, 3])),
                    id: id1
                }
            );
        } else {
            panic!("Not a response");
        }
        let id2 = req2.id.clone();
        // The same with an error
        if let Message::Response(resp) = req2.error(RpcError::new(42, "Wrong!".to_owned(), None)) {
            assert_eq!(
                resp,
                Response {
                    jsonrpc: Version,
                    result: Err(RpcError::new(42, "Wrong!".to_owned(), None)),
                    id: id2,
                }
            );
        } else {
            panic!("Not a response");
        }
        // When we have unmatched, we generate a top-level error with Null id.
        if let Message::Response(resp) =
            Message::error(RpcError::new(43, "Also wrong!".to_owned(), None))
        {
            assert_eq!(
                resp,
                Response {
                    jsonrpc: Version,
                    result: Err(RpcError::new(43, "Also wrong!".to_owned(), None)),
                    id: Value::Null,
                }
            );
        } else {
            panic!("Not a response");
        }
    }
}

View File

@ -1,57 +0,0 @@
use bytesize::ByteSize;
use serde::{Deserialize, Serialize};
pub mod errors;
pub mod message;
pub mod parser;
pub mod requests;
/// Resource limits applied to incoming RPC payloads.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct RpcLimitsConfig {
    /// Maximum byte size of the json payload.
    pub json_payload_max_size: ByteSize,
}
impl Default for RpcLimitsConfig {
    /// Default limit: 10 MiB per JSON payload.
    fn default() -> Self {
        let json_payload_max_size = ByteSize::mib(10);
        Self {
            json_payload_max_size,
        }
    }
}
/// Configuration of the RPC server endpoint.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct RpcConfig {
    /// Socket address the server binds to, e.g. "0.0.0.0:3040".
    pub addr: String,
    /// Origins allowed by CORS; "*" allows all.
    pub cors_allowed_origins: Vec<String>,
    /// Payload limits; falls back to defaults when absent from the config file.
    #[serde(default)]
    pub limits_config: RpcLimitsConfig,
}
impl Default for RpcConfig {
    /// Bind to all interfaces on port 3040, allow every origin, default limits.
    fn default() -> Self {
        Self {
            addr: String::from("0.0.0.0:3040"),
            cors_allowed_origins: vec![String::from("*")],
            limits_config: RpcLimitsConfig::default(),
        }
    }
}
impl RpcConfig {
    /// Build a config listening on `addr`, keeping all other defaults.
    #[must_use]
    pub fn new(addr: &str) -> Self {
        Self {
            addr: addr.to_owned(),
            ..Self::default()
        }
    }
    /// Build a config bound to all interfaces on `port`, keeping all other defaults.
    #[must_use]
    pub fn with_port(port: u16) -> Self {
        Self {
            addr: format!("0.0.0.0:{port}"),
            ..Self::default()
        }
    }
}

View File

@ -1,29 +0,0 @@
use serde::de::DeserializeOwned;
use serde_json::Value;
use super::errors::RpcParseError;
/// Implements [`RpcRequest`] for a type by delegating to `parse_params`.
#[macro_export]
macro_rules! parse_request {
    ($request_name:ty) => {
        impl RpcRequest for $request_name {
            fn parse(value: Option<Value>) -> Result<Self, RpcParseError> {
                parse_params::<Self>(value)
            }
        }
    };
}
/// A request type that can be parsed from an optional JSON params value.
pub trait RpcRequest: Sized {
    /// Parse `value` into the request type; `None` params is an error.
    fn parse(value: Option<Value>) -> Result<Self, RpcParseError>;
}
/// Deserialize an optional JSON params value into `T`.
///
/// Missing params (`None`) and deserialization failures are both reported as
/// [`RpcParseError`].
pub fn parse_params<T: DeserializeOwned>(value: Option<Value>) -> Result<T, RpcParseError> {
    let Some(value) = value else {
        return Err(RpcParseError("Require at least one parameter".to_owned()));
    };
    serde_json::from_value(value)
        .map_err(|err| RpcParseError(format!("Failed parsing args: {err}")))
}

View File

@ -1,219 +0,0 @@
use std::collections::HashMap;
use nssa::AccountId;
use nssa_core::program::ProgramId;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use super::{
errors::RpcParseError,
parser::{RpcRequest, parse_params},
};
use crate::{HashType, parse_request};
// Serde (de)serialization helpers that represent raw bytes as base64 strings
// in JSON, used via `#[serde(with = "base64_deser")]` on byte-vector fields.
mod base64_deser {
    use base64::{Engine as _, engine::general_purpose};
    use serde::{self, Deserialize, Deserializer, Serializer, ser::SerializeSeq as _};
    // Variant for `Vec<Vec<u8>>`: a JSON array of base64 strings.
    pub mod vec {
        use super::*;
        // Serialize each inner byte vector as a standard-alphabet base64 string.
        pub fn serialize<S>(bytes_vec: &[Vec<u8>], serializer: S) -> Result<S::Ok, S::Error>
        where
            S: Serializer,
        {
            let mut seq = serializer.serialize_seq(Some(bytes_vec.len()))?;
            for bytes in bytes_vec {
                let s = general_purpose::STANDARD.encode(bytes);
                seq.serialize_element(&s)?;
            }
            seq.end()
        }
        // Deserialize a JSON array of base64 strings back into byte vectors;
        // the first invalid base64 entry aborts with a custom serde error.
        pub fn deserialize<'de, D>(deserializer: D) -> Result<Vec<Vec<u8>>, D::Error>
        where
            D: Deserializer<'de>,
        {
            let base64_strings: Vec<String> = Deserialize::deserialize(deserializer)?;
            base64_strings
                .into_iter()
                .map(|s| {
                    general_purpose::STANDARD
                        .decode(&s)
                        .map_err(serde::de::Error::custom)
                })
                .collect()
        }
    }
    // Serialize a byte slice as a single standard-alphabet base64 string.
    pub fn serialize<S>(bytes: &[u8], serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let base64_string = general_purpose::STANDARD.encode(bytes);
        serializer.serialize_str(&base64_string)
    }
    // Deserialize a single base64 string into raw bytes.
    pub fn deserialize<'de, D>(deserializer: D) -> Result<Vec<u8>, D::Error>
    where
        D: Deserializer<'de>,
    {
        let base64_string: String = Deserialize::deserialize(deserializer)?;
        general_purpose::STANDARD
            .decode(&base64_string)
            .map_err(serde::de::Error::custom)
    }
}
/// Parameter-less health-check request.
#[derive(Serialize, Deserialize, Debug)]
pub struct HelloRequest;
/// Register a new account by its raw 32-byte id.
#[derive(Serialize, Deserialize, Debug)]
pub struct RegisterAccountRequest {
    pub account_id: [u8; 32],
}
/// Submit a serialized transaction (borsh bytes, base64 on the wire).
#[derive(Serialize, Deserialize, Debug)]
pub struct SendTxRequest {
    #[serde(with = "base64_deser")]
    pub transaction: Vec<u8>,
}
/// Fetch the data of a single block.
#[derive(Serialize, Deserialize, Debug)]
pub struct GetBlockDataRequest {
    pub block_id: u64,
}
/// Get a range of blocks from `start_block_id` to `end_block_id` (inclusive).
#[derive(Serialize, Deserialize, Debug)]
pub struct GetBlockRangeDataRequest {
    pub start_block_id: u64,
    pub end_block_id: u64,
}
/// Fetch the genesis id; takes no parameters.
#[derive(Serialize, Deserialize, Debug)]
pub struct GetGenesisIdRequest;
/// Fetch the latest block number; takes no parameters.
#[derive(Serialize, Deserialize, Debug)]
pub struct GetLastBlockRequest;
/// Fetch the pre-funded testnet accounts; takes no parameters.
#[derive(Serialize, Deserialize, Debug)]
pub struct GetInitialTestnetAccountsRequest;
/// Fetch the public balance of one account.
#[derive(Serialize, Deserialize, Debug)]
pub struct GetAccountBalanceRequest {
    pub account_id: AccountId,
}
/// Look a transaction up by its hash.
#[derive(Serialize, Deserialize, Debug)]
pub struct GetTransactionByHashRequest {
    pub hash: HashType,
}
/// Fetch nonces for a batch of accounts.
#[derive(Serialize, Deserialize, Debug)]
pub struct GetAccountsNoncesRequest {
    pub account_ids: Vec<AccountId>,
}
/// Fetch the full account record.
#[derive(Serialize, Deserialize, Debug)]
pub struct GetAccountRequest {
    pub account_id: AccountId,
}
/// Fetch a membership proof for a commitment.
#[derive(Serialize, Deserialize, Debug)]
pub struct GetProofForCommitmentRequest {
    pub commitment: nssa_core::Commitment,
}
/// Fetch the ids of the programs known to the node; takes no parameters.
#[derive(Serialize, Deserialize, Debug)]
pub struct GetProgramIdsRequest;
// Wire up `RpcRequest::parse` for every request type above.
parse_request!(HelloRequest);
parse_request!(RegisterAccountRequest);
parse_request!(SendTxRequest);
parse_request!(GetBlockDataRequest);
parse_request!(GetBlockRangeDataRequest);
parse_request!(GetGenesisIdRequest);
parse_request!(GetLastBlockRequest);
parse_request!(GetInitialTestnetAccountsRequest);
parse_request!(GetAccountBalanceRequest);
parse_request!(GetTransactionByHashRequest);
parse_request!(GetAccountsNoncesRequest);
parse_request!(GetProofForCommitmentRequest);
parse_request!(GetAccountRequest);
parse_request!(GetProgramIdsRequest);
/// Response to [`HelloRequest`].
#[derive(Serialize, Deserialize, Debug)]
pub struct HelloResponse {
    pub greeting: String,
}
/// Response to [`RegisterAccountRequest`].
#[derive(Serialize, Deserialize, Debug)]
pub struct RegisterAccountResponse {
    pub status: String,
}
/// Response to [`SendTxRequest`]: submission status plus the tx hash.
#[derive(Serialize, Deserialize, Debug)]
pub struct SendTxResponse {
    pub status: String,
    pub tx_hash: HashType,
}
/// Response to [`GetBlockDataRequest`]: serialized block bytes (base64 on the wire).
#[derive(Serialize, Deserialize, Debug)]
pub struct GetBlockDataResponse {
    #[serde(with = "base64_deser")]
    pub block: Vec<u8>,
}
/// Response to [`GetBlockRangeDataRequest`]: one byte vector per block.
#[derive(Serialize, Deserialize, Debug)]
pub struct GetBlockRangeDataResponse {
    #[serde(with = "base64_deser::vec")]
    pub blocks: Vec<Vec<u8>>,
}
/// Response to [`GetGenesisIdRequest`].
#[derive(Serialize, Deserialize, Debug)]
pub struct GetGenesisIdResponse {
    pub genesis_id: u64,
}
/// Response to [`GetLastBlockRequest`].
#[derive(Serialize, Deserialize, Debug)]
pub struct GetLastBlockResponse {
    pub last_block: u64,
}
/// Response to [`GetAccountBalanceRequest`].
#[derive(Serialize, Deserialize, Debug)]
pub struct GetAccountBalanceResponse {
    pub balance: u128,
}
/// Response to [`GetAccountsNoncesRequest`]; order matches the requested ids.
#[derive(Serialize, Deserialize, Debug)]
pub struct GetAccountsNoncesResponse {
    pub nonces: Vec<u128>,
}
/// Response to [`GetTransactionByHashRequest`]; `None` when the hash is unknown.
#[derive(Serialize, Deserialize, Debug)]
pub struct GetTransactionByHashResponse {
    pub transaction: Option<String>,
}
/// Response to [`GetAccountRequest`].
#[derive(Serialize, Deserialize, Debug)]
pub struct GetAccountResponse {
    pub account: nssa::Account,
}
/// Response to [`GetProofForCommitmentRequest`]; `None` when no proof exists.
#[derive(Serialize, Deserialize, Debug)]
pub struct GetProofForCommitmentResponse {
    pub membership_proof: Option<nssa_core::MembershipProof>,
}
/// Response to [`GetProgramIdsRequest`]: program name to id mapping.
#[derive(Serialize, Deserialize, Debug)]
pub struct GetProgramIdsResponse {
    pub program_ids: HashMap<String, ProgramId>,
}
/// One entry of the initial-testnet-accounts listing.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct GetInitialTestnetAccountsResponse {
    /// Hex encoded account id.
    pub account_id: String,
    pub balance: u64,
}

View File

@ -1,361 +0,0 @@
use std::{collections::HashMap, ops::RangeInclusive};
use anyhow::Result;
use nssa::AccountId;
use nssa_core::program::ProgramId;
use reqwest::Client;
use serde::Deserialize;
use serde_json::Value;
use url::Url;
use super::rpc_primitives::requests::{
GetAccountBalanceRequest, GetAccountBalanceResponse, GetBlockDataRequest, GetBlockDataResponse,
GetGenesisIdRequest, GetGenesisIdResponse, GetInitialTestnetAccountsRequest,
};
use crate::{
HashType,
config::BasicAuth,
error::{SequencerClientError, SequencerRpcError},
rpc_primitives::{
self,
requests::{
GetAccountRequest, GetAccountResponse, GetAccountsNoncesRequest,
GetAccountsNoncesResponse, GetBlockRangeDataRequest, GetBlockRangeDataResponse,
GetInitialTestnetAccountsResponse, GetLastBlockRequest, GetLastBlockResponse,
GetProgramIdsRequest, GetProgramIdsResponse, GetProofForCommitmentRequest,
GetProofForCommitmentResponse, GetTransactionByHashRequest,
GetTransactionByHashResponse, SendTxRequest, SendTxResponse,
},
},
transaction::NSSATransaction,
};
/// Success envelope of a sequencer JSON-RPC reply; only `result` is consumed.
#[derive(Debug, Clone, Deserialize)]
struct SequencerRpcResponse {
    // Eaten during deserialization to validate the envelope shape.
    #[serde(rename = "jsonrpc")]
    _jsonrpc: String,
    // The method-specific payload, deserialized further by each caller.
    result: serde_json::Value,
    #[serde(rename = "id")]
    _id: u64,
}
/// HTTP JSON-RPC client for the sequencer.
#[derive(Clone)]
pub struct SequencerClient {
    /// Underlying HTTP client (shared connection pool).
    pub client: reqwest::Client,
    /// Base URL of the sequencer RPC endpoint.
    pub sequencer_addr: Url,
    /// Optional HTTP basic-auth credentials attached to every call.
    pub basic_auth: Option<BasicAuth>,
}
impl SequencerClient {
pub fn new(sequencer_addr: Url) -> Result<Self> {
Self::new_with_auth(sequencer_addr, None)
}
pub fn new_with_auth(sequencer_addr: Url, basic_auth: Option<BasicAuth>) -> Result<Self> {
Ok(Self {
client: Client::builder()
// Add more fields if needed
.timeout(std::time::Duration::from_secs(60))
// Should be kept in sync with server keep-alive settings
.pool_idle_timeout(std::time::Duration::from_secs(5))
.build()?,
sequencer_addr,
basic_auth,
})
}
pub async fn call_method_with_payload(
&self,
method: &str,
payload: Value,
) -> Result<Value, SequencerClientError> {
let request =
rpc_primitives::message::Request::from_payload_version_2_0(method.to_owned(), payload);
log::debug!(
"Calling method {method} with payload {request:?} to sequencer at {}",
self.sequencer_addr
);
let strategy = tokio_retry::strategy::FixedInterval::from_millis(10000).take(60);
let response_vall = tokio_retry::Retry::spawn(strategy, || async {
let mut call_builder = self.client.post(self.sequencer_addr.clone());
if let Some(BasicAuth { username, password }) = &self.basic_auth {
call_builder = call_builder.basic_auth(username, password.as_deref());
}
let call_res_res = call_builder.json(&request).send().await;
match call_res_res {
Err(err) => Err(err),
Ok(call_res) => call_res.json::<Value>().await,
}
})
.await?;
if let Ok(response) = serde_json::from_value::<SequencerRpcResponse>(response_vall.clone())
{
Ok(response.result)
} else {
let err_resp = serde_json::from_value::<SequencerRpcError>(response_vall)?;
Err(err_resp.into())
}
}
/// Get block data at `block_id` from sequencer.
pub async fn get_block(
&self,
block_id: u64,
) -> Result<GetBlockDataResponse, SequencerClientError> {
let block_req = GetBlockDataRequest { block_id };
let req = serde_json::to_value(block_req)?;
let resp = self.call_method_with_payload("get_block", req).await?;
let resp_deser = serde_json::from_value(resp)?;
Ok(resp_deser)
}
pub async fn get_block_range(
&self,
range: RangeInclusive<u64>,
) -> Result<GetBlockRangeDataResponse, SequencerClientError> {
let block_req = GetBlockRangeDataRequest {
start_block_id: *range.start(),
end_block_id: *range.end(),
};
let req = serde_json::to_value(block_req)?;
let resp = self
.call_method_with_payload("get_block_range", req)
.await?;
let resp_deser = serde_json::from_value(resp)?;
Ok(resp_deser)
}
/// Get last known `blokc_id` from sequencer.
pub async fn get_last_block(&self) -> Result<GetLastBlockResponse, SequencerClientError> {
let block_req = GetLastBlockRequest {};
let req = serde_json::to_value(block_req)?;
let resp = self.call_method_with_payload("get_last_block", req).await?;
let resp_deser = serde_json::from_value(resp)?;
Ok(resp_deser)
}
/// Get account public balance for `account_id`. `account_id` must be a valid hex-string for 32
/// bytes.
pub async fn get_account_balance(
&self,
account_id: AccountId,
) -> Result<GetAccountBalanceResponse, SequencerClientError> {
let block_req = GetAccountBalanceRequest { account_id };
let req = serde_json::to_value(block_req)?;
let resp = self
.call_method_with_payload("get_account_balance", req)
.await?;
let resp_deser = serde_json::from_value(resp)?;
Ok(resp_deser)
}
/// Get accounts nonces for `account_ids`. `account_ids` must be a list of valid hex-strings for
/// 32 bytes.
pub async fn get_accounts_nonces(
&self,
account_ids: Vec<AccountId>,
) -> Result<GetAccountsNoncesResponse, SequencerClientError> {
let block_req = GetAccountsNoncesRequest { account_ids };
let req = serde_json::to_value(block_req)?;
let resp = self
.call_method_with_payload("get_accounts_nonces", req)
.await?;
let resp_deser = serde_json::from_value(resp)?;
Ok(resp_deser)
}
pub async fn get_account(
&self,
account_id: AccountId,
) -> Result<GetAccountResponse, SequencerClientError> {
let block_req = GetAccountRequest { account_id };
let req = serde_json::to_value(block_req)?;
let resp = self.call_method_with_payload("get_account", req).await?;
let resp_deser = serde_json::from_value(resp)?;
Ok(resp_deser)
}
/// Get transaction details for `hash`.
pub async fn get_transaction_by_hash(
&self,
hash: HashType,
) -> Result<GetTransactionByHashResponse, SequencerClientError> {
let block_req = GetTransactionByHashRequest { hash };
let req = serde_json::to_value(block_req)?;
let resp = self
.call_method_with_payload("get_transaction_by_hash", req)
.await?;
let resp_deser = serde_json::from_value(resp)?;
Ok(resp_deser)
}
/// Send transaction to sequencer.
pub async fn send_tx_public(
&self,
transaction: nssa::PublicTransaction,
) -> Result<SendTxResponse, SequencerClientError> {
let transaction = NSSATransaction::Public(transaction);
let tx_req = SendTxRequest {
transaction: borsh::to_vec(&transaction).unwrap(),
};
let req = serde_json::to_value(tx_req)?;
let resp = self.call_method_with_payload("send_tx", req).await?;
let resp_deser = serde_json::from_value(resp)?;
Ok(resp_deser)
}
/// Send transaction to sequencer.
pub async fn send_tx_private(
&self,
transaction: nssa::PrivacyPreservingTransaction,
) -> Result<SendTxResponse, SequencerClientError> {
let transaction = NSSATransaction::PrivacyPreserving(transaction);
let tx_req = SendTxRequest {
transaction: borsh::to_vec(&transaction).unwrap(),
};
let req = serde_json::to_value(tx_req)?;
let resp = self.call_method_with_payload("send_tx", req).await?;
let resp_deser = serde_json::from_value(resp)?;
Ok(resp_deser)
}
/// Get genesis id from sequencer.
pub async fn get_genesis_id(&self) -> Result<GetGenesisIdResponse, SequencerClientError> {
let genesis_req = GetGenesisIdRequest {};
let req = serde_json::to_value(genesis_req).unwrap();
let resp = self
.call_method_with_payload("get_genesis", req)
.await
.unwrap();
let resp_deser = serde_json::from_value(resp).unwrap();
Ok(resp_deser)
}
/// Get initial testnet accounts from sequencer.
pub async fn get_initial_testnet_accounts(
&self,
) -> Result<Vec<GetInitialTestnetAccountsResponse>, SequencerClientError> {
let acc_req = GetInitialTestnetAccountsRequest {};
let req = serde_json::to_value(acc_req).unwrap();
let resp = self
.call_method_with_payload("get_initial_testnet_accounts", req)
.await
.unwrap();
let resp_deser = serde_json::from_value(resp).unwrap();
Ok(resp_deser)
}
/// Get proof for commitment.
pub async fn get_proof_for_commitment(
&self,
commitment: nssa_core::Commitment,
) -> Result<Option<nssa_core::MembershipProof>, SequencerClientError> {
let acc_req = GetProofForCommitmentRequest { commitment };
let req = serde_json::to_value(acc_req).unwrap();
let resp = self
.call_method_with_payload("get_proof_for_commitment", req)
.await
.unwrap();
let resp_deser = serde_json::from_value::<GetProofForCommitmentResponse>(resp)
.unwrap()
.membership_proof;
Ok(resp_deser)
}
pub async fn send_tx_program(
&self,
transaction: nssa::ProgramDeploymentTransaction,
) -> Result<SendTxResponse, SequencerClientError> {
let transaction = NSSATransaction::ProgramDeployment(transaction);
let tx_req = SendTxRequest {
transaction: borsh::to_vec(&transaction).unwrap(),
};
let req = serde_json::to_value(tx_req)?;
let resp = self.call_method_with_payload("send_tx", req).await?;
let resp_deser = serde_json::from_value(resp)?;
Ok(resp_deser)
}
/// Get Ids of the programs used by the node.
pub async fn get_program_ids(
&self,
) -> Result<HashMap<String, ProgramId>, SequencerClientError> {
let acc_req = GetProgramIdsRequest {};
let req = serde_json::to_value(acc_req).unwrap();
let resp = self
.call_method_with_payload("get_program_ids", req)
.await
.unwrap();
let resp_deser = serde_json::from_value::<GetProgramIdsResponse>(resp)
.unwrap()
.program_ids;
Ok(resp_deser)
}
}

View File

@ -1,6 +1,6 @@
use borsh::{BorshDeserialize, BorshSerialize};
use log::warn;
use nssa::{AccountId, V02State};
use nssa::{AccountId, V03State};
use serde::{Deserialize, Serialize};
use crate::HashType;
@ -12,6 +12,18 @@ pub enum NSSATransaction {
ProgramDeployment(nssa::ProgramDeploymentTransaction),
}
impl Serialize for NSSATransaction {
fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
crate::borsh_base64::serialize(self, serializer)
}
}
impl<'de> Deserialize<'de> for NSSATransaction {
fn deserialize<D: serde::Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
crate::borsh_base64::deserialize(deserializer)
}
}
impl NSSATransaction {
#[must_use]
pub fn hash(&self) -> HashType {
@ -55,7 +67,7 @@ impl NSSATransaction {
pub fn execute_check_on_state(
self,
state: &mut V02State,
state: &mut V03State,
) -> Result<Self, nssa::error::NssaError> {
match &self {
Self::Public(tx) => state.transition_from_public_transaction(tx),
@ -87,7 +99,7 @@ impl From<nssa::ProgramDeploymentTransaction> for NSSATransaction {
}
#[derive(
Debug, Serialize, Deserialize, Clone, Copy, PartialEq, Eq, BorshSerialize, BorshDeserialize,
Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, BorshSerialize, BorshDeserialize,
)]
pub enum TxKind {
Public,

View File

@ -1,6 +1,5 @@
{
"home": "/var/lib/sequencer_runner",
"override_rust_log": null,
"home": "/var/lib/sequencer_service",
"genesis_id": 1,
"is_genesis_random": true,
"max_num_tx_in_block": 20,
@ -8,7 +7,6 @@
"mempool_max_size": 10000,
"block_create_timeout": "10s",
"retry_pending_blocks_timeout": "7s",
"port": 3040,
"bedrock_config": {
"backoff": {
"start_delay": "100ms",

View File

@ -7,21 +7,21 @@ services:
environment:
- RUST_LOG=error
sequencer_runner:
sequencer_service:
depends_on:
- logos-blockchain-node-0
- indexer_service
volumes: !override
- ./configs/docker-all-in-one/sequencer:/etc/sequencer_runner
volumes:
- ./configs/docker-all-in-one/sequencer_config.json:/etc/sequencer_service/sequencer_config.json
indexer_service:
depends_on:
- logos-blockchain-node-0
volumes:
- ./configs/docker-all-in-one/indexer/indexer_config.json:/etc/indexer_service/indexer_config.json
- ./configs/docker-all-in-one/indexer_config.json:/etc/indexer_service/indexer_config.json
explorer_service:
depends_on:
- indexer_service
environment:
- INDEXER_RPC_URL=http://indexer_service:8779
- INDEXER_RPC_URL=http://indexer_service:8779

View File

@ -6,7 +6,7 @@ include:
- path:
bedrock/docker-compose.yml
- path:
sequencer_runner/docker-compose.yml
sequencer/service/docker-compose.yml
- path:
indexer/service/docker-compose.yml
- path:

View File

@ -8,8 +8,10 @@ license = { workspace = true }
workspace = true
[dependencies]
common.workspace = true
nssa.workspace = true
nssa_core.workspace = true
sequencer_service_rpc = { workspace = true, features = ["client"] }
wallet.workspace = true
tokio = { workspace = true, features = ["macros"] }

View File

@ -1,8 +1,10 @@
use common::transaction::NSSATransaction;
use nssa::{
AccountId, PublicTransaction,
program::Program,
public_transaction::{Message, WitnessSet},
};
use sequencer_service_rpc::RpcClient as _;
use wallet::WalletCore;
// Before running this example, compile the `hello_world.rs` guest program with:
@ -58,7 +60,7 @@ async fn main() {
// Submit the transaction
let _response = wallet_core
.sequencer_client
.send_tx_public(tx)
.send_transaction(NSSATransaction::Public(tx))
.await
.unwrap();
}

View File

@ -1,8 +1,10 @@
use common::transaction::NSSATransaction;
use nssa::{
AccountId, PublicTransaction,
program::Program,
public_transaction::{Message, WitnessSet},
};
use sequencer_service_rpc::RpcClient as _;
use wallet::WalletCore;
// Before running this example, compile the `simple_tail_call.rs` guest program with:
@ -54,7 +56,7 @@ async fn main() {
// Submit the transaction
let _response = wallet_core
.sequencer_client
.send_tx_public(tx)
.send_transaction(NSSATransaction::Public(tx))
.await
.unwrap();
}

View File

@ -1,9 +1,10 @@
use common::transaction::NSSATransaction;
use nssa::{
AccountId, PublicTransaction,
program::Program,
public_transaction::{Message, WitnessSet},
};
use nssa_core::account::Nonce;
use sequencer_service_rpc::RpcClient as _;
use wallet::WalletCore;
// Before running this example, compile the `hello_world_with_authorization.rs` guest program with:
@ -63,13 +64,7 @@ async fn main() {
.await
.expect("Node should be reachable to query account data");
let signing_keys = [signing_key];
let message = Message::try_new(
program.id(),
vec![account_id],
nonces.iter().map(|x| Nonce(*x)).collect(),
greeting,
)
.unwrap();
let message = Message::try_new(program.id(), vec![account_id], nonces, greeting).unwrap();
// Pass the signing key to sign the message. This will be used by the node
// to flag the pre_state as `is_authorized` when executing the program
let witness_set = WitnessSet::for_message(&message, &signing_keys);
@ -78,7 +73,7 @@ async fn main() {
// Submit the transaction
let _response = wallet_core
.sequencer_client
.send_tx_public(tx)
.send_transaction(NSSATransaction::Public(tx))
.await
.unwrap();
}

View File

@ -3,12 +3,14 @@
reason = "This is an example program, it's fine to print to stdout"
)]
use common::transaction::NSSATransaction;
use nssa::{
AccountId, PublicTransaction,
program::Program,
public_transaction::{Message, WitnessSet},
};
use nssa_core::program::PdaSeed;
use sequencer_service_rpc::RpcClient as _;
use wallet::WalletCore;
// Before running this example, compile the `simple_tail_call.rs` guest program with:
@ -56,7 +58,7 @@ async fn main() {
// Submit the transaction
let _response = wallet_core
.sequencer_client
.send_tx_public(tx)
.send_transaction(NSSATransaction::Public(tx))
.await
.unwrap();

View File

@ -1,5 +1,7 @@
use clap::{Parser, Subcommand};
use common::transaction::NSSATransaction;
use nssa::{PublicTransaction, program::Program, public_transaction};
use sequencer_service_rpc::RpcClient as _;
use wallet::{PrivacyPreservingAccount, WalletCore};
// Before running this example, compile the `hello_world_with_move_function.rs` guest program with:
@ -87,7 +89,7 @@ async fn main() {
// Submit the transaction
let _response = wallet_core
.sequencer_client
.send_tx_public(tx)
.send_transaction(NSSATransaction::Public(tx))
.await
.unwrap();
}
@ -126,7 +128,7 @@ async fn main() {
// Submit the transaction
let _response = wallet_core
.sequencer_client
.send_tx_public(tx)
.send_transaction(NSSATransaction::Public(tx))
.await
.unwrap();
}

View File

@ -22,7 +22,13 @@ WORKDIR /explorer_service
COPY . .
# Build the app
RUN cargo leptos build --release -vv
RUN --mount=type=cache,target=/usr/local/cargo/registry/index \
--mount=type=cache,target=/usr/local/cargo/registry/cache \
--mount=type=cache,target=/usr/local/cargo/git \
--mount=type=cache,target=/explorer_service/target \
cargo leptos build --release -vv \
&& cp /explorer_service/target/release/explorer_service /usr/local/bin/explorer_service \
&& cp -r /explorer_service/target/site /explorer_service/site_output
FROM debian:trixie-slim AS runtime
WORKDIR /explorer_service
@ -33,10 +39,10 @@ RUN apt-get update -y \
&& rm -rf /var/lib/apt/lists/*
# Copy the server binary to the /explorer_service directory
COPY --from=builder /explorer_service/target/release/explorer_service /explorer_service/
COPY --from=builder /usr/local/bin/explorer_service /explorer_service/
# /target/site contains our JS/WASM/CSS, etc.
COPY --from=builder /explorer_service/target/site /explorer_service/site
COPY --from=builder /explorer_service/site_output /explorer_service/site
# Copy Cargo.toml as its needed at runtime
COPY --from=builder /explorer_service/Cargo.toml /explorer_service/

View File

@ -41,12 +41,12 @@ pub async fn search(query: String) -> Result<SearchResults, ServerFnError> {
// Try as hash
if let Ok(hash) = HashType::from_str(&query) {
// Try as block hash
if let Ok(block) = client.get_block_by_hash(hash).await {
if let Ok(Some(block)) = client.get_block_by_hash(hash).await {
blocks.push(block);
}
// Try as transaction hash
if let Ok(tx) = client.get_transaction(hash).await {
if let Ok(Some(tx)) = client.get_transaction(hash).await {
transactions.push(tx);
}
}
@ -60,7 +60,7 @@ pub async fn search(query: String) -> Result<SearchResults, ServerFnError> {
// Try as block ID
if let Ok(block_id) = query.parse::<u64>()
&& let Ok(block) = client.get_block_by_id(block_id).await
&& let Ok(Some(block)) = client.get_block_by_id(block_id).await
{
blocks.push(block);
}
@ -81,6 +81,7 @@ pub async fn get_block_by_id(block_id: BlockId) -> Result<Block, ServerFnError>
.get_block_by_id(block_id)
.await
.map_err(|e| ServerFnError::ServerError(format!("RPC error: {e}")))
.and_then(|opt| opt.ok_or_else(|| ServerFnError::ServerError("Block not found".to_owned())))
}
/// Get latest block ID
@ -103,6 +104,7 @@ pub async fn get_block_by_hash(block_hash: HashType) -> Result<Block, ServerFnEr
.get_block_by_hash(block_hash)
.await
.map_err(|e| ServerFnError::ServerError(format!("RPC error: {e}")))
.and_then(|opt| opt.ok_or_else(|| ServerFnError::ServerError("Block not found".to_owned())))
}
/// Get transaction by hash
@ -114,6 +116,9 @@ pub async fn get_transaction(tx_hash: HashType) -> Result<Transaction, ServerFnE
.get_transaction(tx_hash)
.await
.map_err(|e| ServerFnError::ServerError(format!("RPC error: {e}")))
.and_then(|opt| {
opt.ok_or_else(|| ServerFnError::ServerError("Transaction not found".to_owned()))
})
}
/// Get blocks with pagination

View File

@ -84,7 +84,7 @@ pub fn TransactionPage() -> impl IntoView {
} = witness_set;
let program_id_str = program_id.to_string();
let proof_len = proof.0.len();
let proof_len = proof.map_or(0, |p| p.0.len());
let signatures_count = signatures_and_public_keys.len();
view! {
@ -183,7 +183,7 @@ pub fn TransactionPage() -> impl IntoView {
proof,
} = witness_set;
let proof_len = proof.0.len();
let proof_len = proof.map_or(0, |p| p.0.len());
view! {
<div class="transaction-details">
<h2>"Privacy-Preserving Transaction Details"</h2>

View File

@ -28,4 +28,3 @@ async-stream.workspace = true
[dev-dependencies]
tempfile.workspace = true

View File

@ -6,14 +6,14 @@ use common::{
block::{BedrockStatus, Block, BlockId},
transaction::NSSATransaction,
};
use nssa::{Account, AccountId, V02State};
use nssa::{Account, AccountId, V03State};
use storage::indexer::RocksDBIO;
use tokio::sync::RwLock;
#[derive(Clone)]
pub struct IndexerStore {
dbio: Arc<RocksDBIO>,
current_state: Arc<RwLock<V02State>>,
current_state: Arc<RwLock<V03State>>,
}
impl IndexerStore {
@ -24,7 +24,7 @@ impl IndexerStore {
pub fn open_db_with_genesis(
location: &Path,
genesis_block: &Block,
initial_state: &V02State,
initial_state: &V03State,
) -> Result<Self> {
let dbio = RocksDBIO::open_or_create(location, genesis_block, initial_state)?;
let current_state = dbio.final_state()?;
@ -46,7 +46,7 @@ impl IndexerStore {
Ok(self.dbio.get_meta_last_block_in_db()?)
}
pub fn get_block_at_id(&self, id: u64) -> Result<Block> {
pub fn get_block_at_id(&self, id: u64) -> Result<Option<Block>> {
Ok(self.dbio.get_block(id)?)
}
@ -54,20 +54,25 @@ impl IndexerStore {
Ok(self.dbio.get_block_batch(before, limit)?)
}
pub fn get_transaction_by_hash(&self, tx_hash: [u8; 32]) -> Result<NSSATransaction> {
let block = self.get_block_at_id(self.dbio.get_block_id_by_tx_hash(tx_hash)?)?;
let transaction = block
pub fn get_transaction_by_hash(&self, tx_hash: [u8; 32]) -> Result<Option<NSSATransaction>> {
let Some(block_id) = self.dbio.get_block_id_by_tx_hash(tx_hash)? else {
return Ok(None);
};
let Some(block) = self.get_block_at_id(block_id)? else {
return Ok(None);
};
Ok(block
.body
.transactions
.iter()
.find(|enc_tx| enc_tx.hash().0 == tx_hash)
.ok_or_else(|| anyhow::anyhow!("Transaction not found in DB"))?;
Ok(transaction.clone())
.into_iter()
.find(|enc_tx| enc_tx.hash().0 == tx_hash))
}
pub fn get_block_by_hash(&self, hash: [u8; 32]) -> Result<Block> {
self.get_block_at_id(self.dbio.get_block_id_by_hash(hash)?)
pub fn get_block_by_hash(&self, hash: [u8; 32]) -> Result<Option<Block>> {
let Some(id) = self.dbio.get_block_id_by_hash(hash)? else {
return Ok(None);
};
self.get_block_at_id(id)
}
pub fn get_transactions_by_account(
@ -93,14 +98,14 @@ impl IndexerStore {
.expect("Must be set at the DB startup")
}
pub fn get_state_at_block(&self, block_id: u64) -> Result<V02State> {
pub fn get_state_at_block(&self, block_id: u64) -> Result<V03State> {
Ok(self.dbio.calculate_state_for_id(block_id)?)
}
/// Recalculation of final state directly from DB.
///
/// Used for indexer healthcheck.
pub fn recalculate_final_state(&self) -> Result<V02State> {
pub fn recalculate_final_state(&self) -> Result<V03State> {
Ok(self.dbio.final_state()?)
}
@ -167,11 +172,11 @@ mod tests {
let storage = IndexerStore::open_db_with_genesis(
home.as_ref(),
&genesis_block(),
&nssa::V02State::new_with_genesis_accounts(&[(acc1(), 10000), (acc2(), 20000)], &[]),
&nssa::V03State::new_with_genesis_accounts(&[(acc1(), 10000), (acc2(), 20000)], &[]),
)
.unwrap();
let block = storage.get_block_at_id(1).unwrap();
let block = storage.get_block_at_id(1).unwrap().unwrap();
let final_id = storage.get_last_block_id().unwrap();
assert_eq!(block.header.hash, genesis_block().header.hash);
@ -185,7 +190,7 @@ mod tests {
let storage = IndexerStore::open_db_with_genesis(
home.as_ref(),
&genesis_block(),
&nssa::V02State::new_with_genesis_accounts(&[(acc1(), 10000), (acc2(), 20000)], &[]),
&nssa::V03State::new_with_genesis_accounts(&[(acc1(), 10000), (acc2(), 20000)], &[]),
)
.unwrap();

View File

@ -80,7 +80,7 @@ impl IndexerCore {
.map(|acc_data| (acc_data.account_id, acc_data.balance))
.collect();
let mut state = nssa::V02State::new_with_genesis_accounts(&init_accs, &initial_commitments);
let mut state = nssa::V03State::new_with_genesis_accounts(&init_accs, &initial_commitments);
// ToDo: Remove after testnet
state.add_pinata_program(PINATA_BASE58.parse().unwrap());

View File

@ -21,7 +21,6 @@ log.workspace = true
jsonrpsee.workspace = true
serde_json.workspace = true
futures.workspace = true
async-trait = "0.1.89"
arc-swap = "1.8.1"
[features]

View File

@ -51,32 +51,34 @@ RUN cargo chef prepare --bin indexer_service --recipe-path recipe.json
FROM chef AS builder
COPY --from=planner /indexer_service/recipe.json recipe.json
# Build dependencies only (this layer will be cached)
RUN cargo chef cook --bin indexer_service --release --recipe-path recipe.json
RUN --mount=type=cache,target=/usr/local/cargo/registry/index \
--mount=type=cache,target=/usr/local/cargo/registry/cache \
--mount=type=cache,target=/usr/local/cargo/git \
--mount=type=cache,target=/indexer_service/target \
cargo chef cook --bin indexer_service --release --recipe-path recipe.json
# Copy source code
COPY . .
# Build the actual application
RUN cargo build --release --bin indexer_service
# Strip debug symbols to reduce binary size
RUN strip /indexer_service/target/release/indexer_service
# Build the actual application and copy the binary out of the cache mount
RUN --mount=type=cache,target=/usr/local/cargo/registry/index \
--mount=type=cache,target=/usr/local/cargo/registry/cache \
--mount=type=cache,target=/usr/local/cargo/git \
--mount=type=cache,target=/indexer_service/target \
cargo build --release --bin indexer_service \
&& strip /indexer_service/target/release/indexer_service \
&& cp /indexer_service/target/release/indexer_service /usr/local/bin/indexer_service
# Runtime stage - minimal image
FROM debian:trixie-slim
# Install runtime dependencies
RUN apt-get update \
&& apt-get install -y gosu jq \
&& rm -rf /var/lib/apt/lists/*
# Create non-root user for security
RUN useradd -m -u 1000 -s /bin/bash indexer_service_user && \
mkdir -p /indexer_service /etc/indexer_service && \
chown -R indexer_service_user:indexer_service_user /indexer_service /etc/indexer_service
mkdir -p /indexer_service /etc/indexer_service /var/lib/indexer_service && \
chown -R indexer_service_user:indexer_service_user /indexer_service /etc/indexer_service /var/lib/indexer_service
# Copy binary from builder
COPY --from=builder --chown=indexer_service_user:indexer_service_user /indexer_service/target/release/indexer_service /usr/local/bin/indexer_service
COPY --from=builder --chown=indexer_service_user:indexer_service_user /usr/local/bin/indexer_service /usr/local/bin/indexer_service
# Copy r0vm binary from builder
COPY --from=builder --chown=indexer_service_user:indexer_service_user /usr/local/bin/r0vm /usr/local/bin/r0vm
@ -84,9 +86,7 @@ COPY --from=builder --chown=indexer_service_user:indexer_service_user /usr/local
# Copy logos blockchain circuits from builder
COPY --from=builder --chown=indexer_service_user:indexer_service_user /root/.logos-blockchain-circuits /home/indexer_service_user/.logos-blockchain-circuits
# Copy entrypoint script
COPY indexer/service/docker-entrypoint.sh /docker-entrypoint.sh
RUN chmod +x /docker-entrypoint.sh
VOLUME /var/lib/indexer_service
# Expose default port
EXPOSE 8779
@ -105,9 +105,7 @@ HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \
# Run the application
ENV RUST_LOG=info
USER root
ENTRYPOINT ["/docker-entrypoint.sh"]
USER indexer_service_user
WORKDIR /indexer_service
CMD ["indexer_service", "/etc/indexer_service/indexer_config.json"]

View File

@ -11,50 +11,50 @@
"channel_id": "0101010101010101010101010101010101010101010101010101010101010101",
"initial_accounts": [
{
"account_id": "6iArKUXxhUJqS7kCaPNhwMWt3ro71PDyBj7jwAyE2VQV",
"account_id": "CbgR6tj5kWx5oziiFptM7jMvrQeYY3Mzaao6ciuhSr2r",
"balance": 10000
},
{
"account_id": "7wHg9sbJwc6h3NP1S9bekfAzB8CHifEcxKswCKUt3YQo",
"account_id": "2RHZhw9h534Zr3eq2RGhQete2Hh667foECzXPmSkGni2",
"balance": 20000
}
],
"initial_commitments": [
{
"npk":[
177,
64,
1,
"npk": [
139,
19,
158,
11,
87,
38,
254,
159,
155,
231,
165,
1,
94,
64,
137,
243,
76,
249,
101,
251,
129,
33,
101,
189,
30,
42,
11,
191,
34,
103,
186,
227,
230
] ,
85,
206,
132,
228,
220,
114,
145,
89,
113,
156,
238,
142,
242,
74,
182,
91,
43,
100,
6,
190,
31,
15,
31,
88,
96,
204
],
"account": {
"program_owner": [
0,
@ -73,38 +73,38 @@
},
{
"npk": [
32,
67,
72,
164,
106,
53,
66,
239,
141,
15,
52,
230,
136,
177,
2,
236,
207,
243,
173,
134,
135,
210,
143,
87,
232,
33,
223,
54,
226,
10,
71,
215,
128,
194,
120,
113,
224,
4,
165
254,
143,
172,
24,
244,
243,
208,
65,
112,
118,
70,
217,
240,
69,
100,
129,
3,
121,
25,
213,
132,
42,
45
],
"account": {
"program_owner": [
@ -157,4 +157,4 @@
37,
37
]
}
}

View File

@ -10,5 +10,8 @@ services:
volumes:
# Mount configuration
- ./configs/indexer_config.json:/etc/indexer_service/indexer_config.json
# Mount data folder
- ./data:/var/lib/indexer_service
# Mount data volume
- indexer_data:/var/lib/indexer_service
volumes:
indexer_data:

View File

@ -1,29 +0,0 @@
#!/bin/sh
# This is an entrypoint script for the indexer_service Docker container,
# it's not meant to be executed outside of the container.
set -e
CONFIG="/etc/indexer_service/indexer_config.json"
# Check config file exists
if [ ! -f "$CONFIG" ]; then
echo "Config file not found: $CONFIG" >&2
exit 1
fi
# Parse home dir
HOME_DIR=$(jq -r '.home' "$CONFIG")
if [ -z "$HOME_DIR" ] || [ "$HOME_DIR" = "null" ]; then
echo "'home' key missing in config" >&2
exit 1
fi
# Give permissions to the data directory and switch to non-root user
if [ "$(id -u)" = "0" ]; then
mkdir -p "$HOME_DIR"
chown -R indexer_service_user:indexer_service_user "$HOME_DIR"
exec gosu indexer_service_user "$@"
fi

View File

@ -359,12 +359,16 @@ impl From<ProgramDeploymentMessage> for nssa::program_deployment_transaction::Me
// WitnessSet conversions
// ============================================================================
impl TryFrom<nssa::public_transaction::WitnessSet> for WitnessSet {
type Error = ();
fn try_from(_value: nssa::public_transaction::WitnessSet) -> Result<Self, Self::Error> {
// Public transaction witness sets don't have proofs, so we can't convert them directly
Err(())
impl From<nssa::public_transaction::WitnessSet> for WitnessSet {
fn from(value: nssa::public_transaction::WitnessSet) -> Self {
Self {
signatures_and_public_keys: value
.signatures_and_public_keys()
.iter()
.map(|(sig, pk)| (sig.clone().into(), pk.clone().into()))
.collect(),
proof: None,
}
}
}
@ -376,7 +380,7 @@ impl From<nssa::privacy_preserving_transaction::witness_set::WitnessSet> for Wit
.into_iter()
.map(|(sig, pk)| (sig.into(), pk.into()))
.collect(),
proof: proof.into(),
proof: Some(proof.into()),
}
}
}
@ -396,7 +400,9 @@ impl TryFrom<WitnessSet> for nssa::privacy_preserving_transaction::witness_set::
Ok(Self::from_raw_parts(
signatures_and_public_keys,
proof.into(),
proof
.map(Into::into)
.ok_or_else(|| nssa::error::NssaError::InvalidInput("Missing proof".to_owned()))?,
))
}
}
@ -416,14 +422,7 @@ impl From<nssa::PublicTransaction> for PublicTransaction {
Self {
hash,
message: message.into(),
witness_set: WitnessSet {
signatures_and_public_keys: witness_set
.signatures_and_public_keys()
.iter()
.map(|(sig, pk)| (sig.clone().into(), pk.clone().into()))
.collect(),
proof: Proof(vec![]), // Public transactions don't have proofs
},
witness_set: witness_set.into(),
}
}
}

View File

@ -240,7 +240,7 @@ pub struct PrivacyPreservingMessage {
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct WitnessSet {
pub signatures_and_public_keys: Vec<(Signature, PublicKey)>,
pub proof: Proof,
pub proof: Option<Proof>,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]

View File

@ -30,16 +30,22 @@ pub trait Rpc {
async fn get_last_finalized_block_id(&self) -> Result<BlockId, ErrorObjectOwned>;
#[method(name = "getBlockById")]
async fn get_block_by_id(&self, block_id: BlockId) -> Result<Block, ErrorObjectOwned>;
async fn get_block_by_id(&self, block_id: BlockId) -> Result<Option<Block>, ErrorObjectOwned>;
#[method(name = "getBlockByHash")]
async fn get_block_by_hash(&self, block_hash: HashType) -> Result<Block, ErrorObjectOwned>;
async fn get_block_by_hash(
&self,
block_hash: HashType,
) -> Result<Option<Block>, ErrorObjectOwned>;
#[method(name = "getAccount")]
async fn get_account(&self, account_id: AccountId) -> Result<Account, ErrorObjectOwned>;
#[method(name = "getTransaction")]
async fn get_transaction(&self, tx_hash: HashType) -> Result<Transaction, ErrorObjectOwned>;
async fn get_transaction(
&self,
tx_hash: HashType,
) -> Result<Option<Transaction>, ErrorObjectOwned>;
#[method(name = "getBlocks")]
async fn get_blocks(

View File

@ -3,7 +3,7 @@ use std::net::SocketAddr;
use anyhow::{Context as _, Result};
pub use indexer_core::config::*;
use indexer_service_rpc::RpcServer as _;
use jsonrpsee::server::Server;
use jsonrpsee::server::{Server, ServerHandle};
use log::{error, info};
pub mod service;
@ -13,10 +13,11 @@ pub mod mock_service;
pub struct IndexerHandle {
addr: SocketAddr,
server_handle: Option<jsonrpsee::server::ServerHandle>,
/// Option because of `Drop` which forbids to simply move out of `self` in `stopped()`.
server_handle: Option<ServerHandle>,
}
impl IndexerHandle {
const fn new(addr: SocketAddr, server_handle: jsonrpsee::server::ServerHandle) -> Self {
const fn new(addr: SocketAddr, server_handle: ServerHandle) -> Self {
Self {
addr,
server_handle: Some(server_handle),
@ -28,6 +29,7 @@ impl IndexerHandle {
self.addr
}
/// Wait for all Indexer tasks to stop.
pub async fn stopped(mut self) {
let handle = self
.server_handle
@ -37,15 +39,11 @@ impl IndexerHandle {
handle.stopped().await;
}
#[expect(
clippy::redundant_closure_for_method_calls,
reason = "Clippy suggested path jsonrpsee::jsonrpsee_server::ServerHandle is not accessible"
)]
#[must_use]
pub fn is_stopped(&self) -> bool {
pub fn is_healthy(&self) -> bool {
self.server_handle
.as_ref()
.is_none_or(|handle| handle.is_stopped())
.is_some_and(|handle| !handle.is_stopped())
}
}

View File

@ -15,7 +15,10 @@ use indexer_service_protocol::{
ProgramDeploymentTransaction, ProgramId, PublicMessage, PublicTransaction, Signature,
Transaction, WitnessSet,
};
use jsonrpsee::{core::SubscriptionResult, types::ErrorObjectOwned};
use jsonrpsee::{
core::{SubscriptionResult, async_trait},
types::ErrorObjectOwned,
};
/// A mock implementation of the `IndexerService` RPC for testing purposes.
pub struct MockIndexerService {
@ -92,7 +95,7 @@ impl MockIndexerService {
},
witness_set: WitnessSet {
signatures_and_public_keys: vec![],
proof: indexer_service_protocol::Proof(vec![0; 32]),
proof: None,
},
}),
// PrivacyPreserving transactions
@ -124,7 +127,7 @@ impl MockIndexerService {
},
witness_set: WitnessSet {
signatures_and_public_keys: vec![],
proof: indexer_service_protocol::Proof(vec![0; 32]),
proof: Some(indexer_service_protocol::Proof(vec![0; 32])),
},
}),
// ProgramDeployment transactions (rare)
@ -171,7 +174,7 @@ impl MockIndexerService {
}
}
#[async_trait::async_trait]
#[async_trait]
impl indexer_service_rpc::RpcServer for MockIndexerService {
async fn subscribe_to_finalized_blocks(
&self,
@ -198,26 +201,23 @@ impl indexer_service_rpc::RpcServer for MockIndexerService {
})
}
async fn get_block_by_id(&self, block_id: BlockId) -> Result<Block, ErrorObjectOwned> {
self.blocks
async fn get_block_by_id(&self, block_id: BlockId) -> Result<Option<Block>, ErrorObjectOwned> {
Ok(self
.blocks
.iter()
.find(|b| b.header.block_id == block_id)
.cloned()
.ok_or_else(|| {
ErrorObjectOwned::owned(
-32001,
format!("Block with ID {block_id} not found"),
None::<()>,
)
})
.cloned())
}
async fn get_block_by_hash(&self, block_hash: HashType) -> Result<Block, ErrorObjectOwned> {
self.blocks
async fn get_block_by_hash(
&self,
block_hash: HashType,
) -> Result<Option<Block>, ErrorObjectOwned> {
Ok(self
.blocks
.iter()
.find(|b| b.header.hash == block_hash)
.cloned()
.ok_or_else(|| ErrorObjectOwned::owned(-32001, "Block with hash not found", None::<()>))
.cloned())
}
async fn get_account(&self, account_id: AccountId) -> Result<Account, ErrorObjectOwned> {
@ -227,11 +227,11 @@ impl indexer_service_rpc::RpcServer for MockIndexerService {
.ok_or_else(|| ErrorObjectOwned::owned(-32001, "Account not found", None::<()>))
}
async fn get_transaction(&self, tx_hash: HashType) -> Result<Transaction, ErrorObjectOwned> {
self.transactions
.get(&tx_hash)
.map(|(tx, _)| tx.clone())
.ok_or_else(|| ErrorObjectOwned::owned(-32001, "Transaction not found", None::<()>))
async fn get_transaction(
&self,
tx_hash: HashType,
) -> Result<Option<Transaction>, ErrorObjectOwned> {
Ok(self.transactions.get(&tx_hash).map(|(tx, _)| tx.clone()))
}
async fn get_blocks(

View File

@ -7,7 +7,7 @@ use indexer_core::{IndexerCore, config::IndexerConfig};
use indexer_service_protocol::{Account, AccountId, Block, BlockId, HashType, Transaction};
use jsonrpsee::{
SubscriptionSink,
core::{Serialize, SubscriptionResult},
core::{Serialize, SubscriptionResult, async_trait},
types::{ErrorCode, ErrorObject, ErrorObjectOwned},
};
use log::{debug, error, info, warn};
@ -30,7 +30,7 @@ impl IndexerService {
}
}
#[async_trait::async_trait]
#[async_trait]
impl indexer_service_rpc::RpcServer for IndexerService {
async fn subscribe_to_finalized_blocks(
&self,
@ -52,22 +52,25 @@ impl indexer_service_rpc::RpcServer for IndexerService {
self.indexer.store.get_last_block_id().map_err(db_error)
}
async fn get_block_by_id(&self, block_id: BlockId) -> Result<Block, ErrorObjectOwned> {
async fn get_block_by_id(&self, block_id: BlockId) -> Result<Option<Block>, ErrorObjectOwned> {
Ok(self
.indexer
.store
.get_block_at_id(block_id)
.map_err(db_error)?
.into())
.map(Into::into))
}
async fn get_block_by_hash(&self, block_hash: HashType) -> Result<Block, ErrorObjectOwned> {
async fn get_block_by_hash(
&self,
block_hash: HashType,
) -> Result<Option<Block>, ErrorObjectOwned> {
Ok(self
.indexer
.store
.get_block_by_hash(block_hash.0)
.map_err(db_error)?
.into())
.map(Into::into))
}
async fn get_account(&self, account_id: AccountId) -> Result<Account, ErrorObjectOwned> {
@ -80,13 +83,16 @@ impl indexer_service_rpc::RpcServer for IndexerService {
.into())
}
async fn get_transaction(&self, tx_hash: HashType) -> Result<Transaction, ErrorObjectOwned> {
async fn get_transaction(
&self,
tx_hash: HashType,
) -> Result<Option<Transaction>, ErrorObjectOwned> {
Ok(self
.indexer
.store
.get_transaction_by_hash(tx_hash.0)
.map_err(db_error)?
.into())
.map(Into::into))
}
async fn get_blocks(

View File

@ -11,7 +11,7 @@ workspace = true
nssa_core = { workspace = true, features = ["host"] }
nssa.workspace = true
sequencer_core = { workspace = true, features = ["default", "testnet"] }
sequencer_runner.workspace = true
sequencer_service.workspace = true
wallet.workspace = true
common.workspace = true
key_protocol.workspace = true
@ -19,6 +19,7 @@ indexer_service.workspace = true
serde_json.workspace = true
token_core.workspace = true
indexer_service_rpc.workspace = true
sequencer_service_rpc = { workspace = true, features = ["client"] }
wallet-ffi.workspace = true
url.workspace = true
@ -26,11 +27,9 @@ url.workspace = true
anyhow.workspace = true
env_logger.workspace = true
log.workspace = true
base64.workspace = true
tokio = { workspace = true, features = ["rt-multi-thread", "macros"] }
hex.workspace = true
tempfile.workspace = true
borsh.workspace = true
bytesize.workspace = true
futures.workspace = true
testcontainers = { version = "0.27.0", features = ["docker-compose"] }

View File

@ -59,11 +59,11 @@ impl InitialData {
let mut private_charlie_key_chain = KeyChain::new_os_random();
let mut private_charlie_account_id =
AccountId::from(&private_charlie_key_chain.nullifer_public_key);
AccountId::from(&private_charlie_key_chain.nullifier_public_key);
let mut private_david_key_chain = KeyChain::new_os_random();
let mut private_david_account_id =
AccountId::from(&private_david_key_chain.nullifer_public_key);
AccountId::from(&private_david_key_chain.nullifier_public_key);
// Ensure consistent ordering
if private_charlie_account_id > private_david_account_id {
@ -120,7 +120,7 @@ impl InitialData {
self.private_accounts
.iter()
.map(|(key_chain, account)| CommitmentsInitialData {
npk: key_chain.nullifer_public_key.clone(),
npk: key_chain.nullifier_public_key.clone(),
account: account.clone(),
})
.collect()
@ -138,7 +138,7 @@ impl InitialData {
})
})
.chain(self.private_accounts.iter().map(|(key_chain, account)| {
let account_id = AccountId::from(&key_chain.nullifer_public_key);
let account_id = AccountId::from(&key_chain.nullifier_public_key);
InitialAccountData::Private(Box::new(InitialAccountDataPrivate {
account_id,
account: account.clone(),
@ -204,7 +204,6 @@ pub fn sequencer_config(
Ok(SequencerConfig {
home,
override_rust_log: None,
genesis_id: 1,
is_genesis_random: true,
max_num_tx_in_block,
@ -212,7 +211,6 @@ pub fn sequencer_config(
mempool_max_size,
block_create_timeout,
retry_pending_blocks_timeout: Duration::from_secs(120),
port: 0,
initial_accounts: initial_data.sequencer_initial_accounts(),
initial_commitments: initial_data.sequencer_initial_commitments(),
signing_key: [37; 32],
@ -236,7 +234,6 @@ pub fn wallet_config(
initial_data: &InitialData,
) -> Result<WalletConfig> {
Ok(WalletConfig {
override_rust_log: None,
sequencer_addr: addr_to_url(UrlProtocol::Http, sequencer_addr)
.context("Failed to convert sequencer addr to URL")?,
seq_poll_timeout: Duration::from_secs(30),

View File

@ -3,15 +3,15 @@
use std::{net::SocketAddr, path::PathBuf, sync::LazyLock};
use anyhow::{Context as _, Result, bail};
use base64::{Engine as _, engine::general_purpose::STANDARD as BASE64};
use common::{HashType, sequencer_client::SequencerClient, transaction::NSSATransaction};
use common::{HashType, transaction::NSSATransaction};
use futures::FutureExt as _;
use indexer_service::IndexerHandle;
use log::{debug, error, warn};
use nssa::{AccountId, PrivacyPreservingTransaction};
use nssa_core::Commitment;
use sequencer_core::indexer_client::{IndexerClient, IndexerClientTrait as _};
use sequencer_runner::SequencerHandle;
use sequencer_service::SequencerHandle;
use sequencer_service_rpc::{RpcClient as _, SequencerClient, SequencerClientBuilder};
use tempfile::TempDir;
use testcontainers::compose::DockerCompose;
use wallet::{WalletCore, config::WalletConfigOverrides};
@ -38,7 +38,8 @@ pub struct TestContext {
indexer_client: IndexerClient,
wallet: WalletCore,
wallet_password: String,
sequencer_handle: SequencerHandle,
/// Optional to move out value in Drop.
sequencer_handle: Option<SequencerHandle>,
indexer_handle: IndexerHandle,
bedrock_compose: DockerCompose,
_temp_indexer_dir: TempDir,
@ -90,8 +91,9 @@ impl TestContext {
.context("Failed to convert sequencer addr to URL")?;
let indexer_url = config::addr_to_url(config::UrlProtocol::Ws, indexer_handle.addr())
.context("Failed to convert indexer addr to URL")?;
let sequencer_client =
SequencerClient::new(sequencer_url).context("Failed to create sequencer client")?;
let sequencer_client = SequencerClientBuilder::default()
.build(sequencer_url)
.context("Failed to create sequencer client")?;
let indexer_client = IndexerClient::new(&indexer_url)
.await
.context("Failed to create indexer client")?;
@ -102,7 +104,7 @@ impl TestContext {
wallet,
wallet_password,
bedrock_compose,
sequencer_handle,
sequencer_handle: Some(sequencer_handle),
indexer_handle,
_temp_indexer_dir: temp_indexer_dir,
_temp_sequencer_dir: temp_sequencer_dir,
@ -229,7 +231,7 @@ impl TestContext {
)
.context("Failed to create Sequencer config")?;
let sequencer_handle = sequencer_runner::startup_sequencer(config).await?;
let sequencer_handle = sequencer_service::run(config, 0).await?;
Ok((sequencer_handle, temp_sequencer_dir))
}
@ -333,18 +335,20 @@ impl Drop for TestContext {
wallet_password: _,
} = self;
if sequencer_handle.is_finished() {
let Err(err) = self
.sequencer_handle
.run_forever()
let sequencer_handle = sequencer_handle
.take()
.expect("Sequencer handle should be present in TestContext drop");
if !sequencer_handle.is_healthy() {
let Err(err) = sequencer_handle
.failed()
.now_or_never()
.expect("Future is finished and should be ready");
.expect("Sequencer handle should not be running");
error!(
"Sequencer handle has unexpectedly finished before TestContext drop with error: {err:#}"
"Sequencer handle has unexpectedly stopped before TestContext drop with error: {err:#}"
);
}
if indexer_handle.is_stopped() {
if !indexer_handle.is_healthy() {
error!("Indexer handle has unexpectedly stopped before TestContext drop");
}
@ -459,15 +463,8 @@ pub async fn fetch_privacy_preserving_tx(
seq_client: &SequencerClient,
tx_hash: HashType,
) -> PrivacyPreservingTransaction {
let transaction_encoded = seq_client
.get_transaction_by_hash(tx_hash)
.await
.unwrap()
.transaction
.unwrap();
let tx = seq_client.get_transaction(tx_hash).await.unwrap().unwrap();
let tx_bytes = BASE64.decode(transaction_encoded).unwrap();
let tx = borsh::from_slice(&tx_bytes).unwrap();
match tx {
NSSATransaction::PrivacyPreserving(privacy_preserving_transaction) => {
privacy_preserving_transaction
@ -480,8 +477,10 @@ pub async fn verify_commitment_is_in_state(
commitment: Commitment,
seq_client: &SequencerClient,
) -> bool {
matches!(
seq_client.get_proof_for_commitment(commitment).await,
Ok(Some(_))
)
seq_client
.get_proof_for_commitment(commitment)
.await
.ok()
.flatten()
.is_some()
}

View File

@ -7,6 +7,7 @@ use anyhow::Result;
use integration_tests::TestContext;
use log::info;
use nssa::program::Program;
use sequencer_service_rpc::RpcClient as _;
use tokio::test;
use wallet::cli::{
Command,
@ -21,8 +22,7 @@ async fn get_existing_account() -> Result<()> {
let account = ctx
.sequencer_client()
.get_account(ctx.existing_public_accounts()[0])
.await?
.account;
.await?;
assert_eq!(
account.program_owner,

View File

@ -9,6 +9,7 @@ use std::time::Duration;
use anyhow::Result;
use integration_tests::{TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, format_public_account_id};
use log::info;
use sequencer_service_rpc::RpcClient as _;
use tokio::test;
use wallet::cli::{
Command, SubcommandReturnValue,
@ -194,20 +195,14 @@ async fn amm_public() -> Result<()> {
let user_holding_a_acc = ctx
.sequencer_client()
.get_account(recipient_account_id_1)
.await?
.account;
.await?;
let user_holding_b_acc = ctx
.sequencer_client()
.get_account(recipient_account_id_2)
.await?
.account;
.await?;
let user_holding_lp_acc = ctx
.sequencer_client()
.get_account(user_holding_lp)
.await?
.account;
let user_holding_lp_acc = ctx.sequencer_client().get_account(user_holding_lp).await?;
assert_eq!(
u128::from_le_bytes(user_holding_a_acc.data[33..].try_into().unwrap()),
@ -243,20 +238,14 @@ async fn amm_public() -> Result<()> {
let user_holding_a_acc = ctx
.sequencer_client()
.get_account(recipient_account_id_1)
.await?
.account;
.await?;
let user_holding_b_acc = ctx
.sequencer_client()
.get_account(recipient_account_id_2)
.await?
.account;
.await?;
let user_holding_lp_acc = ctx
.sequencer_client()
.get_account(user_holding_lp)
.await?
.account;
let user_holding_lp_acc = ctx.sequencer_client().get_account(user_holding_lp).await?;
assert_eq!(
u128::from_le_bytes(user_holding_a_acc.data[33..].try_into().unwrap()),
@ -292,20 +281,14 @@ async fn amm_public() -> Result<()> {
let user_holding_a_acc = ctx
.sequencer_client()
.get_account(recipient_account_id_1)
.await?
.account;
.await?;
let user_holding_b_acc = ctx
.sequencer_client()
.get_account(recipient_account_id_2)
.await?
.account;
.await?;
let user_holding_lp_acc = ctx
.sequencer_client()
.get_account(user_holding_lp)
.await?
.account;
let user_holding_lp_acc = ctx.sequencer_client().get_account(user_holding_lp).await?;
assert_eq!(
u128::from_le_bytes(user_holding_a_acc.data[33..].try_into().unwrap()),
@ -342,20 +325,14 @@ async fn amm_public() -> Result<()> {
let user_holding_a_acc = ctx
.sequencer_client()
.get_account(recipient_account_id_1)
.await?
.account;
.await?;
let user_holding_b_acc = ctx
.sequencer_client()
.get_account(recipient_account_id_2)
.await?
.account;
.await?;
let user_holding_lp_acc = ctx
.sequencer_client()
.get_account(user_holding_lp)
.await?
.account;
let user_holding_lp_acc = ctx.sequencer_client().get_account(user_holding_lp).await?;
assert_eq!(
u128::from_le_bytes(user_holding_a_acc.data[33..].try_into().unwrap()),
@ -392,20 +369,14 @@ async fn amm_public() -> Result<()> {
let user_holding_a_acc = ctx
.sequencer_client()
.get_account(recipient_account_id_1)
.await?
.account;
.await?;
let user_holding_b_acc = ctx
.sequencer_client()
.get_account(recipient_account_id_2)
.await?
.account;
.await?;
let user_holding_lp_acc = ctx
.sequencer_client()
.get_account(user_holding_lp)
.await?
.account;
let user_holding_lp_acc = ctx.sequencer_client().get_account(user_holding_lp).await?;
assert_eq!(
u128::from_le_bytes(user_holding_a_acc.data[33..].try_into().unwrap()),

View File

@ -8,6 +8,7 @@ use integration_tests::{
use log::info;
use nssa::{AccountId, program::Program};
use nssa_core::{NullifierPublicKey, encryption::shared_key_derivation::Secp256k1Point};
use sequencer_service_rpc::RpcClient as _;
use tokio::test;
use wallet::cli::{
Command, SubcommandReturnValue,
@ -135,7 +136,7 @@ async fn deshielded_transfer_to_public_account() -> Result<()> {
let acc_2_balance = ctx.sequencer_client().get_account_balance(to).await?;
assert_eq!(from_acc.balance, 9900);
assert_eq!(acc_2_balance.balance, 20100);
assert_eq!(acc_2_balance, 20100);
info!("Successfully deshielded transfer to public account");
@ -175,7 +176,7 @@ async fn private_transfer_to_owned_account_using_claiming_path() -> Result<()> {
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_private_account_id(from),
to: None,
to_npk: Some(hex::encode(to_keys.nullifer_public_key.0)),
to_npk: Some(hex::encode(to_keys.nullifier_public_key.0)),
to_vpk: Some(hex::encode(to_keys.viewing_public_key.0)),
amount: 100,
});
@ -245,7 +246,7 @@ async fn shielded_transfer_to_owned_private_account() -> Result<()> {
let acc_from_balance = ctx.sequencer_client().get_account_balance(from).await?;
assert_eq!(acc_from_balance.balance, 9900);
assert_eq!(acc_from_balance, 9900);
assert_eq!(acc_to.balance, 20100);
info!("Successfully shielded transfer to owned private account");
@ -290,7 +291,7 @@ async fn shielded_transfer_to_foreign_account() -> Result<()> {
.await
);
assert_eq!(acc_1_balance.balance, 9900);
assert_eq!(acc_1_balance, 9900);
info!("Successfully shielded transfer to foreign account");
@ -335,7 +336,7 @@ async fn private_transfer_to_owned_account_continuous_run_path() -> Result<()> {
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_private_account_id(from),
to: None,
to_npk: Some(hex::encode(to_keys.nullifer_public_key.0)),
to_npk: Some(hex::encode(to_keys.nullifier_public_key.0)),
to_vpk: Some(hex::encode(to_keys.viewing_public_key.0)),
amount: 100,
});

View File

@ -4,6 +4,7 @@ use anyhow::Result;
use integration_tests::{TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, format_public_account_id};
use log::info;
use nssa::program::Program;
use sequencer_service_rpc::RpcClient as _;
use tokio::test;
use wallet::cli::{
Command, SubcommandReturnValue,
@ -41,8 +42,8 @@ async fn successful_transfer_to_existing_account() -> Result<()> {
info!("Balance of sender: {acc_1_balance:#?}");
info!("Balance of receiver: {acc_2_balance:#?}");
assert_eq!(acc_1_balance.balance, 9900);
assert_eq!(acc_2_balance.balance, 20100);
assert_eq!(acc_1_balance, 9900);
assert_eq!(acc_2_balance, 20100);
Ok(())
}
@ -97,8 +98,8 @@ pub async fn successful_transfer_to_new_account() -> Result<()> {
info!("Balance of sender: {acc_1_balance:#?}");
info!("Balance of receiver: {acc_2_balance:#?}");
assert_eq!(acc_1_balance.balance, 9900);
assert_eq!(acc_2_balance.balance, 100);
assert_eq!(acc_1_balance, 9900);
assert_eq!(acc_2_balance, 100);
Ok(())
}
@ -134,8 +135,8 @@ async fn failed_transfer_with_insufficient_balance() -> Result<()> {
info!("Balance of sender: {acc_1_balance:#?}");
info!("Balance of receiver: {acc_2_balance:#?}");
assert_eq!(acc_1_balance.balance, 10000);
assert_eq!(acc_2_balance.balance, 20000);
assert_eq!(acc_1_balance, 10000);
assert_eq!(acc_2_balance, 20000);
Ok(())
}
@ -171,8 +172,8 @@ async fn two_consecutive_successful_transfers() -> Result<()> {
info!("Balance of sender: {acc_1_balance:#?}");
info!("Balance of receiver: {acc_2_balance:#?}");
assert_eq!(acc_1_balance.balance, 9900);
assert_eq!(acc_2_balance.balance, 20100);
assert_eq!(acc_1_balance, 9900);
assert_eq!(acc_2_balance, 20100);
info!("First TX Success!");
@ -203,8 +204,8 @@ async fn two_consecutive_successful_transfers() -> Result<()> {
info!("Balance of sender: {acc_1_balance:#?}");
info!("Balance of receiver: {acc_2_balance:#?}");
assert_eq!(acc_1_balance.balance, 9800);
assert_eq!(acc_2_balance.balance, 20200);
assert_eq!(acc_1_balance, 9800);
assert_eq!(acc_2_balance, 20200);
info!("Second TX Success!");
@ -230,11 +231,7 @@ async fn initialize_public_account() -> Result<()> {
wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?;
info!("Checking correct execution");
let account = ctx
.sequencer_client()
.get_account(account_id)
.await?
.account;
let account = ctx.sequencer_client().get_account(account_id).await?;
assert_eq!(
account.program_owner,

View File

@ -8,11 +8,12 @@ use std::time::Duration;
use anyhow::Result;
use bytesize::ByteSize;
use common::{block::HashableBlockData, transaction::NSSATransaction};
use common::transaction::NSSATransaction;
use integration_tests::{
TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, config::SequencerPartialConfig,
};
use nssa::program::Program;
use sequencer_service_rpc::RpcClient as _;
use tokio::test;
#[test]
@ -36,7 +37,10 @@ async fn reject_oversized_transaction() -> Result<()> {
let tx = nssa::ProgramDeploymentTransaction::new(message);
// Try to submit the transaction and expect an error
let result = ctx.sequencer_client().send_tx_program(tx).await;
let result = ctx
.sequencer_client()
.send_transaction(NSSATransaction::ProgramDeployment(tx))
.await;
assert!(
result.is_err(),
@ -74,7 +78,10 @@ async fn accept_transaction_within_limit() -> Result<()> {
let tx = nssa::ProgramDeploymentTransaction::new(message);
// This should succeed
let result = ctx.sequencer_client().send_tx_program(tx).await;
let result = ctx
.sequencer_client()
.send_transaction(NSSATransaction::ProgramDeployment(tx))
.await;
assert!(
result.is_ok(),
@ -112,33 +119,38 @@ async fn transaction_deferred_to_next_block_when_current_full() -> Result<()> {
let burner_id = Program::new(burner_bytecode.clone())?.id();
let chain_caller_id = Program::new(chain_caller_bytecode.clone())?.id();
let initial_block_height = ctx.sequencer_client().get_last_block().await?.last_block;
let initial_block_height = ctx.sequencer_client().get_last_block_id().await?;
// Submit both program deployments
ctx.sequencer_client()
.send_tx_program(nssa::ProgramDeploymentTransaction::new(
nssa::program_deployment_transaction::Message::new(burner_bytecode),
.send_transaction(NSSATransaction::ProgramDeployment(
nssa::ProgramDeploymentTransaction::new(
nssa::program_deployment_transaction::Message::new(burner_bytecode),
),
))
.await?;
ctx.sequencer_client()
.send_tx_program(nssa::ProgramDeploymentTransaction::new(
nssa::program_deployment_transaction::Message::new(chain_caller_bytecode),
.send_transaction(NSSATransaction::ProgramDeployment(
nssa::ProgramDeploymentTransaction::new(
nssa::program_deployment_transaction::Message::new(chain_caller_bytecode),
),
))
.await?;
// Wait for first block
tokio::time::sleep(Duration::from_secs(TIME_TO_WAIT_FOR_BLOCK_SECONDS)).await;
let block1_response = ctx
let block1 = ctx
.sequencer_client()
.get_block(initial_block_height + 1)
.await?;
let block1: HashableBlockData = borsh::from_slice(&block1_response.block)?;
.await?
.unwrap();
// Check which program is in block 1
let get_program_ids = |block: &HashableBlockData| -> Vec<nssa::ProgramId> {
let get_program_ids = |block: &common::block::Block| -> Vec<nssa::ProgramId> {
block
.body
.transactions
.iter()
.filter_map(|tx| {
@ -168,11 +180,11 @@ async fn transaction_deferred_to_next_block_when_current_full() -> Result<()> {
// Wait for second block
tokio::time::sleep(Duration::from_secs(TIME_TO_WAIT_FOR_BLOCK_SECONDS)).await;
let block2_response = ctx
let block2 = ctx
.sequencer_client()
.get_block(initial_block_height + 2)
.await?;
let block2: HashableBlockData = borsh::from_slice(&block2_response.block)?;
.await?
.unwrap();
let block2_program_ids = get_program_ids(&block2);
// The other program should be in block 2

View File

@ -22,12 +22,8 @@ async fn indexer_test_run() -> Result<()> {
// RUN OBSERVATION
tokio::time::sleep(std::time::Duration::from_millis(L2_TO_L1_TIMEOUT_MILLIS)).await;
let last_block_seq = ctx
.sequencer_client()
.get_last_block()
.await
.unwrap()
.last_block;
let last_block_seq =
sequencer_service_rpc::RpcClient::get_last_block_id(ctx.sequencer_client()).await?;
info!("Last block on seq now is {last_block_seq}");
@ -100,20 +96,22 @@ async fn indexer_state_consistency() -> Result<()> {
tokio::time::sleep(Duration::from_secs(TIME_TO_WAIT_FOR_BLOCK_SECONDS)).await;
info!("Checking correct balance move");
let acc_1_balance = ctx
.sequencer_client()
.get_account_balance(ctx.existing_public_accounts()[0])
.await?;
let acc_2_balance = ctx
.sequencer_client()
.get_account_balance(ctx.existing_public_accounts()[1])
.await?;
let acc_1_balance = sequencer_service_rpc::RpcClient::get_account_balance(
ctx.sequencer_client(),
ctx.existing_public_accounts()[0],
)
.await?;
let acc_2_balance = sequencer_service_rpc::RpcClient::get_account_balance(
ctx.sequencer_client(),
ctx.existing_public_accounts()[1],
)
.await?;
info!("Balance of sender: {acc_1_balance:#?}");
info!("Balance of receiver: {acc_2_balance:#?}");
assert_eq!(acc_1_balance.balance, 9900);
assert_eq!(acc_2_balance.balance, 20100);
assert_eq!(acc_1_balance, 9900);
assert_eq!(acc_2_balance, 20100);
// WAIT
info!("Waiting for indexer to parse blocks");
@ -131,16 +129,16 @@ async fn indexer_state_consistency() -> Result<()> {
.unwrap();
info!("Checking correct state transition");
let acc1_seq_state = ctx
.sequencer_client()
.get_account(ctx.existing_public_accounts()[0])
.await?
.account;
let acc2_seq_state = ctx
.sequencer_client()
.get_account(ctx.existing_public_accounts()[1])
.await?
.account;
let acc1_seq_state = sequencer_service_rpc::RpcClient::get_account(
ctx.sequencer_client(),
ctx.existing_public_accounts()[0],
)
.await?;
let acc2_seq_state = sequencer_service_rpc::RpcClient::get_account(
ctx.sequencer_client(),
ctx.existing_public_accounts()[1],
)
.await?;
assert_eq!(acc1_ind_state, acc1_seq_state.into());
assert_eq!(acc2_ind_state, acc2_seq_state.into());

View File

@ -14,6 +14,7 @@ use integration_tests::{
use key_protocol::key_management::key_tree::chain_index::ChainIndex;
use log::info;
use nssa::{AccountId, program::Program};
use sequencer_service_rpc::RpcClient as _;
use tokio::test;
use wallet::cli::{
Command, SubcommandReturnValue,
@ -70,7 +71,7 @@ async fn sync_private_account_with_non_zero_chain_index() -> Result<()> {
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_private_account_id(from),
to: None,
to_npk: Some(hex::encode(to_keys.nullifer_public_key.0)),
to_npk: Some(hex::encode(to_keys.nullifier_public_key.0)),
to_vpk: Some(hex::encode(to_keys.viewing_public_key.0)),
amount: 100,
});
@ -305,8 +306,8 @@ async fn restore_keys_from_seed() -> Result<()> {
.get_account_balance(to_account_id4)
.await?;
assert_eq!(acc3.balance, 91); // 102 - 11
assert_eq!(acc4.balance, 114); // 103 + 11
assert_eq!(acc3, 91); // 102 - 11
assert_eq!(acc4, 114); // 103 + 11
info!("Successfully restored keys and verified transactions");

View File

@ -13,6 +13,7 @@ use integration_tests::{
format_public_account_id, verify_commitment_is_in_state,
};
use log::info;
use sequencer_service_rpc::RpcClient as _;
use tokio::test;
use wallet::cli::{
Command, SubcommandReturnValue,
@ -46,8 +47,7 @@ async fn claim_pinata_to_uninitialized_public_account_fails_fast() -> Result<()>
let pinata_balance_pre = ctx
.sequencer_client()
.get_account_balance(PINATA_BASE58.parse().unwrap())
.await?
.balance;
.await?;
let claim_result = wallet::cli::execute_subcommand(
ctx.wallet_mut(),
@ -70,8 +70,7 @@ async fn claim_pinata_to_uninitialized_public_account_fails_fast() -> Result<()>
let pinata_balance_post = ctx
.sequencer_client()
.get_account_balance(PINATA_BASE58.parse().unwrap())
.await?
.balance;
.await?;
assert_eq!(pinata_balance_post, pinata_balance_pre);
@ -102,8 +101,7 @@ async fn claim_pinata_to_uninitialized_private_account_fails_fast() -> Result<()
let pinata_balance_pre = ctx
.sequencer_client()
.get_account_balance(PINATA_BASE58.parse().unwrap())
.await?
.balance;
.await?;
let claim_result = wallet::cli::execute_subcommand(
ctx.wallet_mut(),
@ -126,8 +124,7 @@ async fn claim_pinata_to_uninitialized_private_account_fails_fast() -> Result<()
let pinata_balance_post = ctx
.sequencer_client()
.get_account_balance(PINATA_BASE58.parse().unwrap())
.await?
.balance;
.await?;
assert_eq!(pinata_balance_post, pinata_balance_pre);
@ -146,8 +143,7 @@ async fn claim_pinata_to_existing_public_account() -> Result<()> {
let pinata_balance_pre = ctx
.sequencer_client()
.get_account_balance(PINATA_BASE58.parse().unwrap())
.await?
.balance;
.await?;
wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?;
@ -158,14 +154,12 @@ async fn claim_pinata_to_existing_public_account() -> Result<()> {
let pinata_balance_post = ctx
.sequencer_client()
.get_account_balance(PINATA_BASE58.parse().unwrap())
.await?
.balance;
.await?;
let winner_balance_post = ctx
.sequencer_client()
.get_account_balance(ctx.existing_public_accounts()[0])
.await?
.balance;
.await?;
assert_eq!(pinata_balance_post, pinata_balance_pre - pinata_prize);
assert_eq!(winner_balance_post, 10000 + pinata_prize);
@ -187,8 +181,7 @@ async fn claim_pinata_to_existing_private_account() -> Result<()> {
let pinata_balance_pre = ctx
.sequencer_client()
.get_account_balance(PINATA_BASE58.parse().unwrap())
.await?
.balance;
.await?;
let result = wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?;
let SubcommandReturnValue::PrivacyPreservingTransfer { tx_hash: _ } = result else {
@ -211,8 +204,7 @@ async fn claim_pinata_to_existing_private_account() -> Result<()> {
let pinata_balance_post = ctx
.sequencer_client()
.get_account_balance(PINATA_BASE58.parse().unwrap())
.await?
.balance;
.await?;
assert_eq!(pinata_balance_post, pinata_balance_pre - pinata_prize);
@ -268,8 +260,7 @@ async fn claim_pinata_to_new_private_account() -> Result<()> {
let pinata_balance_pre = ctx
.sequencer_client()
.get_account_balance(PINATA_BASE58.parse().unwrap())
.await?
.balance;
.await?;
wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?;
@ -285,8 +276,7 @@ async fn claim_pinata_to_new_private_account() -> Result<()> {
let pinata_balance_post = ctx
.sequencer_client()
.get_account_balance(PINATA_BASE58.parse().unwrap())
.await?
.balance;
.await?;
assert_eq!(pinata_balance_post, pinata_balance_pre - pinata_prize);

View File

@ -6,11 +6,13 @@
use std::{path::PathBuf, time::Duration};
use anyhow::Result;
use common::transaction::NSSATransaction;
use integration_tests::{
NSSA_PROGRAM_FOR_TEST_DATA_CHANGER, TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext,
};
use log::info;
use nssa::{AccountId, program::Program};
use sequencer_service_rpc::RpcClient as _;
use tokio::test;
use wallet::cli::Command;
@ -47,18 +49,17 @@ async fn deploy_and_execute_program() -> Result<()> {
)?;
let witness_set = nssa::public_transaction::WitnessSet::for_message(&message, &[]);
let transaction = nssa::PublicTransaction::new(message, witness_set);
let _response = ctx.sequencer_client().send_tx_public(transaction).await?;
let _response = ctx
.sequencer_client()
.send_transaction(NSSATransaction::Public(transaction))
.await?;
info!("Waiting for next block creation");
// Waiting for long time as it may take some time for such a big transaction to be included in a
// block
tokio::time::sleep(Duration::from_secs(2 * TIME_TO_WAIT_FOR_BLOCK_SECONDS)).await;
let post_state_account = ctx
.sequencer_client()
.get_account(account_id)
.await?
.account;
let post_state_account = ctx.sequencer_client().get_account(account_id).await?;
assert_eq!(post_state_account.program_owner, data_changer.id());
assert_eq!(post_state_account.balance, 0);

View File

@ -14,6 +14,7 @@ use integration_tests::{
use key_protocol::key_management::key_tree::chain_index::ChainIndex;
use log::info;
use nssa::program::Program;
use sequencer_service_rpc::RpcClient as _;
use token_core::{TokenDefinition, TokenHolding};
use tokio::test;
use wallet::cli::{
@ -92,8 +93,7 @@ async fn create_and_transfer_public_token() -> Result<()> {
let definition_acc = ctx
.sequencer_client()
.get_account(definition_account_id)
.await?
.account;
.await?;
let token_definition = TokenDefinition::try_from(&definition_acc.data)?;
assert_eq!(definition_acc.program_owner, Program::token().id());
@ -110,8 +110,7 @@ async fn create_and_transfer_public_token() -> Result<()> {
let supply_acc = ctx
.sequencer_client()
.get_account(supply_account_id)
.await?
.account;
.await?;
// The account must be owned by the token program
assert_eq!(supply_acc.program_owner, Program::token().id());
@ -143,8 +142,7 @@ async fn create_and_transfer_public_token() -> Result<()> {
let supply_acc = ctx
.sequencer_client()
.get_account(supply_account_id)
.await?
.account;
.await?;
assert_eq!(supply_acc.program_owner, Program::token().id());
let token_holding = TokenHolding::try_from(&supply_acc.data)?;
assert_eq!(
@ -159,8 +157,7 @@ async fn create_and_transfer_public_token() -> Result<()> {
let recipient_acc = ctx
.sequencer_client()
.get_account(recipient_account_id)
.await?
.account;
.await?;
assert_eq!(recipient_acc.program_owner, Program::token().id());
let token_holding = TokenHolding::try_from(&recipient_acc.data)?;
assert_eq!(
@ -188,8 +185,7 @@ async fn create_and_transfer_public_token() -> Result<()> {
let definition_acc = ctx
.sequencer_client()
.get_account(definition_account_id)
.await?
.account;
.await?;
let token_definition = TokenDefinition::try_from(&definition_acc.data)?;
assert_eq!(
@ -205,8 +201,7 @@ async fn create_and_transfer_public_token() -> Result<()> {
let recipient_acc = ctx
.sequencer_client()
.get_account(recipient_account_id)
.await?
.account;
.await?;
let token_holding = TokenHolding::try_from(&recipient_acc.data)?;
assert_eq!(
@ -236,8 +231,7 @@ async fn create_and_transfer_public_token() -> Result<()> {
let definition_acc = ctx
.sequencer_client()
.get_account(definition_account_id)
.await?
.account;
.await?;
let token_definition = TokenDefinition::try_from(&definition_acc.data)?;
assert_eq!(
@ -253,8 +247,7 @@ async fn create_and_transfer_public_token() -> Result<()> {
let recipient_acc = ctx
.sequencer_client()
.get_account(recipient_account_id)
.await?
.account;
.await?;
let token_holding = TokenHolding::try_from(&recipient_acc.data)?;
assert_eq!(
@ -341,8 +334,7 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> {
let definition_acc = ctx
.sequencer_client()
.get_account(definition_account_id)
.await?
.account;
.await?;
let token_definition = TokenDefinition::try_from(&definition_acc.data)?;
assert_eq!(definition_acc.program_owner, Program::token().id());
@ -405,8 +397,7 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> {
let definition_acc = ctx
.sequencer_client()
.get_account(definition_account_id)
.await?
.account;
.await?;
let token_definition = TokenDefinition::try_from(&definition_acc.data)?;
assert_eq!(
@ -506,8 +497,7 @@ async fn create_token_with_private_definition() -> Result<()> {
let supply_acc = ctx
.sequencer_client()
.get_account(supply_account_id)
.await?
.account;
.await?;
assert_eq!(supply_acc.program_owner, Program::token().id());
let token_holding = TokenHolding::try_from(&supply_acc.data)?;
@ -586,8 +576,7 @@ async fn create_token_with_private_definition() -> Result<()> {
let recipient_acc = ctx
.sequencer_client()
.get_account(recipient_account_id_public)
.await?
.account;
.await?;
let token_holding = TokenHolding::try_from(&recipient_acc.data)?;
assert_eq!(
@ -882,8 +871,7 @@ async fn shielded_token_transfer() -> Result<()> {
let supply_acc = ctx
.sequencer_client()
.get_account(supply_account_id)
.await?
.account;
.await?;
let token_holding = TokenHolding::try_from(&supply_acc.data)?;
assert_eq!(
token_holding,
@ -1026,8 +1014,7 @@ async fn deshielded_token_transfer() -> Result<()> {
let recipient_acc = ctx
.sequencer_client()
.get_account(recipient_account_id)
.await?
.account;
.await?;
let token_holding = TokenHolding::try_from(&recipient_acc.data)?;
assert_eq!(
token_holding,
@ -1123,7 +1110,7 @@ async fn token_claiming_path_with_private_accounts() -> Result<()> {
let subcommand = TokenProgramAgnosticSubcommand::Mint {
definition: format_private_account_id(definition_account_id),
holder: None,
holder_npk: Some(hex::encode(holder_keys.nullifer_public_key.0)),
holder_npk: Some(hex::encode(holder_keys.nullifier_public_key.0)),
holder_vpk: Some(hex::encode(holder_keys.viewing_public_key.0)),
amount: mint_amount,
};

View File

@ -13,6 +13,7 @@ use std::time::{Duration, Instant};
use anyhow::Result;
use bytesize::ByteSize;
use common::transaction::NSSATransaction;
use integration_tests::{
TestContext,
config::{InitialData, SequencerPartialConfig},
@ -30,6 +31,7 @@ use nssa_core::{
account::{AccountWithMetadata, Nonce, data::Data},
encryption::ViewingPublicKey,
};
use sequencer_service_rpc::RpcClient as _;
use tokio::test;
pub(crate) struct TpsTestManager {
@ -153,10 +155,9 @@ pub async fn tps_test() -> Result<()> {
for (i, tx) in txs.into_iter().enumerate() {
let tx_hash = ctx
.sequencer_client()
.send_tx_public(tx)
.send_transaction(NSSATransaction::Public(tx))
.await
.unwrap()
.tx_hash;
.unwrap();
info!("Sent tx {i}");
tx_hashes.push(tx_hash);
}
@ -170,15 +171,13 @@ pub async fn tps_test() -> Result<()> {
let tx_obj = ctx
.sequencer_client()
.get_transaction_by_hash(*tx_hash)
.get_transaction(*tx_hash)
.await
.inspect_err(|err| {
log::warn!("Failed to get transaction by hash {tx_hash} with error: {err:#?}");
});
if let Ok(tx_obj) = tx_obj
&& tx_obj.transaction.is_some()
{
if tx_obj.is_ok_and(|opt| opt.is_some()) {
info!("Found tx {i} with hash {tx_hash}");
break;
}

View File

@ -606,7 +606,7 @@ fn test_wallet_ffi_get_private_account_keys() -> Result<()> {
.unwrap()
.0;
let expected_npk = &key_chain.nullifer_public_key;
let expected_npk = &key_chain.nullifier_public_key;
let expected_vpk = &key_chain.viewing_public_key;
assert_eq!(&keys.npk(), expected_npk);

View File

@ -17,10 +17,12 @@ serde.workspace = true
k256.workspace = true
sha2.workspace = true
rand.workspace = true
base58.workspace = true
hex.workspace = true
aes-gcm.workspace = true
bip39.workspace = true
hmac-sha512.workspace = true
thiserror.workspace = true
itertools.workspace = true
[dev-dependencies]
base58.workspace = true

View File

@ -172,7 +172,7 @@ mod tests {
7, 123, 125, 191, 233, 183, 201, 4, 20, 214, 155, 210, 45, 234, 27, 240, 194, 111, 97,
247, 155, 113, 122, 246, 192, 0, 70, 61, 76, 71, 70, 2,
]);
let expected_vsk: ViewingSecretKey = [
let expected_vsk = [
155, 90, 54, 75, 228, 130, 68, 201, 129, 251, 180, 195, 250, 64, 34, 230, 241, 204,
216, 50, 149, 156, 10, 67, 208, 74, 9, 10, 47, 59, 50, 202,
];
@ -211,7 +211,7 @@ mod tests {
124, 61, 40, 92, 33, 135, 3, 41, 200, 234, 3, 69, 102, 184, 57, 191, 106, 151, 194,
192, 103, 132, 141, 112, 249, 108, 192, 117, 24, 48, 70, 216,
];
let expected_npk: NullifierPublicKey = nssa_core::NullifierPublicKey([
let expected_npk = nssa_core::NullifierPublicKey([
116, 231, 246, 189, 145, 240, 37, 59, 219, 223, 216, 246, 116, 171, 223, 55, 197, 200,
134, 192, 221, 40, 218, 167, 239, 5, 11, 95, 147, 247, 162, 226,
]);

View File

@ -1,7 +1,7 @@
use std::{collections::BTreeMap, sync::Arc};
use std::collections::BTreeMap;
use anyhow::Result;
use common::sequencer_client::SequencerClient;
use nssa::{Account, AccountId};
use serde::{Deserialize, Serialize};
use crate::key_management::{
@ -197,40 +197,6 @@ impl<N: KeyNode> KeyTree<N> {
}
impl KeyTree<ChildKeysPrivate> {
/// Cleanup of all non-initialized accounts in a private tree.
///
/// For given `depth` checks children to a tree such that their `ChainIndex::depth(&self) <
/// depth`.
///
/// If account is default, removes them.
///
/// Chain must be parsed for accounts beforehand.
///
/// Fast, leaves gaps between accounts.
pub fn cleanup_tree_remove_uninit_for_depth(&mut self, depth: u32) {
let mut id_stack = vec![ChainIndex::root()];
while let Some(curr_id) = id_stack.pop() {
if let Some(node) = self.key_map.get(&curr_id)
&& node.value.1 == nssa::Account::default()
&& curr_id != ChainIndex::root()
{
let addr = node.account_id();
self.remove(addr);
}
let mut next_id = curr_id.nth_child(0);
while (next_id.depth()) < depth {
id_stack.push(next_id.clone());
next_id = match next_id.next_in_line() {
Some(id) => id,
None => break,
};
}
}
}
/// Cleanup of non-initialized accounts in a private tree.
///
/// If account is default, removes them, stops at first non-default account.
@ -259,56 +225,17 @@ impl KeyTree<ChildKeysPrivate> {
}
impl KeyTree<ChildKeysPublic> {
/// Cleanup of all non-initialized accounts in a public tree.
///
/// For given `depth` checks children to a tree such that their `ChainIndex::depth(&self) <
/// depth`.
///
/// If account is default, removes them.
///
/// Fast, leaves gaps between accounts.
pub async fn cleanup_tree_remove_ininit_for_depth(
&mut self,
depth: u32,
client: Arc<SequencerClient>,
) -> Result<()> {
let mut id_stack = vec![ChainIndex::root()];
while let Some(curr_id) = id_stack.pop() {
if let Some(node) = self.key_map.get(&curr_id) {
let address = node.account_id();
let node_acc = client.get_account(address).await?.account;
if node_acc == nssa::Account::default() && curr_id != ChainIndex::root() {
self.remove(address);
}
}
let mut next_id = curr_id.nth_child(0);
while (next_id.depth()) < depth {
id_stack.push(next_id.clone());
next_id = match next_id.next_in_line() {
Some(id) => id,
None => break,
};
}
}
Ok(())
}
/// Cleanup of non-initialized accounts in a public tree.
///
/// If account is default, removes them, stops at first non-default account.
///
/// Walks through tree in lairs of same depth using `ChainIndex::chain_ids_at_depth()`.
/// Walks through tree in layers of same depth using `ChainIndex::chain_ids_at_depth()`.
///
/// Slow, maintains tree consistency.
pub async fn cleanup_tree_remove_uninit_layered(
pub async fn cleanup_tree_remove_uninit_layered<F: Future<Output = Result<Account>>>(
&mut self,
depth: u32,
client: Arc<SequencerClient>,
get_account: impl Fn(AccountId) -> F,
) -> Result<()> {
let depth = usize::try_from(depth).expect("Depth is expected to fit in usize");
'outer: for i in (1..depth).rev() {
@ -316,7 +243,7 @@ impl KeyTree<ChildKeysPublic> {
for id in ChainIndex::chain_ids_at_depth(i) {
if let Some(node) = self.key_map.get(&id) {
let address = node.account_id();
let node_acc = client.get_account(address).await?.account;
let node_acc = get_account(address).await?;
if node_acc == nssa::Account::default() {
let addr = node.account_id();

View File

@ -10,16 +10,16 @@ use sha2::{Digest as _, digest::FixedOutput as _};
const NSSA_ENTROPY_BYTES: [u8; 32] = [0; 32];
#[derive(Debug)]
/// Seed holder. Non-clonable to ensure that different holders use different seeds.
/// Produces `TopSecretKeyHolder` objects.
#[derive(Debug)]
pub struct SeedHolder {
// ToDo: Needs to be vec as serde derives is not implemented for [u8; 64]
pub(crate) seed: Vec<u8>,
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
/// Secret spending key object. Can produce `PrivateKeyHolder` objects.
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct SecretSpendingKey(pub(crate) [u8; 32]);
pub type ViewingSecretKey = Scalar;

View File

@ -66,13 +66,13 @@ impl NSSAUserData {
) -> Result<Self> {
if !Self::valid_public_key_transaction_pairing_check(&default_accounts_keys) {
anyhow::bail!(
"Key transaction pairing check not satisfied, there is account_ids, which is not derived from keys"
"Key transaction pairing check not satisfied, there are public account_ids, which are not derived from keys"
);
}
if !Self::valid_private_key_transaction_pairing_check(&default_accounts_key_chains) {
anyhow::bail!(
"Key transaction pairing check not satisfied, there is account_ids, which is not derived from keys"
"Key transaction pairing check not satisfied, there are private account_ids, which are not derived from keys"
);
}

View File

@ -14,6 +14,7 @@ anyhow.workspace = true
thiserror.workspace = true
risc0-zkvm.workspace = true
serde.workspace = true
serde_with.workspace = true
sha2.workspace = true
rand.workspace = true
borsh.workspace = true
@ -37,4 +38,4 @@ test-case = "3.3.1"
[features]
default = []
prove = ["risc0-zkvm/prove"]
test-utils = []
test-utils = []

View File

@ -12,8 +12,8 @@ use crate::{NullifierPublicKey, account::Account};
/// DUMMY_COMMITMENT = hasher.digest()
/// ```
pub const DUMMY_COMMITMENT: Commitment = Commitment([
130, 75, 48, 230, 171, 101, 121, 141, 159, 118, 21, 74, 135, 248, 16, 255, 238, 156, 61, 24,
165, 33, 34, 172, 227, 30, 215, 20, 85, 47, 230, 29,
55, 228, 215, 207, 112, 221, 239, 49, 238, 79, 71, 135, 155, 15, 184, 45, 104, 74, 51, 211,
238, 42, 160, 243, 15, 124, 253, 62, 3, 229, 90, 27,
]);
/// The hash of the dummy commitment.
@ -24,8 +24,8 @@ pub const DUMMY_COMMITMENT: Commitment = Commitment([
/// DUMMY_COMMITMENT_HASH = hasher.digest()
/// ```
pub const DUMMY_COMMITMENT_HASH: [u8; 32] = [
    // SHA-256 digest of DUMMY_COMMITMENT, per the doc comment above this const.
    250, 237, 192, 113, 155, 101, 119, 30, 235, 183, 20, 84, 26, 32, 196, 229, 154, 74, 254, 249,
    129, 241, 118, 39, 41, 253, 141, 171, 184, 71, 8, 41,
];
#[derive(Serialize, Deserialize, BorshSerialize, BorshDeserialize)]
@ -50,10 +50,14 @@ impl std::fmt::Debug for Commitment {
impl Commitment {
/// Generates the commitment to a private account owned by user for npk:
/// SHA256(npk || `program_owner` || balance || nonce || SHA256(data)).
/// SHA256( `Comm_DS` || npk || `program_owner` || balance || nonce || SHA256(data)).
#[must_use]
pub fn new(npk: &NullifierPublicKey, account: &Account) -> Self {
const COMMITMENT_PREFIX: &[u8; 32] =
b"/LEE/v0.3/Commitment/\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00";
let mut bytes = Vec::new();
bytes.extend_from_slice(COMMITMENT_PREFIX);
bytes.extend_from_slice(&npk.to_byte_array());
let account_bytes_with_hashed_data = {
let mut this = Vec::new();

View File

@ -76,7 +76,7 @@ impl Nullifier {
/// Computes a nullifier for an account update.
#[must_use]
pub fn for_account_update(commitment: &Commitment, nsk: &NullifierSecretKey) -> Self {
const UPDATE_PREFIX: &[u8; 32] = b"/NSSA/v0.2/Nullifier/Update/\x00\x00\x00\x00";
const UPDATE_PREFIX: &[u8; 32] = b"/LEE/v0.3/Nullifier/Update/\x00\x00\x00\x00\x00";
let mut bytes = UPDATE_PREFIX.to_vec();
bytes.extend_from_slice(&commitment.to_byte_array());
bytes.extend_from_slice(nsk);
@ -86,7 +86,7 @@ impl Nullifier {
/// Computes a nullifier for an account initialization.
#[must_use]
pub fn for_account_initialization(npk: &NullifierPublicKey) -> Self {
const INIT_PREFIX: &[u8; 32] = b"/NSSA/v0.2/Nullifier/Initialize/";
const INIT_PREFIX: &[u8; 32] = b"/LEE/v0.3/Nullifier/Initialize/\x00";
let mut bytes = INIT_PREFIX.to_vec();
bytes.extend_from_slice(&npk.to_byte_array());
Self(Impl::hash_bytes(&bytes).as_bytes().try_into().unwrap())
@ -102,8 +102,8 @@ mod tests {
let commitment = Commitment((0..32_u8).collect::<Vec<_>>().try_into().unwrap());
let nsk = [0x42; 32];
let expected_nullifier = Nullifier([
148, 243, 116, 209, 140, 231, 211, 61, 35, 62, 114, 110, 143, 224, 82, 201, 221, 34,
53, 80, 185, 48, 174, 28, 203, 43, 94, 187, 85, 199, 115, 81,
70, 162, 122, 15, 33, 237, 244, 216, 89, 223, 90, 50, 94, 184, 210, 144, 174, 64, 189,
254, 62, 255, 5, 1, 139, 227, 194, 185, 16, 30, 55, 48,
]);
let nullifier = Nullifier::for_account_update(&commitment, &nsk);
assert_eq!(nullifier, expected_nullifier);
@ -116,8 +116,8 @@ mod tests {
255, 29, 105, 42, 186, 43, 11, 157, 168, 132, 225, 17, 163,
]);
let expected_nullifier = Nullifier([
1, 6, 59, 168, 16, 146, 65, 252, 255, 91, 48, 85, 116, 189, 110, 218, 110, 136, 163,
193, 245, 103, 51, 27, 235, 170, 215, 115, 97, 144, 36, 238,
149, 59, 95, 181, 2, 194, 20, 143, 72, 233, 104, 243, 59, 70, 67, 243, 110, 77, 109,
132, 139, 111, 51, 125, 128, 92, 107, 46, 252, 4, 20, 149,
]);
let nullifier = Nullifier::for_account_initialization(&npk);
assert_eq!(nullifier, expected_nullifier);

View File

@ -16,7 +16,7 @@ pub use program_deployment_transaction::ProgramDeploymentTransaction;
pub use program_methods::PRIVACY_PRESERVING_CIRCUIT_ID;
pub use public_transaction::PublicTransaction;
pub use signature::{PrivateKey, PublicKey, Signature};
pub use state::V02State;
pub use state::V03State;
pub mod encoding;
pub mod error;

View File

@ -32,11 +32,11 @@ impl EncryptedAccountData {
}
}
/// Computes the tag as the first byte of SHA256("/NSSA/v0.2/ViewTag/" || Npk || vpk).
/// Computes the tag as the first byte of SHA256("/LEE/v0.3/ViewTag/" || Npk || vpk).
#[must_use]
pub fn compute_view_tag(npk: &NullifierPublicKey, vpk: &ViewingPublicKey) -> ViewTag {
let mut hasher = Sha256::new();
hasher.update(b"/NSSA/v0.2/ViewTag/");
hasher.update(b"/LEE/v0.3/ViewTag/");
hasher.update(npk.to_byte_array());
hasher.update(vpk.to_bytes());
let digest: [u8; 32] = hasher.finalize().into();
@ -179,7 +179,7 @@ pub mod tests {
let expected_view_tag = {
let mut hasher = Sha256::new();
hasher.update(b"/NSSA/v0.2/ViewTag/");
hasher.update(b"/LEE/v0.3/ViewTag/");
hasher.update(npk.to_byte_array());
hasher.update(vpk.to_bytes());
let digest: [u8; 32] = hasher.finalize().into();

View File

@ -12,7 +12,7 @@ use sha2::{Digest as _, digest::FixedOutput as _};
use super::{message::Message, witness_set::WitnessSet};
use crate::{
AccountId, V02State,
AccountId, V03State,
error::NssaError,
privacy_preserving_transaction::{circuit::Proof, message::EncryptedAccountData},
};
@ -34,7 +34,7 @@ impl PrivacyPreservingTransaction {
pub(crate) fn validate_and_produce_public_state_diff(
&self,
state: &V02State,
state: &V03State,
) -> Result<HashMap<AccountId, Account>, NssaError> {
let message = &self.message;
let witness_set = &self.witness_set;

View File

@ -3,7 +3,7 @@ use nssa_core::account::AccountId;
use sha2::{Digest as _, digest::FixedOutput as _};
use crate::{
V02State, error::NssaError, program::Program, program_deployment_transaction::message::Message,
V03State, error::NssaError, program::Program, program_deployment_transaction::message::Message,
};
#[derive(Debug, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)]
@ -24,7 +24,7 @@ impl ProgramDeploymentTransaction {
pub(crate) fn validate_and_produce_public_state_diff(
&self,
state: &V02State,
state: &V03State,
) -> Result<Program, NssaError> {
// TODO: remove clone
let program = Program::new(self.message.bytecode.clone())?;

View File

@ -9,7 +9,7 @@ use nssa_core::{
use sha2::{Digest as _, digest::FixedOutput as _};
use crate::{
V02State, ensure,
V03State, ensure,
error::NssaError,
public_transaction::{Message, WitnessSet},
state::MAX_NUMBER_CHAINED_CALLS,
@ -69,7 +69,7 @@ impl PublicTransaction {
pub(crate) fn validate_and_produce_public_state_diff(
&self,
state: &V02State,
state: &V03State,
) -> Result<HashMap<AccountId, Account>, NssaError> {
let message = self.message();
let witness_set = self.witness_set();
@ -247,7 +247,7 @@ pub mod tests {
use sha2::{Digest as _, digest::FixedOutput as _};
use crate::{
AccountId, PrivateKey, PublicKey, PublicTransaction, Signature, V02State,
AccountId, PrivateKey, PublicKey, PublicTransaction, Signature, V03State,
error::NssaError,
program::Program,
public_transaction::{Message, WitnessSet},
@ -261,10 +261,10 @@ pub mod tests {
(key1, key2, addr1, addr2)
}
fn state_for_tests() -> V02State {
fn state_for_tests() -> V03State {
let (_, _, addr1, addr2) = keys_for_tests();
let initial_data = [(addr1, 10000), (addr2, 20000)];
V02State::new_with_genesis_accounts(&initial_data, &[])
V03State::new_with_genesis_accounts(&initial_data, &[])
}
fn transaction_for_tests() -> PublicTransaction {

View File

@ -1,13 +1,37 @@
use std::str::FromStr;
use rand::{Rng as _, rngs::OsRng};
use serde::{Deserialize, Serialize};
use serde_with::{DeserializeFromStr, SerializeDisplay};
use crate::error::NssaError;
// TODO: Remove Debug, Clone, Serialize, Deserialize, PartialEq and Eq for security reasons
// TODO: Implement Zeroize
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[derive(Clone, SerializeDisplay, DeserializeFromStr, PartialEq, Eq)]
pub struct PrivateKey([u8; 32]);
impl std::fmt::Debug for PrivateKey {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(self, f)
}
}
impl std::fmt::Display for PrivateKey {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", hex::encode(self.0))
}
}
impl FromStr for PrivateKey {
type Err = NssaError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let mut bytes = [0_u8; 32];
hex::decode_to_slice(s, &mut bytes).map_err(|_err| NssaError::InvalidPrivateKey)?;
Self::try_new(bytes)
}
}
impl PrivateKey {
#[must_use]
pub fn new_os_random() -> Self {

View File

@ -1,20 +1,39 @@
use std::str::FromStr;
use borsh::{BorshDeserialize, BorshSerialize};
use k256::elliptic_curve::sec1::ToEncodedPoint as _;
use nssa_core::account::AccountId;
use serde::{Deserialize, Serialize};
use serde_with::{DeserializeFromStr, SerializeDisplay};
use sha2::{Digest as _, Sha256};
use crate::{PrivateKey, error::NssaError};
#[derive(Clone, PartialEq, Eq, BorshSerialize, Serialize, Deserialize)]
#[derive(Clone, PartialEq, Eq, BorshSerialize, SerializeDisplay, DeserializeFromStr)]
pub struct PublicKey([u8; 32]);
impl std::fmt::Debug for PublicKey {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(self, f)
}
}
impl std::fmt::Display for PublicKey {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", hex::encode(self.0))
}
}
impl FromStr for PublicKey {
type Err = NssaError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let mut bytes = [0_u8; 32];
hex::decode_to_slice(s, &mut bytes)
.map_err(|_err| NssaError::InvalidPublicKey(secp256k1::Error::InvalidPublicKey))?;
Self::try_new(bytes)
}
}
impl BorshDeserialize for PublicKey {
fn deserialize_reader<R: std::io::Read>(reader: &mut R) -> std::io::Result<Self> {
let mut buf = [0_u8; 32];

View File

@ -107,13 +107,13 @@ impl BorshDeserialize for NullifierSet {
#[derive(Clone, BorshSerialize, BorshDeserialize)]
#[cfg_attr(test, derive(Debug, PartialEq, Eq))]
pub struct V02State {
pub struct V03State {
public_state: HashMap<AccountId, Account>,
private_state: (CommitmentSet, NullifierSet),
programs: HashMap<ProgramId, Program>,
}
impl V02State {
impl V03State {
#[must_use]
pub fn new_with_genesis_accounts(
initial_data: &[(AccountId, u128)],
@ -286,7 +286,7 @@ impl V02State {
}
// TODO: Testnet only. Refactor to prevent compilation on mainnet.
impl V02State {
impl V03State {
pub fn add_pinata_program(&mut self, account_id: AccountId) {
self.insert_program(Program::pinata());
@ -318,7 +318,7 @@ impl V02State {
}
#[cfg(any(test, feature = "test-utils"))]
impl V02State {
impl V03State {
pub fn force_insert_account(&mut self, account_id: AccountId, account: Account) {
self.public_state.insert(account_id, account);
}
@ -342,7 +342,7 @@ pub mod tests {
};
use crate::{
PublicKey, PublicTransaction, V02State,
PublicKey, PublicTransaction, V03State,
error::NssaError,
execute_and_prove,
privacy_preserving_transaction::{
@ -357,7 +357,7 @@ pub mod tests {
state::MAX_NUMBER_CHAINED_CALLS,
};
impl V02State {
impl V03State {
/// Include test programs in the builtin programs map.
#[must_use]
pub fn with_test_programs(mut self) -> Self {
@ -504,7 +504,7 @@ pub mod tests {
this
};
let state = V02State::new_with_genesis_accounts(&initial_data, &[]);
let state = V03State::new_with_genesis_accounts(&initial_data, &[]);
assert_eq!(state.public_state, expected_public_state);
assert_eq!(state.programs, expected_builtin_programs);
@ -512,7 +512,7 @@ pub mod tests {
#[test]
fn insert_program() {
let mut state = V02State::new_with_genesis_accounts(&[], &[]);
let mut state = V03State::new_with_genesis_accounts(&[], &[]);
let program_to_insert = Program::simple_balance_transfer();
let program_id = program_to_insert.id();
assert!(!state.programs.contains_key(&program_id));
@ -527,7 +527,7 @@ pub mod tests {
let key = PrivateKey::try_new([1; 32]).unwrap();
let account_id = AccountId::from(&PublicKey::new_from_private_key(&key));
let initial_data = [(account_id, 100_u128)];
let state = V02State::new_with_genesis_accounts(&initial_data, &[]);
let state = V03State::new_with_genesis_accounts(&initial_data, &[]);
let expected_account = &state.public_state[&account_id];
let account = state.get_account_by_id(account_id);
@ -538,7 +538,7 @@ pub mod tests {
#[test]
fn get_account_by_account_id_default_account() {
let addr2 = AccountId::new([0; 32]);
let state = V02State::new_with_genesis_accounts(&[], &[]);
let state = V03State::new_with_genesis_accounts(&[], &[]);
let expected_account = Account::default();
let account = state.get_account_by_id(addr2);
@ -548,7 +548,7 @@ pub mod tests {
#[test]
fn builtin_programs_getter() {
let state = V02State::new_with_genesis_accounts(&[], &[]);
let state = V03State::new_with_genesis_accounts(&[], &[]);
let builtin_programs = state.programs();
@ -560,7 +560,7 @@ pub mod tests {
let key = PrivateKey::try_new([1; 32]).unwrap();
let account_id = AccountId::from(&PublicKey::new_from_private_key(&key));
let initial_data = [(account_id, 100)];
let mut state = V02State::new_with_genesis_accounts(&initial_data, &[]);
let mut state = V03State::new_with_genesis_accounts(&initial_data, &[]);
let from = account_id;
let to = AccountId::new([2; 32]);
assert_eq!(state.get_account_by_id(to), Account::default());
@ -580,7 +580,7 @@ pub mod tests {
let key = PrivateKey::try_new([1; 32]).unwrap();
let account_id = AccountId::from(&PublicKey::new_from_private_key(&key));
let initial_data = [(account_id, 100)];
let mut state = V02State::new_with_genesis_accounts(&initial_data, &[]);
let mut state = V03State::new_with_genesis_accounts(&initial_data, &[]);
let from = account_id;
let from_key = key;
let to = AccountId::new([2; 32]);
@ -604,7 +604,7 @@ pub mod tests {
let account_id1 = AccountId::from(&PublicKey::new_from_private_key(&key1));
let account_id2 = AccountId::from(&PublicKey::new_from_private_key(&key2));
let initial_data = [(account_id1, 100), (account_id2, 200)];
let mut state = V02State::new_with_genesis_accounts(&initial_data, &[]);
let mut state = V03State::new_with_genesis_accounts(&initial_data, &[]);
let from = account_id2;
let from_key = key2;
let to = account_id1;
@ -627,7 +627,7 @@ pub mod tests {
let key2 = PrivateKey::try_new([2; 32]).unwrap();
let account_id2 = AccountId::from(&PublicKey::new_from_private_key(&key2));
let initial_data = [(account_id1, 100)];
let mut state = V02State::new_with_genesis_accounts(&initial_data, &[]);
let mut state = V03State::new_with_genesis_accounts(&initial_data, &[]);
let account_id3 = AccountId::new([3; 32]);
let balance_to_move = 5;
@ -649,7 +649,7 @@ pub mod tests {
fn program_should_fail_if_modifies_nonces() {
let initial_data = [(AccountId::new([1; 32]), 100)];
let mut state =
V02State::new_with_genesis_accounts(&initial_data, &[]).with_test_programs();
V03State::new_with_genesis_accounts(&initial_data, &[]).with_test_programs();
let account_ids = vec![AccountId::new([1; 32])];
let program_id = Program::nonce_changer_program().id();
let message =
@ -666,7 +666,7 @@ pub mod tests {
fn program_should_fail_if_output_accounts_exceed_inputs() {
let initial_data = [(AccountId::new([1; 32]), 100)];
let mut state =
V02State::new_with_genesis_accounts(&initial_data, &[]).with_test_programs();
V03State::new_with_genesis_accounts(&initial_data, &[]).with_test_programs();
let account_ids = vec![AccountId::new([1; 32])];
let program_id = Program::extra_output_program().id();
let message =
@ -683,7 +683,7 @@ pub mod tests {
fn program_should_fail_with_missing_output_accounts() {
let initial_data = [(AccountId::new([1; 32]), 100)];
let mut state =
V02State::new_with_genesis_accounts(&initial_data, &[]).with_test_programs();
V03State::new_with_genesis_accounts(&initial_data, &[]).with_test_programs();
let account_ids = vec![AccountId::new([1; 32]), AccountId::new([2; 32])];
let program_id = Program::missing_output_program().id();
let message =
@ -700,7 +700,7 @@ pub mod tests {
fn program_should_fail_if_modifies_program_owner_with_only_non_default_program_owner() {
let initial_data = [(AccountId::new([1; 32]), 0)];
let mut state =
V02State::new_with_genesis_accounts(&initial_data, &[]).with_test_programs();
V03State::new_with_genesis_accounts(&initial_data, &[]).with_test_programs();
let account_id = AccountId::new([1; 32]);
let account = state.get_account_by_id(account_id);
// Assert the target account only differs from the default account in the program owner
@ -723,7 +723,7 @@ pub mod tests {
#[test]
fn program_should_fail_if_modifies_program_owner_with_only_non_default_balance() {
let initial_data = [];
let mut state = V02State::new_with_genesis_accounts(&initial_data, &[])
let mut state = V03State::new_with_genesis_accounts(&initial_data, &[])
.with_test_programs()
.with_non_default_accounts_but_default_program_owners();
let account_id = AccountId::new([255; 32]);
@ -747,7 +747,7 @@ pub mod tests {
#[test]
fn program_should_fail_if_modifies_program_owner_with_only_non_default_nonce() {
let initial_data = [];
let mut state = V02State::new_with_genesis_accounts(&initial_data, &[])
let mut state = V03State::new_with_genesis_accounts(&initial_data, &[])
.with_test_programs()
.with_non_default_accounts_but_default_program_owners();
let account_id = AccountId::new([254; 32]);
@ -771,7 +771,7 @@ pub mod tests {
#[test]
fn program_should_fail_if_modifies_program_owner_with_only_non_default_data() {
let initial_data = [];
let mut state = V02State::new_with_genesis_accounts(&initial_data, &[])
let mut state = V03State::new_with_genesis_accounts(&initial_data, &[])
.with_test_programs()
.with_non_default_accounts_but_default_program_owners();
let account_id = AccountId::new([253; 32]);
@ -796,7 +796,7 @@ pub mod tests {
fn program_should_fail_if_transfers_balance_from_non_owned_account() {
let initial_data = [(AccountId::new([1; 32]), 100)];
let mut state =
V02State::new_with_genesis_accounts(&initial_data, &[]).with_test_programs();
V03State::new_with_genesis_accounts(&initial_data, &[]).with_test_programs();
let sender_account_id = AccountId::new([1; 32]);
let receiver_account_id = AccountId::new([2; 32]);
let balance_to_move: u128 = 1;
@ -823,7 +823,7 @@ pub mod tests {
#[test]
fn program_should_fail_if_modifies_data_of_non_owned_account() {
let initial_data = [];
let mut state = V02State::new_with_genesis_accounts(&initial_data, &[])
let mut state = V03State::new_with_genesis_accounts(&initial_data, &[])
.with_test_programs()
.with_non_default_accounts_but_default_program_owners();
let account_id = AccountId::new([255; 32]);
@ -849,7 +849,7 @@ pub mod tests {
fn program_should_fail_if_does_not_preserve_total_balance_by_minting() {
let initial_data = [];
let mut state =
V02State::new_with_genesis_accounts(&initial_data, &[]).with_test_programs();
V03State::new_with_genesis_accounts(&initial_data, &[]).with_test_programs();
let account_id = AccountId::new([1; 32]);
let program_id = Program::minter().id();
@ -866,7 +866,7 @@ pub mod tests {
#[test]
fn program_should_fail_if_does_not_preserve_total_balance_by_burning() {
let initial_data = [];
let mut state = V02State::new_with_genesis_accounts(&initial_data, &[])
let mut state = V03State::new_with_genesis_accounts(&initial_data, &[])
.with_test_programs()
.with_account_owned_by_burner_program();
let program_id = Program::burner().id();
@ -916,7 +916,7 @@ pub mod tests {
sender_keys: &TestPublicKeys,
recipient_keys: &TestPrivateKeys,
balance_to_move: u128,
state: &V02State,
state: &V03State,
) -> PrivacyPreservingTransaction {
let sender = AccountWithMetadata::new(
state.get_account_by_id(sender_keys.account_id()),
@ -960,7 +960,7 @@ pub mod tests {
sender_private_account: &Account,
recipient_keys: &TestPrivateKeys,
balance_to_move: u128,
state: &V02State,
state: &V03State,
) -> PrivacyPreservingTransaction {
let program = Program::authenticated_transfer_program();
let sender_commitment = Commitment::new(&sender_keys.npk(), sender_private_account);
@ -1012,7 +1012,7 @@ pub mod tests {
sender_private_account: &Account,
recipient_account_id: &AccountId,
balance_to_move: u128,
state: &V02State,
state: &V03State,
) -> PrivacyPreservingTransaction {
let program = Program::authenticated_transfer_program();
let sender_commitment = Commitment::new(&sender_keys.npk(), sender_private_account);
@ -1058,7 +1058,7 @@ pub mod tests {
let recipient_keys = test_private_account_keys_1();
let mut state =
V02State::new_with_genesis_accounts(&[(sender_keys.account_id(), 200)], &[]);
V03State::new_with_genesis_accounts(&[(sender_keys.account_id(), 200)], &[]);
let balance_to_move = 37;
@ -1106,7 +1106,7 @@ pub mod tests {
};
let recipient_keys = test_private_account_keys_2();
let mut state = V02State::new_with_genesis_accounts(&[], &[])
let mut state = V03State::new_with_genesis_accounts(&[], &[])
.with_private_account(&sender_keys, &sender_private_account);
let balance_to_move = 37;
@ -1173,7 +1173,7 @@ pub mod tests {
};
let recipient_keys = test_public_account_keys_1();
let recipient_initial_balance = 400;
let mut state = V02State::new_with_genesis_accounts(
let mut state = V03State::new_with_genesis_accounts(
&[(recipient_keys.account_id(), recipient_initial_balance)],
&[],
)
@ -2127,7 +2127,7 @@ pub mod tests {
};
let recipient_keys = test_private_account_keys_2();
let mut state = V02State::new_with_genesis_accounts(&[], &[])
let mut state = V03State::new_with_genesis_accounts(&[], &[])
.with_private_account(&sender_keys, &sender_private_account);
let balance_to_move = 37;
@ -2212,7 +2212,7 @@ pub mod tests {
let initial_balance = 100;
let initial_data = [(account_id, initial_balance)];
let mut state =
V02State::new_with_genesis_accounts(&initial_data, &[]).with_test_programs();
V03State::new_with_genesis_accounts(&initial_data, &[]).with_test_programs();
let from = account_id;
let from_key = key;
let to = AccountId::new([2; 32]);
@ -2253,7 +2253,7 @@ pub mod tests {
let initial_balance = 1000;
let initial_data = [(from, initial_balance), (to, 0)];
let mut state =
V02State::new_with_genesis_accounts(&initial_data, &[]).with_test_programs();
V03State::new_with_genesis_accounts(&initial_data, &[]).with_test_programs();
let from_key = key;
let amount: u128 = 37;
let instruction: (u128, ProgramId, u32, Option<PdaSeed>) = (
@ -2298,7 +2298,7 @@ pub mod tests {
let initial_balance = 100;
let initial_data = [(from, initial_balance), (to, 0)];
let mut state =
V02State::new_with_genesis_accounts(&initial_data, &[]).with_test_programs();
V03State::new_with_genesis_accounts(&initial_data, &[]).with_test_programs();
let from_key = key;
let amount: u128 = 0;
let instruction: (u128, ProgramId, u32, Option<PdaSeed>) = (
@ -2336,7 +2336,7 @@ pub mod tests {
let initial_balance = 1000;
let initial_data = [(from, initial_balance), (to, 0)];
let mut state =
V02State::new_with_genesis_accounts(&initial_data, &[]).with_test_programs();
V03State::new_with_genesis_accounts(&initial_data, &[]).with_test_programs();
let amount: u128 = 58;
let instruction: (u128, ProgramId, u32, Option<PdaSeed>) = (
amount,
@ -2382,7 +2382,7 @@ pub mod tests {
let initial_balance = 100;
let initial_data = [(account_id, initial_balance)];
let mut state =
V02State::new_with_genesis_accounts(&initial_data, &[]).with_test_programs();
V03State::new_with_genesis_accounts(&initial_data, &[]).with_test_programs();
let from = account_id;
let from_key = key;
let to = AccountId::new([2; 32]);
@ -2454,7 +2454,7 @@ pub mod tests {
let from_commitment = Commitment::new(&from_keys.npk(), &from_account.account);
let to_commitment = Commitment::new(&to_keys.npk(), &to_account.account);
let mut state = V02State::new_with_genesis_accounts(
let mut state = V03State::new_with_genesis_accounts(
&[],
&[from_commitment.clone(), to_commitment.clone()],
)
@ -2563,7 +2563,7 @@ pub mod tests {
..Account::default()
};
let mut state = V02State::new_with_genesis_accounts(&[], &[]);
let mut state = V03State::new_with_genesis_accounts(&[], &[]);
state.add_pinata_token_program(pinata_definition_id);
// Execution of the token program to create new token for the pinata token
@ -2624,7 +2624,7 @@ pub mod tests {
#[test]
fn claiming_mechanism_cannot_claim_initialied_accounts() {
let claimer = Program::claimer();
let mut state = V02State::new_with_genesis_accounts(&[], &[]).with_test_programs();
let mut state = V03State::new_with_genesis_accounts(&[], &[]).with_test_programs();
let account_id = AccountId::new([2; 32]);
// Insert an account with non-default program owner
@ -2659,7 +2659,7 @@ pub mod tests {
let recipient_id = AccountId::from(&PublicKey::new_from_private_key(&recipient_key));
let recipient_init_balance: u128 = 10;
let mut state = V02State::new_with_genesis_accounts(
let mut state = V03State::new_with_genesis_accounts(
&[
(sender_id, sender_init_balance),
(recipient_id, recipient_init_balance),
@ -2714,7 +2714,7 @@ pub mod tests {
#[test]
fn private_authorized_uninitialized_account() {
let mut state = V02State::new_with_genesis_accounts(&[], &[]);
let mut state = V03State::new_with_genesis_accounts(&[], &[]);
// Set up keys for the authorized private account
let private_keys = test_private_account_keys_1();
@ -2766,7 +2766,7 @@ pub mod tests {
#[test]
fn private_account_claimed_then_used_without_init_flag_should_fail() {
let mut state = V02State::new_with_genesis_accounts(&[], &[]).with_test_programs();
let mut state = V03State::new_with_genesis_accounts(&[], &[]).with_test_programs();
// Set up keys for the private account
let private_keys = test_private_account_keys_1();
@ -2847,7 +2847,7 @@ pub mod tests {
fn public_changer_claimer_no_data_change_no_claim_succeeds() {
let initial_data = [];
let mut state =
V02State::new_with_genesis_accounts(&initial_data, &[]).with_test_programs();
V03State::new_with_genesis_accounts(&initial_data, &[]).with_test_programs();
let account_id = AccountId::new([1; 32]);
let program_id = Program::changer_claimer().id();
// Don't change data (None) and don't claim (false)
@ -2871,7 +2871,7 @@ pub mod tests {
fn public_changer_claimer_data_change_no_claim_fails() {
let initial_data = [];
let mut state =
V02State::new_with_genesis_accounts(&initial_data, &[]).with_test_programs();
V03State::new_with_genesis_accounts(&initial_data, &[]).with_test_programs();
let account_id = AccountId::new([1; 32]);
let program_id = Program::changer_claimer().id();
// Change data but don't claim (false) - should fail
@ -2965,7 +2965,7 @@ pub mod tests {
let recipient_commitment =
Commitment::new(&recipient_keys.npk(), &recipient_account.account);
let state = V02State::new_with_genesis_accounts(
let state = V03State::new_with_genesis_accounts(
&[(sender_account.account_id, sender_account.account.balance)],
std::slice::from_ref(&recipient_commitment),
)
@ -3001,9 +3001,9 @@ pub mod tests {
let account_id_1 = AccountId::new([1; 32]);
let account_id_2 = AccountId::new([2; 32]);
let initial_data = [(account_id_1, 100_u128), (account_id_2, 151_u128)];
let state = V02State::new_with_genesis_accounts(&initial_data, &[]).with_test_programs();
let state = V03State::new_with_genesis_accounts(&initial_data, &[]).with_test_programs();
let bytes = borsh::to_vec(&state).unwrap();
let state_from_bytes: V02State = borsh::from_slice(&bytes).unwrap();
let state_from_bytes: V03State = borsh::from_slice(&bytes).unwrap();
assert_eq!(state, state_from_bytes);
}
}

View File

@ -8,10 +8,9 @@ license = { workspace = true }
workspace = true
[dependencies]
nssa = { workspace = true, optional = true, features = ["test-utils"], default-features = true }
nssa_core.workspace = true
token_core.workspace = true
amm_core.workspace = true
[features]
nssa = ["dep:nssa"]
[dev-dependencies]
nssa = { workspace = true, features = ["test-utils"] }

View File

@ -4,9 +4,8 @@ use amm_core::{
PoolDefinition, compute_liquidity_token_pda, compute_liquidity_token_pda_seed,
compute_pool_pda, compute_vault_pda, compute_vault_pda_seed,
};
#[cfg(feature = "nssa")]
use nssa::{
PrivateKey, PublicKey, PublicTransaction, V02State, program::Program, public_transaction,
PrivateKey, PublicKey, PublicTransaction, V03State, program::Program, public_transaction,
};
use nssa_core::{
account::{Account, AccountId, AccountWithMetadata, Data},
@ -25,16 +24,15 @@ struct BalanceForTests;
struct ChainedCallForTests;
struct IdForTests;
struct AccountWithMetadataForTests;
#[cfg(feature = "nssa")]
struct PrivateKeysForTests;
#[cfg(feature = "nssa")]
struct IdForExeTests;
#[cfg(feature = "nssa")]
struct BalanceForExeTests;
#[cfg(feature = "nssa")]
struct AccountsForExeTests;
#[cfg(feature = "nssa")]
impl PrivateKeysForTests {
fn user_token_a_key() -> PrivateKey {
PrivateKey::try_new([31; 32]).expect("Keys constructor expects valid private key")
@ -1008,7 +1006,6 @@ impl AccountWithMetadataForTests {
}
}
#[cfg(feature = "nssa")]
impl BalanceForExeTests {
fn user_token_a_holding_init() -> u128 {
10_000
@ -1172,7 +1169,6 @@ impl BalanceForExeTests {
}
}
#[cfg(feature = "nssa")]
impl IdForExeTests {
fn pool_definition_id() -> AccountId {
amm_core::compute_pool_pda(
@ -1229,7 +1225,6 @@ impl IdForExeTests {
}
}
#[cfg(feature = "nssa")]
impl AccountsForExeTests {
fn user_token_a_holding() -> Account {
Account {
@ -2641,10 +2636,9 @@ fn new_definition_lp_symmetric_amounts() {
assert_eq!(chained_call_lp, expected_lp_call);
}
#[cfg(feature = "nssa")]
fn state_for_amm_tests() -> V02State {
fn state_for_amm_tests() -> V03State {
let initial_data = [];
let mut state = V02State::new_with_genesis_accounts(&initial_data, &[]);
let mut state = V03State::new_with_genesis_accounts(&initial_data, &[]);
state.force_insert_account(
IdForExeTests::pool_definition_id(),
AccountsForExeTests::pool_definition_init(),
@ -2685,10 +2679,9 @@ fn state_for_amm_tests() -> V02State {
state
}
#[cfg(feature = "nssa")]
fn state_for_amm_tests_with_new_def() -> V02State {
fn state_for_amm_tests_with_new_def() -> V03State {
let initial_data = [];
let mut state = V02State::new_with_genesis_accounts(&initial_data, &[]);
let mut state = V03State::new_with_genesis_accounts(&initial_data, &[]);
state.force_insert_account(
IdForExeTests::token_a_definition_id(),
AccountsForExeTests::token_a_definition_account(),
@ -2708,7 +2701,6 @@ fn state_for_amm_tests_with_new_def() -> V02State {
state
}
#[cfg(feature = "nssa")]
#[test]
fn simple_amm_remove() {
let mut state = state_for_amm_tests();
@ -2768,7 +2760,6 @@ fn simple_amm_remove() {
assert_eq!(user_token_lp_post, expected_user_token_lp);
}
#[cfg(feature = "nssa")]
#[test]
fn simple_amm_new_definition_inactive_initialized_pool_and_uninit_user_lp() {
let mut state = state_for_amm_tests_with_new_def();
@ -2849,7 +2840,6 @@ fn simple_amm_new_definition_inactive_initialized_pool_and_uninit_user_lp() {
assert_eq!(user_token_lp_post, expected_user_token_lp);
}
#[cfg(feature = "nssa")]
#[test]
fn simple_amm_new_definition_inactive_initialized_pool_init_user_lp() {
let mut state = state_for_amm_tests_with_new_def();
@ -2934,7 +2924,6 @@ fn simple_amm_new_definition_inactive_initialized_pool_init_user_lp() {
assert_eq!(user_token_lp_post, expected_user_token_lp);
}
#[cfg(feature = "nssa")]
#[test]
fn simple_amm_new_definition_uninitialized_pool() {
let mut state = state_for_amm_tests_with_new_def();
@ -3007,7 +2996,6 @@ fn simple_amm_new_definition_uninitialized_pool() {
assert_eq!(user_token_lp_post, expected_user_token_lp);
}
#[cfg(feature = "nssa")]
#[test]
fn simple_amm_add() {
let mut state = state_for_amm_tests();
@ -3070,7 +3058,6 @@ fn simple_amm_add() {
assert_eq!(user_token_lp_post, expected_user_token_lp);
}
#[cfg(feature = "nssa")]
#[test]
fn simple_amm_swap_1() {
let mut state = state_for_amm_tests();
@ -3122,7 +3109,6 @@ fn simple_amm_swap_1() {
assert_eq!(user_token_b_post, expected_user_token_b);
}
#[cfg(feature = "nssa")]
#[test]
fn simple_amm_swap_2() {
let mut state = state_for_amm_tests();

Some files were not shown because too many files have changed in this diff Show More