feat: fully integrate Sequencer, Indexer and Explorer with Bedrock

This commit is contained in:
Daniil Polyakov 2026-01-29 22:20:42 +03:00
parent 818fc5e601
commit 975cfb9ec6
131 changed files with 3160 additions and 2824 deletions

1
.gitignore vendored
View File

@ -9,3 +9,4 @@ rocksdb
sequencer_runner/data/
storage.json
result
wallet-ffi/wallet_ffi.h

582
Cargo.lock generated
View File

@ -69,7 +69,7 @@ dependencies = [
"actix-rt",
"actix-service",
"actix-utils",
"base64",
"base64 0.22.1",
"bitflags 2.10.0",
"bytes",
"bytestring",
@ -398,6 +398,15 @@ version = "1.0.100"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61"
[[package]]
name = "arc-swap"
version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ded5f9a03ac8f24d1b8a25101ee812cd32cdc8c50a4c50237de2c4915850e73"
dependencies = [
"rustversion",
]
[[package]]
name = "archery"
version = "1.2.2"
@ -813,6 +822,22 @@ version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50"
[[package]]
name = "astral-tokio-tar"
version = "0.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec179a06c1769b1e42e1e2cbe74c7dcdb3d6383c838454d063eaac5bbb7ebbe5"
dependencies = [
"filetime",
"futures-core",
"libc",
"portable-atomic",
"rustc-hash",
"tokio",
"tokio-stream",
"xattr",
]
[[package]]
name = "async-lock"
version = "3.4.2"
@ -946,7 +971,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b52af3cb4058c895d37317bb27508dccc8e5f2d39454016b297bf4a400597b8"
dependencies = [
"axum-core 0.5.6",
"base64",
"base64 0.22.1",
"bytes",
"form_urlencoded",
"futures-util",
@ -1050,6 +1075,12 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6107fe1be6682a68940da878d9e9f5e90ca5745b3dec9fd1bb393c8777d4f581"
[[package]]
name = "base64"
version = "0.21.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567"
[[package]]
name = "base64"
version = "0.22.1"
@ -1067,6 +1098,7 @@ name = "bedrock_client"
version = "0.1.0"
dependencies = [
"anyhow",
"common",
"futures",
"log",
"logos-blockchain-chain-broadcast-service",
@ -1173,6 +1205,83 @@ dependencies = [
"generic-array 0.14.7",
]
[[package]]
name = "bollard"
version = "0.19.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "87a52479c9237eb04047ddb94788c41ca0d26eaff8b697ecfbb4c32f7fdc3b1b"
dependencies = [
"async-stream",
"base64 0.22.1",
"bitflags 2.10.0",
"bollard-buildkit-proto",
"bollard-stubs",
"bytes",
"chrono",
"futures-core",
"futures-util",
"hex",
"home",
"http 1.4.0",
"http-body-util",
"hyper",
"hyper-named-pipe",
"hyper-rustls",
"hyper-util",
"hyperlocal",
"log",
"num",
"pin-project-lite",
"rand 0.9.2",
"rustls",
"rustls-native-certs",
"rustls-pemfile",
"rustls-pki-types",
"serde",
"serde_derive",
"serde_json",
"serde_repr",
"serde_urlencoded",
"thiserror 2.0.17",
"tokio",
"tokio-stream",
"tokio-util",
"tonic",
"tower-service",
"url",
"winapi",
]
[[package]]
name = "bollard-buildkit-proto"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "85a885520bf6249ab931a764ffdb87b0ceef48e6e7d807cfdb21b751e086e1ad"
dependencies = [
"prost 0.14.3",
"prost-types",
"tonic",
"tonic-prost",
"ureq",
]
[[package]]
name = "bollard-stubs"
version = "1.49.1-rc.28.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5731fe885755e92beff1950774068e0cae67ea6ec7587381536fca84f1779623"
dependencies = [
"base64 0.22.1",
"bollard-buildkit-proto",
"bytes",
"chrono",
"prost 0.14.3",
"serde",
"serde_json",
"serde_repr",
"serde_with",
]
[[package]]
name = "bonsai-sdk"
version = "1.4.1"
@ -1252,9 +1361,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b"
[[package]]
name = "bytes"
version = "1.11.1"
version = "1.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e748733b7cbc798e1434b6ac524f0c1ff2ab456fe201501e6497c8417a4fc33"
checksum = "b35204fbdc0b3f4446b89fc1ac2cf84a8a68971995d0bf2e925ec7cd960f9cb3"
dependencies = [
"serde",
]
@ -1512,7 +1621,7 @@ name = "common"
version = "0.1.0"
dependencies = [
"anyhow",
"base64",
"base64 0.22.1",
"borsh",
"hex",
"log",
@ -1522,6 +1631,7 @@ dependencies = [
"reqwest",
"serde",
"serde_json",
"serde_with",
"sha2",
"thiserror 2.0.17",
"url",
@ -1927,7 +2037,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8d162beedaa69905488a8da94f5ac3edb4dd4788b732fadb7bd120b2625c1976"
dependencies = [
"data-encoding",
"syn 1.0.109",
"syn 2.0.111",
]
[[package]]
@ -2084,12 +2194,35 @@ dependencies = [
"syn 2.0.111",
]
[[package]]
name = "docker-compose-types"
version = "0.22.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7edb75a85449fd9c34d9fb3376c6208ec4115d2ca43b965175a52d71349ecab8"
dependencies = [
"derive_builder",
"indexmap 2.12.1",
"serde",
"serde_yaml",
]
[[package]]
name = "docker-generate"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ccf673e0848ef09fa4aeeba78e681cf651c0c7d35f76ee38cec8e55bc32fa111"
[[package]]
name = "docker_credential"
version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1d89dfcba45b4afad7450a99b39e751590463e45c04728cf555d36bb66940de8"
dependencies = [
"base64 0.21.7",
"serde",
"serde_json",
]
[[package]]
name = "downcast-rs"
version = "1.2.1"
@ -2296,6 +2429,16 @@ dependencies = [
"windows-sys 0.61.2",
]
[[package]]
name = "etcetera"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "de48cc4d1c1d97a20fd819def54b890cadde72ed3ad0c614822a0a433361be96"
dependencies = [
"cfg-if",
"windows-sys 0.61.2",
]
[[package]]
name = "event-listener"
version = "5.4.1"
@ -2367,6 +2510,17 @@ version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be"
[[package]]
name = "ferroid"
version = "0.8.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bb330bbd4cb7a5b9f559427f06f98a4f853a137c8298f3bd3f8ca57663e21986"
dependencies = [
"portable-atomic",
"rand 0.9.2",
"web-time",
]
[[package]]
name = "ff"
version = "0.13.1"
@ -2383,6 +2537,17 @@ version = "0.2.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d"
[[package]]
name = "filetime"
version = "0.2.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f98844151eee8917efc50bd9e8318cb963ae8b297431495d3f758616ea5c57db"
dependencies = [
"cfg-if",
"libc",
"libredox",
]
[[package]]
name = "find-msvc-tools"
version = "0.1.5"
@ -2857,6 +3022,15 @@ version = "1.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e89e8d20b3799fa526152a5301a771eaaad80857f83e01b23216ceaafb2d9280"
[[package]]
name = "home"
version = "0.5.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cc627f471c528ff0c4a49e1d5e60450c8f6461dd6d10ba9dcd3a61d3dff7728d"
dependencies = [
"windows-sys 0.61.2",
]
[[package]]
name = "html-escape"
version = "0.2.13"
@ -2973,6 +3147,21 @@ dependencies = [
"want",
]
[[package]]
name = "hyper-named-pipe"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "73b7d8abf35697b81a825e386fc151e0d503e8cb5fcb93cc8669c376dfd6f278"
dependencies = [
"hex",
"hyper",
"hyper-util",
"pin-project-lite",
"tokio",
"tower-service",
"winapi",
]
[[package]]
name = "hyper-rustls"
version = "0.27.7"
@ -2991,6 +3180,19 @@ dependencies = [
"webpki-roots",
]
[[package]]
name = "hyper-timeout"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0"
dependencies = [
"hyper",
"hyper-util",
"pin-project-lite",
"tokio",
"tower-service",
]
[[package]]
name = "hyper-tls"
version = "0.6.0"
@ -3013,7 +3215,7 @@ version = "0.1.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "727805d60e7938b76b826a6ef209eb70eaa1812794f9424d4a4e2d740662df5f"
dependencies = [
"base64",
"base64 0.22.1",
"bytes",
"futures-channel",
"futures-core",
@ -3033,6 +3235,21 @@ dependencies = [
"windows-registry",
]
[[package]]
name = "hyperlocal"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "986c5ce3b994526b3cd75578e62554abd09f0899d6206de48b3e96ab34ccc8c7"
dependencies = [
"hex",
"http-body-util",
"hyper",
"hyper-util",
"pin-project-lite",
"tokio",
"tower-service",
]
[[package]]
name = "iana-time-zone"
version = "0.1.64"
@ -3176,6 +3393,7 @@ name = "indexer_core"
version = "0.1.0"
dependencies = [
"anyhow",
"async-stream",
"bedrock_client",
"borsh",
"common",
@ -3193,13 +3411,18 @@ name = "indexer_service"
version = "0.1.0"
dependencies = [
"anyhow",
"arc-swap",
"async-trait",
"clap",
"env_logger",
"futures",
"indexer_core",
"indexer_service_protocol",
"indexer_service_rpc",
"jsonrpsee",
"log",
"serde",
"serde_json",
"tokio",
"tokio-util",
]
@ -3208,7 +3431,7 @@ dependencies = [
name = "indexer_service_protocol"
version = "0.1.0"
dependencies = [
"base64",
"base64 0.22.1",
"borsh",
"common",
"nssa",
@ -3279,21 +3502,23 @@ version = "0.1.0"
dependencies = [
"actix-web",
"anyhow",
"base64",
"base64 0.22.1",
"borsh",
"common",
"env_logger",
"futures",
"hex",
"indexer_core",
"indexer_service",
"key_protocol",
"log",
"nssa",
"nssa_core",
"rand 0.8.5",
"sequencer_core",
"sequencer_runner",
"serde_json",
"tempfile",
"testcontainers",
"token_core",
"tokio",
"url",
@ -3473,7 +3698,7 @@ version = "0.26.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf36eb27f8e13fa93dcb50ccb44c417e25b818cfa1a481b5470cd07b19c60b98"
dependencies = [
"base64",
"base64 0.22.1",
"futures-channel",
"futures-util",
"gloo-net",
@ -3526,7 +3751,7 @@ version = "0.26.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "790bedefcec85321e007ff3af84b4e417540d5c87b3c9779b9e247d1bcc3dab8"
dependencies = [
"base64",
"base64 0.22.1",
"http-body",
"hyper",
"hyper-rustls",
@ -3712,7 +3937,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f9569fc37575a5d64c0512145af7630bf651007237ef67a8a77328199d315bb"
dependencies = [
"any_spawner",
"base64",
"base64 0.22.1",
"cfg-if",
"either_of",
"futures",
@ -3914,7 +4139,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dbf1045af93050bf3388d1c138426393fc131f6d9e46a65519da884c033ed730"
dependencies = [
"any_spawner",
"base64",
"base64 0.22.1",
"codee",
"futures",
"hydration_context",
@ -3971,6 +4196,7 @@ checksum = "df15f6eac291ed1cf25865b1ee60399f57e7c227e7f51bdbd4c5270396a9ed50"
dependencies = [
"bitflags 2.10.0",
"libc",
"redox_syscall 0.6.0",
]
[[package]]
@ -4860,9 +5086,24 @@ dependencies = [
"risc0-zkvm",
"serde",
"serde_json",
"serde_with",
"thiserror 2.0.17",
]
[[package]]
name = "num"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "35bd024e8b2ff75562e5f34e7f4905839deb4b22955ef5e73d2fea1b9813cb23"
dependencies = [
"num-bigint",
"num-complex",
"num-integer",
"num-iter",
"num-rational",
"num-traits",
]
[[package]]
name = "num-bigint"
version = "0.4.6"
@ -4890,10 +5131,19 @@ dependencies = [
]
[[package]]
name = "num-conv"
version = "0.2.0"
name = "num-complex"
version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf97ec579c3c42f953ef76dbf8d55ac91fb219dde70e49aa4a6b7d74e9919050"
checksum = "73f88a1307638156682bada9d7604135552957b7818057dcef22705b4d509495"
dependencies = [
"num-traits",
]
[[package]]
name = "num-conv"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9"
[[package]]
name = "num-integer"
@ -4915,6 +5165,17 @@ dependencies = [
"num-traits",
]
[[package]]
name = "num-rational"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f83d14da390562dca69fc84082e73e548e1ad308d24accdedd2720017cb37824"
dependencies = [
"num-bigint",
"num-integer",
"num-traits",
]
[[package]]
name = "num-traits"
version = "0.2.19"
@ -5117,11 +5378,36 @@ checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1"
dependencies = [
"cfg-if",
"libc",
"redox_syscall",
"redox_syscall 0.5.18",
"smallvec",
"windows-link",
]
[[package]]
name = "parse-display"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "914a1c2265c98e2446911282c6ac86d8524f495792c38c5bd884f80499c7538a"
dependencies = [
"parse-display-derive",
"regex",
"regex-syntax",
]
[[package]]
name = "parse-display-derive"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2ae7800a4c974efd12df917266338e79a7a74415173caf7e70aa0a0707345281"
dependencies = [
"proc-macro2",
"quote",
"regex",
"regex-syntax",
"structmeta",
"syn 2.0.111",
]
[[package]]
name = "paste"
version = "1.0.15"
@ -5222,9 +5508,9 @@ dependencies = [
[[package]]
name = "portable-atomic"
version = "1.11.1"
version = "1.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483"
checksum = "c33a9471896f1c69cecef8d20cbe2f7accd12527ce60845ff44c153bb2a21b49"
[[package]]
name = "postcard"
@ -5388,7 +5674,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2796faa41db3ec313a31f7624d9286acf277b52de526150b7e69f3debf891ee5"
dependencies = [
"bytes",
"prost-derive",
"prost-derive 0.13.5",
]
[[package]]
name = "prost"
version = "0.14.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d2ea70524a2f82d518bce41317d0fae74151505651af45faf1ffbd6fd33f0568"
dependencies = [
"bytes",
"prost-derive 0.14.3",
]
[[package]]
@ -5404,6 +5700,28 @@ dependencies = [
"syn 2.0.111",
]
[[package]]
name = "prost-derive"
version = "0.14.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "27c6023962132f4b30eb4c172c91ce92d933da334c59c23cddee82358ddafb0b"
dependencies = [
"anyhow",
"itertools 0.14.0",
"proc-macro2",
"quote",
"syn 2.0.111",
]
[[package]]
name = "prost-types"
version = "0.14.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8991c4cbdb8bc5b11f0b074ffe286c30e523de90fee5ba8132f1399f23cb3dd7"
dependencies = [
"prost 0.14.3",
]
[[package]]
name = "quanta"
version = "0.12.6"
@ -5651,6 +5969,15 @@ dependencies = [
"bitflags 2.10.0",
]
[[package]]
name = "redox_syscall"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec96166dafa0886eb81fe1c0a388bece180fbef2135f97c1e2cf8302e74b43b5"
dependencies = [
"bitflags 2.10.0",
]
[[package]]
name = "redox_users"
version = "0.5.2"
@ -5723,7 +6050,7 @@ version = "0.12.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3b4c14b2d9afca6a60277086b0cc6a6ae0b568f6f7916c943a8cdc79f8be240f"
dependencies = [
"base64",
"base64 0.22.1",
"bytes",
"encoding_rs",
"futures-channel",
@ -5966,7 +6293,7 @@ dependencies = [
"derive_more 2.1.0",
"hex",
"lazy-regex",
"prost",
"prost 0.13.5",
"risc0-binfmt",
"risc0-build",
"risc0-circuit-keccak",
@ -6095,9 +6422,9 @@ dependencies = [
[[package]]
name = "ruint"
version = "1.17.2"
version = "1.17.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c141e807189ad38a07276942c6623032d3753c8859c146104ac2e4d68865945a"
checksum = "a68df0380e5c9d20ce49534f292a36a7514ae21350726efe1865bdb1fa91d278"
dependencies = [
"borsh",
"proptest",
@ -6170,6 +6497,15 @@ dependencies = [
"security-framework 3.5.1",
]
[[package]]
name = "rustls-pemfile"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dce314e5fee3f39953d46bb63bb8a46d40c2f8fb7cc5a3b6cab2bde9721d6e50"
dependencies = [
"rustls-pki-types",
]
[[package]]
name = "rustls-pki-types"
version = "1.13.2"
@ -6417,6 +6753,7 @@ dependencies = [
"chrono",
"common",
"futures",
"jsonrpsee",
"log",
"logos-blockchain-core",
"logos-blockchain-key-management-system-service",
@ -6430,6 +6767,7 @@ dependencies = [
"storage",
"tempfile",
"tokio",
"url",
]
[[package]]
@ -6440,7 +6778,7 @@ dependencies = [
"actix-web",
"anyhow",
"base58",
"base64",
"base64 0.22.1",
"borsh",
"common",
"futures",
@ -6466,6 +6804,9 @@ dependencies = [
"clap",
"common",
"env_logger",
"futures",
"indexer_service_protocol",
"indexer_service_rpc",
"log",
"sequencer_core",
"sequencer_rpc",
@ -6557,6 +6898,17 @@ dependencies = [
"thiserror 2.0.17",
]
[[package]]
name = "serde_repr"
version = "0.1.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "175ee3e80ae9982737ca543e96133087cbd9a485eecc3bc4de9c1a37b47ea59c"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.111",
]
[[package]]
name = "serde_spanned"
version = "0.6.9"
@ -6593,7 +6945,7 @@ version = "3.16.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4fa237f2807440d238e0364a218270b98f767a00d3dada77b1c53ae88940e2e7"
dependencies = [
"base64",
"base64 0.22.1",
"chrono",
"hex",
"indexmap 1.9.3",
@ -6618,6 +6970,19 @@ dependencies = [
"syn 2.0.111",
]
[[package]]
name = "serde_yaml"
version = "0.9.33"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a0623d197252096520c6f2a5e1171ee436e5af99a5d7caa2891e55e61950e6d9"
dependencies = [
"indexmap 2.12.1",
"itoa",
"ryu",
"serde",
"unsafe-libyaml",
]
[[package]]
name = "serdect"
version = "0.2.0"
@ -6635,7 +7000,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "353d02fa2886cd8dae0b8da0965289fa8f2ecc7df633d1ce965f62fdf9644d29"
dependencies = [
"axum 0.8.8",
"base64",
"base64 0.22.1",
"bytes",
"const-str 0.7.1",
"const_format",
@ -6807,7 +7172,7 @@ version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2e859df029d160cb88608f5d7df7fb4753fd20fdfb4de5644f3d8b8440841721"
dependencies = [
"base64",
"base64 0.22.1",
"bytes",
"futures",
"http 1.4.0",
@ -6881,6 +7246,29 @@ version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
[[package]]
name = "structmeta"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2e1575d8d40908d70f6fd05537266b90ae71b15dbbe7a8b7dffa2b759306d329"
dependencies = [
"proc-macro2",
"quote",
"structmeta-derive",
"syn 2.0.111",
]
[[package]]
name = "structmeta-derive"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "152a0b65a590ff6c3da95cabe2353ee04e6167c896b28e3b14478c2636c922fc"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.111",
]
[[package]]
name = "strum"
version = "0.27.2"
@ -7087,6 +7475,38 @@ dependencies = [
"risc0-zkvm",
]
[[package]]
name = "testcontainers"
version = "0.26.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a81ec0158db5fbb9831e09d1813fe5ea9023a2b5e6e8e0a5fe67e2a820733629"
dependencies = [
"astral-tokio-tar",
"async-trait",
"bollard",
"bytes",
"docker-compose-types",
"docker_credential",
"either",
"etcetera",
"ferroid",
"futures",
"itertools 0.14.0",
"log",
"memchr",
"parse-display",
"pin-project-lite",
"serde",
"serde_json",
"serde_with",
"thiserror 2.0.17",
"tokio",
"tokio-stream",
"tokio-util",
"url",
"uuid",
]
[[package]]
name = "thiserror"
version = "1.0.69"
@ -7138,30 +7558,30 @@ dependencies = [
[[package]]
name = "time"
version = "0.3.47"
version = "0.3.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "743bd48c283afc0388f9b8827b976905fb217ad9e647fae3a379a9283c4def2c"
checksum = "91e7d9e3bb61134e77bde20dd4825b97c010155709965fedf0f49bb138e52a9d"
dependencies = [
"deranged",
"itoa",
"num-conv",
"powerfmt",
"serde_core",
"serde",
"time-core",
"time-macros",
]
[[package]]
name = "time-core"
version = "0.1.8"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7694e1cfe791f8d31026952abf09c69ca6f6fa4e1a1229e18988f06a04a12dca"
checksum = "40868e7c1d2f0b8d73e4a8c7f0ff63af4f6d19be117e90bd73eb1d62cf831c6b"
[[package]]
name = "time-macros"
version = "0.2.27"
version = "0.2.24"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2e70e4c5a0e0a8a4823ad65dfe1a6930e4f4d756dcd9dd7939022b5e8c501215"
checksum = "30cfb0125f12d9c277f35663a0a33f8c30190f4e4574868a330595412d34ebf3"
dependencies = [
"num-conv",
"time-core",
@ -7407,6 +7827,46 @@ version = "1.0.6+spec-1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ab16f14aed21ee8bfd8ec22513f7287cd4a91aa92e44edfe2c17ddd004e92607"
[[package]]
name = "tonic"
version = "0.14.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a286e33f82f8a1ee2df63f4fa35c0becf4a85a0cb03091a15fd7bf0b402dc94a"
dependencies = [
"async-trait",
"axum 0.8.8",
"base64 0.22.1",
"bytes",
"h2 0.4.13",
"http 1.4.0",
"http-body",
"http-body-util",
"hyper",
"hyper-timeout",
"hyper-util",
"percent-encoding",
"pin-project",
"socket2 0.6.1",
"sync_wrapper",
"tokio",
"tokio-stream",
"tower 0.5.2",
"tower-layer",
"tower-service",
"tracing",
]
[[package]]
name = "tonic-prost"
version = "0.14.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d6c55a2d6a14174563de34409c9f92ff981d006f56da9c6ecd40d9d4a31500b0"
dependencies = [
"bytes",
"prost 0.14.3",
"tonic",
]
[[package]]
name = "tower"
version = "0.4.13"
@ -7426,9 +7886,12 @@ checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9"
dependencies = [
"futures-core",
"futures-util",
"indexmap 2.12.1",
"pin-project-lite",
"slab",
"sync_wrapper",
"tokio",
"tokio-util",
"tower-layer",
"tower-service",
"tracing",
@ -7678,6 +8141,12 @@ dependencies = [
"subtle",
]
[[package]]
name = "unsafe-libyaml"
version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861"
[[package]]
name = "unsigned-varint"
version = "0.8.0"
@ -7690,6 +8159,34 @@ version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1"
[[package]]
name = "ureq"
version = "3.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d39cb1dbab692d82a977c0392ffac19e188bd9186a9f32806f0aaa859d75585a"
dependencies = [
"base64 0.22.1",
"log",
"percent-encoding",
"rustls",
"rustls-pki-types",
"ureq-proto",
"utf-8",
"webpki-roots",
]
[[package]]
name = "ureq-proto"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d81f9efa9df032be5934a46a068815a10a042b494b6a58cb0a1a97bb5467ed6f"
dependencies = [
"base64 0.22.1",
"http 1.4.0",
"httparse",
"log",
]
[[package]]
name = "url"
version = "2.5.7"
@ -7779,7 +8276,7 @@ dependencies = [
"anyhow",
"async-stream",
"base58",
"base64",
"base64 0.22.1",
"borsh",
"bytemuck",
"clap",
@ -7795,6 +8292,7 @@ dependencies = [
"nssa_core",
"optfield",
"rand 0.8.5",
"risc0-zkvm",
"serde",
"serde_json",
"sha2",
@ -8336,6 +8834,16 @@ dependencies = [
"zeroize",
]
[[package]]
name = "xattr"
version = "1.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32e45ad4206f6d2479085147f02bc2ef834ac85886624a23575ae137c8aa8156"
dependencies = [
"libc",
"rustix",
]
[[package]]
name = "xxhash-rust"
version = "0.8.15"

View File

@ -20,9 +20,10 @@ members = [
"sequencer_core",
"sequencer_rpc",
"sequencer_runner",
"indexer_service",
"indexer_service/protocol",
"indexer_service/rpc",
"indexer/core",
"indexer/service",
"indexer/service/protocol",
"indexer/service/rpc",
"explorer_service",
"programs/token/core",
"programs/token",
@ -34,7 +35,6 @@ members = [
"examples/program_deployment/methods",
"examples/program_deployment/methods/guest",
"bedrock_client",
"indexer_core",
]
[workspace.dependencies]
@ -47,9 +47,10 @@ key_protocol = { path = "key_protocol" }
sequencer_core = { path = "sequencer_core" }
sequencer_rpc = { path = "sequencer_rpc" }
sequencer_runner = { path = "sequencer_runner" }
indexer_service = { path = "indexer_service" }
indexer_service_protocol = { path = "indexer_service/protocol" }
indexer_service_rpc = { path = "indexer_service/rpc" }
indexer_core = { path = "indexer/core" }
indexer_service = { path = "indexer/service" }
indexer_service_protocol = { path = "indexer/service/protocol" }
indexer_service_rpc = { path = "indexer/service/rpc" }
wallet = { path = "wallet" }
wallet-ffi = { path = "wallet-ffi" }
token_core = { path = "programs/token/core" }
@ -58,7 +59,6 @@ amm_core = { path = "programs/amm/core" }
amm_program = { path = "programs/amm" }
test_program_methods = { path = "test_program_methods" }
bedrock_client = { path = "bedrock_client" }
indexer_core = { path = "indexer_core" }
tokio = { version = "1.28.2", features = [
"net",
@ -75,6 +75,7 @@ openssl = { version = "0.10", features = ["vendored"] }
openssl-probe = { version = "0.1.2" }
serde = { version = "1.0.60", default-features = false, features = ["derive"] }
serde_json = "1.0.81"
serde_with = "3.16.1"
actix = "0.13.0"
actix-cors = "0.6.1"
jsonrpsee = "0.26.0"
@ -106,6 +107,7 @@ itertools = "0.14.0"
url = { version = "2.5.4", features = ["serde"] }
tokio-retry = "0.3.0"
schemars = "1.2.0"
async-stream = "0.3.6"
logos-blockchain-common-http-client = { git = "https://github.com/logos-blockchain/logos-blockchain.git" }
logos-blockchain-key-management-system-service = { git = "https://github.com/logos-blockchain/logos-blockchain.git" }

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@ -5,6 +5,8 @@ edition = "2024"
license = { workspace = true }
[dependencies]
common.workspace = true
reqwest.workspace = true
anyhow.workspace = true
tokio-retry.workspace = true

View File

@ -1,20 +1,32 @@
use anyhow::Result;
use std::time::Duration;
use anyhow::{Context as _, Result};
use common::config::BasicAuth;
use futures::{Stream, TryFutureExt};
use log::warn;
pub use logos_blockchain_chain_broadcast_service::BlockInfo;
pub use logos_blockchain_common_http_client::{BasicAuthCredentials, CommonHttpClient, Error};
pub use logos_blockchain_common_http_client::{CommonHttpClient, Error};
pub use logos_blockchain_core::{block::Block, header::HeaderId, mantle::SignedMantleTx};
use reqwest::{Client, Url};
use serde::{Deserialize, Serialize};
use tokio_retry::Retry;
/// Fibonacci backoff retry strategy configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
#[derive(Debug, Copy, Clone, Serialize, Deserialize)]
pub struct BackoffConfig {
pub start_delay_millis: u64,
pub max_retries: usize,
}
impl Default for BackoffConfig {
fn default() -> Self {
Self {
start_delay_millis: 100,
max_retries: 5,
}
}
}
// Simple wrapper
// maybe extend in the future for our purposes
// `Clone` is cheap because `CommonHttpClient` is internally reference counted (`Arc`).
@ -22,26 +34,36 @@ pub struct BackoffConfig {
pub struct BedrockClient {
http_client: CommonHttpClient,
node_url: Url,
backoff: BackoffConfig,
}
impl BedrockClient {
pub fn new(auth: Option<BasicAuthCredentials>, node_url: Url) -> Result<Self> {
pub fn new(backoff: BackoffConfig, node_url: Url, auth: Option<BasicAuth>) -> Result<Self> {
let client = Client::builder()
//Add more fields if needed
.timeout(std::time::Duration::from_secs(60))
.build()?;
.build()
.context("Failed to build HTTP client")?;
let auth = auth.map(|a| {
logos_blockchain_common_http_client::BasicAuthCredentials::new(a.username, a.password)
});
let http_client = CommonHttpClient::new_with_client(client, auth);
Ok(Self {
http_client,
node_url,
backoff,
})
}
pub async fn post_transaction(&self, tx: SignedMantleTx) -> Result<(), Error> {
self.http_client
.post_transaction(self.node_url.clone(), tx)
.await
Retry::spawn(self.backoff_strategy(), || {
self.http_client
.post_transaction(self.node_url.clone(), tx.clone())
.inspect_err(|err| warn!("Transaction posting failed with err: {err:#?}"))
})
.await
}
pub async fn get_lib_stream(&self) -> Result<impl Stream<Item = BlockInfo>, Error> {
@ -51,17 +73,17 @@ impl BedrockClient {
pub async fn get_block_by_id(
&self,
header_id: HeaderId,
backoff: &BackoffConfig,
) -> Result<Option<Block<SignedMantleTx>>, Error> {
let strategy =
tokio_retry::strategy::FibonacciBackoff::from_millis(backoff.start_delay_millis)
.take(backoff.max_retries);
Retry::spawn(strategy, || {
Retry::spawn(self.backoff_strategy(), || {
self.http_client
.get_block_by_id(self.node_url.clone(), header_id)
.inspect_err(|err| warn!("Block fetching failed with err: {err:#?}"))
})
.await
}
fn backoff_strategy(&self) -> impl Iterator<Item = Duration> {
tokio_retry::strategy::FibonacciBackoff::from_millis(self.backoff.start_delay_millis)
.take(self.backoff.max_retries)
}
}

View File

@ -12,6 +12,7 @@ anyhow.workspace = true
thiserror.workspace = true
serde_json.workspace = true
serde.workspace = true
serde_with.workspace = true
reqwest.workspace = true
sha2.workspace = true
log.workspace = true

View File

@ -1,9 +1,8 @@
use borsh::{BorshDeserialize, BorshSerialize};
use sha2::{Digest, Sha256, digest::FixedOutput};
use crate::transaction::EncodedTransaction;
use crate::{HashType, transaction::NSSATransaction};
pub type HashType = [u8; 32];
pub type MantleMsgId = [u8; 32];
#[derive(Debug, Clone)]
@ -16,11 +15,11 @@ impl OwnHasher {
let mut hasher = Sha256::new();
hasher.update(data);
<HashType>::from(hasher.finalize_fixed())
HashType(<[u8; 32]>::from(hasher.finalize_fixed()))
}
}
pub type BlockHash = [u8; 32];
pub type BlockHash = HashType;
pub type BlockId = u64;
pub type TimeStamp = u64;
@ -35,7 +34,7 @@ pub struct BlockHeader {
#[derive(Debug, Clone, BorshSerialize, BorshDeserialize)]
pub struct BlockBody {
pub transactions: Vec<EncodedTransaction>,
pub transactions: Vec<NSSATransaction>,
}
#[derive(Debug, Clone, BorshSerialize, BorshDeserialize)]
@ -58,7 +57,7 @@ pub struct HashableBlockData {
pub block_id: BlockId,
pub prev_block_hash: BlockHash,
pub timestamp: TimeStamp,
pub transactions: Vec<EncodedTransaction>,
pub transactions: Vec<NSSATransaction>,
}
impl HashableBlockData {
@ -104,12 +103,12 @@ impl From<Block> for HashableBlockData {
#[cfg(test)]
mod tests {
use crate::{block::HashableBlockData, test_utils};
use crate::{HashType, block::HashableBlockData, test_utils};
#[test]
fn test_encoding_roundtrip() {
let transactions = vec![test_utils::produce_dummy_empty_transaction()];
let block = test_utils::produce_dummy_block(1, Some([1; 32]), transactions);
let block = test_utils::produce_dummy_block(1, Some(HashType([1; 32])), transactions);
let hashable = HashableBlockData::from(block);
let bytes = borsh::to_vec(&hashable).unwrap();
let block_from_bytes = borsh::from_slice::<HashableBlockData>(&bytes).unwrap();

View File

@ -1,6 +0,0 @@
use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum Message {
L2BlockFinalized { l2_block_height: u64 },
}

View File

@ -1 +0,0 @@
pub mod indexer;

55
common/src/config.rs Normal file
View File

@ -0,0 +1,55 @@
//! Common configuration structures and utilities.
use std::str::FromStr;
use logos_blockchain_common_http_client::BasicAuthCredentials;
use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BasicAuth {
pub username: String,
pub password: Option<String>,
}
impl std::fmt::Display for BasicAuth {
    /// Renders as `user` when no password is set, `user:password` otherwise.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match &self.password {
            Some(password) => write!(f, "{}:{password}", self.username),
            None => write!(f, "{}", self.username),
        }
    }
}
impl FromStr for BasicAuth {
    type Err = anyhow::Error;

    /// Parses `user` or `user:password`.
    ///
    /// Everything after the first `:` is treated as the password; a trailing
    /// empty password (`user:`) is normalized to `None`. Note that parsing
    /// currently never fails: `splitn` always yields at least one fragment,
    /// so even the empty string produces an (empty) username.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // `splitn(2, ':')` yields at most two fragments, so the first `next()`
        // is the username and the optional second fragment is the whole
        // password, embedded colons included. The previous implementation
        // also polled a third fragment, but with a limit of 2 that branch was
        // unreachable dead code and has been removed.
        let mut parts = s.splitn(2, ':');
        let username = parts.next().ok_or_else(|| {
            anyhow::anyhow!("Invalid auth format. Expected 'user' or 'user:password'")
        })?;
        let password = parts.next().filter(|p| !p.is_empty());
        Ok(Self {
            username: username.to_string(),
            password: password.map(|p| p.to_string()),
        })
    }
}
/// Bridges the serializable config type into the HTTP client's credential type.
impl From<BasicAuth> for BasicAuthCredentials {
    fn from(value: BasicAuth) -> Self {
        BasicAuthCredentials::new(value.username, value.password)
    }
}

View File

@ -1,5 +1,10 @@
use std::{fmt::Display, str::FromStr};
use borsh::{BorshDeserialize, BorshSerialize};
use serde_with::{DeserializeFromStr, SerializeDisplay};
pub mod block;
pub mod communication;
pub mod config;
pub mod error;
pub mod rpc_primitives;
pub mod sequencer_client;
@ -8,6 +13,81 @@ pub mod transaction;
// Module for tests utility functions
// TODO: Compile only for tests
pub mod test_utils;
pub type HashType = [u8; 32];
pub const PINATA_BASE58: &str = "EfQhKQAkX2FJiwNii2WFQsGndjvF1Mzd7RuVe7QdPLw7";
/// 32-byte hash newtype shared across the sequencer/indexer stack.
///
/// Human-readable serde formats go through `Display`/`FromStr` (hex string,
/// via `SerializeDisplay`/`DeserializeFromStr`); binary formats use Borsh on
/// the raw bytes.
#[derive(
    Debug,
    Default,
    Copy,
    Clone,
    PartialEq,
    Eq,
    Hash,
    SerializeDisplay,
    DeserializeFromStr,
    BorshSerialize,
    BorshDeserialize,
)]
pub struct HashType(pub [u8; 32]);
impl Display for HashType {
    /// Writes the hash as 64 lowercase hex characters.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(&hex::encode(self.0))
    }
}
impl FromStr for HashType {
    type Err = hex::FromHexError;

    /// Parses a hex string (no `0x` prefix) into the 32-byte hash.
    ///
    /// `decode_to_slice` into a fixed 32-byte buffer rejects non-hex
    /// characters and any input whose decoded length is not exactly 32 bytes.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let mut bytes = [0u8; 32];
        hex::decode_to_slice(s, &mut bytes)?;
        Ok(HashType(bytes))
    }
}
/// Borrow the raw 32 bytes (e.g. for hashing or hex encoding).
impl AsRef<[u8]> for HashType {
    fn as_ref(&self) -> &[u8] {
        &self.0
    }
}

/// Unwrap into the raw byte array.
impl From<HashType> for [u8; 32] {
    fn from(hash: HashType) -> Self {
        hash.0
    }
}

/// Wrap a raw byte array.
impl From<[u8; 32]> for HashType {
    fn from(bytes: [u8; 32]) -> Self {
        HashType(bytes)
    }
}

/// Fallible conversion from a byte vector; fails unless `value.len() == 32`.
impl TryFrom<Vec<u8>> for HashType {
    type Error = <[u8; 32] as TryFrom<Vec<u8>>>::Error;

    fn try_from(value: Vec<u8>) -> Result<Self, Self::Error> {
        Ok(HashType(value.try_into()?))
    }
}

/// Copy out into an owned byte vector.
impl From<HashType> for Vec<u8> {
    fn from(hash: HashType) -> Self {
        hash.0.to_vec()
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// `Display` followed by `FromStr` must reproduce the original hash.
    #[test]
    fn serialization_roundtrip() {
        let original = HashType([1u8; 32]);
        let round_tripped = HashType::from_str(&original.to_string()).unwrap();
        assert_eq!(original, round_tripped);
    }
}

View File

@ -1,5 +1,6 @@
use std::collections::HashMap;
use nssa::AccountId;
use nssa_core::program::ProgramId;
use serde::{Deserialize, Serialize};
use serde_json::Value;
@ -8,7 +9,7 @@ use super::{
errors::RpcParseError,
parser::{RpcRequest, parse_params},
};
use crate::parse_request;
use crate::{HashType, parse_request};
#[derive(Serialize, Deserialize, Debug)]
pub struct HelloRequest {}
@ -47,22 +48,22 @@ pub struct GetInitialTestnetAccountsRequest {}
#[derive(Serialize, Deserialize, Debug)]
pub struct GetAccountBalanceRequest {
pub account_id: String,
pub account_id: AccountId,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct GetTransactionByHashRequest {
pub hash: String,
pub hash: HashType,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct GetAccountsNoncesRequest {
pub account_ids: Vec<String>,
pub account_ids: Vec<AccountId>,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct GetAccountRequest {
pub account_id: String,
pub account_id: AccountId,
}
#[derive(Serialize, Deserialize, Debug)]
@ -73,11 +74,6 @@ pub struct GetProofForCommitmentRequest {
#[derive(Serialize, Deserialize, Debug)]
pub struct GetProgramIdsRequest {}
#[derive(Serialize, Deserialize, Debug)]
pub struct PostIndexerMessageRequest {
pub message: crate::communication::indexer::Message,
}
parse_request!(HelloRequest);
parse_request!(RegisterAccountRequest);
parse_request!(SendTxRequest);
@ -92,7 +88,6 @@ parse_request!(GetAccountsNoncesRequest);
parse_request!(GetProofForCommitmentRequest);
parse_request!(GetAccountRequest);
parse_request!(GetProgramIdsRequest);
parse_request!(PostIndexerMessageRequest);
#[derive(Serialize, Deserialize, Debug)]
pub struct HelloResponse {
@ -107,7 +102,7 @@ pub struct RegisterAccountResponse {
#[derive(Serialize, Deserialize, Debug)]
pub struct SendTxResponse {
pub status: String,
pub tx_hash: String,
pub tx_hash: HashType,
}
#[derive(Serialize, Deserialize, Debug)]
@ -222,8 +217,3 @@ pub struct GetInitialTestnetAccountsResponse {
pub account_id: String,
pub balance: u64,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct PostIndexerMessageResponse {
pub status: String,
}

View File

@ -1,10 +1,10 @@
use std::{collections::HashMap, ops::RangeInclusive, str::FromStr};
use std::{collections::HashMap, ops::RangeInclusive};
use anyhow::Result;
use logos_blockchain_common_http_client::BasicAuthCredentials;
use nssa::AccountId;
use nssa_core::program::ProgramId;
use reqwest::Client;
use serde::{Deserialize, Serialize};
use serde::Deserialize;
use serde_json::Value;
use url::Url;
@ -13,6 +13,8 @@ use super::rpc_primitives::requests::{
GetGenesisIdRequest, GetGenesisIdResponse, GetInitialTestnetAccountsRequest,
};
use crate::{
HashType,
config::BasicAuth,
error::{SequencerClientError, SequencerRpcError},
rpc_primitives::{
self,
@ -22,62 +24,12 @@ use crate::{
GetInitialTestnetAccountsResponse, GetLastBlockRequest, GetLastBlockResponse,
GetProgramIdsRequest, GetProgramIdsResponse, GetProofForCommitmentRequest,
GetProofForCommitmentResponse, GetTransactionByHashRequest,
GetTransactionByHashResponse, PostIndexerMessageRequest, PostIndexerMessageResponse,
SendTxRequest, SendTxResponse,
GetTransactionByHashResponse, SendTxRequest, SendTxResponse,
},
},
transaction::{EncodedTransaction, NSSATransaction},
transaction::NSSATransaction,
};
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BasicAuth {
pub username: String,
pub password: Option<String>,
}
impl std::fmt::Display for BasicAuth {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.username)?;
if let Some(password) = &self.password {
write!(f, ":{password}")?;
}
Ok(())
}
}
impl FromStr for BasicAuth {
type Err = anyhow::Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let parse = || {
let mut parts = s.splitn(2, ':');
let username = parts.next()?;
let password = parts.next().filter(|p| !p.is_empty());
if parts.next().is_some() {
return None;
}
Some((username, password))
};
let (username, password) = parse().ok_or_else(|| {
anyhow::anyhow!("Invalid auth format. Expected 'user' or 'user:password'")
})?;
Ok(Self {
username: username.to_string(),
password: password.map(|p| p.to_string()),
})
}
}
impl From<BasicAuth> for BasicAuthCredentials {
fn from(value: BasicAuth) -> Self {
BasicAuthCredentials::new(value.username, value.password)
}
}
#[derive(Clone)]
pub struct SequencerClient {
pub client: reqwest::Client,
@ -196,7 +148,7 @@ impl SequencerClient {
/// bytes.
pub async fn get_account_balance(
&self,
account_id: String,
account_id: AccountId,
) -> Result<GetAccountBalanceResponse, SequencerClientError> {
let block_req = GetAccountBalanceRequest { account_id };
@ -215,7 +167,7 @@ impl SequencerClient {
/// 32 bytes.
pub async fn get_accounts_nonces(
&self,
account_ids: Vec<String>,
account_ids: Vec<AccountId>,
) -> Result<GetAccountsNoncesResponse, SequencerClientError> {
let block_req = GetAccountsNoncesRequest { account_ids };
@ -232,7 +184,7 @@ impl SequencerClient {
pub async fn get_account(
&self,
account_id: String,
account_id: AccountId,
) -> Result<GetAccountResponse, SequencerClientError> {
let block_req = GetAccountRequest { account_id };
@ -248,7 +200,7 @@ impl SequencerClient {
/// Get transaction details for `hash`.
pub async fn get_transaction_by_hash(
&self,
hash: String,
hash: HashType,
) -> Result<GetTransactionByHashResponse, SequencerClientError> {
let block_req = GetTransactionByHashRequest { hash };
@ -268,7 +220,7 @@ impl SequencerClient {
&self,
transaction: nssa::PublicTransaction,
) -> Result<SendTxResponse, SequencerClientError> {
let transaction = EncodedTransaction::from(NSSATransaction::Public(transaction));
let transaction = NSSATransaction::Public(transaction);
let tx_req = SendTxRequest {
transaction: borsh::to_vec(&transaction).unwrap(),
@ -288,7 +240,7 @@ impl SequencerClient {
&self,
transaction: nssa::PrivacyPreservingTransaction,
) -> Result<SendTxResponse, SequencerClientError> {
let transaction = EncodedTransaction::from(NSSATransaction::PrivacyPreserving(transaction));
let transaction = NSSATransaction::PrivacyPreserving(transaction);
let tx_req = SendTxRequest {
transaction: borsh::to_vec(&transaction).unwrap(),
@ -362,7 +314,7 @@ impl SequencerClient {
&self,
transaction: nssa::ProgramDeploymentTransaction,
) -> Result<SendTxResponse, SequencerClientError> {
let transaction = EncodedTransaction::from(NSSATransaction::ProgramDeployment(transaction));
let transaction = NSSATransaction::ProgramDeployment(transaction);
let tx_req = SendTxRequest {
transaction: borsh::to_vec(&transaction).unwrap(),
@ -396,23 +348,4 @@ impl SequencerClient {
Ok(resp_deser)
}
/// Post indexer into sequencer
pub async fn post_indexer_message(
&self,
message: crate::communication::indexer::Message,
) -> Result<PostIndexerMessageResponse, SequencerClientError> {
let last_req = PostIndexerMessageRequest { message };
let req = serde_json::to_value(last_req).unwrap();
let resp = self
.call_method_with_payload("post_indexer_message", req)
.await
.unwrap();
let resp_deser = serde_json::from_value(resp).unwrap();
Ok(resp_deser)
}
}

View File

@ -1,6 +1,9 @@
use nssa::AccountId;
use crate::{
HashType,
block::{Block, HashableBlockData},
transaction::{EncodedTransaction, NSSATransaction},
transaction::NSSATransaction,
};
// Helpers
@ -20,8 +23,8 @@ pub fn sequencer_sign_key_for_testing() -> nssa::PrivateKey {
/// `transactions` - vector of `EncodedTransaction` objects
pub fn produce_dummy_block(
id: u64,
prev_hash: Option<[u8; 32]>,
transactions: Vec<EncodedTransaction>,
prev_hash: Option<HashType>,
transactions: Vec<NSSATransaction>,
) -> Block {
let block_data = HashableBlockData {
block_id: id,
@ -33,7 +36,7 @@ pub fn produce_dummy_block(
block_data.into_pending_block(&sequencer_sign_key_for_testing(), [0; 32])
}
pub fn produce_dummy_empty_transaction() -> EncodedTransaction {
pub fn produce_dummy_empty_transaction() -> NSSATransaction {
let program_id = nssa::program::Program::authenticated_transfer_program().id();
let account_ids = vec![];
let nonces = vec![];
@ -50,17 +53,17 @@ pub fn produce_dummy_empty_transaction() -> EncodedTransaction {
let nssa_tx = nssa::PublicTransaction::new(message, witness_set);
EncodedTransaction::from(NSSATransaction::Public(nssa_tx))
NSSATransaction::Public(nssa_tx)
}
pub fn create_transaction_native_token_transfer(
from: [u8; 32],
from: AccountId,
nonce: u128,
to: [u8; 32],
to: AccountId,
balance_to_move: u128,
signing_key: nssa::PrivateKey,
) -> EncodedTransaction {
let account_ids = vec![nssa::AccountId::new(from), nssa::AccountId::new(to)];
) -> NSSATransaction {
let account_ids = vec![from, to];
let nonces = vec![nonce];
let program_id = nssa::program::Program::authenticated_transfer_program().id();
let message = nssa::public_transaction::Message::try_new(
@ -74,5 +77,5 @@ pub fn create_transaction_native_token_transfer(
let nssa_tx = nssa::PublicTransaction::new(message, witness_set);
EncodedTransaction::from(NSSATransaction::Public(nssa_tx))
NSSATransaction::Public(nssa_tx)
}

View File

@ -1,17 +1,25 @@
use borsh::{BorshDeserialize, BorshSerialize};
use log::info;
use serde::{Deserialize, Serialize};
use sha2::{Digest, digest::FixedOutput};
pub type HashType = [u8; 32];
use crate::HashType;
#[derive(Debug, Clone, PartialEq, Eq)]
/// Union of all NSSA transaction flavors carried in a block.
#[derive(Debug, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)]
pub enum NSSATransaction {
    Public(nssa::PublicTransaction),
    PrivacyPreserving(nssa::PrivacyPreservingTransaction),
    ProgramDeployment(nssa::ProgramDeploymentTransaction),
}
impl NSSATransaction {
    /// Returns the transaction hash, wrapped in the shared [`HashType`].
    ///
    /// Each variant delegates to the inner transaction's own `hash()`, which
    /// must yield the raw `[u8; 32]` accepted by the `HashType` constructor.
    pub fn hash(&self) -> HashType {
        HashType(match self {
            NSSATransaction::Public(tx) => tx.hash(),
            NSSATransaction::PrivacyPreserving(tx) => tx.hash(),
            NSSATransaction::ProgramDeployment(tx) => tx.hash(),
        })
    }
}
impl From<nssa::PublicTransaction> for NSSATransaction {
fn from(value: nssa::PublicTransaction) -> Self {
Self::Public(value)
@ -38,106 +46,3 @@ pub enum TxKind {
PrivacyPreserving,
ProgramDeployment,
}
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)]
/// General transaction object
pub struct EncodedTransaction {
pub tx_kind: TxKind,
/// Encoded blobs of data
pub encoded_transaction_data: Vec<u8>,
}
impl From<NSSATransaction> for EncodedTransaction {
fn from(value: NSSATransaction) -> Self {
match value {
NSSATransaction::Public(tx) => Self {
tx_kind: TxKind::Public,
encoded_transaction_data: tx.to_bytes(),
},
NSSATransaction::PrivacyPreserving(tx) => Self {
tx_kind: TxKind::PrivacyPreserving,
encoded_transaction_data: tx.to_bytes(),
},
NSSATransaction::ProgramDeployment(tx) => Self {
tx_kind: TxKind::ProgramDeployment,
encoded_transaction_data: tx.to_bytes(),
},
}
}
}
impl TryFrom<&EncodedTransaction> for NSSATransaction {
type Error = nssa::error::NssaError;
fn try_from(value: &EncodedTransaction) -> Result<Self, Self::Error> {
match value.tx_kind {
TxKind::Public => nssa::PublicTransaction::from_bytes(&value.encoded_transaction_data)
.map(|tx| tx.into()),
TxKind::PrivacyPreserving => {
nssa::PrivacyPreservingTransaction::from_bytes(&value.encoded_transaction_data)
.map(|tx| tx.into())
}
TxKind::ProgramDeployment => {
nssa::ProgramDeploymentTransaction::from_bytes(&value.encoded_transaction_data)
.map(|tx| tx.into())
}
}
}
}
impl EncodedTransaction {
/// Computes and returns the SHA-256 hash of the JSON-serialized representation of `self`.
pub fn hash(&self) -> HashType {
let bytes_to_hash = borsh::to_vec(&self).unwrap();
let mut hasher = sha2::Sha256::new();
hasher.update(&bytes_to_hash);
HashType::from(hasher.finalize_fixed())
}
pub fn log(&self) {
info!("Transaction hash is {:?}", hex::encode(self.hash()));
info!("Transaction tx_kind is {:?}", self.tx_kind);
}
}
#[cfg(test)]
mod tests {
use sha2::{Digest, digest::FixedOutput};
use crate::{
HashType,
transaction::{EncodedTransaction, TxKind},
};
fn test_transaction_body() -> EncodedTransaction {
EncodedTransaction {
tx_kind: TxKind::Public,
encoded_transaction_data: vec![1, 2, 3, 4],
}
}
#[test]
fn test_transaction_hash_is_sha256_of_json_bytes() {
let body = test_transaction_body();
let expected_hash = {
let data = borsh::to_vec(&body).unwrap();
let mut hasher = sha2::Sha256::new();
hasher.update(&data);
HashType::from(hasher.finalize_fixed())
};
let hash = body.hash();
assert_eq!(expected_hash, hash);
}
#[test]
fn test_to_bytes_from_bytes() {
let body = test_transaction_body();
let body_bytes = borsh::to_vec(&body).unwrap();
let body_new = borsh::from_slice::<EncodedTransaction>(&body_bytes).unwrap();
assert_eq!(body, body_new);
}
}

View File

@ -49,7 +49,7 @@ async fn main() {
let signing_key = wallet_core
.storage()
.user_data
.get_pub_account_signing_key(&account_id)
.get_pub_account_signing_key(account_id)
.expect("Input account should be a self owned public account");
// Define the desired greeting in ASCII

View File

@ -1,4 +1,4 @@
use indexer_service_protocol::{Account, AccountId, Block, BlockId, Hash, Transaction};
use indexer_service_protocol::{Account, AccountId, Block, BlockId, HashType, Transaction};
use leptos::prelude::*;
use serde::{Deserialize, Serialize};
@ -46,7 +46,7 @@ pub async fn search(query: String) -> Result<SearchResults, ServerFnError> {
if let Some(bytes) = parse_hex(&query)
&& let Ok(hash_array) = <[u8; 32]>::try_from(bytes)
{
let hash = Hash(hash_array);
let hash = HashType(hash_array);
// Try as block hash
if let Ok(block) = client.get_block_by_hash(hash).await {
@ -98,7 +98,7 @@ pub async fn get_block_by_id(block_id: BlockId) -> Result<Block, ServerFnError>
/// Get block by hash
#[server]
pub async fn get_block_by_hash(block_hash: Hash) -> Result<Block, ServerFnError> {
pub async fn get_block_by_hash(block_hash: HashType) -> Result<Block, ServerFnError> {
use indexer_service_rpc::RpcClient as _;
let client = expect_context::<IndexerRpcClient>();
client
@ -109,7 +109,7 @@ pub async fn get_block_by_hash(block_hash: Hash) -> Result<Block, ServerFnError>
/// Get transaction by hash
#[server]
pub async fn get_transaction(tx_hash: Hash) -> Result<Transaction, ServerFnError> {
pub async fn get_transaction(tx_hash: HashType) -> Result<Transaction, ServerFnError> {
use indexer_service_rpc::RpcClient as _;
let client = expect_context::<IndexerRpcClient>();
client

View File

@ -1,4 +1,4 @@
use indexer_service_protocol::{BedrockStatus, Block, BlockBody, BlockHeader, BlockId, Hash};
use indexer_service_protocol::{BedrockStatus, Block, BlockBody, BlockHeader, BlockId, HashType};
use leptos::prelude::*;
use leptos_router::{components::A, hooks::use_params_map};
@ -7,7 +7,7 @@ use crate::{api, components::TransactionPreview, format_utils};
#[derive(Clone, PartialEq, Eq)]
enum BlockIdOrHash {
BlockId(BlockId),
Hash(Hash),
Hash(HashType),
}
/// Block page component
@ -29,7 +29,7 @@ pub fn BlockPage() -> impl IntoView {
if let Some(bytes) = format_utils::parse_hex(id_str)
&& let Ok(hash_array) = <[u8; 32]>::try_from(bytes)
{
return Some(BlockIdOrHash::Hash(Hash(hash_array)));
return Some(BlockIdOrHash::Hash(HashType(hash_array)));
}
None

View File

@ -1,5 +1,5 @@
use indexer_service_protocol::{
Hash, PrivacyPreservingMessage, PrivacyPreservingTransaction, ProgramDeploymentMessage,
HashType, PrivacyPreservingMessage, PrivacyPreservingTransaction, ProgramDeploymentMessage,
ProgramDeploymentTransaction, PublicMessage, PublicTransaction, Transaction, WitnessSet,
};
use leptos::prelude::*;
@ -18,7 +18,7 @@ pub fn TransactionPage() -> impl IntoView {
format_utils::parse_hex(&tx_hash_str).and_then(|bytes| {
if bytes.len() == 32 {
let hash_array: [u8; 32] = bytes.try_into().ok()?;
Some(Hash(hash_array))
Some(HashType(hash_array))
} else {
None
}

View File

@ -17,3 +17,4 @@ futures.workspace = true
url.workspace = true
logos-blockchain-core.workspace = true
serde_json.workspace = true
async-stream.workspace = true

View File

@ -1,36 +1,34 @@
use std::{fs::File, io::BufReader, path::Path};
use anyhow::{Context, Result};
use bedrock_client::BackoffConfig;
use common::sequencer_client::BasicAuth;
use logos_blockchain_core::mantle::ops::channel::ChannelId;
use anyhow::{Context as _, Result};
pub use bedrock_client::BackoffConfig;
use common::config::BasicAuth;
pub use logos_blockchain_core::mantle::ops::channel::ChannelId;
use serde::{Deserialize, Serialize};
use url::Url;
#[derive(Debug, Clone, Serialize, Deserialize)]
/// ToDo: Expand if necessary
pub struct ClientConfig {
pub struct BedrockClientConfig {
/// For individual RPC requests we use Fibonacci backoff retry strategy.
pub backoff: BackoffConfig,
pub addr: Url,
pub auth: Option<BasicAuth>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
/// Note: For individual RPC requests we use Fibonacci backoff retry strategy
pub struct IndexerConfig {
pub resubscribe_interval_millis: u64,
pub backoff: BackoffConfig,
pub bedrock_client_config: ClientConfig,
pub sequencer_client_config: ClientConfig,
pub bedrock_client_config: BedrockClientConfig,
pub channel_id: ChannelId,
}
impl IndexerConfig {
pub fn from_path(config_home: &Path) -> Result<IndexerConfig> {
let file = File::open(config_home)
.with_context(|| format!("Failed to open indexer config at {config_home:?}"))?;
pub fn from_path(config_path: &Path) -> Result<IndexerConfig> {
let file = File::open(config_path)
.with_context(|| format!("Failed to open indexer config at {config_path:?}"))?;
let reader = BufReader::new(file);
serde_json::from_reader(reader)
.with_context(|| format!("Failed to parse indexer config at {config_home:?}"))
.with_context(|| format!("Failed to parse indexer config at {config_path:?}"))
}
}

110
indexer/core/src/lib.rs Normal file
View File

@ -0,0 +1,110 @@
use std::sync::Arc;
use anyhow::{Context as _, Result};
use bedrock_client::BedrockClient;
use common::block::Block;
use futures::StreamExt;
use log::{debug, info};
use logos_blockchain_core::mantle::{
Op, SignedMantleTx,
ops::channel::{ChannelId, inscribe::InscriptionOp},
};
use tokio::sync::RwLock;
use crate::{config::IndexerConfig, state::IndexerState};
pub mod config;
pub mod state;
/// Core of the indexer: follows the Bedrock (L1) chain and extracts the L2
/// blocks inscribed on the configured channel.
///
/// `state.latest_seen_block` is an `Arc<RwLock<_>>` (see `new`), so clones
/// share the same state; the cost of cloning `BedrockClient` is not visible
/// here — presumably cheap, TODO confirm.
#[derive(Clone)]
pub struct IndexerCore {
    /// RPC client for the Bedrock (L1) node.
    bedrock_client: BedrockClient,
    /// Indexer configuration (channel id, resubscribe interval, ...).
    config: IndexerConfig,
    /// Shared mutable state: highest L2 block id observed so far.
    state: IndexerState,
}
impl IndexerCore {
    /// Builds an indexer core from `config`.
    ///
    /// Fails only if the Bedrock RPC client cannot be constructed from the
    /// configured backoff/address/auth settings.
    pub fn new(config: IndexerConfig) -> Result<Self> {
        Ok(Self {
            bedrock_client: BedrockClient::new(
                config.bedrock_client_config.backoff,
                config.bedrock_client_config.addr.clone(),
                config.bedrock_client_config.auth.clone(),
            )
            .context("Failed to create Bedrock client")?,
            config,
            // No state setup for now, future task.
            state: IndexerState {
                latest_seen_block: Arc::new(RwLock::new(0)),
            },
        })
    }

    /// Returns a stream of L2 [`Block`]s parsed out of the Bedrock L1 block
    /// stream for the configured channel.
    ///
    /// The inner L1 subscription is re-established (after
    /// `resubscribe_interval_millis`) whenever it ends normally, so the
    /// returned stream is effectively endless on the happy path. It also
    /// updates `state.latest_seen_block` to the maximum block id yielded.
    ///
    /// NOTE(review): the `?` uses inside `async_stream::stream!` appear to
    /// yield the error and then terminate the whole stream rather than
    /// resubscribe — confirm whether RPC failures should instead retry.
    pub async fn subscribe_parse_block_stream(&self) -> impl futures::Stream<Item = Result<Block>> {
        debug!("Subscribing to Bedrock block stream");
        async_stream::stream! {
            loop {
                let mut stream_pinned = Box::pin(self.bedrock_client.get_lib_stream().await?);
                info!("Block stream joined");
                while let Some(block_info) = stream_pinned.next().await {
                    let header_id = block_info.header_id;
                    info!("Observed L1 block at height {}", block_info.height);
                    // `get_block_by_id` returning `None` (unknown block) is
                    // silently skipped; only RPC errors propagate.
                    if let Some(l1_block) = self
                        .bedrock_client
                        .get_block_by_id(header_id)
                        .await?
                    {
                        info!("Extracted L1 block at height {}", block_info.height);
                        let l2_blocks_parsed = parse_blocks(
                            l1_block.into_transactions().into_iter(),
                            &self.config.channel_id,
                        ).collect::<Vec<_>>();
                        info!("Parsed {} L2 blocks", l2_blocks_parsed.len());
                        for l2_block in l2_blocks_parsed {
                            // State modification, will be updated in future
                            // (write lock is scoped so it is released before
                            // the yield suspends the generator).
                            {
                                let mut guard = self.state.latest_seen_block.write().await;
                                if l2_block.header.block_id > *guard {
                                    *guard = l2_block.header.block_id;
                                }
                            }
                            yield Ok(l2_block);
                        }
                    }
                }
                // Refetch stream after delay
                tokio::time::sleep(std::time::Duration::from_millis(
                    self.config.resubscribe_interval_millis,
                ))
                .await;
            }
        }
    }
}
/// Extracts every L2 `Block` inscribed on `decoded_channel_id` from the given
/// L1 transactions.
///
/// Ops addressed to other channels, non-inscription ops, and payloads that
/// fail Borsh decoding are silently skipped.
fn parse_blocks(
    block_txs: impl Iterator<Item = SignedMantleTx>,
    decoded_channel_id: &ChannelId,
) -> impl Iterator<Item = Block> {
    block_txs.flat_map(|tx| {
        tx.mantle_tx.ops.into_iter().filter_map(|op| {
            if let Op::ChannelInscribe(InscriptionOp {
                channel_id,
                inscription,
                ..
            }) = op
            {
                if channel_id == *decoded_channel_id {
                    return borsh::from_slice::<Block>(&inscription).ok();
                }
            }
            None
        })
    })
}

View File

@ -5,8 +5,9 @@ edition = "2024"
license = { workspace = true }
[dependencies]
indexer_service_protocol.workspace = true
indexer_service_protocol = { workspace = true, features = ["convert"] }
indexer_service_rpc = { workspace = true, features = ["server"] }
indexer_core.workspace = true
clap = { workspace = true, features = ["derive"] }
anyhow.workspace = true
@ -15,7 +16,11 @@ tokio-util.workspace = true
env_logger.workspace = true
log.workspace = true
jsonrpsee.workspace = true
serde.workspace = true
serde_json.workspace = true
futures.workspace = true
async-trait = "0.1.89"
arc-swap = "1.8.1"
[features]
# Return mock responses with generated data for testing purposes

View File

@ -381,7 +381,7 @@ impl TryFrom<WitnessSet> for nssa::privacy_preserving_transaction::witness_set::
impl From<nssa::PublicTransaction> for PublicTransaction {
fn from(value: nssa::PublicTransaction) -> Self {
let hash = Hash(value.hash());
let hash = HashType(value.hash());
let nssa::PublicTransaction {
message,
witness_set,
@ -430,7 +430,7 @@ impl TryFrom<PublicTransaction> for nssa::PublicTransaction {
impl From<nssa::PrivacyPreservingTransaction> for PrivacyPreservingTransaction {
fn from(value: nssa::PrivacyPreservingTransaction) -> Self {
let hash = Hash(value.hash());
let hash = HashType(value.hash());
let nssa::PrivacyPreservingTransaction {
message,
witness_set,
@ -467,7 +467,7 @@ impl TryFrom<PrivacyPreservingTransaction> for nssa::PrivacyPreservingTransactio
impl From<nssa::ProgramDeploymentTransaction> for ProgramDeploymentTransaction {
fn from(value: nssa::ProgramDeploymentTransaction) -> Self {
let hash = Hash(value.hash());
let hash = HashType(value.hash());
let nssa::ProgramDeploymentTransaction { message } = value;
Self {
@ -531,8 +531,8 @@ impl From<common::block::BlockHeader> for BlockHeader {
} = value;
Self {
block_id,
prev_block_hash: Hash(prev_block_hash),
hash: Hash(hash),
prev_block_hash: prev_block_hash.into(),
hash: hash.into(),
timestamp,
signature: signature.into(),
}
@ -552,47 +552,32 @@ impl TryFrom<BlockHeader> for common::block::BlockHeader {
} = value;
Ok(Self {
block_id,
prev_block_hash: prev_block_hash.0,
hash: hash.0,
prev_block_hash: prev_block_hash.into(),
hash: hash.into(),
timestamp,
signature: signature.into(),
})
}
}
impl TryFrom<common::block::BlockBody> for BlockBody {
type Error = std::io::Error;
fn try_from(value: common::block::BlockBody) -> Result<Self, Self::Error> {
// Note: EncodedTransaction doesn't have a direct conversion to NSSATransaction
// This conversion will decode and re-encode the transactions
use borsh::BorshDeserialize as _;
impl From<common::block::BlockBody> for BlockBody {
fn from(value: common::block::BlockBody) -> Self {
let common::block::BlockBody { transactions } = value;
let transactions = transactions
.into_iter()
.map(|encoded_tx| match encoded_tx.tx_kind {
common::transaction::TxKind::Public => {
nssa::PublicTransaction::try_from_slice(&encoded_tx.encoded_transaction_data)
.map(|tx| Transaction::Public(tx.into()))
.map(|tx| match tx {
common::transaction::NSSATransaction::Public(tx) => Transaction::Public(tx.into()),
common::transaction::NSSATransaction::PrivacyPreserving(tx) => {
Transaction::PrivacyPreserving(tx.into())
}
common::transaction::TxKind::PrivacyPreserving => {
nssa::PrivacyPreservingTransaction::try_from_slice(
&encoded_tx.encoded_transaction_data,
)
.map(|tx| Transaction::PrivacyPreserving(tx.into()))
}
common::transaction::TxKind::ProgramDeployment => {
nssa::ProgramDeploymentTransaction::try_from_slice(
&encoded_tx.encoded_transaction_data,
)
.map(|tx| Transaction::ProgramDeployment(tx.into()))
common::transaction::NSSATransaction::ProgramDeployment(tx) => {
Transaction::ProgramDeployment(tx.into())
}
})
.collect::<Result<Vec<_>, _>>()?;
.collect();
Ok(Self { transactions })
Self { transactions }
}
}
@ -606,7 +591,7 @@ impl TryFrom<BlockBody> for common::block::BlockBody {
.into_iter()
.map(|tx| {
let nssa_tx: common::transaction::NSSATransaction = tx.try_into()?;
Ok::<_, nssa::error::NssaError>(nssa_tx.into())
Ok::<_, nssa::error::NssaError>(nssa_tx)
})
.collect::<Result<Vec<_>, _>>()?;
@ -614,10 +599,8 @@ impl TryFrom<BlockBody> for common::block::BlockBody {
}
}
impl TryFrom<common::block::Block> for Block {
type Error = std::io::Error;
fn try_from(value: common::block::Block) -> Result<Self, Self::Error> {
impl From<common::block::Block> for Block {
fn from(value: common::block::Block) -> Self {
let common::block::Block {
header,
body,
@ -625,12 +608,12 @@ impl TryFrom<common::block::Block> for Block {
bedrock_parent_id,
} = value;
Ok(Self {
Self {
header: header.into(),
body: body.try_into()?,
body: body.into(),
bedrock_status: bedrock_status.into(),
bedrock_parent_id: MantleMsgId(bedrock_parent_id),
})
}
}
}
@ -673,3 +656,15 @@ impl From<BedrockStatus> for common::block::BedrockStatus {
}
}
}
impl From<common::HashType> for HashType {
fn from(value: common::HashType) -> Self {
Self(value.0)
}
}
impl From<HashType> for common::HashType {
fn from(value: HashType) -> Self {
common::HashType(value.0)
}
}

View File

@ -42,8 +42,8 @@ pub struct Block {
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct BlockHeader {
pub block_id: BlockId,
pub prev_block_hash: Hash,
pub hash: Hash,
pub prev_block_hash: HashType,
pub hash: HashType,
pub timestamp: TimeStamp,
pub signature: Signature,
}
@ -69,7 +69,7 @@ pub enum Transaction {
impl Transaction {
/// Get the hash of the transaction
pub fn hash(&self) -> &self::Hash {
pub fn hash(&self) -> &self::HashType {
match self {
Transaction::Public(tx) => &tx.hash,
Transaction::PrivacyPreserving(tx) => &tx.hash,
@ -80,14 +80,14 @@ impl Transaction {
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct PublicTransaction {
pub hash: Hash,
pub hash: HashType,
pub message: PublicMessage,
pub witness_set: WitnessSet,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct PrivacyPreservingTransaction {
pub hash: Hash,
pub hash: HashType,
pub message: PrivacyPreservingMessage,
pub witness_set: WitnessSet,
}
@ -134,7 +134,7 @@ pub struct EncryptedAccountData {
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct ProgramDeploymentTransaction {
pub hash: Hash,
pub hash: HashType,
pub message: ProgramDeploymentMessage,
}
@ -197,7 +197,7 @@ pub struct Data(
);
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct Hash(
pub struct HashType(
#[serde(with = "base64::arr")]
#[schemars(with = "String", description = "base64-encoded hash")]
pub [u8; 32],

View File

@ -1,4 +1,4 @@
use indexer_service_protocol::{Account, AccountId, Block, BlockId, Hash, Transaction};
use indexer_service_protocol::{Account, AccountId, Block, BlockId, HashType, Transaction};
use jsonrpsee::proc_macros::rpc;
#[cfg(feature = "server")]
use jsonrpsee::{core::SubscriptionResult, types::ErrorObjectOwned};
@ -23,23 +23,20 @@ pub trait Rpc {
Ok(serde_json::to_value(block_schema).expect("Schema serialization should not fail"))
}
#[subscription(name = "subscribeToBlocks", item = Vec<Block>)]
async fn subscribe_to_blocks(&self, from: BlockId) -> SubscriptionResult;
#[subscription(name = "subscribeToFinalizedBlocks", item = BlockId)]
async fn subscribe_to_finalized_blocks(&self) -> SubscriptionResult;
#[method(name = "getBlockById")]
async fn get_block_by_id(&self, block_id: BlockId) -> Result<Block, ErrorObjectOwned>;
#[method(name = "getBlockByHash")]
async fn get_block_by_hash(&self, block_hash: Hash) -> Result<Block, ErrorObjectOwned>;
#[method(name = "getLastBlockId")]
async fn get_last_block_id(&self) -> Result<BlockId, ErrorObjectOwned>;
async fn get_block_by_hash(&self, block_hash: HashType) -> Result<Block, ErrorObjectOwned>;
#[method(name = "getAccount")]
async fn get_account(&self, account_id: AccountId) -> Result<Account, ErrorObjectOwned>;
#[method(name = "getTransaction")]
async fn get_transaction(&self, tx_hash: Hash) -> Result<Transaction, ErrorObjectOwned>;
async fn get_transaction(&self, tx_hash: HashType) -> Result<Transaction, ErrorObjectOwned>;
#[method(name = "getBlocks")]
async fn get_blocks(&self, offset: u32, limit: u32) -> Result<Vec<Block>, ErrorObjectOwned>;

View File

@ -0,0 +1,88 @@
use std::net::SocketAddr;
use anyhow::{Context as _, Result};
pub use indexer_core::config::*;
use indexer_service_rpc::RpcServer as _;
use jsonrpsee::server::Server;
use log::{error, info};
pub mod service;
#[cfg(feature = "mock-responses")]
pub mod mock_service;
/// Handle to a running Indexer RPC server.
///
/// Stops the server on drop. `server_handle` is an `Option` so `stopped()`
/// can move the handle out while `Drop` still has something to inspect.
pub struct IndexerHandle {
    addr: SocketAddr,
    server_handle: Option<jsonrpsee::server::ServerHandle>,
}
impl IndexerHandle {
    /// Wraps a freshly started server; the handle initially owns the
    /// `ServerHandle`.
    fn new(addr: SocketAddr, server_handle: jsonrpsee::server::ServerHandle) -> Self {
        Self {
            addr,
            server_handle: Some(server_handle),
        }
    }

    /// Local address the server is bound to.
    pub fn addr(&self) -> SocketAddr {
        self.addr
    }

    /// Consumes the handle and waits until the server has fully stopped.
    ///
    /// The `expect` cannot fire through the public API: `stopped` is the only
    /// method that takes the handle out, and it consumes `self`.
    pub async fn stopped(mut self) {
        let handle = self
            .server_handle
            .take()
            .expect("Indexer server handle is set");
        handle.stopped().await
    }

    /// True when the server is no longer running (or the handle was already
    /// consumed by `stopped`).
    pub fn is_stopped(&self) -> bool {
        self.server_handle
            .as_ref()
            .is_none_or(|handle| handle.is_stopped())
    }
}
impl Drop for IndexerHandle {
    /// Best-effort shutdown: asks the server to stop when the handle is
    /// dropped without having awaited `stopped()`.
    fn drop(&mut self) {
        // Destructure so adding a field forces this cleanup path to be
        // revisited.
        let Self {
            addr: _,
            server_handle,
        } = self;
        // `stopped()` takes the handle out, so this is a no-op after a clean
        // shutdown wait.
        let Some(handle) = server_handle else {
            return;
        };
        // NOTE(review): presumably `stop()` errs when the server is already
        // stopped — logged rather than propagated since Drop cannot fail.
        if let Err(err) = handle.stop() {
            error!("An error occurred while stopping Indexer RPC server: {err}");
        }
    }
}
/// Builds and starts the Indexer RPC server on `0.0.0.0:<port>`.
///
/// With the `mock-responses` feature enabled the server is backed by
/// `MockIndexerService` and `config` is ignored; otherwise a real
/// `IndexerService` is constructed from `config`.
///
/// # Errors
/// Fails if the server cannot be built/bound, its local address cannot be
/// read, or (without `mock-responses`) the indexer service fails to
/// initialize.
pub async fn run_server(config: IndexerConfig, port: u16) -> Result<IndexerHandle> {
    // Silence the unused-variable warning when the mock backend ignores `config`.
    #[cfg(feature = "mock-responses")]
    let _ = config;
    let server = Server::builder()
        .build(SocketAddr::from(([0, 0, 0, 0], port)))
        .await
        .context("Failed to build RPC server")?;
    let addr = server
        .local_addr()
        .context("Failed to get local address of RPC server")?;
    info!("Starting Indexer Service RPC server on {addr}");
    #[cfg(not(feature = "mock-responses"))]
    let handle = {
        let service =
            service::IndexerService::new(config).context("Failed to initialize indexer service")?;
        server.start(service.into_rpc())
    };
    #[cfg(feature = "mock-responses")]
    let handle = server.start(mock_service::MockIndexerService::new_with_mock_blocks().into_rpc());
    Ok(IndexerHandle::new(addr, handle))
}

View File

@ -1,15 +1,15 @@
use std::net::SocketAddr;
use std::path::PathBuf;
use anyhow::{Context as _, Result};
use anyhow::Result;
use clap::Parser;
use indexer_service_rpc::RpcServer as _;
use jsonrpsee::server::Server;
use log::{error, info};
use tokio_util::sync::CancellationToken;
#[derive(Debug, Parser)]
#[clap(version)]
struct Args {
#[clap(name = "config")]
config_path: PathBuf,
#[clap(short, long, default_value = "8779")]
port: u16,
}
@ -18,18 +18,18 @@ struct Args {
async fn main() -> Result<()> {
env_logger::init();
let args = Args::parse();
let Args { config_path, port } = Args::parse();
let cancellation_token = listen_for_shutdown_signal();
let handle = run_server(args.port).await?;
let handle_clone = handle.clone();
let config = indexer_service::IndexerConfig::from_path(&config_path)?;
let indexer_handle = indexer_service::run_server(config, port).await?;
tokio::select! {
_ = cancellation_token.cancelled() => {
info!("Shutting down server...");
}
_ = handle_clone.stopped() => {
_ = indexer_handle.stopped() => {
error!("Server stopped unexpectedly");
}
}
@ -39,28 +39,6 @@ async fn main() -> Result<()> {
Ok(())
}
async fn run_server(port: u16) -> Result<jsonrpsee::server::ServerHandle> {
let server = Server::builder()
.build(SocketAddr::from(([0, 0, 0, 0], port)))
.await
.context("Failed to build RPC server")?;
let addr = server
.local_addr()
.context("Failed to get local address of RPC server")?;
info!("Starting Indexer Service RPC server on {addr}");
#[cfg(not(feature = "mock-responses"))]
let handle = server.start(indexer_service::service::IndexerService.into_rpc());
#[cfg(feature = "mock-responses")]
let handle = server.start(
indexer_service::mock_service::MockIndexerService::new_with_mock_blocks().into_rpc(),
);
Ok(handle)
}
fn listen_for_shutdown_signal() -> CancellationToken {
let cancellation_token = CancellationToken::new();
let cancellation_token_clone = cancellation_token.clone();

View File

@ -2,9 +2,10 @@ use std::collections::HashMap;
use indexer_service_protocol::{
Account, AccountId, BedrockStatus, Block, BlockBody, BlockHeader, BlockId, Commitment,
CommitmentSetDigest, Data, EncryptedAccountData, Hash, MantleMsgId, PrivacyPreservingMessage,
PrivacyPreservingTransaction, ProgramDeploymentMessage, ProgramDeploymentTransaction,
PublicMessage, PublicTransaction, Signature, Transaction, WitnessSet,
CommitmentSetDigest, Data, EncryptedAccountData, HashType, MantleMsgId,
PrivacyPreservingMessage, PrivacyPreservingTransaction, ProgramDeploymentMessage,
ProgramDeploymentTransaction, PublicMessage, PublicTransaction, Signature, Transaction,
WitnessSet,
};
use jsonrpsee::{core::SubscriptionResult, types::ErrorObjectOwned};
@ -12,7 +13,7 @@ use jsonrpsee::{core::SubscriptionResult, types::ErrorObjectOwned};
pub struct MockIndexerService {
blocks: Vec<Block>,
accounts: HashMap<AccountId, Account>,
transactions: HashMap<Hash, (Transaction, BlockId)>,
transactions: HashMap<HashType, (Transaction, BlockId)>,
}
impl MockIndexerService {
@ -43,14 +44,14 @@ impl MockIndexerService {
}
// Create 10 blocks with transactions
let mut prev_hash = Hash([0u8; 32]);
let mut prev_hash = HashType([0u8; 32]);
for block_id in 0..10 {
let block_hash = {
let mut hash = [0u8; 32];
hash[0] = block_id as u8;
hash[1] = 0xff;
Hash(hash)
HashType(hash)
};
// Create 2-4 transactions per block (mix of Public, PrivacyPreserving, and
@ -63,7 +64,7 @@ impl MockIndexerService {
let mut hash = [0u8; 32];
hash[0] = block_id as u8;
hash[1] = tx_idx as u8;
Hash(hash)
HashType(hash)
};
// Vary transaction types: Public, PrivacyPreserving, or ProgramDeployment
@ -161,16 +162,22 @@ impl MockIndexerService {
}
}
// `async_trait` is required by `jsonrpsee`
#[async_trait::async_trait]
impl indexer_service_rpc::RpcServer for MockIndexerService {
async fn subscribe_to_blocks(
async fn subscribe_to_finalized_blocks(
&self,
_subscription_sink: jsonrpsee::PendingSubscriptionSink,
_from: BlockId,
subscription_sink: jsonrpsee::PendingSubscriptionSink,
) -> SubscriptionResult {
// Subscription not implemented for mock service
Err("Subscriptions not supported in mock service".into())
let sink = subscription_sink.accept().await?;
for block in self
.blocks
.iter()
.filter(|b| b.bedrock_status == BedrockStatus::Finalized)
{
let json = serde_json::value::to_raw_value(block).unwrap();
sink.send(json).await?;
}
Ok(())
}
async fn get_block_by_id(&self, block_id: BlockId) -> Result<Block, ErrorObjectOwned> {
@ -187,7 +194,7 @@ impl indexer_service_rpc::RpcServer for MockIndexerService {
})
}
async fn get_block_by_hash(&self, block_hash: Hash) -> Result<Block, ErrorObjectOwned> {
async fn get_block_by_hash(&self, block_hash: HashType) -> Result<Block, ErrorObjectOwned> {
self.blocks
.iter()
.find(|b| b.header.hash == block_hash)
@ -195,13 +202,6 @@ impl indexer_service_rpc::RpcServer for MockIndexerService {
.ok_or_else(|| ErrorObjectOwned::owned(-32001, "Block with hash not found", None::<()>))
}
async fn get_last_block_id(&self) -> Result<BlockId, ErrorObjectOwned> {
self.blocks
.last()
.map(|b| b.header.block_id)
.ok_or_else(|| ErrorObjectOwned::owned(-32001, "No blocks available", None::<()>))
}
async fn get_account(&self, account_id: AccountId) -> Result<Account, ErrorObjectOwned> {
self.accounts
.get(&account_id)
@ -209,7 +209,7 @@ impl indexer_service_rpc::RpcServer for MockIndexerService {
.ok_or_else(|| ErrorObjectOwned::owned(-32001, "Account not found", None::<()>))
}
async fn get_transaction(&self, tx_hash: Hash) -> Result<Transaction, ErrorObjectOwned> {
async fn get_transaction(&self, tx_hash: HashType) -> Result<Transaction, ErrorObjectOwned> {
self.transactions
.get(&tx_hash)
.map(|(tx, _)| tx.clone())

View File

@ -0,0 +1,219 @@
use std::{pin::pin, sync::Arc};
use anyhow::{Context as _, Result, bail};
use arc_swap::ArcSwap;
use futures::{StreamExt as _, never::Never};
use indexer_core::{IndexerCore, config::IndexerConfig};
use indexer_service_protocol::{Account, AccountId, Block, BlockId, HashType, Transaction};
use jsonrpsee::{
SubscriptionSink,
core::{Serialize, SubscriptionResult},
types::ErrorObjectOwned,
};
use log::{debug, error, info, warn};
use tokio::sync::mpsc::UnboundedSender;
/// JSON-RPC server implementation backed by [`IndexerCore`].
pub struct IndexerService {
    // Fans finalized-block notifications out to RPC subscribers.
    subscription_service: SubscriptionService,
    #[expect(
        dead_code,
        reason = "Will be used in future implementations of RPC methods"
    )]
    indexer: IndexerCore,
}
impl IndexerService {
    /// Creates the service and immediately spawns the background loop that
    /// serves block subscriptions.
    ///
    /// # Errors
    /// Returns an error if `IndexerCore` cannot be constructed from `config`.
    pub fn new(config: IndexerConfig) -> Result<Self> {
        let indexer = IndexerCore::new(config)?;
        let subscription_service = SubscriptionService::spawn_new(indexer.clone());
        Ok(Self {
            subscription_service,
            indexer,
        })
    }
}
#[async_trait::async_trait]
impl indexer_service_rpc::RpcServer for IndexerService {
    /// Accepts the pending subscription and registers it with the background
    /// loop, which then pushes each finalized block's ID to the client.
    async fn subscribe_to_finalized_blocks(
        &self,
        subscription_sink: jsonrpsee::PendingSubscriptionSink,
    ) -> SubscriptionResult {
        let sink = subscription_sink.accept().await?;
        info!(
            "Accepted new subscription to finalized blocks with ID {:?}",
            sink.subscription_id()
        );
        self.subscription_service
            .add_subscription(Subscription::new(sink))
            .await?;
        Ok(())
    }

    // The remaining RPC methods are not implemented yet; calling them
    // panics via `todo!()`.
    async fn get_block_by_id(&self, _block_id: BlockId) -> Result<Block, ErrorObjectOwned> {
        todo!()
    }

    async fn get_block_by_hash(&self, _block_hash: HashType) -> Result<Block, ErrorObjectOwned> {
        todo!()
    }

    async fn get_account(&self, _account_id: AccountId) -> Result<Account, ErrorObjectOwned> {
        todo!()
    }

    async fn get_transaction(&self, _tx_hash: HashType) -> Result<Transaction, ErrorObjectOwned> {
        todo!()
    }

    async fn get_blocks(&self, _offset: u32, _limit: u32) -> Result<Vec<Block>, ErrorObjectOwned> {
        todo!()
    }

    async fn get_transactions_by_account(
        &self,
        _account_id: AccountId,
        _limit: u32,
        _offset: u32,
    ) -> Result<Vec<Transaction>, ErrorObjectOwned> {
        todo!()
    }
}
/// Owns the background task that forwards finalized block IDs to RPC
/// subscribers, and can respawn it after a failure.
struct SubscriptionService {
    // Swappable so a dead loop's handle/sender pair can be atomically replaced.
    parts: ArcSwap<SubscriptionLoopParts>,
    // Kept so a respawned loop can re-subscribe to the block stream.
    indexer: IndexerCore,
}
impl SubscriptionService {
    /// Spawns the background loop immediately; subscribers are added later
    /// via [`Self::add_subscription`].
    pub fn spawn_new(indexer: IndexerCore) -> Self {
        let parts = Self::spawn_respond_subscribers_loop(indexer.clone());
        Self {
            parts: ArcSwap::new(Arc::new(parts)),
            indexer,
        }
    }

    /// Hands a new subscription to the background loop.
    ///
    /// If sending fails and the loop task has already finished (error or
    /// panic), the loop is respawned before returning — but the current
    /// `subscription` is still reported as failed, so the caller must retry.
    pub async fn add_subscription(&self, subscription: Subscription<BlockId>) -> Result<()> {
        let guard = self.parts.load();
        if let Err(err) = guard.new_subscription_sender.send(subscription) {
            error!("Failed to send new subscription to subscription service with error: {err:#?}");
            // Respawn the subscription service loop if it has finished (either with error or panic)
            if guard.handle.is_finished() {
                let new_parts = Self::spawn_respond_subscribers_loop(self.indexer.clone());
                let old_handle_and_sender = self.parts.swap(Arc::new(new_parts));
                // NOTE(review): `guard` may still pin the old `Arc` here; if
                // the ArcSwap guard counts as a reference, `Arc::into_inner`
                // returns `None` and this `expect` panics — confirm against
                // arc-swap's guard semantics.
                let old_parts = Arc::into_inner(old_handle_and_sender)
                    .expect("There should be no other references to the old handle and sender");
                // The task is already finished, so awaiting only collects its outcome.
                match old_parts.handle.await {
                    Ok(Err(err)) => {
                        error!(
                            "Subscription service loop has unexpectedly finished with err: {err:#}"
                        );
                    }
                    Err(err) => {
                        error!("Subscription service loop has panicked with err: {err:#}");
                    }
                }
            }
            bail!(err);
        };
        Ok(())
    }

    /// Spawns the task that multiplexes the indexer's block stream to all
    /// registered subscribers; returns its join handle plus the channel used
    /// to register new subscribers.
    fn spawn_respond_subscribers_loop(indexer: IndexerCore) -> SubscriptionLoopParts {
        let (new_subscription_sender, mut sub_receiver) =
            tokio::sync::mpsc::unbounded_channel::<Subscription<BlockId>>();
        let handle = tokio::spawn(async move {
            let mut subscribers = Vec::new();
            let mut block_stream = pin!(indexer.subscribe_parse_block_stream().await);
            loop {
                tokio::select! {
                    // A new subscriber registered through `add_subscription`.
                    sub = sub_receiver.recv() => {
                        let Some(subscription) = sub else {
                            bail!("Subscription receiver closed unexpectedly");
                        };
                        info!("Added new subscription with ID {:?}", subscription.sink.subscription_id());
                        subscribers.push(subscription);
                    }
                    // A new block arrived from the indexer's stream.
                    block_opt = block_stream.next() => {
                        debug!("Got new block from block stream");
                        let Some(block) = block_opt else {
                            bail!("Block stream ended unexpectedly");
                        };
                        let block = block.context("Failed to get L2 block data")?;
                        let block: indexer_service_protocol::Block = block.into();
                        // Best effort: a failed send is logged; the subscriber
                        // is not removed here.
                        for sub in &mut subscribers {
                            if let Err(err) = sub.try_send(&block.header.block_id) {
                                warn!(
                                    "Failed to send block ID {:?} to subscription ID {:?} with error: {err:#?}",
                                    block.header.block_id,
                                    sub.sink.subscription_id(),
                                );
                            }
                        }
                    }
                }
            }
        });
        SubscriptionLoopParts {
            handle,
            new_subscription_sender,
        }
    }
}
impl Drop for SubscriptionService {
    fn drop(&mut self) {
        // Abort the background loop; otherwise the spawned task would outlive
        // the service and keep polling the block stream.
        self.parts.load().handle.abort();
    }
}
/// Join handle of the spawned subscription loop plus the channel used to
/// register new subscribers with it.
struct SubscriptionLoopParts {
    // `Never` success type: the loop only ever exits with an error.
    handle: tokio::task::JoinHandle<Result<Never>>,
    new_subscription_sender: UnboundedSender<Subscription<BlockId>>,
}
/// A single client subscription, typed by the item `T` it emits.
struct Subscription<T> {
    sink: SubscriptionSink,
    // Zero-sized: records the item type without storing a `T`.
    _marker: std::marker::PhantomData<T>,
}
impl<T> Subscription<T> {
    /// Wraps an accepted sink into a typed subscription.
    fn new(sink: SubscriptionSink) -> Self {
        Self {
            _marker: std::marker::PhantomData,
            sink,
        }
    }

    /// Serializes `item` to raw JSON and pushes it into the sink without
    /// awaiting; fails on serialization errors or a full/closed sink.
    fn try_send(&mut self, item: &T) -> Result<()>
    where
        T: Serialize,
    {
        let payload = serde_json::value::to_raw_value(item)
            .context("Failed to serialize item for subscription")?;
        Ok(self.sink.try_send(payload)?)
    }
}
impl<T> Drop for Subscription<T> {
    fn drop(&mut self) {
        // Log subscription teardown (client disconnect or service shutdown)
        // so subscription lifetimes can be traced in the logs.
        info!(
            "Subscription with ID {:?} is being dropped",
            self.sink.subscription_id()
        );
    }
}

View File

@ -1,124 +0,0 @@
use std::sync::Arc;
use anyhow::Result;
use bedrock_client::BedrockClient;
use common::{
block::HashableBlockData, communication::indexer::Message,
rpc_primitives::requests::PostIndexerMessageResponse, sequencer_client::SequencerClient,
};
use futures::StreamExt;
use log::info;
use logos_blockchain_core::mantle::{
Op, SignedMantleTx,
ops::channel::{ChannelId, inscribe::InscriptionOp},
};
use tokio::sync::RwLock;
use crate::{config::IndexerConfig, state::IndexerState};
pub mod config;
pub mod state;
pub struct IndexerCore {
pub bedrock_client: BedrockClient,
pub sequencer_client: SequencerClient,
pub config: IndexerConfig,
pub state: IndexerState,
}
impl IndexerCore {
pub fn new(config: IndexerConfig) -> Result<Self> {
Ok(Self {
bedrock_client: BedrockClient::new(
config.bedrock_client_config.auth.clone().map(Into::into),
config.bedrock_client_config.addr.clone(),
)?,
sequencer_client: SequencerClient::new_with_auth(
config.sequencer_client_config.addr.clone(),
config.sequencer_client_config.auth.clone(),
)?,
config,
// No state setup for now, future task.
state: IndexerState {
latest_seen_block: Arc::new(RwLock::new(0)),
},
})
}
pub async fn subscribe_parse_block_stream(&self) -> Result<()> {
loop {
let mut stream_pinned = Box::pin(self.bedrock_client.get_lib_stream().await?);
info!("Block stream joined");
while let Some(block_info) = stream_pinned.next().await {
let header_id = block_info.header_id;
info!("Observed L1 block at height {}", block_info.height);
if let Some(l1_block) = self
.bedrock_client
.get_block_by_id(header_id, &self.config.backoff)
.await?
{
info!("Extracted L1 block at height {}", block_info.height);
let l2_blocks_parsed = parse_blocks(
l1_block.into_transactions().into_iter(),
&self.config.channel_id,
);
for l2_block in l2_blocks_parsed {
// State modification, will be updated in future
{
let mut guard = self.state.latest_seen_block.write().await;
if l2_block.block_id > *guard {
*guard = l2_block.block_id;
}
}
// Sending data into sequencer, may need to be expanded.
let message = Message::L2BlockFinalized {
l2_block_height: l2_block.block_id,
};
let status = self.send_message_to_sequencer(message.clone()).await?;
info!("Sent message {message:#?} to sequencer; status {status:#?}");
}
}
}
// Refetch stream after delay
tokio::time::sleep(std::time::Duration::from_millis(
self.config.resubscribe_interval_millis,
))
.await;
}
}
pub async fn send_message_to_sequencer(
&self,
message: Message,
) -> Result<PostIndexerMessageResponse> {
Ok(self.sequencer_client.post_indexer_message(message).await?)
}
}
fn parse_blocks(
block_txs: impl Iterator<Item = SignedMantleTx>,
decoded_channel_id: &ChannelId,
) -> impl Iterator<Item = HashableBlockData> {
block_txs.flat_map(|tx| {
tx.mantle_tx.ops.into_iter().filter_map(|op| match op {
Op::ChannelInscribe(InscriptionOp {
channel_id,
inscription,
..
}) if channel_id == *decoded_channel_id => {
borsh::from_slice::<HashableBlockData>(&inscription).ok()
}
_ => None,
})
})
}

View File

@ -1,4 +0,0 @@
pub mod service;
#[cfg(feature = "mock-responses")]
pub mod mock_service;

View File

@ -1,49 +0,0 @@
use indexer_service_protocol::{Account, AccountId, Block, BlockId, Hash, Transaction};
use jsonrpsee::{core::SubscriptionResult, types::ErrorObjectOwned};
pub struct IndexerService;
// `async_trait` is required by `jsonrpsee`
#[async_trait::async_trait]
impl indexer_service_rpc::RpcServer for IndexerService {
async fn subscribe_to_blocks(
&self,
_subscription_sink: jsonrpsee::PendingSubscriptionSink,
_from: BlockId,
) -> SubscriptionResult {
todo!()
}
async fn get_block_by_id(&self, _block_id: BlockId) -> Result<Block, ErrorObjectOwned> {
todo!()
}
async fn get_block_by_hash(&self, _block_hash: Hash) -> Result<Block, ErrorObjectOwned> {
todo!()
}
async fn get_last_block_id(&self) -> Result<BlockId, ErrorObjectOwned> {
todo!()
}
async fn get_account(&self, _account_id: AccountId) -> Result<Account, ErrorObjectOwned> {
todo!()
}
async fn get_transaction(&self, _tx_hash: Hash) -> Result<Transaction, ErrorObjectOwned> {
todo!()
}
async fn get_blocks(&self, _offset: u32, _limit: u32) -> Result<Vec<Block>, ErrorObjectOwned> {
todo!()
}
async fn get_transactions_by_account(
&self,
_account_id: AccountId,
_limit: u32,
_offset: u32,
) -> Result<Vec<Transaction>, ErrorObjectOwned> {
todo!()
}
}

View File

@ -7,20 +7,20 @@ license = { workspace = true }
[dependencies]
nssa_core = { workspace = true, features = ["host"] }
nssa.workspace = true
sequencer_core = { workspace = true, features = ["testnet"] }
sequencer_core = { workspace = true, features = ["default", "testnet"] }
sequencer_runner.workspace = true
wallet.workspace = true
common.workspace = true
key_protocol.workspace = true
indexer_core.workspace = true
wallet-ffi.workspace = true
serde_json.workspace = true
token_core.workspace = true
indexer_service.workspace = true
url.workspace = true
anyhow.workspace = true
env_logger.workspace = true
log.workspace = true
serde_json.workspace = true
actix-web.workspace = true
base64.workspace = true
tokio = { workspace = true, features = ["rt-multi-thread", "macros"] }
@ -28,3 +28,5 @@ hex.workspace = true
tempfile.workspace = true
borsh.workspace = true
futures.workspace = true
rand.workspace = true
testcontainers = { version = "0.26.3", features = ["docker-compose"] }

View File

@ -0,0 +1,32 @@
# Bedrock Configuration Files for Integration Tests
## How to update
- `docker-compose.yml` file.
Compare with `https://github.com/logos-blockchain/logos-blockchain/blob/master/compose.static.yml` and update the file accordingly; do not bring in unneeded services such as Grafana.
Replace the `sha256` digest with the digest of the latest `testnet` tag.
- `scripts` folder.
```bash
curl https://raw.githubusercontent.com/logos-blockchain/logos-blockchain/master/testnet/scripts/run_cfgsync.sh >> scripts/run_cfgsync.sh
curl https://raw.githubusercontent.com/logos-blockchain/logos-blockchain/master/testnet/scripts/run_logos_blockchain_node.sh >> scripts/run_logos_blockchain_node.sh
chmod +x scripts/*
```
Then in `scripts/run_logos_blockchain_node.sh` update `cfgsync-client` to `logos-blockchain-cfgsync-client` and in `scripts/run_cfgsync.sh` update `cfgsync-server` to `logos-blockchain-cfgsync-server` if it hasn't been fixed already, see <https://github.com/logos-blockchain/logos-blockchain/pull/2092>.
- `cfgsync.yaml` file.
```bash
curl -O https://raw.githubusercontent.com/logos-blockchain/logos-blockchain/master/testnet/cfgsync.yaml
```
Set `logger`, `tracing`, and `metrics` to `None`.
- `kzgrs_test_params` file.
```bash
curl -O https://raw.githubusercontent.com/logos-blockchain/logos-blockchain/master/tests/kzgrs/kzgrs_test_params
```

View File

@ -0,0 +1,14 @@
port: 4400
n_hosts: 4
timeout: 10
# Tracing
tracing_settings:
logger: None
tracing: None
filter: !EnvFilter
filters:
logos-blockchain: debug
metrics: None
console: None
level: INFO

View File

@ -0,0 +1,47 @@
services:
cfgsync:
image: ghcr.io/logos-blockchain/logos-blockchain@sha256:000982e751dfd346ca5346b8025c685fc3abc585079c59cde3bde7fd63100657
volumes:
- ./scripts:/etc/logos-blockchain/scripts
- ./cfgsync.yaml:/etc/logos-blockchain/cfgsync.yaml:z
entrypoint: /etc/logos-blockchain/scripts/run_cfgsync.sh
logos-blockchain-node-0:
image: ghcr.io/logos-blockchain/logos-blockchain@sha256:000982e751dfd346ca5346b8025c685fc3abc585079c59cde3bde7fd63100657
ports:
# Map 0 port so that multiple instances can run on the same host
- "0:18080/tcp"
volumes:
- ./scripts:/etc/logos-blockchain/scripts
- ./kzgrs_test_params:/kzgrs_test_params:z
depends_on:
- cfgsync
entrypoint: /etc/logos-blockchain/scripts/run_logos_blockchain_node.sh
logos-blockchain-node-1:
image: ghcr.io/logos-blockchain/logos-blockchain@sha256:000982e751dfd346ca5346b8025c685fc3abc585079c59cde3bde7fd63100657
volumes:
- ./scripts:/etc/logos-blockchain/scripts
- ./kzgrs_test_params:/kzgrs_test_params:z
depends_on:
- cfgsync
entrypoint: /etc/logos-blockchain/scripts/run_logos_blockchain_node.sh
logos-blockchain-node-2:
image: ghcr.io/logos-blockchain/logos-blockchain@sha256:000982e751dfd346ca5346b8025c685fc3abc585079c59cde3bde7fd63100657
volumes:
- ./scripts:/etc/logos-blockchain/scripts
- ./kzgrs_test_params:/kzgrs_test_params:z
depends_on:
- cfgsync
entrypoint: /etc/logos-blockchain/scripts/run_logos_blockchain_node.sh
logos-blockchain-node-3:
image: ghcr.io/logos-blockchain/logos-blockchain@sha256:000982e751dfd346ca5346b8025c685fc3abc585079c59cde3bde7fd63100657
volumes:
- ./scripts:/etc/logos-blockchain/scripts
- ./kzgrs_test_params:/kzgrs_test_params:z
depends_on:
- cfgsync
entrypoint: /etc/logos-blockchain/scripts/run_logos_blockchain_node.sh

Binary file not shown.

View File

@ -0,0 +1,5 @@
#!/bin/sh
# Start the cfgsync server with the mounted config.
# `exec` replaces the shell so signals reach the server process directly.
set -e
exec /usr/bin/logos-blockchain-cfgsync-server /etc/logos-blockchain/cfgsync.yaml

View File

@ -0,0 +1,13 @@
#!/bin/sh
set -e
# Point the node at the cfgsync server and identify this container by its IP.
export CFG_FILE_PATH="/config.yaml" \
CFG_SERVER_ADDR="http://cfgsync:4400" \
CFG_HOST_IP=$(hostname -i) \
CFG_HOST_IDENTIFIER="validator-$(hostname -i)" \
LOG_LEVEL="INFO" \
POL_PROOF_DEV_MODE=true
# Fetch the node config first, then replace the shell with the node process.
/usr/bin/logos-blockchain-cfgsync-client && \
exec /usr/bin/logos-blockchain-node /config.yaml

View File

@ -1,17 +0,0 @@
{
"bedrock_client_config": {
"addr": "http://127.0.0.1:8080",
"auth": {
"username": "user"
}
},
"channel_id": "0101010101010101010101010101010101010101010101010101010101010101",
"backoff": {
"max_retries": 10,
"start_delay_millis": 100
},
"resubscribe_interval_millis": 1000,
"sequencer_client_config": {
"addr": "will_be_replaced_in_runtime"
}
}

View File

@ -1,165 +0,0 @@
{
"home": "",
"override_rust_log": null,
"genesis_id": 1,
"is_genesis_random": true,
"max_num_tx_in_block": 20,
"mempool_max_size": 10000,
"block_create_timeout_millis": 10000,
"port": 0,
"initial_accounts": [
{
"account_id": "BLgCRDXYdQPMMWVHYRFGQZbgeHx9frkipa8GtpG2Syqy",
"balance": 10000
},
{
"account_id": "Gj1mJy5W7J5pfmLRujmQaLfLMWidNxQ6uwnhb666ZwHw",
"balance": 20000
}
],
"initial_commitments": [
{
"npk": [
63,
202,
178,
231,
183,
82,
237,
212,
216,
221,
215,
255,
153,
101,
177,
161,
254,
210,
128,
122,
54,
190,
230,
151,
183,
64,
225,
229,
113,
1,
228,
97
],
"account": {
"program_owner": [
0,
0,
0,
0,
0,
0,
0,
0
],
"balance": 10000,
"data": [],
"nonce": 0
}
},
{
"npk": [
192,
251,
166,
243,
167,
236,
84,
249,
35,
136,
130,
172,
219,
225,
161,
139,
229,
89,
243,
125,
194,
213,
209,
30,
23,
174,
100,
244,
124,
74,
140,
47
],
"account": {
"program_owner": [
0,
0,
0,
0,
0,
0,
0,
0
],
"balance": 20000,
"data": [],
"nonce": 0
}
}
],
"signing_key": [
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37
],
"bedrock_config": {
"channel_id": "0101010101010101010101010101010101010101010101010101010101010101",
"node_url": "http://127.0.0.1:8080",
"auth": {
"username": "user"
}
}
}

View File

@ -1,159 +0,0 @@
{
"home": "",
"override_rust_log": null,
"genesis_id": 1,
"is_genesis_random": true,
"max_num_tx_in_block": 20,
"mempool_max_size": 10000,
"block_create_timeout_millis": 10000,
"retry_pending_blocks_timeout_millis": 240000,
"port": 0,
"initial_accounts": [
{
"account_id": "6iArKUXxhUJqS7kCaPNhwMWt3ro71PDyBj7jwAyE2VQV",
"balance": 10000
},
{
"account_id": "7wHg9sbJwc6h3NP1S9bekfAzB8CHifEcxKswCKUt3YQo",
"balance": 20000
}
],
"initial_commitments": [
{
"npk": [
63,
202,
178,
231,
183,
82,
237,
212,
216,
221,
215,
255,
153,
101,
177,
161,
254,
210,
128,
122,
54,
190,
230,
151,
183,
64,
225,
229,
113,
1,
228,
97
],
"account": {
"program_owner": [
0,
0,
0,
0,
0,
0,
0,
0
],
"balance": 10000,
"data": [],
"nonce": 0
}
},
{
"npk": [
192,
251,
166,
243,
167,
236,
84,
249,
35,
136,
130,
172,
219,
225,
161,
139,
229,
89,
243,
125,
194,
213,
209,
30,
23,
174,
100,
244,
124,
74,
140,
47
],
"account": {
"program_owner": [
0,
0,
0,
0,
0,
0,
0,
0
],
"balance": 20000,
"data": [],
"nonce": 0
}
}
],
"signing_key": [
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37
]
}

View File

@ -1,547 +0,0 @@
{
"override_rust_log": null,
"sequencer_addr": "",
"seq_poll_timeout_millis": 12000,
"seq_tx_poll_max_blocks": 5,
"seq_poll_max_retries": 5,
"seq_block_poll_max_amount": 100,
"basic_auth": null,
"initial_accounts": [
{
"Public": {
"account_id": "6iArKUXxhUJqS7kCaPNhwMWt3ro71PDyBj7jwAyE2VQV",
"pub_sign_key": [
16,
162,
106,
154,
236,
125,
52,
184,
35,
100,
238,
174,
69,
197,
41,
77,
187,
10,
118,
75,
0,
11,
148,
238,
185,
181,
133,
17,
220,
72,
124,
77
]
}
},
{
"Public": {
"account_id": "7wHg9sbJwc6h3NP1S9bekfAzB8CHifEcxKswCKUt3YQo",
"pub_sign_key": [
113,
121,
64,
177,
204,
85,
229,
214,
178,
6,
109,
191,
29,
154,
63,
38,
242,
18,
244,
219,
8,
208,
35,
136,
23,
127,
207,
237,
216,
169,
190,
27
]
}
},
{
"Private": {
"account_id": "3oCG8gqdKLMegw4rRfyaMQvuPHpcASt7xwttsmnZLSkw",
"account": {
"program_owner": [
0,
0,
0,
0,
0,
0,
0,
0
],
"balance": 10000,
"data": [],
"nonce": 0
},
"key_chain": {
"secret_spending_key": [
251,
82,
235,
1,
146,
96,
30,
81,
162,
234,
33,
15,
123,
129,
116,
0,
84,
136,
176,
70,
190,
224,
161,
54,
134,
142,
154,
1,
18,
251,
242,
189
],
"private_key_holder": {
"nullifier_secret_key": [
29,
250,
10,
187,
35,
123,
180,
250,
246,
97,
216,
153,
44,
156,
16,
93,
241,
26,
174,
219,
72,
84,
34,
247,
112,
101,
217,
243,
189,
173,
75,
20
],
"incoming_viewing_secret_key": [
251,
201,
22,
154,
100,
165,
218,
108,
163,
190,
135,
91,
145,
84,
69,
241,
46,
117,
217,
110,
197,
248,
91,
193,
14,
104,
88,
103,
67,
153,
182,
158
],
"outgoing_viewing_secret_key": [
25,
67,
121,
76,
175,
100,
30,
198,
105,
123,
49,
169,
75,
178,
75,
210,
100,
143,
210,
243,
228,
243,
21,
18,
36,
84,
164,
186,
139,
113,
214,
12
]
},
"nullifer_public_key": [
63,
202,
178,
231,
183,
82,
237,
212,
216,
221,
215,
255,
153,
101,
177,
161,
254,
210,
128,
122,
54,
190,
230,
151,
183,
64,
225,
229,
113,
1,
228,
97
],
"incoming_viewing_public_key": [
3,
235,
139,
131,
237,
177,
122,
189,
6,
177,
167,
178,
202,
117,
246,
58,
28,
65,
132,
79,
220,
139,
119,
243,
187,
160,
212,
121,
61,
247,
116,
72,
205
]
}
}
},
{
"Private": {
"account_id": "AKTcXgJ1xoynta1Ec7y6Jso1z1JQtHqd7aPQ1h9er6xX",
"account": {
"program_owner": [
0,
0,
0,
0,
0,
0,
0,
0
],
"balance": 20000,
"data": [],
"nonce": 0
},
"key_chain": {
"secret_spending_key": [
238,
171,
241,
69,
111,
217,
85,
64,
19,
82,
18,
189,
32,
91,
78,
175,
107,
7,
109,
60,
52,
44,
243,
230,
72,
244,
192,
92,
137,
33,
118,
254
],
"private_key_holder": {
"nullifier_secret_key": [
25,
211,
215,
119,
57,
223,
247,
37,
245,
144,
122,
29,
118,
245,
83,
228,
23,
9,
101,
120,
88,
33,
238,
207,
128,
61,
110,
2,
89,
62,
164,
13
],
"incoming_viewing_secret_key": [
193,
181,
14,
196,
142,
84,
15,
65,
128,
101,
70,
196,
241,
47,
130,
221,
23,
146,
161,
237,
221,
40,
19,
126,
59,
15,
169,
236,
25,
105,
104,
231
],
"outgoing_viewing_secret_key": [
20,
170,
220,
108,
41,
23,
155,
217,
247,
190,
175,
168,
247,
34,
105,
134,
114,
74,
104,
91,
211,
62,
126,
13,
130,
100,
241,
214,
250,
236,
38,
150
]
},
"nullifer_public_key": [
192,
251,
166,
243,
167,
236,
84,
249,
35,
136,
130,
172,
219,
225,
161,
139,
229,
89,
243,
125,
194,
213,
209,
30,
23,
174,
100,
244,
124,
74,
140,
47
],
"incoming_viewing_public_key": [
2,
181,
98,
93,
216,
241,
241,
110,
58,
198,
119,
174,
250,
184,
1,
204,
200,
173,
44,
238,
37,
247,
170,
156,
100,
254,
116,
242,
28,
183,
187,
77,
255
]
}
}
}
]
}

View File

@ -0,0 +1,255 @@
use std::{net::SocketAddr, path::PathBuf};
use anyhow::{Context, Result};
use indexer_service::{BackoffConfig, BedrockClientConfig, ChannelId, IndexerConfig};
use key_protocol::key_management::KeyChain;
use nssa::{Account, AccountId, PrivateKey, PublicKey};
use nssa_core::{account::Data, program::DEFAULT_PROGRAM_ID};
use sequencer_core::config::{
AccountInitialData, BedrockConfig, CommitmentsInitialData, SequencerConfig,
};
use url::Url;
use wallet::config::{
InitialAccountData, InitialAccountDataPrivate, InitialAccountDataPublic, WalletConfig,
};
/// Builds an `IndexerConfig` pointing at the given Bedrock node address,
/// with test defaults: no auth, 100 ms starting backoff (10 retries max),
/// and a 1 s resubscribe interval.
///
/// # Errors
/// Fails if `bedrock_addr` cannot be converted into a URL.
pub fn indexer_config(bedrock_addr: SocketAddr) -> Result<IndexerConfig> {
    Ok(IndexerConfig {
        resubscribe_interval_millis: 1000,
        bedrock_client_config: BedrockClientConfig {
            addr: addr_to_url(UrlProtocol::Http, bedrock_addr)
                .context("Failed to convert bedrock addr to URL")?,
            auth: None,
            backoff: BackoffConfig {
                start_delay_millis: 100,
                max_retries: 10,
            },
        },
        channel_id: bedrock_channel_id(),
    })
}
/// Sequencer config options available for custom changes in integration tests.
pub struct SequencerPartialConfig {
    /// Maximum number of transactions per block.
    pub max_num_tx_in_block: usize,
    /// Maximum mempool size, in entries.
    pub mempool_max_size: usize,
    /// Block creation timeout, in milliseconds.
    pub block_create_timeout_millis: u64,
}
impl Default for SequencerPartialConfig {
fn default() -> Self {
Self {
max_num_tx_in_block: 20,
mempool_max_size: 10_000,
block_create_timeout_millis: 10_000,
}
}
}
/// Builds a full `SequencerConfig` from the customizable `partial` options
/// plus fixed test defaults (genesis id 1, random genesis, fixed signing
/// key, 100 ms Bedrock backoff).
///
/// `port: 0` lets the OS pick a free port; `bedrock_addr` and
/// `indexer_addr` are converted to `http` and `ws` URLs respectively.
///
/// # Errors
/// Fails if either address cannot be converted into a URL.
pub fn sequencer_config(
    partial: SequencerPartialConfig,
    home: PathBuf,
    bedrock_addr: SocketAddr,
    indexer_addr: SocketAddr,
    initial_data: &InitialData,
) -> Result<SequencerConfig> {
    let SequencerPartialConfig {
        max_num_tx_in_block,
        mempool_max_size,
        block_create_timeout_millis,
    } = partial;
    Ok(SequencerConfig {
        home,
        override_rust_log: None,
        genesis_id: 1,
        is_genesis_random: true,
        max_num_tx_in_block,
        mempool_max_size,
        block_create_timeout_millis,
        retry_pending_blocks_timeout_millis: 240_000,
        port: 0,
        initial_accounts: initial_data.sequencer_initial_accounts(),
        initial_commitments: initial_data.sequencer_initial_commitments(),
        signing_key: [37; 32],
        bedrock_config: BedrockConfig {
            backoff: BackoffConfig {
                start_delay_millis: 100,
                max_retries: 5,
            },
            channel_id: bedrock_channel_id(),
            node_url: addr_to_url(UrlProtocol::Http, bedrock_addr)
                .context("Failed to convert bedrock addr to URL")?,
            auth: None,
        },
        indexer_rpc_url: addr_to_url(UrlProtocol::Ws, indexer_addr)
            .context("Failed to convert indexer addr to URL")?,
    })
}
/// Builds a `WalletConfig` that talks to the given sequencer address, with
/// test-friendly polling settings and no auth.
///
/// # Errors
/// Fails if `sequencer_addr` cannot be converted into a URL.
pub fn wallet_config(
    sequencer_addr: SocketAddr,
    initial_data: &InitialData,
) -> Result<WalletConfig> {
    Ok(WalletConfig {
        override_rust_log: None,
        sequencer_addr: addr_to_url(UrlProtocol::Http, sequencer_addr)
            .context("Failed to convert sequencer addr to URL")?,
        seq_poll_timeout_millis: 12_000,
        seq_tx_poll_max_blocks: 10,
        seq_poll_max_retries: 5,
        seq_block_poll_max_amount: 100,
        initial_accounts: initial_data.wallet_initial_accounts(),
        basic_auth: None,
    })
}
/// Key material and starting balances shared between the sequencer, wallet
/// and indexer configs so all components agree on the genesis state.
pub struct InitialData {
    /// Public accounts: signing key plus starting balance.
    pub public_accounts: Vec<(PrivateKey, u128)>,
    /// Private accounts: key chain plus initial account state.
    pub private_accounts: Vec<(KeyChain, Account)>,
}
impl InitialData {
    /// Generates fresh random key material for two public accounts and two
    /// private accounts (balances 10 000 and 20 000 in each pair).
    ///
    /// Each pair is swapped if needed so the first slot always holds the
    /// smaller `AccountId`, giving deterministic ordering across runs
    /// despite the random keys.
    pub fn with_two_public_and_two_private_initialized_accounts() -> Self {
        let mut public_alice_private_key = PrivateKey::new_os_random();
        let mut public_alice_public_key =
            PublicKey::new_from_private_key(&public_alice_private_key);
        let mut public_alice_account_id = AccountId::from(&public_alice_public_key);
        let mut public_bob_private_key = PrivateKey::new_os_random();
        let mut public_bob_public_key = PublicKey::new_from_private_key(&public_bob_private_key);
        let mut public_bob_account_id = AccountId::from(&public_bob_public_key);
        // Ensure consistent ordering
        if public_alice_account_id > public_bob_account_id {
            std::mem::swap(&mut public_alice_private_key, &mut public_bob_private_key);
            std::mem::swap(&mut public_alice_public_key, &mut public_bob_public_key);
            std::mem::swap(&mut public_alice_account_id, &mut public_bob_account_id);
        }
        let mut private_charlie_key_chain = KeyChain::new_os_random();
        let mut private_charlie_account_id =
            AccountId::from(&private_charlie_key_chain.nullifer_public_key);
        let mut private_david_key_chain = KeyChain::new_os_random();
        let mut private_david_account_id =
            AccountId::from(&private_david_key_chain.nullifer_public_key);
        // Ensure consistent ordering
        if private_charlie_account_id > private_david_account_id {
            std::mem::swap(&mut private_charlie_key_chain, &mut private_david_key_chain);
            std::mem::swap(
                &mut private_charlie_account_id,
                &mut private_david_account_id,
            );
        }
        Self {
            public_accounts: vec![
                (public_alice_private_key, 10_000),
                (public_bob_private_key, 20_000),
            ],
            private_accounts: vec![
                (
                    private_charlie_key_chain,
                    Account {
                        balance: 10_000,
                        data: Data::default(),
                        program_owner: DEFAULT_PROGRAM_ID,
                        nonce: 0,
                    },
                ),
                (
                    private_david_key_chain,
                    Account {
                        balance: 20_000,
                        data: Data::default(),
                        program_owner: DEFAULT_PROGRAM_ID,
                        nonce: 0,
                    },
                ),
            ],
        }
    }

    /// Public accounts in the shape the sequencer genesis config expects
    /// (account ID plus balance; no secret keys).
    fn sequencer_initial_accounts(&self) -> Vec<AccountInitialData> {
        self.public_accounts
            .iter()
            .map(|(priv_key, balance)| {
                let pub_key = PublicKey::new_from_private_key(priv_key);
                let account_id = AccountId::from(&pub_key);
                AccountInitialData {
                    account_id,
                    balance: *balance,
                }
            })
            .collect()
    }

    /// Private accounts as genesis commitments for the sequencer
    /// (nullifier public key plus initial account state).
    fn sequencer_initial_commitments(&self) -> Vec<CommitmentsInitialData> {
        self.private_accounts
            .iter()
            .map(|(key_chain, account)| CommitmentsInitialData {
                npk: key_chain.nullifer_public_key.clone(),
                account: account.clone(),
            })
            .collect()
    }

    /// All accounts — public first, then private — with their secrets, in
    /// the shape the wallet config expects.
    fn wallet_initial_accounts(&self) -> Vec<InitialAccountData> {
        self.public_accounts
            .iter()
            .map(|(priv_key, _)| {
                let pub_key = PublicKey::new_from_private_key(priv_key);
                let account_id = AccountId::from(&pub_key);
                InitialAccountData::Public(InitialAccountDataPublic {
                    account_id,
                    pub_sign_key: priv_key.clone(),
                })
            })
            .chain(self.private_accounts.iter().map(|(key_chain, account)| {
                let account_id = AccountId::from(&key_chain.nullifer_public_key);
                InitialAccountData::Private(InitialAccountDataPrivate {
                    account_id,
                    account: account.clone(),
                    key_chain: key_chain.clone(),
                })
            }))
            .collect()
    }
}
/// URL scheme used when turning a bound socket address into a client URL.
pub enum UrlProtocol {
    /// Plain HTTP (`http://`).
    Http,
    /// WebSocket (`ws://`).
    Ws,
}
impl std::fmt::Display for UrlProtocol {
    /// Renders the protocol as its URL scheme string ("http" or "ws").
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let scheme = match self {
            UrlProtocol::Http => "http",
            UrlProtocol::Ws => "ws",
        };
        f.write_str(scheme)
    }
}
/// Build a client-connectable URL for the given protocol and socket address.
///
/// A server bound to the wildcard address (`0.0.0.0:<port>`, which happens when
/// binding to port 0 in tests) is not directly connectable, so the wildcard IP
/// is replaced with `127.0.0.1` while keeping the actual port.
pub fn addr_to_url(protocol: UrlProtocol, addr: SocketAddr) -> Result<Url> {
    let rendered = match addr.ip().is_unspecified() {
        true => format!("{protocol}://127.0.0.1:{}", addr.port()),
        false => format!("{protocol}://{addr}"),
    };
    Ok(rendered.parse()?)
}
/// Fixed channel ID used for the Bedrock connection in tests: 32 bytes
/// alternating `0x00`, `0x01`.
fn bedrock_channel_id() -> ChannelId {
    // Build the pattern directly as a fixed-size array; this avoids the
    // heap-allocating `Vec` from `.repeat(16)` and the fallible runtime
    // `try_into` that needed an `unreachable!` escape hatch.
    let channel_id: [u8; 32] = std::array::from_fn(|i| (i % 2) as u8);
    ChannelId::from(channel_id)
}

View File

@ -2,173 +2,179 @@
use std::{net::SocketAddr, path::PathBuf, sync::LazyLock};
use actix_web::dev::ServerHandle;
use anyhow::{Context, Result};
use anyhow::{Context, Result, bail};
use base64::{Engine, engine::general_purpose::STANDARD as BASE64};
use common::{
sequencer_client::SequencerClient,
transaction::{EncodedTransaction, NSSATransaction},
};
use common::{HashType, sequencer_client::SequencerClient, transaction::NSSATransaction};
use futures::FutureExt as _;
use indexer_core::{IndexerCore, config::IndexerConfig};
use log::debug;
use nssa::PrivacyPreservingTransaction;
use indexer_service::IndexerHandle;
use log::{debug, error, warn};
use nssa::{AccountId, PrivacyPreservingTransaction};
use nssa_core::Commitment;
use sequencer_core::config::SequencerConfig;
use sequencer_runner::SequencerHandle;
use tempfile::TempDir;
use tokio::task::JoinHandle;
use url::Url;
use testcontainers::compose::DockerCompose;
use wallet::{WalletCore, config::WalletConfigOverrides};
pub mod config;
// TODO: Remove this and control time from tests
pub const TIME_TO_WAIT_FOR_BLOCK_SECONDS: u64 = 12;
pub const ACC_SENDER: &str = "6iArKUXxhUJqS7kCaPNhwMWt3ro71PDyBj7jwAyE2VQV";
pub const ACC_RECEIVER: &str = "7wHg9sbJwc6h3NP1S9bekfAzB8CHifEcxKswCKUt3YQo";
pub const ACC_SENDER_PRIVATE: &str = "3oCG8gqdKLMegw4rRfyaMQvuPHpcASt7xwttsmnZLSkw";
pub const ACC_RECEIVER_PRIVATE: &str = "AKTcXgJ1xoynta1Ec7y6Jso1z1JQtHqd7aPQ1h9er6xX";
pub const NSSA_PROGRAM_FOR_TEST_DATA_CHANGER: &str = "data_changer.bin";
const BEDROCK_SERVICE_WITH_OPEN_PORT: &str = "logos-blockchain-node-0";
const BEDROCK_SERVICE_PORT: u16 = 18080;
static LOGGER: LazyLock<()> = LazyLock::new(env_logger::init);
/// Test context which sets up a sequencer and a wallet for integration tests.
///
/// It's memory and logically safe to create multiple instances of this struct in parallel tests,
/// as each instance uses its own temporary directories for sequencer and wallet data.
// NOTE: Order of fields is important for proper drop order.
pub struct TestContext {
sequencer_server_handle: ServerHandle,
sequencer_loop_handle: JoinHandle<Result<()>>,
sequencer_retry_pending_blocks_handle: JoinHandle<Result<()>>,
indexer_loop_handle: Option<JoinHandle<Result<()>>>,
sequencer_client: SequencerClient,
wallet: WalletCore,
wallet_password: String,
sequencer_handle: SequencerHandle,
indexer_handle: IndexerHandle,
bedrock_compose: DockerCompose,
_temp_sequencer_dir: TempDir,
_temp_wallet_dir: TempDir,
}
impl TestContext {
/// Create new test context in detached mode. Default.
/// Create new test context.
pub async fn new() -> Result<Self> {
let manifest_dir = env!("CARGO_MANIFEST_DIR");
let sequencer_config_path =
PathBuf::from(manifest_dir).join("configs/sequencer/detached/sequencer_config.json");
let sequencer_config = SequencerConfig::from_path(&sequencer_config_path)
.context("Failed to create sequencer config from file")?;
Self::new_with_sequencer_and_maybe_indexer_configs(sequencer_config, None).await
Self::builder().build().await
}
/// Create new test context in local bedrock node attached mode.
pub async fn new_bedrock_local_attached() -> Result<Self> {
let manifest_dir = env!("CARGO_MANIFEST_DIR");
let sequencer_config_path = PathBuf::from(manifest_dir)
.join("configs/sequencer/bedrock_local_attached/sequencer_config.json");
let sequencer_config = SequencerConfig::from_path(&sequencer_config_path)
.context("Failed to create sequencer config from file")?;
let indexer_config_path =
PathBuf::from(manifest_dir).join("configs/indexer/indexer_config.json");
let indexer_config = IndexerConfig::from_path(&indexer_config_path)
.context("Failed to create indexer config from file")?;
Self::new_with_sequencer_and_maybe_indexer_configs(sequencer_config, Some(indexer_config))
.await
pub fn builder() -> TestContextBuilder {
TestContextBuilder::new()
}
/// Create new test context with custom sequencer config and maybe indexer config.
///
/// `home` and `port` fields of the provided config will be overridden to meet tests parallelism
/// requirements.
pub async fn new_with_sequencer_and_maybe_indexer_configs(
sequencer_config: SequencerConfig,
indexer_config: Option<IndexerConfig>,
async fn new_configured(
sequencer_partial_config: config::SequencerPartialConfig,
initial_data: config::InitialData,
) -> Result<Self> {
// Ensure logger is initialized only once
*LOGGER;
debug!("Test context setup");
let (
sequencer_server_handle,
sequencer_addr,
sequencer_loop_handle,
sequencer_retry_pending_blocks_handle,
temp_sequencer_dir,
) = Self::setup_sequencer(sequencer_config)
.await
.context("Failed to setup sequencer")?;
let (bedrock_compose, bedrock_addr) = Self::setup_bedrock_node().await?;
// Convert 0.0.0.0 to 127.0.0.1 for client connections
// When binding to port 0, the server binds to 0.0.0.0:<random_port>
// but clients need to connect to 127.0.0.1:<port> to work reliably
let sequencer_addr = if sequencer_addr.ip().is_unspecified() {
format!("http://127.0.0.1:{}", sequencer_addr.port())
} else {
format!("http://{sequencer_addr}")
let indexer_handle = Self::setup_indexer(bedrock_addr)
.await
.context("Failed to setup Indexer")?;
let (sequencer_handle, temp_sequencer_dir) = Self::setup_sequencer(
sequencer_partial_config,
bedrock_addr,
indexer_handle.addr(),
&initial_data,
)
.await
.context("Failed to setup Sequencer")?;
let (wallet, temp_wallet_dir, wallet_password) =
Self::setup_wallet(sequencer_handle.addr(), &initial_data)
.await
.context("Failed to setup wallet")?;
let sequencer_url = config::addr_to_url(config::UrlProtocol::Http, sequencer_handle.addr())
.context("Failed to convert sequencer addr to URL")?;
let sequencer_client =
SequencerClient::new(sequencer_url).context("Failed to create sequencer client")?;
Ok(Self {
sequencer_client,
wallet,
wallet_password,
bedrock_compose,
sequencer_handle,
indexer_handle,
_temp_sequencer_dir: temp_sequencer_dir,
_temp_wallet_dir: temp_wallet_dir,
})
}
async fn setup_bedrock_node() -> Result<(DockerCompose, SocketAddr)> {
let manifest_dir = env!("CARGO_MANIFEST_DIR");
let bedrock_compose_path = PathBuf::from(manifest_dir).join("bedrock/docker-compose.yml");
let mut compose = DockerCompose::with_auto_client(&[bedrock_compose_path])
.await
.context("Failed to setup docker compose for Bedrock")?;
async fn up_and_retrieve_port(compose: &mut DockerCompose) -> Result<u16> {
compose
.up()
.await
.context("Failed to bring up Bedrock services")?;
let container = compose
.service(BEDROCK_SERVICE_WITH_OPEN_PORT)
.with_context(|| {
format!(
"Failed to get Bedrock service container `{BEDROCK_SERVICE_WITH_OPEN_PORT}`"
)
})?;
let ports = container.ports().await.with_context(|| {
format!(
"Failed to get ports for Bedrock service container `{}`",
container.id()
)
})?;
ports
.map_to_host_port_ipv4(BEDROCK_SERVICE_PORT)
.with_context(|| {
format!(
"Failed to retrieve host port of {BEDROCK_SERVICE_PORT} container \
port for container `{}`, existing ports: {ports:?}",
container.id()
)
})
}
let mut port = None;
let mut attempt = 0;
let max_attempts = 5;
while port.is_none() && attempt < max_attempts {
attempt += 1;
match up_and_retrieve_port(&mut compose).await {
Ok(p) => {
port = Some(p);
}
Err(err) => {
warn!(
"Failed to bring up Bedrock services: {err:?}, attempt {attempt}/{max_attempts}"
);
}
}
}
let Some(port) = port else {
bail!("Failed to bring up Bedrock services after {max_attempts} attempts");
};
let (wallet, temp_wallet_dir, wallet_password) = Self::setup_wallet(sequencer_addr.clone())
let addr = SocketAddr::from(([127, 0, 0, 1], port));
Ok((compose, addr))
}
async fn setup_indexer(bedrock_addr: SocketAddr) -> Result<IndexerHandle> {
let indexer_config =
config::indexer_config(bedrock_addr).context("Failed to create Indexer config")?;
indexer_service::run_server(indexer_config, 0)
.await
.context("Failed to setup wallet")?;
let sequencer_client = SequencerClient::new(
Url::parse(&sequencer_addr).context("Failed to parse sequencer addr")?,
)
.context("Failed to create sequencer client")?;
if let Some(mut indexer_config) = indexer_config {
indexer_config.sequencer_client_config.addr =
Url::parse(&sequencer_addr).context("Failed to parse sequencer addr")?;
let indexer_core = IndexerCore::new(indexer_config)?;
let indexer_loop_handle = Some(tokio::spawn(async move {
indexer_core.subscribe_parse_block_stream().await
}));
Ok(Self {
sequencer_server_handle,
sequencer_loop_handle,
sequencer_retry_pending_blocks_handle,
indexer_loop_handle,
sequencer_client,
wallet,
_temp_sequencer_dir: temp_sequencer_dir,
_temp_wallet_dir: temp_wallet_dir,
wallet_password,
})
} else {
Ok(Self {
sequencer_server_handle,
sequencer_loop_handle,
sequencer_retry_pending_blocks_handle,
indexer_loop_handle: None,
sequencer_client,
wallet,
_temp_sequencer_dir: temp_sequencer_dir,
_temp_wallet_dir: temp_wallet_dir,
wallet_password,
})
}
.context("Failed to run Indexer Service")
}
async fn setup_sequencer(
mut config: SequencerConfig,
) -> Result<(
ServerHandle,
SocketAddr,
JoinHandle<Result<()>>,
JoinHandle<Result<()>>,
TempDir,
)> {
partial: config::SequencerPartialConfig,
bedrock_addr: SocketAddr,
indexer_addr: SocketAddr,
initial_data: &config::InitialData,
) -> Result<(SequencerHandle, TempDir)> {
let temp_sequencer_dir =
tempfile::tempdir().context("Failed to create temp dir for sequencer home")?;
@ -176,43 +182,39 @@ impl TestContext {
"Using temp sequencer home at {:?}",
temp_sequencer_dir.path()
);
config.home = temp_sequencer_dir.path().to_owned();
// Setting port to 0 lets the OS choose a free port for us
config.port = 0;
let (
sequencer_server_handle,
sequencer_addr,
sequencer_loop_handle,
sequencer_retry_pending_blocks_handle,
) = sequencer_runner::startup_sequencer(config).await?;
let config = config::sequencer_config(
partial,
temp_sequencer_dir.path().to_owned(),
bedrock_addr,
indexer_addr,
initial_data,
)
.context("Failed to create Sequencer config")?;
Ok((
sequencer_server_handle,
sequencer_addr,
sequencer_loop_handle,
sequencer_retry_pending_blocks_handle,
temp_sequencer_dir,
))
let sequencer_handle = sequencer_runner::startup_sequencer(config).await?;
Ok((sequencer_handle, temp_sequencer_dir))
}
async fn setup_wallet(sequencer_addr: String) -> Result<(WalletCore, TempDir, String)> {
let manifest_dir = env!("CARGO_MANIFEST_DIR");
let wallet_config_source_path =
PathBuf::from(manifest_dir).join("configs/wallet/wallet_config.json");
async fn setup_wallet(
sequencer_addr: SocketAddr,
initial_data: &config::InitialData,
) -> Result<(WalletCore, TempDir, String)> {
let config = config::wallet_config(sequencer_addr, initial_data)
.context("Failed to create Wallet config")?;
let config_serialized =
serde_json::to_string_pretty(&config).context("Failed to serialize Wallet config")?;
let temp_wallet_dir =
tempfile::tempdir().context("Failed to create temp dir for wallet home")?;
let config_path = temp_wallet_dir.path().join("wallet_config.json");
std::fs::copy(&wallet_config_source_path, &config_path)
.context("Failed to copy wallet config to temp dir")?;
std::fs::write(&config_path, config_serialized)
.context("Failed to write wallet config in temp dir")?;
let storage_path = temp_wallet_dir.path().join("storage.json");
let config_overrides = WalletConfigOverrides {
sequencer_addr: Some(sequencer_addr),
..Default::default()
};
let config_overrides = WalletConfigOverrides::default();
let wallet_password = "test_pass".to_owned();
let wallet = WalletCore::new_init_storage(
@ -248,32 +250,71 @@ impl TestContext {
pub fn sequencer_client(&self) -> &SequencerClient {
&self.sequencer_client
}
/// Get existing public account IDs in the wallet.
pub fn existing_public_accounts(&self) -> Vec<AccountId> {
self.wallet
.storage()
.user_data
.public_account_ids()
.collect()
}
/// Get existing private account IDs in the wallet.
pub fn existing_private_accounts(&self) -> Vec<AccountId> {
self.wallet
.storage()
.user_data
.private_account_ids()
.collect()
}
}
impl Drop for TestContext {
fn drop(&mut self) {
debug!("Test context cleanup");
let Self {
sequencer_server_handle,
sequencer_loop_handle,
sequencer_retry_pending_blocks_handle,
indexer_loop_handle,
sequencer_handle,
indexer_handle,
bedrock_compose,
_temp_sequencer_dir: _,
_temp_wallet_dir: _,
sequencer_client: _,
wallet: _,
_temp_sequencer_dir,
_temp_wallet_dir,
wallet_password: _,
} = self;
sequencer_loop_handle.abort();
sequencer_retry_pending_blocks_handle.abort();
if let Some(indexer_loop_handle) = indexer_loop_handle {
indexer_loop_handle.abort();
if sequencer_handle.is_finished() {
let Err(err) = self
.sequencer_handle
.run_forever()
.now_or_never()
.expect("Future is finished and should be ready");
error!(
"Sequencer handle has unexpectedly finished before TestContext drop with error: {err:#}"
);
}
// Can't wait here as Drop can't be async, but anyway stop signal should be sent
sequencer_server_handle.stop(true).now_or_never();
if indexer_handle.is_stopped() {
error!("Indexer handle has unexpectedly stopped before TestContext drop");
}
let container = bedrock_compose
.service(BEDROCK_SERVICE_WITH_OPEN_PORT)
.unwrap_or_else(|| {
panic!("Failed to get Bedrock service container `{BEDROCK_SERVICE_WITH_OPEN_PORT}`")
});
let output = std::process::Command::new("docker")
.args(["inspect", "-f", "{{.State.Running}}", container.id()])
.output()
.expect("Failed to execute docker inspect command to check if Bedrock container is still running");
let stdout = String::from_utf8(output.stdout)
.expect("Failed to parse docker inspect output as String");
if stdout.trim() != "true" {
error!(
"Bedrock container `{}` is not running during TestContext drop, docker inspect output: {stdout}",
container.id()
);
}
}
}
@ -291,31 +332,65 @@ impl BlockingTestContext {
}
}
pub fn format_public_account_id(account_id: &str) -> String {
pub struct TestContextBuilder {
initial_data: Option<config::InitialData>,
sequencer_partial_config: Option<config::SequencerPartialConfig>,
}
impl TestContextBuilder {
fn new() -> Self {
Self {
initial_data: None,
sequencer_partial_config: None,
}
}
pub fn with_initial_data(mut self, initial_data: config::InitialData) -> Self {
self.initial_data = Some(initial_data);
self
}
pub fn with_sequencer_partial_config(
mut self,
sequencer_partial_config: config::SequencerPartialConfig,
) -> Self {
self.sequencer_partial_config = Some(sequencer_partial_config);
self
}
pub async fn build(self) -> Result<TestContext> {
TestContext::new_configured(
self.sequencer_partial_config.unwrap_or_default(),
self.initial_data.unwrap_or_else(|| {
config::InitialData::with_two_public_and_two_private_initialized_accounts()
}),
)
.await
}
}
pub fn format_public_account_id(account_id: AccountId) -> String {
format!("Public/{account_id}")
}
pub fn format_private_account_id(account_id: &str) -> String {
pub fn format_private_account_id(account_id: AccountId) -> String {
format!("Private/{account_id}")
}
pub async fn fetch_privacy_preserving_tx(
seq_client: &SequencerClient,
tx_hash: String,
tx_hash: HashType,
) -> PrivacyPreservingTransaction {
let transaction_encoded = seq_client
.get_transaction_by_hash(tx_hash.clone())
.get_transaction_by_hash(tx_hash)
.await
.unwrap()
.transaction
.unwrap();
let tx_base64_decode = BASE64.decode(transaction_encoded).unwrap();
match NSSATransaction::try_from(
&borsh::from_slice::<EncodedTransaction>(&tx_base64_decode).unwrap(),
)
.unwrap()
{
let tx_bytes = BASE64.decode(transaction_encoded).unwrap();
let tx = borsh::from_slice(&tx_bytes).unwrap();
match tx {
NSSATransaction::PrivacyPreserving(privacy_preserving_transaction) => {
privacy_preserving_transaction
}
@ -332,20 +407,3 @@ pub async fn verify_commitment_is_in_state(
Ok(Some(_))
)
}
#[cfg(test)]
mod tests {
use super::{format_private_account_id, format_public_account_id};
#[test]
fn correct_account_id_from_prefix() {
let account_id1 = "cafecafe";
let account_id2 = "deadbeaf";
let account_id1_pub = format_public_account_id(account_id1);
let account_id2_priv = format_private_account_id(account_id2);
assert_eq!(account_id1_pub, "Public/cafecafe".to_string());
assert_eq!(account_id2_priv, "Private/deadbeaf".to_string());
}
}

View File

@ -1,5 +1,5 @@
use anyhow::Result;
use integration_tests::{ACC_SENDER, TestContext};
use integration_tests::TestContext;
use log::info;
use nssa::program::Program;
use tokio::test;
@ -10,7 +10,7 @@ async fn get_existing_account() -> Result<()> {
let account = ctx
.sequencer_client()
.get_account(ACC_SENDER.to_string())
.get_account(ctx.existing_public_accounts()[0])
.await?
.account;

View File

@ -88,8 +88,8 @@ async fn amm_public() -> Result<()> {
// Create new token
let subcommand = TokenProgramAgnosticSubcommand::New {
definition_account_id: format_public_account_id(&definition_account_id_1.to_string()),
supply_account_id: format_public_account_id(&supply_account_id_1.to_string()),
definition_account_id: format_public_account_id(definition_account_id_1),
supply_account_id: format_public_account_id(supply_account_id_1),
name: "A NAM1".to_string(),
total_supply: 37,
};
@ -99,10 +99,8 @@ async fn amm_public() -> Result<()> {
// Transfer 7 tokens from `supply_acc` to the account at account_id `recipient_account_id_1`
let subcommand = TokenProgramAgnosticSubcommand::Send {
from: format_public_account_id(&supply_account_id_1.to_string()),
to: Some(format_public_account_id(
&recipient_account_id_1.to_string(),
)),
from: format_public_account_id(supply_account_id_1),
to: Some(format_public_account_id(recipient_account_id_1)),
to_npk: None,
to_ipk: None,
amount: 7,
@ -114,8 +112,8 @@ async fn amm_public() -> Result<()> {
// Create new token
let subcommand = TokenProgramAgnosticSubcommand::New {
definition_account_id: format_public_account_id(&definition_account_id_2.to_string()),
supply_account_id: format_public_account_id(&supply_account_id_2.to_string()),
definition_account_id: format_public_account_id(definition_account_id_2),
supply_account_id: format_public_account_id(supply_account_id_2),
name: "A NAM2".to_string(),
total_supply: 37,
};
@ -125,10 +123,8 @@ async fn amm_public() -> Result<()> {
// Transfer 7 tokens from `supply_acc` to the account at account_id `recipient_account_id_2`
let subcommand = TokenProgramAgnosticSubcommand::Send {
from: format_public_account_id(&supply_account_id_2.to_string()),
to: Some(format_public_account_id(
&recipient_account_id_2.to_string(),
)),
from: format_public_account_id(supply_account_id_2),
to: Some(format_public_account_id(recipient_account_id_2)),
to_npk: None,
to_ipk: None,
amount: 7,
@ -157,9 +153,9 @@ async fn amm_public() -> Result<()> {
// Send creation tx
let subcommand = AmmProgramAgnosticSubcommand::New {
user_holding_a: format_public_account_id(&recipient_account_id_1.to_string()),
user_holding_b: format_public_account_id(&recipient_account_id_2.to_string()),
user_holding_lp: format_public_account_id(&user_holding_lp.to_string()),
user_holding_a: format_public_account_id(recipient_account_id_1),
user_holding_b: format_public_account_id(recipient_account_id_2),
user_holding_lp: format_public_account_id(user_holding_lp),
balance_a: 3,
balance_b: 3,
};
@ -170,19 +166,19 @@ async fn amm_public() -> Result<()> {
let user_holding_a_acc = ctx
.sequencer_client()
.get_account(recipient_account_id_1.to_string())
.get_account(recipient_account_id_1)
.await?
.account;
let user_holding_b_acc = ctx
.sequencer_client()
.get_account(recipient_account_id_2.to_string())
.get_account(recipient_account_id_2)
.await?
.account;
let user_holding_lp_acc = ctx
.sequencer_client()
.get_account(user_holding_lp.to_string())
.get_account(user_holding_lp)
.await?
.account;
@ -206,8 +202,8 @@ async fn amm_public() -> Result<()> {
// Make swap
let subcommand = AmmProgramAgnosticSubcommand::Swap {
user_holding_a: format_public_account_id(&recipient_account_id_1.to_string()),
user_holding_b: format_public_account_id(&recipient_account_id_2.to_string()),
user_holding_a: format_public_account_id(recipient_account_id_1),
user_holding_b: format_public_account_id(recipient_account_id_2),
amount_in: 2,
min_amount_out: 1,
token_definition: definition_account_id_1.to_string(),
@ -219,19 +215,19 @@ async fn amm_public() -> Result<()> {
let user_holding_a_acc = ctx
.sequencer_client()
.get_account(recipient_account_id_1.to_string())
.get_account(recipient_account_id_1)
.await?
.account;
let user_holding_b_acc = ctx
.sequencer_client()
.get_account(recipient_account_id_2.to_string())
.get_account(recipient_account_id_2)
.await?
.account;
let user_holding_lp_acc = ctx
.sequencer_client()
.get_account(user_holding_lp.to_string())
.get_account(user_holding_lp)
.await?
.account;
@ -255,8 +251,8 @@ async fn amm_public() -> Result<()> {
// Make swap
let subcommand = AmmProgramAgnosticSubcommand::Swap {
user_holding_a: format_public_account_id(&recipient_account_id_1.to_string()),
user_holding_b: format_public_account_id(&recipient_account_id_2.to_string()),
user_holding_a: format_public_account_id(recipient_account_id_1),
user_holding_b: format_public_account_id(recipient_account_id_2),
amount_in: 2,
min_amount_out: 1,
token_definition: definition_account_id_2.to_string(),
@ -268,19 +264,19 @@ async fn amm_public() -> Result<()> {
let user_holding_a_acc = ctx
.sequencer_client()
.get_account(recipient_account_id_1.to_string())
.get_account(recipient_account_id_1)
.await?
.account;
let user_holding_b_acc = ctx
.sequencer_client()
.get_account(recipient_account_id_2.to_string())
.get_account(recipient_account_id_2)
.await?
.account;
let user_holding_lp_acc = ctx
.sequencer_client()
.get_account(user_holding_lp.to_string())
.get_account(user_holding_lp)
.await?
.account;
@ -304,9 +300,9 @@ async fn amm_public() -> Result<()> {
// Add liquidity
let subcommand = AmmProgramAgnosticSubcommand::AddLiquidity {
user_holding_a: format_public_account_id(&recipient_account_id_1.to_string()),
user_holding_b: format_public_account_id(&recipient_account_id_2.to_string()),
user_holding_lp: format_public_account_id(&user_holding_lp.to_string()),
user_holding_a: format_public_account_id(recipient_account_id_1),
user_holding_b: format_public_account_id(recipient_account_id_2),
user_holding_lp: format_public_account_id(user_holding_lp),
min_amount_lp: 1,
max_amount_a: 2,
max_amount_b: 2,
@ -318,19 +314,19 @@ async fn amm_public() -> Result<()> {
let user_holding_a_acc = ctx
.sequencer_client()
.get_account(recipient_account_id_1.to_string())
.get_account(recipient_account_id_1)
.await?
.account;
let user_holding_b_acc = ctx
.sequencer_client()
.get_account(recipient_account_id_2.to_string())
.get_account(recipient_account_id_2)
.await?
.account;
let user_holding_lp_acc = ctx
.sequencer_client()
.get_account(user_holding_lp.to_string())
.get_account(user_holding_lp)
.await?
.account;
@ -354,9 +350,9 @@ async fn amm_public() -> Result<()> {
// Remove liquidity
let subcommand = AmmProgramAgnosticSubcommand::RemoveLiquidity {
user_holding_a: format_public_account_id(&recipient_account_id_1.to_string()),
user_holding_b: format_public_account_id(&recipient_account_id_2.to_string()),
user_holding_lp: format_public_account_id(&user_holding_lp.to_string()),
user_holding_a: format_public_account_id(recipient_account_id_1),
user_holding_b: format_public_account_id(recipient_account_id_2),
user_holding_lp: format_public_account_id(user_holding_lp),
balance_lp: 2,
min_amount_a: 1,
min_amount_b: 1,
@ -368,19 +364,19 @@ async fn amm_public() -> Result<()> {
let user_holding_a_acc = ctx
.sequencer_client()
.get_account(recipient_account_id_1.to_string())
.get_account(recipient_account_id_1)
.await?
.account;
let user_holding_b_acc = ctx
.sequencer_client()
.get_account(recipient_account_id_2.to_string())
.get_account(recipient_account_id_2)
.await?
.account;
let user_holding_lp_acc = ctx
.sequencer_client()
.get_account(user_holding_lp.to_string())
.get_account(user_holding_lp)
.await?
.account;

View File

@ -2,7 +2,6 @@ use std::time::Duration;
use anyhow::{Context as _, Result};
use integration_tests::{
ACC_RECEIVER, ACC_RECEIVER_PRIVATE, ACC_SENDER, ACC_SENDER_PRIVATE,
TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, fetch_privacy_preserving_tx,
format_private_account_id, format_public_account_id, verify_commitment_is_in_state,
};
@ -20,12 +19,12 @@ use wallet::cli::{
async fn private_transfer_to_owned_account() -> Result<()> {
let mut ctx = TestContext::new().await?;
let from: AccountId = ACC_SENDER_PRIVATE.parse()?;
let to: AccountId = ACC_RECEIVER_PRIVATE.parse()?;
let from: AccountId = ctx.existing_private_accounts()[0];
let to: AccountId = ctx.existing_private_accounts()[1];
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_private_account_id(&from.to_string()),
to: Some(format_private_account_id(&to.to_string())),
from: format_private_account_id(from),
to: Some(format_private_account_id(to)),
to_npk: None,
to_ipk: None,
amount: 100,
@ -38,13 +37,13 @@ async fn private_transfer_to_owned_account() -> Result<()> {
let new_commitment1 = ctx
.wallet()
.get_private_account_commitment(&from)
.get_private_account_commitment(from)
.context("Failed to get private account commitment for sender")?;
assert!(verify_commitment_is_in_state(new_commitment1, ctx.sequencer_client()).await);
let new_commitment2 = ctx
.wallet()
.get_private_account_commitment(&to)
.get_private_account_commitment(to)
.context("Failed to get private account commitment for receiver")?;
assert!(verify_commitment_is_in_state(new_commitment2, ctx.sequencer_client()).await);
@ -57,13 +56,13 @@ async fn private_transfer_to_owned_account() -> Result<()> {
async fn private_transfer_to_foreign_account() -> Result<()> {
let mut ctx = TestContext::new().await?;
let from: AccountId = ACC_SENDER_PRIVATE.parse()?;
let from: AccountId = ctx.existing_private_accounts()[0];
let to_npk = NullifierPublicKey([42; 32]);
let to_npk_string = hex::encode(to_npk.0);
let to_ipk = Secp256k1Point::from_scalar(to_npk.0);
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_private_account_id(&from.to_string()),
from: format_private_account_id(from),
to: None,
to_npk: Some(to_npk_string),
to_ipk: Some(hex::encode(to_ipk.0)),
@ -80,10 +79,10 @@ async fn private_transfer_to_foreign_account() -> Result<()> {
let new_commitment1 = ctx
.wallet()
.get_private_account_commitment(&from)
.get_private_account_commitment(from)
.context("Failed to get private account commitment for sender")?;
let tx = fetch_privacy_preserving_tx(ctx.sequencer_client(), tx_hash.clone()).await;
let tx = fetch_privacy_preserving_tx(ctx.sequencer_client(), tx_hash).await;
assert_eq!(tx.message.new_commitments[0], new_commitment1);
assert_eq!(tx.message.new_commitments.len(), 2);
@ -100,19 +99,19 @@ async fn private_transfer_to_foreign_account() -> Result<()> {
async fn deshielded_transfer_to_public_account() -> Result<()> {
let mut ctx = TestContext::new().await?;
let from: AccountId = ACC_SENDER_PRIVATE.parse()?;
let to: AccountId = ACC_RECEIVER.parse()?;
let from: AccountId = ctx.existing_private_accounts()[0];
let to: AccountId = ctx.existing_public_accounts()[1];
// Check initial balance of the private sender
let from_acc = ctx
.wallet()
.get_account_private(&from)
.get_account_private(from)
.context("Failed to get sender's private account")?;
assert_eq!(from_acc.balance, 10000);
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_private_account_id(&from.to_string()),
to: Some(format_public_account_id(&to.to_string())),
from: format_private_account_id(from),
to: Some(format_public_account_id(to)),
to_npk: None,
to_ipk: None,
amount: 100,
@ -125,18 +124,15 @@ async fn deshielded_transfer_to_public_account() -> Result<()> {
let from_acc = ctx
.wallet()
.get_account_private(&from)
.get_account_private(from)
.context("Failed to get sender's private account")?;
let new_commitment = ctx
.wallet()
.get_private_account_commitment(&from)
.get_private_account_commitment(from)
.context("Failed to get private account commitment")?;
assert!(verify_commitment_is_in_state(new_commitment, ctx.sequencer_client()).await);
let acc_2_balance = ctx
.sequencer_client()
.get_account_balance(to.to_string())
.await?;
let acc_2_balance = ctx.sequencer_client().get_account_balance(to).await?;
assert_eq!(from_acc.balance, 9900);
assert_eq!(acc_2_balance.balance, 20100);
@ -150,7 +146,7 @@ async fn deshielded_transfer_to_public_account() -> Result<()> {
async fn private_transfer_to_owned_account_using_claiming_path() -> Result<()> {
let mut ctx = TestContext::new().await?;
let from: AccountId = ACC_SENDER_PRIVATE.parse()?;
let from: AccountId = ctx.existing_private_accounts()[0];
// Create a new private account
let command = Command::Account(AccountSubcommand::New(NewSubcommand::Private { cci: None }));
@ -168,13 +164,13 @@ async fn private_transfer_to_owned_account_using_claiming_path() -> Result<()> {
.wallet()
.storage()
.user_data
.get_private_account(&to_account_id)
.get_private_account(to_account_id)
.cloned()
.context("Failed to get private account")?;
// Send to this account using claiming path (using npk and ipk instead of account ID)
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_private_account_id(&from.to_string()),
from: format_private_account_id(from),
to: None,
to_npk: Some(hex::encode(to_keys.nullifer_public_key.0)),
to_ipk: Some(hex::encode(to_keys.incoming_viewing_public_key.0)),
@ -186,7 +182,7 @@ async fn private_transfer_to_owned_account_using_claiming_path() -> Result<()> {
anyhow::bail!("Expected PrivacyPreservingTransfer return value");
};
let tx = fetch_privacy_preserving_tx(ctx.sequencer_client(), tx_hash.clone()).await;
let tx = fetch_privacy_preserving_tx(ctx.sequencer_client(), tx_hash).await;
// Sync the wallet to claim the new account
let command = Command::Account(AccountSubcommand::SyncPrivate {});
@ -194,7 +190,7 @@ async fn private_transfer_to_owned_account_using_claiming_path() -> Result<()> {
let new_commitment1 = ctx
.wallet()
.get_private_account_commitment(&from)
.get_private_account_commitment(from)
.context("Failed to get private account commitment for sender")?;
assert_eq!(tx.message.new_commitments[0], new_commitment1);
@ -205,7 +201,7 @@ async fn private_transfer_to_owned_account_using_claiming_path() -> Result<()> {
let to_res_acc = ctx
.wallet()
.get_account_private(&to_account_id)
.get_account_private(to_account_id)
.context("Failed to get recipient's private account")?;
assert_eq!(to_res_acc.balance, 100);
@ -218,12 +214,12 @@ async fn private_transfer_to_owned_account_using_claiming_path() -> Result<()> {
async fn shielded_transfer_to_owned_private_account() -> Result<()> {
let mut ctx = TestContext::new().await?;
let from: AccountId = ACC_SENDER.parse()?;
let to: AccountId = ACC_RECEIVER_PRIVATE.parse()?;
let from: AccountId = ctx.existing_public_accounts()[0];
let to: AccountId = ctx.existing_private_accounts()[1];
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_public_account_id(&from.to_string()),
to: Some(format_private_account_id(&to.to_string())),
from: format_public_account_id(from),
to: Some(format_private_account_id(to)),
to_npk: None,
to_ipk: None,
amount: 100,
@ -236,18 +232,15 @@ async fn shielded_transfer_to_owned_private_account() -> Result<()> {
let acc_to = ctx
.wallet()
.get_account_private(&to)
.get_account_private(to)
.context("Failed to get receiver's private account")?;
let new_commitment = ctx
.wallet()
.get_private_account_commitment(&to)
.get_private_account_commitment(to)
.context("Failed to get receiver's commitment")?;
assert!(verify_commitment_is_in_state(new_commitment, ctx.sequencer_client()).await);
let acc_from_balance = ctx
.sequencer_client()
.get_account_balance(from.to_string())
.await?;
let acc_from_balance = ctx.sequencer_client().get_account_balance(from).await?;
assert_eq!(acc_from_balance.balance, 9900);
assert_eq!(acc_to.balance, 20100);
@ -264,10 +257,10 @@ async fn shielded_transfer_to_foreign_account() -> Result<()> {
let to_npk = NullifierPublicKey([42; 32]);
let to_npk_string = hex::encode(to_npk.0);
let to_ipk = Secp256k1Point::from_scalar(to_npk.0);
let from: AccountId = ACC_SENDER.parse()?;
let from: AccountId = ctx.existing_public_accounts()[0];
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_public_account_id(&from.to_string()),
from: format_public_account_id(from),
to: None,
to_npk: Some(to_npk_string),
to_ipk: Some(hex::encode(to_ipk.0)),
@ -284,10 +277,7 @@ async fn shielded_transfer_to_foreign_account() -> Result<()> {
let tx = fetch_privacy_preserving_tx(ctx.sequencer_client(), tx_hash).await;
let acc_1_balance = ctx
.sequencer_client()
.get_account_balance(from.to_string())
.await?;
let acc_1_balance = ctx.sequencer_client().get_account_balance(from).await?;
assert!(
verify_commitment_is_in_state(
@ -313,7 +303,7 @@ async fn private_transfer_to_owned_account_continuous_run_path() -> Result<()> {
// The original implementation spawned wallet::cli::execute_continuous_run() in background
// but this conflicts with TestContext's wallet management
let from: AccountId = ACC_SENDER_PRIVATE.parse()?;
let from: AccountId = ctx.existing_private_accounts()[0];
// Create a new private account
let command = Command::Account(AccountSubcommand::New(NewSubcommand::Private { cci: None }));
@ -331,13 +321,13 @@ async fn private_transfer_to_owned_account_continuous_run_path() -> Result<()> {
.wallet()
.storage()
.user_data
.get_private_account(&to_account_id)
.get_private_account(to_account_id)
.cloned()
.context("Failed to get private account")?;
// Send transfer using nullifier and incoming viewing public keys
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_private_account_id(&from.to_string()),
from: format_private_account_id(from),
to: None,
to_npk: Some(hex::encode(to_keys.nullifer_public_key.0)),
to_ipk: Some(hex::encode(to_keys.incoming_viewing_public_key.0)),
@ -349,7 +339,7 @@ async fn private_transfer_to_owned_account_continuous_run_path() -> Result<()> {
anyhow::bail!("Failed to send transaction");
};
let tx = fetch_privacy_preserving_tx(ctx.sequencer_client(), tx_hash.clone()).await;
let tx = fetch_privacy_preserving_tx(ctx.sequencer_client(), tx_hash).await;
info!("Waiting for next blocks to check if continuous run fetches account");
tokio::time::sleep(Duration::from_secs(TIME_TO_WAIT_FOR_BLOCK_SECONDS)).await;
@ -364,7 +354,7 @@ async fn private_transfer_to_owned_account_continuous_run_path() -> Result<()> {
// Verify receiver account balance
let to_res_acc = ctx
.wallet()
.get_account_private(&to_account_id)
.get_account_private(to_account_id)
.context("Failed to get receiver account")?;
assert_eq!(to_res_acc.balance, 100);
@ -383,7 +373,7 @@ async fn initialize_private_account() -> Result<()> {
};
let command = Command::AuthTransfer(AuthTransferSubcommand::Init {
account_id: format_private_account_id(&account_id.to_string()),
account_id: format_private_account_id(account_id),
});
wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?;
@ -395,13 +385,13 @@ async fn initialize_private_account() -> Result<()> {
let new_commitment = ctx
.wallet()
.get_private_account_commitment(&account_id)
.get_private_account_commitment(account_id)
.context("Failed to get private account commitment")?;
assert!(verify_commitment_is_in_state(new_commitment, ctx.sequencer_client()).await);
let account = ctx
.wallet()
.get_account_private(&account_id)
.get_account_private(account_id)
.context("Failed to get private account")?;
assert_eq!(

View File

@ -1,9 +1,7 @@
use std::time::Duration;
use anyhow::Result;
use integration_tests::{
ACC_RECEIVER, ACC_SENDER, TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, format_public_account_id,
};
use integration_tests::{TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, format_public_account_id};
use log::info;
use nssa::program::Program;
use tokio::test;
@ -18,8 +16,8 @@ async fn successful_transfer_to_existing_account() -> Result<()> {
let mut ctx = TestContext::new().await?;
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_public_account_id(ACC_SENDER),
to: Some(format_public_account_id(ACC_RECEIVER)),
from: format_public_account_id(ctx.existing_public_accounts()[0]),
to: Some(format_public_account_id(ctx.existing_public_accounts()[1])),
to_npk: None,
to_ipk: None,
amount: 100,
@ -33,11 +31,11 @@ async fn successful_transfer_to_existing_account() -> Result<()> {
info!("Checking correct balance move");
let acc_1_balance = ctx
.sequencer_client()
.get_account_balance(ACC_SENDER.to_string())
.get_account_balance(ctx.existing_public_accounts()[0])
.await?;
let acc_2_balance = ctx
.sequencer_client()
.get_account_balance(ACC_RECEIVER.to_string())
.get_account_balance(ctx.existing_public_accounts()[1])
.await?;
info!("Balance of sender: {acc_1_balance:#?}");
@ -64,17 +62,15 @@ pub async fn successful_transfer_to_new_account() -> Result<()> {
.storage()
.user_data
.account_ids()
.map(ToString::to_string)
.find(|acc_id| acc_id != ACC_SENDER && acc_id != ACC_RECEIVER)
.find(|acc_id| {
*acc_id != ctx.existing_public_accounts()[0]
&& *acc_id != ctx.existing_public_accounts()[1]
})
.expect("Failed to find newly created account in the wallet storage");
if new_persistent_account_id == String::new() {
panic!("Failed to produce new account, not present in persistent accounts");
}
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_public_account_id(ACC_SENDER),
to: Some(format_public_account_id(&new_persistent_account_id)),
from: format_public_account_id(ctx.existing_public_accounts()[0]),
to: Some(format_public_account_id(new_persistent_account_id)),
to_npk: None,
to_ipk: None,
amount: 100,
@ -88,7 +84,7 @@ pub async fn successful_transfer_to_new_account() -> Result<()> {
info!("Checking correct balance move");
let acc_1_balance = ctx
.sequencer_client()
.get_account_balance(ACC_SENDER.to_string())
.get_account_balance(ctx.existing_public_accounts()[0])
.await?;
let acc_2_balance = ctx
.sequencer_client()
@ -109,8 +105,8 @@ async fn failed_transfer_with_insufficient_balance() -> Result<()> {
let mut ctx = TestContext::new().await?;
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_public_account_id(ACC_SENDER),
to: Some(format_public_account_id(ACC_RECEIVER)),
from: format_public_account_id(ctx.existing_public_accounts()[0]),
to: Some(format_public_account_id(ctx.existing_public_accounts()[1])),
to_npk: None,
to_ipk: None,
amount: 1000000,
@ -125,11 +121,11 @@ async fn failed_transfer_with_insufficient_balance() -> Result<()> {
info!("Checking balances unchanged");
let acc_1_balance = ctx
.sequencer_client()
.get_account_balance(ACC_SENDER.to_string())
.get_account_balance(ctx.existing_public_accounts()[0])
.await?;
let acc_2_balance = ctx
.sequencer_client()
.get_account_balance(ACC_RECEIVER.to_string())
.get_account_balance(ctx.existing_public_accounts()[1])
.await?;
info!("Balance of sender: {acc_1_balance:#?}");
@ -147,8 +143,8 @@ async fn two_consecutive_successful_transfers() -> Result<()> {
// First transfer
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_public_account_id(ACC_SENDER),
to: Some(format_public_account_id(ACC_RECEIVER)),
from: format_public_account_id(ctx.existing_public_accounts()[0]),
to: Some(format_public_account_id(ctx.existing_public_accounts()[1])),
to_npk: None,
to_ipk: None,
amount: 100,
@ -162,11 +158,11 @@ async fn two_consecutive_successful_transfers() -> Result<()> {
info!("Checking correct balance move after first transfer");
let acc_1_balance = ctx
.sequencer_client()
.get_account_balance(ACC_SENDER.to_string())
.get_account_balance(ctx.existing_public_accounts()[0])
.await?;
let acc_2_balance = ctx
.sequencer_client()
.get_account_balance(ACC_RECEIVER.to_string())
.get_account_balance(ctx.existing_public_accounts()[1])
.await?;
info!("Balance of sender: {acc_1_balance:#?}");
@ -179,8 +175,8 @@ async fn two_consecutive_successful_transfers() -> Result<()> {
// Second transfer
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_public_account_id(ACC_SENDER),
to: Some(format_public_account_id(ACC_RECEIVER)),
from: format_public_account_id(ctx.existing_public_accounts()[0]),
to: Some(format_public_account_id(ctx.existing_public_accounts()[1])),
to_npk: None,
to_ipk: None,
amount: 100,
@ -194,11 +190,11 @@ async fn two_consecutive_successful_transfers() -> Result<()> {
info!("Checking correct balance move after second transfer");
let acc_1_balance = ctx
.sequencer_client()
.get_account_balance(ACC_SENDER.to_string())
.get_account_balance(ctx.existing_public_accounts()[0])
.await?;
let acc_2_balance = ctx
.sequencer_client()
.get_account_balance(ACC_RECEIVER.to_string())
.get_account_balance(ctx.existing_public_accounts()[1])
.await?;
info!("Balance of sender: {acc_1_balance:#?}");
@ -223,14 +219,14 @@ async fn initialize_public_account() -> Result<()> {
};
let command = Command::AuthTransfer(AuthTransferSubcommand::Init {
account_id: format_public_account_id(&account_id.to_string()),
account_id: format_public_account_id(account_id),
});
wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?;
info!("Checking correct execution");
let account = ctx
.sequencer_client()
.get_account(account_id.to_string())
.get_account(account_id)
.await?
.account;

View File

@ -1,23 +0,0 @@
use anyhow::Result;
use integration_tests::TestContext;
use log::info;
use tokio::test;
#[ignore = "needs complicated setup"]
#[test]
// To run this test properly, you need nomos node running in the background.
// For instructions in building nomos node, refer to [this](https://github.com/logos-blockchain/logos-blockchain?tab=readme-ov-file#running-a-logos-blockchain-node).
//
// Recommended to run node locally from build binary.
async fn indexer_run_local_node() -> Result<()> {
let _ctx = TestContext::new_bedrock_local_attached().await?;
info!("Let's observe behaviour");
tokio::time::sleep(std::time::Duration::from_secs(180)).await;
// No way to check state of indexer now
// When it will be a service, then it will become possible.
Ok(())
}

View File

@ -2,8 +2,8 @@ use std::{str::FromStr, time::Duration};
use anyhow::Result;
use integration_tests::{
ACC_SENDER, ACC_SENDER_PRIVATE, TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext,
format_private_account_id, format_public_account_id, verify_commitment_is_in_state,
TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, format_private_account_id,
format_public_account_id, verify_commitment_is_in_state,
};
use key_protocol::key_management::key_tree::chain_index::ChainIndex;
use log::info;
@ -19,7 +19,7 @@ use wallet::cli::{
async fn restore_keys_from_seed() -> Result<()> {
let mut ctx = TestContext::new().await?;
let from: AccountId = ACC_SENDER_PRIVATE.parse()?;
let from: AccountId = ctx.existing_private_accounts()[0];
// Create first private account at root
let command = Command::Account(AccountSubcommand::New(NewSubcommand::Private {
@ -47,8 +47,8 @@ async fn restore_keys_from_seed() -> Result<()> {
// Send to first private account
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_private_account_id(&from.to_string()),
to: Some(format_private_account_id(&to_account_id1.to_string())),
from: format_private_account_id(from),
to: Some(format_private_account_id(to_account_id1)),
to_npk: None,
to_ipk: None,
amount: 100,
@ -57,15 +57,15 @@ async fn restore_keys_from_seed() -> Result<()> {
// Send to second private account
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_private_account_id(&from.to_string()),
to: Some(format_private_account_id(&to_account_id2.to_string())),
from: format_private_account_id(from),
to: Some(format_private_account_id(to_account_id2)),
to_npk: None,
to_ipk: None,
amount: 101,
});
wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?;
let from: AccountId = ACC_SENDER.parse()?;
let from: AccountId = ctx.existing_public_accounts()[0];
// Create first public account at root
let command = Command::Account(AccountSubcommand::New(NewSubcommand::Public {
@ -93,8 +93,8 @@ async fn restore_keys_from_seed() -> Result<()> {
// Send to first public account
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_public_account_id(&from.to_string()),
to: Some(format_public_account_id(&to_account_id3.to_string())),
from: format_public_account_id(from),
to: Some(format_public_account_id(to_account_id3)),
to_npk: None,
to_ipk: None,
amount: 102,
@ -103,8 +103,8 @@ async fn restore_keys_from_seed() -> Result<()> {
// Send to second public account
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_public_account_id(&from.to_string()),
to: Some(format_public_account_id(&to_account_id4.to_string())),
from: format_public_account_id(from),
to: Some(format_public_account_id(to_account_id4)),
to_npk: None,
to_ipk: None,
amount: 103,
@ -166,8 +166,8 @@ async fn restore_keys_from_seed() -> Result<()> {
// Test that restored accounts can send transactions
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_private_account_id(&to_account_id1.to_string()),
to: Some(format_private_account_id(&to_account_id2.to_string())),
from: format_private_account_id(to_account_id1),
to: Some(format_private_account_id(to_account_id2)),
to_npk: None,
to_ipk: None,
amount: 10,
@ -175,8 +175,8 @@ async fn restore_keys_from_seed() -> Result<()> {
wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?;
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_public_account_id(&to_account_id3.to_string()),
to: Some(format_public_account_id(&to_account_id4.to_string())),
from: format_public_account_id(to_account_id3),
to: Some(format_public_account_id(to_account_id4)),
to_npk: None,
to_ipk: None,
amount: 11,
@ -188,11 +188,11 @@ async fn restore_keys_from_seed() -> Result<()> {
// Verify commitments exist for private accounts
let comm1 = ctx
.wallet()
.get_private_account_commitment(&to_account_id1)
.get_private_account_commitment(to_account_id1)
.expect("Acc 1 commitment should exist");
let comm2 = ctx
.wallet()
.get_private_account_commitment(&to_account_id2)
.get_private_account_commitment(to_account_id2)
.expect("Acc 2 commitment should exist");
assert!(verify_commitment_is_in_state(comm1, ctx.sequencer_client()).await);
@ -201,11 +201,11 @@ async fn restore_keys_from_seed() -> Result<()> {
// Verify public account balances
let acc3 = ctx
.sequencer_client()
.get_account_balance(to_account_id3.to_string())
.get_account_balance(to_account_id3)
.await?;
let acc4 = ctx
.sequencer_client()
.get_account_balance(to_account_id4.to_string())
.get_account_balance(to_account_id4)
.await?;
assert_eq!(acc3.balance, 91); // 102 - 11

View File

@ -3,8 +3,8 @@ use std::time::Duration;
use anyhow::{Context as _, Result};
use common::PINATA_BASE58;
use integration_tests::{
ACC_SENDER, ACC_SENDER_PRIVATE, TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext,
format_private_account_id, format_public_account_id, verify_commitment_is_in_state,
TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, format_private_account_id,
format_public_account_id, verify_commitment_is_in_state,
};
use log::info;
use tokio::test;
@ -22,12 +22,12 @@ async fn claim_pinata_to_existing_public_account() -> Result<()> {
let pinata_prize = 150;
let command = Command::Pinata(PinataProgramAgnosticSubcommand::Claim {
to: format_public_account_id(ACC_SENDER),
to: format_public_account_id(ctx.existing_public_accounts()[0]),
});
let pinata_balance_pre = ctx
.sequencer_client()
.get_account_balance(PINATA_BASE58.to_string())
.get_account_balance(PINATA_BASE58.parse().unwrap())
.await?
.balance;
@ -39,13 +39,13 @@ async fn claim_pinata_to_existing_public_account() -> Result<()> {
info!("Checking correct balance move");
let pinata_balance_post = ctx
.sequencer_client()
.get_account_balance(PINATA_BASE58.to_string())
.get_account_balance(PINATA_BASE58.parse().unwrap())
.await?
.balance;
let winner_balance_post = ctx
.sequencer_client()
.get_account_balance(ACC_SENDER.to_string())
.get_account_balance(ctx.existing_public_accounts()[0])
.await?
.balance;
@ -63,12 +63,12 @@ async fn claim_pinata_to_existing_private_account() -> Result<()> {
let pinata_prize = 150;
let command = Command::Pinata(PinataProgramAgnosticSubcommand::Claim {
to: format_private_account_id(ACC_SENDER_PRIVATE),
to: format_private_account_id(ctx.existing_private_accounts()[0]),
});
let pinata_balance_pre = ctx
.sequencer_client()
.get_account_balance(PINATA_BASE58.to_string())
.get_account_balance(PINATA_BASE58.parse().unwrap())
.await?
.balance;
@ -86,13 +86,13 @@ async fn claim_pinata_to_existing_private_account() -> Result<()> {
let new_commitment = ctx
.wallet()
.get_private_account_commitment(&ACC_SENDER_PRIVATE.parse()?)
.get_private_account_commitment(ctx.existing_private_accounts()[0])
.context("Failed to get private account commitment")?;
assert!(verify_commitment_is_in_state(new_commitment, ctx.sequencer_client()).await);
let pinata_balance_post = ctx
.sequencer_client()
.get_account_balance(PINATA_BASE58.to_string())
.get_account_balance(PINATA_BASE58.parse().unwrap())
.await?
.balance;
@ -122,7 +122,7 @@ async fn claim_pinata_to_new_private_account() -> Result<()> {
anyhow::bail!("Expected RegisterAccount return value");
};
let winner_account_id_formatted = format_private_account_id(&winner_account_id.to_string());
let winner_account_id_formatted = format_private_account_id(winner_account_id);
// Initialize account under auth transfer program
let command = Command::AuthTransfer(AuthTransferSubcommand::Init {
@ -135,7 +135,7 @@ async fn claim_pinata_to_new_private_account() -> Result<()> {
let new_commitment = ctx
.wallet()
.get_private_account_commitment(&winner_account_id)
.get_private_account_commitment(winner_account_id)
.context("Failed to get private account commitment")?;
assert!(verify_commitment_is_in_state(new_commitment, ctx.sequencer_client()).await);
@ -146,7 +146,7 @@ async fn claim_pinata_to_new_private_account() -> Result<()> {
let pinata_balance_pre = ctx
.sequencer_client()
.get_account_balance(PINATA_BASE58.to_string())
.get_account_balance(PINATA_BASE58.parse().unwrap())
.await?
.balance;
@ -157,13 +157,13 @@ async fn claim_pinata_to_new_private_account() -> Result<()> {
let new_commitment = ctx
.wallet()
.get_private_account_commitment(&winner_account_id)
.get_private_account_commitment(winner_account_id)
.context("Failed to get private account commitment")?;
assert!(verify_commitment_is_in_state(new_commitment, ctx.sequencer_client()).await);
let pinata_balance_post = ctx
.sequencer_client()
.get_account_balance(PINATA_BASE58.to_string())
.get_account_balance(PINATA_BASE58.parse().unwrap())
.await?
.balance;

View File

@ -45,11 +45,13 @@ async fn deploy_and_execute_program() -> Result<()> {
let _response = ctx.sequencer_client().send_tx_public(transaction).await?;
info!("Waiting for next block creation");
tokio::time::sleep(Duration::from_secs(TIME_TO_WAIT_FOR_BLOCK_SECONDS)).await;
// Waiting for long time as it may take some time for such a big transaction to be included in a
// block
tokio::time::sleep(Duration::from_secs(2 * TIME_TO_WAIT_FOR_BLOCK_SECONDS)).await;
let post_state_account = ctx
.sequencer_client()
.get_account(account_id.to_string())
.get_account(account_id)
.await?
.account;

View File

@ -63,8 +63,8 @@ async fn create_and_transfer_public_token() -> Result<()> {
let name = "A NAME".to_string();
let total_supply = 37;
let subcommand = TokenProgramAgnosticSubcommand::New {
definition_account_id: format_public_account_id(&definition_account_id.to_string()),
supply_account_id: format_public_account_id(&supply_account_id.to_string()),
definition_account_id: format_public_account_id(definition_account_id),
supply_account_id: format_public_account_id(supply_account_id),
name: name.clone(),
total_supply,
};
@ -76,7 +76,7 @@ async fn create_and_transfer_public_token() -> Result<()> {
// Check the status of the token definition account
let definition_acc = ctx
.sequencer_client()
.get_account(definition_account_id.to_string())
.get_account(definition_account_id)
.await?
.account;
let token_definition = TokenDefinition::try_from(&definition_acc.data)?;
@ -94,7 +94,7 @@ async fn create_and_transfer_public_token() -> Result<()> {
// Check the status of the token holding account with the total supply
let supply_acc = ctx
.sequencer_client()
.get_account(supply_account_id.to_string())
.get_account(supply_account_id)
.await?
.account;
@ -112,8 +112,8 @@ async fn create_and_transfer_public_token() -> Result<()> {
// Transfer 7 tokens from supply_acc to recipient_account_id
let transfer_amount = 7;
let subcommand = TokenProgramAgnosticSubcommand::Send {
from: format_public_account_id(&supply_account_id.to_string()),
to: Some(format_public_account_id(&recipient_account_id.to_string())),
from: format_public_account_id(supply_account_id),
to: Some(format_public_account_id(recipient_account_id)),
to_npk: None,
to_ipk: None,
amount: transfer_amount,
@ -127,7 +127,7 @@ async fn create_and_transfer_public_token() -> Result<()> {
// Check the status of the supply account after transfer
let supply_acc = ctx
.sequencer_client()
.get_account(supply_account_id.to_string())
.get_account(supply_account_id)
.await?
.account;
assert_eq!(supply_acc.program_owner, Program::token().id());
@ -143,7 +143,7 @@ async fn create_and_transfer_public_token() -> Result<()> {
// Check the status of the recipient account after transfer
let recipient_acc = ctx
.sequencer_client()
.get_account(recipient_account_id.to_string())
.get_account(recipient_account_id)
.await?
.account;
assert_eq!(recipient_acc.program_owner, Program::token().id());
@ -159,8 +159,8 @@ async fn create_and_transfer_public_token() -> Result<()> {
// Burn 3 tokens from recipient_acc
let burn_amount = 3;
let subcommand = TokenProgramAgnosticSubcommand::Burn {
definition: format_public_account_id(&definition_account_id.to_string()),
holder: format_public_account_id(&recipient_account_id.to_string()),
definition: format_public_account_id(definition_account_id),
holder: format_public_account_id(recipient_account_id),
amount: burn_amount,
};
@ -172,7 +172,7 @@ async fn create_and_transfer_public_token() -> Result<()> {
// Check the status of the token definition account after burn
let definition_acc = ctx
.sequencer_client()
.get_account(definition_account_id.to_string())
.get_account(definition_account_id)
.await?
.account;
let token_definition = TokenDefinition::try_from(&definition_acc.data)?;
@ -189,7 +189,7 @@ async fn create_and_transfer_public_token() -> Result<()> {
// Check the status of the recipient account after burn
let recipient_acc = ctx
.sequencer_client()
.get_account(recipient_account_id.to_string())
.get_account(recipient_account_id)
.await?
.account;
let token_holding = TokenHolding::try_from(&recipient_acc.data)?;
@ -205,8 +205,8 @@ async fn create_and_transfer_public_token() -> Result<()> {
// Mint 10 tokens at recipient_acc
let mint_amount = 10;
let subcommand = TokenProgramAgnosticSubcommand::Mint {
definition: format_public_account_id(&definition_account_id.to_string()),
holder: Some(format_public_account_id(&recipient_account_id.to_string())),
definition: format_public_account_id(definition_account_id),
holder: Some(format_public_account_id(recipient_account_id)),
holder_npk: None,
holder_ipk: None,
amount: mint_amount,
@ -220,7 +220,7 @@ async fn create_and_transfer_public_token() -> Result<()> {
// Check the status of the token definition account after mint
let definition_acc = ctx
.sequencer_client()
.get_account(definition_account_id.to_string())
.get_account(definition_account_id)
.await?
.account;
let token_definition = TokenDefinition::try_from(&definition_acc.data)?;
@ -237,7 +237,7 @@ async fn create_and_transfer_public_token() -> Result<()> {
// Check the status of the recipient account after mint
let recipient_acc = ctx
.sequencer_client()
.get_account(recipient_account_id.to_string())
.get_account(recipient_account_id)
.await?
.account;
let token_holding = TokenHolding::try_from(&recipient_acc.data)?;
@ -302,8 +302,8 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> {
let name = "A NAME".to_string();
let total_supply = 37;
let subcommand = TokenProgramAgnosticSubcommand::New {
definition_account_id: format_public_account_id(&definition_account_id.to_string()),
supply_account_id: format_private_account_id(&supply_account_id.to_string()),
definition_account_id: format_public_account_id(definition_account_id),
supply_account_id: format_private_account_id(supply_account_id),
name: name.clone(),
total_supply,
};
@ -316,7 +316,7 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> {
// Check the status of the token definition account
let definition_acc = ctx
.sequencer_client()
.get_account(definition_account_id.to_string())
.get_account(definition_account_id)
.await?
.account;
let token_definition = TokenDefinition::try_from(&definition_acc.data)?;
@ -333,15 +333,15 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> {
let new_commitment1 = ctx
.wallet()
.get_private_account_commitment(&supply_account_id)
.get_private_account_commitment(supply_account_id)
.context("Failed to get supply account commitment")?;
assert!(verify_commitment_is_in_state(new_commitment1, ctx.sequencer_client()).await);
// Transfer 7 tokens from supply_acc to recipient_account_id
let transfer_amount = 7;
let subcommand = TokenProgramAgnosticSubcommand::Send {
from: format_private_account_id(&supply_account_id.to_string()),
to: Some(format_private_account_id(&recipient_account_id.to_string())),
from: format_private_account_id(supply_account_id),
to: Some(format_private_account_id(recipient_account_id)),
to_npk: None,
to_ipk: None,
amount: transfer_amount,
@ -354,21 +354,21 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> {
let new_commitment1 = ctx
.wallet()
.get_private_account_commitment(&supply_account_id)
.get_private_account_commitment(supply_account_id)
.context("Failed to get supply account commitment")?;
assert!(verify_commitment_is_in_state(new_commitment1, ctx.sequencer_client()).await);
let new_commitment2 = ctx
.wallet()
.get_private_account_commitment(&recipient_account_id)
.get_private_account_commitment(recipient_account_id)
.context("Failed to get recipient account commitment")?;
assert!(verify_commitment_is_in_state(new_commitment2, ctx.sequencer_client()).await);
// Burn 3 tokens from recipient_acc
let burn_amount = 3;
let subcommand = TokenProgramAgnosticSubcommand::Burn {
definition: format_public_account_id(&definition_account_id.to_string()),
holder: format_private_account_id(&recipient_account_id.to_string()),
definition: format_public_account_id(definition_account_id),
holder: format_private_account_id(recipient_account_id),
amount: burn_amount,
};
@ -380,7 +380,7 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> {
// Check the token definition account after burn
let definition_acc = ctx
.sequencer_client()
.get_account(definition_account_id.to_string())
.get_account(definition_account_id)
.await?
.account;
let token_definition = TokenDefinition::try_from(&definition_acc.data)?;
@ -396,14 +396,14 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> {
let new_commitment2 = ctx
.wallet()
.get_private_account_commitment(&recipient_account_id)
.get_private_account_commitment(recipient_account_id)
.context("Failed to get recipient account commitment")?;
assert!(verify_commitment_is_in_state(new_commitment2, ctx.sequencer_client()).await);
// Check the recipient account balance after burn
let recipient_acc = ctx
.wallet()
.get_account_private(&recipient_account_id)
.get_account_private(recipient_account_id)
.context("Failed to get recipient account")?;
let token_holding = TokenHolding::try_from(&recipient_acc.data)?;
@ -458,8 +458,8 @@ async fn create_token_with_private_definition() -> Result<()> {
let name = "A NAME".to_string();
let total_supply = 37;
let subcommand = TokenProgramAgnosticSubcommand::New {
definition_account_id: format_private_account_id(&definition_account_id.to_string()),
supply_account_id: format_public_account_id(&supply_account_id.to_string()),
definition_account_id: format_private_account_id(definition_account_id),
supply_account_id: format_public_account_id(supply_account_id),
name: name.clone(),
total_supply,
};
@ -472,14 +472,14 @@ async fn create_token_with_private_definition() -> Result<()> {
// Verify private definition commitment
let new_commitment = ctx
.wallet()
.get_private_account_commitment(&definition_account_id)
.get_private_account_commitment(definition_account_id)
.context("Failed to get definition commitment")?;
assert!(verify_commitment_is_in_state(new_commitment, ctx.sequencer_client()).await);
// Verify supply account
let supply_acc = ctx
.sequencer_client()
.get_account(supply_account_id.to_string())
.get_account(supply_account_id)
.await?
.account;
@ -522,10 +522,8 @@ async fn create_token_with_private_definition() -> Result<()> {
// Mint to public account
let mint_amount_public = 10;
let subcommand = TokenProgramAgnosticSubcommand::Mint {
definition: format_private_account_id(&definition_account_id.to_string()),
holder: Some(format_public_account_id(
&recipient_account_id_public.to_string(),
)),
definition: format_private_account_id(definition_account_id),
holder: Some(format_public_account_id(recipient_account_id_public)),
holder_npk: None,
holder_ipk: None,
amount: mint_amount_public,
@ -539,7 +537,7 @@ async fn create_token_with_private_definition() -> Result<()> {
// Verify definition account has updated supply
let definition_acc = ctx
.wallet()
.get_account_private(&definition_account_id)
.get_account_private(definition_account_id)
.context("Failed to get definition account")?;
let token_definition = TokenDefinition::try_from(&definition_acc.data)?;
@ -555,7 +553,7 @@ async fn create_token_with_private_definition() -> Result<()> {
// Verify public recipient received tokens
let recipient_acc = ctx
.sequencer_client()
.get_account(recipient_account_id_public.to_string())
.get_account(recipient_account_id_public)
.await?
.account;
let token_holding = TokenHolding::try_from(&recipient_acc.data)?;
@ -571,10 +569,8 @@ async fn create_token_with_private_definition() -> Result<()> {
// Mint to private account
let mint_amount_private = 5;
let subcommand = TokenProgramAgnosticSubcommand::Mint {
definition: format_private_account_id(&definition_account_id.to_string()),
holder: Some(format_private_account_id(
&recipient_account_id_private.to_string(),
)),
definition: format_private_account_id(definition_account_id),
holder: Some(format_private_account_id(recipient_account_id_private)),
holder_npk: None,
holder_ipk: None,
amount: mint_amount_private,
@ -588,14 +584,14 @@ async fn create_token_with_private_definition() -> Result<()> {
// Verify private recipient commitment
let new_commitment = ctx
.wallet()
.get_private_account_commitment(&recipient_account_id_private)
.get_private_account_commitment(recipient_account_id_private)
.context("Failed to get recipient commitment")?;
assert!(verify_commitment_is_in_state(new_commitment, ctx.sequencer_client()).await);
// Verify private recipient balance
let recipient_acc_private = ctx
.wallet()
.get_account_private(&recipient_account_id_private)
.get_account_private(recipient_account_id_private)
.context("Failed to get private recipient account")?;
let token_holding = TokenHolding::try_from(&recipient_acc_private.data)?;
@ -646,8 +642,8 @@ async fn create_token_with_private_definition_and_supply() -> Result<()> {
let name = "A NAME".to_string();
let total_supply = 37;
let subcommand = TokenProgramAgnosticSubcommand::New {
definition_account_id: format_private_account_id(&definition_account_id.to_string()),
supply_account_id: format_private_account_id(&supply_account_id.to_string()),
definition_account_id: format_private_account_id(definition_account_id),
supply_account_id: format_private_account_id(supply_account_id),
name,
total_supply,
};
@ -660,21 +656,21 @@ async fn create_token_with_private_definition_and_supply() -> Result<()> {
// Verify definition commitment
let definition_commitment = ctx
.wallet()
.get_private_account_commitment(&definition_account_id)
.get_private_account_commitment(definition_account_id)
.context("Failed to get definition commitment")?;
assert!(verify_commitment_is_in_state(definition_commitment, ctx.sequencer_client()).await);
// Verify supply commitment
let supply_commitment = ctx
.wallet()
.get_private_account_commitment(&supply_account_id)
.get_private_account_commitment(supply_account_id)
.context("Failed to get supply commitment")?;
assert!(verify_commitment_is_in_state(supply_commitment, ctx.sequencer_client()).await);
// Verify supply balance
let supply_acc = ctx
.wallet()
.get_account_private(&supply_account_id)
.get_account_private(supply_account_id)
.context("Failed to get supply account")?;
let token_holding = TokenHolding::try_from(&supply_acc.data)?;
@ -702,8 +698,8 @@ async fn create_token_with_private_definition_and_supply() -> Result<()> {
// Transfer tokens
let transfer_amount = 7;
let subcommand = TokenProgramAgnosticSubcommand::Send {
from: format_private_account_id(&supply_account_id.to_string()),
to: Some(format_private_account_id(&recipient_account_id.to_string())),
from: format_private_account_id(supply_account_id),
to: Some(format_private_account_id(recipient_account_id)),
to_npk: None,
to_ipk: None,
amount: transfer_amount,
@ -717,20 +713,20 @@ async fn create_token_with_private_definition_and_supply() -> Result<()> {
// Verify both commitments updated
let supply_commitment = ctx
.wallet()
.get_private_account_commitment(&supply_account_id)
.get_private_account_commitment(supply_account_id)
.context("Failed to get supply commitment")?;
assert!(verify_commitment_is_in_state(supply_commitment, ctx.sequencer_client()).await);
let recipient_commitment = ctx
.wallet()
.get_private_account_commitment(&recipient_account_id)
.get_private_account_commitment(recipient_account_id)
.context("Failed to get recipient commitment")?;
assert!(verify_commitment_is_in_state(recipient_commitment, ctx.sequencer_client()).await);
// Verify balances
let supply_acc = ctx
.wallet()
.get_account_private(&supply_account_id)
.get_account_private(supply_account_id)
.context("Failed to get supply account")?;
let token_holding = TokenHolding::try_from(&supply_acc.data)?;
assert_eq!(
@ -743,7 +739,7 @@ async fn create_token_with_private_definition_and_supply() -> Result<()> {
let recipient_acc = ctx
.wallet()
.get_account_private(&recipient_account_id)
.get_account_private(recipient_account_id)
.context("Failed to get recipient account")?;
let token_holding = TokenHolding::try_from(&recipient_acc.data)?;
assert_eq!(
@ -806,8 +802,8 @@ async fn shielded_token_transfer() -> Result<()> {
let name = "A NAME".to_string();
let total_supply = 37;
let subcommand = TokenProgramAgnosticSubcommand::New {
definition_account_id: format_public_account_id(&definition_account_id.to_string()),
supply_account_id: format_public_account_id(&supply_account_id.to_string()),
definition_account_id: format_public_account_id(definition_account_id),
supply_account_id: format_public_account_id(supply_account_id),
name,
total_supply,
};
@ -820,8 +816,8 @@ async fn shielded_token_transfer() -> Result<()> {
// Perform shielded transfer: public supply -> private recipient
let transfer_amount = 7;
let subcommand = TokenProgramAgnosticSubcommand::Send {
from: format_public_account_id(&supply_account_id.to_string()),
to: Some(format_private_account_id(&recipient_account_id.to_string())),
from: format_public_account_id(supply_account_id),
to: Some(format_private_account_id(recipient_account_id)),
to_npk: None,
to_ipk: None,
amount: transfer_amount,
@ -835,7 +831,7 @@ async fn shielded_token_transfer() -> Result<()> {
// Verify supply account balance
let supply_acc = ctx
.sequencer_client()
.get_account(supply_account_id.to_string())
.get_account(supply_account_id)
.await?
.account;
let token_holding = TokenHolding::try_from(&supply_acc.data)?;
@ -850,14 +846,14 @@ async fn shielded_token_transfer() -> Result<()> {
// Verify recipient commitment exists
let new_commitment = ctx
.wallet()
.get_private_account_commitment(&recipient_account_id)
.get_private_account_commitment(recipient_account_id)
.context("Failed to get recipient commitment")?;
assert!(verify_commitment_is_in_state(new_commitment, ctx.sequencer_client()).await);
// Verify recipient balance
let recipient_acc = ctx
.wallet()
.get_account_private(&recipient_account_id)
.get_account_private(recipient_account_id)
.context("Failed to get recipient account")?;
let token_holding = TokenHolding::try_from(&recipient_acc.data)?;
assert_eq!(
@ -920,8 +916,8 @@ async fn deshielded_token_transfer() -> Result<()> {
let name = "A NAME".to_string();
let total_supply = 37;
let subcommand = TokenProgramAgnosticSubcommand::New {
definition_account_id: format_public_account_id(&definition_account_id.to_string()),
supply_account_id: format_private_account_id(&supply_account_id.to_string()),
definition_account_id: format_public_account_id(definition_account_id),
supply_account_id: format_private_account_id(supply_account_id),
name,
total_supply,
};
@ -934,8 +930,8 @@ async fn deshielded_token_transfer() -> Result<()> {
// Perform deshielded transfer: private supply -> public recipient
let transfer_amount = 7;
let subcommand = TokenProgramAgnosticSubcommand::Send {
from: format_private_account_id(&supply_account_id.to_string()),
to: Some(format_public_account_id(&recipient_account_id.to_string())),
from: format_private_account_id(supply_account_id),
to: Some(format_public_account_id(recipient_account_id)),
to_npk: None,
to_ipk: None,
amount: transfer_amount,
@ -949,14 +945,14 @@ async fn deshielded_token_transfer() -> Result<()> {
// Verify supply account commitment exists
let new_commitment = ctx
.wallet()
.get_private_account_commitment(&supply_account_id)
.get_private_account_commitment(supply_account_id)
.context("Failed to get supply commitment")?;
assert!(verify_commitment_is_in_state(new_commitment, ctx.sequencer_client()).await);
// Verify supply balance
let supply_acc = ctx
.wallet()
.get_account_private(&supply_account_id)
.get_account_private(supply_account_id)
.context("Failed to get supply account")?;
let token_holding = TokenHolding::try_from(&supply_acc.data)?;
assert_eq!(
@ -970,7 +966,7 @@ async fn deshielded_token_transfer() -> Result<()> {
// Verify recipient balance
let recipient_acc = ctx
.sequencer_client()
.get_account(recipient_account_id.to_string())
.get_account(recipient_account_id)
.await?
.account;
let token_holding = TokenHolding::try_from(&recipient_acc.data)?;
@ -1021,8 +1017,8 @@ async fn token_claiming_path_with_private_accounts() -> Result<()> {
let name = "A NAME".to_string();
let total_supply = 37;
let subcommand = TokenProgramAgnosticSubcommand::New {
definition_account_id: format_private_account_id(&definition_account_id.to_string()),
supply_account_id: format_private_account_id(&supply_account_id.to_string()),
definition_account_id: format_private_account_id(definition_account_id),
supply_account_id: format_private_account_id(supply_account_id),
name,
total_supply,
};
@ -1050,14 +1046,14 @@ async fn token_claiming_path_with_private_accounts() -> Result<()> {
.wallet()
.storage()
.user_data
.get_private_account(&recipient_account_id)
.get_private_account(recipient_account_id)
.cloned()
.context("Failed to get private account keys")?;
// Mint using claiming path (foreign account)
let mint_amount = 9;
let subcommand = TokenProgramAgnosticSubcommand::Mint {
definition: format_private_account_id(&definition_account_id.to_string()),
definition: format_private_account_id(definition_account_id),
holder: None,
holder_npk: Some(hex::encode(holder_keys.nullifer_public_key.0)),
holder_ipk: Some(hex::encode(holder_keys.incoming_viewing_public_key.0)),
@ -1076,14 +1072,14 @@ async fn token_claiming_path_with_private_accounts() -> Result<()> {
// Verify commitment exists
let recipient_commitment = ctx
.wallet()
.get_private_account_commitment(&recipient_account_id)
.get_private_account_commitment(recipient_account_id)
.context("Failed to get recipient commitment")?;
assert!(verify_commitment_is_in_state(recipient_commitment, ctx.sequencer_client()).await);
// Verify balance
let recipient_acc = ctx
.wallet()
.get_account_private(&recipient_account_id)
.get_account_private(recipient_account_id)
.context("Failed to get recipient account")?;
let token_holding = TokenHolding::try_from(&recipient_acc.data)?;
assert_eq!(

View File

@ -1,8 +1,11 @@
use std::time::{Duration, Instant};
use anyhow::Result;
use integration_tests::TestContext;
use key_protocol::key_management::ephemeral_key_holder::EphemeralKeyHolder;
use integration_tests::{
TestContext,
config::{InitialData, SequencerPartialConfig},
};
use key_protocol::key_management::{KeyChain, ephemeral_key_holder::EphemeralKeyHolder};
use log::info;
use nssa::{
Account, AccountId, PrivacyPreservingTransaction, PrivateKey, PublicKey, PublicTransaction,
@ -15,21 +18,20 @@ use nssa_core::{
account::{AccountWithMetadata, data::Data},
encryption::IncomingViewingPublicKey,
};
use sequencer_core::config::{AccountInitialData, CommitmentsInitialData, SequencerConfig};
use tokio::test;
// TODO: Make a proper benchmark instead of an ad-hoc test
#[test]
pub async fn tps_test() -> Result<()> {
let num_transactions = 300 * 5;
let target_tps = 12;
let target_tps = 8;
let tps_test = TpsTestManager::new(target_tps, num_transactions);
let ctx = TestContext::new_with_sequencer_and_maybe_indexer_configs(
tps_test.generate_sequencer_config(),
None,
)
.await?;
let ctx = TestContext::builder()
.with_sequencer_partial_config(TpsTestManager::generate_sequencer_partial_config())
.with_initial_data(tps_test.generate_initial_data())
.build()
.await?;
let target_time = tps_test.target_time();
info!(
@ -59,12 +61,10 @@ pub async fn tps_test() -> Result<()> {
let tx_obj = ctx
.sequencer_client()
.get_transaction_by_hash(tx_hash.clone())
.get_transaction_by_hash(*tx_hash)
.await
.inspect_err(|err| {
log::warn!(
"Failed to get transaction by hash {tx_hash:#?} with error: {err:#?}"
)
log::warn!("Failed to get transaction by hash {tx_hash} with error: {err:#?}")
});
if let Ok(tx_obj) = tx_obj
@ -151,46 +151,35 @@ impl TpsTestManager {
/// Generates a sequencer configuration with initial balance in a number of public accounts.
/// The transactions generated with the function `build_public_txs` will be valid in a node
/// started with the config from this method.
pub(crate) fn generate_sequencer_config(&self) -> SequencerConfig {
fn generate_initial_data(&self) -> InitialData {
// Create public public keypairs
let initial_public_accounts = self
let public_accounts = self
.public_keypairs
.iter()
.map(|(_, account_id)| AccountInitialData {
account_id: account_id.to_string(),
balance: 10,
})
.map(|(key, _)| (key.clone(), 10))
.collect();
// Generate an initial commitment to be used with the privacy preserving transaction
// created with the `build_privacy_transaction` function.
let sender_nsk = [1; 32];
let sender_npk = NullifierPublicKey::from(&sender_nsk);
let key_chain = KeyChain::new_os_random();
let account = Account {
balance: 100,
nonce: 0xdeadbeef,
program_owner: Program::authenticated_transfer_program().id(),
data: Data::default(),
};
let initial_commitment = CommitmentsInitialData {
npk: sender_npk,
account,
};
SequencerConfig {
home: ".".into(),
override_rust_log: None,
genesis_id: 1,
is_genesis_random: true,
InitialData {
public_accounts,
private_accounts: vec![(key_chain, account)],
}
}
fn generate_sequencer_partial_config() -> SequencerPartialConfig {
SequencerPartialConfig {
max_num_tx_in_block: 300,
mempool_max_size: 10000,
block_create_timeout_millis: 12000,
port: 3040,
initial_accounts: initial_public_accounts,
initial_commitments: vec![initial_commitment],
signing_key: [37; 32],
bedrock_config: None,
retry_pending_blocks_timeout_millis: 1000 * 60 * 4,
mempool_max_size: 10_000,
block_create_timeout_millis: 12_000,
}
}
}

View File

@ -6,12 +6,9 @@ use std::{
};
use anyhow::Result;
use integration_tests::{
ACC_RECEIVER, ACC_SENDER, ACC_SENDER_PRIVATE, BlockingTestContext,
TIME_TO_WAIT_FOR_BLOCK_SECONDS,
};
use integration_tests::{BlockingTestContext, TIME_TO_WAIT_FOR_BLOCK_SECONDS};
use log::info;
use nssa::{Account, AccountId, PublicKey, program::Program};
use nssa::{Account, AccountId, PrivateKey, PublicKey, program::Program};
use nssa_core::program::DEFAULT_PROGRAM_ID;
use tempfile::tempdir;
use wallet::WalletCore;
@ -328,7 +325,7 @@ fn test_wallet_ffi_list_accounts() {
#[test]
fn test_wallet_ffi_get_balance_public() -> Result<()> {
let ctx = BlockingTestContext::new()?;
let account_id: AccountId = ACC_SENDER.parse().unwrap();
let account_id: AccountId = ctx.ctx.existing_public_accounts()[0];
let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx);
let balance = unsafe {
@ -356,7 +353,7 @@ fn test_wallet_ffi_get_balance_public() -> Result<()> {
#[test]
fn test_wallet_ffi_get_account_public() -> Result<()> {
let ctx = BlockingTestContext::new()?;
let account_id: AccountId = ACC_SENDER.parse().unwrap();
let account_id: AccountId = ctx.ctx.existing_public_accounts()[0];
let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx);
let mut out_account = FfiAccount::default();
@ -391,7 +388,7 @@ fn test_wallet_ffi_get_account_public() -> Result<()> {
#[test]
fn test_wallet_ffi_get_public_account_keys() -> Result<()> {
let ctx = BlockingTestContext::new()?;
let account_id: AccountId = ACC_SENDER.parse().unwrap();
let account_id: AccountId = ctx.ctx.existing_public_accounts()[0];
let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx);
let mut out_key = FfiPublicAccountKey::default();
@ -409,7 +406,7 @@ fn test_wallet_ffi_get_public_account_keys() -> Result<()> {
let private_key = ctx
.ctx
.wallet()
.get_account_public_signing_key(&account_id)
.get_account_public_signing_key(account_id)
.unwrap();
PublicKey::new_from_private_key(private_key)
};
@ -428,7 +425,7 @@ fn test_wallet_ffi_get_public_account_keys() -> Result<()> {
#[test]
fn test_wallet_ffi_get_private_account_keys() -> Result<()> {
let ctx = BlockingTestContext::new()?;
let account_id: AccountId = ACC_SENDER_PRIVATE.parse().unwrap();
let account_id: AccountId = ctx.ctx.existing_public_accounts()[0];
let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx);
let mut keys = FfiPrivateAccountKeys::default();
@ -446,7 +443,7 @@ fn test_wallet_ffi_get_private_account_keys() -> Result<()> {
.wallet()
.storage()
.user_data
.get_private_account(&account_id)
.get_private_account(account_id)
.unwrap()
.0;
@ -468,14 +465,15 @@ fn test_wallet_ffi_get_private_account_keys() -> Result<()> {
#[test]
fn test_wallet_ffi_account_id_to_base58() {
let account_id_str = ACC_SENDER;
let account_id: AccountId = account_id_str.parse().unwrap();
let private_key = PrivateKey::new_os_random();
let public_key = PublicKey::new_from_private_key(&private_key);
let account_id = AccountId::from(&public_key);
let ffi_bytes: FfiBytes32 = (&account_id).into();
let ptr = unsafe { wallet_ffi_account_id_to_base58((&ffi_bytes) as *const FfiBytes32) };
let ffi_result = unsafe { CStr::from_ptr(ptr).to_str().unwrap() };
assert_eq!(account_id_str, ffi_result);
assert_eq!(account_id.to_string(), ffi_result);
unsafe {
wallet_ffi_free_string(ptr);
@ -484,8 +482,11 @@ fn test_wallet_ffi_account_id_to_base58() {
#[test]
fn test_wallet_ffi_base58_to_account_id() {
let account_id_str = ACC_SENDER;
let account_id_c_str = CString::new(account_id_str).unwrap();
let private_key = PrivateKey::new_os_random();
let public_key = PublicKey::new_from_private_key(&private_key);
let account_id = AccountId::from(&public_key);
let account_id_str = account_id.to_string();
let account_id_c_str = CString::new(account_id_str.clone()).unwrap();
let account_id: AccountId = unsafe {
let mut out_account_id_bytes = FfiBytes32::default();
wallet_ffi_account_id_from_base58(
@ -566,8 +567,8 @@ fn test_wallet_ffi_init_public_account_auth_transfer() -> Result<()> {
fn test_wallet_ffi_transfer_public() -> Result<()> {
let ctx = BlockingTestContext::new().unwrap();
let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx);
let from: FfiBytes32 = (&ACC_SENDER.parse::<AccountId>().unwrap()).into();
let to: FfiBytes32 = (&ACC_RECEIVER.parse::<AccountId>().unwrap()).into();
let from: FfiBytes32 = (&ctx.ctx.existing_public_accounts()[0]).into();
let to: FfiBytes32 = (&ctx.ctx.existing_public_accounts()[1]).into();
let amount: [u8; 16] = 100u128.to_le_bytes();
let mut transfer_result = FfiTransferResult::default();

View File

@ -22,4 +22,4 @@ aes-gcm.workspace = true
bip39.workspace = true
hmac-sha512.workspace = true
thiserror.workspace = true
itertools.workspace = true
itertools.workspace = true

View File

@ -272,7 +272,7 @@ impl KeyTree<ChildKeysPublic> {
while let Some(curr_id) = id_stack.pop() {
if let Some(node) = self.key_map.get(&curr_id) {
let address = node.account_id();
let node_acc = client.get_account(address.to_string()).await?.account;
let node_acc = client.get_account(address).await?.account;
if node_acc == nssa::Account::default() && curr_id != ChainIndex::root() {
self.remove(address);
@ -307,7 +307,7 @@ impl KeyTree<ChildKeysPublic> {
for id in ChainIndex::chain_ids_at_depth(i) {
if let Some(node) = self.key_map.get(&id) {
let address = node.account_id();
let node_acc = client.get_account(address.to_string()).await?.account;
let node_acc = client.get_account(address).await?.account;
if node_acc == nssa::Account::default() {
let addr = node.account_id();

View File

@ -66,11 +66,11 @@ impl SeedHolder {
}
// Safe unwrap
*hash.first_chunk::<32>().unwrap()
HashType(*hash.first_chunk::<32>().unwrap())
}
pub fn produce_top_secret_key_holder(&self) -> SecretSpendingKey {
SecretSpendingKey(self.generate_secret_spending_key_hash())
SecretSpendingKey(self.generate_secret_spending_key_hash().into())
}
}
@ -94,7 +94,7 @@ impl SecretSpendingKey {
hasher.update([2u8]);
hasher.update([0u8; 22]);
<HashType>::from(hasher.finalize_fixed())
hasher.finalize_fixed().into()
}
pub fn generate_outgoing_viewing_secret_key(&self) -> OutgoingViewingSecretKey {
@ -105,7 +105,7 @@ impl SecretSpendingKey {
hasher.update([3u8]);
hasher.update([0u8; 22]);
<HashType>::from(hasher.finalize_fixed())
hasher.finalize_fixed().into()
}
pub fn produce_private_key_holder(&self) -> PrivateKeyHolder {

View File

@ -1,4 +1,4 @@
use std::collections::HashMap;
use std::collections::BTreeMap;
use anyhow::Result;
use k256::AffinePoint;
@ -15,10 +15,10 @@ pub type PublicKey = AffinePoint;
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct NSSAUserData {
/// Default public accounts
pub default_pub_account_signing_keys: HashMap<nssa::AccountId, nssa::PrivateKey>,
pub default_pub_account_signing_keys: BTreeMap<nssa::AccountId, nssa::PrivateKey>,
/// Default private accounts
pub default_user_private_accounts:
HashMap<nssa::AccountId, (KeyChain, nssa_core::account::Account)>,
BTreeMap<nssa::AccountId, (KeyChain, nssa_core::account::Account)>,
/// Tree of public keys
pub public_key_tree: KeyTreePublic,
/// Tree of private keys
@ -27,7 +27,7 @@ pub struct NSSAUserData {
impl NSSAUserData {
fn valid_public_key_transaction_pairing_check(
accounts_keys_map: &HashMap<nssa::AccountId, nssa::PrivateKey>,
accounts_keys_map: &BTreeMap<nssa::AccountId, nssa::PrivateKey>,
) -> bool {
let mut check_res = true;
for (account_id, key) in accounts_keys_map {
@ -42,7 +42,7 @@ impl NSSAUserData {
}
fn valid_private_key_transaction_pairing_check(
accounts_keys_map: &HashMap<nssa::AccountId, (KeyChain, nssa_core::account::Account)>,
accounts_keys_map: &BTreeMap<nssa::AccountId, (KeyChain, nssa_core::account::Account)>,
) -> bool {
let mut check_res = true;
for (account_id, (key, _)) in accounts_keys_map {
@ -56,8 +56,8 @@ impl NSSAUserData {
}
pub fn new_with_accounts(
default_accounts_keys: HashMap<nssa::AccountId, nssa::PrivateKey>,
default_accounts_key_chains: HashMap<
default_accounts_keys: BTreeMap<nssa::AccountId, nssa::PrivateKey>,
default_accounts_key_chains: BTreeMap<
nssa::AccountId,
(KeyChain, nssa_core::account::Account),
>,
@ -106,14 +106,14 @@ impl NSSAUserData {
/// Returns the signing key for public transaction signatures
pub fn get_pub_account_signing_key(
&self,
account_id: &nssa::AccountId,
account_id: nssa::AccountId,
) -> Option<&nssa::PrivateKey> {
// First seek in defaults
if let Some(key) = self.default_pub_account_signing_keys.get(account_id) {
if let Some(key) = self.default_pub_account_signing_keys.get(&account_id) {
Some(key)
// Then seek in tree
} else {
self.public_key_tree.get_node(*account_id).map(Into::into)
self.public_key_tree.get_node(account_id).map(Into::into)
}
}
@ -139,14 +139,14 @@ impl NSSAUserData {
/// Returns the signing key for public transaction signatures
pub fn get_private_account(
&self,
account_id: &nssa::AccountId,
account_id: nssa::AccountId,
) -> Option<&(KeyChain, nssa_core::account::Account)> {
// First seek in defaults
if let Some(key) = self.default_user_private_accounts.get(account_id) {
if let Some(key) = self.default_user_private_accounts.get(&account_id) {
Some(key)
// Then seek in tree
} else {
self.private_key_tree.get_node(*account_id).map(Into::into)
self.private_key_tree.get_node(account_id).map(Into::into)
}
}
@ -166,20 +166,30 @@ impl NSSAUserData {
}
}
pub fn account_ids(&self) -> impl Iterator<Item = &nssa::AccountId> {
pub fn account_ids(&self) -> impl Iterator<Item = nssa::AccountId> {
self.public_account_ids().chain(self.private_account_ids())
}
pub fn public_account_ids(&self) -> impl Iterator<Item = nssa::AccountId> {
self.default_pub_account_signing_keys
.keys()
.chain(self.public_key_tree.account_id_map.keys())
.chain(self.default_user_private_accounts.keys())
.chain(self.private_key_tree.account_id_map.keys())
.copied()
.chain(self.public_key_tree.account_id_map.keys().copied())
}
pub fn private_account_ids(&self) -> impl Iterator<Item = nssa::AccountId> {
self.default_user_private_accounts
.keys()
.copied()
.chain(self.private_key_tree.account_id_map.keys().copied())
}
}
impl Default for NSSAUserData {
fn default() -> Self {
Self::new_with_accounts(
HashMap::new(),
HashMap::new(),
BTreeMap::new(),
BTreeMap::new(),
KeyTreePublic::new(&SeedHolder::new_mnemonic("default".to_string())),
KeyTreePrivate::new(&SeedHolder::new_mnemonic("default".to_string())),
)
@ -198,16 +208,13 @@ mod tests {
let (account_id_private, _) = user_data
.generate_new_privacy_preserving_transaction_key_chain(Some(ChainIndex::root()));
let is_key_chain_generated = user_data.get_private_account(&account_id_private).is_some();
let is_key_chain_generated = user_data.get_private_account(account_id_private).is_some();
assert!(is_key_chain_generated);
let account_id_private_str = account_id_private.to_string();
println!("{account_id_private_str:#?}");
let key_chain = &user_data
.get_private_account(&account_id_private)
.unwrap()
.0;
let key_chain = &user_data.get_private_account(account_id_private).unwrap().0;
println!("{key_chain:#?}");
}
}

View File

@ -8,12 +8,12 @@ license = { workspace = true }
risc0-zkvm.workspace = true
borsh.workspace = true
serde.workspace = true
serde_with.workspace = true
thiserror.workspace = true
bytemuck.workspace = true
base58.workspace = true
k256 = { workspace = true, optional = true }
base58 = { workspace = true, optional = true }
anyhow = { workspace = true, optional = true }
chacha20 = { version = "0.9", default-features = false }
[dev-dependencies]
@ -21,4 +21,4 @@ serde_json.workspace = true
[features]
default = []
host = ["dep:k256", "dep:base58", "dep:anyhow"]
host = ["dep:k256", "dep:anyhow"]

View File

@ -1,11 +1,10 @@
#[cfg(feature = "host")]
use std::{fmt::Display, str::FromStr};
#[cfg(feature = "host")]
use base58::{FromBase58, ToBase58};
use borsh::{BorshDeserialize, BorshSerialize};
pub use data::Data;
use serde::{Deserialize, Serialize};
use serde_with::{DeserializeFromStr, SerializeDisplay};
use crate::program::ProgramId;
@ -47,8 +46,8 @@ impl AccountWithMetadata {
Default,
Copy,
Clone,
Serialize,
Deserialize,
SerializeDisplay,
DeserializeFromStr,
PartialEq,
Eq,
Hash,
@ -80,23 +79,19 @@ impl AsRef<[u8]> for AccountId {
}
}
#[cfg(feature = "host")]
#[derive(Debug, thiserror::Error)]
pub enum AccountIdError {
#[error("invalid base58")]
InvalidBase58(#[from] anyhow::Error),
#[error("invalid base58: {0:?}")]
InvalidBase58(base58::FromBase58Error),
#[error("invalid length: expected 32 bytes, got {0}")]
InvalidLength(usize),
}
#[cfg(feature = "host")]
impl FromStr for AccountId {
type Err = AccountIdError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let bytes = s
.from_base58()
.map_err(|err| anyhow::anyhow!("Invalid base58 err {err:?}"))?;
let bytes = s.from_base58().map_err(AccountIdError::InvalidBase58)?;
if bytes.len() != 32 {
return Err(AccountIdError::InvalidLength(bytes.len()));
}
@ -106,7 +101,6 @@ impl FromStr for AccountId {
}
}
#[cfg(feature = "host")]
impl Display for AccountId {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.value.to_base58())

View File

@ -81,7 +81,7 @@ impl PrivacyPreservingTransaction {
let signer_account_ids = self.signer_account_ids();
// Check nonces corresponds to the current nonces on the public state.
for (account_id, nonce) in signer_account_ids.iter().zip(&message.nonces) {
let current_nonce = state.get_account_by_id(account_id).nonce;
let current_nonce = state.get_account_by_id(*account_id).nonce;
if current_nonce != *nonce {
return Err(NssaError::InvalidInput("Nonce mismatch".into()));
}
@ -93,7 +93,7 @@ impl PrivacyPreservingTransaction {
.iter()
.map(|account_id| {
AccountWithMetadata::new(
state.get_account_by_id(account_id),
state.get_account_by_id(*account_id),
signer_account_ids.contains(account_id),
*account_id,
)

View File

@ -83,7 +83,7 @@ impl PublicTransaction {
let signer_account_ids = self.signer_account_ids();
// Check nonces corresponds to the current nonces on the public state.
for (account_id, nonce) in signer_account_ids.iter().zip(&message.nonces) {
let current_nonce = state.get_account_by_id(account_id).nonce;
let current_nonce = state.get_account_by_id(*account_id).nonce;
if current_nonce != *nonce {
return Err(NssaError::InvalidInput("Nonce mismatch".into()));
}
@ -95,7 +95,7 @@ impl PublicTransaction {
.iter()
.map(|account_id| {
AccountWithMetadata::new(
state.get_account_by_id(account_id),
state.get_account_by_id(*account_id),
signer_account_ids.contains(account_id),
*account_id,
)
@ -147,7 +147,7 @@ impl PublicTransaction {
let expected_pre = state_diff
.get(&account_id)
.cloned()
.unwrap_or_else(|| state.get_account_by_id(&account_id));
.unwrap_or_else(|| state.get_account_by_id(account_id));
if pre.account != expected_pre {
return Err(NssaError::InvalidProgramBehavior);
}
@ -202,7 +202,7 @@ impl PublicTransaction {
// Check that all modified uninitialized accounts where claimed
for post in state_diff.iter().filter_map(|(account_id, post)| {
let pre = state.get_account_by_id(account_id);
let pre = state.get_account_by_id(*account_id);
if pre.program_owner != DEFAULT_PROGRAM_ID {
return None;
}

View File

@ -221,9 +221,9 @@ impl V02State {
self.public_state.entry(account_id).or_default()
}
pub fn get_account_by_id(&self, account_id: &AccountId) -> Account {
pub fn get_account_by_id(&self, account_id: AccountId) -> Account {
self.public_state
.get(account_id)
.get(&account_id)
.cloned()
.unwrap_or(Account::default())
}
@ -417,7 +417,7 @@ pub mod tests {
let state = V02State::new_with_genesis_accounts(&initial_data, &[]);
let expected_account = state.public_state.get(&account_id).unwrap();
let account = state.get_account_by_id(&account_id);
let account = state.get_account_by_id(account_id);
assert_eq!(&account, expected_account);
}
@ -428,7 +428,7 @@ pub mod tests {
let state = V02State::new_with_genesis_accounts(&[], &[]);
let expected_account = Account::default();
let account = state.get_account_by_id(&addr2);
let account = state.get_account_by_id(addr2);
assert_eq!(account, expected_account);
}
@ -450,16 +450,16 @@ pub mod tests {
let mut state = V02State::new_with_genesis_accounts(&initial_data, &[]);
let from = account_id;
let to = AccountId::new([2; 32]);
assert_eq!(state.get_account_by_id(&to), Account::default());
assert_eq!(state.get_account_by_id(to), Account::default());
let balance_to_move = 5;
let tx = transfer_transaction(from, key, 0, to, balance_to_move);
state.transition_from_public_transaction(&tx).unwrap();
assert_eq!(state.get_account_by_id(&from).balance, 95);
assert_eq!(state.get_account_by_id(&to).balance, 5);
assert_eq!(state.get_account_by_id(&from).nonce, 1);
assert_eq!(state.get_account_by_id(&to).nonce, 0);
assert_eq!(state.get_account_by_id(from).balance, 95);
assert_eq!(state.get_account_by_id(to).balance, 5);
assert_eq!(state.get_account_by_id(from).nonce, 1);
assert_eq!(state.get_account_by_id(to).nonce, 0);
}
#[test]
@ -472,16 +472,16 @@ pub mod tests {
let from_key = key;
let to = AccountId::new([2; 32]);
let balance_to_move = 101;
assert!(state.get_account_by_id(&from).balance < balance_to_move);
assert!(state.get_account_by_id(from).balance < balance_to_move);
let tx = transfer_transaction(from, from_key, 0, to, balance_to_move);
let result = state.transition_from_public_transaction(&tx);
assert!(matches!(result, Err(NssaError::ProgramExecutionFailed(_))));
assert_eq!(state.get_account_by_id(&from).balance, 100);
assert_eq!(state.get_account_by_id(&to).balance, 0);
assert_eq!(state.get_account_by_id(&from).nonce, 0);
assert_eq!(state.get_account_by_id(&to).nonce, 0);
assert_eq!(state.get_account_by_id(from).balance, 100);
assert_eq!(state.get_account_by_id(to).balance, 0);
assert_eq!(state.get_account_by_id(from).nonce, 0);
assert_eq!(state.get_account_by_id(to).nonce, 0);
}
#[test]
@ -495,16 +495,16 @@ pub mod tests {
let from = account_id2;
let from_key = key2;
let to = account_id1;
assert_ne!(state.get_account_by_id(&to), Account::default());
assert_ne!(state.get_account_by_id(to), Account::default());
let balance_to_move = 8;
let tx = transfer_transaction(from, from_key, 0, to, balance_to_move);
state.transition_from_public_transaction(&tx).unwrap();
assert_eq!(state.get_account_by_id(&from).balance, 192);
assert_eq!(state.get_account_by_id(&to).balance, 108);
assert_eq!(state.get_account_by_id(&from).nonce, 1);
assert_eq!(state.get_account_by_id(&to).nonce, 0);
assert_eq!(state.get_account_by_id(from).balance, 192);
assert_eq!(state.get_account_by_id(to).balance, 108);
assert_eq!(state.get_account_by_id(from).nonce, 1);
assert_eq!(state.get_account_by_id(to).nonce, 0);
}
#[test]
@ -524,12 +524,12 @@ pub mod tests {
let tx = transfer_transaction(account_id2, key2, 0, account_id3, balance_to_move);
state.transition_from_public_transaction(&tx).unwrap();
assert_eq!(state.get_account_by_id(&account_id1).balance, 95);
assert_eq!(state.get_account_by_id(&account_id2).balance, 2);
assert_eq!(state.get_account_by_id(&account_id3).balance, 3);
assert_eq!(state.get_account_by_id(&account_id1).nonce, 1);
assert_eq!(state.get_account_by_id(&account_id2).nonce, 1);
assert_eq!(state.get_account_by_id(&account_id3).nonce, 0);
assert_eq!(state.get_account_by_id(account_id1).balance, 95);
assert_eq!(state.get_account_by_id(account_id2).balance, 2);
assert_eq!(state.get_account_by_id(account_id3).balance, 3);
assert_eq!(state.get_account_by_id(account_id1).nonce, 1);
assert_eq!(state.get_account_by_id(account_id2).nonce, 1);
assert_eq!(state.get_account_by_id(account_id3).nonce, 0);
}
impl V02State {
@ -656,7 +656,7 @@ pub mod tests {
let mut state =
V02State::new_with_genesis_accounts(&initial_data, &[]).with_test_programs();
let account_id = AccountId::new([1; 32]);
let account = state.get_account_by_id(&account_id);
let account = state.get_account_by_id(account_id);
// Assert the target account only differs from the default account in the program owner
// field
assert_ne!(account.program_owner, Account::default().program_owner);
@ -681,7 +681,7 @@ pub mod tests {
.with_test_programs()
.with_non_default_accounts_but_default_program_owners();
let account_id = AccountId::new([255; 32]);
let account = state.get_account_by_id(&account_id);
let account = state.get_account_by_id(account_id);
// Assert the target account only differs from the default account in balance field
assert_eq!(account.program_owner, Account::default().program_owner);
assert_ne!(account.balance, Account::default().balance);
@ -705,7 +705,7 @@ pub mod tests {
.with_test_programs()
.with_non_default_accounts_but_default_program_owners();
let account_id = AccountId::new([254; 32]);
let account = state.get_account_by_id(&account_id);
let account = state.get_account_by_id(account_id);
// Assert the target account only differs from the default account in nonce field
assert_eq!(account.program_owner, Account::default().program_owner);
assert_eq!(account.balance, Account::default().balance);
@ -729,7 +729,7 @@ pub mod tests {
.with_test_programs()
.with_non_default_accounts_but_default_program_owners();
let account_id = AccountId::new([253; 32]);
let account = state.get_account_by_id(&account_id);
let account = state.get_account_by_id(account_id);
// Assert the target account only differs from the default account in data field
assert_eq!(account.program_owner, Account::default().program_owner);
assert_eq!(account.balance, Account::default().balance);
@ -756,7 +756,7 @@ pub mod tests {
let balance_to_move: u128 = 1;
let program_id = Program::simple_balance_transfer().id();
assert_ne!(
state.get_account_by_id(&sender_account_id).program_owner,
state.get_account_by_id(sender_account_id).program_owner,
program_id
);
let message = public_transaction::Message::try_new(
@ -783,9 +783,9 @@ pub mod tests {
let account_id = AccountId::new([255; 32]);
let program_id = Program::data_changer().id();
assert_ne!(state.get_account_by_id(&account_id), Account::default());
assert_ne!(state.get_account_by_id(account_id), Account::default());
assert_ne!(
state.get_account_by_id(&account_id).program_owner,
state.get_account_by_id(account_id).program_owner,
program_id
);
let message =
@ -826,11 +826,11 @@ pub mod tests {
let program_id = Program::burner().id();
let account_id = AccountId::new([252; 32]);
assert_eq!(
state.get_account_by_id(&account_id).program_owner,
state.get_account_by_id(account_id).program_owner,
program_id
);
let balance_to_burn: u128 = 1;
assert!(state.get_account_by_id(&account_id).balance > balance_to_burn);
assert!(state.get_account_by_id(account_id).balance > balance_to_burn);
let message = public_transaction::Message::try_new(
program_id,
@ -898,7 +898,7 @@ pub mod tests {
state: &V02State,
) -> PrivacyPreservingTransaction {
let sender = AccountWithMetadata::new(
state.get_account_by_id(&sender_keys.account_id()),
state.get_account_by_id(sender_keys.account_id()),
true,
sender_keys.account_id(),
);
@ -1002,7 +1002,7 @@ pub mod tests {
let sender_pre =
AccountWithMetadata::new(sender_private_account.clone(), true, &sender_keys.npk());
let recipient_pre = AccountWithMetadata::new(
state.get_account_by_id(recipient_account_id),
state.get_account_by_id(*recipient_account_id),
false,
*recipient_account_id,
);
@ -1054,7 +1054,7 @@ pub mod tests {
);
let expected_sender_post = {
let mut this = state.get_account_by_id(&sender_keys.account_id());
let mut this = state.get_account_by_id(sender_keys.account_id());
this.balance -= balance_to_move;
this.nonce += 1;
this
@ -1067,12 +1067,12 @@ pub mod tests {
.transition_from_privacy_preserving_transaction(&tx)
.unwrap();
let sender_post = state.get_account_by_id(&sender_keys.account_id());
let sender_post = state.get_account_by_id(sender_keys.account_id());
assert_eq!(sender_post, expected_sender_post);
assert!(state.private_state.0.contains(&expected_new_commitment));
assert_eq!(
state.get_account_by_id(&sender_keys.account_id()).balance,
state.get_account_by_id(sender_keys.account_id()).balance,
200 - balance_to_move
);
}
@ -1163,7 +1163,7 @@ pub mod tests {
let balance_to_move = 37;
let expected_recipient_post = {
let mut this = state.get_account_by_id(&recipient_keys.account_id());
let mut this = state.get_account_by_id(recipient_keys.account_id());
this.balance += balance_to_move;
this
};
@ -1199,15 +1199,13 @@ pub mod tests {
.transition_from_privacy_preserving_transaction(&tx)
.unwrap();
let recipient_post = state.get_account_by_id(&recipient_keys.account_id());
let recipient_post = state.get_account_by_id(recipient_keys.account_id());
assert_eq!(recipient_post, expected_recipient_post);
assert!(state.private_state.0.contains(&sender_pre_commitment));
assert!(state.private_state.0.contains(&expected_new_commitment));
assert!(state.private_state.1.contains(&expected_new_nullifier));
assert_eq!(
state
.get_account_by_id(&recipient_keys.account_id())
.balance,
state.get_account_by_id(recipient_keys.account_id()).balance,
recipient_initial_balance + balance_to_move
);
}
@ -2227,7 +2225,7 @@ pub mod tests {
let amount: u128 = 37;
// Check the recipient is an uninitialized account
assert_eq!(state.get_account_by_id(&to), Account::default());
assert_eq!(state.get_account_by_id(to), Account::default());
let expected_recipient_post = Account {
program_owner: program.id(),
@ -2243,7 +2241,7 @@ pub mod tests {
state.transition_from_public_transaction(&tx).unwrap();
let recipient_post = state.get_account_by_id(&to);
let recipient_post = state.get_account_by_id(to);
assert_eq!(recipient_post, expected_recipient_post);
}
@ -2286,8 +2284,8 @@ pub mod tests {
state.transition_from_public_transaction(&tx).unwrap();
let from_post = state.get_account_by_id(&from);
let to_post = state.get_account_by_id(&to);
let from_post = state.get_account_by_id(from);
let to_post = state.get_account_by_id(to);
// The `chain_caller` program calls the program twice
assert_eq!(from_post.balance, initial_balance - 2 * amount);
assert_eq!(to_post, expected_to_post);
@ -3245,13 +3243,13 @@ pub mod tests {
let tx = PublicTransaction::new(message, witness_set);
state.transition_from_public_transaction(&tx).unwrap();
let pool_post = state.get_account_by_id(&IdForTests::pool_definition_id());
let vault_a_post = state.get_account_by_id(&IdForTests::vault_a_id());
let vault_b_post = state.get_account_by_id(&IdForTests::vault_b_id());
let token_lp_post = state.get_account_by_id(&IdForTests::token_lp_definition_id());
let user_token_a_post = state.get_account_by_id(&IdForTests::user_token_a_id());
let user_token_b_post = state.get_account_by_id(&IdForTests::user_token_b_id());
let user_token_lp_post = state.get_account_by_id(&IdForTests::user_token_lp_id());
let pool_post = state.get_account_by_id(IdForTests::pool_definition_id());
let vault_a_post = state.get_account_by_id(IdForTests::vault_a_id());
let vault_b_post = state.get_account_by_id(IdForTests::vault_b_id());
let token_lp_post = state.get_account_by_id(IdForTests::token_lp_definition_id());
let user_token_a_post = state.get_account_by_id(IdForTests::user_token_a_id());
let user_token_b_post = state.get_account_by_id(IdForTests::user_token_b_id());
let user_token_lp_post = state.get_account_by_id(IdForTests::user_token_lp_id());
let expected_pool = AccountForTests::pool_definition_remove();
let expected_vault_a = AccountForTests::vault_a_remove();
@ -3325,13 +3323,13 @@ pub mod tests {
let tx = PublicTransaction::new(message, witness_set);
state.transition_from_public_transaction(&tx).unwrap();
let pool_post = state.get_account_by_id(&IdForTests::pool_definition_id());
let vault_a_post = state.get_account_by_id(&IdForTests::vault_a_id());
let vault_b_post = state.get_account_by_id(&IdForTests::vault_b_id());
let token_lp_post = state.get_account_by_id(&IdForTests::token_lp_definition_id());
let user_token_a_post = state.get_account_by_id(&IdForTests::user_token_a_id());
let user_token_b_post = state.get_account_by_id(&IdForTests::user_token_b_id());
let user_token_lp_post = state.get_account_by_id(&IdForTests::user_token_lp_id());
let pool_post = state.get_account_by_id(IdForTests::pool_definition_id());
let vault_a_post = state.get_account_by_id(IdForTests::vault_a_id());
let vault_b_post = state.get_account_by_id(IdForTests::vault_b_id());
let token_lp_post = state.get_account_by_id(IdForTests::token_lp_definition_id());
let user_token_a_post = state.get_account_by_id(IdForTests::user_token_a_id());
let user_token_b_post = state.get_account_by_id(IdForTests::user_token_b_id());
let user_token_lp_post = state.get_account_by_id(IdForTests::user_token_lp_id());
let expected_pool = AccountForTests::pool_definition_new_init();
let expected_vault_a = AccountForTests::vault_a_init();
@ -3409,13 +3407,13 @@ pub mod tests {
let tx = PublicTransaction::new(message, witness_set);
state.transition_from_public_transaction(&tx).unwrap();
let pool_post = state.get_account_by_id(&IdForTests::pool_definition_id());
let vault_a_post = state.get_account_by_id(&IdForTests::vault_a_id());
let vault_b_post = state.get_account_by_id(&IdForTests::vault_b_id());
let token_lp_post = state.get_account_by_id(&IdForTests::token_lp_definition_id());
let user_token_a_post = state.get_account_by_id(&IdForTests::user_token_a_id());
let user_token_b_post = state.get_account_by_id(&IdForTests::user_token_b_id());
let user_token_lp_post = state.get_account_by_id(&IdForTests::user_token_lp_id());
let pool_post = state.get_account_by_id(IdForTests::pool_definition_id());
let vault_a_post = state.get_account_by_id(IdForTests::vault_a_id());
let vault_b_post = state.get_account_by_id(IdForTests::vault_b_id());
let token_lp_post = state.get_account_by_id(IdForTests::token_lp_definition_id());
let user_token_a_post = state.get_account_by_id(IdForTests::user_token_a_id());
let user_token_b_post = state.get_account_by_id(IdForTests::user_token_b_id());
let user_token_lp_post = state.get_account_by_id(IdForTests::user_token_lp_id());
let expected_pool = AccountForTests::pool_definition_init();
let expected_vault_a = AccountForTests::vault_a_init();
@ -3481,13 +3479,13 @@ pub mod tests {
let tx = PublicTransaction::new(message, witness_set);
state.transition_from_public_transaction(&tx).unwrap();
let pool_post = state.get_account_by_id(&IdForTests::pool_definition_id());
let vault_a_post = state.get_account_by_id(&IdForTests::vault_a_id());
let vault_b_post = state.get_account_by_id(&IdForTests::vault_b_id());
let token_lp_post = state.get_account_by_id(&IdForTests::token_lp_definition_id());
let user_token_a_post = state.get_account_by_id(&IdForTests::user_token_a_id());
let user_token_b_post = state.get_account_by_id(&IdForTests::user_token_b_id());
let user_token_lp_post = state.get_account_by_id(&IdForTests::user_token_lp_id());
let pool_post = state.get_account_by_id(IdForTests::pool_definition_id());
let vault_a_post = state.get_account_by_id(IdForTests::vault_a_id());
let vault_b_post = state.get_account_by_id(IdForTests::vault_b_id());
let token_lp_post = state.get_account_by_id(IdForTests::token_lp_definition_id());
let user_token_a_post = state.get_account_by_id(IdForTests::user_token_a_id());
let user_token_b_post = state.get_account_by_id(IdForTests::user_token_b_id());
let user_token_lp_post = state.get_account_by_id(IdForTests::user_token_lp_id());
let expected_pool = AccountForTests::pool_definition_new_init();
let expected_vault_a = AccountForTests::vault_a_init();
@ -3544,13 +3542,13 @@ pub mod tests {
let tx = PublicTransaction::new(message, witness_set);
state.transition_from_public_transaction(&tx).unwrap();
let pool_post = state.get_account_by_id(&IdForTests::pool_definition_id());
let vault_a_post = state.get_account_by_id(&IdForTests::vault_a_id());
let vault_b_post = state.get_account_by_id(&IdForTests::vault_b_id());
let token_lp_post = state.get_account_by_id(&IdForTests::token_lp_definition_id());
let user_token_a_post = state.get_account_by_id(&IdForTests::user_token_a_id());
let user_token_b_post = state.get_account_by_id(&IdForTests::user_token_b_id());
let user_token_lp_post = state.get_account_by_id(&IdForTests::user_token_lp_id());
let pool_post = state.get_account_by_id(IdForTests::pool_definition_id());
let vault_a_post = state.get_account_by_id(IdForTests::vault_a_id());
let vault_b_post = state.get_account_by_id(IdForTests::vault_b_id());
let token_lp_post = state.get_account_by_id(IdForTests::token_lp_definition_id());
let user_token_a_post = state.get_account_by_id(IdForTests::user_token_a_id());
let user_token_b_post = state.get_account_by_id(IdForTests::user_token_b_id());
let user_token_lp_post = state.get_account_by_id(IdForTests::user_token_lp_id());
let expected_pool = AccountForTests::pool_definition_add();
let expected_vault_a = AccountForTests::vault_a_add();
@ -3601,11 +3599,11 @@ pub mod tests {
let tx = PublicTransaction::new(message, witness_set);
state.transition_from_public_transaction(&tx).unwrap();
let pool_post = state.get_account_by_id(&IdForTests::pool_definition_id());
let vault_a_post = state.get_account_by_id(&IdForTests::vault_a_id());
let vault_b_post = state.get_account_by_id(&IdForTests::vault_b_id());
let user_token_a_post = state.get_account_by_id(&IdForTests::user_token_a_id());
let user_token_b_post = state.get_account_by_id(&IdForTests::user_token_b_id());
let pool_post = state.get_account_by_id(IdForTests::pool_definition_id());
let vault_a_post = state.get_account_by_id(IdForTests::vault_a_id());
let vault_b_post = state.get_account_by_id(IdForTests::vault_b_id());
let user_token_a_post = state.get_account_by_id(IdForTests::user_token_a_id());
let user_token_b_post = state.get_account_by_id(IdForTests::user_token_b_id());
let expected_pool = AccountForTests::pool_definition_swap_1();
let expected_vault_a = AccountForTests::vault_a_swap_1();
@ -3651,11 +3649,11 @@ pub mod tests {
let tx = PublicTransaction::new(message, witness_set);
state.transition_from_public_transaction(&tx).unwrap();
let pool_post = state.get_account_by_id(&IdForTests::pool_definition_id());
let vault_a_post = state.get_account_by_id(&IdForTests::vault_a_id());
let vault_b_post = state.get_account_by_id(&IdForTests::vault_b_id());
let user_token_a_post = state.get_account_by_id(&IdForTests::user_token_a_id());
let user_token_b_post = state.get_account_by_id(&IdForTests::user_token_b_id());
let pool_post = state.get_account_by_id(IdForTests::pool_definition_id());
let vault_a_post = state.get_account_by_id(IdForTests::vault_a_id());
let vault_b_post = state.get_account_by_id(IdForTests::vault_b_id());
let user_token_a_post = state.get_account_by_id(IdForTests::user_token_a_id());
let user_token_b_post = state.get_account_by_id(IdForTests::user_token_b_id());
let expected_pool = AccountForTests::pool_definition_swap_2();
let expected_vault_a = AccountForTests::vault_a_swap_2();
@ -3706,8 +3704,8 @@ pub mod tests {
state.transition_from_public_transaction(&tx).unwrap();
let from_post = state.get_account_by_id(&from);
let to_post = state.get_account_by_id(&to);
let from_post = state.get_account_by_id(from);
let to_post = state.get_account_by_id(to);
assert_eq!(from_post.balance, initial_balance - amount);
assert_eq!(to_post, expected_to_post);
}
@ -3732,7 +3730,7 @@ pub mod tests {
let amount: u128 = 37;
// Check the recipient is an uninitialized account
assert_eq!(state.get_account_by_id(&to), Account::default());
assert_eq!(state.get_account_by_id(to), Account::default());
let expected_to_post = Account {
// The expected program owner is the authenticated transfer program
@ -3762,8 +3760,8 @@ pub mod tests {
state.transition_from_public_transaction(&tx).unwrap();
let from_post = state.get_account_by_id(&from);
let to_post = state.get_account_by_id(&to);
let from_post = state.get_account_by_id(from);
let to_post = state.get_account_by_id(to);
assert_eq!(from_post.balance, initial_balance - amount);
assert_eq!(to_post, expected_to_post);
}
@ -3958,7 +3956,7 @@ pub mod tests {
let tx = PublicTransaction::new(message, witness_set);
state.transition_from_public_transaction(&tx).unwrap();
let winner_token_holding_post = state.get_account_by_id(&winner_token_holding_id);
let winner_token_holding_post = state.get_account_by_id(winner_token_holding_id);
assert_eq!(
winner_token_holding_post,
expected_winner_token_holding_post
@ -4015,13 +4013,12 @@ pub mod tests {
let balance_to_move: u128 = 4;
let sender =
AccountWithMetadata::new(state.get_account_by_id(&sender_id.clone()), true, sender_id);
let sender = AccountWithMetadata::new(state.get_account_by_id(sender_id), true, sender_id);
let sender_nonce = sender.account.nonce;
let _recipient =
AccountWithMetadata::new(state.get_account_by_id(&recipient_id), false, sender_id);
AccountWithMetadata::new(state.get_account_by_id(recipient_id), false, sender_id);
let message = public_transaction::Message::try_new(
Program::modified_transfer_program().id(),
@ -4036,18 +4033,18 @@ pub mod tests {
let res = state.transition_from_public_transaction(&tx);
assert!(matches!(res, Err(NssaError::InvalidProgramBehavior)));
let sender_post = state.get_account_by_id(&sender_id);
let recipient_post = state.get_account_by_id(&recipient_id);
let sender_post = state.get_account_by_id(sender_id);
let recipient_post = state.get_account_by_id(recipient_id);
let expected_sender_post = {
let mut this = state.get_account_by_id(&sender_id);
let mut this = state.get_account_by_id(sender_id);
this.balance = sender_init_balance;
this.nonce = 0;
this
};
let expected_recipient_post = {
let mut this = state.get_account_by_id(&sender_id);
let mut this = state.get_account_by_id(sender_id);
this.balance = recipient_init_balance;
this.nonce = 0;
this
@ -4217,7 +4214,7 @@ pub mod tests {
// Should succeed - no changes made, no claim needed
assert!(result.is_ok());
// Account should remain default/unclaimed
assert_eq!(state.get_account_by_id(&account_id), Account::default());
assert_eq!(state.get_account_by_id(account_id), Account::default());
}
#[test]

View File

@ -7,4 +7,4 @@ license = { workspace = true }
[dependencies]
nssa_core.workspace = true
token_core.workspace = true
amm_core.workspace = true
amm_core.workspace = true

View File

@ -8,4 +8,4 @@ license = { workspace = true }
nssa_core.workspace = true
serde.workspace = true
risc0-zkvm.workspace = true
borsh.workspace = true
borsh.workspace = true

View File

@ -10,6 +10,7 @@ nssa_core.workspace = true
common.workspace = true
storage.workspace = true
mempool.workspace = true
bedrock_client.workspace = true
base58.workspace = true
anyhow.workspace = true
@ -19,16 +20,19 @@ tempfile.workspace = true
chrono.workspace = true
log.workspace = true
tokio = { workspace = true, features = ["rt-multi-thread", "macros"] }
bedrock_client.workspace = true
logos-blockchain-key-management-system-service.workspace = true
logos-blockchain-core.workspace = true
rand.workspace = true
reqwest.workspace = true
borsh.workspace = true
url.workspace = true
jsonrpsee = { workspace = true, features = ["ws-client"] }
[features]
default = []
testnet = []
# Generate mock external clients implementations for testing
mock = []
[dev-dependencies]
futures.workspace = true

View File

@ -1,51 +1,39 @@
use std::{fs, path::Path, str::FromStr};
use anyhow::{Context, Result, anyhow};
use anyhow::{Context, Result};
use bedrock_client::BedrockClient;
use common::block::Block;
pub use common::block::Block;
pub use logos_blockchain_core::mantle::{MantleTx, SignedMantleTx, ops::channel::MsgId};
use logos_blockchain_core::mantle::{
MantleTx, Op, OpProof, SignedMantleTx, Transaction, TxHash, ledger,
ops::channel::{ChannelId, MsgId, inscribe::InscriptionOp},
Op, OpProof, Transaction, TxHash, ledger,
ops::channel::{ChannelId, inscribe::InscriptionOp},
};
use logos_blockchain_key_management_system_service::keys::{
ED25519_SECRET_KEY_SIZE, Ed25519Key, Ed25519PublicKey,
};
use reqwest::Url;
pub use logos_blockchain_key_management_system_service::keys::Ed25519Key;
use logos_blockchain_key_management_system_service::keys::Ed25519PublicKey;
use crate::config::BedrockConfig;
/// A component that posts block data to logos blockchain
#[derive(Clone)]
pub struct BlockSettlementClient {
bedrock_client: BedrockClient,
bedrock_signing_key: Ed25519Key,
bedrock_channel_id: ChannelId,
}
#[expect(async_fn_in_trait, reason = "We don't care about Send/Sync here")]
pub trait BlockSettlementClientTrait: Clone {
//// Create a new client.
fn new(config: &BedrockConfig, bedrock_signing_key: Ed25519Key) -> Result<Self>;
impl BlockSettlementClient {
pub fn try_new(home: &Path, config: &BedrockConfig) -> Result<Self> {
let bedrock_signing_key = load_or_create_signing_key(&home.join("bedrock_signing_key"))
.context("Failed to load or create signing key")?;
let bedrock_url = Url::from_str(config.node_url.as_ref())
.context("Bedrock node address is not a valid url")?;
let bedrock_client =
BedrockClient::new(None, bedrock_url).context("Failed to initialize bedrock client")?;
Ok(Self {
bedrock_client,
bedrock_signing_key,
bedrock_channel_id: config.channel_id,
})
}
/// Get the bedrock channel ID used by this client.
fn bedrock_channel_id(&self) -> ChannelId;
/// Create and sign a transaction for inscribing data
pub fn create_inscribe_tx(&self, block: &Block) -> Result<(SignedMantleTx, MsgId)> {
/// Get the bedrock signing key used by this client.
fn bedrock_signing_key(&self) -> &Ed25519Key;
/// Post a transaction to the node.
async fn submit_block_to_bedrock(&self, block: &Block) -> Result<MsgId>;
/// Create and sign a transaction for inscribing data.
fn create_inscribe_tx(&self, block: &Block) -> Result<(SignedMantleTx, MsgId)> {
let inscription_data = borsh::to_vec(block)?;
let verifying_key_bytes = self.bedrock_signing_key.public_key().to_bytes();
let verifying_key_bytes = self.bedrock_signing_key().public_key().to_bytes();
let verifying_key =
Ed25519PublicKey::from_bytes(&verifying_key_bytes).expect("valid ed25519 public key");
let inscribe_op = InscriptionOp {
channel_id: self.bedrock_channel_id,
channel_id: self.bedrock_channel_id(),
inscription: inscription_data,
parent: block.bedrock_parent_id.into(),
signer: verifying_key,
@ -64,7 +52,7 @@ impl BlockSettlementClient {
let tx_hash = inscribe_tx.hash();
let signature_bytes = self
.bedrock_signing_key
.bedrock_signing_key()
.sign_payload(tx_hash.as_signing_bytes().as_ref())
.to_bytes();
let signature =
@ -79,31 +67,46 @@ impl BlockSettlementClient {
};
Ok((signed_mantle_tx, inscribe_op_id))
}
}
/// Post a transaction to the node
pub async fn submit_block_to_bedrock(&self, block: &Block) -> Result<MsgId> {
/// A component that posts block data to logos blockchain
#[derive(Clone)]
pub struct BlockSettlementClient {
bedrock_client: BedrockClient,
bedrock_signing_key: Ed25519Key,
bedrock_channel_id: ChannelId,
}
impl BlockSettlementClientTrait for BlockSettlementClient {
fn new(config: &BedrockConfig, bedrock_signing_key: Ed25519Key) -> Result<Self> {
let bedrock_client =
BedrockClient::new(config.backoff, config.node_url.clone(), config.auth.clone())
.context("Failed to initialize bedrock client")?;
Ok(Self {
bedrock_client,
bedrock_signing_key,
bedrock_channel_id: config.channel_id,
})
}
async fn submit_block_to_bedrock(&self, block: &Block) -> Result<MsgId> {
let (tx, new_msg_id) = self.create_inscribe_tx(block)?;
// Post the transaction
self.bedrock_client.post_transaction(tx).await?;
self.bedrock_client
.post_transaction(tx)
.await
.context("Failed to post transaction to Bedrock")?;
Ok(new_msg_id)
}
}
/// Load signing key from file or generate a new one if it doesn't exist
fn load_or_create_signing_key(path: &Path) -> Result<Ed25519Key> {
if path.exists() {
let key_bytes = fs::read(path)?;
let key_array: [u8; ED25519_SECRET_KEY_SIZE] = key_bytes
.try_into()
.map_err(|_| anyhow!("Found key with incorrect length"))?;
Ok(Ed25519Key::from_bytes(&key_array))
} else {
let mut key_bytes = [0u8; ED25519_SECRET_KEY_SIZE];
rand::RngCore::fill_bytes(&mut rand::thread_rng(), &mut key_bytes);
fs::write(path, key_bytes)?;
Ok(Ed25519Key::from_bytes(&key_bytes))
fn bedrock_channel_id(&self) -> ChannelId {
self.bedrock_channel_id
}
fn bedrock_signing_key(&self) -> &Ed25519Key {
&self.bedrock_signing_key
}
}

View File

@ -1,7 +1,7 @@
use std::{collections::HashMap, path::Path};
use anyhow::Result;
use common::{HashType, block::Block, transaction::EncodedTransaction};
use common::{HashType, block::Block, transaction::NSSATransaction};
use nssa::V02State;
use storage::RocksDBIO;
@ -20,7 +20,7 @@ impl SequencerStore {
/// ATTENTION: Will overwrite genesis block.
pub fn open_db_with_genesis(
location: &Path,
genesis_block: Option<Block>,
genesis_block: Option<&Block>,
signing_key: nssa::PrivateKey,
) -> Result<Self> {
let tx_hash_to_block_map = if let Some(block) = &genesis_block {
@ -55,7 +55,7 @@ impl SequencerStore {
}
/// Returns the transaction corresponding to the given hash, if it exists in the blockchain.
pub fn get_transaction_by_hash(&self, hash: HashType) -> Option<EncodedTransaction> {
pub fn get_transaction_by_hash(&self, hash: HashType) -> Option<NSSATransaction> {
let block_id = self.tx_hash_to_block_map.get(&hash);
let block = block_id.map(|&id| self.get_block_at_id(id));
if let Some(Ok(block)) = block {
@ -68,7 +68,7 @@ impl SequencerStore {
None
}
pub fn insert(&mut self, tx: &EncodedTransaction, block_id: u64) {
pub fn insert(&mut self, tx: &NSSATransaction, block_id: u64) {
self.tx_hash_to_block_map.insert(tx.hash(), block_id);
}
@ -84,8 +84,8 @@ impl SequencerStore {
self.dbio.get_all_blocks().map(|res| Ok(res?))
}
pub(crate) fn update(&mut self, block: Block, state: &V02State) -> Result<()> {
let new_transactions_map = block_to_transactions_map(&block);
pub(crate) fn update(&mut self, block: &Block, state: &V02State) -> Result<()> {
let new_transactions_map = block_to_transactions_map(block);
self.dbio.atomic_update(block, state)?;
self.tx_hash_to_block_map.extend(new_transactions_map);
Ok(())
@ -121,7 +121,7 @@ mod tests {
let genesis_block_hashable_data = HashableBlockData {
block_id: 0,
prev_block_hash: [0; 32],
prev_block_hash: HashType([0; 32]),
timestamp: 0,
transactions: vec![],
};
@ -129,7 +129,7 @@ mod tests {
let genesis_block = genesis_block_hashable_data.into_pending_block(&signing_key, [0; 32]);
// Start an empty node store
let mut node_store =
SequencerStore::open_db_with_genesis(path, Some(genesis_block), signing_key).unwrap();
SequencerStore::open_db_with_genesis(path, Some(&genesis_block), signing_key).unwrap();
let tx = common::test_utils::produce_dummy_empty_transaction();
let block = common::test_utils::produce_dummy_block(1, None, vec![tx.clone()]);
@ -139,7 +139,7 @@ mod tests {
assert_eq!(None, retrieved_tx);
// Add the block with the transaction
let dummy_state = V02State::new_with_genesis_accounts(&[], &[]);
node_store.update(block, &dummy_state).unwrap();
node_store.update(&block, &dummy_state).unwrap();
// Try again
let retrieved_tx = node_store.get_transaction_by_hash(tx.hash());
assert_eq!(Some(tx), retrieved_tx);

View File

@ -5,15 +5,17 @@ use std::{
};
use anyhow::Result;
use common::sequencer_client::BasicAuth;
pub use bedrock_client::BackoffConfig;
use common::config::BasicAuth;
use logos_blockchain_core::mantle::ops::channel::ChannelId;
use nssa::AccountId;
use serde::{Deserialize, Serialize};
use url::Url;
#[derive(Debug, Serialize, Deserialize, Clone)]
/// Helperstruct for account serialization
pub struct AccountInitialData {
/// Hex encoded account id
pub account_id: String,
pub account_id: AccountId,
pub balance: u128,
}
@ -52,15 +54,20 @@ pub struct SequencerConfig {
/// Sequencer own signing key
pub signing_key: [u8; 32],
/// Bedrock configuration options
pub bedrock_config: Option<BedrockConfig>,
pub bedrock_config: BedrockConfig,
/// Indexer RPC URL
pub indexer_rpc_url: Url,
}
#[derive(Clone, Serialize, Deserialize)]
pub struct BedrockConfig {
/// Fibonacci backoff retry strategy configuration
#[serde(default)]
pub backoff: BackoffConfig,
/// Bedrock channel ID
pub channel_id: ChannelId,
/// Bedrock Url
pub node_url: String,
pub node_url: Url,
/// Bedrock auth
pub auth: Option<BasicAuth>,
}

View File

@ -0,0 +1,31 @@
use std::{ops::Deref, sync::Arc};
use anyhow::{Context as _, Result};
pub use url::Url;
#[expect(async_fn_in_trait, reason = "We don't care about Send/Sync here")]
pub trait IndexerClientTrait: Clone {
async fn new(indexer_url: &Url) -> Result<Self>;
}
#[derive(Clone)]
pub struct IndexerClient(Arc<jsonrpsee::ws_client::WsClient>);
impl IndexerClientTrait for IndexerClient {
async fn new(indexer_url: &Url) -> Result<Self> {
let client = jsonrpsee::ws_client::WsClientBuilder::default()
.build(indexer_url)
.await
.context("Failed to create websocket client")?;
Ok(Self(Arc::new(client)))
}
}
impl Deref for IndexerClient {
type Target = jsonrpsee::ws_client::WsClient;
fn deref(&self) -> &Self::Target {
&self.0
}
}

View File

@ -1,31 +1,43 @@
use std::{fmt::Display, time::Instant};
use std::{fmt::Display, path::Path, time::Instant};
use anyhow::Result;
use anyhow::{Result, anyhow};
#[cfg(feature = "testnet")]
use common::PINATA_BASE58;
use common::{
HashType,
block::{BedrockStatus, Block, HashableBlockData, MantleMsgId},
transaction::{EncodedTransaction, NSSATransaction},
transaction::NSSATransaction,
};
use config::SequencerConfig;
use log::{info, warn};
use log::{error, info, warn};
use logos_blockchain_key_management_system_service::keys::{ED25519_SECRET_KEY_SIZE, Ed25519Key};
use mempool::{MemPool, MemPoolHandle};
use serde::{Deserialize, Serialize};
use crate::{block_settlement_client::BlockSettlementClient, block_store::SequencerStore};
use crate::{
block_settlement_client::{BlockSettlementClient, BlockSettlementClientTrait},
block_store::SequencerStore,
indexer_client::{IndexerClient, IndexerClientTrait},
};
mod block_settlement_client;
pub mod block_settlement_client;
pub mod block_store;
pub mod config;
pub mod indexer_client;
#[cfg(feature = "mock")]
pub mod mock;
pub struct SequencerCore {
pub struct SequencerCore<
BC: BlockSettlementClientTrait = BlockSettlementClient,
IC: IndexerClientTrait = IndexerClient,
> {
state: nssa::V02State,
store: SequencerStore,
mempool: MemPool<EncodedTransaction>,
mempool: MemPool<NSSATransaction>,
sequencer_config: SequencerConfig,
chain_height: u64,
block_settlement_client: Option<BlockSettlementClient>,
block_settlement_client: BC,
indexer_client: IC,
last_bedrock_msg_id: MantleMsgId,
}
@ -43,17 +55,19 @@ impl Display for TransactionMalformationError {
impl std::error::Error for TransactionMalformationError {}
impl SequencerCore {
impl<BC: BlockSettlementClientTrait, IC: IndexerClientTrait> SequencerCore<BC, IC> {
/// Starts the sequencer using the provided configuration.
/// If an existing database is found, the sequencer state is loaded from it and
/// assumed to represent the correct latest state consistent with Bedrock-finalized data.
/// If no database is found, the sequencer performs a fresh start from genesis,
/// initializing its state with the accounts defined in the configuration file.
pub fn start_from_config(config: SequencerConfig) -> (Self, MemPoolHandle<EncodedTransaction>) {
pub async fn start_from_config(
config: SequencerConfig,
) -> (Self, MemPoolHandle<NSSATransaction>) {
let hashable_data = HashableBlockData {
block_id: config.genesis_id,
transactions: vec![],
prev_block_hash: [0; 32],
prev_block_hash: HashType([0; 32]),
timestamp: 0,
};
@ -65,11 +79,12 @@ impl SequencerCore {
// as fixing this issue may require actions non-native to program scope
let store = SequencerStore::open_db_with_genesis(
&config.home.join("rocksdb"),
Some(genesis_block),
Some(&genesis_block),
signing_key,
)
.unwrap();
#[cfg_attr(not(feature = "testnet"), allow(unused_mut))]
let mut state = match store.get_nssa_state() {
Some(state) => {
info!("Found local database. Loading state and pending blocks from it.");
@ -97,7 +112,7 @@ impl SequencerCore {
let init_accs: Vec<(nssa::AccountId, u128)> = config
.initial_accounts
.iter()
.map(|acc_data| (acc_data.account_id.parse().unwrap(), acc_data.balance))
.map(|acc_data| (acc_data.account_id, acc_data.balance))
.collect();
nssa::V02State::new_with_genesis_accounts(&init_accs, &initial_commitments)
@ -108,10 +123,20 @@ impl SequencerCore {
state.add_pinata_program(PINATA_BASE58.parse().unwrap());
let (mempool, mempool_handle) = MemPool::new(config.mempool_max_size);
let block_settlement_client = config.bedrock_config.as_ref().map(|bedrock_config| {
BlockSettlementClient::try_new(&config.home, bedrock_config)
.expect("Block settlement client should be constructible")
});
let bedrock_signing_key =
load_or_create_signing_key(&config.home.join("bedrock_signing_key"))
.expect("Failed to load or create signing key");
let block_settlement_client = BC::new(&config.bedrock_config, bedrock_signing_key)
.expect("Failed to initialize Block Settlement Client");
let (_, msg_id) = block_settlement_client
.create_inscribe_tx(&genesis_block)
.expect("Inscription transaction with genesis block should be constructible");
let last_bedrock_msg_id = msg_id.into();
let indexer_client = IC::new(&config.indexer_rpc_url)
.await
.expect("Failed to create Indexer Client");
let sequencer_core = Self {
state,
@ -120,7 +145,8 @@ impl SequencerCore {
chain_height: config.genesis_id,
sequencer_config: config,
block_settlement_client,
last_bedrock_msg_id: channel_genesis_msg_id,
indexer_client,
last_bedrock_msg_id,
};
(sequencer_core, mempool_handle)
@ -145,21 +171,28 @@ impl SequencerCore {
}
pub async fn produce_new_block_and_post_to_settlement_layer(&mut self) -> Result<u64> {
let block_data = self.produce_new_block_with_mempool_transactions()?;
if let Some(client) = self.block_settlement_client.as_mut() {
let block =
block_data.into_pending_block(self.store.signing_key(), self.last_bedrock_msg_id);
let msg_id = client.submit_block_to_bedrock(&block).await?;
self.last_bedrock_msg_id = msg_id.into();
log::info!("Posted block data to Bedrock");
{
let block = self.produce_new_block_with_mempool_transactions()?;
match self
.block_settlement_client
.submit_block_to_bedrock(&block)
.await
{
Ok(msg_id) => {
self.last_bedrock_msg_id = msg_id.into();
info!("Posted block data to Bedrock, msg_id: {msg_id:?}");
}
Err(err) => {
error!("Failed to post block data to Bedrock with error: {err:#}");
}
}
}
Ok(self.chain_height)
}
/// Produces new block from transactions in mempool
pub fn produce_new_block_with_mempool_transactions(&mut self) -> Result<HashableBlockData> {
pub fn produce_new_block_with_mempool_transactions(&mut self) -> Result<Block> {
let now = Instant::now();
let new_block_height = self.chain_height + 1;
@ -167,17 +200,22 @@ impl SequencerCore {
let mut valid_transactions = vec![];
while let Some(tx) = self.mempool.pop() {
let nssa_transaction = NSSATransaction::try_from(&tx)
.map_err(|_| TransactionMalformationError::FailedToDecode { tx: tx.hash() })?;
let tx_hash = tx.hash();
match self.execute_check_transaction_on_state(tx) {
Ok(valid_tx) => {
info!("Validated transaction with hash {tx_hash}, including it in block",);
valid_transactions.push(valid_tx);
if let Ok(valid_tx) = self.execute_check_transaction_on_state(nssa_transaction) {
valid_transactions.push(valid_tx.into());
if valid_transactions.len() >= self.sequencer_config.max_num_tx_in_block {
break;
if valid_transactions.len() >= self.sequencer_config.max_num_tx_in_block {
break;
}
}
Err(err) => {
error!(
"Transaction with hash {tx_hash} failed execution check with error: {err:#?}, skipping it",
);
// TODO: Probably need to handle unsuccessful transaction execution?
}
} else {
// Probably need to handle unsuccessful transaction execution?
}
}
@ -196,7 +234,7 @@ impl SequencerCore {
.clone()
.into_pending_block(self.store.signing_key(), self.last_bedrock_msg_id);
self.store.update(block, &self.state)?;
self.store.update(&block, &self.state)?;
self.chain_height = new_block_height;
@ -215,7 +253,7 @@ impl SequencerCore {
hashable_data.transactions.len(),
now.elapsed().as_secs()
);
Ok(hashable_data)
Ok(block)
}
pub fn state(&self) -> &nssa::V02State {
@ -245,6 +283,10 @@ impl SequencerCore {
.map(|block| block.header.block_id)
.min()
{
info!(
"Clearing pending blocks up to id: {}",
last_finalized_block_id
);
(first_pending_block_id..=last_finalized_block_id)
.try_for_each(|id| self.store.delete_block_at_id(id))
} else {
@ -263,9 +305,13 @@ impl SequencerCore {
.collect())
}
pub fn block_settlement_client(&self) -> Option<BlockSettlementClient> {
pub fn block_settlement_client(&self) -> BC {
self.block_settlement_client.clone()
}
pub fn indexer_client(&self) -> IC {
self.indexer_client.clone()
}
}
// TODO: Introduce type-safe wrapper around checked transaction, e.g. AuthenticatedTransaction
@ -292,22 +338,38 @@ pub fn transaction_pre_check(
}
}
#[cfg(test)]
mod tests {
use std::pin::pin;
use base58::{FromBase58, ToBase58};
use common::test_utils::sequencer_sign_key_for_testing;
use nssa::PrivateKey;
use super::*;
use crate::config::AccountInitialData;
fn parse_unwrap_tx_body_into_nssa_tx(tx_body: EncodedTransaction) -> NSSATransaction {
NSSATransaction::try_from(&tx_body)
.map_err(|_| TransactionMalformationError::FailedToDecode { tx: tx_body.hash() })
.unwrap()
/// Load signing key from file or generate a new one if it doesn't exist
fn load_or_create_signing_key(path: &Path) -> Result<Ed25519Key> {
if path.exists() {
let key_bytes = std::fs::read(path)?;
let key_array: [u8; ED25519_SECRET_KEY_SIZE] = key_bytes
.try_into()
.map_err(|_| anyhow!("Found key with incorrect length"))?;
Ok(Ed25519Key::from_bytes(&key_array))
} else {
let mut key_bytes = [0u8; ED25519_SECRET_KEY_SIZE];
rand::RngCore::fill_bytes(&mut rand::thread_rng(), &mut key_bytes);
std::fs::write(path, key_bytes)?;
Ok(Ed25519Key::from_bytes(&key_bytes))
}
}
#[cfg(all(test, feature = "mock"))]
mod tests {
use std::{pin::pin, str::FromStr as _};
use base58::ToBase58;
use bedrock_client::BackoffConfig;
use common::{test_utils::sequencer_sign_key_for_testing, transaction::NSSATransaction};
use logos_blockchain_core::mantle::ops::channel::ChannelId;
use mempool::MemPoolHandle;
use nssa::{AccountId, PrivateKey};
use crate::{
config::{AccountInitialData, BedrockConfig, SequencerConfig},
mock::SequencerCoreWithMockClients,
transaction_pre_check,
};
fn setup_sequencer_config_variable_initial_accounts(
initial_accounts: Vec<AccountInitialData>,
@ -327,8 +389,17 @@ mod tests {
initial_accounts,
initial_commitments: vec![],
signing_key: *sequencer_sign_key_for_testing().value(),
bedrock_config: None,
bedrock_config: BedrockConfig {
backoff: BackoffConfig {
start_delay_millis: 100,
max_retries: 5,
},
channel_id: ChannelId::from([0; 32]),
node_url: "http://not-used-in-unit-tests".parse().unwrap(),
auth: None,
},
retry_pending_blocks_timeout_millis: 1000 * 60 * 4,
indexer_rpc_url: "ws://localhost:8779".parse().unwrap(),
}
}
@ -344,12 +415,12 @@ mod tests {
];
let initial_acc1 = AccountInitialData {
account_id: acc1_account_id.to_base58(),
account_id: AccountId::from_str(&acc1_account_id.to_base58()).unwrap(),
balance: 10000,
};
let initial_acc2 = AccountInitialData {
account_id: acc2_account_id.to_base58(),
account_id: AccountId::from_str(&acc2_account_id.to_base58()).unwrap(),
balance: 20000,
};
@ -366,15 +437,16 @@ mod tests {
nssa::PrivateKey::try_new([2; 32]).unwrap()
}
async fn common_setup() -> (SequencerCore, MemPoolHandle<EncodedTransaction>) {
async fn common_setup() -> (SequencerCoreWithMockClients, MemPoolHandle<NSSATransaction>) {
let config = setup_sequencer_config();
common_setup_with_config(config).await
}
async fn common_setup_with_config(
config: SequencerConfig,
) -> (SequencerCore, MemPoolHandle<EncodedTransaction>) {
let (mut sequencer, mempool_handle) = SequencerCore::start_from_config(config);
) -> (SequencerCoreWithMockClients, MemPoolHandle<NSSATransaction>) {
let (mut sequencer, mempool_handle) =
SequencerCoreWithMockClients::start_from_config(config).await;
let tx = common::test_utils::produce_dummy_empty_transaction();
mempool_handle.push(tx).await.unwrap();
@ -386,45 +458,28 @@ mod tests {
(sequencer, mempool_handle)
}
#[test]
fn test_start_from_config() {
#[tokio::test]
async fn test_start_from_config() {
let config = setup_sequencer_config();
let (sequencer, _mempool_handle) = SequencerCore::start_from_config(config.clone());
let (sequencer, _mempool_handle) =
SequencerCoreWithMockClients::start_from_config(config.clone()).await;
assert_eq!(sequencer.chain_height, config.genesis_id);
assert_eq!(sequencer.sequencer_config.max_num_tx_in_block, 10);
assert_eq!(sequencer.sequencer_config.port, 8080);
let acc1_account_id = config.initial_accounts[0]
.account_id
.clone()
.from_base58()
.unwrap()
.try_into()
.unwrap();
let acc2_account_id = config.initial_accounts[1]
.account_id
.clone()
.from_base58()
.unwrap()
.try_into()
.unwrap();
let acc1_account_id = config.initial_accounts[0].account_id;
let acc2_account_id = config.initial_accounts[1].account_id;
let balance_acc_1 = sequencer
.state
.get_account_by_id(&nssa::AccountId::new(acc1_account_id))
.balance;
let balance_acc_2 = sequencer
.state
.get_account_by_id(&nssa::AccountId::new(acc2_account_id))
.balance;
let balance_acc_1 = sequencer.state.get_account_by_id(acc1_account_id).balance;
let balance_acc_2 = sequencer.state.get_account_by_id(acc2_account_id).balance;
assert_eq!(10000, balance_acc_1);
assert_eq!(20000, balance_acc_2);
}
#[test]
fn test_start_different_intial_accounts_balances() {
#[tokio::test]
async fn test_start_different_intial_accounts_balances() {
let acc1_account_id: Vec<u8> = vec![
27, 132, 197, 86, 123, 18, 100, 64, 153, 93, 62, 213, 170, 186, 5, 101, 215, 30, 24,
52, 96, 72, 25, 255, 156, 23, 245, 233, 213, 221, 7, 143,
@ -436,55 +491,38 @@ mod tests {
];
let initial_acc1 = AccountInitialData {
account_id: acc1_account_id.to_base58(),
account_id: AccountId::from_str(&acc1_account_id.to_base58()).unwrap(),
balance: 10000,
};
let initial_acc2 = AccountInitialData {
account_id: acc2_account_id.to_base58(),
account_id: AccountId::from_str(&acc2_account_id.to_base58()).unwrap(),
balance: 20000,
};
let initial_accounts = vec![initial_acc1, initial_acc2];
let config = setup_sequencer_config_variable_initial_accounts(initial_accounts);
let (sequencer, _mempool_handle) = SequencerCore::start_from_config(config.clone());
let (sequencer, _mempool_handle) =
SequencerCoreWithMockClients::start_from_config(config.clone()).await;
let acc1_account_id = config.initial_accounts[0]
.account_id
.clone()
.from_base58()
.unwrap()
.try_into()
.unwrap();
let acc2_account_id = config.initial_accounts[1]
.account_id
.clone()
.from_base58()
.unwrap()
.try_into()
.unwrap();
let acc1_account_id = config.initial_accounts[0].account_id;
let acc2_account_id = config.initial_accounts[1].account_id;
assert_eq!(
10000,
sequencer
.state
.get_account_by_id(&nssa::AccountId::new(acc1_account_id))
.balance
sequencer.state.get_account_by_id(acc1_account_id).balance
);
assert_eq!(
20000,
sequencer
.state
.get_account_by_id(&nssa::AccountId::new(acc2_account_id))
.balance
sequencer.state.get_account_by_id(acc2_account_id).balance
);
}
#[test]
fn test_transaction_pre_check_pass() {
let tx = common::test_utils::produce_dummy_empty_transaction();
let result = transaction_pre_check(parse_unwrap_tx_body_into_nssa_tx(tx));
let result = transaction_pre_check(tx);
assert!(result.is_ok());
}
@ -493,27 +531,15 @@ mod tests {
async fn test_transaction_pre_check_native_transfer_valid() {
let (sequencer, _mempool_handle) = common_setup().await;
let acc1 = sequencer.sequencer_config.initial_accounts[0]
.account_id
.clone()
.from_base58()
.unwrap()
.try_into()
.unwrap();
let acc2 = sequencer.sequencer_config.initial_accounts[1]
.account_id
.clone()
.from_base58()
.unwrap()
.try_into()
.unwrap();
let acc1 = sequencer.sequencer_config.initial_accounts[0].account_id;
let acc2 = sequencer.sequencer_config.initial_accounts[1].account_id;
let sign_key1 = create_signing_key_for_account1();
let tx = common::test_utils::create_transaction_native_token_transfer(
acc1, 0, acc2, 10, sign_key1,
);
let result = transaction_pre_check(parse_unwrap_tx_body_into_nssa_tx(tx));
let result = transaction_pre_check(tx);
assert!(result.is_ok());
}
@ -522,20 +548,8 @@ mod tests {
async fn test_transaction_pre_check_native_transfer_other_signature() {
let (mut sequencer, _mempool_handle) = common_setup().await;
let acc1 = sequencer.sequencer_config.initial_accounts[0]
.account_id
.clone()
.from_base58()
.unwrap()
.try_into()
.unwrap();
let acc2 = sequencer.sequencer_config.initial_accounts[1]
.account_id
.clone()
.from_base58()
.unwrap()
.try_into()
.unwrap();
let acc1 = sequencer.sequencer_config.initial_accounts[0].account_id;
let acc2 = sequencer.sequencer_config.initial_accounts[1].account_id;
let sign_key2 = create_signing_key_for_account2();
@ -544,7 +558,7 @@ mod tests {
);
// Signature is valid, stateless check pass
let tx = transaction_pre_check(parse_unwrap_tx_body_into_nssa_tx(tx)).unwrap();
let tx = transaction_pre_check(tx).unwrap();
// Signature is not from sender. Execution fails
let result = sequencer.execute_check_transaction_on_state(tx);
@ -559,20 +573,8 @@ mod tests {
async fn test_transaction_pre_check_native_transfer_sent_too_much() {
let (mut sequencer, _mempool_handle) = common_setup().await;
let acc1 = sequencer.sequencer_config.initial_accounts[0]
.account_id
.clone()
.from_base58()
.unwrap()
.try_into()
.unwrap();
let acc2 = sequencer.sequencer_config.initial_accounts[1]
.account_id
.clone()
.from_base58()
.unwrap()
.try_into()
.unwrap();
let acc1 = sequencer.sequencer_config.initial_accounts[0].account_id;
let acc2 = sequencer.sequencer_config.initial_accounts[1].account_id;
let sign_key1 = create_signing_key_for_account1();
@ -580,7 +582,7 @@ mod tests {
acc1, 0, acc2, 10000000, sign_key1,
);
let result = transaction_pre_check(parse_unwrap_tx_body_into_nssa_tx(tx));
let result = transaction_pre_check(tx);
// Passed pre-check
assert!(result.is_ok());
@ -598,20 +600,8 @@ mod tests {
async fn test_transaction_execute_native_transfer() {
let (mut sequencer, _mempool_handle) = common_setup().await;
let acc1 = sequencer.sequencer_config.initial_accounts[0]
.account_id
.clone()
.from_base58()
.unwrap()
.try_into()
.unwrap();
let acc2 = sequencer.sequencer_config.initial_accounts[1]
.account_id
.clone()
.from_base58()
.unwrap()
.try_into()
.unwrap();
let acc1 = sequencer.sequencer_config.initial_accounts[0].account_id;
let acc2 = sequencer.sequencer_config.initial_accounts[1].account_id;
let sign_key1 = create_signing_key_for_account1();
@ -619,18 +609,10 @@ mod tests {
acc1, 0, acc2, 100, sign_key1,
);
sequencer
.execute_check_transaction_on_state(parse_unwrap_tx_body_into_nssa_tx(tx))
.unwrap();
sequencer.execute_check_transaction_on_state(tx).unwrap();
let bal_from = sequencer
.state
.get_account_by_id(&nssa::AccountId::new(acc1))
.balance;
let bal_to = sequencer
.state
.get_account_by_id(&nssa::AccountId::new(acc2))
.balance;
let bal_from = sequencer.state.get_account_by_id(acc1).balance;
let bal_to = sequencer.state.get_account_by_id(acc2).balance;
assert_eq!(bal_from, 9900);
assert_eq!(bal_to, 20100);
@ -673,27 +655,15 @@ mod tests {
let block = sequencer.produce_new_block_with_mempool_transactions();
assert!(block.is_ok());
assert_eq!(block.unwrap().block_id, genesis_height + 1);
assert_eq!(block.unwrap().header.block_id, genesis_height + 1);
}
#[tokio::test]
async fn test_replay_transactions_are_rejected_in_the_same_block() {
let (mut sequencer, mempool_handle) = common_setup().await;
let acc1 = sequencer.sequencer_config.initial_accounts[0]
.account_id
.clone()
.from_base58()
.unwrap()
.try_into()
.unwrap();
let acc2 = sequencer.sequencer_config.initial_accounts[1]
.account_id
.clone()
.from_base58()
.unwrap()
.try_into()
.unwrap();
let acc1 = sequencer.sequencer_config.initial_accounts[0].account_id;
let acc2 = sequencer.sequencer_config.initial_accounts[1].account_id;
let sign_key1 = create_signing_key_for_account1();
@ -711,6 +681,7 @@ mod tests {
let current_height = sequencer
.produce_new_block_with_mempool_transactions()
.unwrap()
.header
.block_id;
let block = sequencer.store.get_block_at_id(current_height).unwrap();
@ -722,20 +693,8 @@ mod tests {
async fn test_replay_transactions_are_rejected_in_different_blocks() {
let (mut sequencer, mempool_handle) = common_setup().await;
let acc1 = sequencer.sequencer_config.initial_accounts[0]
.account_id
.clone()
.from_base58()
.unwrap()
.try_into()
.unwrap();
let acc2 = sequencer.sequencer_config.initial_accounts[1]
.account_id
.clone()
.from_base58()
.unwrap()
.try_into()
.unwrap();
let acc1 = sequencer.sequencer_config.initial_accounts[0].account_id;
let acc2 = sequencer.sequencer_config.initial_accounts[1].account_id;
let sign_key1 = create_signing_key_for_account1();
@ -748,6 +707,7 @@ mod tests {
let current_height = sequencer
.produce_new_block_with_mempool_transactions()
.unwrap()
.header
.block_id;
let block = sequencer.store.get_block_at_id(current_height).unwrap();
assert_eq!(block.body.transactions, vec![tx.clone()]);
@ -757,6 +717,7 @@ mod tests {
let current_height = sequencer
.produce_new_block_with_mempool_transactions()
.unwrap()
.header
.block_id;
let block = sequencer.store.get_block_at_id(current_height).unwrap();
assert!(block.body.transactions.is_empty());
@ -765,23 +726,22 @@ mod tests {
#[tokio::test]
async fn test_restart_from_storage() {
let config = setup_sequencer_config();
let acc1_account_id: nssa::AccountId =
config.initial_accounts[0].account_id.parse().unwrap();
let acc2_account_id: nssa::AccountId =
config.initial_accounts[1].account_id.parse().unwrap();
let acc1_account_id = config.initial_accounts[0].account_id;
let acc2_account_id = config.initial_accounts[1].account_id;
let balance_to_move = 13;
// In the following code block a transaction will be processed that moves `balance_to_move`
// from `acc_1` to `acc_2`. The block created with that transaction will be kept stored in
// the temporary directory for the block storage of this test.
{
let (mut sequencer, mempool_handle) = SequencerCore::start_from_config(config.clone());
let (mut sequencer, mempool_handle) =
SequencerCoreWithMockClients::start_from_config(config.clone()).await;
let signing_key = PrivateKey::try_new([1; 32]).unwrap();
let tx = common::test_utils::create_transaction_native_token_transfer(
*acc1_account_id.value(),
acc1_account_id,
0,
*acc2_account_id.value(),
acc2_account_id,
balance_to_move,
signing_key,
);
@ -790,6 +750,7 @@ mod tests {
let current_height = sequencer
.produce_new_block_with_mempool_transactions()
.unwrap()
.header
.block_id;
let block = sequencer.store.get_block_at_id(current_height).unwrap();
assert_eq!(block.body.transactions, vec![tx.clone()]);
@ -797,9 +758,10 @@ mod tests {
// Instantiating a new sequencer from the same config. This should load the existing block
// with the above transaction and update the state to reflect that.
let (sequencer, _mempool_handle) = SequencerCore::start_from_config(config.clone());
let balance_acc_1 = sequencer.state.get_account_by_id(&acc1_account_id).balance;
let balance_acc_2 = sequencer.state.get_account_by_id(&acc2_account_id).balance;
let (sequencer, _mempool_handle) =
SequencerCoreWithMockClients::start_from_config(config.clone()).await;
let balance_acc_1 = sequencer.state.get_account_by_id(acc1_account_id).balance;
let balance_acc_2 = sequencer.state.get_account_by_id(acc2_account_id).balance;
// Balances should be consistent with the stored block
assert_eq!(
@ -812,10 +774,11 @@ mod tests {
);
}
#[test]
fn test_get_pending_blocks() {
#[tokio::test]
async fn test_get_pending_blocks() {
let config = setup_sequencer_config();
let (mut sequencer, _mempool_handle) = SequencerCore::start_from_config(config);
let (mut sequencer, _mempool_handle) =
SequencerCoreWithMockClients::start_from_config(config).await;
sequencer
.produce_new_block_with_mempool_transactions()
.unwrap();
@ -828,10 +791,11 @@ mod tests {
assert_eq!(sequencer.get_pending_blocks().unwrap().len(), 4);
}
#[test]
fn test_delete_blocks() {
#[tokio::test]
async fn test_delete_blocks() {
let config = setup_sequencer_config();
let (mut sequencer, _mempool_handle) = SequencerCore::start_from_config(config);
let (mut sequencer, _mempool_handle) =
SequencerCoreWithMockClients::start_from_config(config).await;
sequencer
.produce_new_block_with_mempool_transactions()
.unwrap();

View File

@ -0,0 +1,49 @@
use anyhow::Result;
use common::block::Block;
use logos_blockchain_core::mantle::ops::channel::{ChannelId, MsgId};
use logos_blockchain_key_management_system_service::keys::Ed25519Key;
use url::Url;
use crate::{
block_settlement_client::BlockSettlementClientTrait, config::BedrockConfig,
indexer_client::IndexerClientTrait,
};
pub type SequencerCoreWithMockClients =
crate::SequencerCore<MockBlockSettlementClient, MockIndexerClient>;
#[derive(Clone)]
pub struct MockBlockSettlementClient {
bedrock_channel_id: ChannelId,
bedrock_signing_key: Ed25519Key,
}
impl BlockSettlementClientTrait for MockBlockSettlementClient {
fn new(config: &BedrockConfig, bedrock_signing_key: Ed25519Key) -> Result<Self> {
Ok(Self {
bedrock_channel_id: config.channel_id,
bedrock_signing_key,
})
}
fn bedrock_channel_id(&self) -> ChannelId {
self.bedrock_channel_id
}
fn bedrock_signing_key(&self) -> &Ed25519Key {
&self.bedrock_signing_key
}
async fn submit_block_to_bedrock(&self, block: &Block) -> Result<MsgId> {
self.create_inscribe_tx(block).map(|(_, msg_id)| msg_id)
}
}
#[derive(Copy, Clone)]
pub struct MockIndexerClient;
impl IndexerClientTrait for MockIndexerClient {
async fn new(_indexer_url: &Url) -> Result<Self> {
Ok(Self)
}
}

Some files were not shown because too many files have changed in this diff Show More