Merge branch 'arjentix/full-bedrock-integration' into Pravdyvy/indexer-state-management

This commit is contained in:
Pravdyvy 2026-02-10 09:14:24 +02:00
commit 36407c1d43
103 changed files with 3283 additions and 2391 deletions

View File

@ -1,23 +0,0 @@
name: Deploy Sequencer
on:
workflow_dispatch:
jobs:
deploy:
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Deploy to server
uses: appleboy/ssh-action@v1.2.4
with:
host: ${{ secrets.DEPLOY_SSH_HOST }}
username: ${{ secrets.DEPLOY_SSH_USERNAME }}
key: ${{ secrets.DEPLOY_SSH_KEY }}
envs: GITHUB_ACTOR
script_path: ci_scripts/deploy.sh

View File

@ -1,4 +1,4 @@
name: Publish Sequencer Runner Image
name: Publish Docker Images
on:
workflow_dispatch:
@ -6,6 +6,15 @@ on:
jobs:
publish:
runs-on: ubuntu-latest
strategy:
matrix:
include:
- name: sequencer_runner
dockerfile: ./sequencer_runner/Dockerfile
- name: indexer_service
dockerfile: ./indexer/service/Dockerfile
- name: explorer_service
dockerfile: ./explorer_service/Dockerfile
steps:
- uses: actions/checkout@v5
@ -23,7 +32,7 @@ jobs:
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ secrets.DOCKER_REGISTRY }}/${{ github.repository }}/sequencer_runner
images: ${{ secrets.DOCKER_REGISTRY }}/${{ github.repository }}/${{ matrix.name }}
tags: |
type=ref,event=branch
type=ref,event=pr
@ -36,7 +45,7 @@ jobs:
uses: docker/build-push-action@v5
with:
context: .
file: ./sequencer_runner/Dockerfile
file: ${{ matrix.dockerfile }}
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}

542
Cargo.lock generated
View File

@ -69,7 +69,7 @@ dependencies = [
"actix-rt",
"actix-service",
"actix-utils",
"base64",
"base64 0.22.1",
"bitflags 2.10.0",
"bytes",
"bytestring",
@ -379,6 +379,15 @@ version = "1.0.100"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61"
[[package]]
name = "arc-swap"
version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ded5f9a03ac8f24d1b8a25101ee812cd32cdc8c50a4c50237de2c4915850e73"
dependencies = [
"rustversion",
]
[[package]]
name = "archery"
version = "1.2.2"
@ -794,6 +803,22 @@ version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50"
[[package]]
name = "astral-tokio-tar"
version = "0.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec179a06c1769b1e42e1e2cbe74c7dcdb3d6383c838454d063eaac5bbb7ebbe5"
dependencies = [
"filetime",
"futures-core",
"libc",
"portable-atomic",
"rustc-hash",
"tokio",
"tokio-stream",
"xattr",
]
[[package]]
name = "async-lock"
version = "3.4.2"
@ -926,7 +951,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b52af3cb4058c895d37317bb27508dccc8e5f2d39454016b297bf4a400597b8"
dependencies = [
"axum-core 0.5.6",
"base64",
"base64 0.22.1",
"bytes",
"form_urlencoded",
"futures-util",
@ -1029,6 +1054,12 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6107fe1be6682a68940da878d9e9f5e90ca5745b3dec9fd1bb393c8777d4f581"
[[package]]
name = "base64"
version = "0.21.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567"
[[package]]
name = "base64"
version = "0.22.1"
@ -1046,6 +1077,7 @@ name = "bedrock_client"
version = "0.1.0"
dependencies = [
"anyhow",
"common",
"futures",
"log",
"logos-blockchain-chain-broadcast-service",
@ -1152,6 +1184,83 @@ dependencies = [
"generic-array 0.14.7",
]
[[package]]
name = "bollard"
version = "0.19.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "87a52479c9237eb04047ddb94788c41ca0d26eaff8b697ecfbb4c32f7fdc3b1b"
dependencies = [
"async-stream",
"base64 0.22.1",
"bitflags 2.10.0",
"bollard-buildkit-proto",
"bollard-stubs",
"bytes",
"chrono",
"futures-core",
"futures-util",
"hex",
"home",
"http 1.4.0",
"http-body-util",
"hyper",
"hyper-named-pipe",
"hyper-rustls",
"hyper-util",
"hyperlocal",
"log",
"num",
"pin-project-lite",
"rand 0.9.2",
"rustls",
"rustls-native-certs",
"rustls-pemfile",
"rustls-pki-types",
"serde",
"serde_derive",
"serde_json",
"serde_repr",
"serde_urlencoded",
"thiserror 2.0.17",
"tokio",
"tokio-stream",
"tokio-util",
"tonic",
"tower-service",
"url",
"winapi",
]
[[package]]
name = "bollard-buildkit-proto"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "85a885520bf6249ab931a764ffdb87b0ceef48e6e7d807cfdb21b751e086e1ad"
dependencies = [
"prost 0.14.3",
"prost-types",
"tonic",
"tonic-prost",
"ureq",
]
[[package]]
name = "bollard-stubs"
version = "1.49.1-rc.28.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5731fe885755e92beff1950774068e0cae67ea6ec7587381536fca84f1779623"
dependencies = [
"base64 0.22.1",
"bollard-buildkit-proto",
"bytes",
"chrono",
"prost 0.14.3",
"serde",
"serde_json",
"serde_repr",
"serde_with",
]
[[package]]
name = "bonsai-sdk"
version = "1.4.1"
@ -1491,7 +1600,7 @@ name = "common"
version = "0.1.0"
dependencies = [
"anyhow",
"base64",
"base64 0.22.1",
"borsh",
"hex",
"log",
@ -1501,6 +1610,7 @@ dependencies = [
"reqwest",
"serde",
"serde_json",
"serde_with",
"sha2",
"thiserror 2.0.17",
"url",
@ -1893,7 +2003,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7ab67060fc6b8ef687992d439ca0fa36e7ed17e9a0b16b25b601e8757df720de"
dependencies = [
"data-encoding",
"syn 1.0.109",
"syn 2.0.111",
]
[[package]]
@ -2050,12 +2160,35 @@ dependencies = [
"syn 2.0.111",
]
[[package]]
name = "docker-compose-types"
version = "0.22.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7edb75a85449fd9c34d9fb3376c6208ec4115d2ca43b965175a52d71349ecab8"
dependencies = [
"derive_builder",
"indexmap 2.12.1",
"serde",
"serde_yaml",
]
[[package]]
name = "docker-generate"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ccf673e0848ef09fa4aeeba78e681cf651c0c7d35f76ee38cec8e55bc32fa111"
[[package]]
name = "docker_credential"
version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1d89dfcba45b4afad7450a99b39e751590463e45c04728cf555d36bb66940de8"
dependencies = [
"base64 0.21.7",
"serde",
"serde_json",
]
[[package]]
name = "downcast-rs"
version = "1.2.1"
@ -2262,6 +2395,16 @@ dependencies = [
"windows-sys 0.61.2",
]
[[package]]
name = "etcetera"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "de48cc4d1c1d97a20fd819def54b890cadde72ed3ad0c614822a0a433361be96"
dependencies = [
"cfg-if",
"windows-sys 0.61.2",
]
[[package]]
name = "event-listener"
version = "5.4.1"
@ -2333,6 +2476,17 @@ version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be"
[[package]]
name = "ferroid"
version = "0.8.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bb330bbd4cb7a5b9f559427f06f98a4f853a137c8298f3bd3f8ca57663e21986"
dependencies = [
"portable-atomic",
"rand 0.9.2",
"web-time",
]
[[package]]
name = "ff"
version = "0.13.1"
@ -2349,6 +2503,17 @@ version = "0.2.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d"
[[package]]
name = "filetime"
version = "0.2.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f98844151eee8917efc50bd9e8318cb963ae8b297431495d3f758616ea5c57db"
dependencies = [
"cfg-if",
"libc",
"libredox",
]
[[package]]
name = "find-msvc-tools"
version = "0.1.5"
@ -2793,6 +2958,15 @@ version = "1.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e89e8d20b3799fa526152a5301a771eaaad80857f83e01b23216ceaafb2d9280"
[[package]]
name = "home"
version = "0.5.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cc627f471c528ff0c4a49e1d5e60450c8f6461dd6d10ba9dcd3a61d3dff7728d"
dependencies = [
"windows-sys 0.61.2",
]
[[package]]
name = "html-escape"
version = "0.2.13"
@ -2909,6 +3083,21 @@ dependencies = [
"want",
]
[[package]]
name = "hyper-named-pipe"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "73b7d8abf35697b81a825e386fc151e0d503e8cb5fcb93cc8669c376dfd6f278"
dependencies = [
"hex",
"hyper",
"hyper-util",
"pin-project-lite",
"tokio",
"tower-service",
"winapi",
]
[[package]]
name = "hyper-rustls"
version = "0.27.7"
@ -2927,6 +3116,19 @@ dependencies = [
"webpki-roots",
]
[[package]]
name = "hyper-timeout"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0"
dependencies = [
"hyper",
"hyper-util",
"pin-project-lite",
"tokio",
"tower-service",
]
[[package]]
name = "hyper-tls"
version = "0.6.0"
@ -2949,7 +3151,7 @@ version = "0.1.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "727805d60e7938b76b826a6ef209eb70eaa1812794f9424d4a4e2d740662df5f"
dependencies = [
"base64",
"base64 0.22.1",
"bytes",
"futures-channel",
"futures-core",
@ -2969,6 +3171,21 @@ dependencies = [
"windows-registry",
]
[[package]]
name = "hyperlocal"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "986c5ce3b994526b3cd75578e62554abd09f0899d6206de48b3e96ab34ccc8c7"
dependencies = [
"hex",
"http-body-util",
"hyper",
"hyper-util",
"pin-project-lite",
"tokio",
"tower-service",
]
[[package]]
name = "iana-time-zone"
version = "0.1.64"
@ -3133,6 +3350,7 @@ name = "indexer_service"
version = "0.1.0"
dependencies = [
"anyhow",
"arc-swap",
"async-trait",
"clap",
"env_logger",
@ -3152,7 +3370,7 @@ dependencies = [
name = "indexer_service_protocol"
version = "0.1.0"
dependencies = [
"base64",
"base64 0.22.1",
"borsh",
"common",
"nssa",
@ -3223,7 +3441,7 @@ version = "0.1.0"
dependencies = [
"actix-web",
"anyhow",
"base64",
"base64 0.22.1",
"borsh",
"common",
"env_logger",
@ -3235,10 +3453,12 @@ dependencies = [
"log",
"nssa",
"nssa_core",
"rand 0.8.5",
"sequencer_core",
"sequencer_runner",
"serde_json",
"tempfile",
"testcontainers",
"token_core",
"tokio",
"url",
@ -3418,7 +3638,7 @@ version = "0.26.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf36eb27f8e13fa93dcb50ccb44c417e25b818cfa1a481b5470cd07b19c60b98"
dependencies = [
"base64",
"base64 0.22.1",
"futures-channel",
"futures-util",
"gloo-net",
@ -3471,7 +3691,7 @@ version = "0.26.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "790bedefcec85321e007ff3af84b4e417540d5c87b3c9779b9e247d1bcc3dab8"
dependencies = [
"base64",
"base64 0.22.1",
"http-body",
"hyper",
"hyper-rustls",
@ -3656,7 +3876,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f9569fc37575a5d64c0512145af7630bf651007237ef67a8a77328199d315bb"
dependencies = [
"any_spawner",
"base64",
"base64 0.22.1",
"cfg-if",
"either_of",
"futures",
@ -3858,7 +4078,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dbf1045af93050bf3388d1c138426393fc131f6d9e46a65519da884c033ed730"
dependencies = [
"any_spawner",
"base64",
"base64 0.22.1",
"codee",
"futures",
"hydration_context",
@ -3915,6 +4135,7 @@ checksum = "df15f6eac291ed1cf25865b1ee60399f57e7c227e7f51bdbd4c5270396a9ed50"
dependencies = [
"bitflags 2.10.0",
"libc",
"redox_syscall 0.6.0",
]
[[package]]
@ -4803,9 +5024,24 @@ dependencies = [
"risc0-zkvm",
"serde",
"serde_json",
"serde_with",
"thiserror 2.0.17",
]
[[package]]
name = "num"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "35bd024e8b2ff75562e5f34e7f4905839deb4b22955ef5e73d2fea1b9813cb23"
dependencies = [
"num-bigint",
"num-complex",
"num-integer",
"num-iter",
"num-rational",
"num-traits",
]
[[package]]
name = "num-bigint"
version = "0.4.6"
@ -4832,6 +5068,15 @@ dependencies = [
"zeroize",
]
[[package]]
name = "num-complex"
version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "73f88a1307638156682bada9d7604135552957b7818057dcef22705b4d509495"
dependencies = [
"num-traits",
]
[[package]]
name = "num-conv"
version = "0.1.0"
@ -4858,6 +5103,17 @@ dependencies = [
"num-traits",
]
[[package]]
name = "num-rational"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f83d14da390562dca69fc84082e73e548e1ad308d24accdedd2720017cb37824"
dependencies = [
"num-bigint",
"num-integer",
"num-traits",
]
[[package]]
name = "num-traits"
version = "0.2.19"
@ -5060,11 +5316,36 @@ checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1"
dependencies = [
"cfg-if",
"libc",
"redox_syscall",
"redox_syscall 0.5.18",
"smallvec",
"windows-link",
]
[[package]]
name = "parse-display"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "914a1c2265c98e2446911282c6ac86d8524f495792c38c5bd884f80499c7538a"
dependencies = [
"parse-display-derive",
"regex",
"regex-syntax",
]
[[package]]
name = "parse-display-derive"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2ae7800a4c974efd12df917266338e79a7a74415173caf7e70aa0a0707345281"
dependencies = [
"proc-macro2",
"quote",
"regex",
"regex-syntax",
"structmeta",
"syn 2.0.111",
]
[[package]]
name = "paste"
version = "1.0.15"
@ -5165,9 +5446,9 @@ dependencies = [
[[package]]
name = "portable-atomic"
version = "1.11.1"
version = "1.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483"
checksum = "c33a9471896f1c69cecef8d20cbe2f7accd12527ce60845ff44c153bb2a21b49"
[[package]]
name = "postcard"
@ -5329,7 +5610,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2796faa41db3ec313a31f7624d9286acf277b52de526150b7e69f3debf891ee5"
dependencies = [
"bytes",
"prost-derive",
"prost-derive 0.13.5",
]
[[package]]
name = "prost"
version = "0.14.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d2ea70524a2f82d518bce41317d0fae74151505651af45faf1ffbd6fd33f0568"
dependencies = [
"bytes",
"prost-derive 0.14.3",
]
[[package]]
@ -5345,6 +5636,28 @@ dependencies = [
"syn 2.0.111",
]
[[package]]
name = "prost-derive"
version = "0.14.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "27c6023962132f4b30eb4c172c91ce92d933da334c59c23cddee82358ddafb0b"
dependencies = [
"anyhow",
"itertools 0.14.0",
"proc-macro2",
"quote",
"syn 2.0.111",
]
[[package]]
name = "prost-types"
version = "0.14.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8991c4cbdb8bc5b11f0b074ffe286c30e523de90fee5ba8132f1399f23cb3dd7"
dependencies = [
"prost 0.14.3",
]
[[package]]
name = "quinn"
version = "0.11.9"
@ -5568,6 +5881,15 @@ dependencies = [
"bitflags 2.10.0",
]
[[package]]
name = "redox_syscall"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec96166dafa0886eb81fe1c0a388bece180fbef2135f97c1e2cf8302e74b43b5"
dependencies = [
"bitflags 2.10.0",
]
[[package]]
name = "redox_users"
version = "0.5.2"
@ -5640,7 +5962,7 @@ version = "0.12.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3b4c14b2d9afca6a60277086b0cc6a6ae0b568f6f7916c943a8cdc79f8be240f"
dependencies = [
"base64",
"base64 0.22.1",
"bytes",
"encoding_rs",
"futures-channel",
@ -5883,7 +6205,7 @@ dependencies = [
"derive_more 2.1.0",
"hex",
"lazy-regex",
"prost",
"prost 0.13.5",
"risc0-binfmt",
"risc0-build",
"risc0-circuit-keccak",
@ -6087,6 +6409,15 @@ dependencies = [
"security-framework 3.5.1",
]
[[package]]
name = "rustls-pemfile"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dce314e5fee3f39953d46bb63bb8a46d40c2f8fb7cc5a3b6cab2bde9721d6e50"
dependencies = [
"rustls-pki-types",
]
[[package]]
name = "rustls-pki-types"
version = "1.13.2"
@ -6359,7 +6690,8 @@ dependencies = [
"actix-web",
"anyhow",
"base58",
"base64",
"base64 0.22.1",
"bedrock_client",
"borsh",
"common",
"futures",
@ -6479,6 +6811,17 @@ dependencies = [
"thiserror 2.0.17",
]
[[package]]
name = "serde_repr"
version = "0.1.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "175ee3e80ae9982737ca543e96133087cbd9a485eecc3bc4de9c1a37b47ea59c"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.111",
]
[[package]]
name = "serde_spanned"
version = "0.6.9"
@ -6515,7 +6858,7 @@ version = "3.16.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4fa237f2807440d238e0364a218270b98f767a00d3dada77b1c53ae88940e2e7"
dependencies = [
"base64",
"base64 0.22.1",
"chrono",
"hex",
"indexmap 1.9.3",
@ -6540,6 +6883,19 @@ dependencies = [
"syn 2.0.111",
]
[[package]]
name = "serde_yaml"
version = "0.9.33"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a0623d197252096520c6f2a5e1171ee436e5af99a5d7caa2891e55e61950e6d9"
dependencies = [
"indexmap 2.12.1",
"itoa",
"ryu",
"serde",
"unsafe-libyaml",
]
[[package]]
name = "serdect"
version = "0.2.0"
@ -6557,7 +6913,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "353d02fa2886cd8dae0b8da0965289fa8f2ecc7df633d1ce965f62fdf9644d29"
dependencies = [
"axum 0.8.8",
"base64",
"base64 0.22.1",
"bytes",
"const-str 0.7.1",
"const_format",
@ -6729,7 +7085,7 @@ version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2e859df029d160cb88608f5d7df7fb4753fd20fdfb4de5644f3d8b8440841721"
dependencies = [
"base64",
"base64 0.22.1",
"bytes",
"futures",
"http 1.4.0",
@ -6795,6 +7151,29 @@ version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
[[package]]
name = "structmeta"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2e1575d8d40908d70f6fd05537266b90ae71b15dbbe7a8b7dffa2b759306d329"
dependencies = [
"proc-macro2",
"quote",
"structmeta-derive",
"syn 2.0.111",
]
[[package]]
name = "structmeta-derive"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "152a0b65a590ff6c3da95cabe2353ee04e6167c896b28e3b14478c2636c922fc"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.111",
]
[[package]]
name = "strum"
version = "0.27.2"
@ -7001,6 +7380,38 @@ dependencies = [
"risc0-zkvm",
]
[[package]]
name = "testcontainers"
version = "0.26.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a81ec0158db5fbb9831e09d1813fe5ea9023a2b5e6e8e0a5fe67e2a820733629"
dependencies = [
"astral-tokio-tar",
"async-trait",
"bollard",
"bytes",
"docker-compose-types",
"docker_credential",
"either",
"etcetera",
"ferroid",
"futures",
"itertools 0.14.0",
"log",
"memchr",
"parse-display",
"pin-project-lite",
"serde",
"serde_json",
"serde_with",
"thiserror 2.0.17",
"tokio",
"tokio-stream",
"tokio-util",
"url",
"uuid",
]
[[package]]
name = "thiserror"
version = "1.0.69"
@ -7321,6 +7732,46 @@ version = "1.0.6+spec-1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ab16f14aed21ee8bfd8ec22513f7287cd4a91aa92e44edfe2c17ddd004e92607"
[[package]]
name = "tonic"
version = "0.14.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a286e33f82f8a1ee2df63f4fa35c0becf4a85a0cb03091a15fd7bf0b402dc94a"
dependencies = [
"async-trait",
"axum 0.8.8",
"base64 0.22.1",
"bytes",
"h2 0.4.13",
"http 1.4.0",
"http-body",
"http-body-util",
"hyper",
"hyper-timeout",
"hyper-util",
"percent-encoding",
"pin-project",
"socket2 0.6.1",
"sync_wrapper",
"tokio",
"tokio-stream",
"tower",
"tower-layer",
"tower-service",
"tracing",
]
[[package]]
name = "tonic-prost"
version = "0.14.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d6c55a2d6a14174563de34409c9f92ff981d006f56da9c6ecd40d9d4a31500b0"
dependencies = [
"bytes",
"prost 0.14.3",
"tonic",
]
[[package]]
name = "tower"
version = "0.5.2"
@ -7329,9 +7780,12 @@ checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9"
dependencies = [
"futures-core",
"futures-util",
"indexmap 2.12.1",
"pin-project-lite",
"slab",
"sync_wrapper",
"tokio",
"tokio-util",
"tower-layer",
"tower-service",
"tracing",
@ -7565,6 +8019,12 @@ dependencies = [
"subtle",
]
[[package]]
name = "unsafe-libyaml"
version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861"
[[package]]
name = "unsigned-varint"
version = "0.8.0"
@ -7577,6 +8037,34 @@ version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1"
[[package]]
name = "ureq"
version = "3.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d39cb1dbab692d82a977c0392ffac19e188bd9186a9f32806f0aaa859d75585a"
dependencies = [
"base64 0.22.1",
"log",
"percent-encoding",
"rustls",
"rustls-pki-types",
"ureq-proto",
"utf-8",
"webpki-roots",
]
[[package]]
name = "ureq-proto"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d81f9efa9df032be5934a46a068815a10a042b494b6a58cb0a1a97bb5467ed6f"
dependencies = [
"base64 0.22.1",
"http 1.4.0",
"httparse",
"log",
]
[[package]]
name = "url"
version = "2.5.7"
@ -7665,7 +8153,7 @@ dependencies = [
"anyhow",
"async-stream",
"base58",
"base64",
"base64 0.22.1",
"borsh",
"bytemuck",
"clap",
@ -8223,6 +8711,16 @@ dependencies = [
"zeroize",
]
[[package]]
name = "xattr"
version = "1.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32e45ad4206f6d2479085147f02bc2ef834ac85886624a23575ae137c8aa8156"
dependencies = [
"libc",
"rustix",
]
[[package]]
name = "xxhash-rust"
version = "0.8.15"

View File

@ -69,6 +69,7 @@ openssl = { version = "0.10", features = ["vendored"] }
openssl-probe = { version = "0.1.2" }
serde = { version = "1.0.60", default-features = false, features = ["derive"] }
serde_json = "1.0.81"
serde_with = "3.16.1"
actix = "0.13.0"
actix-cors = "0.6.1"
jsonrpsee = "0.26.0"

646
README.md
View File

@ -35,19 +35,17 @@ To our knowledge, this design is unique to LEZ. Other privacy-focused programmab
3. Transferring private to public (local / privacy-preserving execution)
- Bob executes the token program `Transfer` function locally, sending to Charlies public account.
- A ZKP of correct execution is generated.
- Bobs private balance stays hidden.
- Charlies public account is updated on-chain.
- Bob's private account and balance still remain hidden.
- Charlie's public account is modified with the new tokens added.
4. Transferring public to public (public execution):
- Alice submits a transaction to execute the token program `Transfer` function on-chain, specifying Charlie's public account as recipient.
- The execution is handled on-chain without ZKPs involved.
- Alice's and Charlie's accounts are modified according to the transaction.
4. Transfer from public to public (public execution)
- Alice submits an on-chain transaction to run `Transfer`, sending to Charlie's public account.
- Execution is handled fully on-chain without ZKPs.
- Alice's and Charlie's public balances are updated.
### Key points:
- The same token program is used in every execution.
- The only difference is execution mode: public execution updates visible state on-chain, while private execution relies on ZKPs.
- Validators verify proofs only for privacy-preserving transactions, keeping processing efficient.
#### Key points:
- The same token program is used in all executions.
- The difference lies in execution mode: public executions update visible accounts on-chain, while private executions rely on ZKPs.
- Validators only need to verify proofs for privacy-preserving transactions, keeping processing efficient.
---
@ -141,7 +139,625 @@ The sequencer and node can be run locally:
- `git checkout schouhy/full-bedrock-integration`
- `RUST_LOG=info cargo run --release -p indexer_service $(pwd)/integration_tests/configs/indexer/indexer_config.json`
3. On another terminal go to the `logos-blockchain/lssa` repo and run the sequencer:
- `git checkout schouhy/full-bedrock-integration`
- `RUST_LOG=info RISC0_DEV_MODE=1 cargo run --release -p sequencer_runner sequencer_runner/configs/debug`
# Running with Docker
You can run the whole setup with Docker:
```bash
docker compose up
```
With that you can send transactions from a local wallet to the Sequencer running inside Docker using `wallet/configs/debug`, as well as explore blocks by opening `http://localhost:8080`.
## Caution for local image builds
If you're going to build the sequencer image locally, you should adjust the default Docker settings and set `defaultKeepStorage` to at least `25GB` so that Docker can keep layers properly cached.
# Try the Wallet CLI
## Install
This repository includes a CLI for interacting with the Nescience sequencer. To install it, run the following command from the root of the repository:
```bash
cargo install --path wallet --force
```
Run `wallet help` to check everything went well.
Some completion scripts exist; see the [completions](./completions/README.md) folder.
## Tutorial
This tutorial walks you through creating accounts and executing NSSA programs in both public and private contexts.
> [!NOTE]
> The NSSA state is split into two separate but interconnected components: the public state and the private state.
> The public state is an on-chain, publicly visible record of accounts indexed by their Account IDs.
> The private state mirrors this, but the actual account values are stored locally by each account owner. On-chain, only a hidden commitment to each private account state is recorded. This allows the chain to enforce freshness (i.e., prevent the reuse of stale private states) while preserving privacy and unlinkability across executions and private accounts.
>
> Every piece of state in NSSA is stored in an account (public or private). Accounts are either uninitialized or are owned by a program, and programs can only modify the accounts they own.
>
> In NSSA, accounts can only be modified through program execution. A program is the sole mechanism that can change an accounts value.
> Programs run publicly when all involved accounts are public, and privately when at least one private account participates.
### Health-check
Verify that the node is running and that the wallet can connect to it:
```bash
wallet check-health
```
You should see `✅ All looks good!`.
### The commands
The wallet provides several commands to interact with the node and query state. To see the full list, run `wallet help`:
```bash
Commands:
auth-transfer Authenticated transfer subcommand
chain-info Generic chain info subcommand
account Account view and sync subcommand
pinata Pinata program interaction subcommand
token Token program interaction subcommand
amm AMM program interaction subcommand
check-health Check the wallet can connect to the node and builtin local programs match the remote versions
```
### Accounts
> [!NOTE]
> Accounts are the basic unit of state in NSSA. They essentially hold native tokens and arbitrary data managed by some program.
The CLI provides commands to manage accounts. Run `wallet account` to see the options available:
```bash
Commands:
get Get account data
new Produce new public or private account
sync-private Sync private accounts
help Print this message or the help of the given subcommand(s)
```
#### Create a new public account
You can create both public and private accounts through the CLI. For example:
```bash
wallet account new public
# Output:
Generated new account with account_id Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ
```
This id is required when executing any program that interacts with the account.
> [!NOTE]
> Public accounts live on-chain and are identified by a 32-byte Account ID.
> Running `wallet account new public` generates a fresh keypair for the signature scheme used in NSSA.
> The account ID is derived from the public key. The private key is used to sign transactions and to authorize the account in program executions.
#### Account initialization
To query the accounts current status, run:
```bash
# Replace the id with yours
wallet account get --account-id Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ
# Output:
Account is Uninitialized
```
> [!NOTE]
> New accounts begin in an uninitialized state, meaning they are not yet owned by any program. A program may claim an uninitialized account; once claimed, the account becomes owned by that program.
> Owned accounts can only be modified through executions of the owning program. The only exception is native-token credits: any program may credit native tokens to any account.
> However, debiting native tokens from an account must always be performed by its owning program.
In this example, we will initialize the account for the Authenticated transfer program, which securely manages native token transfers by requiring authentication for debits.
Initialize the account by running:
```bash
# This command submits a public transaction executing the `init` function of the
# Authenticated-transfer program. The wallet polls the sequencer until the
# transaction is included in a block, which may take several seconds.
wallet auth-transfer init --account-id Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ
```
After it completes, check the updated account status:
```bash
wallet account get --account-id Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ
# Output:
Account owned by authenticated transfer program
{"balance":0}
```
### Funding the account: executing the Piñata program
Now that we have a public account initialized by the authenticated transfer program, we need to fund it. For that, the testnet provides the Piñata program.
```bash
# Complete with your id
wallet pinata claim --to Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ
```
After the claim succeeds, the account will be funded with some tokens:
```bash
wallet account get --account-id Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ
# Output:
Account owned by authenticated transfer program
{"balance":150}
```
### Native token transfers: executing the Authenticated transfers program
NSSA comes with a program for managing and transferring native tokens. Run `wallet auth-transfer` to see the options available:
```bash
Commands:
init Initialize account under authenticated transfer program
send Send native tokens from one account to another with variable privacy
help Print this message or the help of the given subcommand(s)
```
We have already used the `init` command. The `send` command is used to execute the `Transfer` function of the authenticated program.
Let's try it. For that we need to create another account for the recipient of the transfer.
```bash
wallet account new public
# Output:
Generated new account with account_id Public/Ev1JprP9BmhbFVQyBcbznU8bAXcwrzwRoPTetXdQPAWS
```
> [!NOTE]
> The new account is uninitialized. The authenticated transfers program will claim any uninitialized account used in a transfer. So we don't need to manually initialize the recipient account.
Let's send 37 tokens to the new account.
```bash
wallet auth-transfer send \
--from Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ \
--to Public/Ev1JprP9BmhbFVQyBcbznU8bAXcwrzwRoPTetXdQPAWS \
--amount 37
```
Once that succeeds we can check the states.
```bash
# Sender account
wallet account get --account-id Public/HrA8TVjBS8UVf9akV7LRhyh6k4c7F6PS7PvqgtPmKAT8
# Output:
Account owned by authenticated transfer program
{"balance":113}
```
```bash
# Recipient account
wallet account get --account-id Public/Ev1JprP9BmhbFVQyBcbznU8bAXcwrzwRoPTetXdQPAWS
# Output:
Account owned by authenticated transfer program
{"balance":37}
```
#### Create a new private account
> [!NOTE]
> Private accounts are structurally identical to public accounts; they differ only in how their state is stored off-chain and represented on-chain.
> The raw values of a private account are never stored on-chain. Instead, the chain only holds a 32-byte commitment (a hash-like binding to the actual values). Transactions include encrypted versions of the private values so that users can recover them from the blockchain. The decryption keys are known only to the user and are never shared.
> Private accounts are not managed through the usual signature mechanism used for public accounts. Instead, each private account is associated with two keypairs:
> - *Nullifier keys*, for using the corresponding private account in privacy preserving executions.
> - *Viewing keys*, used for encrypting and decrypting the values included in transactions.
>
> Private accounts also have a 32-byte identifier, derived from the nullifier public key.
>
> Just like public accounts, private accounts can only be initialized once. Any user can initialize them without knowing the owner's secret keys. However, modifying an initialized private account through an off-chain program execution requires knowledge of the owners secret keys.
>
> Transactions that modify the values of a private account include a commitment to the new values, which will be added to the on-chain commitment set. They also include a nullifier that marks the previous version as old.
> The nullifier is constructed so that it cannot be linked to any prior commitment, ensuring that updates to the same private account cannot be correlated.
Now lets switch to the private state and create a private account.
```bash
wallet account new private
# Output:
Generated new account with account_id Private/HacPU3hakLYzWtSqUPw6TUr8fqoMieVWovsUR6sJf7cL
With npk e6366f79d026c8bd64ae6b3d601f0506832ec682ab54897f205fffe64ec0d951
With ipk 02ddc96d0eb56e00ce14994cfdaec5ae1f76244180a919545983156e3519940a17
```
For now, focus only on the account id. Ignore the `npk` and `ipk` values. These are the Nullifier public key and the Viewing public key. They are stored locally in the wallet and are used internally to build privacy-preserving transactions.
Also, the account id for private accounts is derived from the `npk` value. But we won't need them now.
Just like public accounts, new private accounts start out uninitialized:
```bash
wallet account get --account-id Private/HacPU3hakLYzWtSqUPw6TUr8fqoMieVWovsUR6sJf7cL
# Output:
Account is Uninitialized
```
Unlike public accounts, private accounts are never visible to the network. They exist only in your local wallet storage.
#### Sending tokens from the public account to the private account
Sending tokens to an uninitialized private account causes the Authenticated-Transfers program to claim it. Just like with public accounts.
This happens because program execution logic does not depend on whether the involved accounts are public or private.
Lets send 17 tokens to the new private account.
The syntax is identical to the public-to-public transfer; just set the private ID as the recipient.
This command will run the Authenticated-Transfer program locally, generate a proof, and submit it to the sequencer. Depending on your machine, this can take from 30 seconds to 4 minutes.
```bash
wallet auth-transfer send \
--from Public/Ev1JprP9BmhbFVQyBcbznU8bAXcwrzwRoPTetXdQPAWS \
--to Private/HacPU3hakLYzWtSqUPw6TUr8fqoMieVWovsUR6sJf7cL \
--amount 17
```
After it succeeds, check both accounts:
```bash
# Public sender account
wallet account get --account-id Public/Ev1JprP9BmhbFVQyBcbznU8bAXcwrzwRoPTetXdQPAWS
# Output:
Account owned by authenticated transfer program
{"balance":20}
```
```bash
# Private recipient account
wallet account get --account-id Private/HacPU3hakLYzWtSqUPw6TUr8fqoMieVWovsUR6sJf7cL
# Output:
Account owned by authenticated transfer program
{"balance":17}
```
> [!NOTE]
> The last command does not query the network.
> It works even offline because private account data lives only in your wallet storage. Other users cannot read your private balances.
#### Digression: modifying private accounts
As a general rule, private accounts can only be modified through a program execution performed by their owner. That is, the person who holds the private key for that account. There is one exception: an uninitialized private account may be initialized by any user, without requiring the private key. After initialization, only the owner can modify it.
This mechanism enables a common use case: transferring funds from any account (public or private) to a private account owned by someone else. For such transfers, the recipients private account must be uninitialized.
#### Sending tokens from the public account to a private account owned by someone else
For this tutorial, well simulate that scenario by creating a new private account that we own, but well treat it as if it belonged to someone else.
Let's create a new (uninitialized) private account like before:
```bash
wallet account new private
# Output:
Generated new account with account_id Private/AukXPRBmrYVqoqEW2HTs7N3hvTn3qdNFDcxDHVr5hMm5
With npk 0c95ebc4b3830f53da77bb0b80a276a776cdcf6410932acc718dcdb3f788a00e
With ipk 039fd12a3674a880d3e917804129141e4170d419d1f9e28a3dcf979c1f2369cb72
```
Now we'll ignore the private account ID and focus on the `npk` and `ipk` values. We'll need these to send tokens to a foreign private account. The syntax is very similar.
```bash
wallet auth-transfer send \
--from Public/Ev1JprP9BmhbFVQyBcbznU8bAXcwrzwRoPTetXdQPAWS \
--to-npk 0c95ebc4b3830f53da77bb0b80a276a776cdcf6410932acc718dcdb3f788a00e \
--to-ipk 039fd12a3674a880d3e917804129141e4170d419d1f9e28a3dcf979c1f2369cb72 \
--amount 3
```
The command above produces a privacy-preserving transaction, which may take a few minutes to complete. The updated values of the private account are encrypted and included in the transaction.
Once the transaction is accepted, the recipient must run `wallet account sync-private`. This command scans the chain for encrypted values that belong to their private accounts and updates the local versions accordingly.
#### Transfers in other combinations of public and private accounts
Weve shown how to use the authenticated-transfers program for transfers between two public accounts, and for transfers from a public sender to a private recipient. Sending tokens from a private account (whether to a public account or to another private account) works in essentially the same way.
### The token program
So far, weve made transfers using the authenticated-transfers program, which handles native token transfers. The Token program, on the other hand, is used for creating and managing custom tokens.
> [!NOTE]
> The token program is a single program responsible for creating and managing all tokens. There is no need to deploy new programs to introduce new tokens. All token-related operations are performed by invoking the appropriate functions of the token program.
The CLI provides commands to execute the token program. To see the options available run `wallet token`:
```bash
Commands:
new Produce a new token
send Send tokens from one account to another with variable privacy
help Print this message or the help of the given subcommand(s)
```
> [!NOTE]
> The Token program manages its accounts in two categories. Meaning, all accounts owned by the Token program fall into one of these types.
> - Token definition accounts: these accounts store metadata about a token, such as its name, total supply, and other identifying properties. They act as the tokens unique identifier.
> - Token holding accounts: these accounts hold actual token balances. In addition to the balance, they also record which token definition they belong to.
#### Creating a new token
To create a new token, simply run `wallet token new`. This will create a transaction to execute the `New` function of the token program.
The command expects a name, the desired total supply, and two uninitialized accounts:
- One that will be initialized as the token definition account for the new token.
- Another that will be initialized as a token holding account and receive the tokens entire initial supply.
##### New token with both definition and supply accounts set as public
For example, let's create two new (uninitialized) public accounts and then use them to create a new token.
```bash
wallet account new public
# Output:
Generated new account with account_id Public/4X9kAcnCZ1Ukkbm3nywW9xfCNPK8XaMWCk3zfs1sP4J7
```
```bash
wallet account new public
# Output:
Generated new account with account_id Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw
```
Now we use them to create a new token. Let's call it the "Token A"
```bash
wallet token new \
--name TOKENA \
--total-supply 1337 \
--definition-account-id Public/4X9kAcnCZ1Ukkbm3nywW9xfCNPK8XaMWCk3zfs1sP4J7 \
--supply-account-id Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw
```
After it succeeds, we can inspect the two accounts to see how they were initialized.
```bash
wallet account get --account-id Public/4X9kAcnCZ1Ukkbm3nywW9xfCNPK8XaMWCk3zfs1sP4J7
# Output:
Definition account owned by token program
{"account_type":"Token definition","name":"TOKENA","total_supply":1337}
```
```bash
wallet account get --account-id Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw
# Output:
Holding account owned by token program
{"account_type":"Token holding","definition_id":"4X9kAcnCZ1Ukkbm3nywW9xfCNPK8XaMWCk3zfs1sP4J7","balance":1337}
```
##### New token with public account definition but private holding account for initial supply
Lets create a new token, but this time using a public definition account and a private holding account to store the entire supply.
Since we cant reuse the accounts from the previous example, we need to create fresh ones for this case.
```bash
wallet account new public
# Output:
Generated new account with account_id Public/GQ3C8rbprTtQUCvkuVBRu3v9wvUvjafCMFqoSPvTEVii
```
```bash
wallet account new private
# Output:
Generated new account with account_id Private/HMRHZdPw4pbyPVZHNGrV6K5AA95wACFsHTRST84fr3CF
With npk 6a2dfe433cf28e525aa0196d719be3c16146f7ee358ca39595323f94fde38f93
With ipk 03d59abf4bee974cc12ddb44641c19f0b5441fef39191f047c988c29a77252a577
```
Now we use them to create a new token. Let's call it "Token B".
```bash
wallet token new \
--name TOKENB \
--total-supply 7331 \
--definition-account-id Public/GQ3C8rbprTtQUCvkuVBRu3v9wvUvjafCMFqoSPvTEVii \
--supply-account-id Private/HMRHZdPw4pbyPVZHNGrV6K5AA95wACFsHTRST84fr3CF
```
After it succeeds, we can check their values
```bash
wallet account get --account-id Public/GQ3C8rbprTtQUCvkuVBRu3v9wvUvjafCMFqoSPvTEVii
# Output:
Definition account owned by token program
{"account_type":"Token definition","name":"TOKENB","total_supply":7331}
```
```bash
wallet account get --account-id Private/HMRHZdPw4pbyPVZHNGrV6K5AA95wACFsHTRST84fr3CF
# Output:
Holding account owned by token program
{"account_type":"Token holding","definition_id":"GQ3C8rbprTtQUCvkuVBRu3v9wvUvjafCMFqoSPvTEVii","balance":7331}
```
Like any other private account owned by us, it cannot be seen by other users.
#### Custom token transfers
The Token program has a function to move funds from one token holding account to another. If executed with an uninitialized account as the recipient, that account will be automatically claimed by the token program.
The transfer function can be executed with the `wallet token send` command.
Let's create a new public account for the recipient.
```bash
wallet account new public
# Output:
Generated new account with account_id Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6
```
Let's send 1000 B tokens to this new account. We'll debit this from the supply account used in the creation of the token.
```bash
wallet token send \
--from Private/HMRHZdPw4pbyPVZHNGrV6K5AA95wACFsHTRST84fr3CF \
--to Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6 \
--amount 1000
```
Let's inspect the public account:
```bash
wallet account get --account-id Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6
# Output:
Holding account owned by token program
{"account_type":"Token holding","definition_id":"GQ3C8rbprTtQUCvkuVBRu3v9wvUvjafCMFqoSPvTEVii","balance":1000}
```
### Chain information
The wallet provides some commands to query information about the chain. These are under the `wallet chain-info` command.
```bash
Commands:
current-block-id Get current block id from sequencer
block Get block at id from sequencer
transaction Get transaction at hash from sequencer
```
For example, run this to find the current block id.
```bash
wallet chain-info current-block-id
# Output:
Last block id is 65537
```
### Automated Market Maker (AMM)
NSSA includes an AMM program that manages liquidity pools and enables swaps between custom tokens. To test this functionality, we first need to create a liquidity pool.
#### Creating a liquidity pool for a token pair
We start by creating a new pool for the tokens previously created. In return for providing liquidity, we will receive liquidity provider (LP) tokens, which represent our share of the pool and are required to withdraw liquidity later.
>[!NOTE]
> The AMM program does not currently charge swap fees or distribute rewards to liquidity providers. LP tokens therefore only represent a proportional share of the pool reserves and do not provide additional value from swap activity. Fee support for liquidity providers will be added in future versions of the AMM program.
To hold these LP tokens, we first create a new account:
```bash
wallet account new public
# Output:
Generated new account with account_id Public/FHgLW9jW4HXMV6egLWbwpTqVAGiCHw2vkg71KYSuimVf
```
Next, we initialize the liquidity pool by depositing tokens A and B and specifying the account that will receive the LP tokens:
```bash
wallet amm new \
--user-holding-a Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw \
--user-holding-b Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6 \
--user-holding-lp Public/FHgLW9jW4HXMV6egLWbwpTqVAGiCHw2vkg71KYSuimVf \
--balance-a 100 \
--balance-b 200
```
The newly created account is owned by the token program, meaning that LP tokens are managed by the same token infrastructure as regular tokens.
```bash
wallet account get --account-id Public/FHgLW9jW4HXMV6egLWbwpTqVAGiCHw2vkg71KYSuimVf
# Output:
Holding account owned by token program
{"account_type":"Token holding","definition_id":"7BeDS3e28MA5Err7gBswmR1fUKdHXqmUpTefNPu3pJ9i","balance":100}
```
If you inspect the `user-holding-a` and `user-holding-b` accounts passed to the `wallet amm new` command, you will see that 100 and 200 tokens were deducted, respectively. These tokens now reside in the liquidity pool and are available for swaps by any user.
#### Swapping
Token swaps can be performed using the `wallet amm swap` command:
```bash
wallet amm swap \
--user-holding-a Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw \
--user-holding-b Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6 \
# The amount of tokens to swap
--amount-in 5 \
# The minimum number of tokens expected in return
--min-amount-out 8 \
# The definition ID of the token being provided to the swap
# In this case, we are swapping from TOKENA to TOKENB, and so this is the definition ID of TOKENA
--token-definition 4X9kAcnCZ1Ukkbm3nywW9xfCNPK8XaMWCk3zfs1sP4J7
```
Once executed, 5 tokens are deducted from the Token A holding account and the corresponding amount (determined by the pools pricing function) is credited to the Token B holding account.
#### Withdrawing liquidity from the pool
Liquidity providers can withdraw assets from the pool by redeeming (burning) LP tokens. The amount of tokens received is proportional to the share of LP tokens being redeemed relative to the total LP supply.
This operation is performed using the `wallet amm remove-liquidity` command:
```bash
wallet amm remove-liquidity \
--user-holding-a Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw \
--user-holding-b Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6 \
--user-holding-lp Public/FHgLW9jW4HXMV6egLWbwpTqVAGiCHw2vkg71KYSuimVf \
--balance-lp 20 \
--min-amount-a 1 \
--min-amount-b 1
```
This instruction burns `balance-lp` LP tokens from the users LP holding account. In exchange, the AMM transfers tokens A and B from the pools vault accounts to the users holding accounts, according to the current pool reserves.
The `min-amount-a` and `min-amount-b` parameters specify the minimum acceptable amounts of tokens A and B to be received. If the computed outputs fall below either threshold, the instruction fails, protecting the user against unfavorable pool state changes.
#### Adding liquidity to the pool
Additional liquidity can be added to an existing pool by depositing tokens A and B in the ratio implied by the current pool reserves. In return, new LP tokens are minted to represent the users proportional share of the pool.
This is done using the `wallet amm add-liquidity` command:
```bash
wallet amm add-liquidity \
--user-holding-a Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw \
--user-holding-b Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6 \
--user-holding-lp Public/FHgLW9jW4HXMV6egLWbwpTqVAGiCHw2vkg71KYSuimVf \
--min-amount-lp 1 \
--max-amount-a 10 \
--max-amount-b 10
```
In this instruction, `max-amount-a` and `max-amount-b` define upper bounds on the number of tokens A and B that may be withdrawn from the users accounts. The AMM computes the actual required amounts based on the pools reserve ratio.
The `min-amount-lp` parameter specifies the minimum number of LP tokens that must be minted for the transaction to succeed. If the resulting LP token amount is below this threshold, the instruction fails.

32
bedrock/README.md Normal file
View File

@ -0,0 +1,32 @@
# Bedrock Configuration Files for All-in-One run and Integration Tests
## How to update
- `docker-compose.yml` file.
Compare with `https://github.com/logos-blockchain/logos-blockchain/blob/master/compose.static.yml` and update the file accordingly; don't bring in unneeded things like Grafana, etc.
Replace `sha` hash with the latest `testnet` tag hash.
- `scripts` folder.
```bash
curl https://raw.githubusercontent.com/logos-blockchain/logos-blockchain/master/testnet/scripts/run_cfgsync.sh > scripts/run_cfgsync.sh
curl https://raw.githubusercontent.com/logos-blockchain/logos-blockchain/master/testnet/scripts/run_logos_blockchain_node.sh > scripts/run_logos_blockchain_node.sh
chmod +x scripts/*
```
Then in `scripts/run_logos_blockchain_node.sh` update `cfgsync-client` to `logos-blockchain-cfgsync-client` and in `scripts/run_cfgsync.sh` update `cfgsync-server` to `logos-blockchain-cfgsync-server` if it hasn't been fixed already, see <https://github.com/logos-blockchain/logos-blockchain/pull/2092>.
- `cfgsync.yaml` file.
```bash
curl -O https://raw.githubusercontent.com/logos-blockchain/logos-blockchain/master/testnet/cfgsync.yaml
```
Set `logger`, `tracing` and `metrics` to `None`
- `kzgrs_test_params` file.
```bash
curl -O https://raw.githubusercontent.com/logos-blockchain/logos-blockchain/master/tests/kzgrs/kzgrs_test_params
```

12
bedrock/cfgsync.yaml Normal file
View File

@ -0,0 +1,12 @@
port: 4400
n_hosts: 4
timeout: 10
# Tracing
tracing_settings:
logger: Stdout
tracing: None
filter: None
metrics: None
console: None
level: DEBUG

View File

@ -0,0 +1,47 @@
services:
cfgsync:
image: ghcr.io/logos-blockchain/logos-blockchain@sha256:000982e751dfd346ca5346b8025c685fc3abc585079c59cde3bde7fd63100657
volumes:
- ./scripts:/etc/logos-blockchain/scripts
- ./cfgsync.yaml:/etc/logos-blockchain/cfgsync.yaml:z
entrypoint: /etc/logos-blockchain/scripts/run_cfgsync.sh
logos-blockchain-node-0:
image: ghcr.io/logos-blockchain/logos-blockchain@sha256:000982e751dfd346ca5346b8025c685fc3abc585079c59cde3bde7fd63100657
ports:
# Map 0 port so that multiple instances can run on the same host
- "0:18080/tcp"
volumes:
- ./scripts:/etc/logos-blockchain/scripts
- ./kzgrs_test_params:/kzgrs_test_params:z
depends_on:
- cfgsync
entrypoint: /etc/logos-blockchain/scripts/run_logos_blockchain_node.sh
logos-blockchain-node-1:
image: ghcr.io/logos-blockchain/logos-blockchain@sha256:000982e751dfd346ca5346b8025c685fc3abc585079c59cde3bde7fd63100657
volumes:
- ./scripts:/etc/logos-blockchain/scripts
- ./kzgrs_test_params:/kzgrs_test_params:z
depends_on:
- cfgsync
entrypoint: /etc/logos-blockchain/scripts/run_logos_blockchain_node.sh
logos-blockchain-node-2:
image: ghcr.io/logos-blockchain/logos-blockchain@sha256:000982e751dfd346ca5346b8025c685fc3abc585079c59cde3bde7fd63100657
volumes:
- ./scripts:/etc/logos-blockchain/scripts
- ./kzgrs_test_params:/kzgrs_test_params:z
depends_on:
- cfgsync
entrypoint: /etc/logos-blockchain/scripts/run_logos_blockchain_node.sh
logos-blockchain-node-3:
image: ghcr.io/logos-blockchain/logos-blockchain@sha256:000982e751dfd346ca5346b8025c685fc3abc585079c59cde3bde7fd63100657
volumes:
- ./scripts:/etc/logos-blockchain/scripts
- ./kzgrs_test_params:/kzgrs_test_params:z
depends_on:
- cfgsync
entrypoint: /etc/logos-blockchain/scripts/run_logos_blockchain_node.sh

BIN
bedrock/kzgrs_test_params Normal file

Binary file not shown.

5
bedrock/scripts/run_cfgsync.sh Executable file
View File

@ -0,0 +1,5 @@
#!/bin/sh
set -e
exec /usr/bin/logos-blockchain-cfgsync-server /etc/logos-blockchain/cfgsync.yaml

View File

@ -0,0 +1,13 @@
#!/bin/sh
set -e
export CFG_FILE_PATH="/config.yaml" \
CFG_SERVER_ADDR="http://cfgsync:4400" \
CFG_HOST_IP=$(hostname -i) \
CFG_HOST_IDENTIFIER="validator-$(hostname -i)" \
LOG_LEVEL="INFO" \
POL_PROOF_DEV_MODE=true
/usr/bin/logos-blockchain-cfgsync-client && \
exec /usr/bin/logos-blockchain-node /config.yaml

View File

@ -5,6 +5,8 @@ edition = "2024"
license = { workspace = true }
[dependencies]
common.workspace = true
reqwest.workspace = true
anyhow.workspace = true
tokio-retry.workspace = true

View File

@ -1,20 +1,32 @@
use anyhow::Result;
use std::time::Duration;
use anyhow::{Context as _, Result};
use common::config::BasicAuth;
use futures::{Stream, TryFutureExt};
use log::warn;
use log::{info, warn};
pub use logos_blockchain_chain_broadcast_service::BlockInfo;
pub use logos_blockchain_common_http_client::{BasicAuthCredentials, CommonHttpClient, Error};
pub use logos_blockchain_common_http_client::{CommonHttpClient, Error};
pub use logos_blockchain_core::{block::Block, header::HeaderId, mantle::SignedMantleTx};
use reqwest::{Client, Url};
use serde::{Deserialize, Serialize};
use tokio_retry::Retry;
/// Fibonacci backoff retry strategy configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
#[derive(Debug, Copy, Clone, Serialize, Deserialize)]
pub struct BackoffConfig {
pub start_delay_millis: u64,
pub max_retries: usize,
}
impl Default for BackoffConfig {
fn default() -> Self {
Self {
start_delay_millis: 100,
max_retries: 5,
}
}
}
// Simple wrapper
// maybe extend in the future for our purposes
// `Clone` is cheap because `CommonHttpClient` is internally reference counted (`Arc`).
@ -22,26 +34,37 @@ pub struct BackoffConfig {
pub struct BedrockClient {
http_client: CommonHttpClient,
node_url: Url,
backoff: BackoffConfig,
}
impl BedrockClient {
pub fn new(auth: Option<BasicAuthCredentials>, node_url: Url) -> Result<Self> {
pub fn new(backoff: BackoffConfig, node_url: Url, auth: Option<BasicAuth>) -> Result<Self> {
info!("Creating Bedrock client with node URL {node_url}");
let client = Client::builder()
//Add more fields if needed
.timeout(std::time::Duration::from_secs(60))
.build()?;
.build()
.context("Failed to build HTTP client")?;
let auth = auth.map(|a| {
logos_blockchain_common_http_client::BasicAuthCredentials::new(a.username, a.password)
});
let http_client = CommonHttpClient::new_with_client(client, auth);
Ok(Self {
http_client,
node_url,
backoff,
})
}
pub async fn post_transaction(&self, tx: SignedMantleTx) -> Result<(), Error> {
self.http_client
.post_transaction(self.node_url.clone(), tx)
.await
Retry::spawn(self.backoff_strategy(), || {
self.http_client
.post_transaction(self.node_url.clone(), tx.clone())
.inspect_err(|err| warn!("Transaction posting failed with error: {err:#}"))
})
.await
}
pub async fn get_lib_stream(&self) -> Result<impl Stream<Item = BlockInfo>, Error> {
@ -51,17 +74,17 @@ impl BedrockClient {
pub async fn get_block_by_id(
&self,
header_id: HeaderId,
backoff: &BackoffConfig,
) -> Result<Option<Block<SignedMantleTx>>, Error> {
let strategy =
tokio_retry::strategy::FibonacciBackoff::from_millis(backoff.start_delay_millis)
.take(backoff.max_retries);
Retry::spawn(strategy, || {
Retry::spawn(self.backoff_strategy(), || {
self.http_client
.get_block_by_id(self.node_url.clone(), header_id)
.inspect_err(|err| warn!("Block fetching failed with err: {err:#?}"))
.inspect_err(|err| warn!("Block fetching failed with error: {err:#}"))
})
.await
}
fn backoff_strategy(&self) -> impl Iterator<Item = Duration> {
tokio_retry::strategy::FibonacciBackoff::from_millis(self.backoff.start_delay_millis)
.take(self.backoff.max_retries)
}
}

View File

@ -1,84 +0,0 @@
#!/usr/bin/env bash
set -e

# Base directory for deployment
LSSA_DIR="/home/arjentix/test_deploy/lssa"

# Expect GITHUB_ACTOR to be passed as first argument or environment variable
GITHUB_ACTOR="${1:-${GITHUB_ACTOR:-unknown}}"

# Append a timestamped message to the persistent deploy log.
log_deploy() {
    echo "[$(date '+%Y-%m-%d %H:%M:%S %Z')] $1" >> "${LSSA_DIR}/deploy.log"
}

# Error handler: record the failure and abort the deployment.
handle_error() {
    echo "✗ Deployment failed by: ${GITHUB_ACTOR}"
    log_deploy "Deployment failed by: ${GITHUB_ACTOR}"
    exit 1
}

# List PIDs of running sequencer_runner processes, excluding this script.
# `grep -vx` matches the whole line, so (unlike `grep -v $$`) PID 1234 is
# not dropped just because this shell's PID 123 is a substring of it.
find_sequencer_runner_pids() {
    pgrep -f "sequencer_runner" | grep -vx "$$"
}

# Set trap to catch any errors
trap 'handle_error' ERR

# Log deployment info
log_deploy "Deployment initiated by: ${GITHUB_ACTOR}"

# Navigate to code directory, creating it on first deploy
if [ ! -d "${LSSA_DIR}/code" ]; then
    mkdir -p "${LSSA_DIR}/code"
fi
cd "${LSSA_DIR}/code"

# Stop current sequencer if running: graceful SIGINT first, SIGKILL after a grace period
if find_sequencer_runner_pids > /dev/null; then
    echo "Stopping current sequencer..."
    find_sequencer_runner_pids | xargs -r kill -SIGINT || true
    sleep 2
    # Force kill if still running (helper already excludes this script's PID)
    find_sequencer_runner_pids | xargs -r kill -9 || true
fi

# Clone or update repository
if [ -d ".git" ]; then
    echo "Updating existing repository..."
    git fetch origin
    git checkout main
    git reset --hard origin/main
else
    echo "Cloning repository..."
    git clone https://github.com/logos-blockchain/lssa.git .
    git checkout main
fi

# Build sequencer_runner and wallet in release mode
echo "Building sequencer_runner"
# That could be just `cargo build --release --bin sequencer_runner --bin wallet`
# but we have `no_docker` feature bug, see issue #179
cd sequencer_runner
cargo build --release
cd ../wallet
cargo build --release
cd ..

# Run sequencer_runner with config, detached from this session
echo "Starting sequencer_runner..."
export RUST_LOG=info
nohup ./target/release/sequencer_runner "${LSSA_DIR}/configs/sequencer" > "${LSSA_DIR}/sequencer.log" 2>&1 &

# Wait 5 seconds and check health using wallet
sleep 5
if ./target/release/wallet check-health; then
    echo "✓ Sequencer started successfully and is healthy"
    log_deploy "Deployment completed successfully by: ${GITHUB_ACTOR}"
    exit 0
else
    echo "✗ Sequencer failed health check"
    tail -n 50 "${LSSA_DIR}/sequencer.log"
    handle_error
fi

View File

@ -12,6 +12,7 @@ anyhow.workspace = true
thiserror.workspace = true
serde_json.workspace = true
serde.workspace = true
serde_with.workspace = true
reqwest.workspace = true
sha2.workspace = true
log.workspace = true

View File

@ -1,10 +1,10 @@
use borsh::{BorshDeserialize, BorshSerialize};
use nssa::AccountId;
use serde::{Deserialize, Serialize};
use sha2::{Digest, Sha256, digest::FixedOutput};
use crate::transaction::EncodedTransaction;
use crate::{HashType, transaction::NSSATransaction};
pub type HashType = [u8; 32];
pub type MantleMsgId = [u8; 32];
#[derive(Debug, Clone)]
@ -17,11 +17,11 @@ impl OwnHasher {
let mut hasher = Sha256::new();
hasher.update(data);
<HashType>::from(hasher.finalize_fixed())
HashType(<[u8; 32]>::from(hasher.finalize_fixed()))
}
}
pub type BlockHash = [u8; 32];
pub type BlockHash = HashType;
pub type BlockId = u64;
pub type TimeStamp = u64;
@ -36,7 +36,7 @@ pub struct BlockHeader {
#[derive(Debug, Clone, BorshSerialize, BorshDeserialize)]
pub struct BlockBody {
pub transactions: Vec<EncodedTransaction>,
pub transactions: Vec<NSSATransaction>,
}
#[derive(Debug, Clone, BorshSerialize, BorshDeserialize)]
@ -59,7 +59,7 @@ pub struct HashableBlockData {
pub block_id: BlockId,
pub prev_block_hash: BlockHash,
pub timestamp: TimeStamp,
pub transactions: Vec<EncodedTransaction>,
pub transactions: Vec<NSSATransaction>,
}
impl HashableBlockData {
@ -106,8 +106,7 @@ impl From<Block> for HashableBlockData {
#[derive(Debug, Serialize, Deserialize, Clone)]
/// Helperstruct for account serialization
pub struct AccountInitialData {
/// Hex encoded account id
pub account_id: String,
pub account_id: AccountId,
pub balance: u128,
}
@ -120,12 +119,12 @@ pub struct CommitmentsInitialData {
#[cfg(test)]
mod tests {
use crate::{block::HashableBlockData, test_utils};
use crate::{HashType, block::HashableBlockData, test_utils};
#[test]
fn test_encoding_roundtrip() {
let transactions = vec![test_utils::produce_dummy_empty_transaction()];
let block = test_utils::produce_dummy_block(1, Some([1; 32]), transactions);
let block = test_utils::produce_dummy_block(1, Some(HashType([1; 32])), transactions);
let hashable = HashableBlockData::from(block);
let bytes = borsh::to_vec(&hashable).unwrap();
let block_from_bytes = borsh::from_slice::<HashableBlockData>(&bytes).unwrap();

View File

@ -1,3 +1,8 @@
use std::{fmt::Display, str::FromStr};
use borsh::{BorshDeserialize, BorshSerialize};
use serde_with::{DeserializeFromStr, SerializeDisplay};
pub mod block;
pub mod config;
pub mod error;
@ -11,4 +16,78 @@ pub mod test_utils;
pub const PINATA_BASE58: &str = "EfQhKQAkX2FJiwNii2WFQsGndjvF1Mzd7RuVe7QdPLw7";
pub type HashType = [u8; 32];
/// 32-byte hash value used as a newtype throughout the codebase.
///
/// Human-readable serialization goes through `Display`/`FromStr` as a hex
/// string (via `SerializeDisplay`/`DeserializeFromStr`); binary
/// serialization uses borsh on the raw bytes.
#[derive(
    Debug,
    Default,
    Copy,
    Clone,
    PartialEq,
    Eq,
    Hash,
    SerializeDisplay,
    DeserializeFromStr,
    BorshSerialize,
    BorshDeserialize,
)]
pub struct HashType(pub [u8; 32]);
impl Display for HashType {
    /// Formats the hash as a lowercase hex string.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(&hex::encode(self.0))
    }
}
impl FromStr for HashType {
type Err = hex::FromHexError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let mut bytes = [0u8; 32];
hex::decode_to_slice(s, &mut bytes)?;
Ok(HashType(bytes))
}
}
impl AsRef<[u8]> for HashType {
    /// Borrows the raw 32-byte digest as a byte slice.
    fn as_ref(&self) -> &[u8] {
        self.0.as_slice()
    }
}
impl From<HashType> for [u8; 32] {
fn from(hash: HashType) -> Self {
hash.0
}
}
impl From<[u8; 32]> for HashType {
fn from(bytes: [u8; 32]) -> Self {
HashType(bytes)
}
}
impl TryFrom<Vec<u8>> for HashType {
type Error = <[u8; 32] as TryFrom<Vec<u8>>>::Error;
fn try_from(value: Vec<u8>) -> Result<Self, Self::Error> {
Ok(HashType(value.try_into()?))
}
}
impl From<HashType> for Vec<u8> {
    /// Copies the 32-byte digest into a freshly allocated vector.
    fn from(hash: HashType) -> Self {
        Vec::from(hash.0)
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// `Display` and `FromStr` must round-trip: hex-encode a hash, then
    /// parse it back and compare with the original.
    #[test]
    fn serialization_roundtrip() {
        let hash = HashType([1u8; 32]);
        let encoded = hash.to_string();
        assert_eq!(hash, encoded.parse::<HashType>().unwrap());
    }
}

View File

@ -1,5 +1,6 @@
use std::collections::HashMap;
use nssa::AccountId;
use nssa_core::program::ProgramId;
use serde::{Deserialize, Serialize};
use serde_json::Value;
@ -8,7 +9,7 @@ use super::{
errors::RpcParseError,
parser::{RpcRequest, parse_params},
};
use crate::parse_request;
use crate::{HashType, parse_request};
#[derive(Serialize, Deserialize, Debug)]
pub struct HelloRequest {}
@ -50,22 +51,22 @@ pub struct GetInitialTestnetAccountsRequest {}
#[derive(Serialize, Deserialize, Debug)]
pub struct GetAccountBalanceRequest {
pub account_id: String,
pub account_id: AccountId,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct GetTransactionByHashRequest {
pub hash: String,
pub hash: HashType,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct GetAccountsNoncesRequest {
pub account_ids: Vec<String>,
pub account_ids: Vec<AccountId>,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct GetAccountRequest {
pub account_id: String,
pub account_id: AccountId,
}
#[derive(Serialize, Deserialize, Debug)]
@ -105,7 +106,7 @@ pub struct RegisterAccountResponse {
#[derive(Serialize, Deserialize, Debug)]
pub struct SendTxResponse {
pub status: String,
pub tx_hash: String,
pub tx_hash: HashType,
}
#[derive(Serialize, Deserialize, Debug)]

View File

@ -1,6 +1,7 @@
use std::{collections::HashMap, ops::RangeInclusive};
use anyhow::Result;
use nssa::AccountId;
use nssa_core::program::ProgramId;
use reqwest::Client;
use serde::Deserialize;
@ -12,6 +13,7 @@ use super::rpc_primitives::requests::{
GetGenesisIdRequest, GetGenesisIdResponse, GetInitialTestnetAccountsRequest,
};
use crate::{
HashType,
block::Block,
config::BasicAuth,
error::{SequencerClientError, SequencerRpcError},
@ -27,7 +29,7 @@ use crate::{
SendTxResponse,
},
},
transaction::{EncodedTransaction, NSSATransaction},
transaction::NSSATransaction,
};
#[derive(Clone)]
@ -148,7 +150,7 @@ impl SequencerClient {
/// bytes.
pub async fn get_account_balance(
&self,
account_id: String,
account_id: AccountId,
) -> Result<GetAccountBalanceResponse, SequencerClientError> {
let block_req = GetAccountBalanceRequest { account_id };
@ -167,7 +169,7 @@ impl SequencerClient {
/// 32 bytes.
pub async fn get_accounts_nonces(
&self,
account_ids: Vec<String>,
account_ids: Vec<AccountId>,
) -> Result<GetAccountsNoncesResponse, SequencerClientError> {
let block_req = GetAccountsNoncesRequest { account_ids };
@ -184,7 +186,7 @@ impl SequencerClient {
pub async fn get_account(
&self,
account_id: String,
account_id: AccountId,
) -> Result<GetAccountResponse, SequencerClientError> {
let block_req = GetAccountRequest { account_id };
@ -200,7 +202,7 @@ impl SequencerClient {
/// Get transaction details for `hash`.
pub async fn get_transaction_by_hash(
&self,
hash: String,
hash: HashType,
) -> Result<GetTransactionByHashResponse, SequencerClientError> {
let block_req = GetTransactionByHashRequest { hash };
@ -220,7 +222,7 @@ impl SequencerClient {
&self,
transaction: nssa::PublicTransaction,
) -> Result<SendTxResponse, SequencerClientError> {
let transaction = EncodedTransaction::from(NSSATransaction::Public(transaction));
let transaction = NSSATransaction::Public(transaction);
let tx_req = SendTxRequest {
transaction: borsh::to_vec(&transaction).unwrap(),
@ -240,7 +242,7 @@ impl SequencerClient {
&self,
transaction: nssa::PrivacyPreservingTransaction,
) -> Result<SendTxResponse, SequencerClientError> {
let transaction = EncodedTransaction::from(NSSATransaction::PrivacyPreserving(transaction));
let transaction = NSSATransaction::PrivacyPreserving(transaction);
let tx_req = SendTxRequest {
transaction: borsh::to_vec(&transaction).unwrap(),
@ -332,7 +334,7 @@ impl SequencerClient {
&self,
transaction: nssa::ProgramDeploymentTransaction,
) -> Result<SendTxResponse, SequencerClientError> {
let transaction = EncodedTransaction::from(NSSATransaction::ProgramDeployment(transaction));
let transaction = NSSATransaction::ProgramDeployment(transaction);
let tx_req = SendTxRequest {
transaction: borsh::to_vec(&transaction).unwrap(),

View File

@ -1,6 +1,9 @@
use nssa::AccountId;
use crate::{
HashType,
block::{Block, HashableBlockData},
transaction::{EncodedTransaction, NSSATransaction},
transaction::NSSATransaction,
};
// Helpers
@ -20,8 +23,8 @@ pub fn sequencer_sign_key_for_testing() -> nssa::PrivateKey {
/// `transactions` - vector of `NSSATransaction` objects
pub fn produce_dummy_block(
id: u64,
prev_hash: Option<[u8; 32]>,
transactions: Vec<EncodedTransaction>,
prev_hash: Option<HashType>,
transactions: Vec<NSSATransaction>,
) -> Block {
let block_data = HashableBlockData {
block_id: id,
@ -33,7 +36,7 @@ pub fn produce_dummy_block(
block_data.into_pending_block(&sequencer_sign_key_for_testing(), [0; 32])
}
pub fn produce_dummy_empty_transaction() -> EncodedTransaction {
pub fn produce_dummy_empty_transaction() -> NSSATransaction {
let program_id = nssa::program::Program::authenticated_transfer_program().id();
let account_ids = vec![];
let nonces = vec![];
@ -50,17 +53,17 @@ pub fn produce_dummy_empty_transaction() -> EncodedTransaction {
let nssa_tx = nssa::PublicTransaction::new(message, witness_set);
EncodedTransaction::from(NSSATransaction::Public(nssa_tx))
NSSATransaction::Public(nssa_tx)
}
pub fn create_transaction_native_token_transfer(
from: [u8; 32],
from: AccountId,
nonce: u128,
to: [u8; 32],
to: AccountId,
balance_to_move: u128,
signing_key: nssa::PrivateKey,
) -> EncodedTransaction {
let account_ids = vec![nssa::AccountId::new(from), nssa::AccountId::new(to)];
) -> NSSATransaction {
let account_ids = vec![from, to];
let nonces = vec![nonce];
let program_id = nssa::program::Program::authenticated_transfer_program().id();
let message = nssa::public_transaction::Message::try_new(
@ -74,5 +77,5 @@ pub fn create_transaction_native_token_transfer(
let nssa_tx = nssa::PublicTransaction::new(message, witness_set);
EncodedTransaction::from(NSSATransaction::Public(nssa_tx))
NSSATransaction::Public(nssa_tx)
}

View File

@ -1,20 +1,29 @@
use std::fmt::Display;
use borsh::{BorshDeserialize, BorshSerialize};
use log::{info, warn};
use log::warn;
use nssa::{AccountId, V02State};
use serde::{Deserialize, Serialize};
use sha2::{Digest, digest::FixedOutput};
pub type HashType = [u8; 32];
use crate::HashType;
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)]
pub enum NSSATransaction {
Public(nssa::PublicTransaction),
PrivacyPreserving(nssa::PrivacyPreservingTransaction),
ProgramDeployment(nssa::ProgramDeploymentTransaction),
}
impl NSSATransaction {
    /// Returns the hash of the underlying transaction, wrapped in `HashType`.
    /// Each variant delegates to its inner transaction's own `hash()`.
    pub fn hash(&self) -> HashType {
        let raw = match self {
            Self::Public(tx) => tx.hash(),
            Self::PrivacyPreserving(tx) => tx.hash(),
            Self::ProgramDeployment(tx) => tx.hash(),
        };
        HashType(raw)
    }
}
impl From<nssa::PublicTransaction> for NSSATransaction {
fn from(value: nssa::PublicTransaction) -> Self {
Self::Public(value)
@ -52,67 +61,6 @@ pub enum TxKind {
ProgramDeployment,
}
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)]
/// General transaction object
pub struct EncodedTransaction {
pub tx_kind: TxKind,
/// Encoded blobs of data
pub encoded_transaction_data: Vec<u8>,
}
impl From<NSSATransaction> for EncodedTransaction {
fn from(value: NSSATransaction) -> Self {
match value {
NSSATransaction::Public(tx) => Self {
tx_kind: TxKind::Public,
encoded_transaction_data: tx.to_bytes(),
},
NSSATransaction::PrivacyPreserving(tx) => Self {
tx_kind: TxKind::PrivacyPreserving,
encoded_transaction_data: tx.to_bytes(),
},
NSSATransaction::ProgramDeployment(tx) => Self {
tx_kind: TxKind::ProgramDeployment,
encoded_transaction_data: tx.to_bytes(),
},
}
}
}
impl TryFrom<&EncodedTransaction> for NSSATransaction {
type Error = nssa::error::NssaError;
fn try_from(value: &EncodedTransaction) -> Result<Self, Self::Error> {
match value.tx_kind {
TxKind::Public => nssa::PublicTransaction::from_bytes(&value.encoded_transaction_data)
.map(|tx| tx.into()),
TxKind::PrivacyPreserving => {
nssa::PrivacyPreservingTransaction::from_bytes(&value.encoded_transaction_data)
.map(|tx| tx.into())
}
TxKind::ProgramDeployment => {
nssa::ProgramDeploymentTransaction::from_bytes(&value.encoded_transaction_data)
.map(|tx| tx.into())
}
}
}
}
impl EncodedTransaction {
/// Computes and returns the SHA-256 hash of the JSON-serialized representation of `self`.
pub fn hash(&self) -> HashType {
let bytes_to_hash = borsh::to_vec(&self).unwrap();
let mut hasher = sha2::Sha256::new();
hasher.update(&bytes_to_hash);
HashType::from(hasher.finalize_fixed())
}
pub fn log(&self) {
info!("Transaction hash is {:?}", hex::encode(self.hash()));
info!("Transaction tx_kind is {:?}", self.tx_kind);
}
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub enum TransactionMalformationError {
InvalidSignature,
@ -168,45 +116,3 @@ pub fn execute_check_transaction_on_state(
Ok(tx)
}
#[cfg(test)]
mod tests {
use sha2::{Digest, digest::FixedOutput};
use crate::{
HashType,
transaction::{EncodedTransaction, TxKind},
};
fn test_transaction_body() -> EncodedTransaction {
EncodedTransaction {
tx_kind: TxKind::Public,
encoded_transaction_data: vec![1, 2, 3, 4],
}
}
#[test]
fn test_transaction_hash_is_sha256_of_json_bytes() {
let body = test_transaction_body();
let expected_hash = {
let data = borsh::to_vec(&body).unwrap();
let mut hasher = sha2::Sha256::new();
hasher.update(&data);
HashType::from(hasher.finalize_fixed())
};
let hash = body.hash();
assert_eq!(expected_hash, hash);
}
#[test]
fn test_to_bytes_from_bytes() {
let body = test_transaction_body();
let body_bytes = borsh::to_vec(&body).unwrap();
let body_new = borsh::from_slice::<EncodedTransaction>(&body_bytes).unwrap();
assert_eq!(body, body_new);
}
}

View File

@ -0,0 +1,11 @@
{
"resubscribe_interval_millis": 1000,
"bedrock_client_config": {
"addr": "http://logos-blockchain-node-0:18080",
"backoff": {
"start_delay_millis": 100,
"max_retries": 5
}
},
"channel_id": "0101010101010101010101010101010101010101010101010101010101010101"
}

View File

@ -1,13 +1,22 @@
{
"home": "",
"home": "/var/lib/sequencer_runner",
"override_rust_log": null,
"genesis_id": 1,
"is_genesis_random": true,
"max_num_tx_in_block": 20,
"mempool_max_size": 10000,
"block_create_timeout_millis": 10000,
"retry_pending_blocks_timeout_millis": 240000,
"port": 0,
"retry_pending_blocks_timeout_millis": 7000,
"port": 3040,
"bedrock_config": {
"backoff": {
"start_delay_millis": 100,
"max_retries": 5
},
"channel_id": "0101010101010101010101010101010101010101010101010101010101010101",
"node_url": "http://logos-blockchain-node-0:18080"
},
"indexer_rpc_url": "ws://indexer_service:8779",
"initial_accounts": [
{
"account_id": "BLgCRDXYdQPMMWVHYRFGQZbgeHx9frkipa8GtpG2Syqy",
@ -155,12 +164,5 @@
37,
37,
37
],
"bedrock_config": {
"channel_id": "0101010101010101010101010101010101010101010101010101010101010101",
"node_url": "http://127.0.0.1:8080",
"auth": {
"username": "user"
}
}
}
]
}

View File

@ -0,0 +1,27 @@
# This file is automatically applied on top of docker-compose.yml when running `docker compose` commands.
services:
logos-blockchain-node-0:
ports: !override
- "18080:18080/tcp"
environment:
- RUST_LOG=error
sequencer_runner:
depends_on:
- logos-blockchain-node-0
- indexer_service
volumes: !override
- ./configs/docker-all-in-one/sequencer:/etc/sequencer_runner
indexer_service:
depends_on:
- logos-blockchain-node-0
volumes:
- ./configs/docker-all-in-one/indexer/indexer_config.json:/etc/indexer_service/indexer_config.json
explorer_service:
depends_on:
- indexer_service
environment:
- INDEXER_RPC_URL=http://indexer_service:8779

13
docker-compose.yml Normal file
View File

@ -0,0 +1,13 @@
# All-in-one docker compose configuration.
# It runs all services from this repo and the bedrock nodes in a single docker network.
# This is useful for development and testing purposes.
include:
- path:
bedrock/docker-compose.yml
- path:
sequencer_runner/docker-compose.yml
- path:
indexer/service/docker-compose.yml
- path:
explorer_service/docker-compose.yml

View File

@ -49,7 +49,7 @@ async fn main() {
let signing_key = wallet_core
.storage()
.user_data
.get_pub_account_signing_key(&account_id)
.get_pub_account_signing_key(account_id)
.expect("Input account should be a self owned public account");
// Define the desired greeting in ASCII

View File

@ -1,4 +1,4 @@
use indexer_service_protocol::{Account, AccountId, Block, BlockId, Hash, Transaction};
use indexer_service_protocol::{Account, AccountId, Block, BlockId, HashType, Transaction};
use leptos::prelude::*;
use serde::{Deserialize, Serialize};
@ -7,7 +7,7 @@ use serde::{Deserialize, Serialize};
pub struct SearchResults {
pub blocks: Vec<Block>,
pub transactions: Vec<Transaction>,
pub accounts: Vec<(AccountId, Option<Account>)>,
pub accounts: Vec<(AccountId, Account)>,
}
/// RPC client type
@ -46,7 +46,7 @@ pub async fn search(query: String) -> Result<SearchResults, ServerFnError> {
if let Some(bytes) = parse_hex(&query)
&& let Ok(hash_array) = <[u8; 32]>::try_from(bytes)
{
let hash = Hash(hash_array);
let hash = HashType(hash_array);
// Try as block hash
if let Ok(block) = client.get_block_by_hash(hash).await {
@ -60,14 +60,8 @@ pub async fn search(query: String) -> Result<SearchResults, ServerFnError> {
// Try as account ID
let account_id = AccountId { value: hash_array };
match client.get_account(account_id).await {
Ok(account) => {
accounts.push((account_id, Some(account)));
}
Err(_) => {
// Account might not exist yet, still add it to results
accounts.push((account_id, None));
}
if let Ok(account) = client.get_account(account_id).await {
accounts.push((account_id, account));
}
}
@ -98,7 +92,7 @@ pub async fn get_block_by_id(block_id: BlockId) -> Result<Block, ServerFnError>
/// Get block by hash
#[server]
pub async fn get_block_by_hash(block_hash: Hash) -> Result<Block, ServerFnError> {
pub async fn get_block_by_hash(block_hash: HashType) -> Result<Block, ServerFnError> {
use indexer_service_rpc::RpcClient as _;
let client = expect_context::<IndexerRpcClient>();
client
@ -109,7 +103,7 @@ pub async fn get_block_by_hash(block_hash: Hash) -> Result<Block, ServerFnError>
/// Get transaction by hash
#[server]
pub async fn get_transaction(tx_hash: Hash) -> Result<Transaction, ServerFnError> {
pub async fn get_transaction(tx_hash: HashType) -> Result<Transaction, ServerFnError> {
use indexer_service_rpc::RpcClient as _;
let client = expect_context::<IndexerRpcClient>();
client

View File

@ -6,7 +6,7 @@ use crate::format_utils;
/// Account preview component
#[component]
pub fn AccountPreview(account_id: AccountId, account: Option<Account>) -> impl IntoView {
pub fn AccountPreview(account_id: AccountId, account: Account) -> impl IntoView {
let account_id_str = format_utils::format_account_id(&account_id);
view! {
@ -19,42 +19,31 @@ pub fn AccountPreview(account_id: AccountId, account: Option<Account>) -> impl I
</div>
</div>
{move || {
account
.as_ref()
.map(|Account { program_owner, balance, data, nonce }| {
let program_id = format_utils::format_program_id(program_owner);
view! {
<div class="account-preview-body">
<div class="account-field">
<span class="field-label">"Balance: "</span>
<span class="field-value">{balance.to_string()}</span>
</div>
<div class="account-field">
<span class="field-label">"Program: "</span>
<span class="field-value hash">{program_id}</span>
</div>
<div class="account-field">
<span class="field-label">"Nonce: "</span>
<span class="field-value">{nonce.to_string()}</span>
</div>
<div class="account-field">
<span class="field-label">"Data: "</span>
<span class="field-value">
{format!("{} bytes", data.0.len())}
</span>
</div>
</div>
}
.into_any()
})
.unwrap_or_else(|| {
view! {
<div class="account-preview-body">
<div class="account-not-found">"Account not found"</div>
</div>
}
.into_any()
})
let Account { program_owner, balance, data, nonce } = &account;
let program_id = format_utils::format_program_id(program_owner);
view! {
<div class="account-preview-body">
<div class="account-field">
<span class="field-label">"Balance: "</span>
<span class="field-value">{balance.to_string()}</span>
</div>
<div class="account-field">
<span class="field-label">"Program: "</span>
<span class="field-value hash">{program_id}</span>
</div>
<div class="account-field">
<span class="field-label">"Nonce: "</span>
<span class="field-value">{nonce.to_string()}</span>
</div>
<div class="account-field">
<span class="field-label">"Data: "</span>
<span class="field-value">
{format!("{} bytes", data.0.len())}
</span>
</div>
</div>
}
.into_any()
}}
</A>

View File

@ -1,4 +1,4 @@
use indexer_service_protocol::{BedrockStatus, Block, BlockBody, BlockHeader, BlockId, Hash};
use indexer_service_protocol::{BedrockStatus, Block, BlockBody, BlockHeader, BlockId, HashType};
use leptos::prelude::*;
use leptos_router::{components::A, hooks::use_params_map};
@ -7,7 +7,7 @@ use crate::{api, components::TransactionPreview, format_utils};
#[derive(Clone, PartialEq, Eq)]
enum BlockIdOrHash {
BlockId(BlockId),
Hash(Hash),
Hash(HashType),
}
/// Block page component
@ -29,7 +29,7 @@ pub fn BlockPage() -> impl IntoView {
if let Some(bytes) = format_utils::parse_hex(id_str)
&& let Ok(hash_array) = <[u8; 32]>::try_from(bytes)
{
return Some(BlockIdOrHash::Hash(Hash(hash_array)));
return Some(BlockIdOrHash::Hash(HashType(hash_array)));
}
None

View File

@ -1,5 +1,5 @@
use indexer_service_protocol::{
Hash, PrivacyPreservingMessage, PrivacyPreservingTransaction, ProgramDeploymentMessage,
HashType, PrivacyPreservingMessage, PrivacyPreservingTransaction, ProgramDeploymentMessage,
ProgramDeploymentTransaction, PublicMessage, PublicTransaction, Transaction, WitnessSet,
};
use leptos::prelude::*;
@ -18,7 +18,7 @@ pub fn TransactionPage() -> impl IntoView {
format_utils::parse_hex(&tx_hash_str).and_then(|bytes| {
if bytes.len() == 32 {
let hash_array: [u8; 32] = bytes.try_into().ok()?;
Some(Hash(hash_array))
Some(HashType(hash_array))
} else {
None
}

View File

@ -48,12 +48,12 @@ impl IndexerStore {
pub fn get_transaction_by_hash(&self, tx_hash: [u8; 32]) -> Result<NSSATransaction> {
let block = self.get_block_at_id(self.dbio.get_block_id_by_tx_hash(tx_hash)?)?;
let encoded_transaction = block
let transaction = block
.body
.transactions
.iter()
.find_map(|enc_tx| {
if enc_tx.hash() == tx_hash {
if enc_tx.hash().0 == tx_hash {
Some(enc_tx)
} else {
None
@ -61,7 +61,7 @@ impl IndexerStore {
})
.ok_or_else(|| anyhow::anyhow!("Transaction not found in DB"))?;
Ok(NSSATransaction::try_from(encoded_transaction)?)
Ok(transaction.clone())
}
pub fn get_block_by_hash(&self, hash: [u8; 32]) -> Result<Block> {
@ -98,17 +98,16 @@ impl IndexerStore {
}
pub fn get_account_final(&self, account_id: &AccountId) -> Result<Account> {
Ok(self.final_state()?.get_account_by_id(account_id))
Ok(self.final_state()?.get_account_by_id(*account_id))
}
pub fn put_block(&self, block: Block) -> Result<()> {
let mut final_state = self.dbio.final_state()?;
for encoded_transaction in &block.body.transactions {
let transaction = NSSATransaction::try_from(encoded_transaction)?;
for transaction in &block.body.transactions {
execute_check_transaction_on_state(
&mut final_state,
transaction_pre_check(transaction)?,
transaction_pre_check(transaction.clone())?,
)?;
}

View File

@ -4,19 +4,22 @@ use std::{
path::{Path, PathBuf},
};
use anyhow::{Context, Result};
use bedrock_client::BackoffConfig;
use anyhow::{Context as _, Result};
pub use bedrock_client::BackoffConfig;
use common::{
block::{AccountInitialData, CommitmentsInitialData},
config::BasicAuth,
};
use logos_blockchain_core::mantle::ops::channel::ChannelId;
pub use logos_blockchain_core::mantle::ops::channel::ChannelId;
use serde::{Deserialize, Serialize};
use url::Url;
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ClientConfig {
/// For individual RPC requests we use Fibonacci backoff retry strategy.
pub backoff: BackoffConfig,
pub addr: Url,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub auth: Option<BasicAuth>,
}
@ -29,8 +32,6 @@ pub struct IndexerConfig {
/// List of initial commitments
pub initial_commitments: Vec<CommitmentsInitialData>,
pub resubscribe_interval_millis: u64,
/// For individual RPC requests we use Fibonacci backoff retry strategy.
pub backoff: BackoffConfig,
pub bedrock_client_config: ClientConfig,
pub sequencer_client_config: ClientConfig,
pub channel_id: ChannelId,

View File

@ -50,7 +50,7 @@ impl IndexerCore {
let init_accs: Vec<(nssa::AccountId, u128)> = config
.initial_accounts
.iter()
.map(|acc_data| (acc_data.account_id.parse().unwrap(), acc_data.balance))
.map(|acc_data| (acc_data.account_id, acc_data.balance))
.collect();
let mut state = nssa::V02State::new_with_genesis_accounts(&init_accs, &initial_commitments);
@ -62,8 +62,9 @@ impl IndexerCore {
Ok(Self {
bedrock_client: BedrockClient::new(
config.bedrock_client_config.auth.clone().map(Into::into),
config.bedrock_client_config.backoff,
config.bedrock_client_config.addr.clone(),
config.bedrock_client_config.auth.clone().map(Into::into),
)?,
sequencer_client,
config,
@ -86,7 +87,7 @@ impl IndexerCore {
if let Some(l1_block) = self
.bedrock_client
.get_block_by_id(header_id, &self.config.backoff)
.get_block_by_id(header_id)
.await?
{
info!("Extracted L1 block at height {}", block_info.height);

View File

@ -19,6 +19,7 @@ serde.workspace = true
serde_json.workspace = true
futures.workspace = true
async-trait = "0.1.89"
arc-swap = "1.8.1"
[features]
# Return mock responses with generated data for testing purposes

View File

@ -36,7 +36,9 @@ RUN strip /indexer_service/target/release/indexer_service
FROM debian:trixie-slim
# Create non-root user for security
RUN useradd -m -u 1000 -s /bin/bash indexer_service_user
RUN useradd -m -u 1000 -s /bin/bash indexer_service_user && \
mkdir -p /indexer_service /etc/indexer_service && \
chown -R indexer_service_user:indexer_service_user /indexer_service /etc/indexer_service
# Copy binary from builder
COPY --from=builder --chown=indexer_service_user:indexer_service_user /indexer_service/target/release/indexer_service /usr/local/bin/indexer_service
@ -61,4 +63,4 @@ ENV RUST_LOG=info
USER indexer_service_user
WORKDIR /indexer_service
CMD ["indexer_service"]
CMD ["indexer_service", "/etc/indexer_service/indexer_config.json"]

View File

@ -0,0 +1,11 @@
{
"resubscribe_interval_millis": 1000,
"bedrock_client_config": {
"addr": "http://localhost:18080",
"backoff": {
"start_delay_millis": 100,
"max_retries": 5
}
},
"channel_id": "0101010101010101010101010101010101010101010101010101010101010101"
}

View File

@ -2,8 +2,11 @@ services:
indexer_service:
image: lssa/indexer_service
build:
context: ..
dockerfile: indexer_service/Dockerfile
context: ../..
dockerfile: indexer/service/Dockerfile
container_name: indexer_service
ports:
- "8779:8779"
volumes:
# Mount configuration
- ./configs/indexer_config.json:/etc/indexer_service/indexer_config.json

View File

@ -381,7 +381,7 @@ impl TryFrom<WitnessSet> for nssa::privacy_preserving_transaction::witness_set::
impl From<nssa::PublicTransaction> for PublicTransaction {
fn from(value: nssa::PublicTransaction) -> Self {
let hash = Hash(value.hash());
let hash = HashType(value.hash());
let nssa::PublicTransaction {
message,
witness_set,
@ -430,7 +430,7 @@ impl TryFrom<PublicTransaction> for nssa::PublicTransaction {
impl From<nssa::PrivacyPreservingTransaction> for PrivacyPreservingTransaction {
fn from(value: nssa::PrivacyPreservingTransaction) -> Self {
let hash = Hash(value.hash());
let hash = HashType(value.hash());
let nssa::PrivacyPreservingTransaction {
message,
witness_set,
@ -467,7 +467,7 @@ impl TryFrom<PrivacyPreservingTransaction> for nssa::PrivacyPreservingTransactio
impl From<nssa::ProgramDeploymentTransaction> for ProgramDeploymentTransaction {
fn from(value: nssa::ProgramDeploymentTransaction) -> Self {
let hash = Hash(value.hash());
let hash = HashType(value.hash());
let nssa::ProgramDeploymentTransaction { message } = value;
Self {
@ -531,8 +531,8 @@ impl From<common::block::BlockHeader> for BlockHeader {
} = value;
Self {
block_id,
prev_block_hash: Hash(prev_block_hash),
hash: Hash(hash),
prev_block_hash: prev_block_hash.into(),
hash: hash.into(),
timestamp,
signature: signature.into(),
}
@ -552,47 +552,32 @@ impl TryFrom<BlockHeader> for common::block::BlockHeader {
} = value;
Ok(Self {
block_id,
prev_block_hash: prev_block_hash.0,
hash: hash.0,
prev_block_hash: prev_block_hash.into(),
hash: hash.into(),
timestamp,
signature: signature.into(),
})
}
}
impl TryFrom<common::block::BlockBody> for BlockBody {
type Error = std::io::Error;
fn try_from(value: common::block::BlockBody) -> Result<Self, Self::Error> {
// Note: EncodedTransaction doesn't have a direct conversion to NSSATransaction
// This conversion will decode and re-encode the transactions
use borsh::BorshDeserialize as _;
impl From<common::block::BlockBody> for BlockBody {
fn from(value: common::block::BlockBody) -> Self {
let common::block::BlockBody { transactions } = value;
let transactions = transactions
.into_iter()
.map(|encoded_tx| match encoded_tx.tx_kind {
common::transaction::TxKind::Public => {
nssa::PublicTransaction::try_from_slice(&encoded_tx.encoded_transaction_data)
.map(|tx| Transaction::Public(tx.into()))
.map(|tx| match tx {
common::transaction::NSSATransaction::Public(tx) => Transaction::Public(tx.into()),
common::transaction::NSSATransaction::PrivacyPreserving(tx) => {
Transaction::PrivacyPreserving(tx.into())
}
common::transaction::TxKind::PrivacyPreserving => {
nssa::PrivacyPreservingTransaction::try_from_slice(
&encoded_tx.encoded_transaction_data,
)
.map(|tx| Transaction::PrivacyPreserving(tx.into()))
}
common::transaction::TxKind::ProgramDeployment => {
nssa::ProgramDeploymentTransaction::try_from_slice(
&encoded_tx.encoded_transaction_data,
)
.map(|tx| Transaction::ProgramDeployment(tx.into()))
common::transaction::NSSATransaction::ProgramDeployment(tx) => {
Transaction::ProgramDeployment(tx.into())
}
})
.collect::<Result<Vec<_>, _>>()?;
.collect();
Ok(Self { transactions })
Self { transactions }
}
}
@ -606,7 +591,7 @@ impl TryFrom<BlockBody> for common::block::BlockBody {
.into_iter()
.map(|tx| {
let nssa_tx: common::transaction::NSSATransaction = tx.try_into()?;
Ok::<_, nssa::error::NssaError>(nssa_tx.into())
Ok::<_, nssa::error::NssaError>(nssa_tx)
})
.collect::<Result<Vec<_>, _>>()?;
@ -614,10 +599,8 @@ impl TryFrom<BlockBody> for common::block::BlockBody {
}
}
impl TryFrom<common::block::Block> for Block {
type Error = std::io::Error;
fn try_from(value: common::block::Block) -> Result<Self, Self::Error> {
impl From<common::block::Block> for Block {
fn from(value: common::block::Block) -> Self {
let common::block::Block {
header,
body,
@ -625,12 +608,12 @@ impl TryFrom<common::block::Block> for Block {
bedrock_parent_id,
} = value;
Ok(Self {
Self {
header: header.into(),
body: body.try_into()?,
body: body.into(),
bedrock_status: bedrock_status.into(),
bedrock_parent_id: MantleMsgId(bedrock_parent_id),
})
}
}
}
@ -673,3 +656,15 @@ impl From<BedrockStatus> for common::block::BedrockStatus {
}
}
}
impl From<common::HashType> for HashType {
fn from(value: common::HashType) -> Self {
Self(value.0)
}
}
impl From<HashType> for common::HashType {
fn from(value: HashType) -> Self {
common::HashType(value.0)
}
}

View File

@ -42,8 +42,8 @@ pub struct Block {
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct BlockHeader {
pub block_id: BlockId,
pub prev_block_hash: Hash,
pub hash: Hash,
pub prev_block_hash: HashType,
pub hash: HashType,
pub timestamp: TimeStamp,
pub signature: Signature,
}
@ -69,7 +69,7 @@ pub enum Transaction {
impl Transaction {
/// Get the hash of the transaction
pub fn hash(&self) -> &self::Hash {
pub fn hash(&self) -> &self::HashType {
match self {
Transaction::Public(tx) => &tx.hash,
Transaction::PrivacyPreserving(tx) => &tx.hash,
@ -80,14 +80,14 @@ impl Transaction {
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct PublicTransaction {
pub hash: Hash,
pub hash: HashType,
pub message: PublicMessage,
pub witness_set: WitnessSet,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct PrivacyPreservingTransaction {
pub hash: Hash,
pub hash: HashType,
pub message: PrivacyPreservingMessage,
pub witness_set: WitnessSet,
}
@ -134,7 +134,7 @@ pub struct EncryptedAccountData {
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct ProgramDeploymentTransaction {
pub hash: Hash,
pub hash: HashType,
pub message: ProgramDeploymentMessage,
}
@ -197,7 +197,7 @@ pub struct Data(
);
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct Hash(
pub struct HashType(
#[serde(with = "base64::arr")]
#[schemars(with = "String", description = "base64-encoded hash")]
pub [u8; 32],

View File

@ -1,4 +1,4 @@
use indexer_service_protocol::{Account, AccountId, Block, BlockId, Hash, Transaction};
use indexer_service_protocol::{Account, AccountId, Block, BlockId, HashType, Transaction};
use jsonrpsee::proc_macros::rpc;
#[cfg(feature = "server")]
use jsonrpsee::{core::SubscriptionResult, types::ErrorObjectOwned};
@ -33,13 +33,13 @@ pub trait Rpc {
async fn get_block_by_id(&self, block_id: BlockId) -> Result<Block, ErrorObjectOwned>;
#[method(name = "getBlockByHash")]
async fn get_block_by_hash(&self, block_hash: Hash) -> Result<Block, ErrorObjectOwned>;
async fn get_block_by_hash(&self, block_hash: HashType) -> Result<Block, ErrorObjectOwned>;
#[method(name = "getAccount")]
async fn get_account(&self, account_id: AccountId) -> Result<Account, ErrorObjectOwned>;
#[method(name = "getTransaction")]
async fn get_transaction(&self, tx_hash: Hash) -> Result<Transaction, ErrorObjectOwned>;
async fn get_transaction(&self, tx_hash: HashType) -> Result<Transaction, ErrorObjectOwned>;
#[method(name = "getBlocks")]
async fn get_blocks(&self, offset: u32, limit: u32) -> Result<Vec<Block>, ErrorObjectOwned>;

View File

@ -35,6 +35,12 @@ impl IndexerHandle {
handle.stopped().await
}
pub fn is_stopped(&self) -> bool {
self.server_handle
.as_ref()
.is_none_or(|handle| handle.is_stopped())
}
}
impl Drop for IndexerHandle {

View File

@ -2,9 +2,10 @@ use std::collections::HashMap;
use indexer_service_protocol::{
Account, AccountId, BedrockStatus, Block, BlockBody, BlockHeader, BlockId, Commitment,
CommitmentSetDigest, Data, EncryptedAccountData, Hash, MantleMsgId, PrivacyPreservingMessage,
PrivacyPreservingTransaction, ProgramDeploymentMessage, ProgramDeploymentTransaction,
PublicMessage, PublicTransaction, Signature, Transaction, WitnessSet,
CommitmentSetDigest, Data, EncryptedAccountData, HashType, MantleMsgId,
PrivacyPreservingMessage, PrivacyPreservingTransaction, ProgramDeploymentMessage,
ProgramDeploymentTransaction, PublicMessage, PublicTransaction, Signature, Transaction,
WitnessSet,
};
use jsonrpsee::{core::SubscriptionResult, types::ErrorObjectOwned};
@ -12,7 +13,7 @@ use jsonrpsee::{core::SubscriptionResult, types::ErrorObjectOwned};
pub struct MockIndexerService {
blocks: Vec<Block>,
accounts: HashMap<AccountId, Account>,
transactions: HashMap<Hash, (Transaction, BlockId)>,
transactions: HashMap<HashType, (Transaction, BlockId)>,
}
impl MockIndexerService {
@ -43,14 +44,14 @@ impl MockIndexerService {
}
// Create 10 blocks with transactions
let mut prev_hash = Hash([0u8; 32]);
let mut prev_hash = HashType([0u8; 32]);
for block_id in 0..10 {
let block_hash = {
let mut hash = [0u8; 32];
hash[0] = block_id as u8;
hash[1] = 0xff;
Hash(hash)
HashType(hash)
};
// Create 2-4 transactions per block (mix of Public, PrivacyPreserving, and
@ -63,7 +64,7 @@ impl MockIndexerService {
let mut hash = [0u8; 32];
hash[0] = block_id as u8;
hash[1] = tx_idx as u8;
Hash(hash)
HashType(hash)
};
// Vary transaction types: Public, PrivacyPreserving, or ProgramDeployment
@ -202,7 +203,7 @@ impl indexer_service_rpc::RpcServer for MockIndexerService {
})
}
async fn get_block_by_hash(&self, block_hash: Hash) -> Result<Block, ErrorObjectOwned> {
async fn get_block_by_hash(&self, block_hash: HashType) -> Result<Block, ErrorObjectOwned> {
self.blocks
.iter()
.find(|b| b.header.hash == block_hash)
@ -217,7 +218,7 @@ impl indexer_service_rpc::RpcServer for MockIndexerService {
.ok_or_else(|| ErrorObjectOwned::owned(-32001, "Account not found", None::<()>))
}
async fn get_transaction(&self, tx_hash: Hash) -> Result<Transaction, ErrorObjectOwned> {
async fn get_transaction(&self, tx_hash: HashType) -> Result<Transaction, ErrorObjectOwned> {
self.transactions
.get(&tx_hash)
.map(|(tx, _)| tx.clone())

View File

@ -1,15 +1,17 @@
use std::{pin::pin, sync::Arc};
use anyhow::{Context as _, Result, bail};
use futures::StreamExt as _;
use arc_swap::ArcSwap;
use futures::{StreamExt as _, never::Never};
use indexer_core::{IndexerCore, config::IndexerConfig};
use indexer_service_protocol::{Account, AccountId, Block, BlockId, Hash, Transaction};
use indexer_service_protocol::{Account, AccountId, Block, BlockId, HashType, Transaction};
use jsonrpsee::{
SubscriptionSink,
core::{Serialize, SubscriptionResult},
types::ErrorObjectOwned,
types::{ErrorCode, ErrorObject, ErrorObjectOwned},
};
use tokio::sync::{Mutex, mpsc::UnboundedSender};
use log::{debug, error, info, warn};
use tokio::sync::mpsc::UnboundedSender;
pub struct IndexerService {
subscription_service: SubscriptionService,
@ -35,8 +37,13 @@ impl indexer_service_rpc::RpcServer for IndexerService {
subscription_sink: jsonrpsee::PendingSubscriptionSink,
) -> SubscriptionResult {
let sink = subscription_sink.accept().await?;
info!(
"Accepted new subscription to finalized blocks with ID {:?}",
sink.subscription_id()
);
self.subscription_service
.add_subscription(Subscription::new(sink))?;
.add_subscription(Subscription::new(sink))
.await?;
Ok(())
}
@ -64,7 +71,7 @@ impl indexer_service_rpc::RpcServer for IndexerService {
})
}
async fn get_block_by_hash(&self, block_hash: Hash) -> Result<Block, ErrorObjectOwned> {
async fn get_block_by_hash(&self, block_hash: HashType) -> Result<Block, ErrorObjectOwned> {
self.indexer
.store
.get_block_by_hash(block_hash.0)
@ -98,7 +105,7 @@ impl indexer_service_rpc::RpcServer for IndexerService {
})
}
async fn get_transaction(&self, tx_hash: Hash) -> Result<Transaction, ErrorObjectOwned> {
async fn get_transaction(&self, tx_hash: HashType) -> Result<Transaction, ErrorObjectOwned> {
self.indexer
.store
.get_transaction_by_hash(tx_hash.0)
@ -179,18 +186,58 @@ impl indexer_service_rpc::RpcServer for IndexerService {
}
struct SubscriptionService {
respond_subscribers_loop_handle: tokio::task::JoinHandle<Result<()>>,
new_subscription_sender: UnboundedSender<Subscription<BlockId>>,
parts: ArcSwap<SubscriptionLoopParts>,
indexer: IndexerCore,
}
impl SubscriptionService {
pub fn spawn_new(indexer: IndexerCore) -> Self {
let parts = Self::spawn_respond_subscribers_loop(indexer.clone());
Self {
parts: ArcSwap::new(Arc::new(parts)),
indexer,
}
}
pub async fn add_subscription(&self, subscription: Subscription<BlockId>) -> Result<()> {
let guard = self.parts.load();
if let Err(err) = guard.new_subscription_sender.send(subscription) {
error!("Failed to send new subscription to subscription service with error: {err:#?}");
// Respawn the subscription service loop if it has finished (either with error or panic)
if guard.handle.is_finished() {
drop(guard);
let new_parts = Self::spawn_respond_subscribers_loop(self.indexer.clone());
let old_handle_and_sender = self.parts.swap(Arc::new(new_parts));
let old_parts = Arc::into_inner(old_handle_and_sender)
.expect("There should be no other references to the old handle and sender");
match old_parts.handle.await {
Ok(Err(err)) => {
error!(
"Subscription service loop has unexpectedly finished with error: {err:#}"
);
}
Err(err) => {
error!("Subscription service loop has panicked with err: {err:#}");
}
}
}
bail!(err);
};
Ok(())
}
fn spawn_respond_subscribers_loop(indexer: IndexerCore) -> SubscriptionLoopParts {
let (new_subscription_sender, mut sub_receiver) =
tokio::sync::mpsc::unbounded_channel::<Subscription<BlockId>>();
let subscriptions = Arc::new(Mutex::new(Vec::new()));
let handle = tokio::spawn(async move {
let mut subscribers = Vec::new();
let respond_subscribers_loop_handle = tokio::spawn(async move {
let mut block_stream = pin!(indexer.subscribe_parse_block_stream().await);
loop {
@ -199,46 +246,48 @@ impl SubscriptionService {
let Some(subscription) = sub else {
bail!("Subscription receiver closed unexpectedly");
};
subscriptions.lock().await.push(subscription);
info!("Added new subscription with ID {:?}", subscription.sink.subscription_id());
subscribers.push(subscription);
}
block_opt = block_stream.next() => {
debug!("Got new block from block stream");
let Some(block) = block_opt else {
bail!("Block stream ended unexpectedly");
};
let block = block.context("Failed to get L2 block data")?;
let block: indexer_service_protocol::Block = block
.try_into()
.context("Failed to convert L2 Block into protocol Block")?;
let block: indexer_service_protocol::Block = block.into();
// Cloning subscriptions to avoid holding the lock while sending
let subscriptions = subscriptions.lock().await.clone();
for sink in subscriptions {
sink.send(&block.header.block_id).await?;
for sub in &mut subscribers {
if let Err(err) = sub.try_send(&block.header.block_id) {
warn!(
"Failed to send block ID {:?} to subscription ID {:?} with error: {err:#?}",
block.header.block_id,
sub.sink.subscription_id(),
);
}
}
}
}
}
});
Self {
respond_subscribers_loop_handle,
SubscriptionLoopParts {
handle,
new_subscription_sender,
}
}
pub fn add_subscription(&self, subscription: Subscription<BlockId>) -> Result<()> {
self.new_subscription_sender.send(subscription)?;
Ok(())
}
}
impl Drop for SubscriptionService {
fn drop(&mut self) {
self.respond_subscribers_loop_handle.abort();
self.parts.load().handle.abort();
}
}
#[derive(Clone)]
struct SubscriptionLoopParts {
handle: tokio::task::JoinHandle<Result<Never>>,
new_subscription_sender: UnboundedSender<Subscription<BlockId>>,
}
struct Subscription<T> {
sink: SubscriptionSink,
_marker: std::marker::PhantomData<T>,
@ -252,13 +301,30 @@ impl<T> Subscription<T> {
}
}
async fn send(&self, item: &T) -> Result<()>
fn try_send(&mut self, item: &T) -> Result<()>
where
T: Serialize,
{
let json = serde_json::value::to_raw_value(item)
.context("Failed to serialize item for subscription")?;
self.sink.send(json).await?;
self.sink.try_send(json)?;
Ok(())
}
}
impl<T> Drop for Subscription<T> {
fn drop(&mut self) {
info!(
"Subscription with ID {:?} is being dropped",
self.sink.subscription_id()
);
}
}
pub fn not_yet_implemented_error() -> ErrorObjectOwned {
ErrorObject::owned(
ErrorCode::InternalError.code(),
"Not yet implemented",
Option::<String>::None,
)
}

View File

@ -7,7 +7,7 @@ license = { workspace = true }
[dependencies]
nssa_core = { workspace = true, features = ["host"] }
nssa.workspace = true
sequencer_core = { workspace = true, features = ["testnet"] }
sequencer_core = { workspace = true, features = ["default", "testnet"] }
sequencer_runner.workspace = true
wallet.workspace = true
common.workspace = true
@ -17,8 +17,8 @@ indexer_core.workspace = true
wallet-ffi.workspace = true
serde_json.workspace = true
token_core.workspace = true
url.workspace = true
anyhow.workspace = true
env_logger.workspace = true
log.workspace = true
@ -29,3 +29,5 @@ hex.workspace = true
tempfile.workspace = true
borsh.workspace = true
futures.workspace = true
rand.workspace = true
testcontainers = { version = "0.26.3", features = ["docker-compose"] }

View File

@ -1,159 +0,0 @@
{
"home": "",
"override_rust_log": null,
"genesis_id": 1,
"is_genesis_random": true,
"max_num_tx_in_block": 20,
"mempool_max_size": 10000,
"block_create_timeout_millis": 10000,
"retry_pending_blocks_timeout_millis": 240000,
"port": 0,
"initial_accounts": [
{
"account_id": "BLgCRDXYdQPMMWVHYRFGQZbgeHx9frkipa8GtpG2Syqy",
"balance": 10000
},
{
"account_id": "Gj1mJy5W7J5pfmLRujmQaLfLMWidNxQ6uwnhb666ZwHw",
"balance": 20000
}
],
"initial_commitments": [
{
"npk": [
63,
202,
178,
231,
183,
82,
237,
212,
216,
221,
215,
255,
153,
101,
177,
161,
254,
210,
128,
122,
54,
190,
230,
151,
183,
64,
225,
229,
113,
1,
228,
97
],
"account": {
"program_owner": [
0,
0,
0,
0,
0,
0,
0,
0
],
"balance": 10000,
"data": [],
"nonce": 0
}
},
{
"npk": [
192,
251,
166,
243,
167,
236,
84,
249,
35,
136,
130,
172,
219,
225,
161,
139,
229,
89,
243,
125,
194,
213,
209,
30,
23,
174,
100,
244,
124,
74,
140,
47
],
"account": {
"program_owner": [
0,
0,
0,
0,
0,
0,
0,
0
],
"balance": 20000,
"data": [],
"nonce": 0
}
}
],
"signing_key": [
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37
]
}

View File

@ -1,547 +0,0 @@
{
"override_rust_log": null,
"sequencer_addr": "",
"seq_poll_timeout_millis": 12000,
"seq_tx_poll_max_blocks": 5,
"seq_poll_max_retries": 5,
"seq_block_poll_max_amount": 100,
"basic_auth": null,
"initial_accounts": [
{
"Public": {
"account_id": "BLgCRDXYdQPMMWVHYRFGQZbgeHx9frkipa8GtpG2Syqy",
"pub_sign_key": [
16,
162,
106,
154,
236,
125,
52,
184,
35,
100,
238,
174,
69,
197,
41,
77,
187,
10,
118,
75,
0,
11,
148,
238,
185,
181,
133,
17,
220,
72,
124,
77
]
}
},
{
"Public": {
"account_id": "Gj1mJy5W7J5pfmLRujmQaLfLMWidNxQ6uwnhb666ZwHw",
"pub_sign_key": [
113,
121,
64,
177,
204,
85,
229,
214,
178,
6,
109,
191,
29,
154,
63,
38,
242,
18,
244,
219,
8,
208,
35,
136,
23,
127,
207,
237,
216,
169,
190,
27
]
}
},
{
"Private": {
"account_id": "3oCG8gqdKLMegw4rRfyaMQvuPHpcASt7xwttsmnZLSkw",
"account": {
"program_owner": [
0,
0,
0,
0,
0,
0,
0,
0
],
"balance": 10000,
"data": [],
"nonce": 0
},
"key_chain": {
"secret_spending_key": [
251,
82,
235,
1,
146,
96,
30,
81,
162,
234,
33,
15,
123,
129,
116,
0,
84,
136,
176,
70,
190,
224,
161,
54,
134,
142,
154,
1,
18,
251,
242,
189
],
"private_key_holder": {
"nullifier_secret_key": [
29,
250,
10,
187,
35,
123,
180,
250,
246,
97,
216,
153,
44,
156,
16,
93,
241,
26,
174,
219,
72,
84,
34,
247,
112,
101,
217,
243,
189,
173,
75,
20
],
"incoming_viewing_secret_key": [
251,
201,
22,
154,
100,
165,
218,
108,
163,
190,
135,
91,
145,
84,
69,
241,
46,
117,
217,
110,
197,
248,
91,
193,
14,
104,
88,
103,
67,
153,
182,
158
],
"outgoing_viewing_secret_key": [
25,
67,
121,
76,
175,
100,
30,
198,
105,
123,
49,
169,
75,
178,
75,
210,
100,
143,
210,
243,
228,
243,
21,
18,
36,
84,
164,
186,
139,
113,
214,
12
]
},
"nullifer_public_key": [
63,
202,
178,
231,
183,
82,
237,
212,
216,
221,
215,
255,
153,
101,
177,
161,
254,
210,
128,
122,
54,
190,
230,
151,
183,
64,
225,
229,
113,
1,
228,
97
],
"incoming_viewing_public_key": [
3,
235,
139,
131,
237,
177,
122,
189,
6,
177,
167,
178,
202,
117,
246,
58,
28,
65,
132,
79,
220,
139,
119,
243,
187,
160,
212,
121,
61,
247,
116,
72,
205
]
}
}
},
{
"Private": {
"account_id": "AKTcXgJ1xoynta1Ec7y6Jso1z1JQtHqd7aPQ1h9er6xX",
"account": {
"program_owner": [
0,
0,
0,
0,
0,
0,
0,
0
],
"balance": 20000,
"data": [],
"nonce": 0
},
"key_chain": {
"secret_spending_key": [
238,
171,
241,
69,
111,
217,
85,
64,
19,
82,
18,
189,
32,
91,
78,
175,
107,
7,
109,
60,
52,
44,
243,
230,
72,
244,
192,
92,
137,
33,
118,
254
],
"private_key_holder": {
"nullifier_secret_key": [
25,
211,
215,
119,
57,
223,
247,
37,
245,
144,
122,
29,
118,
245,
83,
228,
23,
9,
101,
120,
88,
33,
238,
207,
128,
61,
110,
2,
89,
62,
164,
13
],
"incoming_viewing_secret_key": [
193,
181,
14,
196,
142,
84,
15,
65,
128,
101,
70,
196,
241,
47,
130,
221,
23,
146,
161,
237,
221,
40,
19,
126,
59,
15,
169,
236,
25,
105,
104,
231
],
"outgoing_viewing_secret_key": [
20,
170,
220,
108,
41,
23,
155,
217,
247,
190,
175,
168,
247,
34,
105,
134,
114,
74,
104,
91,
211,
62,
126,
13,
130,
100,
241,
214,
250,
236,
38,
150
]
},
"nullifer_public_key": [
192,
251,
166,
243,
167,
236,
84,
249,
35,
136,
130,
172,
219,
225,
161,
139,
229,
89,
243,
125,
194,
213,
209,
30,
23,
174,
100,
244,
124,
74,
140,
47
],
"incoming_viewing_public_key": [
2,
181,
98,
93,
216,
241,
241,
110,
58,
198,
119,
174,
250,
184,
1,
204,
200,
173,
44,
238,
37,
247,
170,
156,
100,
254,
116,
242,
28,
183,
187,
77,
255
]
}
}
}
]
}

View File

@ -1,41 +1,255 @@
use std::net::SocketAddr;
use std::{net::SocketAddr, path::PathBuf};
use anyhow::Result;
use indexer_service::IndexerConfig;
use anyhow::{Context, Result};
use common::block::{AccountInitialData, CommitmentsInitialData};
use indexer_service::{BackoffConfig, ChannelId, ClientConfig, IndexerConfig};
use key_protocol::key_management::KeyChain;
use nssa::{Account, AccountId, PrivateKey, PublicKey};
use nssa_core::{account::Data, program::DEFAULT_PROGRAM_ID};
use sequencer_core::config::{BedrockConfig, SequencerConfig};
use url::Url;
use wallet::config::{
InitialAccountData, InitialAccountDataPrivate, InitialAccountDataPublic, WalletConfig,
};
pub fn indexer_config(bedrock_addr: SocketAddr) -> IndexerConfig {
todo!()
// let channel_id: [u8; 32] = [0u8, 1]
// .repeat(16)
// .try_into()
// .unwrap_or_else(|_| unreachable!());
// let channel_id = ChannelId::try_from(channel_id).expect("Failed to create channel ID");
// IndexerConfig {
// resubscribe_interval_millis: 1000,
// backoff: BackoffConfig {
// start_delay_millis: 100,
// max_retries: 10,
// },
// bedrock_client_config: BedrockClientConfig {
// addr: addr_to_http_url(bedrock_addr).expect("Failed to convert bedrock addr to URL"),
// auth: None,
// },
// channel_id,
// }
pub fn indexer_config(bedrock_addr: SocketAddr) -> Result<IndexerConfig> {
Ok(IndexerConfig {
resubscribe_interval_millis: 1000,
bedrock_client_config: ClientConfig {
addr: addr_to_url(UrlProtocol::Http, bedrock_addr)
.context("Failed to convert bedrock addr to URL")?,
auth: None,
backoff: BackoffConfig {
start_delay_millis: 100,
max_retries: 10,
},
},
channel_id: bedrock_channel_id(),
})
}
fn addr_to_http_url(addr: SocketAddr) -> Result<Url> {
/// Sequencer config options available for custom changes in integration tests.
pub struct SequencerPartialConfig {
pub max_num_tx_in_block: usize,
pub mempool_max_size: usize,
pub block_create_timeout_millis: u64,
}
impl Default for SequencerPartialConfig {
fn default() -> Self {
Self {
max_num_tx_in_block: 20,
mempool_max_size: 10_000,
block_create_timeout_millis: 10_000,
}
}
}
pub fn sequencer_config(
partial: SequencerPartialConfig,
home: PathBuf,
bedrock_addr: SocketAddr,
indexer_addr: SocketAddr,
initial_data: &InitialData,
) -> Result<SequencerConfig> {
let SequencerPartialConfig {
max_num_tx_in_block,
mempool_max_size,
block_create_timeout_millis,
} = partial;
Ok(SequencerConfig {
home,
override_rust_log: None,
genesis_id: 1,
is_genesis_random: true,
max_num_tx_in_block,
mempool_max_size,
block_create_timeout_millis,
retry_pending_blocks_timeout_millis: 240_000,
port: 0,
initial_accounts: initial_data.sequencer_initial_accounts(),
initial_commitments: initial_data.sequencer_initial_commitments(),
signing_key: [37; 32],
bedrock_config: BedrockConfig {
backoff: BackoffConfig {
start_delay_millis: 100,
max_retries: 5,
},
channel_id: bedrock_channel_id(),
node_url: addr_to_url(UrlProtocol::Http, bedrock_addr)
.context("Failed to convert bedrock addr to URL")?,
auth: None,
},
indexer_rpc_url: addr_to_url(UrlProtocol::Ws, indexer_addr)
.context("Failed to convert indexer addr to URL")?,
})
}
// TODO #312: Remove account id and key hardcoding
pub fn wallet_config(
sequencer_addr: SocketAddr,
initial_data: &InitialData,
) -> Result<WalletConfig> {
Ok(WalletConfig {
override_rust_log: None,
sequencer_addr: addr_to_url(UrlProtocol::Http, sequencer_addr)
.context("Failed to convert sequencer addr to URL")?,
seq_poll_timeout_millis: 30_000,
seq_tx_poll_max_blocks: 15,
seq_poll_max_retries: 10,
seq_block_poll_max_amount: 100,
initial_accounts: initial_data.wallet_initial_accounts(),
basic_auth: None,
})
}
pub struct InitialData {
pub public_accounts: Vec<(PrivateKey, u128)>,
pub private_accounts: Vec<(KeyChain, Account)>,
}
impl InitialData {
pub fn with_two_public_and_two_private_initialized_accounts() -> Self {
let mut public_alice_private_key = PrivateKey::new_os_random();
let mut public_alice_public_key =
PublicKey::new_from_private_key(&public_alice_private_key);
let mut public_alice_account_id = AccountId::from(&public_alice_public_key);
let mut public_bob_private_key = PrivateKey::new_os_random();
let mut public_bob_public_key = PublicKey::new_from_private_key(&public_bob_private_key);
let mut public_bob_account_id = AccountId::from(&public_bob_public_key);
// Ensure consistent ordering
if public_alice_account_id > public_bob_account_id {
std::mem::swap(&mut public_alice_private_key, &mut public_bob_private_key);
std::mem::swap(&mut public_alice_public_key, &mut public_bob_public_key);
std::mem::swap(&mut public_alice_account_id, &mut public_bob_account_id);
}
let mut private_charlie_key_chain = KeyChain::new_os_random();
let mut private_charlie_account_id =
AccountId::from(&private_charlie_key_chain.nullifer_public_key);
let mut private_david_key_chain = KeyChain::new_os_random();
let mut private_david_account_id =
AccountId::from(&private_david_key_chain.nullifer_public_key);
// Ensure consistent ordering
if private_charlie_account_id > private_david_account_id {
std::mem::swap(&mut private_charlie_key_chain, &mut private_david_key_chain);
std::mem::swap(
&mut private_charlie_account_id,
&mut private_david_account_id,
);
}
Self {
public_accounts: vec![
(public_alice_private_key, 10_000),
(public_bob_private_key, 20_000),
],
private_accounts: vec![
(
private_charlie_key_chain,
Account {
balance: 10_000,
data: Data::default(),
program_owner: DEFAULT_PROGRAM_ID,
nonce: 0,
},
),
(
private_david_key_chain,
Account {
balance: 20_000,
data: Data::default(),
program_owner: DEFAULT_PROGRAM_ID,
nonce: 0,
},
),
],
}
}
fn sequencer_initial_accounts(&self) -> Vec<AccountInitialData> {
self.public_accounts
.iter()
.map(|(priv_key, balance)| {
let pub_key = PublicKey::new_from_private_key(priv_key);
let account_id = AccountId::from(&pub_key);
AccountInitialData {
account_id,
balance: *balance,
}
})
.collect()
}
fn sequencer_initial_commitments(&self) -> Vec<CommitmentsInitialData> {
self.private_accounts
.iter()
.map(|(key_chain, account)| CommitmentsInitialData {
npk: key_chain.nullifer_public_key.clone(),
account: account.clone(),
})
.collect()
}
fn wallet_initial_accounts(&self) -> Vec<InitialAccountData> {
self.public_accounts
.iter()
.map(|(priv_key, _)| {
let pub_key = PublicKey::new_from_private_key(priv_key);
let account_id = AccountId::from(&pub_key);
InitialAccountData::Public(InitialAccountDataPublic {
account_id,
pub_sign_key: priv_key.clone(),
})
})
.chain(self.private_accounts.iter().map(|(key_chain, account)| {
let account_id = AccountId::from(&key_chain.nullifer_public_key);
InitialAccountData::Private(InitialAccountDataPrivate {
account_id,
account: account.clone(),
key_chain: key_chain.clone(),
})
}))
.collect()
}
}
pub enum UrlProtocol {
Http,
Ws,
}
impl std::fmt::Display for UrlProtocol {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
UrlProtocol::Http => write!(f, "http"),
UrlProtocol::Ws => write!(f, "ws"),
}
}
}
pub fn addr_to_url(protocol: UrlProtocol, addr: SocketAddr) -> Result<Url> {
// Convert 0.0.0.0 to 127.0.0.1 for client connections
// When binding to port 0, the server binds to 0.0.0.0:<random_port>
// but clients need to connect to 127.0.0.1:<port> to work reliably
let url_string = if addr.ip().is_unspecified() {
format!("http://127.0.0.1:{}", addr.port())
format!("{protocol}://127.0.0.1:{}", addr.port())
} else {
format!("http://{addr}")
format!("{protocol}://{addr}")
};
url_string.parse().map_err(Into::into)
}
fn bedrock_channel_id() -> ChannelId {
let channel_id: [u8; 32] = [0u8, 1]
.repeat(16)
.try_into()
.unwrap_or_else(|_| unreachable!());
ChannelId::from(channel_id)
}

View File

@ -2,47 +2,41 @@
use std::{net::SocketAddr, path::PathBuf, sync::LazyLock};
use anyhow::{Context, Result};
use anyhow::{Context, Result, bail};
use base64::{Engine, engine::general_purpose::STANDARD as BASE64};
use common::{
sequencer_client::SequencerClient,
transaction::{EncodedTransaction, NSSATransaction},
};
use common::{HashType, sequencer_client::SequencerClient, transaction::NSSATransaction};
use futures::FutureExt as _;
use indexer_service::IndexerHandle;
use log::debug;
use nssa::PrivacyPreservingTransaction;
use log::{debug, error, warn};
use nssa::{AccountId, PrivacyPreservingTransaction};
use nssa_core::Commitment;
use sequencer_core::config::SequencerConfig;
use sequencer_runner::SequencerHandle;
use tempfile::TempDir;
use url::Url;
use testcontainers::compose::DockerCompose;
use wallet::{WalletCore, config::WalletConfigOverrides};
mod config;
pub mod config;
// TODO: Remove this and control time from tests
pub const TIME_TO_WAIT_FOR_BLOCK_SECONDS: u64 = 12;
pub const ACC_SENDER: &str = "BLgCRDXYdQPMMWVHYRFGQZbgeHx9frkipa8GtpG2Syqy";
pub const ACC_RECEIVER: &str = "Gj1mJy5W7J5pfmLRujmQaLfLMWidNxQ6uwnhb666ZwHw";
pub const ACC_SENDER_PRIVATE: &str = "3oCG8gqdKLMegw4rRfyaMQvuPHpcASt7xwttsmnZLSkw";
pub const ACC_RECEIVER_PRIVATE: &str = "AKTcXgJ1xoynta1Ec7y6Jso1z1JQtHqd7aPQ1h9er6xX";
pub const NSSA_PROGRAM_FOR_TEST_DATA_CHANGER: &str = "data_changer.bin";
const BEDROCK_SERVICE_WITH_OPEN_PORT: &str = "logos-blockchain-node-0";
const BEDROCK_SERVICE_PORT: u16 = 18080;
static LOGGER: LazyLock<()> = LazyLock::new(env_logger::init);
/// Test context which sets up a sequencer and a wallet for integration tests.
///
/// It's memory and logically safe to create multiple instances of this struct in parallel tests,
/// as each instance uses its own temporary directories for sequencer and wallet data.
// NOTE: Order of fields is important for proper drop order.
pub struct TestContext {
sequencer_client: SequencerClient,
wallet: WalletCore,
_sequencer_handle: SequencerHandle,
_indexer_handle: IndexerHandle,
wallet_password: String,
sequencer_handle: SequencerHandle,
indexer_handle: IndexerHandle,
bedrock_compose: DockerCompose,
_temp_sequencer_dir: TempDir,
_temp_wallet_dir: TempDir,
}
@ -50,71 +44,135 @@ pub struct TestContext {
impl TestContext {
/// Create new test context.
pub async fn new() -> Result<Self> {
let manifest_dir = env!("CARGO_MANIFEST_DIR");
let sequencer_config_path =
PathBuf::from(manifest_dir).join("configs/sequencer/detached/sequencer_config.json");
let sequencer_config = SequencerConfig::from_path(&sequencer_config_path)
.context("Failed to create sequencer config from file")?;
Self::new_with_sequencer_config(sequencer_config).await
Self::builder().build().await
}
/// Create new test context with custom sequencer config.
///
/// `home` and `port` fields of the provided config will be overridden to meet tests parallelism
/// requirements.
pub async fn new_with_sequencer_config(sequencer_config: SequencerConfig) -> Result<Self> {
pub fn builder() -> TestContextBuilder {
TestContextBuilder::new()
}
async fn new_configured(
sequencer_partial_config: config::SequencerPartialConfig,
initial_data: config::InitialData,
) -> Result<Self> {
// Ensure logger is initialized only once
*LOGGER;
debug!("Test context setup");
let bedrock_addr = todo!();
let indexer_config = config::indexer_config(bedrock_addr);
let (bedrock_compose, bedrock_addr) = Self::setup_bedrock_node().await?;
let (_sequencer_handle, sequencer_addr, temp_sequencer_dir) =
Self::setup_sequencer(sequencer_config)
.await
.context("Failed to setup sequencer")?;
let indexer_handle = Self::setup_indexer(bedrock_addr)
.await
.context("Failed to setup Indexer")?;
// Convert 0.0.0.0 to 127.0.0.1 for client connections
// When binding to port 0, the server binds to 0.0.0.0:<random_port>
// but clients need to connect to 127.0.0.1:<port> to work reliably
let sequencer_addr = if sequencer_addr.ip().is_unspecified() {
format!("http://127.0.0.1:{}", sequencer_addr.port())
} else {
format!("http://{sequencer_addr}")
};
let (sequencer_handle, temp_sequencer_dir) = Self::setup_sequencer(
sequencer_partial_config,
bedrock_addr,
indexer_handle.addr(),
&initial_data,
)
.await
.context("Failed to setup Sequencer")?;
let (wallet, temp_wallet_dir, wallet_password) = Self::setup_wallet(sequencer_addr.clone())
let (wallet, temp_wallet_dir) = Self::setup_wallet(sequencer_handle.addr(), &initial_data)
.await
.context("Failed to setup wallet")?;
let sequencer_client = SequencerClient::new(
Url::parse(&sequencer_addr).context("Failed to parse sequencer addr")?,
)
.context("Failed to create sequencer client")?;
let _indexer_handle = indexer_service::run_server(indexer_config, 0)
.await
.context("Failed to run Indexer Service")?;
let sequencer_url = config::addr_to_url(config::UrlProtocol::Http, sequencer_handle.addr())
.context("Failed to convert sequencer addr to URL")?;
let sequencer_client =
SequencerClient::new(sequencer_url).context("Failed to create sequencer client")?;
Ok(Self {
sequencer_client,
wallet,
wallet_password,
_sequencer_handle,
_indexer_handle,
bedrock_compose,
sequencer_handle,
indexer_handle,
_temp_sequencer_dir: temp_sequencer_dir,
_temp_wallet_dir: temp_wallet_dir,
})
}
/// Bring up the Bedrock node via docker-compose and return the compose handle
/// together with the host socket address of its exposed service port.
///
/// Bringing the services up and resolving the published port is retried up to
/// 5 times, since the container may not have published its port yet on the
/// first attempt.
async fn setup_bedrock_node() -> Result<(DockerCompose, SocketAddr)> {
// The compose file is located relative to this crate in the repository layout.
let manifest_dir = env!("CARGO_MANIFEST_DIR");
let bedrock_compose_path =
PathBuf::from(manifest_dir).join("../bedrock/docker-compose.yml");
let mut compose = DockerCompose::with_auto_client(&[bedrock_compose_path])
.await
.context("Failed to setup docker compose for Bedrock")?;
// One attempt: start the services, find the Bedrock service container and
// resolve which host port Docker mapped to the fixed container port.
async fn up_and_retrieve_port(compose: &mut DockerCompose) -> Result<u16> {
compose
.up()
.await
.context("Failed to bring up Bedrock services")?;
let container = compose
.service(BEDROCK_SERVICE_WITH_OPEN_PORT)
.with_context(|| {
format!(
"Failed to get Bedrock service container `{BEDROCK_SERVICE_WITH_OPEN_PORT}`"
)
})?;
let ports = container.ports().await.with_context(|| {
format!(
"Failed to get ports for Bedrock service container `{}`",
container.id()
)
})?;
// Translate the container-side port into the dynamically assigned host port.
ports
.map_to_host_port_ipv4(BEDROCK_SERVICE_PORT)
.with_context(|| {
format!(
"Failed to retrieve host port of {BEDROCK_SERVICE_PORT} container \
port for container `{}`, existing ports: {ports:?}",
container.id()
)
})
}
// Retry loop: each failed attempt is logged and retried; give up only after
// `max_attempts` consecutive failures.
let mut port = None;
let mut attempt = 0;
let max_attempts = 5;
while port.is_none() && attempt < max_attempts {
attempt += 1;
match up_and_retrieve_port(&mut compose).await {
Ok(p) => {
port = Some(p);
}
Err(err) => {
warn!(
"Failed to bring up Bedrock services: {err:?}, attempt {attempt}/{max_attempts}"
);
}
}
}
let Some(port) = port else {
bail!("Failed to bring up Bedrock services after {max_attempts} attempts");
};
// The published port is reachable on the host loopback interface.
let addr = SocketAddr::from(([127, 0, 0, 1], port));
Ok((compose, addr))
}
/// Start the Indexer service pointed at the given Bedrock node address.
///
/// The service is started on port 0 so the OS picks a free port; the returned
/// handle carries the actual bound address.
async fn setup_indexer(bedrock_addr: SocketAddr) -> Result<IndexerHandle> {
    let cfg = config::indexer_config(bedrock_addr).context("Failed to create Indexer config")?;
    let handle = indexer_service::run_server(cfg, 0)
        .await
        .context("Failed to run Indexer Service")?;
    Ok(handle)
}
async fn setup_sequencer(
mut config: SequencerConfig,
) -> Result<(SequencerHandle, SocketAddr, TempDir)> {
partial: config::SequencerPartialConfig,
bedrock_addr: SocketAddr,
indexer_addr: SocketAddr,
initial_data: &config::InitialData,
) -> Result<(SequencerHandle, TempDir)> {
let temp_sequencer_dir =
tempfile::tempdir().context("Failed to create temp dir for sequencer home")?;
@ -122,33 +180,39 @@ impl TestContext {
"Using temp sequencer home at {:?}",
temp_sequencer_dir.path()
);
config.home = temp_sequencer_dir.path().to_owned();
// Setting port to 0 lets the OS choose a free port for us
config.port = 0;
let (sequencer_handle, sequencer_addr) =
sequencer_runner::startup_sequencer(config).await?;
let config = config::sequencer_config(
partial,
temp_sequencer_dir.path().to_owned(),
bedrock_addr,
indexer_addr,
initial_data,
)
.context("Failed to create Sequencer config")?;
Ok((sequencer_handle, sequencer_addr, temp_sequencer_dir))
let sequencer_handle = sequencer_runner::startup_sequencer(config).await?;
Ok((sequencer_handle, temp_sequencer_dir))
}
async fn setup_wallet(sequencer_addr: String) -> Result<(WalletCore, TempDir, String)> {
let manifest_dir = env!("CARGO_MANIFEST_DIR");
let wallet_config_source_path =
PathBuf::from(manifest_dir).join("configs/wallet/wallet_config.json");
async fn setup_wallet(
sequencer_addr: SocketAddr,
initial_data: &config::InitialData,
) -> Result<(WalletCore, TempDir)> {
let config = config::wallet_config(sequencer_addr, initial_data)
.context("Failed to create Wallet config")?;
let config_serialized =
serde_json::to_string_pretty(&config).context("Failed to serialize Wallet config")?;
let temp_wallet_dir =
tempfile::tempdir().context("Failed to create temp dir for wallet home")?;
let config_path = temp_wallet_dir.path().join("wallet_config.json");
std::fs::copy(&wallet_config_source_path, &config_path)
.context("Failed to copy wallet config to temp dir")?;
std::fs::write(&config_path, config_serialized)
.context("Failed to write wallet config in temp dir")?;
let storage_path = temp_wallet_dir.path().join("storage.json");
let config_overrides = WalletConfigOverrides {
sequencer_addr: Some(sequencer_addr),
..Default::default()
};
let config_overrides = WalletConfigOverrides::default();
let wallet_password = "test_pass".to_owned();
let wallet = WalletCore::new_init_storage(
@ -163,7 +227,7 @@ impl TestContext {
.await
.context("Failed to store wallet persistent data")?;
Ok((wallet, temp_wallet_dir, wallet_password))
Ok((wallet, temp_wallet_dir))
}
/// Get reference to the wallet.
@ -171,10 +235,6 @@ impl TestContext {
&self.wallet
}
pub fn wallet_password(&self) -> &str {
&self.wallet_password
}
/// Get mutable reference to the wallet.
pub fn wallet_mut(&mut self) -> &mut WalletCore {
&mut self.wallet
@ -184,47 +244,132 @@ impl TestContext {
pub fn sequencer_client(&self) -> &SequencerClient {
&self.sequencer_client
}
}
/// A test context to be used in normal #[test] tests
pub struct BlockingTestContext {
pub ctx: TestContext,
pub runtime: tokio::runtime::Runtime,
}
/// Get existing public account IDs in the wallet.
pub fn existing_public_accounts(&self) -> Vec<AccountId> {
    let user_data = &self.wallet.storage().user_data;
    user_data.public_account_ids().collect()
}
impl BlockingTestContext {
pub fn new() -> Result<Self> {
let runtime = tokio::runtime::Runtime::new().unwrap();
let ctx = runtime.block_on(TestContext::new())?;
Ok(Self { ctx, runtime })
/// Get existing private account IDs in the wallet.
pub fn existing_private_accounts(&self) -> Vec<AccountId> {
    let user_data = &self.wallet.storage().user_data;
    user_data.private_account_ids().collect()
}
}
pub fn format_public_account_id(account_id: &str) -> String {
impl Drop for TestContext {
// Best-effort teardown health check: log an error if any managed service
// (sequencer, indexer, Bedrock container) died before the context itself was
// dropped. Only a failure to run `docker inspect` (or to read its output)
// panics here.
fn drop(&mut self) {
// Exhaustive destructuring so adding a field to `TestContext` forces this
// teardown check to be revisited.
let Self {
sequencer_handle,
indexer_handle,
bedrock_compose,
_temp_sequencer_dir: _,
_temp_wallet_dir: _,
sequencer_client: _,
wallet: _,
} = self;
if sequencer_handle.is_finished() {
// NOTE(review): the bare `let Err(err) = ...` is only irrefutable if
// `run_forever` can never return `Ok` (e.g. `Result<Infallible, _>`) —
// confirm against its declaration. `now_or_never` is safe here because
// `is_finished` just reported the future as complete.
let Err(err) = self
.sequencer_handle
.run_forever()
.now_or_never()
.expect("Future is finished and should be ready");
error!(
"Sequencer handle has unexpectedly finished before TestContext drop with error: {err:#}"
);
}
if indexer_handle.is_stopped() {
error!("Indexer handle has unexpectedly stopped before TestContext drop");
}
let container = bedrock_compose
.service(BEDROCK_SERVICE_WITH_OPEN_PORT)
.unwrap_or_else(|| {
panic!("Failed to get Bedrock service container `{BEDROCK_SERVICE_WITH_OPEN_PORT}`")
});
// Ask Docker directly whether the container is still running; the Go
// template `{{.State.Running}}` prints `true` or `false`.
let output = std::process::Command::new("docker")
.args(["inspect", "-f", "{{.State.Running}}", container.id()])
.output()
.expect("Failed to execute docker inspect command to check if Bedrock container is still running")
let stdout = String::from_utf8(output.stdout)
.expect("Failed to parse docker inspect output as String");
if stdout.trim() != "true" {
error!(
"Bedrock container `{}` is not running during TestContext drop, docker inspect output: {stdout}",
container.id()
);
}
}
}
/// Builder for a test context, letting individual tests override the default
/// initial chain data and sequencer configuration.
pub struct TestContextBuilder {
// Initial accounts/state to seed the test network with; a default is
// substituted in `build` when unset.
initial_data: Option<config::InitialData>,
// Sequencer settings to start from; a default is substituted in `build`
// when unset.
sequencer_partial_config: Option<config::SequencerPartialConfig>,
}
impl TestContextBuilder {
    /// Create a builder with no overrides set; `build` fills in the defaults.
    fn new() -> Self {
        Self {
            initial_data: None,
            sequencer_partial_config: None,
        }
    }

    /// Use the given initial chain data instead of the default one.
    pub fn with_initial_data(self, initial_data: config::InitialData) -> Self {
        Self {
            initial_data: Some(initial_data),
            ..self
        }
    }

    /// Use the given partial sequencer configuration instead of the default one.
    pub fn with_sequencer_partial_config(
        self,
        sequencer_partial_config: config::SequencerPartialConfig,
    ) -> Self {
        Self {
            sequencer_partial_config: Some(sequencer_partial_config),
            ..self
        }
    }

    /// Construct the test context, substituting a default for every field that
    /// was not explicitly set.
    pub async fn build(self) -> Result<TestContext> {
        let Self {
            initial_data,
            sequencer_partial_config,
        } = self;
        let partial_config = sequencer_partial_config.unwrap_or_default();
        let initial_data = initial_data.unwrap_or_else(
            config::InitialData::with_two_public_and_two_private_initialized_accounts,
        );
        TestContext::new_configured(partial_config, initial_data).await
    }
}
/// Render an account ID with the `Public/` prefix used by wallet CLI commands.
pub fn format_public_account_id(account_id: AccountId) -> String {
    let mut rendered = String::from("Public/");
    rendered.push_str(&account_id.to_string());
    rendered
}
pub fn format_private_account_id(account_id: &str) -> String {
/// Render an account ID with the `Private/` prefix used by wallet CLI commands.
pub fn format_private_account_id(account_id: AccountId) -> String {
    let mut rendered = String::from("Private/");
    rendered.push_str(&account_id.to_string());
    rendered
}
pub async fn fetch_privacy_preserving_tx(
seq_client: &SequencerClient,
tx_hash: String,
tx_hash: HashType,
) -> PrivacyPreservingTransaction {
let transaction_encoded = seq_client
.get_transaction_by_hash(tx_hash.clone())
.get_transaction_by_hash(tx_hash)
.await
.unwrap()
.transaction
.unwrap();
let tx_base64_decode = BASE64.decode(transaction_encoded).unwrap();
match NSSATransaction::try_from(
&borsh::from_slice::<EncodedTransaction>(&tx_base64_decode).unwrap(),
)
.unwrap()
{
let tx_bytes = BASE64.decode(transaction_encoded).unwrap();
let tx = borsh::from_slice(&tx_bytes).unwrap();
match tx {
NSSATransaction::PrivacyPreserving(privacy_preserving_transaction) => {
privacy_preserving_transaction
}
@ -241,20 +386,3 @@ pub async fn verify_commitment_is_in_state(
Ok(Some(_))
)
}
#[cfg(test)]
mod tests {
use super::{format_private_account_id, format_public_account_id};
#[test]
fn correct_account_id_from_prefix() {
let account_id1 = "cafecafe";
let account_id2 = "deadbeaf";
let account_id1_pub = format_public_account_id(account_id1);
let account_id2_priv = format_private_account_id(account_id2);
assert_eq!(account_id1_pub, "Public/cafecafe".to_string());
assert_eq!(account_id2_priv, "Private/deadbeaf".to_string());
}
}

View File

@ -1,5 +1,5 @@
use anyhow::Result;
use integration_tests::{ACC_SENDER, TestContext};
use integration_tests::TestContext;
use log::info;
use nssa::program::Program;
use tokio::test;
@ -10,7 +10,7 @@ async fn get_existing_account() -> Result<()> {
let account = ctx
.sequencer_client()
.get_account(ACC_SENDER.to_string())
.get_account(ctx.existing_public_accounts()[0])
.await?
.account;

View File

@ -88,8 +88,8 @@ async fn amm_public() -> Result<()> {
// Create new token
let subcommand = TokenProgramAgnosticSubcommand::New {
definition_account_id: format_public_account_id(&definition_account_id_1.to_string()),
supply_account_id: format_public_account_id(&supply_account_id_1.to_string()),
definition_account_id: format_public_account_id(definition_account_id_1),
supply_account_id: format_public_account_id(supply_account_id_1),
name: "A NAM1".to_string(),
total_supply: 37,
};
@ -99,10 +99,8 @@ async fn amm_public() -> Result<()> {
// Transfer 7 tokens from `supply_acc` to the account at account_id `recipient_account_id_1`
let subcommand = TokenProgramAgnosticSubcommand::Send {
from: format_public_account_id(&supply_account_id_1.to_string()),
to: Some(format_public_account_id(
&recipient_account_id_1.to_string(),
)),
from: format_public_account_id(supply_account_id_1),
to: Some(format_public_account_id(recipient_account_id_1)),
to_npk: None,
to_ipk: None,
amount: 7,
@ -114,8 +112,8 @@ async fn amm_public() -> Result<()> {
// Create new token
let subcommand = TokenProgramAgnosticSubcommand::New {
definition_account_id: format_public_account_id(&definition_account_id_2.to_string()),
supply_account_id: format_public_account_id(&supply_account_id_2.to_string()),
definition_account_id: format_public_account_id(definition_account_id_2),
supply_account_id: format_public_account_id(supply_account_id_2),
name: "A NAM2".to_string(),
total_supply: 37,
};
@ -125,10 +123,8 @@ async fn amm_public() -> Result<()> {
// Transfer 7 tokens from `supply_acc` to the account at account_id `recipient_account_id_2`
let subcommand = TokenProgramAgnosticSubcommand::Send {
from: format_public_account_id(&supply_account_id_2.to_string()),
to: Some(format_public_account_id(
&recipient_account_id_2.to_string(),
)),
from: format_public_account_id(supply_account_id_2),
to: Some(format_public_account_id(recipient_account_id_2)),
to_npk: None,
to_ipk: None,
amount: 7,
@ -157,9 +153,9 @@ async fn amm_public() -> Result<()> {
// Send creation tx
let subcommand = AmmProgramAgnosticSubcommand::New {
user_holding_a: format_public_account_id(&recipient_account_id_1.to_string()),
user_holding_b: format_public_account_id(&recipient_account_id_2.to_string()),
user_holding_lp: format_public_account_id(&user_holding_lp.to_string()),
user_holding_a: format_public_account_id(recipient_account_id_1),
user_holding_b: format_public_account_id(recipient_account_id_2),
user_holding_lp: format_public_account_id(user_holding_lp),
balance_a: 3,
balance_b: 3,
};
@ -170,19 +166,19 @@ async fn amm_public() -> Result<()> {
let user_holding_a_acc = ctx
.sequencer_client()
.get_account(recipient_account_id_1.to_string())
.get_account(recipient_account_id_1)
.await?
.account;
let user_holding_b_acc = ctx
.sequencer_client()
.get_account(recipient_account_id_2.to_string())
.get_account(recipient_account_id_2)
.await?
.account;
let user_holding_lp_acc = ctx
.sequencer_client()
.get_account(user_holding_lp.to_string())
.get_account(user_holding_lp)
.await?
.account;
@ -206,8 +202,8 @@ async fn amm_public() -> Result<()> {
// Make swap
let subcommand = AmmProgramAgnosticSubcommand::Swap {
user_holding_a: format_public_account_id(&recipient_account_id_1.to_string()),
user_holding_b: format_public_account_id(&recipient_account_id_2.to_string()),
user_holding_a: format_public_account_id(recipient_account_id_1),
user_holding_b: format_public_account_id(recipient_account_id_2),
amount_in: 2,
min_amount_out: 1,
token_definition: definition_account_id_1.to_string(),
@ -219,19 +215,19 @@ async fn amm_public() -> Result<()> {
let user_holding_a_acc = ctx
.sequencer_client()
.get_account(recipient_account_id_1.to_string())
.get_account(recipient_account_id_1)
.await?
.account;
let user_holding_b_acc = ctx
.sequencer_client()
.get_account(recipient_account_id_2.to_string())
.get_account(recipient_account_id_2)
.await?
.account;
let user_holding_lp_acc = ctx
.sequencer_client()
.get_account(user_holding_lp.to_string())
.get_account(user_holding_lp)
.await?
.account;
@ -255,8 +251,8 @@ async fn amm_public() -> Result<()> {
// Make swap
let subcommand = AmmProgramAgnosticSubcommand::Swap {
user_holding_a: format_public_account_id(&recipient_account_id_1.to_string()),
user_holding_b: format_public_account_id(&recipient_account_id_2.to_string()),
user_holding_a: format_public_account_id(recipient_account_id_1),
user_holding_b: format_public_account_id(recipient_account_id_2),
amount_in: 2,
min_amount_out: 1,
token_definition: definition_account_id_2.to_string(),
@ -268,19 +264,19 @@ async fn amm_public() -> Result<()> {
let user_holding_a_acc = ctx
.sequencer_client()
.get_account(recipient_account_id_1.to_string())
.get_account(recipient_account_id_1)
.await?
.account;
let user_holding_b_acc = ctx
.sequencer_client()
.get_account(recipient_account_id_2.to_string())
.get_account(recipient_account_id_2)
.await?
.account;
let user_holding_lp_acc = ctx
.sequencer_client()
.get_account(user_holding_lp.to_string())
.get_account(user_holding_lp)
.await?
.account;
@ -304,9 +300,9 @@ async fn amm_public() -> Result<()> {
// Add liquidity
let subcommand = AmmProgramAgnosticSubcommand::AddLiquidity {
user_holding_a: format_public_account_id(&recipient_account_id_1.to_string()),
user_holding_b: format_public_account_id(&recipient_account_id_2.to_string()),
user_holding_lp: format_public_account_id(&user_holding_lp.to_string()),
user_holding_a: format_public_account_id(recipient_account_id_1),
user_holding_b: format_public_account_id(recipient_account_id_2),
user_holding_lp: format_public_account_id(user_holding_lp),
min_amount_lp: 1,
max_amount_a: 2,
max_amount_b: 2,
@ -318,19 +314,19 @@ async fn amm_public() -> Result<()> {
let user_holding_a_acc = ctx
.sequencer_client()
.get_account(recipient_account_id_1.to_string())
.get_account(recipient_account_id_1)
.await?
.account;
let user_holding_b_acc = ctx
.sequencer_client()
.get_account(recipient_account_id_2.to_string())
.get_account(recipient_account_id_2)
.await?
.account;
let user_holding_lp_acc = ctx
.sequencer_client()
.get_account(user_holding_lp.to_string())
.get_account(user_holding_lp)
.await?
.account;
@ -354,9 +350,9 @@ async fn amm_public() -> Result<()> {
// Remove liquidity
let subcommand = AmmProgramAgnosticSubcommand::RemoveLiquidity {
user_holding_a: format_public_account_id(&recipient_account_id_1.to_string()),
user_holding_b: format_public_account_id(&recipient_account_id_2.to_string()),
user_holding_lp: format_public_account_id(&user_holding_lp.to_string()),
user_holding_a: format_public_account_id(recipient_account_id_1),
user_holding_b: format_public_account_id(recipient_account_id_2),
user_holding_lp: format_public_account_id(user_holding_lp),
balance_lp: 2,
min_amount_a: 1,
min_amount_b: 1,
@ -368,19 +364,19 @@ async fn amm_public() -> Result<()> {
let user_holding_a_acc = ctx
.sequencer_client()
.get_account(recipient_account_id_1.to_string())
.get_account(recipient_account_id_1)
.await?
.account;
let user_holding_b_acc = ctx
.sequencer_client()
.get_account(recipient_account_id_2.to_string())
.get_account(recipient_account_id_2)
.await?
.account;
let user_holding_lp_acc = ctx
.sequencer_client()
.get_account(user_holding_lp.to_string())
.get_account(user_holding_lp)
.await?
.account;

View File

@ -2,7 +2,6 @@ use std::time::Duration;
use anyhow::{Context as _, Result};
use integration_tests::{
ACC_RECEIVER, ACC_RECEIVER_PRIVATE, ACC_SENDER, ACC_SENDER_PRIVATE,
TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, fetch_privacy_preserving_tx,
format_private_account_id, format_public_account_id, verify_commitment_is_in_state,
};
@ -20,12 +19,12 @@ use wallet::cli::{
async fn private_transfer_to_owned_account() -> Result<()> {
let mut ctx = TestContext::new().await?;
let from: AccountId = ACC_SENDER_PRIVATE.parse()?;
let to: AccountId = ACC_RECEIVER_PRIVATE.parse()?;
let from: AccountId = ctx.existing_private_accounts()[0];
let to: AccountId = ctx.existing_private_accounts()[1];
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_private_account_id(&from.to_string()),
to: Some(format_private_account_id(&to.to_string())),
from: format_private_account_id(from),
to: Some(format_private_account_id(to)),
to_npk: None,
to_ipk: None,
amount: 100,
@ -57,13 +56,13 @@ async fn private_transfer_to_owned_account() -> Result<()> {
async fn private_transfer_to_foreign_account() -> Result<()> {
let mut ctx = TestContext::new().await?;
let from: AccountId = ACC_SENDER_PRIVATE.parse()?;
let from: AccountId = ctx.existing_private_accounts()[0];
let to_npk = NullifierPublicKey([42; 32]);
let to_npk_string = hex::encode(to_npk.0);
let to_ipk = Secp256k1Point::from_scalar(to_npk.0);
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_private_account_id(&from.to_string()),
from: format_private_account_id(from),
to: None,
to_npk: Some(to_npk_string),
to_ipk: Some(hex::encode(to_ipk.0)),
@ -83,7 +82,7 @@ async fn private_transfer_to_foreign_account() -> Result<()> {
.get_private_account_commitment(&from)
.context("Failed to get private account commitment for sender")?;
let tx = fetch_privacy_preserving_tx(ctx.sequencer_client(), tx_hash.clone()).await;
let tx = fetch_privacy_preserving_tx(ctx.sequencer_client(), tx_hash).await;
assert_eq!(tx.message.new_commitments[0], new_commitment1);
assert_eq!(tx.message.new_commitments.len(), 2);
@ -100,8 +99,8 @@ async fn private_transfer_to_foreign_account() -> Result<()> {
async fn deshielded_transfer_to_public_account() -> Result<()> {
let mut ctx = TestContext::new().await?;
let from: AccountId = ACC_SENDER_PRIVATE.parse()?;
let to: AccountId = ACC_RECEIVER.parse()?;
let from: AccountId = ctx.existing_private_accounts()[0];
let to: AccountId = ctx.existing_public_accounts()[1];
// Check initial balance of the private sender
let from_acc = ctx
@ -111,8 +110,8 @@ async fn deshielded_transfer_to_public_account() -> Result<()> {
assert_eq!(from_acc.balance, 10000);
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_private_account_id(&from.to_string()),
to: Some(format_public_account_id(&to.to_string())),
from: format_private_account_id(from),
to: Some(format_public_account_id(to)),
to_npk: None,
to_ipk: None,
amount: 100,
@ -133,10 +132,7 @@ async fn deshielded_transfer_to_public_account() -> Result<()> {
.context("Failed to get private account commitment")?;
assert!(verify_commitment_is_in_state(new_commitment, ctx.sequencer_client()).await);
let acc_2_balance = ctx
.sequencer_client()
.get_account_balance(to.to_string())
.await?;
let acc_2_balance = ctx.sequencer_client().get_account_balance(to).await?;
assert_eq!(from_acc.balance, 9900);
assert_eq!(acc_2_balance.balance, 20100);
@ -150,7 +146,7 @@ async fn deshielded_transfer_to_public_account() -> Result<()> {
async fn private_transfer_to_owned_account_using_claiming_path() -> Result<()> {
let mut ctx = TestContext::new().await?;
let from: AccountId = ACC_SENDER_PRIVATE.parse()?;
let from: AccountId = ctx.existing_private_accounts()[0];
// Create a new private account
let command = Command::Account(AccountSubcommand::New(NewSubcommand::Private { cci: None }));
@ -174,7 +170,7 @@ async fn private_transfer_to_owned_account_using_claiming_path() -> Result<()> {
// Send to this account using claiming path (using npk and ipk instead of account ID)
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_private_account_id(&from.to_string()),
from: format_private_account_id(from),
to: None,
to_npk: Some(hex::encode(to_keys.nullifer_public_key.0)),
to_ipk: Some(hex::encode(to_keys.incoming_viewing_public_key.0)),
@ -186,7 +182,7 @@ async fn private_transfer_to_owned_account_using_claiming_path() -> Result<()> {
anyhow::bail!("Expected PrivacyPreservingTransfer return value");
};
let tx = fetch_privacy_preserving_tx(ctx.sequencer_client(), tx_hash.clone()).await;
let tx = fetch_privacy_preserving_tx(ctx.sequencer_client(), tx_hash).await;
// Sync the wallet to claim the new account
let command = Command::Account(AccountSubcommand::SyncPrivate {});
@ -218,12 +214,12 @@ async fn private_transfer_to_owned_account_using_claiming_path() -> Result<()> {
async fn shielded_transfer_to_owned_private_account() -> Result<()> {
let mut ctx = TestContext::new().await?;
let from: AccountId = ACC_SENDER.parse()?;
let to: AccountId = ACC_RECEIVER_PRIVATE.parse()?;
let from: AccountId = ctx.existing_public_accounts()[0];
let to: AccountId = ctx.existing_private_accounts()[1];
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_public_account_id(&from.to_string()),
to: Some(format_private_account_id(&to.to_string())),
from: format_public_account_id(from),
to: Some(format_private_account_id(to)),
to_npk: None,
to_ipk: None,
amount: 100,
@ -244,10 +240,7 @@ async fn shielded_transfer_to_owned_private_account() -> Result<()> {
.context("Failed to get receiver's commitment")?;
assert!(verify_commitment_is_in_state(new_commitment, ctx.sequencer_client()).await);
let acc_from_balance = ctx
.sequencer_client()
.get_account_balance(from.to_string())
.await?;
let acc_from_balance = ctx.sequencer_client().get_account_balance(from).await?;
assert_eq!(acc_from_balance.balance, 9900);
assert_eq!(acc_to.balance, 20100);
@ -264,10 +257,10 @@ async fn shielded_transfer_to_foreign_account() -> Result<()> {
let to_npk = NullifierPublicKey([42; 32]);
let to_npk_string = hex::encode(to_npk.0);
let to_ipk = Secp256k1Point::from_scalar(to_npk.0);
let from: AccountId = ACC_SENDER.parse()?;
let from: AccountId = ctx.existing_public_accounts()[0];
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_public_account_id(&from.to_string()),
from: format_public_account_id(from),
to: None,
to_npk: Some(to_npk_string),
to_ipk: Some(hex::encode(to_ipk.0)),
@ -284,10 +277,7 @@ async fn shielded_transfer_to_foreign_account() -> Result<()> {
let tx = fetch_privacy_preserving_tx(ctx.sequencer_client(), tx_hash).await;
let acc_1_balance = ctx
.sequencer_client()
.get_account_balance(from.to_string())
.await?;
let acc_1_balance = ctx.sequencer_client().get_account_balance(from).await?;
assert!(
verify_commitment_is_in_state(
@ -313,7 +303,7 @@ async fn private_transfer_to_owned_account_continuous_run_path() -> Result<()> {
// The original implementation spawned wallet::cli::execute_continuous_run() in background
// but this conflicts with TestContext's wallet management
let from: AccountId = ACC_SENDER_PRIVATE.parse()?;
let from: AccountId = ctx.existing_private_accounts()[0];
// Create a new private account
let command = Command::Account(AccountSubcommand::New(NewSubcommand::Private { cci: None }));
@ -337,7 +327,7 @@ async fn private_transfer_to_owned_account_continuous_run_path() -> Result<()> {
// Send transfer using nullifier and incoming viewing public keys
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_private_account_id(&from.to_string()),
from: format_private_account_id(from),
to: None,
to_npk: Some(hex::encode(to_keys.nullifer_public_key.0)),
to_ipk: Some(hex::encode(to_keys.incoming_viewing_public_key.0)),
@ -349,7 +339,7 @@ async fn private_transfer_to_owned_account_continuous_run_path() -> Result<()> {
anyhow::bail!("Failed to send transaction");
};
let tx = fetch_privacy_preserving_tx(ctx.sequencer_client(), tx_hash.clone()).await;
let tx = fetch_privacy_preserving_tx(ctx.sequencer_client(), tx_hash).await;
info!("Waiting for next blocks to check if continuous run fetches account");
tokio::time::sleep(Duration::from_secs(TIME_TO_WAIT_FOR_BLOCK_SECONDS)).await;
@ -383,7 +373,7 @@ async fn initialize_private_account() -> Result<()> {
};
let command = Command::AuthTransfer(AuthTransferSubcommand::Init {
account_id: format_private_account_id(&account_id.to_string()),
account_id: format_private_account_id(account_id),
});
wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?;

View File

@ -1,9 +1,7 @@
use std::time::Duration;
use anyhow::Result;
use integration_tests::{
ACC_RECEIVER, ACC_SENDER, TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, format_public_account_id,
};
use integration_tests::{TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, format_public_account_id};
use log::info;
use nssa::program::Program;
use tokio::test;
@ -18,8 +16,8 @@ async fn successful_transfer_to_existing_account() -> Result<()> {
let mut ctx = TestContext::new().await?;
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_public_account_id(ACC_SENDER),
to: Some(format_public_account_id(ACC_RECEIVER)),
from: format_public_account_id(ctx.existing_public_accounts()[0]),
to: Some(format_public_account_id(ctx.existing_public_accounts()[1])),
to_npk: None,
to_ipk: None,
amount: 100,
@ -33,11 +31,11 @@ async fn successful_transfer_to_existing_account() -> Result<()> {
info!("Checking correct balance move");
let acc_1_balance = ctx
.sequencer_client()
.get_account_balance(ACC_SENDER.to_string())
.get_account_balance(ctx.existing_public_accounts()[0])
.await?;
let acc_2_balance = ctx
.sequencer_client()
.get_account_balance(ACC_RECEIVER.to_string())
.get_account_balance(ctx.existing_public_accounts()[1])
.await?;
info!("Balance of sender: {acc_1_balance:#?}");
@ -64,17 +62,15 @@ pub async fn successful_transfer_to_new_account() -> Result<()> {
.storage()
.user_data
.account_ids()
.map(ToString::to_string)
.find(|acc_id| acc_id != ACC_SENDER && acc_id != ACC_RECEIVER)
.find(|acc_id| {
*acc_id != ctx.existing_public_accounts()[0]
&& *acc_id != ctx.existing_public_accounts()[1]
})
.expect("Failed to find newly created account in the wallet storage");
if new_persistent_account_id == String::new() {
panic!("Failed to produce new account, not present in persistent accounts");
}
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_public_account_id(ACC_SENDER),
to: Some(format_public_account_id(&new_persistent_account_id)),
from: format_public_account_id(ctx.existing_public_accounts()[0]),
to: Some(format_public_account_id(new_persistent_account_id)),
to_npk: None,
to_ipk: None,
amount: 100,
@ -88,7 +84,7 @@ pub async fn successful_transfer_to_new_account() -> Result<()> {
info!("Checking correct balance move");
let acc_1_balance = ctx
.sequencer_client()
.get_account_balance(ACC_SENDER.to_string())
.get_account_balance(ctx.existing_public_accounts()[0])
.await?;
let acc_2_balance = ctx
.sequencer_client()
@ -109,8 +105,8 @@ async fn failed_transfer_with_insufficient_balance() -> Result<()> {
let mut ctx = TestContext::new().await?;
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_public_account_id(ACC_SENDER),
to: Some(format_public_account_id(ACC_RECEIVER)),
from: format_public_account_id(ctx.existing_public_accounts()[0]),
to: Some(format_public_account_id(ctx.existing_public_accounts()[1])),
to_npk: None,
to_ipk: None,
amount: 1000000,
@ -125,11 +121,11 @@ async fn failed_transfer_with_insufficient_balance() -> Result<()> {
info!("Checking balances unchanged");
let acc_1_balance = ctx
.sequencer_client()
.get_account_balance(ACC_SENDER.to_string())
.get_account_balance(ctx.existing_public_accounts()[0])
.await?;
let acc_2_balance = ctx
.sequencer_client()
.get_account_balance(ACC_RECEIVER.to_string())
.get_account_balance(ctx.existing_public_accounts()[1])
.await?;
info!("Balance of sender: {acc_1_balance:#?}");
@ -147,8 +143,8 @@ async fn two_consecutive_successful_transfers() -> Result<()> {
// First transfer
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_public_account_id(ACC_SENDER),
to: Some(format_public_account_id(ACC_RECEIVER)),
from: format_public_account_id(ctx.existing_public_accounts()[0]),
to: Some(format_public_account_id(ctx.existing_public_accounts()[1])),
to_npk: None,
to_ipk: None,
amount: 100,
@ -162,11 +158,11 @@ async fn two_consecutive_successful_transfers() -> Result<()> {
info!("Checking correct balance move after first transfer");
let acc_1_balance = ctx
.sequencer_client()
.get_account_balance(ACC_SENDER.to_string())
.get_account_balance(ctx.existing_public_accounts()[0])
.await?;
let acc_2_balance = ctx
.sequencer_client()
.get_account_balance(ACC_RECEIVER.to_string())
.get_account_balance(ctx.existing_public_accounts()[1])
.await?;
info!("Balance of sender: {acc_1_balance:#?}");
@ -179,8 +175,8 @@ async fn two_consecutive_successful_transfers() -> Result<()> {
// Second transfer
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_public_account_id(ACC_SENDER),
to: Some(format_public_account_id(ACC_RECEIVER)),
from: format_public_account_id(ctx.existing_public_accounts()[0]),
to: Some(format_public_account_id(ctx.existing_public_accounts()[1])),
to_npk: None,
to_ipk: None,
amount: 100,
@ -194,11 +190,11 @@ async fn two_consecutive_successful_transfers() -> Result<()> {
info!("Checking correct balance move after second transfer");
let acc_1_balance = ctx
.sequencer_client()
.get_account_balance(ACC_SENDER.to_string())
.get_account_balance(ctx.existing_public_accounts()[0])
.await?;
let acc_2_balance = ctx
.sequencer_client()
.get_account_balance(ACC_RECEIVER.to_string())
.get_account_balance(ctx.existing_public_accounts()[1])
.await?;
info!("Balance of sender: {acc_1_balance:#?}");
@ -223,14 +219,14 @@ async fn initialize_public_account() -> Result<()> {
};
let command = Command::AuthTransfer(AuthTransferSubcommand::Init {
account_id: format_public_account_id(&account_id.to_string()),
account_id: format_public_account_id(account_id),
});
wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?;
info!("Checking correct execution");
let account = ctx
.sequencer_client()
.get_account(account_id.to_string())
.get_account(account_id)
.await?
.account;

View File

@ -1,23 +0,0 @@
use anyhow::Result;
use integration_tests::TestContext;
use log::info;
use tokio::test;
// #[ignore = "needs complicated setup"]
// #[test]
// To run this test properly, you need nomos node running in the background.
// For instructions in building nomos node, refer to [this](https://github.com/logos-blockchain/logos-blockchain?tab=readme-ov-file#running-a-logos-blockchain-node).
//
// Recommended to run node locally from build binary.
// async fn indexer_run_local_node() -> Result<()> {
// let _ctx = TestContext::new_bedrock_local_attached().await?;
// info!("Let's observe behaviour");
// tokio::time::sleep(std::time::Duration::from_secs(180)).await;
// // No way to check state of indexer now
// // When it will be a service, then it will become possible.
// Ok(())
// }

View File

@ -2,8 +2,8 @@ use std::{str::FromStr, time::Duration};
use anyhow::Result;
use integration_tests::{
ACC_SENDER, ACC_SENDER_PRIVATE, TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext,
format_private_account_id, format_public_account_id, verify_commitment_is_in_state,
TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, format_private_account_id,
format_public_account_id, verify_commitment_is_in_state,
};
use key_protocol::key_management::key_tree::chain_index::ChainIndex;
use log::info;
@ -19,7 +19,7 @@ use wallet::cli::{
async fn restore_keys_from_seed() -> Result<()> {
let mut ctx = TestContext::new().await?;
let from: AccountId = ACC_SENDER_PRIVATE.parse()?;
let from: AccountId = ctx.existing_private_accounts()[0];
// Create first private account at root
let command = Command::Account(AccountSubcommand::New(NewSubcommand::Private {
@ -47,8 +47,8 @@ async fn restore_keys_from_seed() -> Result<()> {
// Send to first private account
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_private_account_id(&from.to_string()),
to: Some(format_private_account_id(&to_account_id1.to_string())),
from: format_private_account_id(from),
to: Some(format_private_account_id(to_account_id1)),
to_npk: None,
to_ipk: None,
amount: 100,
@ -57,15 +57,15 @@ async fn restore_keys_from_seed() -> Result<()> {
// Send to second private account
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_private_account_id(&from.to_string()),
to: Some(format_private_account_id(&to_account_id2.to_string())),
from: format_private_account_id(from),
to: Some(format_private_account_id(to_account_id2)),
to_npk: None,
to_ipk: None,
amount: 101,
});
wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?;
let from: AccountId = ACC_SENDER.parse()?;
let from: AccountId = ctx.existing_public_accounts()[0];
// Create first public account at root
let command = Command::Account(AccountSubcommand::New(NewSubcommand::Public {
@ -93,8 +93,8 @@ async fn restore_keys_from_seed() -> Result<()> {
// Send to first public account
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_public_account_id(&from.to_string()),
to: Some(format_public_account_id(&to_account_id3.to_string())),
from: format_public_account_id(from),
to: Some(format_public_account_id(to_account_id3)),
to_npk: None,
to_ipk: None,
amount: 102,
@ -103,8 +103,8 @@ async fn restore_keys_from_seed() -> Result<()> {
// Send to second public account
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_public_account_id(&from.to_string()),
to: Some(format_public_account_id(&to_account_id4.to_string())),
from: format_public_account_id(from),
to: Some(format_public_account_id(to_account_id4)),
to_npk: None,
to_ipk: None,
amount: 103,
@ -166,8 +166,8 @@ async fn restore_keys_from_seed() -> Result<()> {
// Test that restored accounts can send transactions
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_private_account_id(&to_account_id1.to_string()),
to: Some(format_private_account_id(&to_account_id2.to_string())),
from: format_private_account_id(to_account_id1),
to: Some(format_private_account_id(to_account_id2)),
to_npk: None,
to_ipk: None,
amount: 10,
@ -175,8 +175,8 @@ async fn restore_keys_from_seed() -> Result<()> {
wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?;
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_public_account_id(&to_account_id3.to_string()),
to: Some(format_public_account_id(&to_account_id4.to_string())),
from: format_public_account_id(to_account_id3),
to: Some(format_public_account_id(to_account_id4)),
to_npk: None,
to_ipk: None,
amount: 11,
@ -201,11 +201,11 @@ async fn restore_keys_from_seed() -> Result<()> {
// Verify public account balances
let acc3 = ctx
.sequencer_client()
.get_account_balance(to_account_id3.to_string())
.get_account_balance(to_account_id3)
.await?;
let acc4 = ctx
.sequencer_client()
.get_account_balance(to_account_id4.to_string())
.get_account_balance(to_account_id4)
.await?;
assert_eq!(acc3.balance, 91); // 102 - 11

View File

@ -3,8 +3,8 @@ use std::time::Duration;
use anyhow::{Context as _, Result};
use common::PINATA_BASE58;
use integration_tests::{
ACC_SENDER, ACC_SENDER_PRIVATE, TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext,
format_private_account_id, format_public_account_id, verify_commitment_is_in_state,
TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, format_private_account_id,
format_public_account_id, verify_commitment_is_in_state,
};
use log::info;
use tokio::test;
@ -22,12 +22,12 @@ async fn claim_pinata_to_existing_public_account() -> Result<()> {
let pinata_prize = 150;
let command = Command::Pinata(PinataProgramAgnosticSubcommand::Claim {
to: format_public_account_id(ACC_SENDER),
to: format_public_account_id(ctx.existing_public_accounts()[0]),
});
let pinata_balance_pre = ctx
.sequencer_client()
.get_account_balance(PINATA_BASE58.to_string())
.get_account_balance(PINATA_BASE58.parse().unwrap())
.await?
.balance;
@ -39,13 +39,13 @@ async fn claim_pinata_to_existing_public_account() -> Result<()> {
info!("Checking correct balance move");
let pinata_balance_post = ctx
.sequencer_client()
.get_account_balance(PINATA_BASE58.to_string())
.get_account_balance(PINATA_BASE58.parse().unwrap())
.await?
.balance;
let winner_balance_post = ctx
.sequencer_client()
.get_account_balance(ACC_SENDER.to_string())
.get_account_balance(ctx.existing_public_accounts()[0])
.await?
.balance;
@ -63,12 +63,12 @@ async fn claim_pinata_to_existing_private_account() -> Result<()> {
let pinata_prize = 150;
let command = Command::Pinata(PinataProgramAgnosticSubcommand::Claim {
to: format_private_account_id(ACC_SENDER_PRIVATE),
to: format_private_account_id(ctx.existing_private_accounts()[0]),
});
let pinata_balance_pre = ctx
.sequencer_client()
.get_account_balance(PINATA_BASE58.to_string())
.get_account_balance(PINATA_BASE58.parse().unwrap())
.await?
.balance;
@ -86,13 +86,13 @@ async fn claim_pinata_to_existing_private_account() -> Result<()> {
let new_commitment = ctx
.wallet()
.get_private_account_commitment(&ACC_SENDER_PRIVATE.parse()?)
.get_private_account_commitment(&ctx.existing_private_accounts()[0])
.context("Failed to get private account commitment")?;
assert!(verify_commitment_is_in_state(new_commitment, ctx.sequencer_client()).await);
let pinata_balance_post = ctx
.sequencer_client()
.get_account_balance(PINATA_BASE58.to_string())
.get_account_balance(PINATA_BASE58.parse().unwrap())
.await?
.balance;
@ -122,7 +122,7 @@ async fn claim_pinata_to_new_private_account() -> Result<()> {
anyhow::bail!("Expected RegisterAccount return value");
};
let winner_account_id_formatted = format_private_account_id(&winner_account_id.to_string());
let winner_account_id_formatted = format_private_account_id(winner_account_id);
// Initialize account under auth transfer program
let command = Command::AuthTransfer(AuthTransferSubcommand::Init {
@ -146,7 +146,7 @@ async fn claim_pinata_to_new_private_account() -> Result<()> {
let pinata_balance_pre = ctx
.sequencer_client()
.get_account_balance(PINATA_BASE58.to_string())
.get_account_balance(PINATA_BASE58.parse().unwrap())
.await?
.balance;
@ -163,7 +163,7 @@ async fn claim_pinata_to_new_private_account() -> Result<()> {
let pinata_balance_post = ctx
.sequencer_client()
.get_account_balance(PINATA_BASE58.to_string())
.get_account_balance(PINATA_BASE58.parse().unwrap())
.await?
.balance;

View File

@ -45,11 +45,13 @@ async fn deploy_and_execute_program() -> Result<()> {
let _response = ctx.sequencer_client().send_tx_public(transaction).await?;
info!("Waiting for next block creation");
tokio::time::sleep(Duration::from_secs(TIME_TO_WAIT_FOR_BLOCK_SECONDS)).await;
// Waiting for long time as it may take some time for such a big transaction to be included in a
// block
tokio::time::sleep(Duration::from_secs(2 * TIME_TO_WAIT_FOR_BLOCK_SECONDS)).await;
let post_state_account = ctx
.sequencer_client()
.get_account(account_id.to_string())
.get_account(account_id)
.await?
.account;

View File

@ -63,10 +63,10 @@ async fn create_and_transfer_public_token() -> Result<()> {
let name = "A NAME".to_string();
let total_supply = 37;
let subcommand = TokenProgramAgnosticSubcommand::New {
definition_account_id: format_public_account_id(&definition_account_id.to_string()),
supply_account_id: format_public_account_id(&supply_account_id.to_string()),
name: name.clone(),
total_supply,
definition_account_id: format_public_account_id(definition_account_id),
supply_account_id: format_public_account_id(supply_account_id),
name: "A NAME".to_string(),
total_supply: 37,
};
wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?;
@ -76,7 +76,7 @@ async fn create_and_transfer_public_token() -> Result<()> {
// Check the status of the token definition account
let definition_acc = ctx
.sequencer_client()
.get_account(definition_account_id.to_string())
.get_account(definition_account_id)
.await?
.account;
let token_definition = TokenDefinition::try_from(&definition_acc.data)?;
@ -94,7 +94,7 @@ async fn create_and_transfer_public_token() -> Result<()> {
// Check the status of the token holding account with the total supply
let supply_acc = ctx
.sequencer_client()
.get_account(supply_account_id.to_string())
.get_account(supply_account_id)
.await?
.account;
@ -112,8 +112,8 @@ async fn create_and_transfer_public_token() -> Result<()> {
// Transfer 7 tokens from supply_acc to recipient_account_id
let transfer_amount = 7;
let subcommand = TokenProgramAgnosticSubcommand::Send {
from: format_public_account_id(&supply_account_id.to_string()),
to: Some(format_public_account_id(&recipient_account_id.to_string())),
from: format_public_account_id(supply_account_id),
to: Some(format_public_account_id(recipient_account_id)),
to_npk: None,
to_ipk: None,
amount: transfer_amount,
@ -127,7 +127,7 @@ async fn create_and_transfer_public_token() -> Result<()> {
// Check the status of the supply account after transfer
let supply_acc = ctx
.sequencer_client()
.get_account(supply_account_id.to_string())
.get_account(supply_account_id)
.await?
.account;
assert_eq!(supply_acc.program_owner, Program::token().id());
@ -143,7 +143,7 @@ async fn create_and_transfer_public_token() -> Result<()> {
// Check the status of the recipient account after transfer
let recipient_acc = ctx
.sequencer_client()
.get_account(recipient_account_id.to_string())
.get_account(recipient_account_id)
.await?
.account;
assert_eq!(recipient_acc.program_owner, Program::token().id());
@ -159,9 +159,9 @@ async fn create_and_transfer_public_token() -> Result<()> {
// Burn 3 tokens from recipient_acc
let burn_amount = 3;
let subcommand = TokenProgramAgnosticSubcommand::Burn {
definition: format_public_account_id(&definition_account_id.to_string()),
holder: format_public_account_id(&recipient_account_id.to_string()),
amount: burn_amount,
definition: format_public_account_id(definition_account_id),
holder: format_public_account_id(recipient_account_id),
amount: 3,
};
wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?;
@ -172,7 +172,7 @@ async fn create_and_transfer_public_token() -> Result<()> {
// Check the status of the token definition account after burn
let definition_acc = ctx
.sequencer_client()
.get_account(definition_account_id.to_string())
.get_account(definition_account_id)
.await?
.account;
let token_definition = TokenDefinition::try_from(&definition_acc.data)?;
@ -189,7 +189,7 @@ async fn create_and_transfer_public_token() -> Result<()> {
// Check the status of the recipient account after burn
let recipient_acc = ctx
.sequencer_client()
.get_account(recipient_account_id.to_string())
.get_account(recipient_account_id)
.await?
.account;
let token_holding = TokenHolding::try_from(&recipient_acc.data)?;
@ -205,8 +205,8 @@ async fn create_and_transfer_public_token() -> Result<()> {
// Mint 10 tokens at recipient_acc
let mint_amount = 10;
let subcommand = TokenProgramAgnosticSubcommand::Mint {
definition: format_public_account_id(&definition_account_id.to_string()),
holder: Some(format_public_account_id(&recipient_account_id.to_string())),
definition: format_public_account_id(definition_account_id),
holder: Some(format_public_account_id(recipient_account_id)),
holder_npk: None,
holder_ipk: None,
amount: mint_amount,
@ -220,7 +220,7 @@ async fn create_and_transfer_public_token() -> Result<()> {
// Check the status of the token definition account after mint
let definition_acc = ctx
.sequencer_client()
.get_account(definition_account_id.to_string())
.get_account(definition_account_id)
.await?
.account;
let token_definition = TokenDefinition::try_from(&definition_acc.data)?;
@ -237,7 +237,7 @@ async fn create_and_transfer_public_token() -> Result<()> {
// Check the status of the recipient account after mint
let recipient_acc = ctx
.sequencer_client()
.get_account(recipient_account_id.to_string())
.get_account(recipient_account_id)
.await?
.account;
let token_holding = TokenHolding::try_from(&recipient_acc.data)?;
@ -302,10 +302,10 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> {
let name = "A NAME".to_string();
let total_supply = 37;
let subcommand = TokenProgramAgnosticSubcommand::New {
definition_account_id: format_public_account_id(&definition_account_id.to_string()),
supply_account_id: format_private_account_id(&supply_account_id.to_string()),
name: name.clone(),
total_supply,
definition_account_id: format_public_account_id(definition_account_id),
supply_account_id: format_private_account_id(supply_account_id),
name: "A NAME".to_string(),
total_supply: 37,
};
wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?;
@ -316,7 +316,7 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> {
// Check the status of the token definition account
let definition_acc = ctx
.sequencer_client()
.get_account(definition_account_id.to_string())
.get_account(definition_account_id)
.await?
.account;
let token_definition = TokenDefinition::try_from(&definition_acc.data)?;
@ -340,8 +340,8 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> {
// Transfer 7 tokens from supply_acc to recipient_account_id
let transfer_amount = 7;
let subcommand = TokenProgramAgnosticSubcommand::Send {
from: format_private_account_id(&supply_account_id.to_string()),
to: Some(format_private_account_id(&recipient_account_id.to_string())),
from: format_private_account_id(supply_account_id),
to: Some(format_private_account_id(recipient_account_id)),
to_npk: None,
to_ipk: None,
amount: transfer_amount,
@ -367,9 +367,9 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> {
// Burn 3 tokens from recipient_acc
let burn_amount = 3;
let subcommand = TokenProgramAgnosticSubcommand::Burn {
definition: format_public_account_id(&definition_account_id.to_string()),
holder: format_private_account_id(&recipient_account_id.to_string()),
amount: burn_amount,
definition: format_public_account_id(definition_account_id),
holder: format_private_account_id(recipient_account_id),
amount: 3,
};
wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?;
@ -380,7 +380,7 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> {
// Check the token definition account after burn
let definition_acc = ctx
.sequencer_client()
.get_account(definition_account_id.to_string())
.get_account(definition_account_id)
.await?
.account;
let token_definition = TokenDefinition::try_from(&definition_acc.data)?;
@ -458,10 +458,10 @@ async fn create_token_with_private_definition() -> Result<()> {
let name = "A NAME".to_string();
let total_supply = 37;
let subcommand = TokenProgramAgnosticSubcommand::New {
definition_account_id: format_private_account_id(&definition_account_id.to_string()),
supply_account_id: format_public_account_id(&supply_account_id.to_string()),
name: name.clone(),
total_supply,
definition_account_id: format_private_account_id(definition_account_id),
supply_account_id: format_public_account_id(supply_account_id),
name: "A NAME".to_string(),
total_supply: 37,
};
wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?;
@ -479,7 +479,7 @@ async fn create_token_with_private_definition() -> Result<()> {
// Verify supply account
let supply_acc = ctx
.sequencer_client()
.get_account(supply_account_id.to_string())
.get_account(supply_account_id)
.await?
.account;
@ -522,10 +522,8 @@ async fn create_token_with_private_definition() -> Result<()> {
// Mint to public account
let mint_amount_public = 10;
let subcommand = TokenProgramAgnosticSubcommand::Mint {
definition: format_private_account_id(&definition_account_id.to_string()),
holder: Some(format_public_account_id(
&recipient_account_id_public.to_string(),
)),
definition: format_private_account_id(definition_account_id),
holder: Some(format_public_account_id(recipient_account_id_public)),
holder_npk: None,
holder_ipk: None,
amount: mint_amount_public,
@ -555,7 +553,7 @@ async fn create_token_with_private_definition() -> Result<()> {
// Verify public recipient received tokens
let recipient_acc = ctx
.sequencer_client()
.get_account(recipient_account_id_public.to_string())
.get_account(recipient_account_id_public)
.await?
.account;
let token_holding = TokenHolding::try_from(&recipient_acc.data)?;
@ -571,10 +569,8 @@ async fn create_token_with_private_definition() -> Result<()> {
// Mint to private account
let mint_amount_private = 5;
let subcommand = TokenProgramAgnosticSubcommand::Mint {
definition: format_private_account_id(&definition_account_id.to_string()),
holder: Some(format_private_account_id(
&recipient_account_id_private.to_string(),
)),
definition: format_private_account_id(definition_account_id),
holder: Some(format_private_account_id(recipient_account_id_private)),
holder_npk: None,
holder_ipk: None,
amount: mint_amount_private,
@ -646,10 +642,10 @@ async fn create_token_with_private_definition_and_supply() -> Result<()> {
let name = "A NAME".to_string();
let total_supply = 37;
let subcommand = TokenProgramAgnosticSubcommand::New {
definition_account_id: format_private_account_id(&definition_account_id.to_string()),
supply_account_id: format_private_account_id(&supply_account_id.to_string()),
name,
total_supply,
definition_account_id: format_private_account_id(definition_account_id),
supply_account_id: format_private_account_id(supply_account_id),
name: "A NAME".to_string(),
total_supply: 37,
};
wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?;
@ -702,8 +698,8 @@ async fn create_token_with_private_definition_and_supply() -> Result<()> {
// Transfer tokens
let transfer_amount = 7;
let subcommand = TokenProgramAgnosticSubcommand::Send {
from: format_private_account_id(&supply_account_id.to_string()),
to: Some(format_private_account_id(&recipient_account_id.to_string())),
from: format_private_account_id(supply_account_id),
to: Some(format_private_account_id(recipient_account_id)),
to_npk: None,
to_ipk: None,
amount: transfer_amount,
@ -806,10 +802,10 @@ async fn shielded_token_transfer() -> Result<()> {
let name = "A NAME".to_string();
let total_supply = 37;
let subcommand = TokenProgramAgnosticSubcommand::New {
definition_account_id: format_public_account_id(&definition_account_id.to_string()),
supply_account_id: format_public_account_id(&supply_account_id.to_string()),
name,
total_supply,
definition_account_id: format_public_account_id(definition_account_id),
supply_account_id: format_public_account_id(supply_account_id),
name: "A NAME".to_string(),
total_supply: 37,
};
wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?;
@ -820,8 +816,8 @@ async fn shielded_token_transfer() -> Result<()> {
// Perform shielded transfer: public supply -> private recipient
let transfer_amount = 7;
let subcommand = TokenProgramAgnosticSubcommand::Send {
from: format_public_account_id(&supply_account_id.to_string()),
to: Some(format_private_account_id(&recipient_account_id.to_string())),
from: format_public_account_id(supply_account_id),
to: Some(format_private_account_id(recipient_account_id)),
to_npk: None,
to_ipk: None,
amount: transfer_amount,
@ -835,7 +831,7 @@ async fn shielded_token_transfer() -> Result<()> {
// Verify supply account balance
let supply_acc = ctx
.sequencer_client()
.get_account(supply_account_id.to_string())
.get_account(supply_account_id)
.await?
.account;
let token_holding = TokenHolding::try_from(&supply_acc.data)?;
@ -920,10 +916,10 @@ async fn deshielded_token_transfer() -> Result<()> {
let name = "A NAME".to_string();
let total_supply = 37;
let subcommand = TokenProgramAgnosticSubcommand::New {
definition_account_id: format_public_account_id(&definition_account_id.to_string()),
supply_account_id: format_private_account_id(&supply_account_id.to_string()),
name,
total_supply,
definition_account_id: format_public_account_id(definition_account_id),
supply_account_id: format_private_account_id(supply_account_id),
name: "A NAME".to_string(),
total_supply: 37,
};
wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?;
@ -934,8 +930,8 @@ async fn deshielded_token_transfer() -> Result<()> {
// Perform deshielded transfer: private supply -> public recipient
let transfer_amount = 7;
let subcommand = TokenProgramAgnosticSubcommand::Send {
from: format_private_account_id(&supply_account_id.to_string()),
to: Some(format_public_account_id(&recipient_account_id.to_string())),
from: format_private_account_id(supply_account_id),
to: Some(format_public_account_id(recipient_account_id)),
to_npk: None,
to_ipk: None,
amount: transfer_amount,
@ -970,7 +966,7 @@ async fn deshielded_token_transfer() -> Result<()> {
// Verify recipient balance
let recipient_acc = ctx
.sequencer_client()
.get_account(recipient_account_id.to_string())
.get_account(recipient_account_id)
.await?
.account;
let token_holding = TokenHolding::try_from(&recipient_acc.data)?;
@ -1021,10 +1017,10 @@ async fn token_claiming_path_with_private_accounts() -> Result<()> {
let name = "A NAME".to_string();
let total_supply = 37;
let subcommand = TokenProgramAgnosticSubcommand::New {
definition_account_id: format_private_account_id(&definition_account_id.to_string()),
supply_account_id: format_private_account_id(&supply_account_id.to_string()),
name,
total_supply,
definition_account_id: format_private_account_id(definition_account_id),
supply_account_id: format_private_account_id(supply_account_id),
name: "A NAME".to_string(),
total_supply: 37,
};
wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?;
@ -1057,7 +1053,7 @@ async fn token_claiming_path_with_private_accounts() -> Result<()> {
// Mint using claiming path (foreign account)
let mint_amount = 9;
let subcommand = TokenProgramAgnosticSubcommand::Mint {
definition: format_private_account_id(&definition_account_id.to_string()),
definition: format_private_account_id(definition_account_id),
holder: None,
holder_npk: Some(hex::encode(holder_keys.nullifer_public_key.0)),
holder_ipk: Some(hex::encode(holder_keys.incoming_viewing_public_key.0)),

View File

@ -1,9 +1,11 @@
use std::time::{Duration, Instant};
use anyhow::Result;
use common::block::{AccountInitialData, CommitmentsInitialData};
use integration_tests::TestContext;
use key_protocol::key_management::ephemeral_key_holder::EphemeralKeyHolder;
use integration_tests::{
TestContext,
config::{InitialData, SequencerPartialConfig},
};
use key_protocol::key_management::{KeyChain, ephemeral_key_holder::EphemeralKeyHolder};
use log::info;
use nssa::{
Account, AccountId, PrivacyPreservingTransaction, PrivateKey, PublicKey, PublicTransaction,
@ -16,17 +18,20 @@ use nssa_core::{
account::{AccountWithMetadata, data::Data},
encryption::IncomingViewingPublicKey,
};
use sequencer_core::config::SequencerConfig;
use tokio::test;
// TODO: Make a proper benchmark instead of an ad-hoc test
#[test]
pub async fn tps_test() -> Result<()> {
let num_transactions = 300 * 5;
let target_tps = 12;
let target_tps = 8;
let tps_test = TpsTestManager::new(target_tps, num_transactions);
let ctx = TestContext::new_with_sequencer_config(tps_test.generate_sequencer_config()).await?;
let ctx = TestContext::builder()
.with_sequencer_partial_config(TpsTestManager::generate_sequencer_partial_config())
.with_initial_data(tps_test.generate_initial_data())
.build()
.await?;
let target_time = tps_test.target_time();
info!(
@ -56,12 +61,10 @@ pub async fn tps_test() -> Result<()> {
let tx_obj = ctx
.sequencer_client()
.get_transaction_by_hash(tx_hash.clone())
.get_transaction_by_hash(*tx_hash)
.await
.inspect_err(|err| {
log::warn!(
"Failed to get transaction by hash {tx_hash:#?} with error: {err:#?}"
)
log::warn!("Failed to get transaction by hash {tx_hash} with error: {err:#?}")
});
if let Ok(tx_obj) = tx_obj
@ -148,47 +151,35 @@ impl TpsTestManager {
/// Generates a sequencer configuration with initial balance in a number of public accounts.
/// The transactions generated with the function `build_public_txs` will be valid in a node
/// started with the config from this method.
pub(crate) fn generate_sequencer_config(&self) -> SequencerConfig {
fn generate_initial_data(&self) -> InitialData {
// Create public public keypairs
let initial_public_accounts = self
let public_accounts = self
.public_keypairs
.iter()
.map(|(_, account_id)| AccountInitialData {
account_id: account_id.to_string(),
balance: 10,
})
.map(|(key, _)| (key.clone(), 10))
.collect();
// Generate an initial commitment to be used with the privacy preserving transaction
// created with the `build_privacy_transaction` function.
let sender_nsk = [1; 32];
let sender_npk = NullifierPublicKey::from(&sender_nsk);
let key_chain = KeyChain::new_os_random();
let account = Account {
balance: 100,
nonce: 0xdeadbeef,
program_owner: Program::authenticated_transfer_program().id(),
data: Data::default(),
};
let initial_commitment = CommitmentsInitialData {
npk: sender_npk,
account,
};
SequencerConfig {
home: ".".into(),
override_rust_log: None,
genesis_id: 1,
is_genesis_random: true,
InitialData {
public_accounts,
private_accounts: vec![(key_chain, account)],
}
}
fn generate_sequencer_partial_config() -> SequencerPartialConfig {
SequencerPartialConfig {
max_num_tx_in_block: 300,
mempool_max_size: 10000,
block_create_timeout_millis: 12000,
port: 3040,
initial_accounts: initial_public_accounts,
initial_commitments: vec![initial_commitment],
signing_key: [37; 32],
bedrock_config: None,
retry_pending_blocks_timeout_millis: 1000 * 60 * 4,
indexer_rpc_url: "http://localhost:8779".parse().unwrap(),
mempool_max_size: 10_000,
block_create_timeout_millis: 12_000,
}
}
}

View File

@ -272,7 +272,7 @@ impl KeyTree<ChildKeysPublic> {
while let Some(curr_id) = id_stack.pop() {
if let Some(node) = self.key_map.get(&curr_id) {
let address = node.account_id();
let node_acc = client.get_account(address.to_string()).await?.account;
let node_acc = client.get_account(address).await?.account;
if node_acc == nssa::Account::default() && curr_id != ChainIndex::root() {
self.remove(address);
@ -307,7 +307,7 @@ impl KeyTree<ChildKeysPublic> {
for id in ChainIndex::chain_ids_at_depth(i) {
if let Some(node) = self.key_map.get(&id) {
let address = node.account_id();
let node_acc = client.get_account(address.to_string()).await?.account;
let node_acc = client.get_account(address).await?.account;
if node_acc == nssa::Account::default() {
let addr = node.account_id();

View File

@ -66,11 +66,11 @@ impl SeedHolder {
}
// Safe unwrap
*hash.first_chunk::<32>().unwrap()
HashType(*hash.first_chunk::<32>().unwrap())
}
pub fn produce_top_secret_key_holder(&self) -> SecretSpendingKey {
SecretSpendingKey(self.generate_secret_spending_key_hash())
SecretSpendingKey(self.generate_secret_spending_key_hash().into())
}
}
@ -94,7 +94,7 @@ impl SecretSpendingKey {
hasher.update([2u8]);
hasher.update([0u8; 22]);
<HashType>::from(hasher.finalize_fixed())
hasher.finalize_fixed().into()
}
pub fn generate_outgoing_viewing_secret_key(&self) -> OutgoingViewingSecretKey {
@ -105,7 +105,7 @@ impl SecretSpendingKey {
hasher.update([3u8]);
hasher.update([0u8; 22]);
<HashType>::from(hasher.finalize_fixed())
hasher.finalize_fixed().into()
}
pub fn produce_private_key_holder(&self) -> PrivateKeyHolder {

View File

@ -1,4 +1,4 @@
use std::collections::HashMap;
use std::collections::BTreeMap;
use anyhow::Result;
use k256::AffinePoint;
@ -15,10 +15,10 @@ pub type PublicKey = AffinePoint;
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct NSSAUserData {
/// Default public accounts
pub default_pub_account_signing_keys: HashMap<nssa::AccountId, nssa::PrivateKey>,
pub default_pub_account_signing_keys: BTreeMap<nssa::AccountId, nssa::PrivateKey>,
/// Default private accounts
pub default_user_private_accounts:
HashMap<nssa::AccountId, (KeyChain, nssa_core::account::Account)>,
BTreeMap<nssa::AccountId, (KeyChain, nssa_core::account::Account)>,
/// Tree of public keys
pub public_key_tree: KeyTreePublic,
/// Tree of private keys
@ -27,7 +27,7 @@ pub struct NSSAUserData {
impl NSSAUserData {
fn valid_public_key_transaction_pairing_check(
accounts_keys_map: &HashMap<nssa::AccountId, nssa::PrivateKey>,
accounts_keys_map: &BTreeMap<nssa::AccountId, nssa::PrivateKey>,
) -> bool {
let mut check_res = true;
for (account_id, key) in accounts_keys_map {
@ -42,7 +42,7 @@ impl NSSAUserData {
}
fn valid_private_key_transaction_pairing_check(
accounts_keys_map: &HashMap<nssa::AccountId, (KeyChain, nssa_core::account::Account)>,
accounts_keys_map: &BTreeMap<nssa::AccountId, (KeyChain, nssa_core::account::Account)>,
) -> bool {
let mut check_res = true;
for (account_id, (key, _)) in accounts_keys_map {
@ -56,8 +56,8 @@ impl NSSAUserData {
}
pub fn new_with_accounts(
default_accounts_keys: HashMap<nssa::AccountId, nssa::PrivateKey>,
default_accounts_key_chains: HashMap<
default_accounts_keys: BTreeMap<nssa::AccountId, nssa::PrivateKey>,
default_accounts_key_chains: BTreeMap<
nssa::AccountId,
(KeyChain, nssa_core::account::Account),
>,
@ -106,14 +106,14 @@ impl NSSAUserData {
/// Returns the signing key for public transaction signatures
pub fn get_pub_account_signing_key(
&self,
account_id: &nssa::AccountId,
account_id: nssa::AccountId,
) -> Option<&nssa::PrivateKey> {
// First seek in defaults
if let Some(key) = self.default_pub_account_signing_keys.get(account_id) {
if let Some(key) = self.default_pub_account_signing_keys.get(&account_id) {
Some(key)
// Then seek in tree
} else {
self.public_key_tree.get_node(*account_id).map(Into::into)
self.public_key_tree.get_node(account_id).map(Into::into)
}
}
@ -166,20 +166,30 @@ impl NSSAUserData {
}
}
pub fn account_ids(&self) -> impl Iterator<Item = &nssa::AccountId> {
pub fn account_ids(&self) -> impl Iterator<Item = nssa::AccountId> {
self.public_account_ids().chain(self.private_account_ids())
}
pub fn public_account_ids(&self) -> impl Iterator<Item = nssa::AccountId> {
self.default_pub_account_signing_keys
.keys()
.chain(self.public_key_tree.account_id_map.keys())
.chain(self.default_user_private_accounts.keys())
.chain(self.private_key_tree.account_id_map.keys())
.copied()
.chain(self.public_key_tree.account_id_map.keys().copied())
}
pub fn private_account_ids(&self) -> impl Iterator<Item = nssa::AccountId> {
self.default_user_private_accounts
.keys()
.copied()
.chain(self.private_key_tree.account_id_map.keys().copied())
}
}
impl Default for NSSAUserData {
fn default() -> Self {
Self::new_with_accounts(
HashMap::new(),
HashMap::new(),
BTreeMap::new(),
BTreeMap::new(),
KeyTreePublic::new(&SeedHolder::new_mnemonic("default".to_string())),
KeyTreePrivate::new(&SeedHolder::new_mnemonic("default".to_string())),
)

View File

@ -8,12 +8,12 @@ license = { workspace = true }
risc0-zkvm.workspace = true
borsh.workspace = true
serde.workspace = true
serde_with.workspace = true
thiserror.workspace = true
bytemuck.workspace = true
base58.workspace = true
k256 = { workspace = true, optional = true }
base58 = { workspace = true, optional = true }
anyhow = { workspace = true, optional = true }
chacha20 = { version = "0.9", default-features = false }
[dev-dependencies]
@ -21,4 +21,4 @@ serde_json.workspace = true
[features]
default = []
host = ["dep:k256", "dep:base58", "dep:anyhow"]
host = ["dep:k256", "dep:anyhow"]

View File

@ -1,11 +1,10 @@
#[cfg(feature = "host")]
use std::{fmt::Display, str::FromStr};
#[cfg(feature = "host")]
use base58::{FromBase58, ToBase58};
use borsh::{BorshDeserialize, BorshSerialize};
pub use data::Data;
use serde::{Deserialize, Serialize};
use serde_with::{DeserializeFromStr, SerializeDisplay};
use crate::program::ProgramId;
@ -47,8 +46,8 @@ impl AccountWithMetadata {
Default,
Copy,
Clone,
Serialize,
Deserialize,
SerializeDisplay,
DeserializeFromStr,
PartialEq,
Eq,
Hash,
@ -80,23 +79,19 @@ impl AsRef<[u8]> for AccountId {
}
}
#[cfg(feature = "host")]
#[derive(Debug, thiserror::Error)]
pub enum AccountIdError {
#[error("invalid base58")]
InvalidBase58(#[from] anyhow::Error),
#[error("invalid base58: {0:?}")]
InvalidBase58(base58::FromBase58Error),
#[error("invalid length: expected 32 bytes, got {0}")]
InvalidLength(usize),
}
#[cfg(feature = "host")]
impl FromStr for AccountId {
type Err = AccountIdError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let bytes = s
.from_base58()
.map_err(|err| anyhow::anyhow!("Invalid base58 err {err:?}"))?;
let bytes = s.from_base58().map_err(AccountIdError::InvalidBase58)?;
if bytes.len() != 32 {
return Err(AccountIdError::InvalidLength(bytes.len()));
}
@ -106,7 +101,6 @@ impl FromStr for AccountId {
}
}
#[cfg(feature = "host")]
impl Display for AccountId {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.value.to_base58())

View File

@ -81,7 +81,7 @@ impl PrivacyPreservingTransaction {
let signer_account_ids = self.signer_account_ids();
// Check nonces corresponds to the current nonces on the public state.
for (account_id, nonce) in signer_account_ids.iter().zip(&message.nonces) {
let current_nonce = state.get_account_by_id(account_id).nonce;
let current_nonce = state.get_account_by_id(*account_id).nonce;
if current_nonce != *nonce {
return Err(NssaError::InvalidInput("Nonce mismatch".into()));
}
@ -93,7 +93,7 @@ impl PrivacyPreservingTransaction {
.iter()
.map(|account_id| {
AccountWithMetadata::new(
state.get_account_by_id(account_id),
state.get_account_by_id(*account_id),
signer_account_ids.contains(account_id),
*account_id,
)

View File

@ -93,7 +93,7 @@ impl PublicTransaction {
let signer_account_ids = self.signer_account_ids();
// Check nonces corresponds to the current nonces on the public state.
for (account_id, nonce) in signer_account_ids.iter().zip(&message.nonces) {
let current_nonce = state.get_account_by_id(account_id).nonce;
let current_nonce = state.get_account_by_id(*account_id).nonce;
if current_nonce != *nonce {
return Err(NssaError::InvalidInput("Nonce mismatch".into()));
}
@ -105,7 +105,7 @@ impl PublicTransaction {
.iter()
.map(|account_id| {
AccountWithMetadata::new(
state.get_account_by_id(account_id),
state.get_account_by_id(*account_id),
signer_account_ids.contains(account_id),
*account_id,
)
@ -157,7 +157,7 @@ impl PublicTransaction {
let expected_pre = state_diff
.get(&account_id)
.cloned()
.unwrap_or_else(|| state.get_account_by_id(&account_id));
.unwrap_or_else(|| state.get_account_by_id(account_id));
if pre.account != expected_pre {
return Err(NssaError::InvalidProgramBehavior);
}
@ -212,7 +212,7 @@ impl PublicTransaction {
// Check that all modified uninitialized accounts where claimed
for post in state_diff.iter().filter_map(|(account_id, post)| {
let pre = state.get_account_by_id(account_id);
let pre = state.get_account_by_id(*account_id);
if pre.program_owner != DEFAULT_PROGRAM_ID {
return None;
}

View File

@ -221,9 +221,9 @@ impl V02State {
self.public_state.entry(account_id).or_default()
}
pub fn get_account_by_id(&self, account_id: &AccountId) -> Account {
pub fn get_account_by_id(&self, account_id: AccountId) -> Account {
self.public_state
.get(account_id)
.get(&account_id)
.cloned()
.unwrap_or(Account::default())
}
@ -416,7 +416,7 @@ pub mod tests {
let state = V02State::new_with_genesis_accounts(&initial_data, &[]);
let expected_account = state.public_state.get(&account_id).unwrap();
let account = state.get_account_by_id(&account_id);
let account = state.get_account_by_id(account_id);
assert_eq!(&account, expected_account);
}
@ -427,7 +427,7 @@ pub mod tests {
let state = V02State::new_with_genesis_accounts(&[], &[]);
let expected_account = Account::default();
let account = state.get_account_by_id(&addr2);
let account = state.get_account_by_id(addr2);
assert_eq!(account, expected_account);
}
@ -449,16 +449,16 @@ pub mod tests {
let mut state = V02State::new_with_genesis_accounts(&initial_data, &[]);
let from = account_id;
let to = AccountId::new([2; 32]);
assert_eq!(state.get_account_by_id(&to), Account::default());
assert_eq!(state.get_account_by_id(to), Account::default());
let balance_to_move = 5;
let tx = transfer_transaction(from, key, 0, to, balance_to_move);
state.transition_from_public_transaction(&tx).unwrap();
assert_eq!(state.get_account_by_id(&from).balance, 95);
assert_eq!(state.get_account_by_id(&to).balance, 5);
assert_eq!(state.get_account_by_id(&from).nonce, 1);
assert_eq!(state.get_account_by_id(&to).nonce, 0);
assert_eq!(state.get_account_by_id(from).balance, 95);
assert_eq!(state.get_account_by_id(to).balance, 5);
assert_eq!(state.get_account_by_id(from).nonce, 1);
assert_eq!(state.get_account_by_id(to).nonce, 0);
}
#[test]
@ -471,16 +471,16 @@ pub mod tests {
let from_key = key;
let to = AccountId::new([2; 32]);
let balance_to_move = 101;
assert!(state.get_account_by_id(&from).balance < balance_to_move);
assert!(state.get_account_by_id(from).balance < balance_to_move);
let tx = transfer_transaction(from, from_key, 0, to, balance_to_move);
let result = state.transition_from_public_transaction(&tx);
assert!(matches!(result, Err(NssaError::ProgramExecutionFailed(_))));
assert_eq!(state.get_account_by_id(&from).balance, 100);
assert_eq!(state.get_account_by_id(&to).balance, 0);
assert_eq!(state.get_account_by_id(&from).nonce, 0);
assert_eq!(state.get_account_by_id(&to).nonce, 0);
assert_eq!(state.get_account_by_id(from).balance, 100);
assert_eq!(state.get_account_by_id(to).balance, 0);
assert_eq!(state.get_account_by_id(from).nonce, 0);
assert_eq!(state.get_account_by_id(to).nonce, 0);
}
#[test]
@ -494,16 +494,16 @@ pub mod tests {
let from = account_id2;
let from_key = key2;
let to = account_id1;
assert_ne!(state.get_account_by_id(&to), Account::default());
assert_ne!(state.get_account_by_id(to), Account::default());
let balance_to_move = 8;
let tx = transfer_transaction(from, from_key, 0, to, balance_to_move);
state.transition_from_public_transaction(&tx).unwrap();
assert_eq!(state.get_account_by_id(&from).balance, 192);
assert_eq!(state.get_account_by_id(&to).balance, 108);
assert_eq!(state.get_account_by_id(&from).nonce, 1);
assert_eq!(state.get_account_by_id(&to).nonce, 0);
assert_eq!(state.get_account_by_id(from).balance, 192);
assert_eq!(state.get_account_by_id(to).balance, 108);
assert_eq!(state.get_account_by_id(from).nonce, 1);
assert_eq!(state.get_account_by_id(to).nonce, 0);
}
#[test]
@ -523,12 +523,12 @@ pub mod tests {
let tx = transfer_transaction(account_id2, key2, 0, account_id3, balance_to_move);
state.transition_from_public_transaction(&tx).unwrap();
assert_eq!(state.get_account_by_id(&account_id1).balance, 95);
assert_eq!(state.get_account_by_id(&account_id2).balance, 2);
assert_eq!(state.get_account_by_id(&account_id3).balance, 3);
assert_eq!(state.get_account_by_id(&account_id1).nonce, 1);
assert_eq!(state.get_account_by_id(&account_id2).nonce, 1);
assert_eq!(state.get_account_by_id(&account_id3).nonce, 0);
assert_eq!(state.get_account_by_id(account_id1).balance, 95);
assert_eq!(state.get_account_by_id(account_id2).balance, 2);
assert_eq!(state.get_account_by_id(account_id3).balance, 3);
assert_eq!(state.get_account_by_id(account_id1).nonce, 1);
assert_eq!(state.get_account_by_id(account_id2).nonce, 1);
assert_eq!(state.get_account_by_id(account_id3).nonce, 0);
}
impl V02State {
@ -655,7 +655,7 @@ pub mod tests {
let mut state =
V02State::new_with_genesis_accounts(&initial_data, &[]).with_test_programs();
let account_id = AccountId::new([1; 32]);
let account = state.get_account_by_id(&account_id);
let account = state.get_account_by_id(account_id);
// Assert the target account only differs from the default account in the program owner
// field
assert_ne!(account.program_owner, Account::default().program_owner);
@ -680,7 +680,7 @@ pub mod tests {
.with_test_programs()
.with_non_default_accounts_but_default_program_owners();
let account_id = AccountId::new([255; 32]);
let account = state.get_account_by_id(&account_id);
let account = state.get_account_by_id(account_id);
// Assert the target account only differs from the default account in balance field
assert_eq!(account.program_owner, Account::default().program_owner);
assert_ne!(account.balance, Account::default().balance);
@ -704,7 +704,7 @@ pub mod tests {
.with_test_programs()
.with_non_default_accounts_but_default_program_owners();
let account_id = AccountId::new([254; 32]);
let account = state.get_account_by_id(&account_id);
let account = state.get_account_by_id(account_id);
// Assert the target account only differs from the default account in nonce field
assert_eq!(account.program_owner, Account::default().program_owner);
assert_eq!(account.balance, Account::default().balance);
@ -728,7 +728,7 @@ pub mod tests {
.with_test_programs()
.with_non_default_accounts_but_default_program_owners();
let account_id = AccountId::new([253; 32]);
let account = state.get_account_by_id(&account_id);
let account = state.get_account_by_id(account_id);
// Assert the target account only differs from the default account in data field
assert_eq!(account.program_owner, Account::default().program_owner);
assert_eq!(account.balance, Account::default().balance);
@ -755,7 +755,7 @@ pub mod tests {
let balance_to_move: u128 = 1;
let program_id = Program::simple_balance_transfer().id();
assert_ne!(
state.get_account_by_id(&sender_account_id).program_owner,
state.get_account_by_id(sender_account_id).program_owner,
program_id
);
let message = public_transaction::Message::try_new(
@ -782,9 +782,9 @@ pub mod tests {
let account_id = AccountId::new([255; 32]);
let program_id = Program::data_changer().id();
assert_ne!(state.get_account_by_id(&account_id), Account::default());
assert_ne!(state.get_account_by_id(account_id), Account::default());
assert_ne!(
state.get_account_by_id(&account_id).program_owner,
state.get_account_by_id(account_id).program_owner,
program_id
);
let message =
@ -825,11 +825,11 @@ pub mod tests {
let program_id = Program::burner().id();
let account_id = AccountId::new([252; 32]);
assert_eq!(
state.get_account_by_id(&account_id).program_owner,
state.get_account_by_id(account_id).program_owner,
program_id
);
let balance_to_burn: u128 = 1;
assert!(state.get_account_by_id(&account_id).balance > balance_to_burn);
assert!(state.get_account_by_id(account_id).balance > balance_to_burn);
let message = public_transaction::Message::try_new(
program_id,
@ -897,7 +897,7 @@ pub mod tests {
state: &V02State,
) -> PrivacyPreservingTransaction {
let sender = AccountWithMetadata::new(
state.get_account_by_id(&sender_keys.account_id()),
state.get_account_by_id(sender_keys.account_id()),
true,
sender_keys.account_id(),
);
@ -1001,7 +1001,7 @@ pub mod tests {
let sender_pre =
AccountWithMetadata::new(sender_private_account.clone(), true, &sender_keys.npk());
let recipient_pre = AccountWithMetadata::new(
state.get_account_by_id(recipient_account_id),
state.get_account_by_id(*recipient_account_id),
false,
*recipient_account_id,
);
@ -1053,7 +1053,7 @@ pub mod tests {
);
let expected_sender_post = {
let mut this = state.get_account_by_id(&sender_keys.account_id());
let mut this = state.get_account_by_id(sender_keys.account_id());
this.balance -= balance_to_move;
this.nonce += 1;
this
@ -1066,12 +1066,12 @@ pub mod tests {
.transition_from_privacy_preserving_transaction(&tx)
.unwrap();
let sender_post = state.get_account_by_id(&sender_keys.account_id());
let sender_post = state.get_account_by_id(sender_keys.account_id());
assert_eq!(sender_post, expected_sender_post);
assert!(state.private_state.0.contains(&expected_new_commitment));
assert_eq!(
state.get_account_by_id(&sender_keys.account_id()).balance,
state.get_account_by_id(sender_keys.account_id()).balance,
200 - balance_to_move
);
}
@ -1162,7 +1162,7 @@ pub mod tests {
let balance_to_move = 37;
let expected_recipient_post = {
let mut this = state.get_account_by_id(&recipient_keys.account_id());
let mut this = state.get_account_by_id(recipient_keys.account_id());
this.balance += balance_to_move;
this
};
@ -1198,15 +1198,13 @@ pub mod tests {
.transition_from_privacy_preserving_transaction(&tx)
.unwrap();
let recipient_post = state.get_account_by_id(&recipient_keys.account_id());
let recipient_post = state.get_account_by_id(recipient_keys.account_id());
assert_eq!(recipient_post, expected_recipient_post);
assert!(state.private_state.0.contains(&sender_pre_commitment));
assert!(state.private_state.0.contains(&expected_new_commitment));
assert!(state.private_state.1.contains(&expected_new_nullifier));
assert_eq!(
state
.get_account_by_id(&recipient_keys.account_id())
.balance,
state.get_account_by_id(recipient_keys.account_id()).balance,
recipient_initial_balance + balance_to_move
);
}
@ -2226,7 +2224,7 @@ pub mod tests {
let amount: u128 = 37;
// Check the recipient is an uninitialized account
assert_eq!(state.get_account_by_id(&to), Account::default());
assert_eq!(state.get_account_by_id(to), Account::default());
let expected_recipient_post = Account {
program_owner: program.id(),
@ -2242,7 +2240,7 @@ pub mod tests {
state.transition_from_public_transaction(&tx).unwrap();
let recipient_post = state.get_account_by_id(&to);
let recipient_post = state.get_account_by_id(to);
assert_eq!(recipient_post, expected_recipient_post);
}
@ -2285,8 +2283,8 @@ pub mod tests {
state.transition_from_public_transaction(&tx).unwrap();
let from_post = state.get_account_by_id(&from);
let to_post = state.get_account_by_id(&to);
let from_post = state.get_account_by_id(from);
let to_post = state.get_account_by_id(to);
// The `chain_caller` program calls the program twice
assert_eq!(from_post.balance, initial_balance - 2 * amount);
assert_eq!(to_post, expected_to_post);
@ -3377,13 +3375,13 @@ pub mod tests {
let tx = PublicTransaction::new(message, witness_set);
state.transition_from_public_transaction(&tx).unwrap();
let pool_post = state.get_account_by_id(&IdForTests::pool_definition_id());
let vault_a_post = state.get_account_by_id(&IdForTests::vault_a_id());
let vault_b_post = state.get_account_by_id(&IdForTests::vault_b_id());
let token_lp_post = state.get_account_by_id(&IdForTests::token_lp_definition_id());
let user_token_a_post = state.get_account_by_id(&IdForTests::user_token_a_id());
let user_token_b_post = state.get_account_by_id(&IdForTests::user_token_b_id());
let user_token_lp_post = state.get_account_by_id(&IdForTests::user_token_lp_id());
let pool_post = state.get_account_by_id(IdForTests::pool_definition_id());
let vault_a_post = state.get_account_by_id(IdForTests::vault_a_id());
let vault_b_post = state.get_account_by_id(IdForTests::vault_b_id());
let token_lp_post = state.get_account_by_id(IdForTests::token_lp_definition_id());
let user_token_a_post = state.get_account_by_id(IdForTests::user_token_a_id());
let user_token_b_post = state.get_account_by_id(IdForTests::user_token_b_id());
let user_token_lp_post = state.get_account_by_id(IdForTests::user_token_lp_id());
let expected_pool = AccountForTests::pool_definition_remove();
let expected_vault_a = AccountForTests::vault_a_remove();
@ -3458,13 +3456,13 @@ pub mod tests {
let tx = PublicTransaction::new(message, witness_set);
state.transition_from_public_transaction(&tx).unwrap();
let pool_post = state.get_account_by_id(&IdForTests::pool_definition_id());
let vault_a_post = state.get_account_by_id(&IdForTests::vault_a_id());
let vault_b_post = state.get_account_by_id(&IdForTests::vault_b_id());
let token_lp_post = state.get_account_by_id(&IdForTests::token_lp_definition_id());
let user_token_a_post = state.get_account_by_id(&IdForTests::user_token_a_id());
let user_token_b_post = state.get_account_by_id(&IdForTests::user_token_b_id());
let user_token_lp_post = state.get_account_by_id(&IdForTests::user_token_lp_id());
let pool_post = state.get_account_by_id(IdForTests::pool_definition_id());
let vault_a_post = state.get_account_by_id(IdForTests::vault_a_id());
let vault_b_post = state.get_account_by_id(IdForTests::vault_b_id());
let token_lp_post = state.get_account_by_id(IdForTests::token_lp_definition_id());
let user_token_a_post = state.get_account_by_id(IdForTests::user_token_a_id());
let user_token_b_post = state.get_account_by_id(IdForTests::user_token_b_id());
let user_token_lp_post = state.get_account_by_id(IdForTests::user_token_lp_id());
let expected_pool = AccountForTests::pool_definition_new_init();
let expected_vault_a = AccountForTests::vault_a_init();
@ -3543,13 +3541,13 @@ pub mod tests {
let tx = PublicTransaction::new(message, witness_set);
state.transition_from_public_transaction(&tx).unwrap();
let pool_post = state.get_account_by_id(&IdForTests::pool_definition_id());
let vault_a_post = state.get_account_by_id(&IdForTests::vault_a_id());
let vault_b_post = state.get_account_by_id(&IdForTests::vault_b_id());
let token_lp_post = state.get_account_by_id(&IdForTests::token_lp_definition_id());
let user_token_a_post = state.get_account_by_id(&IdForTests::user_token_a_id());
let user_token_b_post = state.get_account_by_id(&IdForTests::user_token_b_id());
let user_token_lp_post = state.get_account_by_id(&IdForTests::user_token_lp_id());
let pool_post = state.get_account_by_id(IdForTests::pool_definition_id());
let vault_a_post = state.get_account_by_id(IdForTests::vault_a_id());
let vault_b_post = state.get_account_by_id(IdForTests::vault_b_id());
let token_lp_post = state.get_account_by_id(IdForTests::token_lp_definition_id());
let user_token_a_post = state.get_account_by_id(IdForTests::user_token_a_id());
let user_token_b_post = state.get_account_by_id(IdForTests::user_token_b_id());
let user_token_lp_post = state.get_account_by_id(IdForTests::user_token_lp_id());
let expected_pool = AccountForTests::pool_definition_init();
let expected_vault_a = AccountForTests::vault_a_init();
@ -3616,13 +3614,13 @@ pub mod tests {
let tx = PublicTransaction::new(message, witness_set);
state.transition_from_public_transaction(&tx).unwrap();
let pool_post = state.get_account_by_id(&IdForTests::pool_definition_id());
let vault_a_post = state.get_account_by_id(&IdForTests::vault_a_id());
let vault_b_post = state.get_account_by_id(&IdForTests::vault_b_id());
let token_lp_post = state.get_account_by_id(&IdForTests::token_lp_definition_id());
let user_token_a_post = state.get_account_by_id(&IdForTests::user_token_a_id());
let user_token_b_post = state.get_account_by_id(&IdForTests::user_token_b_id());
let user_token_lp_post = state.get_account_by_id(&IdForTests::user_token_lp_id());
let pool_post = state.get_account_by_id(IdForTests::pool_definition_id());
let vault_a_post = state.get_account_by_id(IdForTests::vault_a_id());
let vault_b_post = state.get_account_by_id(IdForTests::vault_b_id());
let token_lp_post = state.get_account_by_id(IdForTests::token_lp_definition_id());
let user_token_a_post = state.get_account_by_id(IdForTests::user_token_a_id());
let user_token_b_post = state.get_account_by_id(IdForTests::user_token_b_id());
let user_token_lp_post = state.get_account_by_id(IdForTests::user_token_lp_id());
let expected_pool = AccountForTests::pool_definition_new_init();
let expected_vault_a = AccountForTests::vault_a_init();
@ -3679,13 +3677,13 @@ pub mod tests {
let tx = PublicTransaction::new(message, witness_set);
state.transition_from_public_transaction(&tx).unwrap();
let pool_post = state.get_account_by_id(&IdForTests::pool_definition_id());
let vault_a_post = state.get_account_by_id(&IdForTests::vault_a_id());
let vault_b_post = state.get_account_by_id(&IdForTests::vault_b_id());
let token_lp_post = state.get_account_by_id(&IdForTests::token_lp_definition_id());
let user_token_a_post = state.get_account_by_id(&IdForTests::user_token_a_id());
let user_token_b_post = state.get_account_by_id(&IdForTests::user_token_b_id());
let user_token_lp_post = state.get_account_by_id(&IdForTests::user_token_lp_id());
let pool_post = state.get_account_by_id(IdForTests::pool_definition_id());
let vault_a_post = state.get_account_by_id(IdForTests::vault_a_id());
let vault_b_post = state.get_account_by_id(IdForTests::vault_b_id());
let token_lp_post = state.get_account_by_id(IdForTests::token_lp_definition_id());
let user_token_a_post = state.get_account_by_id(IdForTests::user_token_a_id());
let user_token_b_post = state.get_account_by_id(IdForTests::user_token_b_id());
let user_token_lp_post = state.get_account_by_id(IdForTests::user_token_lp_id());
let expected_pool = AccountForTests::pool_definition_add();
let expected_vault_a = AccountForTests::vault_a_add();
@ -3736,11 +3734,11 @@ pub mod tests {
let tx = PublicTransaction::new(message, witness_set);
state.transition_from_public_transaction(&tx).unwrap();
let pool_post = state.get_account_by_id(&IdForTests::pool_definition_id());
let vault_a_post = state.get_account_by_id(&IdForTests::vault_a_id());
let vault_b_post = state.get_account_by_id(&IdForTests::vault_b_id());
let user_token_a_post = state.get_account_by_id(&IdForTests::user_token_a_id());
let user_token_b_post = state.get_account_by_id(&IdForTests::user_token_b_id());
let pool_post = state.get_account_by_id(IdForTests::pool_definition_id());
let vault_a_post = state.get_account_by_id(IdForTests::vault_a_id());
let vault_b_post = state.get_account_by_id(IdForTests::vault_b_id());
let user_token_a_post = state.get_account_by_id(IdForTests::user_token_a_id());
let user_token_b_post = state.get_account_by_id(IdForTests::user_token_b_id());
let expected_pool = AccountForTests::pool_definition_swap_1();
let expected_vault_a = AccountForTests::vault_a_swap_1();
@ -3787,11 +3785,11 @@ pub mod tests {
let tx = PublicTransaction::new(message, witness_set);
state.transition_from_public_transaction(&tx).unwrap();
let pool_post = state.get_account_by_id(&IdForTests::pool_definition_id());
let vault_a_post = state.get_account_by_id(&IdForTests::vault_a_id());
let vault_b_post = state.get_account_by_id(&IdForTests::vault_b_id());
let user_token_a_post = state.get_account_by_id(&IdForTests::user_token_a_id());
let user_token_b_post = state.get_account_by_id(&IdForTests::user_token_b_id());
let pool_post = state.get_account_by_id(IdForTests::pool_definition_id());
let vault_a_post = state.get_account_by_id(IdForTests::vault_a_id());
let vault_b_post = state.get_account_by_id(IdForTests::vault_b_id());
let user_token_a_post = state.get_account_by_id(IdForTests::user_token_a_id());
let user_token_b_post = state.get_account_by_id(IdForTests::user_token_b_id());
let expected_pool = AccountForTests::pool_definition_swap_2();
let expected_vault_a = AccountForTests::vault_a_swap_2();
@ -3842,8 +3840,8 @@ pub mod tests {
state.transition_from_public_transaction(&tx).unwrap();
let from_post = state.get_account_by_id(&from);
let to_post = state.get_account_by_id(&to);
let from_post = state.get_account_by_id(from);
let to_post = state.get_account_by_id(to);
assert_eq!(from_post.balance, initial_balance - amount);
assert_eq!(to_post, expected_to_post);
}
@ -3868,7 +3866,7 @@ pub mod tests {
let amount: u128 = 37;
// Check the recipient is an uninitialized account
assert_eq!(state.get_account_by_id(&to), Account::default());
assert_eq!(state.get_account_by_id(to), Account::default());
let expected_to_post = Account {
// The expected program owner is the authenticated transfer program
@ -3898,8 +3896,8 @@ pub mod tests {
state.transition_from_public_transaction(&tx).unwrap();
let from_post = state.get_account_by_id(&from);
let to_post = state.get_account_by_id(&to);
let from_post = state.get_account_by_id(from);
let to_post = state.get_account_by_id(to);
assert_eq!(from_post.balance, initial_balance - amount);
assert_eq!(to_post, expected_to_post);
}
@ -4094,7 +4092,7 @@ pub mod tests {
let tx = PublicTransaction::new(message, witness_set);
state.transition_from_public_transaction(&tx).unwrap();
let winner_token_holding_post = state.get_account_by_id(&winner_token_holding_id);
let winner_token_holding_post = state.get_account_by_id(winner_token_holding_id);
assert_eq!(
winner_token_holding_post,
expected_winner_token_holding_post
@ -4151,13 +4149,12 @@ pub mod tests {
let balance_to_move: u128 = 4;
let sender =
AccountWithMetadata::new(state.get_account_by_id(&sender_id.clone()), true, sender_id);
let sender = AccountWithMetadata::new(state.get_account_by_id(sender_id), true, sender_id);
let sender_nonce = sender.account.nonce;
let _recipient =
AccountWithMetadata::new(state.get_account_by_id(&recipient_id), false, sender_id);
AccountWithMetadata::new(state.get_account_by_id(recipient_id), false, sender_id);
let message = public_transaction::Message::try_new(
Program::modified_transfer_program().id(),
@ -4172,18 +4169,18 @@ pub mod tests {
let res = state.transition_from_public_transaction(&tx);
assert!(matches!(res, Err(NssaError::InvalidProgramBehavior)));
let sender_post = state.get_account_by_id(&sender_id);
let recipient_post = state.get_account_by_id(&recipient_id);
let sender_post = state.get_account_by_id(sender_id);
let recipient_post = state.get_account_by_id(recipient_id);
let expected_sender_post = {
let mut this = state.get_account_by_id(&sender_id);
let mut this = state.get_account_by_id(sender_id);
this.balance = sender_init_balance;
this.nonce = 0;
this
};
let expected_recipient_post = {
let mut this = state.get_account_by_id(&sender_id);
let mut this = state.get_account_by_id(sender_id);
this.balance = recipient_init_balance;
this.nonce = 0;
this
@ -4353,7 +4350,7 @@ pub mod tests {
// Should succeed - no changes made, no claim needed
assert!(result.is_ok());
// Account should remain default/unclaimed
assert_eq!(state.get_account_by_id(&account_id), Account::default());
assert_eq!(state.get_account_by_id(account_id), Account::default());
}
#[test]

View File

@ -1,3 +1,5 @@
# Should be kept in sync with Dockerfiles
[toolchain]
channel = "1.91.1"
profile = "default"

View File

@ -26,11 +26,13 @@ rand.workspace = true
reqwest.workspace = true
borsh.workspace = true
url.workspace = true
jsonrpsee = { workspace = true, features = ["http-client"] }
jsonrpsee = { workspace = true, features = ["ws-client"] }
[features]
default = []
testnet = []
# Generate mock external clients implementations for testing
mock = []
[dev-dependencies]
futures.workspace = true

View File

@ -1,51 +1,39 @@
use std::{fs, path::Path, str::FromStr};
use anyhow::{Context, Result, anyhow};
use anyhow::{Context, Result};
use bedrock_client::BedrockClient;
use common::block::Block;
pub use common::block::Block;
pub use logos_blockchain_core::mantle::{MantleTx, SignedMantleTx, ops::channel::MsgId};
use logos_blockchain_core::mantle::{
MantleTx, Op, OpProof, SignedMantleTx, Transaction, TxHash, ledger,
ops::channel::{ChannelId, MsgId, inscribe::InscriptionOp},
Op, OpProof, Transaction, TxHash, ledger,
ops::channel::{ChannelId, inscribe::InscriptionOp},
};
use logos_blockchain_key_management_system_service::keys::{
ED25519_SECRET_KEY_SIZE, Ed25519Key, Ed25519PublicKey,
};
use reqwest::Url;
pub use logos_blockchain_key_management_system_service::keys::Ed25519Key;
use logos_blockchain_key_management_system_service::keys::Ed25519PublicKey;
use crate::config::BedrockConfig;
/// A component that posts block data to logos blockchain
#[derive(Clone)]
pub struct BlockSettlementClient {
bedrock_client: BedrockClient,
bedrock_signing_key: Ed25519Key,
bedrock_channel_id: ChannelId,
}
#[expect(async_fn_in_trait, reason = "We don't care about Send/Sync here")]
pub trait BlockSettlementClientTrait: Clone {
//// Create a new client.
fn new(config: &BedrockConfig, bedrock_signing_key: Ed25519Key) -> Result<Self>;
impl BlockSettlementClient {
pub fn try_new(home: &Path, config: &BedrockConfig) -> Result<Self> {
let bedrock_signing_key = load_or_create_signing_key(&home.join("bedrock_signing_key"))
.context("Failed to load or create signing key")?;
let bedrock_url = Url::from_str(config.node_url.as_ref())
.context("Bedrock node address is not a valid url")?;
let bedrock_client =
BedrockClient::new(None, bedrock_url).context("Failed to initialize bedrock client")?;
Ok(Self {
bedrock_client,
bedrock_signing_key,
bedrock_channel_id: config.channel_id,
})
}
/// Get the bedrock channel ID used by this client.
fn bedrock_channel_id(&self) -> ChannelId;
/// Create and sign a transaction for inscribing data
pub fn create_inscribe_tx(&self, block: &Block) -> Result<(SignedMantleTx, MsgId)> {
/// Get the bedrock signing key used by this client.
fn bedrock_signing_key(&self) -> &Ed25519Key;
/// Post a transaction to the node.
async fn submit_block_to_bedrock(&self, block: &Block) -> Result<MsgId>;
/// Create and sign a transaction for inscribing data.
fn create_inscribe_tx(&self, block: &Block) -> Result<(SignedMantleTx, MsgId)> {
let inscription_data = borsh::to_vec(block)?;
let verifying_key_bytes = self.bedrock_signing_key.public_key().to_bytes();
let verifying_key_bytes = self.bedrock_signing_key().public_key().to_bytes();
let verifying_key =
Ed25519PublicKey::from_bytes(&verifying_key_bytes).expect("valid ed25519 public key");
let inscribe_op = InscriptionOp {
channel_id: self.bedrock_channel_id,
channel_id: self.bedrock_channel_id(),
inscription: inscription_data,
parent: block.bedrock_parent_id.into(),
signer: verifying_key,
@ -64,7 +52,7 @@ impl BlockSettlementClient {
let tx_hash = inscribe_tx.hash();
let signature_bytes = self
.bedrock_signing_key
.bedrock_signing_key()
.sign_payload(tx_hash.as_signing_bytes().as_ref())
.to_bytes();
let signature =
@ -79,31 +67,46 @@ impl BlockSettlementClient {
};
Ok((signed_mantle_tx, inscribe_op_id))
}
}
/// Post a transaction to the node
pub async fn submit_block_to_bedrock(&self, block: &Block) -> Result<MsgId> {
/// A component that posts block data to logos blockchain
#[derive(Clone)]
pub struct BlockSettlementClient {
bedrock_client: BedrockClient,
bedrock_signing_key: Ed25519Key,
bedrock_channel_id: ChannelId,
}
impl BlockSettlementClientTrait for BlockSettlementClient {
fn new(config: &BedrockConfig, bedrock_signing_key: Ed25519Key) -> Result<Self> {
let bedrock_client =
BedrockClient::new(config.backoff, config.node_url.clone(), config.auth.clone())
.context("Failed to initialize bedrock client")?;
Ok(Self {
bedrock_client,
bedrock_signing_key,
bedrock_channel_id: config.channel_id,
})
}
async fn submit_block_to_bedrock(&self, block: &Block) -> Result<MsgId> {
let (tx, new_msg_id) = self.create_inscribe_tx(block)?;
// Post the transaction
self.bedrock_client.post_transaction(tx).await?;
self.bedrock_client
.post_transaction(tx)
.await
.context("Failed to post transaction to Bedrock")?;
Ok(new_msg_id)
}
}
/// Load signing key from file or generate a new one if it doesn't exist
fn load_or_create_signing_key(path: &Path) -> Result<Ed25519Key> {
if path.exists() {
let key_bytes = fs::read(path)?;
let key_array: [u8; ED25519_SECRET_KEY_SIZE] = key_bytes
.try_into()
.map_err(|_| anyhow!("Found key with incorrect length"))?;
Ok(Ed25519Key::from_bytes(&key_array))
} else {
let mut key_bytes = [0u8; ED25519_SECRET_KEY_SIZE];
rand::RngCore::fill_bytes(&mut rand::thread_rng(), &mut key_bytes);
fs::write(path, key_bytes)?;
Ok(Ed25519Key::from_bytes(&key_bytes))
fn bedrock_channel_id(&self) -> ChannelId {
self.bedrock_channel_id
}
fn bedrock_signing_key(&self) -> &Ed25519Key {
&self.bedrock_signing_key
}
}

View File

@ -1,7 +1,7 @@
use std::{collections::HashMap, path::Path};
use anyhow::Result;
use common::{HashType, block::Block, transaction::EncodedTransaction};
use common::{HashType, block::Block, transaction::NSSATransaction};
use nssa::V02State;
use storage::sequencer::RocksDBIO;
@ -55,7 +55,7 @@ impl SequencerStore {
}
/// Returns the transaction corresponding to the given hash, if it exists in the blockchain.
pub fn get_transaction_by_hash(&self, hash: HashType) -> Option<EncodedTransaction> {
pub fn get_transaction_by_hash(&self, hash: HashType) -> Option<NSSATransaction> {
let block_id = self.tx_hash_to_block_map.get(&hash);
let block = block_id.map(|&id| self.get_block_at_id(id));
if let Some(Ok(block)) = block {
@ -68,7 +68,7 @@ impl SequencerStore {
None
}
pub fn insert(&mut self, tx: &EncodedTransaction, block_id: u64) {
pub fn insert(&mut self, tx: &NSSATransaction, block_id: u64) {
self.tx_hash_to_block_map.insert(tx.hash(), block_id);
}
@ -121,7 +121,7 @@ mod tests {
let genesis_block_hashable_data = HashableBlockData {
block_id: 0,
prev_block_hash: [0; 32],
prev_block_hash: HashType([0; 32]),
timestamp: 0,
transactions: vec![],
};

View File

@ -5,6 +5,7 @@ use std::{
};
use anyhow::Result;
use bedrock_client::BackoffConfig;
use common::{
block::{AccountInitialData, CommitmentsInitialData},
config::BasicAuth,
@ -41,13 +42,16 @@ pub struct SequencerConfig {
/// Sequencer own signing key
pub signing_key: [u8; 32],
/// Bedrock configuration options
pub bedrock_config: Option<BedrockConfig>,
pub bedrock_config: BedrockConfig,
/// Indexer RPC URL
pub indexer_rpc_url: Url,
}
#[derive(Clone, Serialize, Deserialize)]
pub struct BedrockConfig {
/// Fibonacci backoff retry strategy configuration
#[serde(default)]
pub backoff: BackoffConfig,
/// Bedrock channel ID
pub channel_id: ChannelId,
/// Bedrock Url

View File

@ -0,0 +1,34 @@
use std::{ops::Deref, sync::Arc};
use anyhow::{Context as _, Result};
use log::info;
pub use url::Url;
/// Abstraction over the Indexer connection so `SequencerCore` can be
/// instantiated with a mock client in unit tests (see the `mock` module).
#[expect(async_fn_in_trait, reason = "We don't care about Send/Sync here")]
pub trait IndexerClientTrait: Clone {
    /// Connect to the Indexer service at `indexer_url`.
    async fn new(indexer_url: &Url) -> Result<Self>;
}
/// Websocket JSON-RPC client for the Indexer service.
///
/// The inner `WsClient` is wrapped in an `Arc`, so cloning this type is cheap
/// and all clones share one underlying connection.
#[derive(Clone)]
pub struct IndexerClient(Arc<jsonrpsee::ws_client::WsClient>);
impl IndexerClientTrait for IndexerClient {
    async fn new(indexer_url: &Url) -> Result<Self> {
        info!("Connecting to Indexer at {indexer_url}");
        // Build the websocket connection eagerly so a misconfigured URL is
        // surfaced at startup rather than on the first request.
        let client = jsonrpsee::ws_client::WsClientBuilder::default()
            .build(indexer_url)
            .await
            .context("Failed to create websocket client")?;
        Ok(Self(Arc::new(client)))
    }
}
/// Expose the underlying `WsClient` API directly on `IndexerClient`.
impl Deref for IndexerClient {
    type Target = jsonrpsee::ws_client::WsClient;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

View File

@ -1,36 +1,61 @@
use std::{sync::Arc, time::Instant};
use std::{fmt::Display, path::Path, time::Instant};
use anyhow::Result;
use anyhow::{Result, anyhow};
#[cfg(feature = "testnet")]
use common::PINATA_BASE58;
use common::{
HashType,
block::{BedrockStatus, Block, HashableBlockData, MantleMsgId},
transaction::{EncodedTransaction, NSSATransaction, TransactionMalformationError},
transaction::NSSATransaction,
};
use config::SequencerConfig;
use log::{info, warn};
use log::{error, info, warn};
use logos_blockchain_key_management_system_service::keys::{ED25519_SECRET_KEY_SIZE, Ed25519Key};
use mempool::{MemPool, MemPoolHandle};
use serde::{Deserialize, Serialize};
use crate::{block_settlement_client::BlockSettlementClient, block_store::SequencerStore};
use crate::{
block_settlement_client::{BlockSettlementClient, BlockSettlementClientTrait},
block_store::SequencerStore,
indexer_client::{IndexerClient, IndexerClientTrait},
};
mod block_settlement_client;
pub mod block_settlement_client;
pub mod block_store;
pub mod config;
pub mod indexer_client;
#[cfg(feature = "mock")]
pub mod mock;
type IndexerClient = Arc<jsonrpsee::ws_client::WsClient>;
pub struct SequencerCore {
pub struct SequencerCore<
BC: BlockSettlementClientTrait = BlockSettlementClient,
IC: IndexerClientTrait = IndexerClient,
> {
state: nssa::V02State,
store: SequencerStore,
mempool: MemPool<EncodedTransaction>,
mempool: MemPool<NSSATransaction>,
sequencer_config: SequencerConfig,
chain_height: u64,
block_settlement_client: Option<BlockSettlementClient>,
indexer_client: IndexerClient,
block_settlement_client: BC,
indexer_client: IC,
last_bedrock_msg_id: MantleMsgId,
}
impl SequencerCore {
/// Reasons a submitted transaction can be rejected before execution.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub enum TransactionMalformationError {
    /// The transaction signature failed verification.
    InvalidSignature,
    /// The raw transaction bytes could not be decoded; `tx` is the hash of
    /// the offending transaction.
    FailedToDecode { tx: HashType },
}

impl Display for TransactionMalformationError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Emit proper human-readable messages instead of delegating to the
        // pretty-printed Debug representation (`{self:#?}`), which leaks Rust
        // syntax into user-facing error text.
        match self {
            Self::InvalidSignature => write!(f, "transaction signature is invalid"),
            Self::FailedToDecode { tx } => {
                write!(f, "failed to decode transaction with hash {tx:?}")
            }
        }
    }
}

impl std::error::Error for TransactionMalformationError {}
impl<BC: BlockSettlementClientTrait, IC: IndexerClientTrait> SequencerCore<BC, IC> {
/// Starts the sequencer using the provided configuration.
/// If an existing database is found, the sequencer state is loaded from it and
/// assumed to represent the correct latest state consistent with Bedrock-finalized data.
@ -38,11 +63,11 @@ impl SequencerCore {
/// initializing its state with the accounts defined in the configuration file.
pub async fn start_from_config(
config: SequencerConfig,
) -> (Self, MemPoolHandle<EncodedTransaction>) {
) -> (Self, MemPoolHandle<NSSATransaction>) {
let hashable_data = HashableBlockData {
block_id: config.genesis_id,
transactions: vec![],
prev_block_hash: [0; 32],
prev_block_hash: HashType([0; 32]),
timestamp: 0,
};
@ -59,6 +84,7 @@ impl SequencerCore {
)
.unwrap();
#[cfg_attr(not(feature = "testnet"), allow(unused_mut))]
let mut state = match store.get_nssa_state() {
Some(state) => {
info!("Found local database. Loading state and pending blocks from it.");
@ -86,7 +112,7 @@ impl SequencerCore {
let init_accs: Vec<(nssa::AccountId, u128)> = config
.initial_accounts
.iter()
.map(|acc_data| (acc_data.account_id.parse().unwrap(), acc_data.balance))
.map(|acc_data| (acc_data.account_id, acc_data.balance))
.collect();
nssa::V02State::new_with_genesis_accounts(&init_accs, &initial_commitments)
@ -97,26 +123,20 @@ impl SequencerCore {
state.add_pinata_program(PINATA_BASE58.parse().unwrap());
let (mempool, mempool_handle) = MemPool::new(config.mempool_max_size);
let block_settlement_client = config.bedrock_config.as_ref().map(|bedrock_config| {
BlockSettlementClient::try_new(&config.home, bedrock_config)
.expect("Block settlement client should be constructible")
});
let bedrock_signing_key =
load_or_create_signing_key(&config.home.join("bedrock_signing_key"))
.expect("Failed to load or create signing key");
let block_settlement_client = BC::new(&config.bedrock_config, bedrock_signing_key)
.expect("Failed to initialize Block Settlement Client");
let last_bedrock_msg_id = if let Some(client) = block_settlement_client.as_ref() {
let (_, msg_id) = client
.create_inscribe_tx(&genesis_block)
.expect("Inscription transaction with genesis block should be constructible");
msg_id.into()
} else {
channel_genesis_msg_id
};
let (_, msg_id) = block_settlement_client
.create_inscribe_tx(&genesis_block)
.expect("Inscription transaction with genesis block should be constructible");
let last_bedrock_msg_id = msg_id.into();
let indexer_client = Arc::new(
jsonrpsee::ws_client::WsClientBuilder::default()
.build(config.indexer_rpc_url.clone())
.await
.expect("Failed to create Indexer client"),
);
let indexer_client = IC::new(&config.indexer_rpc_url)
.await
.expect("Failed to create Indexer Client");
let sequencer_core = Self {
state,
@ -151,12 +171,21 @@ impl SequencerCore {
}
pub async fn produce_new_block_and_post_to_settlement_layer(&mut self) -> Result<u64> {
let block = self.produce_new_block_with_mempool_transactions()?;
if let Some(client) = self.block_settlement_client.as_mut() {
let msg_id = client.submit_block_to_bedrock(&block).await?;
self.last_bedrock_msg_id = msg_id.into();
log::info!("Posted block data to Bedrock");
{
let block = self.produce_new_block_with_mempool_transactions()?;
match self
.block_settlement_client
.submit_block_to_bedrock(&block)
.await
{
Ok(msg_id) => {
self.last_bedrock_msg_id = msg_id.into();
info!("Posted block data to Bedrock, msg_id: {msg_id:?}");
}
Err(err) => {
error!("Failed to post block data to Bedrock with error: {err:#}");
}
}
}
Ok(self.chain_height)
@ -171,17 +200,22 @@ impl SequencerCore {
let mut valid_transactions = vec![];
while let Some(tx) = self.mempool.pop() {
let nssa_transaction = NSSATransaction::try_from(&tx)
.map_err(|_| TransactionMalformationError::FailedToDecode { tx: tx.hash() })?;
let tx_hash = tx.hash();
match self.execute_check_transaction_on_state(tx) {
Ok(valid_tx) => {
info!("Validated transaction with hash {tx_hash}, including it in block",);
valid_transactions.push(valid_tx);
if let Ok(valid_tx) = self.execute_check_transaction_on_state(nssa_transaction) {
valid_transactions.push(valid_tx.into());
if valid_transactions.len() >= self.sequencer_config.max_num_tx_in_block {
break;
if valid_transactions.len() >= self.sequencer_config.max_num_tx_in_block {
break;
}
}
Err(err) => {
error!(
"Transaction with hash {tx_hash} failed execution check with error: {err:#?}, skipping it",
);
// TODO: Probably need to handle unsuccessful transaction execution?
}
} else {
// Probably need to handle unsuccessful transaction execution?
}
}
@ -271,33 +305,50 @@ impl SequencerCore {
.collect())
}
pub fn block_settlement_client(&self) -> Option<BlockSettlementClient> {
pub fn block_settlement_client(&self) -> BC {
self.block_settlement_client.clone()
}
pub fn indexer_client(&self) -> IndexerClient {
Arc::clone(&self.indexer_client)
pub fn indexer_client(&self) -> IC {
self.indexer_client.clone()
}
}
#[cfg(test)]
mod tests {
use std::pin::pin;
use base58::{FromBase58, ToBase58};
use common::{
block::AccountInitialData, test_utils::sequencer_sign_key_for_testing,
transaction::transaction_pre_check,
};
use nssa::PrivateKey;
use super::*;
fn parse_unwrap_tx_body_into_nssa_tx(tx_body: EncodedTransaction) -> NSSATransaction {
NSSATransaction::try_from(&tx_body)
.map_err(|_| TransactionMalformationError::FailedToDecode { tx: tx_body.hash() })
.unwrap()
/// Load the sequencer's Bedrock signing key from `path`, or generate a fresh
/// random key and persist it there if no key file exists yet.
///
/// # Errors
/// Fails if the file cannot be read or written, or if an existing key file
/// does not contain exactly `ED25519_SECRET_KEY_SIZE` bytes.
fn load_or_create_signing_key(path: &Path) -> Result<Ed25519Key> {
    if path.exists() {
        let key_bytes = std::fs::read(path)?;
        let key_array: [u8; ED25519_SECRET_KEY_SIZE] = key_bytes
            .try_into()
            .map_err(|_| anyhow!("Found key with incorrect length"))?;
        Ok(Ed25519Key::from_bytes(&key_array))
    } else {
        let mut key_bytes = [0u8; ED25519_SECRET_KEY_SIZE];
        rand::RngCore::fill_bytes(&mut rand::thread_rng(), &mut key_bytes);
        // The file holds secret key material: on Unix, create it with
        // owner-only permissions instead of the process umask default
        // (typically 0644, i.e. world-readable). `create_new` also ensures we
        // never clobber a key file that appeared since the `exists()` check.
        #[cfg(unix)]
        {
            use std::{io::Write as _, os::unix::fs::OpenOptionsExt as _};
            std::fs::OpenOptions::new()
                .write(true)
                .create_new(true)
                .mode(0o600)
                .open(path)?
                .write_all(&key_bytes)?;
        }
        #[cfg(not(unix))]
        std::fs::write(path, key_bytes)?;
        Ok(Ed25519Key::from_bytes(&key_bytes))
    }
}
#[cfg(all(test, feature = "mock"))]
mod tests {
use std::{pin::pin, str::FromStr as _};
use base58::ToBase58;
use bedrock_client::BackoffConfig;
use common::{
block::AccountInitialData,
test_utils::sequencer_sign_key_for_testing,
transaction::{NSSATransaction, transaction_pre_check},
};
use logos_blockchain_core::mantle::ops::channel::ChannelId;
use mempool::MemPoolHandle;
use nssa::{AccountId, PrivateKey};
use crate::{
config::{BedrockConfig, SequencerConfig},
mock::SequencerCoreWithMockClients,
};
fn setup_sequencer_config_variable_initial_accounts(
initial_accounts: Vec<AccountInitialData>,
@ -317,9 +368,17 @@ mod tests {
initial_accounts,
initial_commitments: vec![],
signing_key: *sequencer_sign_key_for_testing().value(),
bedrock_config: None,
bedrock_config: BedrockConfig {
backoff: BackoffConfig {
start_delay_millis: 100,
max_retries: 5,
},
channel_id: ChannelId::from([0; 32]),
node_url: "http://not-used-in-unit-tests".parse().unwrap(),
auth: None,
},
retry_pending_blocks_timeout_millis: 1000 * 60 * 4,
indexer_rpc_url: "http://localhost:8779".parse().unwrap(),
indexer_rpc_url: "ws://localhost:8779".parse().unwrap(),
}
}
@ -335,12 +394,12 @@ mod tests {
];
let initial_acc1 = AccountInitialData {
account_id: acc1_account_id.to_base58(),
account_id: AccountId::from_str(&acc1_account_id.to_base58()).unwrap(),
balance: 10000,
};
let initial_acc2 = AccountInitialData {
account_id: acc2_account_id.to_base58(),
account_id: AccountId::from_str(&acc2_account_id.to_base58()).unwrap(),
balance: 20000,
};
@ -357,15 +416,16 @@ mod tests {
nssa::PrivateKey::try_new([2; 32]).unwrap()
}
async fn common_setup() -> (SequencerCore, MemPoolHandle<EncodedTransaction>) {
async fn common_setup() -> (SequencerCoreWithMockClients, MemPoolHandle<NSSATransaction>) {
let config = setup_sequencer_config();
common_setup_with_config(config).await
}
async fn common_setup_with_config(
config: SequencerConfig,
) -> (SequencerCore, MemPoolHandle<EncodedTransaction>) {
let (mut sequencer, mempool_handle) = SequencerCore::start_from_config(config).await;
) -> (SequencerCoreWithMockClients, MemPoolHandle<NSSATransaction>) {
let (mut sequencer, mempool_handle) =
SequencerCoreWithMockClients::start_from_config(config).await;
let tx = common::test_utils::produce_dummy_empty_transaction();
mempool_handle.push(tx).await.unwrap();
@ -380,35 +440,18 @@ mod tests {
#[tokio::test]
async fn test_start_from_config() {
let config = setup_sequencer_config();
let (sequencer, _mempool_handle) = SequencerCore::start_from_config(config.clone()).await;
let (sequencer, _mempool_handle) =
SequencerCoreWithMockClients::start_from_config(config.clone()).await;
assert_eq!(sequencer.chain_height, config.genesis_id);
assert_eq!(sequencer.sequencer_config.max_num_tx_in_block, 10);
assert_eq!(sequencer.sequencer_config.port, 8080);
let acc1_account_id = config.initial_accounts[0]
.account_id
.clone()
.from_base58()
.unwrap()
.try_into()
.unwrap();
let acc2_account_id = config.initial_accounts[1]
.account_id
.clone()
.from_base58()
.unwrap()
.try_into()
.unwrap();
let acc1_account_id = config.initial_accounts[0].account_id;
let acc2_account_id = config.initial_accounts[1].account_id;
let balance_acc_1 = sequencer
.state
.get_account_by_id(&nssa::AccountId::new(acc1_account_id))
.balance;
let balance_acc_2 = sequencer
.state
.get_account_by_id(&nssa::AccountId::new(acc2_account_id))
.balance;
let balance_acc_1 = sequencer.state.get_account_by_id(acc1_account_id).balance;
let balance_acc_2 = sequencer.state.get_account_by_id(acc2_account_id).balance;
assert_eq!(10000, balance_acc_1);
assert_eq!(20000, balance_acc_2);
@ -427,55 +470,38 @@ mod tests {
];
let initial_acc1 = AccountInitialData {
account_id: acc1_account_id.to_base58(),
account_id: AccountId::from_str(&acc1_account_id.to_base58()).unwrap(),
balance: 10000,
};
let initial_acc2 = AccountInitialData {
account_id: acc2_account_id.to_base58(),
account_id: AccountId::from_str(&acc2_account_id.to_base58()).unwrap(),
balance: 20000,
};
let initial_accounts = vec![initial_acc1, initial_acc2];
let config = setup_sequencer_config_variable_initial_accounts(initial_accounts);
let (sequencer, _mempool_handle) = SequencerCore::start_from_config(config.clone()).await;
let (sequencer, _mempool_handle) =
SequencerCoreWithMockClients::start_from_config(config.clone()).await;
let acc1_account_id = config.initial_accounts[0]
.account_id
.clone()
.from_base58()
.unwrap()
.try_into()
.unwrap();
let acc2_account_id = config.initial_accounts[1]
.account_id
.clone()
.from_base58()
.unwrap()
.try_into()
.unwrap();
let acc1_account_id = config.initial_accounts[0].account_id;
let acc2_account_id = config.initial_accounts[1].account_id;
assert_eq!(
10000,
sequencer
.state
.get_account_by_id(&nssa::AccountId::new(acc1_account_id))
.balance
sequencer.state.get_account_by_id(acc1_account_id).balance
);
assert_eq!(
20000,
sequencer
.state
.get_account_by_id(&nssa::AccountId::new(acc2_account_id))
.balance
sequencer.state.get_account_by_id(acc2_account_id).balance
);
}
#[test]
fn test_transaction_pre_check_pass() {
let tx = common::test_utils::produce_dummy_empty_transaction();
let result = transaction_pre_check(parse_unwrap_tx_body_into_nssa_tx(tx));
let result = transaction_pre_check(tx);
assert!(result.is_ok());
}
@ -484,27 +510,15 @@ mod tests {
async fn test_transaction_pre_check_native_transfer_valid() {
let (sequencer, _mempool_handle) = common_setup().await;
let acc1 = sequencer.sequencer_config.initial_accounts[0]
.account_id
.clone()
.from_base58()
.unwrap()
.try_into()
.unwrap();
let acc2 = sequencer.sequencer_config.initial_accounts[1]
.account_id
.clone()
.from_base58()
.unwrap()
.try_into()
.unwrap();
let acc1 = sequencer.sequencer_config.initial_accounts[0].account_id;
let acc2 = sequencer.sequencer_config.initial_accounts[1].account_id;
let sign_key1 = create_signing_key_for_account1();
let tx = common::test_utils::create_transaction_native_token_transfer(
acc1, 0, acc2, 10, sign_key1,
);
let result = transaction_pre_check(parse_unwrap_tx_body_into_nssa_tx(tx));
let result = transaction_pre_check(tx);
assert!(result.is_ok());
}
@ -513,20 +527,8 @@ mod tests {
async fn test_transaction_pre_check_native_transfer_other_signature() {
let (mut sequencer, _mempool_handle) = common_setup().await;
let acc1 = sequencer.sequencer_config.initial_accounts[0]
.account_id
.clone()
.from_base58()
.unwrap()
.try_into()
.unwrap();
let acc2 = sequencer.sequencer_config.initial_accounts[1]
.account_id
.clone()
.from_base58()
.unwrap()
.try_into()
.unwrap();
let acc1 = sequencer.sequencer_config.initial_accounts[0].account_id;
let acc2 = sequencer.sequencer_config.initial_accounts[1].account_id;
let sign_key2 = create_signing_key_for_account2();
@ -535,7 +537,7 @@ mod tests {
);
// Signature is valid, stateless check pass
let tx = transaction_pre_check(parse_unwrap_tx_body_into_nssa_tx(tx)).unwrap();
let tx = transaction_pre_check(tx).unwrap();
// Signature is not from sender. Execution fails
let result = sequencer.execute_check_transaction_on_state(tx);
@ -550,20 +552,8 @@ mod tests {
async fn test_transaction_pre_check_native_transfer_sent_too_much() {
let (mut sequencer, _mempool_handle) = common_setup().await;
let acc1 = sequencer.sequencer_config.initial_accounts[0]
.account_id
.clone()
.from_base58()
.unwrap()
.try_into()
.unwrap();
let acc2 = sequencer.sequencer_config.initial_accounts[1]
.account_id
.clone()
.from_base58()
.unwrap()
.try_into()
.unwrap();
let acc1 = sequencer.sequencer_config.initial_accounts[0].account_id;
let acc2 = sequencer.sequencer_config.initial_accounts[1].account_id;
let sign_key1 = create_signing_key_for_account1();
@ -571,7 +561,7 @@ mod tests {
acc1, 0, acc2, 10000000, sign_key1,
);
let result = transaction_pre_check(parse_unwrap_tx_body_into_nssa_tx(tx));
let result = transaction_pre_check(tx);
// Passed pre-check
assert!(result.is_ok());
@ -589,20 +579,8 @@ mod tests {
async fn test_transaction_execute_native_transfer() {
let (mut sequencer, _mempool_handle) = common_setup().await;
let acc1 = sequencer.sequencer_config.initial_accounts[0]
.account_id
.clone()
.from_base58()
.unwrap()
.try_into()
.unwrap();
let acc2 = sequencer.sequencer_config.initial_accounts[1]
.account_id
.clone()
.from_base58()
.unwrap()
.try_into()
.unwrap();
let acc1 = sequencer.sequencer_config.initial_accounts[0].account_id;
let acc2 = sequencer.sequencer_config.initial_accounts[1].account_id;
let sign_key1 = create_signing_key_for_account1();
@ -610,18 +588,10 @@ mod tests {
acc1, 0, acc2, 100, sign_key1,
);
sequencer
.execute_check_transaction_on_state(parse_unwrap_tx_body_into_nssa_tx(tx))
.unwrap();
sequencer.execute_check_transaction_on_state(tx).unwrap();
let bal_from = sequencer
.state
.get_account_by_id(&nssa::AccountId::new(acc1))
.balance;
let bal_to = sequencer
.state
.get_account_by_id(&nssa::AccountId::new(acc2))
.balance;
let bal_from = sequencer.state.get_account_by_id(acc1).balance;
let bal_to = sequencer.state.get_account_by_id(acc2).balance;
assert_eq!(bal_from, 9900);
assert_eq!(bal_to, 20100);
@ -671,20 +641,8 @@ mod tests {
async fn test_replay_transactions_are_rejected_in_the_same_block() {
let (mut sequencer, mempool_handle) = common_setup().await;
let acc1 = sequencer.sequencer_config.initial_accounts[0]
.account_id
.clone()
.from_base58()
.unwrap()
.try_into()
.unwrap();
let acc2 = sequencer.sequencer_config.initial_accounts[1]
.account_id
.clone()
.from_base58()
.unwrap()
.try_into()
.unwrap();
let acc1 = sequencer.sequencer_config.initial_accounts[0].account_id;
let acc2 = sequencer.sequencer_config.initial_accounts[1].account_id;
let sign_key1 = create_signing_key_for_account1();
@ -714,20 +672,8 @@ mod tests {
async fn test_replay_transactions_are_rejected_in_different_blocks() {
let (mut sequencer, mempool_handle) = common_setup().await;
let acc1 = sequencer.sequencer_config.initial_accounts[0]
.account_id
.clone()
.from_base58()
.unwrap()
.try_into()
.unwrap();
let acc2 = sequencer.sequencer_config.initial_accounts[1]
.account_id
.clone()
.from_base58()
.unwrap()
.try_into()
.unwrap();
let acc1 = sequencer.sequencer_config.initial_accounts[0].account_id;
let acc2 = sequencer.sequencer_config.initial_accounts[1].account_id;
let sign_key1 = create_signing_key_for_account1();
@ -759,10 +705,8 @@ mod tests {
#[tokio::test]
async fn test_restart_from_storage() {
let config = setup_sequencer_config();
let acc1_account_id: nssa::AccountId =
config.initial_accounts[0].account_id.parse().unwrap();
let acc2_account_id: nssa::AccountId =
config.initial_accounts[1].account_id.parse().unwrap();
let acc1_account_id = config.initial_accounts[0].account_id;
let acc2_account_id = config.initial_accounts[1].account_id;
let balance_to_move = 13;
// In the following code block a transaction will be processed that moves `balance_to_move`
@ -770,13 +714,13 @@ mod tests {
// the temporary directory for the block storage of this test.
{
let (mut sequencer, mempool_handle) =
SequencerCore::start_from_config(config.clone()).await;
SequencerCoreWithMockClients::start_from_config(config.clone()).await;
let signing_key = PrivateKey::try_new([1; 32]).unwrap();
let tx = common::test_utils::create_transaction_native_token_transfer(
*acc1_account_id.value(),
acc1_account_id,
0,
*acc2_account_id.value(),
acc2_account_id,
balance_to_move,
signing_key,
);
@ -793,9 +737,10 @@ mod tests {
// Instantiating a new sequencer from the same config. This should load the existing block
// with the above transaction and update the state to reflect that.
let (sequencer, _mempool_handle) = SequencerCore::start_from_config(config.clone()).await;
let balance_acc_1 = sequencer.state.get_account_by_id(&acc1_account_id).balance;
let balance_acc_2 = sequencer.state.get_account_by_id(&acc2_account_id).balance;
let (sequencer, _mempool_handle) =
SequencerCoreWithMockClients::start_from_config(config.clone()).await;
let balance_acc_1 = sequencer.state.get_account_by_id(acc1_account_id).balance;
let balance_acc_2 = sequencer.state.get_account_by_id(acc2_account_id).balance;
// Balances should be consistent with the stored block
assert_eq!(
@ -811,7 +756,8 @@ mod tests {
#[tokio::test]
async fn test_get_pending_blocks() {
let config = setup_sequencer_config();
let (mut sequencer, _mempool_handle) = SequencerCore::start_from_config(config).await;
let (mut sequencer, _mempool_handle) =
SequencerCoreWithMockClients::start_from_config(config).await;
sequencer
.produce_new_block_with_mempool_transactions()
.unwrap();
@ -827,7 +773,8 @@ mod tests {
#[tokio::test]
async fn test_delete_blocks() {
let config = setup_sequencer_config();
let (mut sequencer, _mempool_handle) = SequencerCore::start_from_config(config).await;
let (mut sequencer, _mempool_handle) =
SequencerCoreWithMockClients::start_from_config(config).await;
sequencer
.produce_new_block_with_mempool_transactions()
.unwrap();

View File

@ -0,0 +1,49 @@
use anyhow::Result;
use common::block::Block;
use logos_blockchain_core::mantle::ops::channel::{ChannelId, MsgId};
use logos_blockchain_key_management_system_service::keys::Ed25519Key;
use url::Url;
use crate::{
block_settlement_client::BlockSettlementClientTrait, config::BedrockConfig,
indexer_client::IndexerClientTrait,
};
/// `SequencerCore` wired with in-memory mock clients, for unit tests that
/// must not touch a real Bedrock node or Indexer service.
pub type SequencerCoreWithMockClients =
    crate::SequencerCore<MockBlockSettlementClient, MockIndexerClient>;
/// In-memory stand-in for the real block settlement client; performs no
/// network I/O.
#[derive(Clone)]
pub struct MockBlockSettlementClient {
    // Mirrors the real client's configuration so the trait getters behave
    // identically to the production implementation.
    bedrock_channel_id: ChannelId,
    bedrock_signing_key: Ed25519Key,
}
impl BlockSettlementClientTrait for MockBlockSettlementClient {
    fn new(config: &BedrockConfig, bedrock_signing_key: Ed25519Key) -> Result<Self> {
        Ok(Self {
            bedrock_channel_id: config.channel_id,
            bedrock_signing_key,
        })
    }
    fn bedrock_channel_id(&self) -> ChannelId {
        self.bedrock_channel_id
    }
    fn bedrock_signing_key(&self) -> &Ed25519Key {
        &self.bedrock_signing_key
    }
    /// "Submits" a block by building the inscribe transaction locally and
    /// returning its message id — no Bedrock node is contacted.
    async fn submit_block_to_bedrock(&self, block: &Block) -> Result<MsgId> {
        self.create_inscribe_tx(block).map(|(_, msg_id)| msg_id)
    }
}
/// Indexer client stub for unit tests; holds no connection or state.
#[derive(Copy, Clone)]
pub struct MockIndexerClient;
impl IndexerClientTrait for MockIndexerClient {
    /// Always succeeds; the URL is ignored because no connection is opened.
    async fn new(_indexer_url: &Url) -> Result<Self> {
        Ok(Self)
    }
}

View File

@ -8,7 +8,8 @@ license = { workspace = true }
nssa.workspace = true
common.workspace = true
mempool.workspace = true
sequencer_core.workspace = true
sequencer_core = { workspace = true }
bedrock_client.workspace = true
anyhow.workspace = true
serde_json.workspace = true
@ -24,3 +25,6 @@ itertools.workspace = true
actix-web.workspace = true
tokio.workspace = true
borsh.workspace = true
[dev-dependencies]
sequencer_core = { workspace = true, features = ["mock"] }

View File

@ -6,11 +6,15 @@ use std::sync::Arc;
use common::{
rpc_primitives::errors::{RpcError, RpcErrorKind},
transaction::EncodedTransaction,
transaction::NSSATransaction,
};
use mempool::MemPoolHandle;
pub use net_utils::*;
use sequencer_core::SequencerCore;
use sequencer_core::{
SequencerCore,
block_settlement_client::{BlockSettlementClient, BlockSettlementClientTrait},
indexer_client::{IndexerClient, IndexerClientTrait},
};
use serde::Serialize;
use serde_json::Value;
use tokio::sync::Mutex;
@ -18,9 +22,12 @@ use tokio::sync::Mutex;
use self::types::err_rpc::RpcErr;
// ToDo: Add necessary fields
pub struct JsonHandler {
sequencer_state: Arc<Mutex<SequencerCore>>,
mempool_handle: MemPoolHandle<EncodedTransaction>,
pub struct JsonHandler<
BC: BlockSettlementClientTrait = BlockSettlementClient,
IC: IndexerClientTrait = IndexerClient,
> {
sequencer_state: Arc<Mutex<SequencerCore<BC, IC>>>,
mempool_handle: MemPoolHandle<NSSATransaction>,
}
fn respond<T: Serialize>(val: T) -> Result<Value, RpcErr> {

View File

@ -4,7 +4,7 @@ use actix_cors::Cors;
use actix_web::{App, Error as HttpError, HttpResponse, HttpServer, http, middleware, web};
use common::{
rpc_primitives::{RpcConfig, message::Message},
transaction::EncodedTransaction,
transaction::NSSATransaction,
};
use futures::{Future, FutureExt};
use log::info;
@ -13,14 +13,15 @@ use sequencer_core::SequencerCore;
use tokio::sync::Mutex;
use super::JsonHandler;
use crate::process::Process;
pub const SHUTDOWN_TIMEOUT_SECS: u64 = 10;
pub const NETWORK: &str = "network";
pub(crate) fn rpc_handler(
pub(crate) fn rpc_handler<P: Process>(
message: web::Json<Message>,
handler: web::Data<JsonHandler>,
handler: web::Data<P>,
) -> impl Future<Output = Result<HttpResponse, HttpError>> {
let response = async move {
let message = handler.process(message.0).await?;
@ -45,7 +46,7 @@ fn get_cors(cors_allowed_origins: &[String]) -> Cors {
pub fn new_http_server(
config: RpcConfig,
seuquencer_core: Arc<Mutex<SequencerCore>>,
mempool_handle: MemPoolHandle<EncodedTransaction>,
mempool_handle: MemPoolHandle<NSSATransaction>,
) -> io::Result<(actix_web::dev::Server, SocketAddr)> {
let RpcConfig {
addr,
@ -65,7 +66,7 @@ pub fn new_http_server(
.app_data(handler.clone())
.app_data(web::JsonConfig::default().limit(limits_config.json_payload_max_size))
.wrap(middleware::Logger::default())
.service(web::resource("/").route(web::post().to(rpc_handler)))
.service(web::resource("/").route(web::post().to(rpc_handler::<JsonHandler>)))
})
.bind(addr)?
.shutdown_timeout(SHUTDOWN_TIMEOUT_SECS)

View File

@ -1,10 +1,8 @@
use std::collections::HashMap;
use actix_web::Error as HttpError;
use base58::FromBase58;
use base64::{Engine, engine::general_purpose};
use common::{
HashType,
block::{AccountInitialData, HashableBlockData},
rpc_primitives::{
errors::RpcError,
@ -22,13 +20,14 @@ use common::{
SendTxRequest, SendTxResponse,
},
},
transaction::{
EncodedTransaction, NSSATransaction, TransactionMalformationError, transaction_pre_check,
},
transaction::{NSSATransaction, transaction_pre_check},
};
use itertools::Itertools as _;
use log::warn;
use nssa::{self, program::Program};
use sequencer_core::{
block_settlement_client::BlockSettlementClientTrait, indexer_client::IndexerClientTrait,
};
use serde_json::Value;
use super::{JsonHandler, respond, types::err_rpc::RpcErr};
@ -53,8 +52,16 @@ pub const TRANSACTION_SUBMITTED: &str = "Transaction submitted";
pub const GET_INITIAL_TESTNET_ACCOUNTS: &str = "get_initial_testnet_accounts";
impl JsonHandler {
pub async fn process(&self, message: Message) -> Result<Message, HttpError> {
pub trait Process: Send + Sync + 'static {
fn process(&self, message: Message) -> impl Future<Output = Result<Message, HttpError>> + Send;
}
impl<
BC: BlockSettlementClientTrait + Send + Sync + 'static,
IC: IndexerClientTrait + Send + Sync + 'static,
> Process for JsonHandler<BC, IC>
{
async fn process(&self, message: Message) -> Result<Message, HttpError> {
let id = message.id();
if let Message::Request(request) = message {
let message_inner = self
@ -68,7 +75,9 @@ impl JsonHandler {
)))
}
}
}
impl<BC: BlockSettlementClientTrait, IC: IndexerClientTrait> JsonHandler<BC, IC> {
/// Example of request processing
#[allow(clippy::unused_async)]
async fn process_temp_hello(&self, request: Request) -> Result<Value, RpcErr> {
@ -83,19 +92,16 @@ impl JsonHandler {
async fn process_send_tx(&self, request: Request) -> Result<Value, RpcErr> {
let send_tx_req = SendTxRequest::parse(Some(request.params))?;
let tx = borsh::from_slice::<EncodedTransaction>(&send_tx_req.transaction).unwrap();
let tx_hash = hex::encode(tx.hash());
let tx = borsh::from_slice::<NSSATransaction>(&send_tx_req.transaction).unwrap();
let tx_hash = tx.hash();
let transaction = NSSATransaction::try_from(&tx)
.map_err(|_| TransactionMalformationError::FailedToDecode { tx: tx.hash() })?;
let authenticated_tx = transaction_pre_check(transaction)
.inspect_err(|err| warn!("Error at pre_check {err:#?}"))?;
let authenticated_tx =
transaction_pre_check(tx).inspect_err(|err| warn!("Error at pre_check {err:#?}"))?;
// TODO: Do we need a timeout here? It will be usable if we have too many transactions to
// process
self.mempool_handle
.push(authenticated_tx.into())
.push(authenticated_tx)
.await
.expect("Mempool is closed, this is a bug");
@ -210,19 +216,11 @@ impl JsonHandler {
/// The account_id must be a valid hex string of the correct length.
async fn process_get_account_balance(&self, request: Request) -> Result<Value, RpcErr> {
let get_account_req = GetAccountBalanceRequest::parse(Some(request.params))?;
let account_id_bytes = get_account_req
.account_id
.from_base58()
.map_err(|_| RpcError::invalid_params("invalid base58".to_string()))?;
let account_id = nssa::AccountId::new(
account_id_bytes
.try_into()
.map_err(|_| RpcError::invalid_params("invalid length".to_string()))?,
);
let account_id = get_account_req.account_id;
let balance = {
let state = self.sequencer_state.lock().await;
let account = state.state().get_account_by_id(&account_id);
let account = state.state().get_account_by_id(account_id);
account.balance
};
@ -235,21 +233,14 @@ impl JsonHandler {
/// Each account_id must be a valid hex string of the correct length.
async fn process_get_accounts_nonces(&self, request: Request) -> Result<Value, RpcErr> {
let get_account_nonces_req = GetAccountsNoncesRequest::parse(Some(request.params))?;
let mut account_ids = vec![];
for account_id_raw in get_account_nonces_req.account_ids {
let account_id = account_id_raw
.parse::<nssa::AccountId>()
.map_err(|e| RpcError::invalid_params(e.to_string()))?;
account_ids.push(account_id);
}
let account_ids = get_account_nonces_req.account_ids;
let nonces = {
let state = self.sequencer_state.lock().await;
account_ids
.into_iter()
.map(|account_id| state.state().get_account_by_id(&account_id).nonce)
.map(|account_id| state.state().get_account_by_id(account_id).nonce)
.collect()
};
@ -263,15 +254,12 @@ impl JsonHandler {
async fn process_get_account(&self, request: Request) -> Result<Value, RpcErr> {
let get_account_nonces_req = GetAccountRequest::parse(Some(request.params))?;
let account_id = get_account_nonces_req
.account_id
.parse::<nssa::AccountId>()
.map_err(|e| RpcError::invalid_params(e.to_string()))?;
let account_id = get_account_nonces_req.account_id;
let account = {
let state = self.sequencer_state.lock().await;
state.state().get_account_by_id(&account_id)
state.state().get_account_by_id(account_id)
};
let response = GetAccountResponse { account };
@ -283,11 +271,7 @@ impl JsonHandler {
/// The hash must be a valid hex string of the correct length.
async fn process_get_transaction_by_hash(&self, request: Request) -> Result<Value, RpcErr> {
let get_transaction_req = GetTransactionByHashRequest::parse(Some(request.params))?;
let bytes: Vec<u8> = hex::decode(get_transaction_req.hash)
.map_err(|_| RpcError::invalid_params("invalid hex".to_string()))?;
let hash: HashType = bytes
.try_into()
.map_err(|_| RpcError::invalid_params("invalid length".to_string()))?;
let hash = get_transaction_req.hash;
let transaction = {
let state = self.sequencer_state.lock().await;
@ -358,23 +342,28 @@ impl JsonHandler {
#[cfg(test)]
mod tests {
use std::sync::Arc;
use std::{str::FromStr as _, sync::Arc};
use base58::ToBase58;
use base64::{Engine, engine::general_purpose};
use bedrock_client::BackoffConfig;
use common::{
block::AccountInitialData, config::BasicAuth, test_utils::sequencer_sign_key_for_testing,
transaction::EncodedTransaction,
transaction::NSSATransaction,
};
use nssa::AccountId;
use sequencer_core::{
SequencerCore,
config::{BedrockConfig, SequencerConfig},
mock::{MockBlockSettlementClient, MockIndexerClient, SequencerCoreWithMockClients},
};
use serde_json::Value;
use tempfile::tempdir;
use tokio::sync::Mutex;
use crate::{JsonHandler, rpc_handler};
use crate::rpc_handler;
type JsonHandlerWithMockClients =
crate::JsonHandler<MockBlockSettlementClient, MockIndexerClient>;
fn sequencer_config_for_tests() -> SequencerConfig {
let tempdir = tempdir().unwrap();
@ -390,12 +379,12 @@ mod tests {
];
let initial_acc1 = AccountInitialData {
account_id: acc1_id.to_base58(),
account_id: AccountId::from_str(&acc1_id.to_base58()).unwrap(),
balance: 10000,
};
let initial_acc2 = AccountInitialData {
account_id: acc2_id.to_base58(),
account_id: AccountId::from_str(&acc2_id.to_base58()).unwrap(),
balance: 20000,
};
@ -414,33 +403,46 @@ mod tests {
initial_commitments: vec![],
signing_key: *sequencer_sign_key_for_testing().value(),
retry_pending_blocks_timeout_millis: 1000 * 60 * 4,
bedrock_config: Some(BedrockConfig {
bedrock_config: BedrockConfig {
backoff: BackoffConfig {
start_delay_millis: 100,
max_retries: 5,
},
channel_id: [42; 32].into(),
node_url: "http://localhost:8080".parse().unwrap(),
auth: Some(BasicAuth {
username: "user".to_string(),
password: None,
}),
}),
indexer_rpc_url: "http://localhost:8779".parse().unwrap(),
},
indexer_rpc_url: "ws://localhost:8779".parse().unwrap(),
}
}
async fn components_for_tests() -> (JsonHandler, Vec<AccountInitialData>, EncodedTransaction) {
async fn components_for_tests() -> (
JsonHandlerWithMockClients,
Vec<AccountInitialData>,
NSSATransaction,
) {
let config = sequencer_config_for_tests();
let (mut sequencer_core, mempool_handle) = SequencerCore::start_from_config(config).await;
let (mut sequencer_core, mempool_handle) =
SequencerCoreWithMockClients::start_from_config(config).await;
let initial_accounts = sequencer_core.sequencer_config().initial_accounts.clone();
let signing_key = nssa::PrivateKey::try_new([1; 32]).unwrap();
let balance_to_move = 10;
let tx = common::test_utils::create_transaction_native_token_transfer(
[
208, 122, 210, 232, 75, 39, 250, 0, 194, 98, 240, 161, 238, 160, 255, 53, 202, 9,
115, 84, 126, 106, 16, 111, 114, 241, 147, 194, 220, 131, 139, 68,
],
AccountId::from_str(
&[
208, 122, 210, 232, 75, 39, 250, 0, 194, 98, 240, 161, 238, 160, 255, 53, 202,
9, 115, 84, 126, 106, 16, 111, 114, 241, 147, 194, 220, 131, 139, 68,
]
.to_base58(),
)
.unwrap(),
0,
[2; 32],
AccountId::from_str(&[2; 32].to_base58()).unwrap(),
balance_to_move,
signing_key,
);
@ -457,7 +459,7 @@ mod tests {
let sequencer_core = Arc::new(Mutex::new(sequencer_core));
(
JsonHandler {
JsonHandlerWithMockClients {
sequencer_state: sequencer_core,
mempool_handle,
},
@ -466,14 +468,16 @@ mod tests {
)
}
async fn call_rpc_handler_with_json(handler: JsonHandler, request_json: Value) -> Value {
async fn call_rpc_handler_with_json(
handler: JsonHandlerWithMockClients,
request_json: Value,
) -> Value {
use actix_web::{App, test, web};
let app = test::init_service(
App::new()
.app_data(web::Data::new(handler))
.route("/", web::post().to(rpc_handler)),
)
let app = test::init_service(App::new().app_data(web::Data::new(handler)).route(
"/",
web::post().to(rpc_handler::<JsonHandlerWithMockClients>),
))
.await;
let req = test::TestRequest::post()
@ -522,10 +526,17 @@ mod tests {
"jsonrpc": "2.0",
"id": 1,
"error": {
"code": -32602,
"message": "Invalid params",
"data": "invalid base58"
}
"cause": {
"info": {
"error_message": "Failed parsing args: invalid base58: InvalidBase58Character('_', 3)"
},
"name": "PARSE_ERROR"
},
"code": -32700,
"data": "Failed parsing args: invalid base58: InvalidBase58Character('_', 3)",
"message": "Parse error",
"name": "REQUEST_VALIDATION_ERROR"
},
});
let response = call_rpc_handler_with_json(json_handler, request).await;
@ -545,10 +556,17 @@ mod tests {
"jsonrpc": "2.0",
"id": 1,
"error": {
"code": -32602,
"message": "Invalid params",
"data": "invalid length"
}
"cause": {
"info": {
"error_message": "Failed parsing args: invalid length: expected 32 bytes, got 6"
},
"name": "PARSE_ERROR"
},
"code": -32700,
"data": "Failed parsing args: invalid length: expected 32 bytes, got 6",
"message": "Parse error",
"name": "REQUEST_VALIDATION_ERROR"
},
});
let response = call_rpc_handler_with_json(json_handler, request).await;
@ -559,7 +577,7 @@ mod tests {
async fn test_get_account_balance_for_existing_account() {
let (json_handler, initial_accounts, _) = components_for_tests().await;
let acc1_id = initial_accounts[0].account_id.clone();
let acc1_id = initial_accounts[0].account_id;
let request = serde_json::json!({
"jsonrpc": "2.0",
@ -606,8 +624,8 @@ mod tests {
async fn test_get_accounts_nonces_for_existent_account() {
let (json_handler, initial_accounts, _) = components_for_tests().await;
let acc1_id = initial_accounts[0].account_id.clone();
let acc2_id = initial_accounts[1].account_id.clone();
let acc1_id = initial_accounts[0].account_id;
let acc2_id = initial_accounts[1].account_id;
let request = serde_json::json!({
"jsonrpc": "2.0",
@ -690,10 +708,17 @@ mod tests {
"jsonrpc": "2.0",
"id": 1,
"error": {
"code": -32602,
"message": "Invalid params",
"data": "invalid hex"
}
"cause": {
"info": {
"error_message": "Failed parsing args: Odd number of digits"
},
"name": "PARSE_ERROR"
},
"code": -32700,
"data": "Failed parsing args: Odd number of digits",
"message": "Parse error",
"name": "REQUEST_VALIDATION_ERROR"
},
});
let response = call_rpc_handler_with_json(json_handler, request).await;
@ -714,9 +739,16 @@ mod tests {
"jsonrpc": "2.0",
"id": 1,
"error": {
"code": -32602,
"message": "Invalid params",
"data": "invalid length"
"cause": {
"info": {
"error_message": "Failed parsing args: Invalid string length"
},
"name": "PARSE_ERROR"
},
"code": -32700,
"data": "Failed parsing args: Invalid string length",
"message": "Parse error",
"name": "REQUEST_VALIDATION_ERROR"
}
});

View File

@ -1,15 +1,34 @@
# Chef stage - uses pre-built cargo-chef image
FROM lukemathwalker/cargo-chef:latest-rust-1.91.1-slim-trixie AS chef
# Install build dependencies
# Install dependencies
RUN apt-get update && apt-get install -y \
build-essential \
pkg-config \
libssl-dev \
libclang-dev \
clang \
cmake \
ninja-build \
curl \
git \
&& rm -rf /var/lib/apt/lists/*
# Install r0vm (manual build as it's portable across different host platforms)
RUN git clone --depth 1 --branch release-3.0 https://github.com/risc0/risc0.git
RUN git clone --depth 1 --branch r0.1.91.1 https://github.com/risc0/rust.git
WORKDIR /risc0
RUN cargo install --path rzup
RUN rzup build --path /rust rust --verbose
RUN cargo install --path risc0/cargo-risczero
ENV PATH="/root/.cargo/bin:/root/.risc0/bin:${PATH}"
RUN cp "$(which r0vm)" /usr/local/bin/r0vm
RUN test -x /usr/local/bin/r0vm
RUN r0vm --version
# Install logos blockchain circuits
RUN curl -sSL https://raw.githubusercontent.com/logos-blockchain/logos-blockchain/main/scripts/setup-logos-blockchain-circuits.sh | bash
WORKDIR /sequencer_runner
# Planner stage - generates dependency recipe
@ -32,14 +51,6 @@ RUN cargo build --release --bin sequencer_runner
# Strip debug symbols to reduce binary size
RUN strip /sequencer_runner/target/release/sequencer_runner
# Install r0vm
RUN curl -L https://risczero.com/install | bash
ENV PATH="/root/.cargo/bin:/root/.risc0/bin:${PATH}"
RUN rzup install
RUN cp "$(which r0vm)" /usr/local/bin/r0vm
RUN test -x /usr/local/bin/r0vm
RUN r0vm --version
# Runtime stage - minimal image
FROM debian:trixie-slim
@ -59,6 +70,9 @@ COPY --from=builder --chown=sequencer_user:sequencer_user /sequencer_runner/targ
# Copy r0vm binary from builder
COPY --from=builder --chown=sequencer_user:sequencer_user /usr/local/bin/r0vm /usr/local/bin/r0vm
# Copy logos blockchain circuits from builder
COPY --from=builder --chown=sequencer_user:sequencer_user /root/.logos-blockchain-circuits /home/sequencer_user/.logos-blockchain-circuits
# Copy entrypoint script
COPY sequencer_runner/docker-entrypoint.sh /docker-entrypoint.sh
RUN chmod +x /docker-entrypoint.sh

View File

@ -8,6 +8,15 @@
"block_create_timeout_millis": 5000,
"retry_pending_blocks_timeout_millis": 7000,
"port": 3040,
"bedrock_config": {
"backoff": {
"start_delay_millis": 100,
"max_retries": 5
},
"channel_id": "0101010101010101010101010101010101010101010101010101010101010101",
"node_url": "http://localhost:18080"
},
"indexer_rpc_url": "ws://localhost:8779",
"initial_accounts": [
{
"account_id": "BLgCRDXYdQPMMWVHYRFGQZbgeHx9frkipa8GtpG2Syqy",
@ -155,13 +164,5 @@
37,
37,
37
],
"bedrock_config": {
"channel_id": "0101010101010101010101010101010101010101010101010101010101010101",
"node_url": "http://localhost:8080",
"auth": {
"username": "user"
}
},
"indexer_rpc_url": "ws://localhost:8779"
]
}

View File

@ -7,6 +7,16 @@
"mempool_max_size": 10000,
"block_create_timeout_millis": 10000,
"port": 3040,
"retry_pending_blocks_timeout_millis": 7000,
"bedrock_config": {
"backoff": {
"start_delay_millis": 100,
"max_retries": 5
},
"channel_id": "0101010101010101010101010101010101010101010101010101010101010101",
"node_url": "http://localhost:18080"
},
"indexer_rpc_url": "ws://localhost:8779",
"initial_accounts": [
{
"account_id": "BLgCRDXYdQPMMWVHYRFGQZbgeHx9frkipa8GtpG2Syqy",

View File

@ -6,7 +6,10 @@ use clap::Parser;
use common::rpc_primitives::RpcConfig;
use futures::{FutureExt as _, never::Never};
use log::{error, info, warn};
use sequencer_core::{SequencerCore, config::SequencerConfig};
use sequencer_core::{
SequencerCore, block_settlement_client::BlockSettlementClientTrait as _,
config::SequencerConfig,
};
use sequencer_rpc::new_http_server;
use tokio::{sync::Mutex, task::JoinHandle};
@ -23,6 +26,7 @@ struct Args {
///
/// Implements `Drop` to ensure all tasks are aborted and the HTTP server is stopped when dropped.
pub struct SequencerHandle {
addr: SocketAddr,
http_server_handle: ServerHandle,
main_loop_handle: JoinHandle<Result<Never>>,
retry_pending_blocks_loop_handle: JoinHandle<Result<Never>>,
@ -33,8 +37,9 @@ impl SequencerHandle {
/// Runs the sequencer indefinitely, monitoring its tasks.
///
/// If no error occurs, this function will never return.
async fn run_forever(&mut self) -> Result<Never> {
pub async fn run_forever(&mut self) -> Result<Never> {
let Self {
addr: _,
http_server_handle: _,
main_loop_handle,
retry_pending_blocks_loop_handle,
@ -59,11 +64,22 @@ impl SequencerHandle {
}
}
}
pub fn is_finished(&self) -> bool {
self.main_loop_handle.is_finished()
|| self.retry_pending_blocks_loop_handle.is_finished()
|| self.listen_for_bedrock_blocks_loop_handle.is_finished()
}
pub fn addr(&self) -> SocketAddr {
self.addr
}
}
impl Drop for SequencerHandle {
fn drop(&mut self) {
let Self {
addr: _,
http_server_handle,
main_loop_handle,
retry_pending_blocks_loop_handle,
@ -79,9 +95,7 @@ impl Drop for SequencerHandle {
}
}
pub async fn startup_sequencer(
app_config: SequencerConfig,
) -> Result<(SequencerHandle, SocketAddr)> {
pub async fn startup_sequencer(app_config: SequencerConfig) -> Result<SequencerHandle> {
let block_timeout = Duration::from_millis(app_config.block_create_timeout_millis);
let retry_pending_blocks_timeout =
Duration::from_millis(app_config.retry_pending_blocks_timeout_millis);
@ -115,15 +129,13 @@ pub async fn startup_sequencer(
let listen_for_bedrock_blocks_loop_handle =
tokio::spawn(listen_for_bedrock_blocks_loop(seq_core_wrapped));
Ok((
SequencerHandle {
http_server_handle,
main_loop_handle,
retry_pending_blocks_loop_handle,
listen_for_bedrock_blocks_loop_handle,
},
Ok(SequencerHandle {
addr,
))
http_server_handle,
main_loop_handle,
retry_pending_blocks_loop_handle,
listen_for_bedrock_blocks_loop_handle,
})
}
async fn main_loop(seq_core: Arc<Mutex<SequencerCore>>, block_timeout: Duration) -> Result<Never> {
@ -162,13 +174,9 @@ async fn retry_pending_blocks_loop(
(pending_blocks, client)
};
let Some(client) = block_settlement_client else {
continue;
};
info!("Resubmitting {} pending blocks", pending_blocks.len());
for block in &pending_blocks {
if let Err(e) = client.submit_block_to_bedrock(block).await {
if let Err(e) = block_settlement_client.submit_block_to_bedrock(block).await {
warn!(
"Failed to resubmit block with id {} with error {}",
block.header.block_id, e
@ -183,6 +191,8 @@ async fn listen_for_bedrock_blocks_loop(seq_core: Arc<Mutex<SequencerCore>>) ->
let indexer_client = seq_core.lock().await.indexer_client();
let retry_delay = Duration::from_secs(5);
loop {
// TODO: Subscribe from the first pending block ID?
let mut subscription = indexer_client
@ -205,9 +215,10 @@ async fn listen_for_bedrock_blocks_loop(seq_core: Arc<Mutex<SequencerCore>>) ->
}
warn!(
"Block subscription closed unexpectedly, reason: {:?}",
"Block subscription closed unexpectedly, reason: {:?}, retrying after {retry_delay:?}",
subscription.close_reason()
);
tokio::time::sleep(retry_delay).await;
}
}
@ -228,12 +239,12 @@ pub async fn main_runner() -> Result<()> {
}
// ToDo: Add restart on failures
let (mut sequencer_handle, _addr) = startup_sequencer(app_config).await?;
let mut sequencer_handle = startup_sequencer(app_config).await?;
info!("Sequencer running. Monitoring concurrent tasks...");
let Err(err) = sequencer_handle.run_forever().await;
error!("Sequencer failed: {err:?}");
error!("Sequencer failed: {err:#}");
info!("Shutting down sequencer...");

View File

@ -423,13 +423,7 @@ impl RocksDBIO {
)
.map_err(|rerr| DbError::rocksdb_cast_message(rerr, None))?;
let acc_ids = NSSATransaction::try_from(&tx)
.map_err(|err| {
DbError::db_interaction_error(format!(
"failed to decode transaction in block {} with err {err:?}",
block.header.block_id
))
})?
let acc_ids = tx
.affected_public_account_ids()
.into_iter()
.map(|account_id| account_id.into_value())
@ -438,8 +432,8 @@ impl RocksDBIO {
for acc_id in acc_ids {
acc_to_tx_map
.entry(acc_id)
.and_modify(|tx_hashes| tx_hashes.push(tx_hash))
.or_insert(vec![tx_hash]);
.and_modify(|tx_hashes| tx_hashes.push(tx_hash.into()))
.or_insert(vec![tx_hash.into()]);
}
}
@ -592,15 +586,7 @@ impl RocksDBIO {
for id in start..=block_id {
let block = self.get_block(id)?;
for encoded_transaction in block.body.transactions {
let transaction =
NSSATransaction::try_from(&encoded_transaction).map_err(|err| {
DbError::db_interaction_error(format!(
"failed to decode transaction in block {} with err {err:?}",
block.header.block_id
))
})?;
for transaction in block.body.transactions {
execute_check_transaction_on_state(
&mut breakpoint,
transaction_pre_check(transaction).map_err(|err| {
@ -854,24 +840,17 @@ impl RocksDBIO {
let block_id = self.get_block_id_by_tx_hash(tx_hash)?;
let block = self.get_block(block_id)?;
let enc_tx = block
let transaction = block
.body
.transactions
.iter()
.find(|tx| tx.hash() == tx_hash)
.find(|tx| tx.hash().0 == tx_hash)
.ok_or(DbError::db_interaction_error(format!(
"Missing transaction in block {} with hash {:#?}",
block.header.block_id, tx_hash
)))?;
let transaction = NSSATransaction::try_from(enc_tx).map_err(|err| {
DbError::db_interaction_error(format!(
"failed to decode transaction in block {} with err {err:?}",
block.header.block_id
))
})?;
tx_batch.push(transaction);
tx_batch.push(transaction.clone());
}
Ok(tx_batch)
@ -880,7 +859,6 @@ impl RocksDBIO {
#[cfg(test)]
mod tests {
use common::transaction::EncodedTransaction;
use nssa::AccountId;
use tempfile::tempdir;
@ -916,7 +894,7 @@ mod tests {
nssa::V02State::new_with_genesis_accounts(&[(acc1(), 10000), (acc2(), 20000)], &[])
}
fn transfer(amount: u128, nonce: u128, direction: bool) -> EncodedTransaction {
fn transfer(amount: u128, nonce: u128, direction: bool) -> NSSATransaction {
let from;
let to;
let sign_key;
@ -932,11 +910,7 @@ mod tests {
}
common::test_utils::create_transaction_native_token_transfer(
*from.value(),
nonce,
*to.value(),
amount,
sign_key,
from, nonce, to, amount, sign_key,
)
}
@ -962,12 +936,12 @@ mod tests {
assert_eq!(last_br_id, 0);
assert_eq!(last_block.header.hash, genesis_block().header.hash);
assert_eq!(
breakpoint.get_account_by_id(&acc1()),
final_state.get_account_by_id(&acc1())
breakpoint.get_account_by_id(acc1()),
final_state.get_account_by_id(acc1())
);
assert_eq!(
breakpoint.get_account_by_id(&acc2()),
final_state.get_account_by_id(&acc2())
breakpoint.get_account_by_id(acc2()),
final_state.get_account_by_id(acc2())
);
}
@ -999,13 +973,13 @@ mod tests {
assert_eq!(last_br_id, 0);
assert_ne!(last_block.header.hash, genesis_block().header.hash);
assert_eq!(
breakpoint.get_account_by_id(&acc1()).balance
- final_state.get_account_by_id(&acc1()).balance,
breakpoint.get_account_by_id(acc1()).balance
- final_state.get_account_by_id(acc1()).balance,
1
);
assert_eq!(
final_state.get_account_by_id(&acc2()).balance
- breakpoint.get_account_by_id(&acc2()).balance,
final_state.get_account_by_id(acc2()).balance
- breakpoint.get_account_by_id(acc2()).balance,
1
);
}
@ -1044,22 +1018,22 @@ mod tests {
assert_eq!(last_br_id, 1);
assert_ne!(last_block.header.hash, genesis_block().header.hash);
assert_eq!(
prev_breakpoint.get_account_by_id(&acc1()).balance
- final_state.get_account_by_id(&acc1()).balance,
prev_breakpoint.get_account_by_id(acc1()).balance
- final_state.get_account_by_id(acc1()).balance,
99
);
assert_eq!(
final_state.get_account_by_id(&acc2()).balance
- prev_breakpoint.get_account_by_id(&acc2()).balance,
final_state.get_account_by_id(acc2()).balance
- prev_breakpoint.get_account_by_id(acc2()).balance,
99
);
assert_eq!(
breakpoint.get_account_by_id(&acc1()),
final_state.get_account_by_id(&acc1())
breakpoint.get_account_by_id(acc1()),
final_state.get_account_by_id(acc1())
);
assert_eq!(
breakpoint.get_account_by_id(&acc2()),
final_state.get_account_by_id(&acc2())
breakpoint.get_account_by_id(acc2()),
final_state.get_account_by_id(acc2())
);
}
@ -1115,10 +1089,10 @@ mod tests {
let block = common::test_utils::produce_dummy_block(5, Some(prev_hash), vec![transfer_tx]);
dbio.put_block(block).unwrap();
let control_block_id1 = dbio.get_block_id_by_hash(control_hash1).unwrap();
let control_block_id2 = dbio.get_block_id_by_hash(control_hash2).unwrap();
let control_block_id3 = dbio.get_block_id_by_tx_hash(control_tx_hash1).unwrap();
let control_block_id4 = dbio.get_block_id_by_tx_hash(control_tx_hash2).unwrap();
let control_block_id1 = dbio.get_block_id_by_hash(control_hash1.0).unwrap();
let control_block_id2 = dbio.get_block_id_by_hash(control_hash2.0).unwrap();
let control_block_id3 = dbio.get_block_id_by_tx_hash(control_tx_hash1.0).unwrap();
let control_block_id4 = dbio.get_block_id_by_tx_hash(control_tx_hash2.0).unwrap();
assert_eq!(control_block_id1, 2);
assert_eq!(control_block_id2, 3);
@ -1177,12 +1151,12 @@ mod tests {
dbio.put_block(block).unwrap();
let block_hashes_mem: Vec<[u8; 32]> =
block_res.into_iter().map(|bl| bl.header.hash).collect();
block_res.into_iter().map(|bl| bl.header.hash.0).collect();
let batch_res = dbio.get_block_batch(2, 4).unwrap();
let block_hashes_db: Vec<[u8; 32]> =
batch_res.into_iter().map(|bl| bl.header.hash).collect();
batch_res.into_iter().map(|bl| bl.header.hash.0).collect();
assert_eq!(block_hashes_mem, block_hashes_db);
@ -1192,7 +1166,7 @@ mod tests {
let block_hashes_db_limited: Vec<[u8; 32]> = batch_res_limited
.into_iter()
.map(|bl| bl.header.hash)
.map(|bl| bl.header.hash.0)
.collect();
assert_eq!(block_hashes_mem_limited, block_hashes_db_limited.as_slice());
@ -1214,7 +1188,7 @@ mod tests {
let prev_hash = last_block.header.hash;
let transfer_tx = transfer(1, 0, true);
tx_hash_res.push(transfer_tx.hash());
tx_hash_res.push(transfer_tx.hash().0);
let block = common::test_utils::produce_dummy_block(2, Some(prev_hash), vec![transfer_tx]);
@ -1226,7 +1200,7 @@ mod tests {
let prev_hash = last_block.header.hash;
let transfer_tx = transfer(1, 1, true);
tx_hash_res.push(transfer_tx.hash());
tx_hash_res.push(transfer_tx.hash().0);
let block = common::test_utils::produce_dummy_block(3, Some(prev_hash), vec![transfer_tx]);
@ -1238,7 +1212,7 @@ mod tests {
let prev_hash = last_block.header.hash;
let transfer_tx = transfer(1, 2, true);
tx_hash_res.push(transfer_tx.hash());
tx_hash_res.push(transfer_tx.hash().0);
let block = common::test_utils::produce_dummy_block(4, Some(prev_hash), vec![transfer_tx]);
@ -1250,25 +1224,20 @@ mod tests {
let prev_hash = last_block.header.hash;
let transfer_tx = transfer(1, 3, true);
tx_hash_res.push(transfer_tx.hash());
tx_hash_res.push(transfer_tx.hash().0);
let block = common::test_utils::produce_dummy_block(5, Some(prev_hash), vec![transfer_tx]);
dbio.put_block(block).unwrap();
let acc1_tx = dbio.get_acc_transactions(*acc1().value(), 0, 4).unwrap();
let acc1_tx_hashes: Vec<[u8; 32]> = acc1_tx
.into_iter()
.map(|tx| EncodedTransaction::from(tx).hash())
.collect();
let acc1_tx_hashes: Vec<[u8; 32]> = acc1_tx.into_iter().map(|tx| tx.hash().0).collect();
assert_eq!(acc1_tx_hashes, tx_hash_res);
let acc1_tx_limited = dbio.get_acc_transactions(*acc1().value(), 1, 4).unwrap();
let acc1_tx_limited_hashes: Vec<[u8; 32]> = acc1_tx_limited
.into_iter()
.map(|tx| EncodedTransaction::from(tx).hash())
.collect();
let acc1_tx_limited_hashes: Vec<[u8; 32]> =
acc1_tx_limited.into_iter().map(|tx| tx.hash().0).collect();
assert_eq!(acc1_tx_limited_hashes.as_slice(), &tx_hash_res[1..])
}

View File

@ -54,7 +54,7 @@ pub unsafe extern "C" fn wallet_ffi_get_public_account_key(
let account_id = AccountId::new(unsafe { (*account_id).data });
let private_key = match wallet.get_account_public_signing_key(&account_id) {
let private_key = match wallet.get_account_public_signing_key(account_id) {
Some(k) => k,
None => {
print_error("Public account key not found in wallet");

View File

@ -73,7 +73,7 @@ pub unsafe extern "C" fn wallet_ffi_transfer_public(
match block_on(transfer.send_public_transfer(from_id, to_id, amount)) {
Ok(Ok(response)) => {
let tx_hash = CString::new(response.tx_hash)
let tx_hash = CString::new(response.tx_hash.to_string())
.map(|s| s.into_raw())
.unwrap_or(ptr::null_mut());
@ -152,7 +152,7 @@ pub unsafe extern "C" fn wallet_ffi_register_public_account(
match block_on(transfer.register_account(account_id)) {
Ok(Ok(response)) => {
let tx_hash = CString::new(response.tx_hash)
let tx_hash = CString::new(response.tx_hash.to_string())
.map(|s| s.into_raw())
.unwrap_or(ptr::null_mut());

View File

@ -122,6 +122,7 @@ impl Default for FfiAccountList {
/// Result of a transfer operation.
#[repr(C)]
pub struct FfiTransferResult {
// TODO: Replace with HashType FFI representation
/// Transaction hash (null-terminated string, or null on failure)
pub tx_hash: *mut c_char,
/// Whether the transfer succeeded

View File

@ -254,7 +254,7 @@ pub unsafe extern "C" fn wallet_ffi_get_sequencer_addr(handle: *mut WalletHandle
}
};
let addr = wallet.config().sequencer_addr.clone();
let addr = wallet.config().sequencer_addr.clone().to_string();
match std::ffi::CString::new(addr) {
Ok(s) => s.into_raw(),

View File

@ -1,4 +1,4 @@
use std::collections::{HashMap, hash_map::Entry};
use std::collections::{BTreeMap, HashMap, btree_map::Entry};
use anyhow::Result;
use key_protocol::{
@ -29,8 +29,8 @@ impl WalletChainStore {
anyhow::bail!("Roots not found; please run setup beforehand");
}
let mut public_init_acc_map = HashMap::new();
let mut private_init_acc_map = HashMap::new();
let mut public_init_acc_map = BTreeMap::new();
let mut private_init_acc_map = BTreeMap::new();
let public_root = persistent_accounts
.iter()
@ -69,11 +69,11 @@ impl WalletChainStore {
}
PersistentAccountData::Preconfigured(acc_data) => match acc_data {
InitialAccountData::Public(data) => {
public_init_acc_map.insert(data.account_id.parse()?, data.pub_sign_key);
public_init_acc_map.insert(data.account_id, data.pub_sign_key);
}
InitialAccountData::Private(data) => {
private_init_acc_map
.insert(data.account_id.parse()?, (data.key_chain, data.account));
.insert(data.account_id, (data.key_chain, data.account));
}
},
}
@ -92,13 +92,13 @@ impl WalletChainStore {
}
pub fn new_storage(config: WalletConfig, password: String) -> Result<Self> {
let mut public_init_acc_map = HashMap::new();
let mut private_init_acc_map = HashMap::new();
let mut public_init_acc_map = BTreeMap::new();
let mut private_init_acc_map = BTreeMap::new();
for init_acc_data in config.initial_accounts.clone() {
match init_acc_data {
InitialAccountData::Public(data) => {
public_init_acc_map.insert(data.account_id.parse()?, data.pub_sign_key);
public_init_acc_map.insert(data.account_id, data.pub_sign_key);
}
InitialAccountData::Private(data) => {
let mut account = data.account;
@ -106,8 +106,7 @@ impl WalletChainStore {
// the config. Therefore we overwrite it here on startup. Fix this when program
// id can be fetched from the node and queried from the wallet.
account.program_owner = Program::authenticated_transfer_program().id();
private_init_acc_map
.insert(data.account_id.parse()?, (data.key_chain, account));
private_init_acc_map.insert(data.account_id, (data.key_chain, account));
}
}
}
@ -262,7 +261,7 @@ mod tests {
fn create_sample_wallet_config() -> WalletConfig {
WalletConfig {
override_rust_log: None,
sequencer_addr: "http://127.0.0.1".to_string(),
sequencer_addr: "http://127.0.0.1".parse().unwrap(),
seq_poll_timeout_millis: 12000,
seq_tx_poll_max_blocks: 5,
seq_poll_max_retries: 10,

View File

@ -80,7 +80,7 @@ impl WalletSubcommand for NewSubcommand {
let private_key = wallet_core
.storage
.user_data
.get_pub_account_signing_key(&account_id)
.get_pub_account_signing_key(account_id)
.unwrap();
let public_key = PublicKey::new_from_private_key(private_key);
@ -195,7 +195,7 @@ impl WalletSubcommand for AccountSubcommand {
let private_key = wallet_core
.storage
.user_data
.get_pub_account_signing_key(&account_id)
.get_pub_account_signing_key(account_id)
.ok_or(anyhow::anyhow!("Public account not found in storage"))?;
let public_key = PublicKey::new_from_private_key(private_key);

View File

@ -1,5 +1,6 @@
use anyhow::Result;
use clap::Subcommand;
use common::HashType;
use crate::{
WalletCore,
@ -20,7 +21,7 @@ pub enum ChainSubcommand {
Transaction {
/// hash - valid 32 byte hex string
#[arg(short = 't', long)]
hash: String,
hash: HashType,
},
}

View File

@ -95,7 +95,7 @@ impl WalletSubcommand for ConfigSubcommand {
wallet_core.storage.wallet_config.override_rust_log = Some(value);
}
"sequencer_addr" => {
wallet_core.storage.wallet_config.sequencer_addr = value;
wallet_core.storage.wallet_config.sequencer_addr = value.parse()?;
}
"seq_poll_timeout_millis" => {
wallet_core.storage.wallet_config.seq_poll_timeout_millis =

View File

@ -2,6 +2,7 @@ use std::{io::Write, path::PathBuf};
use anyhow::{Context, Result};
use clap::{Parser, Subcommand};
use common::HashType;
use nssa::{ProgramDeploymentTransaction, program::Program};
use crate::{
@ -89,7 +90,7 @@ pub struct Args {
#[derive(Debug, Clone)]
pub enum SubcommandReturnValue {
PrivacyPreservingTransfer { tx_hash: String },
PrivacyPreservingTransfer { tx_hash: HashType },
RegisterAccount { account_id: nssa::AccountId },
Account(nssa::Account),
Empty,

View File

@ -81,9 +81,7 @@ impl WalletSubcommand for AuthTransferSubcommand {
println!("Results of tx send are {res:#?}");
let tx_hash = res.tx_hash;
let transfer_tx = wallet_core
.poll_native_token_transfer(tx_hash.clone())
.await?;
let transfer_tx = wallet_core.poll_native_token_transfer(tx_hash).await?;
if let NSSATransaction::PrivacyPreserving(tx) = transfer_tx {
let acc_decode_data = vec![Decode(secret, account_id)];
@ -320,9 +318,7 @@ impl WalletSubcommand for NativeTokenTransferProgramSubcommandPrivate {
println!("Results of tx send are {res:#?}");
let tx_hash = res.tx_hash;
let transfer_tx = wallet_core
.poll_native_token_transfer(tx_hash.clone())
.await?;
let transfer_tx = wallet_core.poll_native_token_transfer(tx_hash).await?;
if let NSSATransaction::PrivacyPreserving(tx) = transfer_tx {
let acc_decode_data = vec![Decode(secret_from, from), Decode(secret_to, to)];
@ -362,9 +358,7 @@ impl WalletSubcommand for NativeTokenTransferProgramSubcommandPrivate {
println!("Results of tx send are {res:#?}");
let tx_hash = res.tx_hash;
let transfer_tx = wallet_core
.poll_native_token_transfer(tx_hash.clone())
.await?;
let transfer_tx = wallet_core.poll_native_token_transfer(tx_hash).await?;
if let NSSATransaction::PrivacyPreserving(tx) = transfer_tx {
let acc_decode_data = vec![Decode(secret_from, from)];
@ -400,9 +394,7 @@ impl WalletSubcommand for NativeTokenTransferProgramSubcommandShielded {
println!("Results of tx send are {res:#?}");
let tx_hash = res.tx_hash;
let transfer_tx = wallet_core
.poll_native_token_transfer(tx_hash.clone())
.await?;
let transfer_tx = wallet_core.poll_native_token_transfer(tx_hash).await?;
if let NSSATransaction::PrivacyPreserving(tx) = transfer_tx {
let acc_decode_data = vec![Decode(secret, to)];
@ -475,9 +467,7 @@ impl WalletSubcommand for NativeTokenTransferProgramSubcommand {
println!("Results of tx send are {res:#?}");
let tx_hash = res.tx_hash;
let transfer_tx = wallet_core
.poll_native_token_transfer(tx_hash.clone())
.await?;
let transfer_tx = wallet_core.poll_native_token_transfer(tx_hash).await?;
if let NSSATransaction::PrivacyPreserving(tx) = transfer_tx {
let acc_decode_data = vec![Decode(secret, from)];

View File

@ -118,9 +118,7 @@ impl WalletSubcommand for PinataProgramSubcommandPublic {
println!("Results of tx send are {res:#?}");
let tx_hash = res.tx_hash;
let transfer_tx = wallet_core
.poll_native_token_transfer(tx_hash.clone())
.await?;
let transfer_tx = wallet_core.poll_native_token_transfer(tx_hash).await?;
println!("Transaction data is {transfer_tx:?}");
@ -153,9 +151,7 @@ impl WalletSubcommand for PinataProgramSubcommandPrivate {
println!("Results of tx send are {res:#?}");
let tx_hash = res.tx_hash;
let transfer_tx = wallet_core
.poll_native_token_transfer(tx_hash.clone())
.await?;
let transfer_tx = wallet_core.poll_native_token_transfer(tx_hash).await?;
println!("Transaction data is {transfer_tx:?}");

View File

@ -724,9 +724,7 @@ impl WalletSubcommand for TokenProgramSubcommandPrivate {
println!("Results of tx send are {res:#?}");
let tx_hash = res.tx_hash;
let transfer_tx = wallet_core
.poll_native_token_transfer(tx_hash.clone())
.await?;
let transfer_tx = wallet_core.poll_native_token_transfer(tx_hash).await?;
if let NSSATransaction::PrivacyPreserving(tx) = transfer_tx {
let acc_decode_data = vec![
@ -775,9 +773,7 @@ impl WalletSubcommand for TokenProgramSubcommandPrivate {
println!("Results of tx send are {res:#?}");
let tx_hash = res.tx_hash;
let transfer_tx = wallet_core
.poll_native_token_transfer(tx_hash.clone())
.await?;
let transfer_tx = wallet_core.poll_native_token_transfer(tx_hash).await?;
if let NSSATransaction::PrivacyPreserving(tx) = transfer_tx {
let acc_decode_data = vec![Decode(secret_sender, sender_account_id)];
@ -811,9 +807,7 @@ impl WalletSubcommand for TokenProgramSubcommandPrivate {
println!("Results of tx send are {res:#?}");
let tx_hash = res.tx_hash;
let transfer_tx = wallet_core
.poll_native_token_transfer(tx_hash.clone())
.await?;
let transfer_tx = wallet_core.poll_native_token_transfer(tx_hash).await?;
if let NSSATransaction::PrivacyPreserving(tx) = transfer_tx {
let acc_decode_data = vec![
@ -850,9 +844,7 @@ impl WalletSubcommand for TokenProgramSubcommandPrivate {
println!("Results of tx send are {res:#?}");
let tx_hash = res.tx_hash;
let transfer_tx = wallet_core
.poll_native_token_transfer(tx_hash.clone())
.await?;
let transfer_tx = wallet_core.poll_native_token_transfer(tx_hash).await?;
if let NSSATransaction::PrivacyPreserving(tx) = transfer_tx {
let acc_decode_data = vec![
@ -902,9 +894,7 @@ impl WalletSubcommand for TokenProgramSubcommandPrivate {
println!("Results of tx send are {res:#?}");
let tx_hash = res.tx_hash;
let transfer_tx = wallet_core
.poll_native_token_transfer(tx_hash.clone())
.await?;
let transfer_tx = wallet_core.poll_native_token_transfer(tx_hash).await?;
if let NSSATransaction::PrivacyPreserving(tx) = transfer_tx {
let acc_decode_data = vec![Decode(secret_definition, definition_account_id)];
@ -948,9 +938,7 @@ impl WalletSubcommand for TokenProgramSubcommandDeshielded {
println!("Results of tx send are {res:#?}");
let tx_hash = res.tx_hash;
let transfer_tx = wallet_core
.poll_native_token_transfer(tx_hash.clone())
.await?;
let transfer_tx = wallet_core.poll_native_token_transfer(tx_hash).await?;
if let NSSATransaction::PrivacyPreserving(tx) = transfer_tx {
let acc_decode_data = vec![Decode(secret_sender, sender_account_id)];
@ -984,9 +972,7 @@ impl WalletSubcommand for TokenProgramSubcommandDeshielded {
println!("Results of tx send are {res:#?}");
let tx_hash = res.tx_hash;
let transfer_tx = wallet_core
.poll_native_token_transfer(tx_hash.clone())
.await?;
let transfer_tx = wallet_core.poll_native_token_transfer(tx_hash).await?;
if let NSSATransaction::PrivacyPreserving(tx) = transfer_tx {
let acc_decode_data = vec![Decode(secret_definition, definition_account_id)];
@ -1020,9 +1006,7 @@ impl WalletSubcommand for TokenProgramSubcommandDeshielded {
println!("Results of tx send are {res:#?}");
let tx_hash = res.tx_hash;
let transfer_tx = wallet_core
.poll_native_token_transfer(tx_hash.clone())
.await?;
let transfer_tx = wallet_core.poll_native_token_transfer(tx_hash).await?;
if let NSSATransaction::PrivacyPreserving(tx) = transfer_tx {
let acc_decode_data = vec![Decode(secret_definition, definition_account_id)];
@ -1078,9 +1062,7 @@ impl WalletSubcommand for TokenProgramSubcommandShielded {
println!("Results of tx send are {res:#?}");
let tx_hash = res.tx_hash;
let transfer_tx = wallet_core
.poll_native_token_transfer(tx_hash.clone())
.await?;
let transfer_tx = wallet_core.poll_native_token_transfer(tx_hash).await?;
if let NSSATransaction::PrivacyPreserving(tx) = transfer_tx {
println!("Transaction data is {:?}", tx.message);
@ -1109,9 +1091,7 @@ impl WalletSubcommand for TokenProgramSubcommandShielded {
println!("Results of tx send are {res:#?}");
let tx_hash = res.tx_hash;
let transfer_tx = wallet_core
.poll_native_token_transfer(tx_hash.clone())
.await?;
let transfer_tx = wallet_core.poll_native_token_transfer(tx_hash).await?;
if let NSSATransaction::PrivacyPreserving(tx) = transfer_tx {
let acc_decode_data = vec![Decode(secret_recipient, recipient_account_id)];
@ -1145,9 +1125,7 @@ impl WalletSubcommand for TokenProgramSubcommandShielded {
println!("Results of tx send are {res:#?}");
let tx_hash = res.tx_hash;
let transfer_tx = wallet_core
.poll_native_token_transfer(tx_hash.clone())
.await?;
let transfer_tx = wallet_core.poll_native_token_transfer(tx_hash).await?;
if let NSSATransaction::PrivacyPreserving(tx) = transfer_tx {
let acc_decode_data = vec![Decode(secret_holder, holder_account_id)];
@ -1181,9 +1159,7 @@ impl WalletSubcommand for TokenProgramSubcommandShielded {
println!("Results of tx send are {res:#?}");
let tx_hash = res.tx_hash;
let transfer_tx = wallet_core
.poll_native_token_transfer(tx_hash.clone())
.await?;
let transfer_tx = wallet_core.poll_native_token_transfer(tx_hash).await?;
if let NSSATransaction::PrivacyPreserving(tx) = transfer_tx {
let acc_decode_data = vec![Decode(secret_holder, holder_account_id)];
@ -1230,9 +1206,7 @@ impl WalletSubcommand for TokenProgramSubcommandShielded {
println!("Results of tx send are {res:#?}");
let tx_hash = res.tx_hash;
let transfer_tx = wallet_core
.poll_native_token_transfer(tx_hash.clone())
.await?;
let transfer_tx = wallet_core.poll_native_token_transfer(tx_hash).await?;
if let NSSATransaction::PrivacyPreserving(tx) = transfer_tx {
println!("Transaction data is {:?}", tx.message);
@ -1273,9 +1247,7 @@ impl WalletSubcommand for CreateNewTokenProgramSubcommand {
println!("Results of tx send are {res:#?}");
let tx_hash = res.tx_hash;
let transfer_tx = wallet_core
.poll_native_token_transfer(tx_hash.clone())
.await?;
let transfer_tx = wallet_core.poll_native_token_transfer(tx_hash).await?;
if let NSSATransaction::PrivacyPreserving(tx) = transfer_tx {
let acc_decode_data = vec![
@ -1314,9 +1286,7 @@ impl WalletSubcommand for CreateNewTokenProgramSubcommand {
println!("Results of tx send are {res:#?}");
let tx_hash = res.tx_hash;
let transfer_tx = wallet_core
.poll_native_token_transfer(tx_hash.clone())
.await?;
let transfer_tx = wallet_core.poll_native_token_transfer(tx_hash).await?;
if let NSSATransaction::PrivacyPreserving(tx) = transfer_tx {
let acc_decode_data = vec![Decode(secret_definition, definition_account_id)];
@ -1352,9 +1322,7 @@ impl WalletSubcommand for CreateNewTokenProgramSubcommand {
println!("Results of tx send are {res:#?}");
let tx_hash = res.tx_hash;
let transfer_tx = wallet_core
.poll_native_token_transfer(tx_hash.clone())
.await?;
let transfer_tx = wallet_core.poll_native_token_transfer(tx_hash).await?;
if let NSSATransaction::PrivacyPreserving(tx) = transfer_tx {
let acc_decode_data = vec![Decode(secret_supply, supply_account_id)];

View File

@ -14,10 +14,11 @@ use key_protocol::key_management::{
};
use log::warn;
use serde::{Deserialize, Serialize};
use url::Url;
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct InitialAccountDataPublic {
pub account_id: String,
pub account_id: nssa::AccountId,
pub pub_sign_key: nssa::PrivateKey,
}
@ -30,7 +31,7 @@ pub struct PersistentAccountDataPublic {
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct InitialAccountDataPrivate {
pub account_id: String,
pub account_id: nssa::AccountId,
pub account: nssa_core::account::Account,
pub key_chain: KeyChain,
}
@ -111,8 +112,8 @@ impl PersistentStorage {
impl InitialAccountData {
pub fn account_id(&self) -> nssa::AccountId {
match &self {
Self::Public(acc) => acc.account_id.parse().unwrap(),
Self::Private(acc) => acc.account_id.parse().unwrap(),
Self::Public(acc) => acc.account_id,
Self::Private(acc) => acc.account_id,
}
}
}
@ -182,7 +183,7 @@ pub struct WalletConfig {
#[serde(skip_serializing_if = "Option::is_none")]
pub override_rust_log: Option<String>,
/// Sequencer URL
pub sequencer_addr: String,
pub sequencer_addr: Url,
/// Sequencer polling duration for new blocks in milliseconds
pub seq_poll_timeout_millis: u64,
/// Sequencer polling max number of blocks to find transaction
@ -202,7 +203,7 @@ impl Default for WalletConfig {
fn default() -> Self {
Self {
override_rust_log: None,
sequencer_addr: "http://127.0.0.1:3040".to_string(),
sequencer_addr: "http://127.0.0.1:3040".parse().unwrap(),
seq_poll_timeout_millis: 12000,
seq_tx_poll_max_blocks: 5,
seq_poll_max_retries: 5,

View File

@ -90,7 +90,7 @@ pub fn produce_data_for_storage(
for (account_id, key) in &user_data.default_pub_account_signing_keys {
vec_for_storage.push(
InitialAccountData::Public(InitialAccountDataPublic {
account_id: account_id.to_string(),
account_id: *account_id,
pub_sign_key: key.clone(),
})
.into(),
@ -100,7 +100,7 @@ pub fn produce_data_for_storage(
for (account_id, (key_chain, account)) in &user_data.default_user_private_accounts {
vec_for_storage.push(
InitialAccountData::Private(InitialAccountDataPrivate {
account_id: account_id.to_string(),
account_id: *account_id,
account: account.clone(),
key_chain: key_chain.clone(),
})

View File

@ -4,10 +4,8 @@ use anyhow::{Context, Result};
use base64::{Engine, engine::general_purpose::STANDARD as BASE64};
use chain_storage::WalletChainStore;
use common::{
error::ExecutionFailureKind,
rpc_primitives::requests::SendTxResponse,
sequencer_client::SequencerClient,
transaction::{EncodedTransaction, NSSATransaction},
HashType, error::ExecutionFailureKind, rpc_primitives::requests::SendTxResponse,
sequencer_client::SequencerClient, transaction::NSSATransaction,
};
use config::WalletConfig;
use key_protocol::key_management::key_tree::{chain_index::ChainIndex, traits::KeyNode as _};
@ -21,7 +19,6 @@ use nssa::{
use nssa_core::{Commitment, MembershipProof, SharedSecretKey, program::InstructionData};
pub use privacy_preserving_tx::PrivacyPreservingAccount;
use tokio::io::AsyncWriteExt;
use url::Url;
use crate::{
config::{PersistentStorage, WalletConfigOverrides},
@ -114,7 +111,7 @@ impl WalletCore {
}
let sequencer_client = Arc::new(SequencerClient::new_with_auth(
Url::parse(&config.sequencer_addr)?,
config.sequencer_addr.clone(),
config.basic_auth.clone(),
)?);
let tx_poller = TxPoller::new(config.clone(), Arc::clone(&sequencer_client));
@ -199,7 +196,7 @@ impl WalletCore {
pub async fn get_account_balance(&self, acc: AccountId) -> Result<u128> {
Ok(self
.sequencer_client
.get_account_balance(acc.to_string())
.get_account_balance(acc)
.await?
.balance)
}
@ -208,23 +205,20 @@ impl WalletCore {
pub async fn get_accounts_nonces(&self, accs: Vec<AccountId>) -> Result<Vec<u128>> {
Ok(self
.sequencer_client
.get_accounts_nonces(accs.into_iter().map(|acc| acc.to_string()).collect())
.get_accounts_nonces(accs)
.await?
.nonces)
}
/// Get account
pub async fn get_account_public(&self, account_id: AccountId) -> Result<Account> {
let response = self
.sequencer_client
.get_account(account_id.to_string())
.await?;
let response = self.sequencer_client.get_account(account_id).await?;
Ok(response.account)
}
pub fn get_account_public_signing_key(
&self,
account_id: &AccountId,
account_id: AccountId,
) -> Option<&nssa::PrivateKey> {
self.storage
.user_data
@ -244,12 +238,12 @@ impl WalletCore {
}
/// Poll transactions
pub async fn poll_native_token_transfer(&self, hash: String) -> Result<NSSATransaction> {
pub async fn poll_native_token_transfer(&self, hash: HashType) -> Result<NSSATransaction> {
let transaction_encoded = self.poller.poll_tx(hash).await?;
let tx_base64_decode = BASE64.decode(transaction_encoded)?;
let pub_tx = borsh::from_slice::<EncodedTransaction>(&tx_base64_decode).unwrap();
let pub_tx = borsh::from_slice::<NSSATransaction>(&tx_base64_decode).unwrap();
Ok(NSSATransaction::try_from(&pub_tx)?)
Ok(pub_tx)
}
pub async fn check_private_account_initialized(
@ -392,8 +386,7 @@ impl WalletCore {
let bar = indicatif::ProgressBar::new(num_of_blocks);
while let Some(block) = blocks.try_next().await? {
for tx in block.transactions {
let nssa_tx = NSSATransaction::try_from(&tx)?;
self.sync_private_accounts_with_tx(nssa_tx);
self.sync_private_accounts_with_tx(tx);
}
self.last_synced_block = block.block_id;

View File

@ -1,7 +1,7 @@
use std::sync::Arc;
use anyhow::Result;
use common::{block::HashableBlockData, sequencer_client::SequencerClient};
use common::{HashType, block::HashableBlockData, sequencer_client::SequencerClient};
use log::{info, warn};
use crate::config::WalletConfig;
@ -28,10 +28,11 @@ impl TxPoller {
}
}
pub async fn poll_tx(&self, tx_hash: String) -> Result<String> {
// TODO: this polling is not based on blocks, but on timeouts, need to fix this.
pub async fn poll_tx(&self, tx_hash: HashType) -> Result<String> {
let max_blocks_to_query = self.polling_max_blocks_to_query;
info!("Starting poll for transaction {tx_hash:#?}");
info!("Starting poll for transaction {tx_hash}");
for poll_id in 1..max_blocks_to_query {
info!("Poll {poll_id}");
@ -40,10 +41,10 @@ impl TxPoller {
let tx_obj = loop {
let tx_obj = self
.client
.get_transaction_by_hash(tx_hash.clone())
.get_transaction_by_hash(tx_hash)
.await
.inspect_err(|err| {
warn!("Failed to get transaction by hash {tx_hash:#?} with error: {err:#?}")
warn!("Failed to get transaction by hash {tx_hash} with error: {err:#?}")
});
if let Ok(tx_obj) = tx_obj {
@ -57,8 +58,8 @@ impl TxPoller {
}
};
if tx_obj.transaction.is_some() {
return Ok(tx_obj.transaction.unwrap());
if let Some(tx) = tx_obj.transaction {
return Ok(tx);
}
tokio::time::sleep(std::time::Duration::from_millis(self.polling_delay_millis)).await;

View File

@ -69,7 +69,7 @@ impl AccountManager {
.await
.map_err(|_| ExecutionFailureKind::KeyNotFoundError)?;
let sk = wallet.get_account_public_signing_key(&account_id).cloned();
let sk = wallet.get_account_public_signing_key(account_id).cloned();
let account = AccountWithMetadata::new(acc.clone(), sk.is_some(), account_id);
(State::Public { account, sk }, 0)

Some files were not shown because too many files have changed in this diff Show More