Merge pull request #316 from logos-blockchain/arjentix/full-bedrock-integration

Full bedrock integration
This commit is contained in:
Daniil Polyakov 2026-02-12 17:47:47 +03:00 committed by GitHub
commit cb6fb881ac
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
144 changed files with 3365 additions and 2858 deletions

View File

@ -100,7 +100,7 @@ jobs:
RISC0_SKIP_BUILD: "1"
run: cargo clippy -p "*programs" -- -D warnings
tests:
unit-tests:
runs-on: ubuntu-latest
timeout-minutes: 60
steps:
@ -126,7 +126,35 @@ jobs:
env:
RISC0_DEV_MODE: "1"
RUST_LOG: "info"
run: cargo nextest run --no-fail-fast -- --skip tps_test
run: cargo nextest run --workspace --exclude integration_tests
integration-tests:
runs-on: ubuntu-latest
timeout-minutes: 60
steps:
- uses: actions/checkout@v5
with:
ref: ${{ github.head_ref }}
- uses: ./.github/actions/install-system-deps
- uses: ./.github/actions/install-risc0
- uses: ./.github/actions/install-logos-blockchain-circuits
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Install active toolchain
run: rustup install
- name: Install nextest
run: cargo install --locked cargo-nextest
- name: Run tests
env:
RISC0_DEV_MODE: "1"
RUST_LOG: "info"
run: cargo nextest run -p integration_tests -- --skip tps_test
valid-proof-test:
runs-on: ubuntu-latest

View File

@ -1,23 +0,0 @@
name: Deploy Sequencer
on:
workflow_dispatch:
jobs:
deploy:
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Deploy to server
uses: appleboy/ssh-action@v1.2.4
with:
host: ${{ secrets.DEPLOY_SSH_HOST }}
username: ${{ secrets.DEPLOY_SSH_USERNAME }}
key: ${{ secrets.DEPLOY_SSH_KEY }}
envs: GITHUB_ACTOR
script_path: ci_scripts/deploy.sh

View File

@ -1,4 +1,4 @@
name: Publish Sequencer Runner Image
name: Publish Docker Images
on:
workflow_dispatch:
@ -6,6 +6,15 @@ on:
jobs:
publish:
runs-on: ubuntu-latest
strategy:
matrix:
include:
- name: sequencer_runner
dockerfile: ./sequencer_runner/Dockerfile
- name: indexer_service
dockerfile: ./indexer/service/Dockerfile
- name: explorer_service
dockerfile: ./explorer_service/Dockerfile
steps:
- uses: actions/checkout@v5
@ -23,7 +32,7 @@ jobs:
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ secrets.DOCKER_REGISTRY }}/${{ github.repository }}/sequencer_runner
images: ${{ secrets.DOCKER_REGISTRY }}/${{ github.repository }}/${{ matrix.name }}
tags: |
type=ref,event=branch
type=ref,event=pr
@ -36,7 +45,7 @@ jobs:
uses: docker/build-push-action@v5
with:
context: .
file: ./sequencer_runner/Dockerfile
file: ${{ matrix.dockerfile }}
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}

1
.gitignore vendored
View File

@ -9,3 +9,4 @@ rocksdb
sequencer_runner/data/
storage.json
result
wallet-ffi/wallet_ffi.h

543
Cargo.lock generated
View File

@ -69,7 +69,7 @@ dependencies = [
"actix-rt",
"actix-service",
"actix-utils",
"base64",
"base64 0.22.1",
"bitflags 2.10.0",
"bytes",
"bytestring",
@ -398,6 +398,15 @@ version = "1.0.100"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61"
[[package]]
name = "arc-swap"
version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9ded5f9a03ac8f24d1b8a25101ee812cd32cdc8c50a4c50237de2c4915850e73"
dependencies = [
"rustversion",
]
[[package]]
name = "archery"
version = "1.2.2"
@ -813,6 +822,22 @@ version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50"
[[package]]
name = "astral-tokio-tar"
version = "0.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec179a06c1769b1e42e1e2cbe74c7dcdb3d6383c838454d063eaac5bbb7ebbe5"
dependencies = [
"filetime",
"futures-core",
"libc",
"portable-atomic",
"rustc-hash",
"tokio",
"tokio-stream",
"xattr",
]
[[package]]
name = "async-lock"
version = "3.4.2"
@ -946,7 +971,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b52af3cb4058c895d37317bb27508dccc8e5f2d39454016b297bf4a400597b8"
dependencies = [
"axum-core 0.5.6",
"base64",
"base64 0.22.1",
"bytes",
"form_urlencoded",
"futures-util",
@ -1050,6 +1075,12 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6107fe1be6682a68940da878d9e9f5e90ca5745b3dec9fd1bb393c8777d4f581"
[[package]]
name = "base64"
version = "0.21.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567"
[[package]]
name = "base64"
version = "0.22.1"
@ -1067,6 +1098,7 @@ name = "bedrock_client"
version = "0.1.0"
dependencies = [
"anyhow",
"common",
"futures",
"log",
"logos-blockchain-chain-broadcast-service",
@ -1173,6 +1205,80 @@ dependencies = [
"generic-array 0.14.7",
]
[[package]]
name = "bollard"
version = "0.20.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "227aa051deec8d16bd9c34605e7aaf153f240e35483dd42f6f78903847934738"
dependencies = [
"async-stream",
"base64 0.22.1",
"bitflags 2.10.0",
"bollard-buildkit-proto",
"bollard-stubs",
"bytes",
"futures-core",
"futures-util",
"hex",
"home",
"http 1.4.0",
"http-body-util",
"hyper",
"hyper-named-pipe",
"hyper-rustls",
"hyper-util",
"hyperlocal",
"log",
"num",
"pin-project-lite",
"rand 0.9.2",
"rustls",
"rustls-native-certs",
"rustls-pki-types",
"serde",
"serde_derive",
"serde_json",
"serde_urlencoded",
"thiserror 2.0.17",
"time",
"tokio",
"tokio-stream",
"tokio-util",
"tonic",
"tower-service",
"url",
"winapi",
]
[[package]]
name = "bollard-buildkit-proto"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "85a885520bf6249ab931a764ffdb87b0ceef48e6e7d807cfdb21b751e086e1ad"
dependencies = [
"prost 0.14.3",
"prost-types",
"tonic",
"tonic-prost",
"ureq",
]
[[package]]
name = "bollard-stubs"
version = "1.52.1-rc.29.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0f0a8ca8799131c1837d1282c3f81f31e76ceb0ce426e04a7fe1ccee3287c066"
dependencies = [
"base64 0.22.1",
"bollard-buildkit-proto",
"bytes",
"prost 0.14.3",
"serde",
"serde_json",
"serde_repr",
"time",
]
[[package]]
name = "bonsai-sdk"
version = "1.4.1"
@ -1512,7 +1618,7 @@ name = "common"
version = "0.1.0"
dependencies = [
"anyhow",
"base64",
"base64 0.22.1",
"borsh",
"hex",
"log",
@ -1522,6 +1628,7 @@ dependencies = [
"reqwest",
"serde",
"serde_json",
"serde_with",
"sha2",
"thiserror 2.0.17",
"url",
@ -1927,7 +2034,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8d162beedaa69905488a8da94f5ac3edb4dd4788b732fadb7bd120b2625c1976"
dependencies = [
"data-encoding",
"syn 1.0.109",
"syn 2.0.111",
]
[[package]]
@ -2084,12 +2191,35 @@ dependencies = [
"syn 2.0.111",
]
[[package]]
name = "docker-compose-types"
version = "0.22.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7edb75a85449fd9c34d9fb3376c6208ec4115d2ca43b965175a52d71349ecab8"
dependencies = [
"derive_builder",
"indexmap 2.12.1",
"serde",
"serde_yaml",
]
[[package]]
name = "docker-generate"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ccf673e0848ef09fa4aeeba78e681cf651c0c7d35f76ee38cec8e55bc32fa111"
[[package]]
name = "docker_credential"
version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1d89dfcba45b4afad7450a99b39e751590463e45c04728cf555d36bb66940de8"
dependencies = [
"base64 0.21.7",
"serde",
"serde_json",
]
[[package]]
name = "downcast-rs"
version = "1.2.1"
@ -2296,6 +2426,16 @@ dependencies = [
"windows-sys 0.61.2",
]
[[package]]
name = "etcetera"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "de48cc4d1c1d97a20fd819def54b890cadde72ed3ad0c614822a0a433361be96"
dependencies = [
"cfg-if",
"windows-sys 0.61.2",
]
[[package]]
name = "event-listener"
version = "5.4.1"
@ -2367,6 +2507,17 @@ version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be"
[[package]]
name = "ferroid"
version = "0.8.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bb330bbd4cb7a5b9f559427f06f98a4f853a137c8298f3bd3f8ca57663e21986"
dependencies = [
"portable-atomic",
"rand 0.9.2",
"web-time",
]
[[package]]
name = "ff"
version = "0.13.1"
@ -2383,6 +2534,17 @@ version = "0.2.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d"
[[package]]
name = "filetime"
version = "0.2.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f98844151eee8917efc50bd9e8318cb963ae8b297431495d3f758616ea5c57db"
dependencies = [
"cfg-if",
"libc",
"libredox",
]
[[package]]
name = "find-msvc-tools"
version = "0.1.5"
@ -2857,6 +3019,15 @@ version = "1.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e89e8d20b3799fa526152a5301a771eaaad80857f83e01b23216ceaafb2d9280"
[[package]]
name = "home"
version = "0.5.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cc627f471c528ff0c4a49e1d5e60450c8f6461dd6d10ba9dcd3a61d3dff7728d"
dependencies = [
"windows-sys 0.61.2",
]
[[package]]
name = "html-escape"
version = "0.2.13"
@ -2973,6 +3144,21 @@ dependencies = [
"want",
]
[[package]]
name = "hyper-named-pipe"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "73b7d8abf35697b81a825e386fc151e0d503e8cb5fcb93cc8669c376dfd6f278"
dependencies = [
"hex",
"hyper",
"hyper-util",
"pin-project-lite",
"tokio",
"tower-service",
"winapi",
]
[[package]]
name = "hyper-rustls"
version = "0.27.7"
@ -2991,6 +3177,19 @@ dependencies = [
"webpki-roots",
]
[[package]]
name = "hyper-timeout"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0"
dependencies = [
"hyper",
"hyper-util",
"pin-project-lite",
"tokio",
"tower-service",
]
[[package]]
name = "hyper-tls"
version = "0.6.0"
@ -3013,7 +3212,7 @@ version = "0.1.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "727805d60e7938b76b826a6ef209eb70eaa1812794f9424d4a4e2d740662df5f"
dependencies = [
"base64",
"base64 0.22.1",
"bytes",
"futures-channel",
"futures-core",
@ -3033,6 +3232,21 @@ dependencies = [
"windows-registry",
]
[[package]]
name = "hyperlocal"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "986c5ce3b994526b3cd75578e62554abd09f0899d6206de48b3e96ab34ccc8c7"
dependencies = [
"hex",
"http-body-util",
"hyper",
"hyper-util",
"pin-project-lite",
"tokio",
"tower-service",
]
[[package]]
name = "iana-time-zone"
version = "0.1.64"
@ -3176,6 +3390,7 @@ name = "indexer_core"
version = "0.1.0"
dependencies = [
"anyhow",
"async-stream",
"bedrock_client",
"borsh",
"common",
@ -3193,13 +3408,17 @@ name = "indexer_service"
version = "0.1.0"
dependencies = [
"anyhow",
"arc-swap",
"async-trait",
"clap",
"env_logger",
"futures",
"indexer_core",
"indexer_service_protocol",
"indexer_service_rpc",
"jsonrpsee",
"log",
"serde_json",
"tokio",
"tokio-util",
]
@ -3208,8 +3427,7 @@ dependencies = [
name = "indexer_service_protocol"
version = "0.1.0"
dependencies = [
"base64",
"borsh",
"base64 0.22.1",
"common",
"nssa",
"nssa_core",
@ -3277,15 +3495,14 @@ dependencies = [
name = "integration_tests"
version = "0.1.0"
dependencies = [
"actix-web",
"anyhow",
"base64",
"base64 0.22.1",
"borsh",
"common",
"env_logger",
"futures",
"hex",
"indexer_core",
"indexer_service",
"key_protocol",
"log",
"nssa",
@ -3294,6 +3511,7 @@ dependencies = [
"sequencer_runner",
"serde_json",
"tempfile",
"testcontainers",
"token_core",
"tokio",
"url",
@ -3473,7 +3691,7 @@ version = "0.26.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf36eb27f8e13fa93dcb50ccb44c417e25b818cfa1a481b5470cd07b19c60b98"
dependencies = [
"base64",
"base64 0.22.1",
"futures-channel",
"futures-util",
"gloo-net",
@ -3526,7 +3744,7 @@ version = "0.26.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "790bedefcec85321e007ff3af84b4e417540d5c87b3c9779b9e247d1bcc3dab8"
dependencies = [
"base64",
"base64 0.22.1",
"http-body",
"hyper",
"hyper-rustls",
@ -3712,7 +3930,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f9569fc37575a5d64c0512145af7630bf651007237ef67a8a77328199d315bb"
dependencies = [
"any_spawner",
"base64",
"base64 0.22.1",
"cfg-if",
"either_of",
"futures",
@ -3914,7 +4132,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dbf1045af93050bf3388d1c138426393fc131f6d9e46a65519da884c033ed730"
dependencies = [
"any_spawner",
"base64",
"base64 0.22.1",
"codee",
"futures",
"hydration_context",
@ -3971,6 +4189,7 @@ checksum = "df15f6eac291ed1cf25865b1ee60399f57e7c227e7f51bdbd4c5270396a9ed50"
dependencies = [
"bitflags 2.10.0",
"libc",
"redox_syscall 0.6.0",
]
[[package]]
@ -4851,7 +5070,6 @@ dependencies = [
name = "nssa_core"
version = "0.1.0"
dependencies = [
"anyhow",
"base58",
"borsh",
"bytemuck",
@ -4860,9 +5078,24 @@ dependencies = [
"risc0-zkvm",
"serde",
"serde_json",
"serde_with",
"thiserror 2.0.17",
]
[[package]]
name = "num"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "35bd024e8b2ff75562e5f34e7f4905839deb4b22955ef5e73d2fea1b9813cb23"
dependencies = [
"num-bigint",
"num-complex",
"num-integer",
"num-iter",
"num-rational",
"num-traits",
]
[[package]]
name = "num-bigint"
version = "0.4.6"
@ -4889,6 +5122,15 @@ dependencies = [
"zeroize",
]
[[package]]
name = "num-complex"
version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "73f88a1307638156682bada9d7604135552957b7818057dcef22705b4d509495"
dependencies = [
"num-traits",
]
[[package]]
name = "num-conv"
version = "0.2.0"
@ -4915,6 +5157,17 @@ dependencies = [
"num-traits",
]
[[package]]
name = "num-rational"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f83d14da390562dca69fc84082e73e548e1ad308d24accdedd2720017cb37824"
dependencies = [
"num-bigint",
"num-integer",
"num-traits",
]
[[package]]
name = "num-traits"
version = "0.2.19"
@ -5117,11 +5370,36 @@ checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1"
dependencies = [
"cfg-if",
"libc",
"redox_syscall",
"redox_syscall 0.5.18",
"smallvec",
"windows-link",
]
[[package]]
name = "parse-display"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "914a1c2265c98e2446911282c6ac86d8524f495792c38c5bd884f80499c7538a"
dependencies = [
"parse-display-derive",
"regex",
"regex-syntax",
]
[[package]]
name = "parse-display-derive"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2ae7800a4c974efd12df917266338e79a7a74415173caf7e70aa0a0707345281"
dependencies = [
"proc-macro2",
"quote",
"regex",
"regex-syntax",
"structmeta",
"syn 2.0.111",
]
[[package]]
name = "paste"
version = "1.0.15"
@ -5222,9 +5500,9 @@ dependencies = [
[[package]]
name = "portable-atomic"
version = "1.11.1"
version = "1.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483"
checksum = "c33a9471896f1c69cecef8d20cbe2f7accd12527ce60845ff44c153bb2a21b49"
[[package]]
name = "postcard"
@ -5388,7 +5666,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2796faa41db3ec313a31f7624d9286acf277b52de526150b7e69f3debf891ee5"
dependencies = [
"bytes",
"prost-derive",
"prost-derive 0.13.5",
]
[[package]]
name = "prost"
version = "0.14.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d2ea70524a2f82d518bce41317d0fae74151505651af45faf1ffbd6fd33f0568"
dependencies = [
"bytes",
"prost-derive 0.14.3",
]
[[package]]
@ -5404,6 +5692,28 @@ dependencies = [
"syn 2.0.111",
]
[[package]]
name = "prost-derive"
version = "0.14.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "27c6023962132f4b30eb4c172c91ce92d933da334c59c23cddee82358ddafb0b"
dependencies = [
"anyhow",
"itertools 0.14.0",
"proc-macro2",
"quote",
"syn 2.0.111",
]
[[package]]
name = "prost-types"
version = "0.14.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8991c4cbdb8bc5b11f0b074ffe286c30e523de90fee5ba8132f1399f23cb3dd7"
dependencies = [
"prost 0.14.3",
]
[[package]]
name = "quanta"
version = "0.12.6"
@ -5651,6 +5961,15 @@ dependencies = [
"bitflags 2.10.0",
]
[[package]]
name = "redox_syscall"
version = "0.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec96166dafa0886eb81fe1c0a388bece180fbef2135f97c1e2cf8302e74b43b5"
dependencies = [
"bitflags 2.10.0",
]
[[package]]
name = "redox_users"
version = "0.5.2"
@ -5723,7 +6042,7 @@ version = "0.12.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3b4c14b2d9afca6a60277086b0cc6a6ae0b568f6f7916c943a8cdc79f8be240f"
dependencies = [
"base64",
"base64 0.22.1",
"bytes",
"encoding_rs",
"futures-channel",
@ -5966,7 +6285,7 @@ dependencies = [
"derive_more 2.1.0",
"hex",
"lazy-regex",
"prost",
"prost 0.13.5",
"risc0-binfmt",
"risc0-build",
"risc0-circuit-keccak",
@ -6417,6 +6736,7 @@ dependencies = [
"chrono",
"common",
"futures",
"jsonrpsee",
"log",
"logos-blockchain-core",
"logos-blockchain-key-management-system-service",
@ -6424,12 +6744,12 @@ dependencies = [
"nssa",
"nssa_core",
"rand 0.8.5",
"reqwest",
"serde",
"serde_json",
"storage",
"tempfile",
"tokio",
"url",
]
[[package]]
@ -6440,7 +6760,7 @@ dependencies = [
"actix-web",
"anyhow",
"base58",
"base64",
"base64 0.22.1",
"borsh",
"common",
"futures",
@ -6466,6 +6786,8 @@ dependencies = [
"clap",
"common",
"env_logger",
"futures",
"indexer_service_rpc",
"log",
"sequencer_core",
"sequencer_rpc",
@ -6557,6 +6879,17 @@ dependencies = [
"thiserror 2.0.17",
]
[[package]]
name = "serde_repr"
version = "0.1.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "175ee3e80ae9982737ca543e96133087cbd9a485eecc3bc4de9c1a37b47ea59c"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.111",
]
[[package]]
name = "serde_spanned"
version = "0.6.9"
@ -6593,7 +6926,7 @@ version = "3.16.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4fa237f2807440d238e0364a218270b98f767a00d3dada77b1c53ae88940e2e7"
dependencies = [
"base64",
"base64 0.22.1",
"chrono",
"hex",
"indexmap 1.9.3",
@ -6618,6 +6951,19 @@ dependencies = [
"syn 2.0.111",
]
[[package]]
name = "serde_yaml"
version = "0.9.33"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a0623d197252096520c6f2a5e1171ee436e5af99a5d7caa2891e55e61950e6d9"
dependencies = [
"indexmap 2.12.1",
"itoa",
"ryu",
"serde",
"unsafe-libyaml",
]
[[package]]
name = "serdect"
version = "0.2.0"
@ -6635,7 +6981,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "353d02fa2886cd8dae0b8da0965289fa8f2ecc7df633d1ce965f62fdf9644d29"
dependencies = [
"axum 0.8.8",
"base64",
"base64 0.22.1",
"bytes",
"const-str 0.7.1",
"const_format",
@ -6807,7 +7153,7 @@ version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2e859df029d160cb88608f5d7df7fb4753fd20fdfb4de5644f3d8b8440841721"
dependencies = [
"base64",
"base64 0.22.1",
"bytes",
"futures",
"http 1.4.0",
@ -6881,6 +7227,29 @@ version = "0.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
[[package]]
name = "structmeta"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2e1575d8d40908d70f6fd05537266b90ae71b15dbbe7a8b7dffa2b759306d329"
dependencies = [
"proc-macro2",
"quote",
"structmeta-derive",
"syn 2.0.111",
]
[[package]]
name = "structmeta-derive"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "152a0b65a590ff6c3da95cabe2353ee04e6167c896b28e3b14478c2636c922fc"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.111",
]
[[package]]
name = "strum"
version = "0.27.2"
@ -7087,6 +7456,39 @@ dependencies = [
"risc0-zkvm",
]
[[package]]
name = "testcontainers"
version = "0.27.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c3fdcea723c64cc08dbc533b3761e345a15bf1222cbe6cb611de09b43f17a168"
dependencies = [
"astral-tokio-tar",
"async-trait",
"bollard",
"bytes",
"docker-compose-types",
"docker_credential",
"either",
"etcetera",
"ferroid",
"futures",
"http 1.4.0",
"itertools 0.14.0",
"log",
"memchr",
"parse-display",
"pin-project-lite",
"serde",
"serde_json",
"serde_with",
"thiserror 2.0.17",
"tokio",
"tokio-stream",
"tokio-util",
"url",
"uuid",
]
[[package]]
name = "thiserror"
version = "1.0.69"
@ -7407,6 +7809,46 @@ version = "1.0.6+spec-1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ab16f14aed21ee8bfd8ec22513f7287cd4a91aa92e44edfe2c17ddd004e92607"
[[package]]
name = "tonic"
version = "0.14.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a286e33f82f8a1ee2df63f4fa35c0becf4a85a0cb03091a15fd7bf0b402dc94a"
dependencies = [
"async-trait",
"axum 0.8.8",
"base64 0.22.1",
"bytes",
"h2 0.4.13",
"http 1.4.0",
"http-body",
"http-body-util",
"hyper",
"hyper-timeout",
"hyper-util",
"percent-encoding",
"pin-project",
"socket2 0.6.1",
"sync_wrapper",
"tokio",
"tokio-stream",
"tower 0.5.2",
"tower-layer",
"tower-service",
"tracing",
]
[[package]]
name = "tonic-prost"
version = "0.14.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d6c55a2d6a14174563de34409c9f92ff981d006f56da9c6ecd40d9d4a31500b0"
dependencies = [
"bytes",
"prost 0.14.3",
"tonic",
]
[[package]]
name = "tower"
version = "0.4.13"
@ -7426,9 +7868,12 @@ checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9"
dependencies = [
"futures-core",
"futures-util",
"indexmap 2.12.1",
"pin-project-lite",
"slab",
"sync_wrapper",
"tokio",
"tokio-util",
"tower-layer",
"tower-service",
"tracing",
@ -7678,6 +8123,12 @@ dependencies = [
"subtle",
]
[[package]]
name = "unsafe-libyaml"
version = "0.2.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861"
[[package]]
name = "unsigned-varint"
version = "0.8.0"
@ -7690,6 +8141,34 @@ version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1"
[[package]]
name = "ureq"
version = "3.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d39cb1dbab692d82a977c0392ffac19e188bd9186a9f32806f0aaa859d75585a"
dependencies = [
"base64 0.22.1",
"log",
"percent-encoding",
"rustls",
"rustls-pki-types",
"ureq-proto",
"utf-8",
"webpki-roots",
]
[[package]]
name = "ureq-proto"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d81f9efa9df032be5934a46a068815a10a042b494b6a58cb0a1a97bb5467ed6f"
dependencies = [
"base64 0.22.1",
"http 1.4.0",
"httparse",
"log",
]
[[package]]
name = "url"
version = "2.5.7"
@ -7779,7 +8258,7 @@ dependencies = [
"anyhow",
"async-stream",
"base58",
"base64",
"base64 0.22.1",
"borsh",
"bytemuck",
"clap",
@ -8336,6 +8815,16 @@ dependencies = [
"zeroize",
]
[[package]]
name = "xattr"
version = "1.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32e45ad4206f6d2479085147f02bc2ef834ac85886624a23575ae137c8aa8156"
dependencies = [
"libc",
"rustix",
]
[[package]]
name = "xxhash-rust"
version = "0.8.15"

View File

@ -20,9 +20,10 @@ members = [
"sequencer_core",
"sequencer_rpc",
"sequencer_runner",
"indexer_service",
"indexer_service/protocol",
"indexer_service/rpc",
"indexer/core",
"indexer/service",
"indexer/service/protocol",
"indexer/service/rpc",
"explorer_service",
"programs/token/core",
"programs/token",
@ -34,7 +35,6 @@ members = [
"examples/program_deployment/methods",
"examples/program_deployment/methods/guest",
"bedrock_client",
"indexer_core",
]
[workspace.dependencies]
@ -47,9 +47,10 @@ key_protocol = { path = "key_protocol" }
sequencer_core = { path = "sequencer_core" }
sequencer_rpc = { path = "sequencer_rpc" }
sequencer_runner = { path = "sequencer_runner" }
indexer_service = { path = "indexer_service" }
indexer_service_protocol = { path = "indexer_service/protocol" }
indexer_service_rpc = { path = "indexer_service/rpc" }
indexer_core = { path = "indexer/core" }
indexer_service = { path = "indexer/service" }
indexer_service_protocol = { path = "indexer/service/protocol" }
indexer_service_rpc = { path = "indexer/service/rpc" }
wallet = { path = "wallet" }
wallet-ffi = { path = "wallet-ffi" }
token_core = { path = "programs/token/core" }
@ -58,7 +59,6 @@ amm_core = { path = "programs/amm/core" }
amm_program = { path = "programs/amm" }
test_program_methods = { path = "test_program_methods" }
bedrock_client = { path = "bedrock_client" }
indexer_core = { path = "indexer_core" }
tokio = { version = "1.28.2", features = [
"net",
@ -75,6 +75,7 @@ openssl = { version = "0.10", features = ["vendored"] }
openssl-probe = { version = "0.1.2" }
serde = { version = "1.0.60", default-features = false, features = ["derive"] }
serde_json = "1.0.81"
serde_with = "3.16.1"
actix = "0.13.0"
actix-cors = "0.6.1"
jsonrpsee = "0.26.0"
@ -106,6 +107,7 @@ itertools = "0.14.0"
url = { version = "2.5.4", features = ["serde"] }
tokio-retry = "0.3.0"
schemars = "1.2.0"
async-stream = "0.3.6"
logos-blockchain-common-http-client = { git = "https://github.com/logos-blockchain/logos-blockchain.git" }
logos-blockchain-key-management-system-service = { git = "https://github.com/logos-blockchain/logos-blockchain.git" }

View File

@ -43,7 +43,7 @@ To our knowledge, this design is unique to LEZ. Other privacy-focused programmab
- Execution is handled fully on-chain without ZKPs.
- Alice's and Charlie's public balances are updated.
### Key points:
- The same token program is used in every execution.
- The only difference is execution mode: public execution updates visible state on-chain, while private execution relies on ZKPs.
@ -127,6 +127,9 @@ RUST_LOG=info RISC0_DEV_MODE=1 cargo run $(pwd)/configs/debug all
# Run the sequencer and node
## Running Manually
The sequencer and node can be run locally:
1. On one terminal go to the `logos-blockchain/logos-blockchain` repo and run a local logos blockchain node:
@ -138,10 +141,22 @@ The sequencer and node can be run locally:
- `./target/debug/logos-blockchain-node nodes/node/config-one-node.yaml`
2. On another terminal go to the `logos-blockchain/lssa` repo and run indexer service:
- `git checkout schouhy/full-bedrock-integration`
- `RUST_LOG=info cargo run --release -p indexer_service $(pwd)/integration_tests/configs/indexer/indexer_config.json`
- `RUST_LOG=info cargo run --release -p indexer_service indexer/service/configs/indexer_config.json`
3. On another terminal go to the `logos-blockchain/lssa` repo and run the sequencer:
- `git checkout schouhy/full-bedrock-integration`
- `RUST_LOG=info RISC0_DEV_MODE=1 cargo run --release -p sequencer_runner sequencer_runner/configs/debug`
## Running with Docker
You can run the whole setup with Docker:
```bash
docker compose up
```
With that you can send transactions from local wallet to the Sequencer running inside Docker using `wallet/configs/debug` as well as exploring blocks by opening `http://localhost:8080`.
## Caution for local image builds
If you're going to build the sequencer image locally, adjust the default Docker settings and set `defaultKeepStorage` to at least `25GB` so that Docker can keep layers properly cached.

Binary file not shown.

Binary file not shown.

Binary file not shown.

32
bedrock/README.md Normal file
View File

@ -0,0 +1,32 @@
# Bedrock Configuration Files for All-in-One run and Integration Tests
## How to update
- `docker-compose.yml` file.
Compare with `https://github.com/logos-blockchain/logos-blockchain/blob/master/compose.static.yml` and update the file accordingly; do not bring in unneeded services such as Grafana.
Replace `sha` hash with the latest `testnet` tag hash.
- `scripts` folder.
```bash
curl https://raw.githubusercontent.com/logos-blockchain/logos-blockchain/master/testnet/scripts/run_cfgsync.sh > scripts/run_cfgsync.sh
curl https://raw.githubusercontent.com/logos-blockchain/logos-blockchain/master/testnet/scripts/run_logos_blockchain_node.sh > scripts/run_logos_blockchain_node.sh
chmod +x scripts/*
```
Then in `scripts/run_logos_blockchain_node.sh` update `cfgsync-client` to `logos-blockchain-cfgsync-client` and in `scripts/run_cfgsync.sh` update `cfgsync-server` to `logos-blockchain-cfgsync-server` if it hasn't been fixed already, see <https://github.com/logos-blockchain/logos-blockchain/pull/2092>.
- `cfgsync.yaml` file.
```bash
curl -O https://raw.githubusercontent.com/logos-blockchain/logos-blockchain/master/testnet/cfgsync.yaml
```
Set `logger`, `tracing` and `metrics` to `None`
- `kzgrs_test_params` file.
```bash
curl -O https://raw.githubusercontent.com/logos-blockchain/logos-blockchain/master/tests/kzgrs/kzgrs_test_params
```

12
bedrock/cfgsync.yaml Normal file
View File

@ -0,0 +1,12 @@
port: 4400
n_hosts: 4
timeout: 10
# Tracing
tracing_settings:
logger: Stdout
tracing: None
filter: None
metrics: None
console: None
level: DEBUG

View File

@ -0,0 +1,47 @@
services:
cfgsync:
image: ghcr.io/logos-blockchain/logos-blockchain@sha256:000982e751dfd346ca5346b8025c685fc3abc585079c59cde3bde7fd63100657
volumes:
- ./scripts:/etc/logos-blockchain/scripts
- ./cfgsync.yaml:/etc/logos-blockchain/cfgsync.yaml:z
entrypoint: /etc/logos-blockchain/scripts/run_cfgsync.sh
logos-blockchain-node-0:
image: ghcr.io/logos-blockchain/logos-blockchain@sha256:000982e751dfd346ca5346b8025c685fc3abc585079c59cde3bde7fd63100657
ports:
# Map 0 port so that multiple instances can run on the same host
- "0:18080/tcp"
volumes:
- ./scripts:/etc/logos-blockchain/scripts
- ./kzgrs_test_params:/kzgrs_test_params:z
depends_on:
- cfgsync
entrypoint: /etc/logos-blockchain/scripts/run_logos_blockchain_node.sh
logos-blockchain-node-1:
image: ghcr.io/logos-blockchain/logos-blockchain@sha256:000982e751dfd346ca5346b8025c685fc3abc585079c59cde3bde7fd63100657
volumes:
- ./scripts:/etc/logos-blockchain/scripts
- ./kzgrs_test_params:/kzgrs_test_params:z
depends_on:
- cfgsync
entrypoint: /etc/logos-blockchain/scripts/run_logos_blockchain_node.sh
logos-blockchain-node-2:
image: ghcr.io/logos-blockchain/logos-blockchain@sha256:000982e751dfd346ca5346b8025c685fc3abc585079c59cde3bde7fd63100657
volumes:
- ./scripts:/etc/logos-blockchain/scripts
- ./kzgrs_test_params:/kzgrs_test_params:z
depends_on:
- cfgsync
entrypoint: /etc/logos-blockchain/scripts/run_logos_blockchain_node.sh
logos-blockchain-node-3:
image: ghcr.io/logos-blockchain/logos-blockchain@sha256:000982e751dfd346ca5346b8025c685fc3abc585079c59cde3bde7fd63100657
volumes:
- ./scripts:/etc/logos-blockchain/scripts
- ./kzgrs_test_params:/kzgrs_test_params:z
depends_on:
- cfgsync
entrypoint: /etc/logos-blockchain/scripts/run_logos_blockchain_node.sh

BIN
bedrock/kzgrs_test_params Normal file

Binary file not shown.

5
bedrock/scripts/run_cfgsync.sh Executable file
View File

@ -0,0 +1,5 @@
#!/bin/sh
set -e
exec /usr/bin/logos-blockchain-cfgsync-server /etc/logos-blockchain/cfgsync.yaml

View File

@ -0,0 +1,13 @@
#!/bin/sh
set -e
export CFG_FILE_PATH="/config.yaml" \
CFG_SERVER_ADDR="http://cfgsync:4400" \
CFG_HOST_IP=$(hostname -i) \
CFG_HOST_IDENTIFIER="validator-$(hostname -i)" \
LOG_LEVEL="INFO" \
POL_PROOF_DEV_MODE=true
/usr/bin/logos-blockchain-cfgsync-client && \
exec /usr/bin/logos-blockchain-node /config.yaml

View File

@ -5,6 +5,8 @@ edition = "2024"
license = { workspace = true }
[dependencies]
common.workspace = true
reqwest.workspace = true
anyhow.workspace = true
tokio-retry.workspace = true

View File

@ -1,20 +1,32 @@
use anyhow::Result;
use std::time::Duration;
use anyhow::{Context as _, Result};
use common::config::BasicAuth;
use futures::{Stream, TryFutureExt};
use log::warn;
use log::{info, warn};
pub use logos_blockchain_chain_broadcast_service::BlockInfo;
pub use logos_blockchain_common_http_client::{BasicAuthCredentials, CommonHttpClient, Error};
pub use logos_blockchain_common_http_client::{CommonHttpClient, Error};
pub use logos_blockchain_core::{block::Block, header::HeaderId, mantle::SignedMantleTx};
use reqwest::{Client, Url};
use serde::{Deserialize, Serialize};
use tokio_retry::Retry;
/// Fibonacci backoff retry strategy configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
#[derive(Debug, Copy, Clone, Serialize, Deserialize)]
pub struct BackoffConfig {
pub start_delay_millis: u64,
pub max_retries: usize,
}
impl Default for BackoffConfig {
fn default() -> Self {
Self {
start_delay_millis: 100,
max_retries: 5,
}
}
}
// Simple wrapper
// maybe extend in the future for our purposes
// `Clone` is cheap because `CommonHttpClient` is internally reference counted (`Arc`).
@ -22,26 +34,37 @@ pub struct BackoffConfig {
pub struct BedrockClient {
http_client: CommonHttpClient,
node_url: Url,
backoff: BackoffConfig,
}
impl BedrockClient {
pub fn new(auth: Option<BasicAuthCredentials>, node_url: Url) -> Result<Self> {
pub fn new(backoff: BackoffConfig, node_url: Url, auth: Option<BasicAuth>) -> Result<Self> {
info!("Creating Bedrock client with node URL {node_url}");
let client = Client::builder()
//Add more fields if needed
.timeout(std::time::Duration::from_secs(60))
.build()?;
.build()
.context("Failed to build HTTP client")?;
let auth = auth.map(|a| {
logos_blockchain_common_http_client::BasicAuthCredentials::new(a.username, a.password)
});
let http_client = CommonHttpClient::new_with_client(client, auth);
Ok(Self {
http_client,
node_url,
backoff,
})
}
pub async fn post_transaction(&self, tx: SignedMantleTx) -> Result<(), Error> {
self.http_client
.post_transaction(self.node_url.clone(), tx)
.await
Retry::spawn(self.backoff_strategy(), || {
self.http_client
.post_transaction(self.node_url.clone(), tx.clone())
.inspect_err(|err| warn!("Transaction posting failed with error: {err:#}"))
})
.await
}
pub async fn get_lib_stream(&self) -> Result<impl Stream<Item = BlockInfo>, Error> {
@ -51,17 +74,17 @@ impl BedrockClient {
pub async fn get_block_by_id(
&self,
header_id: HeaderId,
backoff: &BackoffConfig,
) -> Result<Option<Block<SignedMantleTx>>, Error> {
let strategy =
tokio_retry::strategy::FibonacciBackoff::from_millis(backoff.start_delay_millis)
.take(backoff.max_retries);
Retry::spawn(strategy, || {
Retry::spawn(self.backoff_strategy(), || {
self.http_client
.get_block_by_id(self.node_url.clone(), header_id)
.inspect_err(|err| warn!("Block fetching failed with err: {err:#?}"))
.inspect_err(|err| warn!("Block fetching failed with error: {err:#}"))
})
.await
}
fn backoff_strategy(&self) -> impl Iterator<Item = Duration> {
tokio_retry::strategy::FibonacciBackoff::from_millis(self.backoff.start_delay_millis)
.take(self.backoff.max_retries)
}
}

View File

@ -1,84 +0,0 @@
#!/usr/bin/env bash
set -e
# Base directory for deployment
LSSA_DIR="/home/arjentix/test_deploy/lssa"
# Expect GITHUB_ACTOR to be passed as first argument or environment variable
GITHUB_ACTOR="${1:-${GITHUB_ACTOR:-unknown}}"
# Function to log messages with timestamp
log_deploy() {
echo "[$(date '+%Y-%m-%d %H:%M:%S %Z')] $1" >> "${LSSA_DIR}/deploy.log"
}
# Error handler
handle_error() {
echo "✗ Deployment failed by: ${GITHUB_ACTOR}"
log_deploy "Deployment failed by: ${GITHUB_ACTOR}"
exit 1
}
find_sequencer_runner_pids() {
pgrep -f "sequencer_runner" | grep -v $$
}
# Set trap to catch any errors
trap 'handle_error' ERR
# Log deployment info
log_deploy "Deployment initiated by: ${GITHUB_ACTOR}"
# Navigate to code directory
if [ ! -d "${LSSA_DIR}/code" ]; then
mkdir -p "${LSSA_DIR}/code"
fi
cd "${LSSA_DIR}/code"
# Stop current sequencer if running
if find_sequencer_runner_pids > /dev/null; then
echo "Stopping current sequencer..."
find_sequencer_runner_pids | xargs -r kill -SIGINT || true
sleep 2
# Force kill if still running
find_sequencer_runner_pids | grep -v $$ | xargs -r kill -9 || true
fi
# Clone or update repository
if [ -d ".git" ]; then
echo "Updating existing repository..."
git fetch origin
git checkout main
git reset --hard origin/main
else
echo "Cloning repository..."
git clone https://github.com/logos-blockchain/lssa.git .
git checkout main
fi
# Build sequencer_runner and wallet in release mode
echo "Building sequencer_runner"
# That could be just `cargo build --release --bin sequencer_runner --bin wallet`
# but we have `no_docker` feature bug, see issue #179
cd sequencer_runner
cargo build --release
cd ../wallet
cargo build --release
cd ..
# Run sequencer_runner with config
echo "Starting sequencer_runner..."
export RUST_LOG=info
nohup ./target/release/sequencer_runner "${LSSA_DIR}/configs/sequencer" > "${LSSA_DIR}/sequencer.log" 2>&1 &
# Wait 5 seconds and check health using wallet
sleep 5
if ./target/release/wallet check-health; then
echo "✓ Sequencer started successfully and is healthy"
log_deploy "Deployment completed successfully by: ${GITHUB_ACTOR}"
exit 0
else
echo "✗ Sequencer failed health check"
tail -n 50 "${LSSA_DIR}/sequencer.log"
handle_error
fi

View File

@ -12,6 +12,7 @@ anyhow.workspace = true
thiserror.workspace = true
serde_json.workspace = true
serde.workspace = true
serde_with.workspace = true
reqwest.workspace = true
sha2.workspace = true
log.workspace = true

View File

@ -1,9 +1,8 @@
use borsh::{BorshDeserialize, BorshSerialize};
use sha2::{Digest, Sha256, digest::FixedOutput};
use crate::transaction::EncodedTransaction;
use crate::{HashType, transaction::NSSATransaction};
pub type HashType = [u8; 32];
pub type MantleMsgId = [u8; 32];
#[derive(Debug, Clone)]
@ -16,11 +15,11 @@ impl OwnHasher {
let mut hasher = Sha256::new();
hasher.update(data);
<HashType>::from(hasher.finalize_fixed())
HashType(<[u8; 32]>::from(hasher.finalize_fixed()))
}
}
pub type BlockHash = [u8; 32];
pub type BlockHash = HashType;
pub type BlockId = u64;
pub type TimeStamp = u64;
@ -35,7 +34,7 @@ pub struct BlockHeader {
#[derive(Debug, Clone, BorshSerialize, BorshDeserialize)]
pub struct BlockBody {
pub transactions: Vec<EncodedTransaction>,
pub transactions: Vec<NSSATransaction>,
}
#[derive(Debug, Clone, BorshSerialize, BorshDeserialize)]
@ -58,7 +57,7 @@ pub struct HashableBlockData {
pub block_id: BlockId,
pub prev_block_hash: BlockHash,
pub timestamp: TimeStamp,
pub transactions: Vec<EncodedTransaction>,
pub transactions: Vec<NSSATransaction>,
}
impl HashableBlockData {
@ -104,12 +103,12 @@ impl From<Block> for HashableBlockData {
#[cfg(test)]
mod tests {
use crate::{block::HashableBlockData, test_utils};
use crate::{HashType, block::HashableBlockData, test_utils};
#[test]
fn test_encoding_roundtrip() {
let transactions = vec![test_utils::produce_dummy_empty_transaction()];
let block = test_utils::produce_dummy_block(1, Some([1; 32]), transactions);
let block = test_utils::produce_dummy_block(1, Some(HashType([1; 32])), transactions);
let hashable = HashableBlockData::from(block);
let bytes = borsh::to_vec(&hashable).unwrap();
let block_from_bytes = borsh::from_slice::<HashableBlockData>(&bytes).unwrap();

View File

@ -1,6 +0,0 @@
use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum Message {
L2BlockFinalized { l2_block_height: u64 },
}

View File

@ -1 +0,0 @@
pub mod indexer;

55
common/src/config.rs Normal file
View File

@ -0,0 +1,55 @@
//! Common configuration structures and utilities.
use std::str::FromStr;
use logos_blockchain_common_http_client::BasicAuthCredentials;
use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BasicAuth {
pub username: String,
pub password: Option<String>,
}
impl std::fmt::Display for BasicAuth {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.username)?;
if let Some(password) = &self.password {
write!(f, ":{password}")?;
}
Ok(())
}
}
impl FromStr for BasicAuth {
type Err = anyhow::Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let parse = || {
let mut parts = s.splitn(2, ':');
let username = parts.next()?;
let password = parts.next().filter(|p| !p.is_empty());
if parts.next().is_some() {
return None;
}
Some((username, password))
};
let (username, password) = parse().ok_or_else(|| {
anyhow::anyhow!("Invalid auth format. Expected 'user' or 'user:password'")
})?;
Ok(Self {
username: username.to_string(),
password: password.map(|p| p.to_string()),
})
}
}
impl From<BasicAuth> for BasicAuthCredentials {
fn from(value: BasicAuth) -> Self {
BasicAuthCredentials::new(value.username, value.password)
}
}

View File

@ -1,5 +1,10 @@
use std::{fmt::Display, str::FromStr};
use borsh::{BorshDeserialize, BorshSerialize};
use serde_with::{DeserializeFromStr, SerializeDisplay};
pub mod block;
pub mod communication;
pub mod config;
pub mod error;
pub mod rpc_primitives;
pub mod sequencer_client;
@ -8,6 +13,81 @@ pub mod transaction;
// Module for tests utility functions
// TODO: Compile only for tests
pub mod test_utils;
pub type HashType = [u8; 32];
pub const PINATA_BASE58: &str = "EfQhKQAkX2FJiwNii2WFQsGndjvF1Mzd7RuVe7QdPLw7";
#[derive(
Debug,
Default,
Copy,
Clone,
PartialEq,
Eq,
Hash,
SerializeDisplay,
DeserializeFromStr,
BorshSerialize,
BorshDeserialize,
)]
pub struct HashType(pub [u8; 32]);
impl Display for HashType {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", hex::encode(self.0))
}
}
impl FromStr for HashType {
type Err = hex::FromHexError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let mut bytes = [0u8; 32];
hex::decode_to_slice(s, &mut bytes)?;
Ok(HashType(bytes))
}
}
impl AsRef<[u8]> for HashType {
fn as_ref(&self) -> &[u8] {
&self.0
}
}
impl From<HashType> for [u8; 32] {
fn from(hash: HashType) -> Self {
hash.0
}
}
impl From<[u8; 32]> for HashType {
fn from(bytes: [u8; 32]) -> Self {
HashType(bytes)
}
}
impl TryFrom<Vec<u8>> for HashType {
type Error = <[u8; 32] as TryFrom<Vec<u8>>>::Error;
fn try_from(value: Vec<u8>) -> Result<Self, Self::Error> {
Ok(HashType(value.try_into()?))
}
}
impl From<HashType> for Vec<u8> {
fn from(hash: HashType) -> Self {
hash.0.to_vec()
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn serialization_roundtrip() {
let original = HashType([1u8; 32]);
let serialized = original.to_string();
let deserialized = HashType::from_str(&serialized).unwrap();
assert_eq!(original, deserialized);
}
}

View File

@ -1,5 +1,6 @@
use std::collections::HashMap;
use nssa::AccountId;
use nssa_core::program::ProgramId;
use serde::{Deserialize, Serialize};
use serde_json::Value;
@ -8,7 +9,7 @@ use super::{
errors::RpcParseError,
parser::{RpcRequest, parse_params},
};
use crate::parse_request;
use crate::{HashType, parse_request};
#[derive(Serialize, Deserialize, Debug)]
pub struct HelloRequest {}
@ -47,22 +48,22 @@ pub struct GetInitialTestnetAccountsRequest {}
#[derive(Serialize, Deserialize, Debug)]
pub struct GetAccountBalanceRequest {
pub account_id: String,
pub account_id: AccountId,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct GetTransactionByHashRequest {
pub hash: String,
pub hash: HashType,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct GetAccountsNoncesRequest {
pub account_ids: Vec<String>,
pub account_ids: Vec<AccountId>,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct GetAccountRequest {
pub account_id: String,
pub account_id: AccountId,
}
#[derive(Serialize, Deserialize, Debug)]
@ -73,11 +74,6 @@ pub struct GetProofForCommitmentRequest {
#[derive(Serialize, Deserialize, Debug)]
pub struct GetProgramIdsRequest {}
#[derive(Serialize, Deserialize, Debug)]
pub struct PostIndexerMessageRequest {
pub message: crate::communication::indexer::Message,
}
parse_request!(HelloRequest);
parse_request!(RegisterAccountRequest);
parse_request!(SendTxRequest);
@ -92,7 +88,6 @@ parse_request!(GetAccountsNoncesRequest);
parse_request!(GetProofForCommitmentRequest);
parse_request!(GetAccountRequest);
parse_request!(GetProgramIdsRequest);
parse_request!(PostIndexerMessageRequest);
#[derive(Serialize, Deserialize, Debug)]
pub struct HelloResponse {
@ -107,7 +102,7 @@ pub struct RegisterAccountResponse {
#[derive(Serialize, Deserialize, Debug)]
pub struct SendTxResponse {
pub status: String,
pub tx_hash: String,
pub tx_hash: HashType,
}
#[derive(Serialize, Deserialize, Debug)]
@ -222,8 +217,3 @@ pub struct GetInitialTestnetAccountsResponse {
pub account_id: String,
pub balance: u64,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct PostIndexerMessageResponse {
pub status: String,
}

View File

@ -1,10 +1,10 @@
use std::{collections::HashMap, ops::RangeInclusive, str::FromStr};
use std::{collections::HashMap, ops::RangeInclusive};
use anyhow::Result;
use logos_blockchain_common_http_client::BasicAuthCredentials;
use nssa::AccountId;
use nssa_core::program::ProgramId;
use reqwest::Client;
use serde::{Deserialize, Serialize};
use serde::Deserialize;
use serde_json::Value;
use url::Url;
@ -13,6 +13,8 @@ use super::rpc_primitives::requests::{
GetGenesisIdRequest, GetGenesisIdResponse, GetInitialTestnetAccountsRequest,
};
use crate::{
HashType,
config::BasicAuth,
error::{SequencerClientError, SequencerRpcError},
rpc_primitives::{
self,
@ -22,62 +24,12 @@ use crate::{
GetInitialTestnetAccountsResponse, GetLastBlockRequest, GetLastBlockResponse,
GetProgramIdsRequest, GetProgramIdsResponse, GetProofForCommitmentRequest,
GetProofForCommitmentResponse, GetTransactionByHashRequest,
GetTransactionByHashResponse, PostIndexerMessageRequest, PostIndexerMessageResponse,
SendTxRequest, SendTxResponse,
GetTransactionByHashResponse, SendTxRequest, SendTxResponse,
},
},
transaction::{EncodedTransaction, NSSATransaction},
transaction::NSSATransaction,
};
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BasicAuth {
pub username: String,
pub password: Option<String>,
}
impl std::fmt::Display for BasicAuth {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.username)?;
if let Some(password) = &self.password {
write!(f, ":{password}")?;
}
Ok(())
}
}
impl FromStr for BasicAuth {
type Err = anyhow::Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let parse = || {
let mut parts = s.splitn(2, ':');
let username = parts.next()?;
let password = parts.next().filter(|p| !p.is_empty());
if parts.next().is_some() {
return None;
}
Some((username, password))
};
let (username, password) = parse().ok_or_else(|| {
anyhow::anyhow!("Invalid auth format. Expected 'user' or 'user:password'")
})?;
Ok(Self {
username: username.to_string(),
password: password.map(|p| p.to_string()),
})
}
}
impl From<BasicAuth> for BasicAuthCredentials {
fn from(value: BasicAuth) -> Self {
BasicAuthCredentials::new(value.username, value.password)
}
}
#[derive(Clone)]
pub struct SequencerClient {
pub client: reqwest::Client,
@ -196,7 +148,7 @@ impl SequencerClient {
/// bytes.
pub async fn get_account_balance(
&self,
account_id: String,
account_id: AccountId,
) -> Result<GetAccountBalanceResponse, SequencerClientError> {
let block_req = GetAccountBalanceRequest { account_id };
@ -215,7 +167,7 @@ impl SequencerClient {
/// 32 bytes.
pub async fn get_accounts_nonces(
&self,
account_ids: Vec<String>,
account_ids: Vec<AccountId>,
) -> Result<GetAccountsNoncesResponse, SequencerClientError> {
let block_req = GetAccountsNoncesRequest { account_ids };
@ -232,7 +184,7 @@ impl SequencerClient {
pub async fn get_account(
&self,
account_id: String,
account_id: AccountId,
) -> Result<GetAccountResponse, SequencerClientError> {
let block_req = GetAccountRequest { account_id };
@ -248,7 +200,7 @@ impl SequencerClient {
/// Get transaction details for `hash`.
pub async fn get_transaction_by_hash(
&self,
hash: String,
hash: HashType,
) -> Result<GetTransactionByHashResponse, SequencerClientError> {
let block_req = GetTransactionByHashRequest { hash };
@ -268,7 +220,7 @@ impl SequencerClient {
&self,
transaction: nssa::PublicTransaction,
) -> Result<SendTxResponse, SequencerClientError> {
let transaction = EncodedTransaction::from(NSSATransaction::Public(transaction));
let transaction = NSSATransaction::Public(transaction);
let tx_req = SendTxRequest {
transaction: borsh::to_vec(&transaction).unwrap(),
@ -288,7 +240,7 @@ impl SequencerClient {
&self,
transaction: nssa::PrivacyPreservingTransaction,
) -> Result<SendTxResponse, SequencerClientError> {
let transaction = EncodedTransaction::from(NSSATransaction::PrivacyPreserving(transaction));
let transaction = NSSATransaction::PrivacyPreserving(transaction);
let tx_req = SendTxRequest {
transaction: borsh::to_vec(&transaction).unwrap(),
@ -362,7 +314,7 @@ impl SequencerClient {
&self,
transaction: nssa::ProgramDeploymentTransaction,
) -> Result<SendTxResponse, SequencerClientError> {
let transaction = EncodedTransaction::from(NSSATransaction::ProgramDeployment(transaction));
let transaction = NSSATransaction::ProgramDeployment(transaction);
let tx_req = SendTxRequest {
transaction: borsh::to_vec(&transaction).unwrap(),
@ -396,23 +348,4 @@ impl SequencerClient {
Ok(resp_deser)
}
/// Post indexer into sequencer
pub async fn post_indexer_message(
&self,
message: crate::communication::indexer::Message,
) -> Result<PostIndexerMessageResponse, SequencerClientError> {
let last_req = PostIndexerMessageRequest { message };
let req = serde_json::to_value(last_req).unwrap();
let resp = self
.call_method_with_payload("post_indexer_message", req)
.await
.unwrap();
let resp_deser = serde_json::from_value(resp).unwrap();
Ok(resp_deser)
}
}

View File

@ -1,6 +1,9 @@
use nssa::AccountId;
use crate::{
HashType,
block::{Block, HashableBlockData},
transaction::{EncodedTransaction, NSSATransaction},
transaction::NSSATransaction,
};
// Helpers
@ -20,8 +23,8 @@ pub fn sequencer_sign_key_for_testing() -> nssa::PrivateKey {
/// `transactions` - vector of `EncodedTransaction` objects
pub fn produce_dummy_block(
id: u64,
prev_hash: Option<[u8; 32]>,
transactions: Vec<EncodedTransaction>,
prev_hash: Option<HashType>,
transactions: Vec<NSSATransaction>,
) -> Block {
let block_data = HashableBlockData {
block_id: id,
@ -33,7 +36,7 @@ pub fn produce_dummy_block(
block_data.into_pending_block(&sequencer_sign_key_for_testing(), [0; 32])
}
pub fn produce_dummy_empty_transaction() -> EncodedTransaction {
pub fn produce_dummy_empty_transaction() -> NSSATransaction {
let program_id = nssa::program::Program::authenticated_transfer_program().id();
let account_ids = vec![];
let nonces = vec![];
@ -50,17 +53,17 @@ pub fn produce_dummy_empty_transaction() -> EncodedTransaction {
let nssa_tx = nssa::PublicTransaction::new(message, witness_set);
EncodedTransaction::from(NSSATransaction::Public(nssa_tx))
NSSATransaction::Public(nssa_tx)
}
pub fn create_transaction_native_token_transfer(
from: [u8; 32],
from: AccountId,
nonce: u128,
to: [u8; 32],
to: AccountId,
balance_to_move: u128,
signing_key: nssa::PrivateKey,
) -> EncodedTransaction {
let account_ids = vec![nssa::AccountId::new(from), nssa::AccountId::new(to)];
) -> NSSATransaction {
let account_ids = vec![from, to];
let nonces = vec![nonce];
let program_id = nssa::program::Program::authenticated_transfer_program().id();
let message = nssa::public_transaction::Message::try_new(
@ -74,5 +77,5 @@ pub fn create_transaction_native_token_transfer(
let nssa_tx = nssa::PublicTransaction::new(message, witness_set);
EncodedTransaction::from(NSSATransaction::Public(nssa_tx))
NSSATransaction::Public(nssa_tx)
}

View File

@ -1,17 +1,25 @@
use borsh::{BorshDeserialize, BorshSerialize};
use log::info;
use serde::{Deserialize, Serialize};
use sha2::{Digest, digest::FixedOutput};
pub type HashType = [u8; 32];
use crate::HashType;
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)]
pub enum NSSATransaction {
Public(nssa::PublicTransaction),
PrivacyPreserving(nssa::PrivacyPreservingTransaction),
ProgramDeployment(nssa::ProgramDeploymentTransaction),
}
impl NSSATransaction {
pub fn hash(&self) -> HashType {
HashType(match self {
NSSATransaction::Public(tx) => tx.hash(),
NSSATransaction::PrivacyPreserving(tx) => tx.hash(),
NSSATransaction::ProgramDeployment(tx) => tx.hash(),
})
}
}
impl From<nssa::PublicTransaction> for NSSATransaction {
fn from(value: nssa::PublicTransaction) -> Self {
Self::Public(value)
@ -38,106 +46,3 @@ pub enum TxKind {
PrivacyPreserving,
ProgramDeployment,
}
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)]
/// General transaction object
pub struct EncodedTransaction {
pub tx_kind: TxKind,
/// Encoded blobs of data
pub encoded_transaction_data: Vec<u8>,
}
impl From<NSSATransaction> for EncodedTransaction {
fn from(value: NSSATransaction) -> Self {
match value {
NSSATransaction::Public(tx) => Self {
tx_kind: TxKind::Public,
encoded_transaction_data: tx.to_bytes(),
},
NSSATransaction::PrivacyPreserving(tx) => Self {
tx_kind: TxKind::PrivacyPreserving,
encoded_transaction_data: tx.to_bytes(),
},
NSSATransaction::ProgramDeployment(tx) => Self {
tx_kind: TxKind::ProgramDeployment,
encoded_transaction_data: tx.to_bytes(),
},
}
}
}
impl TryFrom<&EncodedTransaction> for NSSATransaction {
type Error = nssa::error::NssaError;
fn try_from(value: &EncodedTransaction) -> Result<Self, Self::Error> {
match value.tx_kind {
TxKind::Public => nssa::PublicTransaction::from_bytes(&value.encoded_transaction_data)
.map(|tx| tx.into()),
TxKind::PrivacyPreserving => {
nssa::PrivacyPreservingTransaction::from_bytes(&value.encoded_transaction_data)
.map(|tx| tx.into())
}
TxKind::ProgramDeployment => {
nssa::ProgramDeploymentTransaction::from_bytes(&value.encoded_transaction_data)
.map(|tx| tx.into())
}
}
}
}
impl EncodedTransaction {
/// Computes and returns the SHA-256 hash of the JSON-serialized representation of `self`.
pub fn hash(&self) -> HashType {
let bytes_to_hash = borsh::to_vec(&self).unwrap();
let mut hasher = sha2::Sha256::new();
hasher.update(&bytes_to_hash);
HashType::from(hasher.finalize_fixed())
}
pub fn log(&self) {
info!("Transaction hash is {:?}", hex::encode(self.hash()));
info!("Transaction tx_kind is {:?}", self.tx_kind);
}
}
#[cfg(test)]
mod tests {
use sha2::{Digest, digest::FixedOutput};
use crate::{
HashType,
transaction::{EncodedTransaction, TxKind},
};
fn test_transaction_body() -> EncodedTransaction {
EncodedTransaction {
tx_kind: TxKind::Public,
encoded_transaction_data: vec![1, 2, 3, 4],
}
}
#[test]
fn test_transaction_hash_is_sha256_of_json_bytes() {
let body = test_transaction_body();
let expected_hash = {
let data = borsh::to_vec(&body).unwrap();
let mut hasher = sha2::Sha256::new();
hasher.update(&data);
HashType::from(hasher.finalize_fixed())
};
let hash = body.hash();
assert_eq!(expected_hash, hash);
}
#[test]
fn test_to_bytes_from_bytes() {
let body = test_transaction_body();
let body_bytes = borsh::to_vec(&body).unwrap();
let body_new = borsh::from_slice::<EncodedTransaction>(&body_bytes).unwrap();
assert_eq!(body, body_new);
}
}

View File

@ -0,0 +1,11 @@
{
"resubscribe_interval_millis": 1000,
"bedrock_client_config": {
"addr": "http://logos-blockchain-node-0:18080",
"backoff": {
"start_delay_millis": 100,
"max_retries": 5
}
},
"channel_id": "0101010101010101010101010101010101010101010101010101010101010101"
}

View File

@ -1,12 +1,22 @@
{
"home": "",
"home": "/var/lib/sequencer_runner",
"override_rust_log": null,
"genesis_id": 1,
"is_genesis_random": true,
"max_num_tx_in_block": 20,
"mempool_max_size": 10000,
"block_create_timeout_millis": 10000,
"port": 0,
"retry_pending_blocks_timeout_millis": 7000,
"port": 3040,
"bedrock_config": {
"backoff": {
"start_delay_millis": 100,
"max_retries": 5
},
"channel_id": "0101010101010101010101010101010101010101010101010101010101010101",
"node_url": "http://logos-blockchain-node-0:18080"
},
"indexer_rpc_url": "ws://indexer_service:8779",
"initial_accounts": [
{
"account_id": "BLgCRDXYdQPMMWVHYRFGQZbgeHx9frkipa8GtpG2Syqy",
@ -154,12 +164,5 @@
37,
37,
37
],
"bedrock_config": {
"channel_id": "0101010101010101010101010101010101010101010101010101010101010101",
"node_url": "http://127.0.0.1:8080",
"auth": {
"username": "user"
}
}
}
]
}

View File

@ -0,0 +1,27 @@
# This file is automatically applied on top of docker-compose.yml when running `docker compose` commands.
services:
logos-blockchain-node-0:
ports: !override
- "18080:18080/tcp"
environment:
- RUST_LOG=error
sequencer_runner:
depends_on:
- logos-blockchain-node-0
- indexer_service
volumes: !override
- ./configs/docker-all-in-one/sequencer:/etc/sequencer_runner
indexer_service:
depends_on:
- logos-blockchain-node-0
volumes:
- ./configs/docker-all-in-one/indexer/indexer_config.json:/etc/indexer_service/indexer_config.json
explorer_service:
depends_on:
- indexer_service
environment:
- INDEXER_RPC_URL=http://indexer_service:8779

13
docker-compose.yml Normal file
View File

@ -0,0 +1,13 @@
# All-in-one docker compose configuration.
# It runs all services from this repo and the bedrock nodes in a single docker network.
# This is useful for development and testing purposes.
include:
- path:
bedrock/docker-compose.yml
- path:
sequencer_runner/docker-compose.yml
- path:
indexer/service/docker-compose.yml
- path:
explorer_service/docker-compose.yml

View File

@ -49,7 +49,7 @@ async fn main() {
let signing_key = wallet_core
.storage()
.user_data
.get_pub_account_signing_key(&account_id)
.get_pub_account_signing_key(account_id)
.expect("Input account should be a self owned public account");
// Define the desired greeting in ASCII

View File

@ -1,4 +1,4 @@
use indexer_service_protocol::{Account, AccountId, Block, BlockId, Hash, Transaction};
use indexer_service_protocol::{Account, AccountId, Block, BlockId, HashType, Transaction};
use leptos::prelude::*;
use serde::{Deserialize, Serialize};
@ -7,7 +7,7 @@ use serde::{Deserialize, Serialize};
pub struct SearchResults {
pub blocks: Vec<Block>,
pub transactions: Vec<Transaction>,
pub accounts: Vec<(AccountId, Option<Account>)>,
pub accounts: Vec<(AccountId, Account)>,
}
/// RPC client type
@ -46,7 +46,7 @@ pub async fn search(query: String) -> Result<SearchResults, ServerFnError> {
if let Some(bytes) = parse_hex(&query)
&& let Ok(hash_array) = <[u8; 32]>::try_from(bytes)
{
let hash = Hash(hash_array);
let hash = HashType(hash_array);
// Try as block hash
if let Ok(block) = client.get_block_by_hash(hash).await {
@ -60,14 +60,8 @@ pub async fn search(query: String) -> Result<SearchResults, ServerFnError> {
// Try as account ID
let account_id = AccountId { value: hash_array };
match client.get_account(account_id).await {
Ok(account) => {
accounts.push((account_id, Some(account)));
}
Err(_) => {
// Account might not exist yet, still add it to results
accounts.push((account_id, None));
}
if let Ok(account) = client.get_account(account_id).await {
accounts.push((account_id, account));
}
}
@ -98,7 +92,7 @@ pub async fn get_block_by_id(block_id: BlockId) -> Result<Block, ServerFnError>
/// Get block by hash
#[server]
pub async fn get_block_by_hash(block_hash: Hash) -> Result<Block, ServerFnError> {
pub async fn get_block_by_hash(block_hash: HashType) -> Result<Block, ServerFnError> {
use indexer_service_rpc::RpcClient as _;
let client = expect_context::<IndexerRpcClient>();
client
@ -109,7 +103,7 @@ pub async fn get_block_by_hash(block_hash: Hash) -> Result<Block, ServerFnError>
/// Get transaction by hash
#[server]
pub async fn get_transaction(tx_hash: Hash) -> Result<Transaction, ServerFnError> {
pub async fn get_transaction(tx_hash: HashType) -> Result<Transaction, ServerFnError> {
use indexer_service_rpc::RpcClient as _;
let client = expect_context::<IndexerRpcClient>();
client

View File

@ -6,7 +6,7 @@ use crate::format_utils;
/// Account preview component
#[component]
pub fn AccountPreview(account_id: AccountId, account: Option<Account>) -> impl IntoView {
pub fn AccountPreview(account_id: AccountId, account: Account) -> impl IntoView {
let account_id_str = format_utils::format_account_id(&account_id);
view! {
@ -19,42 +19,31 @@ pub fn AccountPreview(account_id: AccountId, account: Option<Account>) -> impl I
</div>
</div>
{move || {
account
.as_ref()
.map(|Account { program_owner, balance, data, nonce }| {
let program_id = format_utils::format_program_id(program_owner);
view! {
<div class="account-preview-body">
<div class="account-field">
<span class="field-label">"Balance: "</span>
<span class="field-value">{balance.to_string()}</span>
</div>
<div class="account-field">
<span class="field-label">"Program: "</span>
<span class="field-value hash">{program_id}</span>
</div>
<div class="account-field">
<span class="field-label">"Nonce: "</span>
<span class="field-value">{nonce.to_string()}</span>
</div>
<div class="account-field">
<span class="field-label">"Data: "</span>
<span class="field-value">
{format!("{} bytes", data.0.len())}
</span>
</div>
</div>
}
.into_any()
})
.unwrap_or_else(|| {
view! {
<div class="account-preview-body">
<div class="account-not-found">"Account not found"</div>
</div>
}
.into_any()
})
let Account { program_owner, balance, data, nonce } = &account;
let program_id = format_utils::format_program_id(program_owner);
view! {
<div class="account-preview-body">
<div class="account-field">
<span class="field-label">"Balance: "</span>
<span class="field-value">{balance.to_string()}</span>
</div>
<div class="account-field">
<span class="field-label">"Program: "</span>
<span class="field-value hash">{program_id}</span>
</div>
<div class="account-field">
<span class="field-label">"Nonce: "</span>
<span class="field-value">{nonce.to_string()}</span>
</div>
<div class="account-field">
<span class="field-label">"Data: "</span>
<span class="field-value">
{format!("{} bytes", data.0.len())}
</span>
</div>
</div>
}
.into_any()
}}
</A>

View File

@ -1,4 +1,4 @@
use indexer_service_protocol::{BedrockStatus, Block, BlockBody, BlockHeader, BlockId, Hash};
use indexer_service_protocol::{BedrockStatus, Block, BlockBody, BlockHeader, BlockId, HashType};
use leptos::prelude::*;
use leptos_router::{components::A, hooks::use_params_map};
@ -7,7 +7,7 @@ use crate::{api, components::TransactionPreview, format_utils};
#[derive(Clone, PartialEq, Eq)]
enum BlockIdOrHash {
BlockId(BlockId),
Hash(Hash),
Hash(HashType),
}
/// Block page component
@ -29,7 +29,7 @@ pub fn BlockPage() -> impl IntoView {
if let Some(bytes) = format_utils::parse_hex(id_str)
&& let Ok(hash_array) = <[u8; 32]>::try_from(bytes)
{
return Some(BlockIdOrHash::Hash(Hash(hash_array)));
return Some(BlockIdOrHash::Hash(HashType(hash_array)));
}
None

View File

@ -1,5 +1,5 @@
use indexer_service_protocol::{
Hash, PrivacyPreservingMessage, PrivacyPreservingTransaction, ProgramDeploymentMessage,
HashType, PrivacyPreservingMessage, PrivacyPreservingTransaction, ProgramDeploymentMessage,
ProgramDeploymentTransaction, PublicMessage, PublicTransaction, Transaction, WitnessSet,
};
use leptos::prelude::*;
@ -18,7 +18,7 @@ pub fn TransactionPage() -> impl IntoView {
format_utils::parse_hex(&tx_hash_str).and_then(|bytes| {
if bytes.len() == 32 {
let hash_array: [u8; 32] = bytes.try_into().ok()?;
Some(Hash(hash_array))
Some(HashType(hash_array))
} else {
None
}

View File

@ -17,3 +17,4 @@ futures.workspace = true
url.workspace = true
logos-blockchain-core.workspace = true
serde_json.workspace = true
async-stream.workspace = true

View File

@ -1,36 +1,35 @@
use std::{fs::File, io::BufReader, path::Path};
use anyhow::{Context, Result};
use bedrock_client::BackoffConfig;
use common::sequencer_client::BasicAuth;
use logos_blockchain_core::mantle::ops::channel::ChannelId;
use anyhow::{Context as _, Result};
pub use bedrock_client::BackoffConfig;
use common::config::BasicAuth;
pub use logos_blockchain_core::mantle::ops::channel::ChannelId;
use serde::{Deserialize, Serialize};
use url::Url;
#[derive(Debug, Clone, Serialize, Deserialize)]
/// TODO: Expand if necessary.
pub struct ClientConfig {
pub struct BedrockClientConfig {
/// For individual RPC requests we use Fibonacci backoff retry strategy.
pub backoff: BackoffConfig,
pub addr: Url,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub auth: Option<BasicAuth>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
/// Note: For individual RPC requests we use Fibonacci backoff retry strategy
pub struct IndexerConfig {
pub resubscribe_interval_millis: u64,
pub backoff: BackoffConfig,
pub bedrock_client_config: ClientConfig,
pub sequencer_client_config: ClientConfig,
pub bedrock_client_config: BedrockClientConfig,
pub channel_id: ChannelId,
}
impl IndexerConfig {
pub fn from_path(config_home: &Path) -> Result<IndexerConfig> {
let file = File::open(config_home)
.with_context(|| format!("Failed to open indexer config at {config_home:?}"))?;
pub fn from_path(config_path: &Path) -> Result<IndexerConfig> {
let file = File::open(config_path)
.with_context(|| format!("Failed to open indexer config at {config_path:?}"))?;
let reader = BufReader::new(file);
serde_json::from_reader(reader)
.with_context(|| format!("Failed to parse indexer config at {config_home:?}"))
.with_context(|| format!("Failed to parse indexer config at {config_path:?}"))
}
}

110
indexer/core/src/lib.rs Normal file
View File

@ -0,0 +1,110 @@
use std::sync::Arc;
use anyhow::{Context as _, Result};
use bedrock_client::BedrockClient;
use common::block::Block;
use futures::StreamExt;
use log::{debug, info};
use logos_blockchain_core::mantle::{
Op, SignedMantleTx,
ops::channel::{ChannelId, inscribe::InscriptionOp},
};
use tokio::sync::RwLock;
use crate::{config::IndexerConfig, state::IndexerState};
pub mod config;
pub mod state;
/// Core indexer logic: follows the Bedrock (L1) chain and extracts the L2
/// blocks inscribed on the configured channel.
#[derive(Clone)]
pub struct IndexerCore {
    // RPC client used to stream and fetch L1 blocks.
    bedrock_client: BedrockClient,
    config: IndexerConfig,
    // Shared via `Arc` inside, so clones of the core observe the same progress.
    state: IndexerState,
}
impl IndexerCore {
    /// Builds the core from `config`, constructing the Bedrock RPC client
    /// from the configured backoff, address and optional auth.
    ///
    /// # Errors
    /// Fails when the Bedrock client cannot be created.
    pub fn new(config: IndexerConfig) -> Result<Self> {
        Ok(Self {
            bedrock_client: BedrockClient::new(
                config.bedrock_client_config.backoff,
                config.bedrock_client_config.addr.clone(),
                config.bedrock_client_config.auth.clone(),
            )
            .context("Failed to create Bedrock client")?,
            config,
            // No state setup for now, future task.
            state: IndexerState {
                latest_seen_block: Arc::new(RwLock::new(0)),
            },
        })
    }

    /// Returns an endless stream of L2 blocks parsed out of L1 blocks.
    ///
    /// Subscribes to the Bedrock LIB stream; for each observed L1 block it
    /// fetches the full block, parses the L2 blocks inscribed on
    /// `config.channel_id`, records the highest seen block id in `state`,
    /// and yields the parsed blocks. When the underlying L1 stream ends it
    /// resubscribes after `resubscribe_interval_millis`.
    pub async fn subscribe_parse_block_stream(&self) -> impl futures::Stream<Item = Result<Block>> {
        debug!("Subscribing to Bedrock block stream");
        async_stream::stream! {
            loop {
                // NOTE(review): `?` is used inside `stream!` (not `try_stream!`);
                // confirm it propagates errors as intended here.
                let mut stream_pinned = Box::pin(self.bedrock_client.get_lib_stream().await?);
                info!("Block stream joined");
                while let Some(block_info) = stream_pinned.next().await {
                    let header_id = block_info.header_id;
                    info!("Observed L1 block at height {}", block_info.height);
                    // Presumably `None` means the block is not available yet;
                    // it is silently skipped — verify against the client docs.
                    if let Some(l1_block) = self
                        .bedrock_client
                        .get_block_by_id(header_id)
                        .await?
                    {
                        info!("Extracted L1 block at height {}", block_info.height);
                        let l2_blocks_parsed = parse_blocks(
                            l1_block.into_transactions().into_iter(),
                            &self.config.channel_id,
                        ).collect::<Vec<_>>();
                        info!("Parsed {} L2 blocks", l2_blocks_parsed.len());
                        for l2_block in l2_blocks_parsed {
                            // State modification, will be updated in future
                            {
                                // Track the highest L2 block id seen so far.
                                let mut guard = self.state.latest_seen_block.write().await;
                                if l2_block.header.block_id > *guard {
                                    *guard = l2_block.header.block_id;
                                }
                            }
                            yield Ok(l2_block);
                        }
                    }
                }
                // Refetch stream after delay
                tokio::time::sleep(std::time::Duration::from_millis(
                    self.config.resubscribe_interval_millis,
                ))
                .await;
            }
        }
    }
}
/// Extracts every L2 [`Block`] inscribed on `decoded_channel_id` from the
/// given L1 transactions, in transaction/op order. Ops for other channels and
/// inscriptions that fail to deserialize are skipped.
fn parse_blocks(
    block_txs: impl Iterator<Item = SignedMantleTx>,
    decoded_channel_id: &ChannelId,
) -> impl Iterator<Item = Block> {
    block_txs
        .flat_map(|tx| tx.mantle_tx.ops.into_iter())
        .filter_map(move |op| {
            // Only channel inscriptions carry L2 block payloads.
            let Op::ChannelInscribe(InscriptionOp {
                channel_id,
                inscription,
                ..
            }) = op
            else {
                return None;
            };
            if channel_id != *decoded_channel_id {
                return None;
            }
            borsh::from_slice::<Block>(&inscription).ok()
        })
}

View File

@ -5,8 +5,9 @@ edition = "2024"
license = { workspace = true }
[dependencies]
indexer_service_protocol.workspace = true
indexer_service_protocol = { workspace = true, features = ["convert"] }
indexer_service_rpc = { workspace = true, features = ["server"] }
indexer_core.workspace = true
clap = { workspace = true, features = ["derive"] }
anyhow.workspace = true
@ -15,7 +16,10 @@ tokio-util.workspace = true
env_logger.workspace = true
log.workspace = true
jsonrpsee.workspace = true
serde_json.workspace = true
futures.workspace = true
async-trait = "0.1.89"
arc-swap = "1.8.1"
[features]
# Return mock responses with generated data for testing purposes

View File

@ -36,7 +36,9 @@ RUN strip /indexer_service/target/release/indexer_service
FROM debian:trixie-slim
# Create non-root user for security
RUN useradd -m -u 1000 -s /bin/bash indexer_service_user
RUN useradd -m -u 1000 -s /bin/bash indexer_service_user && \
mkdir -p /indexer_service /etc/indexer_service && \
chown -R indexer_service_user:indexer_service_user /indexer_service /etc/indexer_service
# Copy binary from builder
COPY --from=builder --chown=indexer_service_user:indexer_service_user /indexer_service/target/release/indexer_service /usr/local/bin/indexer_service
@ -61,4 +63,4 @@ ENV RUST_LOG=info
USER indexer_service_user
WORKDIR /indexer_service
CMD ["indexer_service"]
CMD ["indexer_service", "/etc/indexer_service/indexer_config.json"]

View File

@ -0,0 +1,11 @@
{
"resubscribe_interval_millis": 1000,
"bedrock_client_config": {
"addr": "http://localhost:18080",
"backoff": {
"start_delay_millis": 100,
"max_retries": 5
}
},
"channel_id": "0101010101010101010101010101010101010101010101010101010101010101"
}

View File

@ -0,0 +1,12 @@
services:
indexer_service:
image: lssa/indexer_service
build:
context: ../..
dockerfile: indexer/service/Dockerfile
container_name: indexer_service
ports:
- "8779:8779"
volumes:
# Mount configuration
- ./configs/indexer_config.json:/etc/indexer_service/indexer_config.json

View File

@ -12,8 +12,7 @@ common = { workspace = true, optional = true }
serde = { workspace = true, features = ["derive"] }
schemars.workspace = true
base64.workspace = true
borsh = { workspace = true, optional = true }
[features]
# Enable conversion to/from NSSA core types
convert = ["dep:nssa_core", "dep:nssa", "dep:common", "dep:borsh"]
convert = ["dep:nssa_core", "dep:nssa", "dep:common"]

View File

@ -381,7 +381,7 @@ impl TryFrom<WitnessSet> for nssa::privacy_preserving_transaction::witness_set::
impl From<nssa::PublicTransaction> for PublicTransaction {
fn from(value: nssa::PublicTransaction) -> Self {
let hash = Hash(value.hash());
let hash = HashType(value.hash());
let nssa::PublicTransaction {
message,
witness_set,
@ -430,7 +430,7 @@ impl TryFrom<PublicTransaction> for nssa::PublicTransaction {
impl From<nssa::PrivacyPreservingTransaction> for PrivacyPreservingTransaction {
fn from(value: nssa::PrivacyPreservingTransaction) -> Self {
let hash = Hash(value.hash());
let hash = HashType(value.hash());
let nssa::PrivacyPreservingTransaction {
message,
witness_set,
@ -467,7 +467,7 @@ impl TryFrom<PrivacyPreservingTransaction> for nssa::PrivacyPreservingTransactio
impl From<nssa::ProgramDeploymentTransaction> for ProgramDeploymentTransaction {
fn from(value: nssa::ProgramDeploymentTransaction) -> Self {
let hash = Hash(value.hash());
let hash = HashType(value.hash());
let nssa::ProgramDeploymentTransaction { message } = value;
Self {
@ -531,8 +531,8 @@ impl From<common::block::BlockHeader> for BlockHeader {
} = value;
Self {
block_id,
prev_block_hash: Hash(prev_block_hash),
hash: Hash(hash),
prev_block_hash: prev_block_hash.into(),
hash: hash.into(),
timestamp,
signature: signature.into(),
}
@ -552,47 +552,32 @@ impl TryFrom<BlockHeader> for common::block::BlockHeader {
} = value;
Ok(Self {
block_id,
prev_block_hash: prev_block_hash.0,
hash: hash.0,
prev_block_hash: prev_block_hash.into(),
hash: hash.into(),
timestamp,
signature: signature.into(),
})
}
}
impl TryFrom<common::block::BlockBody> for BlockBody {
type Error = std::io::Error;
fn try_from(value: common::block::BlockBody) -> Result<Self, Self::Error> {
// Note: EncodedTransaction doesn't have a direct conversion to NSSATransaction
// This conversion will decode and re-encode the transactions
use borsh::BorshDeserialize as _;
impl From<common::block::BlockBody> for BlockBody {
fn from(value: common::block::BlockBody) -> Self {
let common::block::BlockBody { transactions } = value;
let transactions = transactions
.into_iter()
.map(|encoded_tx| match encoded_tx.tx_kind {
common::transaction::TxKind::Public => {
nssa::PublicTransaction::try_from_slice(&encoded_tx.encoded_transaction_data)
.map(|tx| Transaction::Public(tx.into()))
.map(|tx| match tx {
common::transaction::NSSATransaction::Public(tx) => Transaction::Public(tx.into()),
common::transaction::NSSATransaction::PrivacyPreserving(tx) => {
Transaction::PrivacyPreserving(tx.into())
}
common::transaction::TxKind::PrivacyPreserving => {
nssa::PrivacyPreservingTransaction::try_from_slice(
&encoded_tx.encoded_transaction_data,
)
.map(|tx| Transaction::PrivacyPreserving(tx.into()))
}
common::transaction::TxKind::ProgramDeployment => {
nssa::ProgramDeploymentTransaction::try_from_slice(
&encoded_tx.encoded_transaction_data,
)
.map(|tx| Transaction::ProgramDeployment(tx.into()))
common::transaction::NSSATransaction::ProgramDeployment(tx) => {
Transaction::ProgramDeployment(tx.into())
}
})
.collect::<Result<Vec<_>, _>>()?;
.collect();
Ok(Self { transactions })
Self { transactions }
}
}
@ -606,7 +591,7 @@ impl TryFrom<BlockBody> for common::block::BlockBody {
.into_iter()
.map(|tx| {
let nssa_tx: common::transaction::NSSATransaction = tx.try_into()?;
Ok::<_, nssa::error::NssaError>(nssa_tx.into())
Ok::<_, nssa::error::NssaError>(nssa_tx)
})
.collect::<Result<Vec<_>, _>>()?;
@ -614,10 +599,8 @@ impl TryFrom<BlockBody> for common::block::BlockBody {
}
}
impl TryFrom<common::block::Block> for Block {
type Error = std::io::Error;
fn try_from(value: common::block::Block) -> Result<Self, Self::Error> {
impl From<common::block::Block> for Block {
fn from(value: common::block::Block) -> Self {
let common::block::Block {
header,
body,
@ -625,12 +608,12 @@ impl TryFrom<common::block::Block> for Block {
bedrock_parent_id,
} = value;
Ok(Self {
Self {
header: header.into(),
body: body.try_into()?,
body: body.into(),
bedrock_status: bedrock_status.into(),
bedrock_parent_id: MantleMsgId(bedrock_parent_id),
})
}
}
}
@ -673,3 +656,15 @@ impl From<BedrockStatus> for common::block::BedrockStatus {
}
}
}
impl From<common::HashType> for HashType {
fn from(value: common::HashType) -> Self {
Self(value.0)
}
}
impl From<HashType> for common::HashType {
fn from(value: HashType) -> Self {
common::HashType(value.0)
}
}

View File

@ -42,8 +42,8 @@ pub struct Block {
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct BlockHeader {
pub block_id: BlockId,
pub prev_block_hash: Hash,
pub hash: Hash,
pub prev_block_hash: HashType,
pub hash: HashType,
pub timestamp: TimeStamp,
pub signature: Signature,
}
@ -69,7 +69,7 @@ pub enum Transaction {
impl Transaction {
/// Get the hash of the transaction
pub fn hash(&self) -> &self::Hash {
pub fn hash(&self) -> &self::HashType {
match self {
Transaction::Public(tx) => &tx.hash,
Transaction::PrivacyPreserving(tx) => &tx.hash,
@ -80,14 +80,14 @@ impl Transaction {
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct PublicTransaction {
pub hash: Hash,
pub hash: HashType,
pub message: PublicMessage,
pub witness_set: WitnessSet,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct PrivacyPreservingTransaction {
pub hash: Hash,
pub hash: HashType,
pub message: PrivacyPreservingMessage,
pub witness_set: WitnessSet,
}
@ -134,7 +134,7 @@ pub struct EncryptedAccountData {
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct ProgramDeploymentTransaction {
pub hash: Hash,
pub hash: HashType,
pub message: ProgramDeploymentMessage,
}
@ -197,7 +197,7 @@ pub struct Data(
);
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct Hash(
pub struct HashType(
#[serde(with = "base64::arr")]
#[schemars(with = "String", description = "base64-encoded hash")]
pub [u8; 32],

View File

@ -1,4 +1,4 @@
use indexer_service_protocol::{Account, AccountId, Block, BlockId, Hash, Transaction};
use indexer_service_protocol::{Account, AccountId, Block, BlockId, HashType, Transaction};
use jsonrpsee::proc_macros::rpc;
#[cfg(feature = "server")]
use jsonrpsee::{core::SubscriptionResult, types::ErrorObjectOwned};
@ -23,23 +23,20 @@ pub trait Rpc {
Ok(serde_json::to_value(block_schema).expect("Schema serialization should not fail"))
}
#[subscription(name = "subscribeToBlocks", item = Vec<Block>)]
async fn subscribe_to_blocks(&self, from: BlockId) -> SubscriptionResult;
#[subscription(name = "subscribeToFinalizedBlocks", item = BlockId)]
async fn subscribe_to_finalized_blocks(&self) -> SubscriptionResult;
#[method(name = "getBlockById")]
async fn get_block_by_id(&self, block_id: BlockId) -> Result<Block, ErrorObjectOwned>;
#[method(name = "getBlockByHash")]
async fn get_block_by_hash(&self, block_hash: Hash) -> Result<Block, ErrorObjectOwned>;
#[method(name = "getLastBlockId")]
async fn get_last_block_id(&self) -> Result<BlockId, ErrorObjectOwned>;
async fn get_block_by_hash(&self, block_hash: HashType) -> Result<Block, ErrorObjectOwned>;
#[method(name = "getAccount")]
async fn get_account(&self, account_id: AccountId) -> Result<Account, ErrorObjectOwned>;
#[method(name = "getTransaction")]
async fn get_transaction(&self, tx_hash: Hash) -> Result<Transaction, ErrorObjectOwned>;
async fn get_transaction(&self, tx_hash: HashType) -> Result<Transaction, ErrorObjectOwned>;
#[method(name = "getBlocks")]
async fn get_blocks(&self, offset: u32, limit: u32) -> Result<Vec<Block>, ErrorObjectOwned>;

View File

@ -0,0 +1,88 @@
use std::net::SocketAddr;
use anyhow::{Context as _, Result};
pub use indexer_core::config::*;
use indexer_service_rpc::RpcServer as _;
use jsonrpsee::server::Server;
use log::{error, info};
pub mod service;
#[cfg(feature = "mock-responses")]
pub mod mock_service;
/// Handle to a running Indexer RPC server; stops the server on drop.
pub struct IndexerHandle {
    // Local address the server actually bound to.
    addr: SocketAddr,
    // `None` only after `stopped()` has consumed the handle.
    server_handle: Option<jsonrpsee::server::ServerHandle>,
}
impl IndexerHandle {
    /// Wraps a freshly started server handle together with its bound address.
    fn new(addr: SocketAddr, server_handle: jsonrpsee::server::ServerHandle) -> Self {
        let server_handle = Some(server_handle);
        Self {
            addr,
            server_handle,
        }
    }

    /// The local socket address the RPC server is listening on.
    pub fn addr(&self) -> SocketAddr {
        self.addr
    }

    /// Consumes the handle and waits until the server has fully stopped.
    pub async fn stopped(mut self) {
        let server = self
            .server_handle
            .take()
            .expect("Indexer server handle is set");
        server.stopped().await
    }

    /// Whether the server has stopped (a consumed handle counts as stopped).
    pub fn is_stopped(&self) -> bool {
        match &self.server_handle {
            Some(handle) => handle.is_stopped(),
            None => true,
        }
    }
}
impl Drop for IndexerHandle {
fn drop(&mut self) {
let Self {
addr: _,
server_handle,
} = self;
let Some(handle) = server_handle else {
return;
};
if let Err(err) = handle.stop() {
error!("An error occurred while stopping Indexer RPC server: {err}");
}
}
}
/// Builds and starts the Indexer JSON-RPC server on `0.0.0.0:<port>`.
///
/// With the `mock-responses` feature the server serves generated mock data
/// and `config` is ignored; otherwise a real `IndexerService` is built from
/// `config`.
///
/// # Errors
/// Fails if the server cannot be built/bound, its local address cannot be
/// read, or (without mocks) the indexer service fails to initialize.
pub async fn run_server(config: IndexerConfig, port: u16) -> Result<IndexerHandle> {
    // Silences the unused-variable warning when mocks are enabled.
    #[cfg(feature = "mock-responses")]
    let _ = config;

    let server = Server::builder()
        .build(SocketAddr::from(([0, 0, 0, 0], port)))
        .await
        .context("Failed to build RPC server")?;
    let addr = server
        .local_addr()
        .context("Failed to get local address of RPC server")?;
    info!("Starting Indexer Service RPC server on {addr}");

    // Exactly one of the two `handle` bindings is compiled in, depending on
    // the `mock-responses` feature.
    #[cfg(not(feature = "mock-responses"))]
    let handle = {
        let service =
            service::IndexerService::new(config).context("Failed to initialize indexer service")?;
        server.start(service.into_rpc())
    };
    #[cfg(feature = "mock-responses")]
    let handle = server.start(mock_service::MockIndexerService::new_with_mock_blocks().into_rpc());

    Ok(IndexerHandle::new(addr, handle))
}

View File

@ -1,15 +1,15 @@
use std::net::SocketAddr;
use std::path::PathBuf;
use anyhow::{Context as _, Result};
use anyhow::Result;
use clap::Parser;
use indexer_service_rpc::RpcServer as _;
use jsonrpsee::server::Server;
use log::{error, info};
use tokio_util::sync::CancellationToken;
#[derive(Debug, Parser)]
#[clap(version)]
struct Args {
#[clap(name = "config")]
config_path: PathBuf,
#[clap(short, long, default_value = "8779")]
port: u16,
}
@ -18,18 +18,18 @@ struct Args {
async fn main() -> Result<()> {
env_logger::init();
let args = Args::parse();
let Args { config_path, port } = Args::parse();
let cancellation_token = listen_for_shutdown_signal();
let handle = run_server(args.port).await?;
let handle_clone = handle.clone();
let config = indexer_service::IndexerConfig::from_path(&config_path)?;
let indexer_handle = indexer_service::run_server(config, port).await?;
tokio::select! {
_ = cancellation_token.cancelled() => {
info!("Shutting down server...");
}
_ = handle_clone.stopped() => {
_ = indexer_handle.stopped() => {
error!("Server stopped unexpectedly");
}
}
@ -39,28 +39,6 @@ async fn main() -> Result<()> {
Ok(())
}
async fn run_server(port: u16) -> Result<jsonrpsee::server::ServerHandle> {
let server = Server::builder()
.build(SocketAddr::from(([0, 0, 0, 0], port)))
.await
.context("Failed to build RPC server")?;
let addr = server
.local_addr()
.context("Failed to get local address of RPC server")?;
info!("Starting Indexer Service RPC server on {addr}");
#[cfg(not(feature = "mock-responses"))]
let handle = server.start(indexer_service::service::IndexerService.into_rpc());
#[cfg(feature = "mock-responses")]
let handle = server.start(
indexer_service::mock_service::MockIndexerService::new_with_mock_blocks().into_rpc(),
);
Ok(handle)
}
fn listen_for_shutdown_signal() -> CancellationToken {
let cancellation_token = CancellationToken::new();
let cancellation_token_clone = cancellation_token.clone();

View File

@ -2,9 +2,10 @@ use std::collections::HashMap;
use indexer_service_protocol::{
Account, AccountId, BedrockStatus, Block, BlockBody, BlockHeader, BlockId, Commitment,
CommitmentSetDigest, Data, EncryptedAccountData, Hash, MantleMsgId, PrivacyPreservingMessage,
PrivacyPreservingTransaction, ProgramDeploymentMessage, ProgramDeploymentTransaction,
PublicMessage, PublicTransaction, Signature, Transaction, WitnessSet,
CommitmentSetDigest, Data, EncryptedAccountData, HashType, MantleMsgId,
PrivacyPreservingMessage, PrivacyPreservingTransaction, ProgramDeploymentMessage,
ProgramDeploymentTransaction, PublicMessage, PublicTransaction, Signature, Transaction,
WitnessSet,
};
use jsonrpsee::{core::SubscriptionResult, types::ErrorObjectOwned};
@ -12,7 +13,7 @@ use jsonrpsee::{core::SubscriptionResult, types::ErrorObjectOwned};
pub struct MockIndexerService {
blocks: Vec<Block>,
accounts: HashMap<AccountId, Account>,
transactions: HashMap<Hash, (Transaction, BlockId)>,
transactions: HashMap<HashType, (Transaction, BlockId)>,
}
impl MockIndexerService {
@ -43,14 +44,14 @@ impl MockIndexerService {
}
// Create 10 blocks with transactions
let mut prev_hash = Hash([0u8; 32]);
let mut prev_hash = HashType([0u8; 32]);
for block_id in 0..10 {
let block_hash = {
let mut hash = [0u8; 32];
hash[0] = block_id as u8;
hash[1] = 0xff;
Hash(hash)
HashType(hash)
};
// Create 2-4 transactions per block (mix of Public, PrivacyPreserving, and
@ -63,7 +64,7 @@ impl MockIndexerService {
let mut hash = [0u8; 32];
hash[0] = block_id as u8;
hash[1] = tx_idx as u8;
Hash(hash)
HashType(hash)
};
// Vary transaction types: Public, PrivacyPreserving, or ProgramDeployment
@ -161,16 +162,22 @@ impl MockIndexerService {
}
}
// `async_trait` is required by `jsonrpsee`
#[async_trait::async_trait]
impl indexer_service_rpc::RpcServer for MockIndexerService {
async fn subscribe_to_blocks(
async fn subscribe_to_finalized_blocks(
&self,
_subscription_sink: jsonrpsee::PendingSubscriptionSink,
_from: BlockId,
subscription_sink: jsonrpsee::PendingSubscriptionSink,
) -> SubscriptionResult {
// Subscription not implemented for mock service
Err("Subscriptions not supported in mock service".into())
let sink = subscription_sink.accept().await?;
for block in self
.blocks
.iter()
.filter(|b| b.bedrock_status == BedrockStatus::Finalized)
{
let json = serde_json::value::to_raw_value(block).unwrap();
sink.send(json).await?;
}
Ok(())
}
async fn get_block_by_id(&self, block_id: BlockId) -> Result<Block, ErrorObjectOwned> {
@ -187,7 +194,7 @@ impl indexer_service_rpc::RpcServer for MockIndexerService {
})
}
async fn get_block_by_hash(&self, block_hash: Hash) -> Result<Block, ErrorObjectOwned> {
async fn get_block_by_hash(&self, block_hash: HashType) -> Result<Block, ErrorObjectOwned> {
self.blocks
.iter()
.find(|b| b.header.hash == block_hash)
@ -195,13 +202,6 @@ impl indexer_service_rpc::RpcServer for MockIndexerService {
.ok_or_else(|| ErrorObjectOwned::owned(-32001, "Block with hash not found", None::<()>))
}
async fn get_last_block_id(&self) -> Result<BlockId, ErrorObjectOwned> {
self.blocks
.last()
.map(|b| b.header.block_id)
.ok_or_else(|| ErrorObjectOwned::owned(-32001, "No blocks available", None::<()>))
}
async fn get_account(&self, account_id: AccountId) -> Result<Account, ErrorObjectOwned> {
self.accounts
.get(&account_id)
@ -209,7 +209,7 @@ impl indexer_service_rpc::RpcServer for MockIndexerService {
.ok_or_else(|| ErrorObjectOwned::owned(-32001, "Account not found", None::<()>))
}
async fn get_transaction(&self, tx_hash: Hash) -> Result<Transaction, ErrorObjectOwned> {
async fn get_transaction(&self, tx_hash: HashType) -> Result<Transaction, ErrorObjectOwned> {
self.transactions
.get(&tx_hash)
.map(|(tx, _)| tx.clone())

View File

@ -0,0 +1,228 @@
use std::{pin::pin, sync::Arc};
use anyhow::{Context as _, Result, bail};
use arc_swap::ArcSwap;
use futures::{StreamExt as _, never::Never};
use indexer_core::{IndexerCore, config::IndexerConfig};
use indexer_service_protocol::{Account, AccountId, Block, BlockId, HashType, Transaction};
use jsonrpsee::{
SubscriptionSink,
core::{Serialize, SubscriptionResult},
types::{ErrorCode, ErrorObject, ErrorObjectOwned},
};
use log::{debug, error, info, warn};
use tokio::sync::mpsc::UnboundedSender;
/// JSON-RPC service implementation backed by [`IndexerCore`].
pub struct IndexerService {
    // Fans finalized block IDs out to `subscribeToFinalizedBlocks` clients.
    subscription_service: SubscriptionService,
    #[expect(
        dead_code,
        reason = "Will be used in future implementations of RPC methods"
    )]
    indexer: IndexerCore,
}
impl IndexerService {
    /// Builds the service: constructs the indexer core from `config` and
    /// spawns the background subscription loop over a clone of it.
    ///
    /// # Errors
    /// Fails when the indexer core cannot be created.
    pub fn new(config: IndexerConfig) -> Result<Self> {
        let indexer = IndexerCore::new(config)?;
        Ok(Self {
            subscription_service: SubscriptionService::spawn_new(indexer.clone()),
            indexer,
        })
    }
}
// JSON-RPC surface. Only the finalized-block subscription is wired to the
// indexer; the remaining read methods are stubs that uniformly return a
// "Not yet implemented" internal error.
#[async_trait::async_trait]
impl indexer_service_rpc::RpcServer for IndexerService {
    /// Accepts the pending subscription and registers the sink with the
    /// background fan-out loop.
    async fn subscribe_to_finalized_blocks(
        &self,
        subscription_sink: jsonrpsee::PendingSubscriptionSink,
    ) -> SubscriptionResult {
        let sink = subscription_sink.accept().await?;
        info!(
            "Accepted new subscription to finalized blocks with ID {:?}",
            sink.subscription_id()
        );
        self.subscription_service
            .add_subscription(Subscription::new(sink))
            .await?;
        Ok(())
    }

    async fn get_block_by_id(&self, _block_id: BlockId) -> Result<Block, ErrorObjectOwned> {
        Err(not_yet_implemented_error())
    }

    async fn get_block_by_hash(&self, _block_hash: HashType) -> Result<Block, ErrorObjectOwned> {
        Err(not_yet_implemented_error())
    }

    async fn get_account(&self, _account_id: AccountId) -> Result<Account, ErrorObjectOwned> {
        Err(not_yet_implemented_error())
    }

    async fn get_transaction(&self, _tx_hash: HashType) -> Result<Transaction, ErrorObjectOwned> {
        Err(not_yet_implemented_error())
    }

    async fn get_blocks(&self, _offset: u32, _limit: u32) -> Result<Vec<Block>, ErrorObjectOwned> {
        Err(not_yet_implemented_error())
    }

    async fn get_transactions_by_account(
        &self,
        _account_id: AccountId,
        _limit: u32,
        _offset: u32,
    ) -> Result<Vec<Transaction>, ErrorObjectOwned> {
        Err(not_yet_implemented_error())
    }
}
/// Owns the background task that forwards finalized block IDs from the
/// indexer to all registered subscribers, plus the indexer needed to respawn
/// that task if it dies.
struct SubscriptionService {
    // Current loop task handle + subscription sender; swapped atomically on respawn.
    parts: ArcSwap<SubscriptionLoopParts>,
    // Cloned to build a fresh block stream when the loop is respawned.
    indexer: IndexerCore,
}
impl SubscriptionService {
    /// Creates the service and spawns the background loop that forwards
    /// finalized block IDs to subscribers.
    pub fn spawn_new(indexer: IndexerCore) -> Self {
        let parts = Self::spawn_respond_subscribers_loop(indexer.clone());
        Self {
            parts: ArcSwap::new(Arc::new(parts)),
            indexer,
        }
    }

    /// Registers a new subscriber with the background loop.
    ///
    /// If the loop task has died (finished with an error or panicked), it is
    /// respawned and the subscription is re-sent to the fresh loop, so the
    /// caller that observed the failure still ends up subscribed.
    ///
    /// # Errors
    /// Returns an error when the subscription cannot be handed to a live (or
    /// freshly respawned) loop.
    pub async fn add_subscription(&self, subscription: Subscription<BlockId>) -> Result<()> {
        let guard = self.parts.load();
        let Err(err) = guard.new_subscription_sender.send(subscription) else {
            return Ok(());
        };
        error!("Failed to send new subscription to subscription service with error: {err:#?}");

        // Respawn the subscription service loop only if it has actually
        // finished (either with error or panic).
        if !guard.handle.is_finished() {
            bail!(err);
        }
        drop(guard);
        let new_parts = Self::spawn_respond_subscribers_loop(self.indexer.clone());
        let old_parts_arc = self.parts.swap(Arc::new(new_parts));
        // Concurrent callers may still hold an arc-swap guard on the old
        // parts; in that case skip the post-mortem instead of panicking — the
        // old task is finished either way.
        if let Some(old_parts) = Arc::into_inner(old_parts_arc) {
            match old_parts.handle.await {
                Ok(Err(err)) => {
                    error!(
                        "Subscription service loop has unexpectedly finished with error: {err:#}"
                    );
                }
                Err(err) => {
                    error!("Subscription service loop has panicked with err: {err:#}");
                }
            }
        }
        // BUGFIX: previously the triggering subscription was dropped with an
        // error even though a fresh loop was spawned. `SendError` hands the
        // value back, so re-send it to the new loop's sender.
        let subscription = err.0;
        if self
            .parts
            .load()
            .new_subscription_sender
            .send(subscription)
            .is_err()
        {
            bail!("Failed to resubscribe after respawning the subscription service loop");
        }
        Ok(())
    }

    /// Spawns one incarnation of the fan-out loop: it accepts new subscribers
    /// from an unbounded channel and pushes each parsed block's ID to every
    /// registered sink. The task only returns with an error.
    fn spawn_respond_subscribers_loop(indexer: IndexerCore) -> SubscriptionLoopParts {
        let (new_subscription_sender, mut sub_receiver) =
            tokio::sync::mpsc::unbounded_channel::<Subscription<BlockId>>();
        let handle = tokio::spawn(async move {
            let mut subscribers = Vec::new();
            let mut block_stream = pin!(indexer.subscribe_parse_block_stream().await);
            loop {
                tokio::select! {
                    sub = sub_receiver.recv() => {
                        let Some(subscription) = sub else {
                            bail!("Subscription receiver closed unexpectedly");
                        };
                        info!("Added new subscription with ID {:?}", subscription.sink.subscription_id());
                        subscribers.push(subscription);
                    }
                    block_opt = block_stream.next() => {
                        debug!("Got new block from block stream");
                        let Some(block) = block_opt else {
                            bail!("Block stream ended unexpectedly");
                        };
                        let block = block.context("Failed to get L2 block data")?;
                        let block: indexer_service_protocol::Block = block.into();
                        // Best-effort delivery: a failed send is logged but the
                        // subscriber is kept (it may only be momentarily full).
                        for sub in &mut subscribers {
                            if let Err(err) = sub.try_send(&block.header.block_id) {
                                warn!(
                                    "Failed to send block ID {:?} to subscription ID {:?} with error: {err:#?}",
                                    block.header.block_id,
                                    sub.sink.subscription_id(),
                                );
                            }
                        }
                    }
                }
            }
        });
        SubscriptionLoopParts {
            handle,
            new_subscription_sender,
        }
    }
}
impl Drop for SubscriptionService {
    /// Aborts the background loop so it does not outlive the service.
    fn drop(&mut self) {
        let parts = self.parts.load();
        parts.handle.abort();
    }
}
/// Handle and input channel for one incarnation of the subscription loop.
struct SubscriptionLoopParts {
    // Join handle of the loop task; `Never` success type means it only
    // returns via an error.
    handle: tokio::task::JoinHandle<Result<Never>>,
    // Channel used to hand new subscribers to the running loop.
    new_subscription_sender: UnboundedSender<Subscription<BlockId>>,
}
/// A subscriber sink tagged with the item type `T` it receives.
struct Subscription<T> {
    sink: SubscriptionSink,
    // Zero-sized tag: the sink itself is untyped JSON; `T` fixes what gets
    // serialized through `try_send`.
    _marker: std::marker::PhantomData<T>,
}
impl<T> Subscription<T> {
    /// Wraps an accepted sink in a typed subscription handle.
    fn new(sink: SubscriptionSink) -> Self {
        Self {
            sink,
            _marker: std::marker::PhantomData,
        }
    }

    /// Serializes `item` to raw JSON and pushes it to the subscriber without
    /// blocking; fails on serialization errors or a full/closed sink.
    fn try_send(&mut self, item: &T) -> Result<()>
    where
        T: Serialize,
    {
        let payload = serde_json::value::to_raw_value(item)
            .context("Failed to serialize item for subscription")?;
        Ok(self.sink.try_send(payload)?)
    }
}
impl<T> Drop for Subscription<T> {
    /// Logs the teardown so dropped/replaced subscribers are traceable.
    fn drop(&mut self) {
        let subscription_id = self.sink.subscription_id();
        info!("Subscription with ID {subscription_id:?} is being dropped");
    }
}
/// Uniform placeholder error for RPC methods that are not implemented yet.
fn not_yet_implemented_error() -> ErrorObjectOwned {
    let code = ErrorCode::InternalError.code();
    ErrorObject::owned(code, "Not yet implemented", None::<String>)
}

View File

@ -1,124 +0,0 @@
use std::sync::Arc;
use anyhow::Result;
use bedrock_client::BedrockClient;
use common::{
block::HashableBlockData, communication::indexer::Message,
rpc_primitives::requests::PostIndexerMessageResponse, sequencer_client::SequencerClient,
};
use futures::StreamExt;
use log::info;
use logos_blockchain_core::mantle::{
Op, SignedMantleTx,
ops::channel::{ChannelId, inscribe::InscriptionOp},
};
use tokio::sync::RwLock;
use crate::{config::IndexerConfig, state::IndexerState};
pub mod config;
pub mod state;
pub struct IndexerCore {
pub bedrock_client: BedrockClient,
pub sequencer_client: SequencerClient,
pub config: IndexerConfig,
pub state: IndexerState,
}
impl IndexerCore {
pub fn new(config: IndexerConfig) -> Result<Self> {
Ok(Self {
bedrock_client: BedrockClient::new(
config.bedrock_client_config.auth.clone().map(Into::into),
config.bedrock_client_config.addr.clone(),
)?,
sequencer_client: SequencerClient::new_with_auth(
config.sequencer_client_config.addr.clone(),
config.sequencer_client_config.auth.clone(),
)?,
config,
// No state setup for now, future task.
state: IndexerState {
latest_seen_block: Arc::new(RwLock::new(0)),
},
})
}
pub async fn subscribe_parse_block_stream(&self) -> Result<()> {
loop {
let mut stream_pinned = Box::pin(self.bedrock_client.get_lib_stream().await?);
info!("Block stream joined");
while let Some(block_info) = stream_pinned.next().await {
let header_id = block_info.header_id;
info!("Observed L1 block at height {}", block_info.height);
if let Some(l1_block) = self
.bedrock_client
.get_block_by_id(header_id, &self.config.backoff)
.await?
{
info!("Extracted L1 block at height {}", block_info.height);
let l2_blocks_parsed = parse_blocks(
l1_block.into_transactions().into_iter(),
&self.config.channel_id,
);
for l2_block in l2_blocks_parsed {
// State modification, will be updated in future
{
let mut guard = self.state.latest_seen_block.write().await;
if l2_block.block_id > *guard {
*guard = l2_block.block_id;
}
}
// Sending data into sequencer, may need to be expanded.
let message = Message::L2BlockFinalized {
l2_block_height: l2_block.block_id,
};
let status = self.send_message_to_sequencer(message.clone()).await?;
info!("Sent message {message:#?} to sequencer; status {status:#?}");
}
}
}
// Refetch stream after delay
tokio::time::sleep(std::time::Duration::from_millis(
self.config.resubscribe_interval_millis,
))
.await;
}
}
pub async fn send_message_to_sequencer(
&self,
message: Message,
) -> Result<PostIndexerMessageResponse> {
Ok(self.sequencer_client.post_indexer_message(message).await?)
}
}
fn parse_blocks(
block_txs: impl Iterator<Item = SignedMantleTx>,
decoded_channel_id: &ChannelId,
) -> impl Iterator<Item = HashableBlockData> {
block_txs.flat_map(|tx| {
tx.mantle_tx.ops.into_iter().filter_map(|op| match op {
Op::ChannelInscribe(InscriptionOp {
channel_id,
inscription,
..
}) if channel_id == *decoded_channel_id => {
borsh::from_slice::<HashableBlockData>(&inscription).ok()
}
_ => None,
})
})
}

View File

@ -1,9 +0,0 @@
services:
indexer_service:
image: lssa/indexer_service
build:
context: ..
dockerfile: indexer_service/Dockerfile
container_name: indexer_service
ports:
- "8779:8779"

View File

@ -1,4 +0,0 @@
pub mod service;
#[cfg(feature = "mock-responses")]
pub mod mock_service;

View File

@ -1,49 +0,0 @@
use indexer_service_protocol::{Account, AccountId, Block, BlockId, Hash, Transaction};
use jsonrpsee::{core::SubscriptionResult, types::ErrorObjectOwned};
pub struct IndexerService;
// `async_trait` is required by `jsonrpsee`
#[async_trait::async_trait]
impl indexer_service_rpc::RpcServer for IndexerService {
async fn subscribe_to_blocks(
&self,
_subscription_sink: jsonrpsee::PendingSubscriptionSink,
_from: BlockId,
) -> SubscriptionResult {
todo!()
}
async fn get_block_by_id(&self, _block_id: BlockId) -> Result<Block, ErrorObjectOwned> {
todo!()
}
async fn get_block_by_hash(&self, _block_hash: Hash) -> Result<Block, ErrorObjectOwned> {
todo!()
}
async fn get_last_block_id(&self) -> Result<BlockId, ErrorObjectOwned> {
todo!()
}
async fn get_account(&self, _account_id: AccountId) -> Result<Account, ErrorObjectOwned> {
todo!()
}
async fn get_transaction(&self, _tx_hash: Hash) -> Result<Transaction, ErrorObjectOwned> {
todo!()
}
async fn get_blocks(&self, _offset: u32, _limit: u32) -> Result<Vec<Block>, ErrorObjectOwned> {
todo!()
}
async fn get_transactions_by_account(
&self,
_account_id: AccountId,
_limit: u32,
_offset: u32,
) -> Result<Vec<Transaction>, ErrorObjectOwned> {
todo!()
}
}

View File

@ -7,24 +7,24 @@ license = { workspace = true }
[dependencies]
nssa_core = { workspace = true, features = ["host"] }
nssa.workspace = true
sequencer_core = { workspace = true, features = ["testnet"] }
sequencer_core = { workspace = true, features = ["default", "testnet"] }
sequencer_runner.workspace = true
wallet.workspace = true
common.workspace = true
key_protocol.workspace = true
indexer_core.workspace = true
wallet-ffi.workspace = true
serde_json.workspace = true
token_core.workspace = true
indexer_service.workspace = true
url.workspace = true
anyhow.workspace = true
env_logger.workspace = true
log.workspace = true
actix-web.workspace = true
serde_json.workspace = true
base64.workspace = true
tokio = { workspace = true, features = ["rt-multi-thread", "macros"] }
hex.workspace = true
tempfile.workspace = true
borsh.workspace = true
futures.workspace = true
testcontainers = { version = "0.27.0", features = ["docker-compose"] }

View File

@ -1,17 +0,0 @@
{
"bedrock_client_config": {
"addr": "http://127.0.0.1:8080",
"auth": {
"username": "user"
}
},
"channel_id": "0101010101010101010101010101010101010101010101010101010101010101",
"backoff": {
"max_retries": 10,
"start_delay_millis": 100
},
"resubscribe_interval_millis": 1000,
"sequencer_client_config": {
"addr": "will_be_replaced_in_runtime"
}
}

View File

@ -1,159 +0,0 @@
{
"home": "",
"override_rust_log": null,
"genesis_id": 1,
"is_genesis_random": true,
"max_num_tx_in_block": 20,
"mempool_max_size": 10000,
"block_create_timeout_millis": 10000,
"retry_pending_blocks_timeout_millis": 240000,
"port": 0,
"initial_accounts": [
{
"account_id": "6iArKUXxhUJqS7kCaPNhwMWt3ro71PDyBj7jwAyE2VQV",
"balance": 10000
},
{
"account_id": "7wHg9sbJwc6h3NP1S9bekfAzB8CHifEcxKswCKUt3YQo",
"balance": 20000
}
],
"initial_commitments": [
{
"npk": [
63,
202,
178,
231,
183,
82,
237,
212,
216,
221,
215,
255,
153,
101,
177,
161,
254,
210,
128,
122,
54,
190,
230,
151,
183,
64,
225,
229,
113,
1,
228,
97
],
"account": {
"program_owner": [
0,
0,
0,
0,
0,
0,
0,
0
],
"balance": 10000,
"data": [],
"nonce": 0
}
},
{
"npk": [
192,
251,
166,
243,
167,
236,
84,
249,
35,
136,
130,
172,
219,
225,
161,
139,
229,
89,
243,
125,
194,
213,
209,
30,
23,
174,
100,
244,
124,
74,
140,
47
],
"account": {
"program_owner": [
0,
0,
0,
0,
0,
0,
0,
0
],
"balance": 20000,
"data": [],
"nonce": 0
}
}
],
"signing_key": [
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37
]
}

View File

@ -1,547 +0,0 @@
{
"override_rust_log": null,
"sequencer_addr": "",
"seq_poll_timeout_millis": 12000,
"seq_tx_poll_max_blocks": 5,
"seq_poll_max_retries": 5,
"seq_block_poll_max_amount": 100,
"basic_auth": null,
"initial_accounts": [
{
"Public": {
"account_id": "6iArKUXxhUJqS7kCaPNhwMWt3ro71PDyBj7jwAyE2VQV",
"pub_sign_key": [
16,
162,
106,
154,
236,
125,
52,
184,
35,
100,
238,
174,
69,
197,
41,
77,
187,
10,
118,
75,
0,
11,
148,
238,
185,
181,
133,
17,
220,
72,
124,
77
]
}
},
{
"Public": {
"account_id": "7wHg9sbJwc6h3NP1S9bekfAzB8CHifEcxKswCKUt3YQo",
"pub_sign_key": [
113,
121,
64,
177,
204,
85,
229,
214,
178,
6,
109,
191,
29,
154,
63,
38,
242,
18,
244,
219,
8,
208,
35,
136,
23,
127,
207,
237,
216,
169,
190,
27
]
}
},
{
"Private": {
"account_id": "3oCG8gqdKLMegw4rRfyaMQvuPHpcASt7xwttsmnZLSkw",
"account": {
"program_owner": [
0,
0,
0,
0,
0,
0,
0,
0
],
"balance": 10000,
"data": [],
"nonce": 0
},
"key_chain": {
"secret_spending_key": [
251,
82,
235,
1,
146,
96,
30,
81,
162,
234,
33,
15,
123,
129,
116,
0,
84,
136,
176,
70,
190,
224,
161,
54,
134,
142,
154,
1,
18,
251,
242,
189
],
"private_key_holder": {
"nullifier_secret_key": [
29,
250,
10,
187,
35,
123,
180,
250,
246,
97,
216,
153,
44,
156,
16,
93,
241,
26,
174,
219,
72,
84,
34,
247,
112,
101,
217,
243,
189,
173,
75,
20
],
"incoming_viewing_secret_key": [
251,
201,
22,
154,
100,
165,
218,
108,
163,
190,
135,
91,
145,
84,
69,
241,
46,
117,
217,
110,
197,
248,
91,
193,
14,
104,
88,
103,
67,
153,
182,
158
],
"outgoing_viewing_secret_key": [
25,
67,
121,
76,
175,
100,
30,
198,
105,
123,
49,
169,
75,
178,
75,
210,
100,
143,
210,
243,
228,
243,
21,
18,
36,
84,
164,
186,
139,
113,
214,
12
]
},
"nullifer_public_key": [
63,
202,
178,
231,
183,
82,
237,
212,
216,
221,
215,
255,
153,
101,
177,
161,
254,
210,
128,
122,
54,
190,
230,
151,
183,
64,
225,
229,
113,
1,
228,
97
],
"incoming_viewing_public_key": [
3,
235,
139,
131,
237,
177,
122,
189,
6,
177,
167,
178,
202,
117,
246,
58,
28,
65,
132,
79,
220,
139,
119,
243,
187,
160,
212,
121,
61,
247,
116,
72,
205
]
}
}
},
{
"Private": {
"account_id": "AKTcXgJ1xoynta1Ec7y6Jso1z1JQtHqd7aPQ1h9er6xX",
"account": {
"program_owner": [
0,
0,
0,
0,
0,
0,
0,
0
],
"balance": 20000,
"data": [],
"nonce": 0
},
"key_chain": {
"secret_spending_key": [
238,
171,
241,
69,
111,
217,
85,
64,
19,
82,
18,
189,
32,
91,
78,
175,
107,
7,
109,
60,
52,
44,
243,
230,
72,
244,
192,
92,
137,
33,
118,
254
],
"private_key_holder": {
"nullifier_secret_key": [
25,
211,
215,
119,
57,
223,
247,
37,
245,
144,
122,
29,
118,
245,
83,
228,
23,
9,
101,
120,
88,
33,
238,
207,
128,
61,
110,
2,
89,
62,
164,
13
],
"incoming_viewing_secret_key": [
193,
181,
14,
196,
142,
84,
15,
65,
128,
101,
70,
196,
241,
47,
130,
221,
23,
146,
161,
237,
221,
40,
19,
126,
59,
15,
169,
236,
25,
105,
104,
231
],
"outgoing_viewing_secret_key": [
20,
170,
220,
108,
41,
23,
155,
217,
247,
190,
175,
168,
247,
34,
105,
134,
114,
74,
104,
91,
211,
62,
126,
13,
130,
100,
241,
214,
250,
236,
38,
150
]
},
"nullifer_public_key": [
192,
251,
166,
243,
167,
236,
84,
249,
35,
136,
130,
172,
219,
225,
161,
139,
229,
89,
243,
125,
194,
213,
209,
30,
23,
174,
100,
244,
124,
74,
140,
47
],
"incoming_viewing_public_key": [
2,
181,
98,
93,
216,
241,
241,
110,
58,
198,
119,
174,
250,
184,
1,
204,
200,
173,
44,
238,
37,
247,
170,
156,
100,
254,
116,
242,
28,
183,
187,
77,
255
]
}
}
}
]
}

View File

@ -0,0 +1,255 @@
use std::{net::SocketAddr, path::PathBuf};
use anyhow::{Context, Result};
use indexer_service::{BackoffConfig, BedrockClientConfig, ChannelId, IndexerConfig};
use key_protocol::key_management::KeyChain;
use nssa::{Account, AccountId, PrivateKey, PublicKey};
use nssa_core::{account::Data, program::DEFAULT_PROGRAM_ID};
use sequencer_core::config::{
AccountInitialData, BedrockConfig, CommitmentsInitialData, SequencerConfig,
};
use url::Url;
use wallet::config::{
InitialAccountData, InitialAccountDataPrivate, InitialAccountDataPublic, WalletConfig,
};
/// Builds the indexer-service config used by integration tests, pointing it at
/// the Bedrock node at `bedrock_addr`.
///
/// # Errors
/// Fails if the socket address cannot be converted into a URL.
pub fn indexer_config(bedrock_addr: SocketAddr) -> Result<IndexerConfig> {
    let bedrock_url = addr_to_url(UrlProtocol::Http, bedrock_addr)
        .context("Failed to convert bedrock addr to URL")?;
    let bedrock_client_config = BedrockClientConfig {
        addr: bedrock_url,
        auth: None,
        backoff: BackoffConfig {
            start_delay_millis: 100,
            max_retries: 10,
        },
    };
    Ok(IndexerConfig {
        resubscribe_interval_millis: 1000,
        bedrock_client_config,
        channel_id: bedrock_channel_id(),
    })
}
/// Sequencer config options available for custom changes in integration tests.
// Derives added so tests can log, compare, and freely copy these options;
// all fields are small `Copy` integers, so this is purely additive.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct SequencerPartialConfig {
    /// Maximum number of transactions packed into a single block.
    pub max_num_tx_in_block: usize,
    /// Maximum number of pending transactions held in the mempool.
    pub mempool_max_size: usize,
    /// How long the sequencer waits before closing a block, in milliseconds.
    pub block_create_timeout_millis: u64,
}

impl Default for SequencerPartialConfig {
    fn default() -> Self {
        Self {
            max_num_tx_in_block: 20,
            mempool_max_size: 10_000,
            block_create_timeout_millis: 10_000,
        }
    }
}
/// Builds a full `SequencerConfig` for integration tests from the tunable
/// `partial` options, a (temporary) `home` directory, the Bedrock and indexer
/// service addresses, and the shared initial account data.
///
/// # Errors
/// Fails if either socket address cannot be converted into a URL.
pub fn sequencer_config(
    partial: SequencerPartialConfig,
    home: PathBuf,
    bedrock_addr: SocketAddr,
    indexer_addr: SocketAddr,
    initial_data: &InitialData,
) -> Result<SequencerConfig> {
    let node_url = addr_to_url(UrlProtocol::Http, bedrock_addr)
        .context("Failed to convert bedrock addr to URL")?;
    let indexer_rpc_url = addr_to_url(UrlProtocol::Ws, indexer_addr)
        .context("Failed to convert indexer addr to URL")?;

    let bedrock_config = BedrockConfig {
        backoff: BackoffConfig {
            start_delay_millis: 100,
            max_retries: 5,
        },
        channel_id: bedrock_channel_id(),
        node_url,
        auth: None,
    };

    Ok(SequencerConfig {
        home,
        override_rust_log: None,
        genesis_id: 1,
        is_genesis_random: true,
        max_num_tx_in_block: partial.max_num_tx_in_block,
        mempool_max_size: partial.mempool_max_size,
        block_create_timeout_millis: partial.block_create_timeout_millis,
        retry_pending_blocks_timeout_millis: 240_000,
        // Port 0 lets the OS pick a free port, so parallel tests don't clash.
        port: 0,
        initial_accounts: initial_data.sequencer_initial_accounts(),
        initial_commitments: initial_data.sequencer_initial_commitments(),
        signing_key: [37; 32],
        bedrock_config,
        indexer_rpc_url,
    })
}
/// Builds a wallet config for integration tests that talks to the sequencer at
/// `sequencer_addr` and starts with the accounts from `initial_data`.
///
/// # Errors
/// Fails if the sequencer socket address cannot be converted into a URL.
pub fn wallet_config(
    sequencer_addr: SocketAddr,
    initial_data: &InitialData,
) -> Result<WalletConfig> {
    let sequencer_url = addr_to_url(UrlProtocol::Http, sequencer_addr)
        .context("Failed to convert sequencer addr to URL")?;
    let config = WalletConfig {
        override_rust_log: None,
        sequencer_addr: sequencer_url,
        seq_poll_timeout_millis: 30_000,
        seq_tx_poll_max_blocks: 15,
        seq_poll_max_retries: 10,
        seq_block_poll_max_amount: 100,
        initial_accounts: initial_data.wallet_initial_accounts(),
        basic_auth: None,
    };
    Ok(config)
}
/// Initial account data shared by the sequencer and wallet test configs so
/// both sides agree on the starting chain state.
pub struct InitialData {
    // Public accounts as (signing private key, initial balance) pairs.
    pub public_accounts: Vec<(PrivateKey, u128)>,
    // Private accounts as (key chain, initial account state) pairs.
    pub private_accounts: Vec<(KeyChain, Account)>,
}
impl InitialData {
    /// Generates two public accounts (10k / 20k balance) and two private
    /// accounts (10k / 20k balance) with fresh OS-random keys.
    ///
    /// Within each pair, the lower-balance account always gets the smaller
    /// `AccountId` (keys are swapped if needed), so tests can rely on a
    /// deterministic account-id ordering despite random key generation.
    pub fn with_two_public_and_two_private_initialized_accounts() -> Self {
        let mut public_alice_private_key = PrivateKey::new_os_random();
        let mut public_alice_public_key =
            PublicKey::new_from_private_key(&public_alice_private_key);
        let mut public_alice_account_id = AccountId::from(&public_alice_public_key);
        let mut public_bob_private_key = PrivateKey::new_os_random();
        let mut public_bob_public_key = PublicKey::new_from_private_key(&public_bob_private_key);
        let mut public_bob_account_id = AccountId::from(&public_bob_public_key);
        // Ensure consistent ordering: all three pieces (private key, public
        // key, account id) must be swapped together to stay consistent.
        if public_alice_account_id > public_bob_account_id {
            std::mem::swap(&mut public_alice_private_key, &mut public_bob_private_key);
            std::mem::swap(&mut public_alice_public_key, &mut public_bob_public_key);
            std::mem::swap(&mut public_alice_account_id, &mut public_bob_account_id);
        }
        let mut private_charlie_key_chain = KeyChain::new_os_random();
        let mut private_charlie_account_id =
            AccountId::from(&private_charlie_key_chain.nullifer_public_key);
        let mut private_david_key_chain = KeyChain::new_os_random();
        let mut private_david_account_id =
            AccountId::from(&private_david_key_chain.nullifer_public_key);
        // Ensure consistent ordering (same swap discipline as above).
        if private_charlie_account_id > private_david_account_id {
            std::mem::swap(&mut private_charlie_key_chain, &mut private_david_key_chain);
            std::mem::swap(
                &mut private_charlie_account_id,
                &mut private_david_account_id,
            );
        }
        Self {
            public_accounts: vec![
                (public_alice_private_key, 10_000),
                (public_bob_private_key, 20_000),
            ],
            private_accounts: vec![
                (
                    private_charlie_key_chain,
                    Account {
                        balance: 10_000,
                        data: Data::default(),
                        program_owner: DEFAULT_PROGRAM_ID,
                        nonce: 0,
                    },
                ),
                (
                    private_david_key_chain,
                    Account {
                        balance: 20_000,
                        data: Data::default(),
                        program_owner: DEFAULT_PROGRAM_ID,
                        nonce: 0,
                    },
                ),
            ],
        }
    }
    // Projects the public accounts into the sequencer's genesis account list
    // (account id + balance; keys stay with the wallet).
    fn sequencer_initial_accounts(&self) -> Vec<AccountInitialData> {
        self.public_accounts
            .iter()
            .map(|(priv_key, balance)| {
                let pub_key = PublicKey::new_from_private_key(priv_key);
                let account_id = AccountId::from(&pub_key);
                AccountInitialData {
                    account_id,
                    balance: *balance,
                }
            })
            .collect()
    }
    // Projects the private accounts into the sequencer's genesis commitments
    // (nullifier public key + account state; secret keys stay with the wallet).
    fn sequencer_initial_commitments(&self) -> Vec<CommitmentsInitialData> {
        self.private_accounts
            .iter()
            .map(|(key_chain, account)| CommitmentsInitialData {
                npk: key_chain.nullifer_public_key.clone(),
                account: account.clone(),
            })
            .collect()
    }
    // Builds the wallet's initial account list: public accounts first
    // (with signing keys), then private accounts (with full key chains).
    fn wallet_initial_accounts(&self) -> Vec<InitialAccountData> {
        self.public_accounts
            .iter()
            .map(|(priv_key, _)| {
                let pub_key = PublicKey::new_from_private_key(priv_key);
                let account_id = AccountId::from(&pub_key);
                InitialAccountData::Public(InitialAccountDataPublic {
                    account_id,
                    pub_sign_key: priv_key.clone(),
                })
            })
            .chain(self.private_accounts.iter().map(|(key_chain, account)| {
                let account_id = AccountId::from(&key_chain.nullifer_public_key);
                InitialAccountData::Private(InitialAccountDataPrivate {
                    account_id,
                    account: account.clone(),
                    key_chain: key_chain.clone(),
                })
            }))
            .collect()
    }
}
/// URL scheme used when converting a socket address into a client URL.
pub enum UrlProtocol {
    Http,
    Ws,
}

impl std::fmt::Display for UrlProtocol {
    // Writes the lowercase scheme name (`http` or `ws`).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let scheme = match self {
            Self::Http => "http",
            Self::Ws => "ws",
        };
        f.write_str(scheme)
    }
}
/// Converts a bound socket address into a client-facing URL with the given
/// scheme.
///
/// A server bound to the unspecified address (0.0.0.0, e.g. after asking the
/// OS for a free port) is reachable by clients via loopback, so unspecified
/// IPs are rewritten to 127.0.0.1 while keeping the actual port.
///
/// # Errors
/// Fails if the assembled string is not a valid URL.
pub fn addr_to_url(protocol: UrlProtocol, addr: SocketAddr) -> Result<Url> {
    let host_and_port = if addr.ip().is_unspecified() {
        format!("127.0.0.1:{}", addr.port())
    } else {
        addr.to_string()
    };
    format!("{protocol}://{host_and_port}")
        .parse()
        .map_err(Into::into)
}
// Channel ID shared by the sequencer and indexer test configs:
// 32 bytes alternating 0x00/0x01 (even indices 0, odd indices 1).
fn bedrock_channel_id() -> ChannelId {
    let channel_id: [u8; 32] = std::array::from_fn(|i| (i % 2) as u8);
    ChannelId::from(channel_id)
}

View File

@ -2,173 +2,180 @@
use std::{net::SocketAddr, path::PathBuf, sync::LazyLock};
use actix_web::dev::ServerHandle;
use anyhow::{Context, Result};
use anyhow::{Context, Result, bail};
use base64::{Engine, engine::general_purpose::STANDARD as BASE64};
use common::{
sequencer_client::SequencerClient,
transaction::{EncodedTransaction, NSSATransaction},
};
use common::{HashType, sequencer_client::SequencerClient, transaction::NSSATransaction};
use futures::FutureExt as _;
use indexer_core::{IndexerCore, config::IndexerConfig};
use log::debug;
use nssa::PrivacyPreservingTransaction;
use indexer_service::IndexerHandle;
use log::{debug, error, warn};
use nssa::{AccountId, PrivacyPreservingTransaction};
use nssa_core::Commitment;
use sequencer_core::config::SequencerConfig;
use sequencer_runner::SequencerHandle;
use tempfile::TempDir;
use tokio::task::JoinHandle;
use url::Url;
use testcontainers::compose::DockerCompose;
use wallet::{WalletCore, config::WalletConfigOverrides};
pub mod config;
// TODO: Remove this and control time from tests
pub const TIME_TO_WAIT_FOR_BLOCK_SECONDS: u64 = 12;
pub const ACC_SENDER: &str = "6iArKUXxhUJqS7kCaPNhwMWt3ro71PDyBj7jwAyE2VQV";
pub const ACC_RECEIVER: &str = "7wHg9sbJwc6h3NP1S9bekfAzB8CHifEcxKswCKUt3YQo";
pub const ACC_SENDER_PRIVATE: &str = "3oCG8gqdKLMegw4rRfyaMQvuPHpcASt7xwttsmnZLSkw";
pub const ACC_RECEIVER_PRIVATE: &str = "AKTcXgJ1xoynta1Ec7y6Jso1z1JQtHqd7aPQ1h9er6xX";
pub const NSSA_PROGRAM_FOR_TEST_DATA_CHANGER: &str = "data_changer.bin";
const BEDROCK_SERVICE_WITH_OPEN_PORT: &str = "logos-blockchain-node-0";
const BEDROCK_SERVICE_PORT: u16 = 18080;
static LOGGER: LazyLock<()> = LazyLock::new(env_logger::init);
/// Test context which sets up a sequencer and a wallet for integration tests.
///
/// It's memory and logically safe to create multiple instances of this struct in parallel tests,
/// as each instance uses its own temporary directories for sequencer and wallet data.
// NOTE: Order of fields is important for proper drop order.
pub struct TestContext {
sequencer_server_handle: ServerHandle,
sequencer_loop_handle: JoinHandle<Result<()>>,
sequencer_retry_pending_blocks_handle: JoinHandle<Result<()>>,
indexer_loop_handle: Option<JoinHandle<Result<()>>>,
sequencer_client: SequencerClient,
wallet: WalletCore,
wallet_password: String,
sequencer_handle: SequencerHandle,
indexer_handle: IndexerHandle,
bedrock_compose: DockerCompose,
_temp_sequencer_dir: TempDir,
_temp_wallet_dir: TempDir,
}
impl TestContext {
/// Create new test context in detached mode. Default.
/// Create new test context.
pub async fn new() -> Result<Self> {
let manifest_dir = env!("CARGO_MANIFEST_DIR");
let sequencer_config_path =
PathBuf::from(manifest_dir).join("configs/sequencer/detached/sequencer_config.json");
let sequencer_config = SequencerConfig::from_path(&sequencer_config_path)
.context("Failed to create sequencer config from file")?;
Self::new_with_sequencer_and_maybe_indexer_configs(sequencer_config, None).await
Self::builder().build().await
}
/// Create new test context in local bedrock node attached mode.
pub async fn new_bedrock_local_attached() -> Result<Self> {
let manifest_dir = env!("CARGO_MANIFEST_DIR");
let sequencer_config_path = PathBuf::from(manifest_dir)
.join("configs/sequencer/bedrock_local_attached/sequencer_config.json");
let sequencer_config = SequencerConfig::from_path(&sequencer_config_path)
.context("Failed to create sequencer config from file")?;
let indexer_config_path =
PathBuf::from(manifest_dir).join("configs/indexer/indexer_config.json");
let indexer_config = IndexerConfig::from_path(&indexer_config_path)
.context("Failed to create indexer config from file")?;
Self::new_with_sequencer_and_maybe_indexer_configs(sequencer_config, Some(indexer_config))
.await
pub fn builder() -> TestContextBuilder {
TestContextBuilder::new()
}
/// Create new test context with custom sequencer config and maybe indexer config.
///
/// `home` and `port` fields of the provided config will be overridden to meet tests parallelism
/// requirements.
pub async fn new_with_sequencer_and_maybe_indexer_configs(
sequencer_config: SequencerConfig,
indexer_config: Option<IndexerConfig>,
async fn new_configured(
sequencer_partial_config: config::SequencerPartialConfig,
initial_data: config::InitialData,
) -> Result<Self> {
// Ensure logger is initialized only once
*LOGGER;
debug!("Test context setup");
let (
sequencer_server_handle,
sequencer_addr,
sequencer_loop_handle,
sequencer_retry_pending_blocks_handle,
temp_sequencer_dir,
) = Self::setup_sequencer(sequencer_config)
.await
.context("Failed to setup sequencer")?;
let (bedrock_compose, bedrock_addr) = Self::setup_bedrock_node().await?;
// Convert 0.0.0.0 to 127.0.0.1 for client connections
// When binding to port 0, the server binds to 0.0.0.0:<random_port>
// but clients need to connect to 127.0.0.1:<port> to work reliably
let sequencer_addr = if sequencer_addr.ip().is_unspecified() {
format!("http://127.0.0.1:{}", sequencer_addr.port())
} else {
format!("http://{sequencer_addr}")
let indexer_handle = Self::setup_indexer(bedrock_addr)
.await
.context("Failed to setup Indexer")?;
let (sequencer_handle, temp_sequencer_dir) = Self::setup_sequencer(
sequencer_partial_config,
bedrock_addr,
indexer_handle.addr(),
&initial_data,
)
.await
.context("Failed to setup Sequencer")?;
let (wallet, temp_wallet_dir, wallet_password) =
Self::setup_wallet(sequencer_handle.addr(), &initial_data)
.await
.context("Failed to setup wallet")?;
let sequencer_url = config::addr_to_url(config::UrlProtocol::Http, sequencer_handle.addr())
.context("Failed to convert sequencer addr to URL")?;
let sequencer_client =
SequencerClient::new(sequencer_url).context("Failed to create sequencer client")?;
Ok(Self {
sequencer_client,
wallet,
wallet_password,
bedrock_compose,
sequencer_handle,
indexer_handle,
_temp_sequencer_dir: temp_sequencer_dir,
_temp_wallet_dir: temp_wallet_dir,
})
}
async fn setup_bedrock_node() -> Result<(DockerCompose, SocketAddr)> {
let manifest_dir = env!("CARGO_MANIFEST_DIR");
let bedrock_compose_path =
PathBuf::from(manifest_dir).join("../bedrock/docker-compose.yml");
let mut compose = DockerCompose::with_auto_client(&[bedrock_compose_path])
.await
.context("Failed to setup docker compose for Bedrock")?;
async fn up_and_retrieve_port(compose: &mut DockerCompose) -> Result<u16> {
compose
.up()
.await
.context("Failed to bring up Bedrock services")?;
let container = compose
.service(BEDROCK_SERVICE_WITH_OPEN_PORT)
.with_context(|| {
format!(
"Failed to get Bedrock service container `{BEDROCK_SERVICE_WITH_OPEN_PORT}`"
)
})?;
let ports = container.ports().await.with_context(|| {
format!(
"Failed to get ports for Bedrock service container `{}`",
container.id()
)
})?;
ports
.map_to_host_port_ipv4(BEDROCK_SERVICE_PORT)
.with_context(|| {
format!(
"Failed to retrieve host port of {BEDROCK_SERVICE_PORT} container \
port for container `{}`, existing ports: {ports:?}",
container.id()
)
})
}
let mut port = None;
let mut attempt = 0;
let max_attempts = 5;
while port.is_none() && attempt < max_attempts {
attempt += 1;
match up_and_retrieve_port(&mut compose).await {
Ok(p) => {
port = Some(p);
}
Err(err) => {
warn!(
"Failed to bring up Bedrock services: {err:?}, attempt {attempt}/{max_attempts}"
);
}
}
}
let Some(port) = port else {
bail!("Failed to bring up Bedrock services after {max_attempts} attempts");
};
let (wallet, temp_wallet_dir, wallet_password) = Self::setup_wallet(sequencer_addr.clone())
let addr = SocketAddr::from(([127, 0, 0, 1], port));
Ok((compose, addr))
}
async fn setup_indexer(bedrock_addr: SocketAddr) -> Result<IndexerHandle> {
let indexer_config =
config::indexer_config(bedrock_addr).context("Failed to create Indexer config")?;
indexer_service::run_server(indexer_config, 0)
.await
.context("Failed to setup wallet")?;
let sequencer_client = SequencerClient::new(
Url::parse(&sequencer_addr).context("Failed to parse sequencer addr")?,
)
.context("Failed to create sequencer client")?;
if let Some(mut indexer_config) = indexer_config {
indexer_config.sequencer_client_config.addr =
Url::parse(&sequencer_addr).context("Failed to parse sequencer addr")?;
let indexer_core = IndexerCore::new(indexer_config)?;
let indexer_loop_handle = Some(tokio::spawn(async move {
indexer_core.subscribe_parse_block_stream().await
}));
Ok(Self {
sequencer_server_handle,
sequencer_loop_handle,
sequencer_retry_pending_blocks_handle,
indexer_loop_handle,
sequencer_client,
wallet,
_temp_sequencer_dir: temp_sequencer_dir,
_temp_wallet_dir: temp_wallet_dir,
wallet_password,
})
} else {
Ok(Self {
sequencer_server_handle,
sequencer_loop_handle,
sequencer_retry_pending_blocks_handle,
indexer_loop_handle: None,
sequencer_client,
wallet,
_temp_sequencer_dir: temp_sequencer_dir,
_temp_wallet_dir: temp_wallet_dir,
wallet_password,
})
}
.context("Failed to run Indexer Service")
}
async fn setup_sequencer(
mut config: SequencerConfig,
) -> Result<(
ServerHandle,
SocketAddr,
JoinHandle<Result<()>>,
JoinHandle<Result<()>>,
TempDir,
)> {
partial: config::SequencerPartialConfig,
bedrock_addr: SocketAddr,
indexer_addr: SocketAddr,
initial_data: &config::InitialData,
) -> Result<(SequencerHandle, TempDir)> {
let temp_sequencer_dir =
tempfile::tempdir().context("Failed to create temp dir for sequencer home")?;
@ -176,43 +183,39 @@ impl TestContext {
"Using temp sequencer home at {:?}",
temp_sequencer_dir.path()
);
config.home = temp_sequencer_dir.path().to_owned();
// Setting port to 0 lets the OS choose a free port for us
config.port = 0;
let (
sequencer_server_handle,
sequencer_addr,
sequencer_loop_handle,
sequencer_retry_pending_blocks_handle,
) = sequencer_runner::startup_sequencer(config).await?;
let config = config::sequencer_config(
partial,
temp_sequencer_dir.path().to_owned(),
bedrock_addr,
indexer_addr,
initial_data,
)
.context("Failed to create Sequencer config")?;
Ok((
sequencer_server_handle,
sequencer_addr,
sequencer_loop_handle,
sequencer_retry_pending_blocks_handle,
temp_sequencer_dir,
))
let sequencer_handle = sequencer_runner::startup_sequencer(config).await?;
Ok((sequencer_handle, temp_sequencer_dir))
}
async fn setup_wallet(sequencer_addr: String) -> Result<(WalletCore, TempDir, String)> {
let manifest_dir = env!("CARGO_MANIFEST_DIR");
let wallet_config_source_path =
PathBuf::from(manifest_dir).join("configs/wallet/wallet_config.json");
async fn setup_wallet(
sequencer_addr: SocketAddr,
initial_data: &config::InitialData,
) -> Result<(WalletCore, TempDir, String)> {
let config = config::wallet_config(sequencer_addr, initial_data)
.context("Failed to create Wallet config")?;
let config_serialized =
serde_json::to_string_pretty(&config).context("Failed to serialize Wallet config")?;
let temp_wallet_dir =
tempfile::tempdir().context("Failed to create temp dir for wallet home")?;
let config_path = temp_wallet_dir.path().join("wallet_config.json");
std::fs::copy(&wallet_config_source_path, &config_path)
.context("Failed to copy wallet config to temp dir")?;
std::fs::write(&config_path, config_serialized)
.context("Failed to write wallet config in temp dir")?;
let storage_path = temp_wallet_dir.path().join("storage.json");
let config_overrides = WalletConfigOverrides {
sequencer_addr: Some(sequencer_addr),
..Default::default()
};
let config_overrides = WalletConfigOverrides::default();
let wallet_password = "test_pass".to_owned();
let wallet = WalletCore::new_init_storage(
@ -248,32 +251,71 @@ impl TestContext {
pub fn sequencer_client(&self) -> &SequencerClient {
&self.sequencer_client
}
/// Get existing public account IDs in the wallet.
pub fn existing_public_accounts(&self) -> Vec<AccountId> {
self.wallet
.storage()
.user_data
.public_account_ids()
.collect()
}
/// Get existing private account IDs in the wallet.
pub fn existing_private_accounts(&self) -> Vec<AccountId> {
self.wallet
.storage()
.user_data
.private_account_ids()
.collect()
}
}
impl Drop for TestContext {
fn drop(&mut self) {
debug!("Test context cleanup");
let Self {
sequencer_server_handle,
sequencer_loop_handle,
sequencer_retry_pending_blocks_handle,
indexer_loop_handle,
sequencer_handle,
indexer_handle,
bedrock_compose,
_temp_sequencer_dir: _,
_temp_wallet_dir: _,
sequencer_client: _,
wallet: _,
_temp_sequencer_dir,
_temp_wallet_dir,
wallet_password: _,
} = self;
sequencer_loop_handle.abort();
sequencer_retry_pending_blocks_handle.abort();
if let Some(indexer_loop_handle) = indexer_loop_handle {
indexer_loop_handle.abort();
if sequencer_handle.is_finished() {
let Err(err) = self
.sequencer_handle
.run_forever()
.now_or_never()
.expect("Future is finished and should be ready");
error!(
"Sequencer handle has unexpectedly finished before TestContext drop with error: {err:#}"
);
}
// Can't wait here as Drop can't be async, but anyway stop signal should be sent
sequencer_server_handle.stop(true).now_or_never();
if indexer_handle.is_stopped() {
error!("Indexer handle has unexpectedly stopped before TestContext drop");
}
let container = bedrock_compose
.service(BEDROCK_SERVICE_WITH_OPEN_PORT)
.unwrap_or_else(|| {
panic!("Failed to get Bedrock service container `{BEDROCK_SERVICE_WITH_OPEN_PORT}`")
});
let output = std::process::Command::new("docker")
.args(["inspect", "-f", "{{.State.Running}}", container.id()])
.output()
.expect("Failed to execute docker inspect command to check if Bedrock container is still running");
let stdout = String::from_utf8(output.stdout)
.expect("Failed to parse docker inspect output as String");
if stdout.trim() != "true" {
error!(
"Bedrock container `{}` is not running during TestContext drop, docker inspect output: {stdout}",
container.id()
);
}
}
}
@ -291,31 +333,65 @@ impl BlockingTestContext {
}
}
pub fn format_public_account_id(account_id: &str) -> String {
pub struct TestContextBuilder {
initial_data: Option<config::InitialData>,
sequencer_partial_config: Option<config::SequencerPartialConfig>,
}
impl TestContextBuilder {
fn new() -> Self {
Self {
initial_data: None,
sequencer_partial_config: None,
}
}
pub fn with_initial_data(mut self, initial_data: config::InitialData) -> Self {
self.initial_data = Some(initial_data);
self
}
pub fn with_sequencer_partial_config(
mut self,
sequencer_partial_config: config::SequencerPartialConfig,
) -> Self {
self.sequencer_partial_config = Some(sequencer_partial_config);
self
}
pub async fn build(self) -> Result<TestContext> {
TestContext::new_configured(
self.sequencer_partial_config.unwrap_or_default(),
self.initial_data.unwrap_or_else(|| {
config::InitialData::with_two_public_and_two_private_initialized_accounts()
}),
)
.await
}
}
pub fn format_public_account_id(account_id: AccountId) -> String {
format!("Public/{account_id}")
}
pub fn format_private_account_id(account_id: &str) -> String {
pub fn format_private_account_id(account_id: AccountId) -> String {
format!("Private/{account_id}")
}
pub async fn fetch_privacy_preserving_tx(
seq_client: &SequencerClient,
tx_hash: String,
tx_hash: HashType,
) -> PrivacyPreservingTransaction {
let transaction_encoded = seq_client
.get_transaction_by_hash(tx_hash.clone())
.get_transaction_by_hash(tx_hash)
.await
.unwrap()
.transaction
.unwrap();
let tx_base64_decode = BASE64.decode(transaction_encoded).unwrap();
match NSSATransaction::try_from(
&borsh::from_slice::<EncodedTransaction>(&tx_base64_decode).unwrap(),
)
.unwrap()
{
let tx_bytes = BASE64.decode(transaction_encoded).unwrap();
let tx = borsh::from_slice(&tx_bytes).unwrap();
match tx {
NSSATransaction::PrivacyPreserving(privacy_preserving_transaction) => {
privacy_preserving_transaction
}
@ -332,20 +408,3 @@ pub async fn verify_commitment_is_in_state(
Ok(Some(_))
)
}
#[cfg(test)]
mod tests {
use super::{format_private_account_id, format_public_account_id};
#[test]
fn correct_account_id_from_prefix() {
let account_id1 = "cafecafe";
let account_id2 = "deadbeaf";
let account_id1_pub = format_public_account_id(account_id1);
let account_id2_priv = format_private_account_id(account_id2);
assert_eq!(account_id1_pub, "Public/cafecafe".to_string());
assert_eq!(account_id2_priv, "Private/deadbeaf".to_string());
}
}

View File

@ -1,5 +1,5 @@
use anyhow::Result;
use integration_tests::{ACC_SENDER, TestContext};
use integration_tests::TestContext;
use log::info;
use nssa::program::Program;
use tokio::test;
@ -10,7 +10,7 @@ async fn get_existing_account() -> Result<()> {
let account = ctx
.sequencer_client()
.get_account(ACC_SENDER.to_string())
.get_account(ctx.existing_public_accounts()[0])
.await?
.account;

View File

@ -88,8 +88,8 @@ async fn amm_public() -> Result<()> {
// Create new token
let subcommand = TokenProgramAgnosticSubcommand::New {
definition_account_id: format_public_account_id(&definition_account_id_1.to_string()),
supply_account_id: format_public_account_id(&supply_account_id_1.to_string()),
definition_account_id: format_public_account_id(definition_account_id_1),
supply_account_id: format_public_account_id(supply_account_id_1),
name: "A NAM1".to_string(),
total_supply: 37,
};
@ -99,10 +99,8 @@ async fn amm_public() -> Result<()> {
// Transfer 7 tokens from `supply_acc` to the account at account_id `recipient_account_id_1`
let subcommand = TokenProgramAgnosticSubcommand::Send {
from: format_public_account_id(&supply_account_id_1.to_string()),
to: Some(format_public_account_id(
&recipient_account_id_1.to_string(),
)),
from: format_public_account_id(supply_account_id_1),
to: Some(format_public_account_id(recipient_account_id_1)),
to_npk: None,
to_ipk: None,
amount: 7,
@ -114,8 +112,8 @@ async fn amm_public() -> Result<()> {
// Create new token
let subcommand = TokenProgramAgnosticSubcommand::New {
definition_account_id: format_public_account_id(&definition_account_id_2.to_string()),
supply_account_id: format_public_account_id(&supply_account_id_2.to_string()),
definition_account_id: format_public_account_id(definition_account_id_2),
supply_account_id: format_public_account_id(supply_account_id_2),
name: "A NAM2".to_string(),
total_supply: 37,
};
@ -125,10 +123,8 @@ async fn amm_public() -> Result<()> {
// Transfer 7 tokens from `supply_acc` to the account at account_id `recipient_account_id_2`
let subcommand = TokenProgramAgnosticSubcommand::Send {
from: format_public_account_id(&supply_account_id_2.to_string()),
to: Some(format_public_account_id(
&recipient_account_id_2.to_string(),
)),
from: format_public_account_id(supply_account_id_2),
to: Some(format_public_account_id(recipient_account_id_2)),
to_npk: None,
to_ipk: None,
amount: 7,
@ -157,9 +153,9 @@ async fn amm_public() -> Result<()> {
// Send creation tx
let subcommand = AmmProgramAgnosticSubcommand::New {
user_holding_a: format_public_account_id(&recipient_account_id_1.to_string()),
user_holding_b: format_public_account_id(&recipient_account_id_2.to_string()),
user_holding_lp: format_public_account_id(&user_holding_lp.to_string()),
user_holding_a: format_public_account_id(recipient_account_id_1),
user_holding_b: format_public_account_id(recipient_account_id_2),
user_holding_lp: format_public_account_id(user_holding_lp),
balance_a: 3,
balance_b: 3,
};
@ -170,19 +166,19 @@ async fn amm_public() -> Result<()> {
let user_holding_a_acc = ctx
.sequencer_client()
.get_account(recipient_account_id_1.to_string())
.get_account(recipient_account_id_1)
.await?
.account;
let user_holding_b_acc = ctx
.sequencer_client()
.get_account(recipient_account_id_2.to_string())
.get_account(recipient_account_id_2)
.await?
.account;
let user_holding_lp_acc = ctx
.sequencer_client()
.get_account(user_holding_lp.to_string())
.get_account(user_holding_lp)
.await?
.account;
@ -206,8 +202,8 @@ async fn amm_public() -> Result<()> {
// Make swap
let subcommand = AmmProgramAgnosticSubcommand::Swap {
user_holding_a: format_public_account_id(&recipient_account_id_1.to_string()),
user_holding_b: format_public_account_id(&recipient_account_id_2.to_string()),
user_holding_a: format_public_account_id(recipient_account_id_1),
user_holding_b: format_public_account_id(recipient_account_id_2),
amount_in: 2,
min_amount_out: 1,
token_definition: definition_account_id_1.to_string(),
@ -219,19 +215,19 @@ async fn amm_public() -> Result<()> {
let user_holding_a_acc = ctx
.sequencer_client()
.get_account(recipient_account_id_1.to_string())
.get_account(recipient_account_id_1)
.await?
.account;
let user_holding_b_acc = ctx
.sequencer_client()
.get_account(recipient_account_id_2.to_string())
.get_account(recipient_account_id_2)
.await?
.account;
let user_holding_lp_acc = ctx
.sequencer_client()
.get_account(user_holding_lp.to_string())
.get_account(user_holding_lp)
.await?
.account;
@ -255,8 +251,8 @@ async fn amm_public() -> Result<()> {
// Make swap
let subcommand = AmmProgramAgnosticSubcommand::Swap {
user_holding_a: format_public_account_id(&recipient_account_id_1.to_string()),
user_holding_b: format_public_account_id(&recipient_account_id_2.to_string()),
user_holding_a: format_public_account_id(recipient_account_id_1),
user_holding_b: format_public_account_id(recipient_account_id_2),
amount_in: 2,
min_amount_out: 1,
token_definition: definition_account_id_2.to_string(),
@ -268,19 +264,19 @@ async fn amm_public() -> Result<()> {
let user_holding_a_acc = ctx
.sequencer_client()
.get_account(recipient_account_id_1.to_string())
.get_account(recipient_account_id_1)
.await?
.account;
let user_holding_b_acc = ctx
.sequencer_client()
.get_account(recipient_account_id_2.to_string())
.get_account(recipient_account_id_2)
.await?
.account;
let user_holding_lp_acc = ctx
.sequencer_client()
.get_account(user_holding_lp.to_string())
.get_account(user_holding_lp)
.await?
.account;
@ -304,9 +300,9 @@ async fn amm_public() -> Result<()> {
// Add liquidity
let subcommand = AmmProgramAgnosticSubcommand::AddLiquidity {
user_holding_a: format_public_account_id(&recipient_account_id_1.to_string()),
user_holding_b: format_public_account_id(&recipient_account_id_2.to_string()),
user_holding_lp: format_public_account_id(&user_holding_lp.to_string()),
user_holding_a: format_public_account_id(recipient_account_id_1),
user_holding_b: format_public_account_id(recipient_account_id_2),
user_holding_lp: format_public_account_id(user_holding_lp),
min_amount_lp: 1,
max_amount_a: 2,
max_amount_b: 2,
@ -318,19 +314,19 @@ async fn amm_public() -> Result<()> {
let user_holding_a_acc = ctx
.sequencer_client()
.get_account(recipient_account_id_1.to_string())
.get_account(recipient_account_id_1)
.await?
.account;
let user_holding_b_acc = ctx
.sequencer_client()
.get_account(recipient_account_id_2.to_string())
.get_account(recipient_account_id_2)
.await?
.account;
let user_holding_lp_acc = ctx
.sequencer_client()
.get_account(user_holding_lp.to_string())
.get_account(user_holding_lp)
.await?
.account;
@ -354,9 +350,9 @@ async fn amm_public() -> Result<()> {
// Remove liquidity
let subcommand = AmmProgramAgnosticSubcommand::RemoveLiquidity {
user_holding_a: format_public_account_id(&recipient_account_id_1.to_string()),
user_holding_b: format_public_account_id(&recipient_account_id_2.to_string()),
user_holding_lp: format_public_account_id(&user_holding_lp.to_string()),
user_holding_a: format_public_account_id(recipient_account_id_1),
user_holding_b: format_public_account_id(recipient_account_id_2),
user_holding_lp: format_public_account_id(user_holding_lp),
balance_lp: 2,
min_amount_a: 1,
min_amount_b: 1,
@ -368,19 +364,19 @@ async fn amm_public() -> Result<()> {
let user_holding_a_acc = ctx
.sequencer_client()
.get_account(recipient_account_id_1.to_string())
.get_account(recipient_account_id_1)
.await?
.account;
let user_holding_b_acc = ctx
.sequencer_client()
.get_account(recipient_account_id_2.to_string())
.get_account(recipient_account_id_2)
.await?
.account;
let user_holding_lp_acc = ctx
.sequencer_client()
.get_account(user_holding_lp.to_string())
.get_account(user_holding_lp)
.await?
.account;

View File

@ -2,7 +2,6 @@ use std::time::Duration;
use anyhow::{Context as _, Result};
use integration_tests::{
ACC_RECEIVER, ACC_RECEIVER_PRIVATE, ACC_SENDER, ACC_SENDER_PRIVATE,
TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, fetch_privacy_preserving_tx,
format_private_account_id, format_public_account_id, verify_commitment_is_in_state,
};
@ -20,12 +19,12 @@ use wallet::cli::{
async fn private_transfer_to_owned_account() -> Result<()> {
let mut ctx = TestContext::new().await?;
let from: AccountId = ACC_SENDER_PRIVATE.parse()?;
let to: AccountId = ACC_RECEIVER_PRIVATE.parse()?;
let from: AccountId = ctx.existing_private_accounts()[0];
let to: AccountId = ctx.existing_private_accounts()[1];
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_private_account_id(&from.to_string()),
to: Some(format_private_account_id(&to.to_string())),
from: format_private_account_id(from),
to: Some(format_private_account_id(to)),
to_npk: None,
to_ipk: None,
amount: 100,
@ -38,13 +37,13 @@ async fn private_transfer_to_owned_account() -> Result<()> {
let new_commitment1 = ctx
.wallet()
.get_private_account_commitment(&from)
.get_private_account_commitment(from)
.context("Failed to get private account commitment for sender")?;
assert!(verify_commitment_is_in_state(new_commitment1, ctx.sequencer_client()).await);
let new_commitment2 = ctx
.wallet()
.get_private_account_commitment(&to)
.get_private_account_commitment(to)
.context("Failed to get private account commitment for receiver")?;
assert!(verify_commitment_is_in_state(new_commitment2, ctx.sequencer_client()).await);
@ -57,13 +56,13 @@ async fn private_transfer_to_owned_account() -> Result<()> {
async fn private_transfer_to_foreign_account() -> Result<()> {
let mut ctx = TestContext::new().await?;
let from: AccountId = ACC_SENDER_PRIVATE.parse()?;
let from: AccountId = ctx.existing_private_accounts()[0];
let to_npk = NullifierPublicKey([42; 32]);
let to_npk_string = hex::encode(to_npk.0);
let to_ipk = Secp256k1Point::from_scalar(to_npk.0);
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_private_account_id(&from.to_string()),
from: format_private_account_id(from),
to: None,
to_npk: Some(to_npk_string),
to_ipk: Some(hex::encode(to_ipk.0)),
@ -80,10 +79,10 @@ async fn private_transfer_to_foreign_account() -> Result<()> {
let new_commitment1 = ctx
.wallet()
.get_private_account_commitment(&from)
.get_private_account_commitment(from)
.context("Failed to get private account commitment for sender")?;
let tx = fetch_privacy_preserving_tx(ctx.sequencer_client(), tx_hash.clone()).await;
let tx = fetch_privacy_preserving_tx(ctx.sequencer_client(), tx_hash).await;
assert_eq!(tx.message.new_commitments[0], new_commitment1);
assert_eq!(tx.message.new_commitments.len(), 2);
@ -100,19 +99,19 @@ async fn private_transfer_to_foreign_account() -> Result<()> {
async fn deshielded_transfer_to_public_account() -> Result<()> {
let mut ctx = TestContext::new().await?;
let from: AccountId = ACC_SENDER_PRIVATE.parse()?;
let to: AccountId = ACC_RECEIVER.parse()?;
let from: AccountId = ctx.existing_private_accounts()[0];
let to: AccountId = ctx.existing_public_accounts()[1];
// Check initial balance of the private sender
let from_acc = ctx
.wallet()
.get_account_private(&from)
.get_account_private(from)
.context("Failed to get sender's private account")?;
assert_eq!(from_acc.balance, 10000);
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_private_account_id(&from.to_string()),
to: Some(format_public_account_id(&to.to_string())),
from: format_private_account_id(from),
to: Some(format_public_account_id(to)),
to_npk: None,
to_ipk: None,
amount: 100,
@ -125,18 +124,15 @@ async fn deshielded_transfer_to_public_account() -> Result<()> {
let from_acc = ctx
.wallet()
.get_account_private(&from)
.get_account_private(from)
.context("Failed to get sender's private account")?;
let new_commitment = ctx
.wallet()
.get_private_account_commitment(&from)
.get_private_account_commitment(from)
.context("Failed to get private account commitment")?;
assert!(verify_commitment_is_in_state(new_commitment, ctx.sequencer_client()).await);
let acc_2_balance = ctx
.sequencer_client()
.get_account_balance(to.to_string())
.await?;
let acc_2_balance = ctx.sequencer_client().get_account_balance(to).await?;
assert_eq!(from_acc.balance, 9900);
assert_eq!(acc_2_balance.balance, 20100);
@ -150,7 +146,7 @@ async fn deshielded_transfer_to_public_account() -> Result<()> {
async fn private_transfer_to_owned_account_using_claiming_path() -> Result<()> {
let mut ctx = TestContext::new().await?;
let from: AccountId = ACC_SENDER_PRIVATE.parse()?;
let from: AccountId = ctx.existing_private_accounts()[0];
// Create a new private account
let command = Command::Account(AccountSubcommand::New(NewSubcommand::Private { cci: None }));
@ -168,13 +164,13 @@ async fn private_transfer_to_owned_account_using_claiming_path() -> Result<()> {
.wallet()
.storage()
.user_data
.get_private_account(&to_account_id)
.get_private_account(to_account_id)
.cloned()
.context("Failed to get private account")?;
// Send to this account using claiming path (using npk and ipk instead of account ID)
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_private_account_id(&from.to_string()),
from: format_private_account_id(from),
to: None,
to_npk: Some(hex::encode(to_keys.nullifer_public_key.0)),
to_ipk: Some(hex::encode(to_keys.incoming_viewing_public_key.0)),
@ -186,7 +182,7 @@ async fn private_transfer_to_owned_account_using_claiming_path() -> Result<()> {
anyhow::bail!("Expected PrivacyPreservingTransfer return value");
};
let tx = fetch_privacy_preserving_tx(ctx.sequencer_client(), tx_hash.clone()).await;
let tx = fetch_privacy_preserving_tx(ctx.sequencer_client(), tx_hash).await;
// Sync the wallet to claim the new account
let command = Command::Account(AccountSubcommand::SyncPrivate {});
@ -194,7 +190,7 @@ async fn private_transfer_to_owned_account_using_claiming_path() -> Result<()> {
let new_commitment1 = ctx
.wallet()
.get_private_account_commitment(&from)
.get_private_account_commitment(from)
.context("Failed to get private account commitment for sender")?;
assert_eq!(tx.message.new_commitments[0], new_commitment1);
@ -205,7 +201,7 @@ async fn private_transfer_to_owned_account_using_claiming_path() -> Result<()> {
let to_res_acc = ctx
.wallet()
.get_account_private(&to_account_id)
.get_account_private(to_account_id)
.context("Failed to get recipient's private account")?;
assert_eq!(to_res_acc.balance, 100);
@ -218,12 +214,12 @@ async fn private_transfer_to_owned_account_using_claiming_path() -> Result<()> {
async fn shielded_transfer_to_owned_private_account() -> Result<()> {
let mut ctx = TestContext::new().await?;
let from: AccountId = ACC_SENDER.parse()?;
let to: AccountId = ACC_RECEIVER_PRIVATE.parse()?;
let from: AccountId = ctx.existing_public_accounts()[0];
let to: AccountId = ctx.existing_private_accounts()[1];
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_public_account_id(&from.to_string()),
to: Some(format_private_account_id(&to.to_string())),
from: format_public_account_id(from),
to: Some(format_private_account_id(to)),
to_npk: None,
to_ipk: None,
amount: 100,
@ -236,18 +232,15 @@ async fn shielded_transfer_to_owned_private_account() -> Result<()> {
let acc_to = ctx
.wallet()
.get_account_private(&to)
.get_account_private(to)
.context("Failed to get receiver's private account")?;
let new_commitment = ctx
.wallet()
.get_private_account_commitment(&to)
.get_private_account_commitment(to)
.context("Failed to get receiver's commitment")?;
assert!(verify_commitment_is_in_state(new_commitment, ctx.sequencer_client()).await);
let acc_from_balance = ctx
.sequencer_client()
.get_account_balance(from.to_string())
.await?;
let acc_from_balance = ctx.sequencer_client().get_account_balance(from).await?;
assert_eq!(acc_from_balance.balance, 9900);
assert_eq!(acc_to.balance, 20100);
@ -264,10 +257,10 @@ async fn shielded_transfer_to_foreign_account() -> Result<()> {
let to_npk = NullifierPublicKey([42; 32]);
let to_npk_string = hex::encode(to_npk.0);
let to_ipk = Secp256k1Point::from_scalar(to_npk.0);
let from: AccountId = ACC_SENDER.parse()?;
let from: AccountId = ctx.existing_public_accounts()[0];
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_public_account_id(&from.to_string()),
from: format_public_account_id(from),
to: None,
to_npk: Some(to_npk_string),
to_ipk: Some(hex::encode(to_ipk.0)),
@ -284,10 +277,7 @@ async fn shielded_transfer_to_foreign_account() -> Result<()> {
let tx = fetch_privacy_preserving_tx(ctx.sequencer_client(), tx_hash).await;
let acc_1_balance = ctx
.sequencer_client()
.get_account_balance(from.to_string())
.await?;
let acc_1_balance = ctx.sequencer_client().get_account_balance(from).await?;
assert!(
verify_commitment_is_in_state(
@ -313,7 +303,7 @@ async fn private_transfer_to_owned_account_continuous_run_path() -> Result<()> {
// The original implementation spawned wallet::cli::execute_continuous_run() in background
// but this conflicts with TestContext's wallet management
let from: AccountId = ACC_SENDER_PRIVATE.parse()?;
let from: AccountId = ctx.existing_private_accounts()[0];
// Create a new private account
let command = Command::Account(AccountSubcommand::New(NewSubcommand::Private { cci: None }));
@ -331,13 +321,13 @@ async fn private_transfer_to_owned_account_continuous_run_path() -> Result<()> {
.wallet()
.storage()
.user_data
.get_private_account(&to_account_id)
.get_private_account(to_account_id)
.cloned()
.context("Failed to get private account")?;
// Send transfer using nullifier and incoming viewing public keys
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_private_account_id(&from.to_string()),
from: format_private_account_id(from),
to: None,
to_npk: Some(hex::encode(to_keys.nullifer_public_key.0)),
to_ipk: Some(hex::encode(to_keys.incoming_viewing_public_key.0)),
@ -349,7 +339,7 @@ async fn private_transfer_to_owned_account_continuous_run_path() -> Result<()> {
anyhow::bail!("Failed to send transaction");
};
let tx = fetch_privacy_preserving_tx(ctx.sequencer_client(), tx_hash.clone()).await;
let tx = fetch_privacy_preserving_tx(ctx.sequencer_client(), tx_hash).await;
info!("Waiting for next blocks to check if continuous run fetches account");
tokio::time::sleep(Duration::from_secs(TIME_TO_WAIT_FOR_BLOCK_SECONDS)).await;
@ -364,7 +354,7 @@ async fn private_transfer_to_owned_account_continuous_run_path() -> Result<()> {
// Verify receiver account balance
let to_res_acc = ctx
.wallet()
.get_account_private(&to_account_id)
.get_account_private(to_account_id)
.context("Failed to get receiver account")?;
assert_eq!(to_res_acc.balance, 100);
@ -383,7 +373,7 @@ async fn initialize_private_account() -> Result<()> {
};
let command = Command::AuthTransfer(AuthTransferSubcommand::Init {
account_id: format_private_account_id(&account_id.to_string()),
account_id: format_private_account_id(account_id),
});
wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?;
@ -395,13 +385,13 @@ async fn initialize_private_account() -> Result<()> {
let new_commitment = ctx
.wallet()
.get_private_account_commitment(&account_id)
.get_private_account_commitment(account_id)
.context("Failed to get private account commitment")?;
assert!(verify_commitment_is_in_state(new_commitment, ctx.sequencer_client()).await);
let account = ctx
.wallet()
.get_account_private(&account_id)
.get_account_private(account_id)
.context("Failed to get private account")?;
assert_eq!(

View File

@ -1,9 +1,7 @@
use std::time::Duration;
use anyhow::Result;
use integration_tests::{
ACC_RECEIVER, ACC_SENDER, TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, format_public_account_id,
};
use integration_tests::{TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, format_public_account_id};
use log::info;
use nssa::program::Program;
use tokio::test;
@ -18,8 +16,8 @@ async fn successful_transfer_to_existing_account() -> Result<()> {
let mut ctx = TestContext::new().await?;
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_public_account_id(ACC_SENDER),
to: Some(format_public_account_id(ACC_RECEIVER)),
from: format_public_account_id(ctx.existing_public_accounts()[0]),
to: Some(format_public_account_id(ctx.existing_public_accounts()[1])),
to_npk: None,
to_ipk: None,
amount: 100,
@ -33,11 +31,11 @@ async fn successful_transfer_to_existing_account() -> Result<()> {
info!("Checking correct balance move");
let acc_1_balance = ctx
.sequencer_client()
.get_account_balance(ACC_SENDER.to_string())
.get_account_balance(ctx.existing_public_accounts()[0])
.await?;
let acc_2_balance = ctx
.sequencer_client()
.get_account_balance(ACC_RECEIVER.to_string())
.get_account_balance(ctx.existing_public_accounts()[1])
.await?;
info!("Balance of sender: {acc_1_balance:#?}");
@ -64,17 +62,15 @@ pub async fn successful_transfer_to_new_account() -> Result<()> {
.storage()
.user_data
.account_ids()
.map(ToString::to_string)
.find(|acc_id| acc_id != ACC_SENDER && acc_id != ACC_RECEIVER)
.find(|acc_id| {
*acc_id != ctx.existing_public_accounts()[0]
&& *acc_id != ctx.existing_public_accounts()[1]
})
.expect("Failed to find newly created account in the wallet storage");
if new_persistent_account_id == String::new() {
panic!("Failed to produce new account, not present in persistent accounts");
}
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_public_account_id(ACC_SENDER),
to: Some(format_public_account_id(&new_persistent_account_id)),
from: format_public_account_id(ctx.existing_public_accounts()[0]),
to: Some(format_public_account_id(new_persistent_account_id)),
to_npk: None,
to_ipk: None,
amount: 100,
@ -88,7 +84,7 @@ pub async fn successful_transfer_to_new_account() -> Result<()> {
info!("Checking correct balance move");
let acc_1_balance = ctx
.sequencer_client()
.get_account_balance(ACC_SENDER.to_string())
.get_account_balance(ctx.existing_public_accounts()[0])
.await?;
let acc_2_balance = ctx
.sequencer_client()
@ -109,8 +105,8 @@ async fn failed_transfer_with_insufficient_balance() -> Result<()> {
let mut ctx = TestContext::new().await?;
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_public_account_id(ACC_SENDER),
to: Some(format_public_account_id(ACC_RECEIVER)),
from: format_public_account_id(ctx.existing_public_accounts()[0]),
to: Some(format_public_account_id(ctx.existing_public_accounts()[1])),
to_npk: None,
to_ipk: None,
amount: 1000000,
@ -125,11 +121,11 @@ async fn failed_transfer_with_insufficient_balance() -> Result<()> {
info!("Checking balances unchanged");
let acc_1_balance = ctx
.sequencer_client()
.get_account_balance(ACC_SENDER.to_string())
.get_account_balance(ctx.existing_public_accounts()[0])
.await?;
let acc_2_balance = ctx
.sequencer_client()
.get_account_balance(ACC_RECEIVER.to_string())
.get_account_balance(ctx.existing_public_accounts()[1])
.await?;
info!("Balance of sender: {acc_1_balance:#?}");
@ -147,8 +143,8 @@ async fn two_consecutive_successful_transfers() -> Result<()> {
// First transfer
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_public_account_id(ACC_SENDER),
to: Some(format_public_account_id(ACC_RECEIVER)),
from: format_public_account_id(ctx.existing_public_accounts()[0]),
to: Some(format_public_account_id(ctx.existing_public_accounts()[1])),
to_npk: None,
to_ipk: None,
amount: 100,
@ -162,11 +158,11 @@ async fn two_consecutive_successful_transfers() -> Result<()> {
info!("Checking correct balance move after first transfer");
let acc_1_balance = ctx
.sequencer_client()
.get_account_balance(ACC_SENDER.to_string())
.get_account_balance(ctx.existing_public_accounts()[0])
.await?;
let acc_2_balance = ctx
.sequencer_client()
.get_account_balance(ACC_RECEIVER.to_string())
.get_account_balance(ctx.existing_public_accounts()[1])
.await?;
info!("Balance of sender: {acc_1_balance:#?}");
@ -179,8 +175,8 @@ async fn two_consecutive_successful_transfers() -> Result<()> {
// Second transfer
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_public_account_id(ACC_SENDER),
to: Some(format_public_account_id(ACC_RECEIVER)),
from: format_public_account_id(ctx.existing_public_accounts()[0]),
to: Some(format_public_account_id(ctx.existing_public_accounts()[1])),
to_npk: None,
to_ipk: None,
amount: 100,
@ -194,11 +190,11 @@ async fn two_consecutive_successful_transfers() -> Result<()> {
info!("Checking correct balance move after second transfer");
let acc_1_balance = ctx
.sequencer_client()
.get_account_balance(ACC_SENDER.to_string())
.get_account_balance(ctx.existing_public_accounts()[0])
.await?;
let acc_2_balance = ctx
.sequencer_client()
.get_account_balance(ACC_RECEIVER.to_string())
.get_account_balance(ctx.existing_public_accounts()[1])
.await?;
info!("Balance of sender: {acc_1_balance:#?}");
@ -223,14 +219,14 @@ async fn initialize_public_account() -> Result<()> {
};
let command = Command::AuthTransfer(AuthTransferSubcommand::Init {
account_id: format_public_account_id(&account_id.to_string()),
account_id: format_public_account_id(account_id),
});
wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?;
info!("Checking correct execution");
let account = ctx
.sequencer_client()
.get_account(account_id.to_string())
.get_account(account_id)
.await?
.account;

View File

@ -1,23 +0,0 @@
use anyhow::Result;
use integration_tests::TestContext;
use log::info;
use tokio::test;
#[ignore = "needs complicated setup"]
#[test]
// To run this test properly, you need nomos node running in the background.
// For instructions in building nomos node, refer to [this](https://github.com/logos-blockchain/logos-blockchain?tab=readme-ov-file#running-a-logos-blockchain-node).
//
// Recommended to run node locally from build binary.
async fn indexer_run_local_node() -> Result<()> {
let _ctx = TestContext::new_bedrock_local_attached().await?;
info!("Let's observe behaviour");
tokio::time::sleep(std::time::Duration::from_secs(180)).await;
// No way to check state of indexer now
// When it will be a service, then it will become possible.
Ok(())
}

View File

@ -2,8 +2,8 @@ use std::{str::FromStr, time::Duration};
use anyhow::Result;
use integration_tests::{
ACC_SENDER, ACC_SENDER_PRIVATE, TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext,
format_private_account_id, format_public_account_id, verify_commitment_is_in_state,
TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, format_private_account_id,
format_public_account_id, verify_commitment_is_in_state,
};
use key_protocol::key_management::key_tree::chain_index::ChainIndex;
use log::info;
@ -19,7 +19,7 @@ use wallet::cli::{
async fn restore_keys_from_seed() -> Result<()> {
let mut ctx = TestContext::new().await?;
let from: AccountId = ACC_SENDER_PRIVATE.parse()?;
let from: AccountId = ctx.existing_private_accounts()[0];
// Create first private account at root
let command = Command::Account(AccountSubcommand::New(NewSubcommand::Private {
@ -47,8 +47,8 @@ async fn restore_keys_from_seed() -> Result<()> {
// Send to first private account
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_private_account_id(&from.to_string()),
to: Some(format_private_account_id(&to_account_id1.to_string())),
from: format_private_account_id(from),
to: Some(format_private_account_id(to_account_id1)),
to_npk: None,
to_ipk: None,
amount: 100,
@ -57,15 +57,15 @@ async fn restore_keys_from_seed() -> Result<()> {
// Send to second private account
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_private_account_id(&from.to_string()),
to: Some(format_private_account_id(&to_account_id2.to_string())),
from: format_private_account_id(from),
to: Some(format_private_account_id(to_account_id2)),
to_npk: None,
to_ipk: None,
amount: 101,
});
wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?;
let from: AccountId = ACC_SENDER.parse()?;
let from: AccountId = ctx.existing_public_accounts()[0];
// Create first public account at root
let command = Command::Account(AccountSubcommand::New(NewSubcommand::Public {
@ -93,8 +93,8 @@ async fn restore_keys_from_seed() -> Result<()> {
// Send to first public account
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_public_account_id(&from.to_string()),
to: Some(format_public_account_id(&to_account_id3.to_string())),
from: format_public_account_id(from),
to: Some(format_public_account_id(to_account_id3)),
to_npk: None,
to_ipk: None,
amount: 102,
@ -103,8 +103,8 @@ async fn restore_keys_from_seed() -> Result<()> {
// Send to second public account
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_public_account_id(&from.to_string()),
to: Some(format_public_account_id(&to_account_id4.to_string())),
from: format_public_account_id(from),
to: Some(format_public_account_id(to_account_id4)),
to_npk: None,
to_ipk: None,
amount: 103,
@ -166,8 +166,8 @@ async fn restore_keys_from_seed() -> Result<()> {
// Test that restored accounts can send transactions
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_private_account_id(&to_account_id1.to_string()),
to: Some(format_private_account_id(&to_account_id2.to_string())),
from: format_private_account_id(to_account_id1),
to: Some(format_private_account_id(to_account_id2)),
to_npk: None,
to_ipk: None,
amount: 10,
@ -175,8 +175,8 @@ async fn restore_keys_from_seed() -> Result<()> {
wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?;
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_public_account_id(&to_account_id3.to_string()),
to: Some(format_public_account_id(&to_account_id4.to_string())),
from: format_public_account_id(to_account_id3),
to: Some(format_public_account_id(to_account_id4)),
to_npk: None,
to_ipk: None,
amount: 11,
@ -188,11 +188,11 @@ async fn restore_keys_from_seed() -> Result<()> {
// Verify commitments exist for private accounts
let comm1 = ctx
.wallet()
.get_private_account_commitment(&to_account_id1)
.get_private_account_commitment(to_account_id1)
.expect("Acc 1 commitment should exist");
let comm2 = ctx
.wallet()
.get_private_account_commitment(&to_account_id2)
.get_private_account_commitment(to_account_id2)
.expect("Acc 2 commitment should exist");
assert!(verify_commitment_is_in_state(comm1, ctx.sequencer_client()).await);
@ -201,11 +201,11 @@ async fn restore_keys_from_seed() -> Result<()> {
// Verify public account balances
let acc3 = ctx
.sequencer_client()
.get_account_balance(to_account_id3.to_string())
.get_account_balance(to_account_id3)
.await?;
let acc4 = ctx
.sequencer_client()
.get_account_balance(to_account_id4.to_string())
.get_account_balance(to_account_id4)
.await?;
assert_eq!(acc3.balance, 91); // 102 - 11

View File

@ -3,8 +3,8 @@ use std::time::Duration;
use anyhow::{Context as _, Result};
use common::PINATA_BASE58;
use integration_tests::{
ACC_SENDER, ACC_SENDER_PRIVATE, TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext,
format_private_account_id, format_public_account_id, verify_commitment_is_in_state,
TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, format_private_account_id,
format_public_account_id, verify_commitment_is_in_state,
};
use log::info;
use tokio::test;
@ -22,12 +22,12 @@ async fn claim_pinata_to_existing_public_account() -> Result<()> {
let pinata_prize = 150;
let command = Command::Pinata(PinataProgramAgnosticSubcommand::Claim {
to: format_public_account_id(ACC_SENDER),
to: format_public_account_id(ctx.existing_public_accounts()[0]),
});
let pinata_balance_pre = ctx
.sequencer_client()
.get_account_balance(PINATA_BASE58.to_string())
.get_account_balance(PINATA_BASE58.parse().unwrap())
.await?
.balance;
@ -39,13 +39,13 @@ async fn claim_pinata_to_existing_public_account() -> Result<()> {
info!("Checking correct balance move");
let pinata_balance_post = ctx
.sequencer_client()
.get_account_balance(PINATA_BASE58.to_string())
.get_account_balance(PINATA_BASE58.parse().unwrap())
.await?
.balance;
let winner_balance_post = ctx
.sequencer_client()
.get_account_balance(ACC_SENDER.to_string())
.get_account_balance(ctx.existing_public_accounts()[0])
.await?
.balance;
@ -63,12 +63,12 @@ async fn claim_pinata_to_existing_private_account() -> Result<()> {
let pinata_prize = 150;
let command = Command::Pinata(PinataProgramAgnosticSubcommand::Claim {
to: format_private_account_id(ACC_SENDER_PRIVATE),
to: format_private_account_id(ctx.existing_private_accounts()[0]),
});
let pinata_balance_pre = ctx
.sequencer_client()
.get_account_balance(PINATA_BASE58.to_string())
.get_account_balance(PINATA_BASE58.parse().unwrap())
.await?
.balance;
@ -86,13 +86,13 @@ async fn claim_pinata_to_existing_private_account() -> Result<()> {
let new_commitment = ctx
.wallet()
.get_private_account_commitment(&ACC_SENDER_PRIVATE.parse()?)
.get_private_account_commitment(ctx.existing_private_accounts()[0])
.context("Failed to get private account commitment")?;
assert!(verify_commitment_is_in_state(new_commitment, ctx.sequencer_client()).await);
let pinata_balance_post = ctx
.sequencer_client()
.get_account_balance(PINATA_BASE58.to_string())
.get_account_balance(PINATA_BASE58.parse().unwrap())
.await?
.balance;
@ -122,7 +122,7 @@ async fn claim_pinata_to_new_private_account() -> Result<()> {
anyhow::bail!("Expected RegisterAccount return value");
};
let winner_account_id_formatted = format_private_account_id(&winner_account_id.to_string());
let winner_account_id_formatted = format_private_account_id(winner_account_id);
// Initialize account under auth transfer program
let command = Command::AuthTransfer(AuthTransferSubcommand::Init {
@ -135,7 +135,7 @@ async fn claim_pinata_to_new_private_account() -> Result<()> {
let new_commitment = ctx
.wallet()
.get_private_account_commitment(&winner_account_id)
.get_private_account_commitment(winner_account_id)
.context("Failed to get private account commitment")?;
assert!(verify_commitment_is_in_state(new_commitment, ctx.sequencer_client()).await);
@ -146,7 +146,7 @@ async fn claim_pinata_to_new_private_account() -> Result<()> {
let pinata_balance_pre = ctx
.sequencer_client()
.get_account_balance(PINATA_BASE58.to_string())
.get_account_balance(PINATA_BASE58.parse().unwrap())
.await?
.balance;
@ -157,13 +157,13 @@ async fn claim_pinata_to_new_private_account() -> Result<()> {
let new_commitment = ctx
.wallet()
.get_private_account_commitment(&winner_account_id)
.get_private_account_commitment(winner_account_id)
.context("Failed to get private account commitment")?;
assert!(verify_commitment_is_in_state(new_commitment, ctx.sequencer_client()).await);
let pinata_balance_post = ctx
.sequencer_client()
.get_account_balance(PINATA_BASE58.to_string())
.get_account_balance(PINATA_BASE58.parse().unwrap())
.await?
.balance;

View File

@ -45,11 +45,13 @@ async fn deploy_and_execute_program() -> Result<()> {
let _response = ctx.sequencer_client().send_tx_public(transaction).await?;
info!("Waiting for next block creation");
tokio::time::sleep(Duration::from_secs(TIME_TO_WAIT_FOR_BLOCK_SECONDS)).await;
// Waiting for long time as it may take some time for such a big transaction to be included in a
// block
tokio::time::sleep(Duration::from_secs(2 * TIME_TO_WAIT_FOR_BLOCK_SECONDS)).await;
let post_state_account = ctx
.sequencer_client()
.get_account(account_id.to_string())
.get_account(account_id)
.await?
.account;

View File

@ -63,8 +63,8 @@ async fn create_and_transfer_public_token() -> Result<()> {
let name = "A NAME".to_string();
let total_supply = 37;
let subcommand = TokenProgramAgnosticSubcommand::New {
definition_account_id: format_public_account_id(&definition_account_id.to_string()),
supply_account_id: format_public_account_id(&supply_account_id.to_string()),
definition_account_id: format_public_account_id(definition_account_id),
supply_account_id: format_public_account_id(supply_account_id),
name: name.clone(),
total_supply,
};
@ -76,7 +76,7 @@ async fn create_and_transfer_public_token() -> Result<()> {
// Check the status of the token definition account
let definition_acc = ctx
.sequencer_client()
.get_account(definition_account_id.to_string())
.get_account(definition_account_id)
.await?
.account;
let token_definition = TokenDefinition::try_from(&definition_acc.data)?;
@ -94,7 +94,7 @@ async fn create_and_transfer_public_token() -> Result<()> {
// Check the status of the token holding account with the total supply
let supply_acc = ctx
.sequencer_client()
.get_account(supply_account_id.to_string())
.get_account(supply_account_id)
.await?
.account;
@ -112,8 +112,8 @@ async fn create_and_transfer_public_token() -> Result<()> {
// Transfer 7 tokens from supply_acc to recipient_account_id
let transfer_amount = 7;
let subcommand = TokenProgramAgnosticSubcommand::Send {
from: format_public_account_id(&supply_account_id.to_string()),
to: Some(format_public_account_id(&recipient_account_id.to_string())),
from: format_public_account_id(supply_account_id),
to: Some(format_public_account_id(recipient_account_id)),
to_npk: None,
to_ipk: None,
amount: transfer_amount,
@ -127,7 +127,7 @@ async fn create_and_transfer_public_token() -> Result<()> {
// Check the status of the supply account after transfer
let supply_acc = ctx
.sequencer_client()
.get_account(supply_account_id.to_string())
.get_account(supply_account_id)
.await?
.account;
assert_eq!(supply_acc.program_owner, Program::token().id());
@ -143,7 +143,7 @@ async fn create_and_transfer_public_token() -> Result<()> {
// Check the status of the recipient account after transfer
let recipient_acc = ctx
.sequencer_client()
.get_account(recipient_account_id.to_string())
.get_account(recipient_account_id)
.await?
.account;
assert_eq!(recipient_acc.program_owner, Program::token().id());
@ -159,8 +159,8 @@ async fn create_and_transfer_public_token() -> Result<()> {
// Burn 3 tokens from recipient_acc
let burn_amount = 3;
let subcommand = TokenProgramAgnosticSubcommand::Burn {
definition: format_public_account_id(&definition_account_id.to_string()),
holder: format_public_account_id(&recipient_account_id.to_string()),
definition: format_public_account_id(definition_account_id),
holder: format_public_account_id(recipient_account_id),
amount: burn_amount,
};
@ -172,7 +172,7 @@ async fn create_and_transfer_public_token() -> Result<()> {
// Check the status of the token definition account after burn
let definition_acc = ctx
.sequencer_client()
.get_account(definition_account_id.to_string())
.get_account(definition_account_id)
.await?
.account;
let token_definition = TokenDefinition::try_from(&definition_acc.data)?;
@ -189,7 +189,7 @@ async fn create_and_transfer_public_token() -> Result<()> {
// Check the status of the recipient account after burn
let recipient_acc = ctx
.sequencer_client()
.get_account(recipient_account_id.to_string())
.get_account(recipient_account_id)
.await?
.account;
let token_holding = TokenHolding::try_from(&recipient_acc.data)?;
@ -205,8 +205,8 @@ async fn create_and_transfer_public_token() -> Result<()> {
// Mint 10 tokens at recipient_acc
let mint_amount = 10;
let subcommand = TokenProgramAgnosticSubcommand::Mint {
definition: format_public_account_id(&definition_account_id.to_string()),
holder: Some(format_public_account_id(&recipient_account_id.to_string())),
definition: format_public_account_id(definition_account_id),
holder: Some(format_public_account_id(recipient_account_id)),
holder_npk: None,
holder_ipk: None,
amount: mint_amount,
@ -220,7 +220,7 @@ async fn create_and_transfer_public_token() -> Result<()> {
// Check the status of the token definition account after mint
let definition_acc = ctx
.sequencer_client()
.get_account(definition_account_id.to_string())
.get_account(definition_account_id)
.await?
.account;
let token_definition = TokenDefinition::try_from(&definition_acc.data)?;
@ -237,7 +237,7 @@ async fn create_and_transfer_public_token() -> Result<()> {
// Check the status of the recipient account after mint
let recipient_acc = ctx
.sequencer_client()
.get_account(recipient_account_id.to_string())
.get_account(recipient_account_id)
.await?
.account;
let token_holding = TokenHolding::try_from(&recipient_acc.data)?;
@ -302,8 +302,8 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> {
let name = "A NAME".to_string();
let total_supply = 37;
let subcommand = TokenProgramAgnosticSubcommand::New {
definition_account_id: format_public_account_id(&definition_account_id.to_string()),
supply_account_id: format_private_account_id(&supply_account_id.to_string()),
definition_account_id: format_public_account_id(definition_account_id),
supply_account_id: format_private_account_id(supply_account_id),
name: name.clone(),
total_supply,
};
@ -316,7 +316,7 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> {
// Check the status of the token definition account
let definition_acc = ctx
.sequencer_client()
.get_account(definition_account_id.to_string())
.get_account(definition_account_id)
.await?
.account;
let token_definition = TokenDefinition::try_from(&definition_acc.data)?;
@ -333,15 +333,15 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> {
let new_commitment1 = ctx
.wallet()
.get_private_account_commitment(&supply_account_id)
.get_private_account_commitment(supply_account_id)
.context("Failed to get supply account commitment")?;
assert!(verify_commitment_is_in_state(new_commitment1, ctx.sequencer_client()).await);
// Transfer 7 tokens from supply_acc to recipient_account_id
let transfer_amount = 7;
let subcommand = TokenProgramAgnosticSubcommand::Send {
from: format_private_account_id(&supply_account_id.to_string()),
to: Some(format_private_account_id(&recipient_account_id.to_string())),
from: format_private_account_id(supply_account_id),
to: Some(format_private_account_id(recipient_account_id)),
to_npk: None,
to_ipk: None,
amount: transfer_amount,
@ -354,21 +354,21 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> {
let new_commitment1 = ctx
.wallet()
.get_private_account_commitment(&supply_account_id)
.get_private_account_commitment(supply_account_id)
.context("Failed to get supply account commitment")?;
assert!(verify_commitment_is_in_state(new_commitment1, ctx.sequencer_client()).await);
let new_commitment2 = ctx
.wallet()
.get_private_account_commitment(&recipient_account_id)
.get_private_account_commitment(recipient_account_id)
.context("Failed to get recipient account commitment")?;
assert!(verify_commitment_is_in_state(new_commitment2, ctx.sequencer_client()).await);
// Burn 3 tokens from recipient_acc
let burn_amount = 3;
let subcommand = TokenProgramAgnosticSubcommand::Burn {
definition: format_public_account_id(&definition_account_id.to_string()),
holder: format_private_account_id(&recipient_account_id.to_string()),
definition: format_public_account_id(definition_account_id),
holder: format_private_account_id(recipient_account_id),
amount: burn_amount,
};
@ -380,7 +380,7 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> {
// Check the token definition account after burn
let definition_acc = ctx
.sequencer_client()
.get_account(definition_account_id.to_string())
.get_account(definition_account_id)
.await?
.account;
let token_definition = TokenDefinition::try_from(&definition_acc.data)?;
@ -396,14 +396,14 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> {
let new_commitment2 = ctx
.wallet()
.get_private_account_commitment(&recipient_account_id)
.get_private_account_commitment(recipient_account_id)
.context("Failed to get recipient account commitment")?;
assert!(verify_commitment_is_in_state(new_commitment2, ctx.sequencer_client()).await);
// Check the recipient account balance after burn
let recipient_acc = ctx
.wallet()
.get_account_private(&recipient_account_id)
.get_account_private(recipient_account_id)
.context("Failed to get recipient account")?;
let token_holding = TokenHolding::try_from(&recipient_acc.data)?;
@ -458,8 +458,8 @@ async fn create_token_with_private_definition() -> Result<()> {
let name = "A NAME".to_string();
let total_supply = 37;
let subcommand = TokenProgramAgnosticSubcommand::New {
definition_account_id: format_private_account_id(&definition_account_id.to_string()),
supply_account_id: format_public_account_id(&supply_account_id.to_string()),
definition_account_id: format_private_account_id(definition_account_id),
supply_account_id: format_public_account_id(supply_account_id),
name: name.clone(),
total_supply,
};
@ -472,14 +472,14 @@ async fn create_token_with_private_definition() -> Result<()> {
// Verify private definition commitment
let new_commitment = ctx
.wallet()
.get_private_account_commitment(&definition_account_id)
.get_private_account_commitment(definition_account_id)
.context("Failed to get definition commitment")?;
assert!(verify_commitment_is_in_state(new_commitment, ctx.sequencer_client()).await);
// Verify supply account
let supply_acc = ctx
.sequencer_client()
.get_account(supply_account_id.to_string())
.get_account(supply_account_id)
.await?
.account;
@ -522,10 +522,8 @@ async fn create_token_with_private_definition() -> Result<()> {
// Mint to public account
let mint_amount_public = 10;
let subcommand = TokenProgramAgnosticSubcommand::Mint {
definition: format_private_account_id(&definition_account_id.to_string()),
holder: Some(format_public_account_id(
&recipient_account_id_public.to_string(),
)),
definition: format_private_account_id(definition_account_id),
holder: Some(format_public_account_id(recipient_account_id_public)),
holder_npk: None,
holder_ipk: None,
amount: mint_amount_public,
@ -539,7 +537,7 @@ async fn create_token_with_private_definition() -> Result<()> {
// Verify definition account has updated supply
let definition_acc = ctx
.wallet()
.get_account_private(&definition_account_id)
.get_account_private(definition_account_id)
.context("Failed to get definition account")?;
let token_definition = TokenDefinition::try_from(&definition_acc.data)?;
@ -555,7 +553,7 @@ async fn create_token_with_private_definition() -> Result<()> {
// Verify public recipient received tokens
let recipient_acc = ctx
.sequencer_client()
.get_account(recipient_account_id_public.to_string())
.get_account(recipient_account_id_public)
.await?
.account;
let token_holding = TokenHolding::try_from(&recipient_acc.data)?;
@ -571,10 +569,8 @@ async fn create_token_with_private_definition() -> Result<()> {
// Mint to private account
let mint_amount_private = 5;
let subcommand = TokenProgramAgnosticSubcommand::Mint {
definition: format_private_account_id(&definition_account_id.to_string()),
holder: Some(format_private_account_id(
&recipient_account_id_private.to_string(),
)),
definition: format_private_account_id(definition_account_id),
holder: Some(format_private_account_id(recipient_account_id_private)),
holder_npk: None,
holder_ipk: None,
amount: mint_amount_private,
@ -588,14 +584,14 @@ async fn create_token_with_private_definition() -> Result<()> {
// Verify private recipient commitment
let new_commitment = ctx
.wallet()
.get_private_account_commitment(&recipient_account_id_private)
.get_private_account_commitment(recipient_account_id_private)
.context("Failed to get recipient commitment")?;
assert!(verify_commitment_is_in_state(new_commitment, ctx.sequencer_client()).await);
// Verify private recipient balance
let recipient_acc_private = ctx
.wallet()
.get_account_private(&recipient_account_id_private)
.get_account_private(recipient_account_id_private)
.context("Failed to get private recipient account")?;
let token_holding = TokenHolding::try_from(&recipient_acc_private.data)?;
@ -646,8 +642,8 @@ async fn create_token_with_private_definition_and_supply() -> Result<()> {
let name = "A NAME".to_string();
let total_supply = 37;
let subcommand = TokenProgramAgnosticSubcommand::New {
definition_account_id: format_private_account_id(&definition_account_id.to_string()),
supply_account_id: format_private_account_id(&supply_account_id.to_string()),
definition_account_id: format_private_account_id(definition_account_id),
supply_account_id: format_private_account_id(supply_account_id),
name,
total_supply,
};
@ -660,21 +656,21 @@ async fn create_token_with_private_definition_and_supply() -> Result<()> {
// Verify definition commitment
let definition_commitment = ctx
.wallet()
.get_private_account_commitment(&definition_account_id)
.get_private_account_commitment(definition_account_id)
.context("Failed to get definition commitment")?;
assert!(verify_commitment_is_in_state(definition_commitment, ctx.sequencer_client()).await);
// Verify supply commitment
let supply_commitment = ctx
.wallet()
.get_private_account_commitment(&supply_account_id)
.get_private_account_commitment(supply_account_id)
.context("Failed to get supply commitment")?;
assert!(verify_commitment_is_in_state(supply_commitment, ctx.sequencer_client()).await);
// Verify supply balance
let supply_acc = ctx
.wallet()
.get_account_private(&supply_account_id)
.get_account_private(supply_account_id)
.context("Failed to get supply account")?;
let token_holding = TokenHolding::try_from(&supply_acc.data)?;
@ -702,8 +698,8 @@ async fn create_token_with_private_definition_and_supply() -> Result<()> {
// Transfer tokens
let transfer_amount = 7;
let subcommand = TokenProgramAgnosticSubcommand::Send {
from: format_private_account_id(&supply_account_id.to_string()),
to: Some(format_private_account_id(&recipient_account_id.to_string())),
from: format_private_account_id(supply_account_id),
to: Some(format_private_account_id(recipient_account_id)),
to_npk: None,
to_ipk: None,
amount: transfer_amount,
@ -717,20 +713,20 @@ async fn create_token_with_private_definition_and_supply() -> Result<()> {
// Verify both commitments updated
let supply_commitment = ctx
.wallet()
.get_private_account_commitment(&supply_account_id)
.get_private_account_commitment(supply_account_id)
.context("Failed to get supply commitment")?;
assert!(verify_commitment_is_in_state(supply_commitment, ctx.sequencer_client()).await);
let recipient_commitment = ctx
.wallet()
.get_private_account_commitment(&recipient_account_id)
.get_private_account_commitment(recipient_account_id)
.context("Failed to get recipient commitment")?;
assert!(verify_commitment_is_in_state(recipient_commitment, ctx.sequencer_client()).await);
// Verify balances
let supply_acc = ctx
.wallet()
.get_account_private(&supply_account_id)
.get_account_private(supply_account_id)
.context("Failed to get supply account")?;
let token_holding = TokenHolding::try_from(&supply_acc.data)?;
assert_eq!(
@ -743,7 +739,7 @@ async fn create_token_with_private_definition_and_supply() -> Result<()> {
let recipient_acc = ctx
.wallet()
.get_account_private(&recipient_account_id)
.get_account_private(recipient_account_id)
.context("Failed to get recipient account")?;
let token_holding = TokenHolding::try_from(&recipient_acc.data)?;
assert_eq!(
@ -806,8 +802,8 @@ async fn shielded_token_transfer() -> Result<()> {
let name = "A NAME".to_string();
let total_supply = 37;
let subcommand = TokenProgramAgnosticSubcommand::New {
definition_account_id: format_public_account_id(&definition_account_id.to_string()),
supply_account_id: format_public_account_id(&supply_account_id.to_string()),
definition_account_id: format_public_account_id(definition_account_id),
supply_account_id: format_public_account_id(supply_account_id),
name,
total_supply,
};
@ -820,8 +816,8 @@ async fn shielded_token_transfer() -> Result<()> {
// Perform shielded transfer: public supply -> private recipient
let transfer_amount = 7;
let subcommand = TokenProgramAgnosticSubcommand::Send {
from: format_public_account_id(&supply_account_id.to_string()),
to: Some(format_private_account_id(&recipient_account_id.to_string())),
from: format_public_account_id(supply_account_id),
to: Some(format_private_account_id(recipient_account_id)),
to_npk: None,
to_ipk: None,
amount: transfer_amount,
@ -835,7 +831,7 @@ async fn shielded_token_transfer() -> Result<()> {
// Verify supply account balance
let supply_acc = ctx
.sequencer_client()
.get_account(supply_account_id.to_string())
.get_account(supply_account_id)
.await?
.account;
let token_holding = TokenHolding::try_from(&supply_acc.data)?;
@ -850,14 +846,14 @@ async fn shielded_token_transfer() -> Result<()> {
// Verify recipient commitment exists
let new_commitment = ctx
.wallet()
.get_private_account_commitment(&recipient_account_id)
.get_private_account_commitment(recipient_account_id)
.context("Failed to get recipient commitment")?;
assert!(verify_commitment_is_in_state(new_commitment, ctx.sequencer_client()).await);
// Verify recipient balance
let recipient_acc = ctx
.wallet()
.get_account_private(&recipient_account_id)
.get_account_private(recipient_account_id)
.context("Failed to get recipient account")?;
let token_holding = TokenHolding::try_from(&recipient_acc.data)?;
assert_eq!(
@ -920,8 +916,8 @@ async fn deshielded_token_transfer() -> Result<()> {
let name = "A NAME".to_string();
let total_supply = 37;
let subcommand = TokenProgramAgnosticSubcommand::New {
definition_account_id: format_public_account_id(&definition_account_id.to_string()),
supply_account_id: format_private_account_id(&supply_account_id.to_string()),
definition_account_id: format_public_account_id(definition_account_id),
supply_account_id: format_private_account_id(supply_account_id),
name,
total_supply,
};
@ -934,8 +930,8 @@ async fn deshielded_token_transfer() -> Result<()> {
// Perform deshielded transfer: private supply -> public recipient
let transfer_amount = 7;
let subcommand = TokenProgramAgnosticSubcommand::Send {
from: format_private_account_id(&supply_account_id.to_string()),
to: Some(format_public_account_id(&recipient_account_id.to_string())),
from: format_private_account_id(supply_account_id),
to: Some(format_public_account_id(recipient_account_id)),
to_npk: None,
to_ipk: None,
amount: transfer_amount,
@ -949,14 +945,14 @@ async fn deshielded_token_transfer() -> Result<()> {
// Verify supply account commitment exists
let new_commitment = ctx
.wallet()
.get_private_account_commitment(&supply_account_id)
.get_private_account_commitment(supply_account_id)
.context("Failed to get supply commitment")?;
assert!(verify_commitment_is_in_state(new_commitment, ctx.sequencer_client()).await);
// Verify supply balance
let supply_acc = ctx
.wallet()
.get_account_private(&supply_account_id)
.get_account_private(supply_account_id)
.context("Failed to get supply account")?;
let token_holding = TokenHolding::try_from(&supply_acc.data)?;
assert_eq!(
@ -970,7 +966,7 @@ async fn deshielded_token_transfer() -> Result<()> {
// Verify recipient balance
let recipient_acc = ctx
.sequencer_client()
.get_account(recipient_account_id.to_string())
.get_account(recipient_account_id)
.await?
.account;
let token_holding = TokenHolding::try_from(&recipient_acc.data)?;
@ -1021,8 +1017,8 @@ async fn token_claiming_path_with_private_accounts() -> Result<()> {
let name = "A NAME".to_string();
let total_supply = 37;
let subcommand = TokenProgramAgnosticSubcommand::New {
definition_account_id: format_private_account_id(&definition_account_id.to_string()),
supply_account_id: format_private_account_id(&supply_account_id.to_string()),
definition_account_id: format_private_account_id(definition_account_id),
supply_account_id: format_private_account_id(supply_account_id),
name,
total_supply,
};
@ -1050,14 +1046,14 @@ async fn token_claiming_path_with_private_accounts() -> Result<()> {
.wallet()
.storage()
.user_data
.get_private_account(&recipient_account_id)
.get_private_account(recipient_account_id)
.cloned()
.context("Failed to get private account keys")?;
// Mint using claiming path (foreign account)
let mint_amount = 9;
let subcommand = TokenProgramAgnosticSubcommand::Mint {
definition: format_private_account_id(&definition_account_id.to_string()),
definition: format_private_account_id(definition_account_id),
holder: None,
holder_npk: Some(hex::encode(holder_keys.nullifer_public_key.0)),
holder_ipk: Some(hex::encode(holder_keys.incoming_viewing_public_key.0)),
@ -1076,14 +1072,14 @@ async fn token_claiming_path_with_private_accounts() -> Result<()> {
// Verify commitment exists
let recipient_commitment = ctx
.wallet()
.get_private_account_commitment(&recipient_account_id)
.get_private_account_commitment(recipient_account_id)
.context("Failed to get recipient commitment")?;
assert!(verify_commitment_is_in_state(recipient_commitment, ctx.sequencer_client()).await);
// Verify balance
let recipient_acc = ctx
.wallet()
.get_account_private(&recipient_account_id)
.get_account_private(recipient_account_id)
.context("Failed to get recipient account")?;
let token_holding = TokenHolding::try_from(&recipient_acc.data)?;
assert_eq!(

View File

@ -1,8 +1,11 @@
use std::time::{Duration, Instant};
use anyhow::Result;
use integration_tests::TestContext;
use key_protocol::key_management::ephemeral_key_holder::EphemeralKeyHolder;
use integration_tests::{
TestContext,
config::{InitialData, SequencerPartialConfig},
};
use key_protocol::key_management::{KeyChain, ephemeral_key_holder::EphemeralKeyHolder};
use log::info;
use nssa::{
Account, AccountId, PrivacyPreservingTransaction, PrivateKey, PublicKey, PublicTransaction,
@ -15,21 +18,20 @@ use nssa_core::{
account::{AccountWithMetadata, data::Data},
encryption::IncomingViewingPublicKey,
};
use sequencer_core::config::{AccountInitialData, CommitmentsInitialData, SequencerConfig};
use tokio::test;
// TODO: Make a proper benchmark instead of an ad-hoc test
#[test]
pub async fn tps_test() -> Result<()> {
let num_transactions = 300 * 5;
let target_tps = 12;
let target_tps = 8;
let tps_test = TpsTestManager::new(target_tps, num_transactions);
let ctx = TestContext::new_with_sequencer_and_maybe_indexer_configs(
tps_test.generate_sequencer_config(),
None,
)
.await?;
let ctx = TestContext::builder()
.with_sequencer_partial_config(TpsTestManager::generate_sequencer_partial_config())
.with_initial_data(tps_test.generate_initial_data())
.build()
.await?;
let target_time = tps_test.target_time();
info!(
@ -59,12 +61,10 @@ pub async fn tps_test() -> Result<()> {
let tx_obj = ctx
.sequencer_client()
.get_transaction_by_hash(tx_hash.clone())
.get_transaction_by_hash(*tx_hash)
.await
.inspect_err(|err| {
log::warn!(
"Failed to get transaction by hash {tx_hash:#?} with error: {err:#?}"
)
log::warn!("Failed to get transaction by hash {tx_hash} with error: {err:#?}")
});
if let Ok(tx_obj) = tx_obj
@ -151,46 +151,35 @@ impl TpsTestManager {
/// Generates a sequencer configuration with initial balance in a number of public accounts.
/// The transactions generated with the function `build_public_txs` will be valid in a node
/// started with the config from this method.
pub(crate) fn generate_sequencer_config(&self) -> SequencerConfig {
fn generate_initial_data(&self) -> InitialData {
// Create public public keypairs
let initial_public_accounts = self
let public_accounts = self
.public_keypairs
.iter()
.map(|(_, account_id)| AccountInitialData {
account_id: account_id.to_string(),
balance: 10,
})
.map(|(key, _)| (key.clone(), 10))
.collect();
// Generate an initial commitment to be used with the privacy preserving transaction
// created with the `build_privacy_transaction` function.
let sender_nsk = [1; 32];
let sender_npk = NullifierPublicKey::from(&sender_nsk);
let key_chain = KeyChain::new_os_random();
let account = Account {
balance: 100,
nonce: 0xdeadbeef,
program_owner: Program::authenticated_transfer_program().id(),
data: Data::default(),
};
let initial_commitment = CommitmentsInitialData {
npk: sender_npk,
account,
};
SequencerConfig {
home: ".".into(),
override_rust_log: None,
genesis_id: 1,
is_genesis_random: true,
InitialData {
public_accounts,
private_accounts: vec![(key_chain, account)],
}
}
fn generate_sequencer_partial_config() -> SequencerPartialConfig {
SequencerPartialConfig {
max_num_tx_in_block: 300,
mempool_max_size: 10000,
block_create_timeout_millis: 12000,
port: 3040,
initial_accounts: initial_public_accounts,
initial_commitments: vec![initial_commitment],
signing_key: [37; 32],
bedrock_config: None,
retry_pending_blocks_timeout_millis: 1000 * 60 * 4,
mempool_max_size: 10_000,
block_create_timeout_millis: 12_000,
}
}
}

View File

@ -6,12 +6,9 @@ use std::{
};
use anyhow::Result;
use integration_tests::{
ACC_RECEIVER, ACC_SENDER, ACC_SENDER_PRIVATE, BlockingTestContext,
TIME_TO_WAIT_FOR_BLOCK_SECONDS,
};
use integration_tests::{BlockingTestContext, TIME_TO_WAIT_FOR_BLOCK_SECONDS};
use log::info;
use nssa::{Account, AccountId, PublicKey, program::Program};
use nssa::{Account, AccountId, PrivateKey, PublicKey, program::Program};
use nssa_core::program::DEFAULT_PROGRAM_ID;
use tempfile::tempdir;
use wallet::WalletCore;
@ -328,7 +325,7 @@ fn test_wallet_ffi_list_accounts() {
#[test]
fn test_wallet_ffi_get_balance_public() -> Result<()> {
let ctx = BlockingTestContext::new()?;
let account_id: AccountId = ACC_SENDER.parse().unwrap();
let account_id: AccountId = ctx.ctx.existing_public_accounts()[0];
let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx);
let balance = unsafe {
@ -356,7 +353,7 @@ fn test_wallet_ffi_get_balance_public() -> Result<()> {
#[test]
fn test_wallet_ffi_get_account_public() -> Result<()> {
let ctx = BlockingTestContext::new()?;
let account_id: AccountId = ACC_SENDER.parse().unwrap();
let account_id: AccountId = ctx.ctx.existing_public_accounts()[0];
let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx);
let mut out_account = FfiAccount::default();
@ -391,7 +388,7 @@ fn test_wallet_ffi_get_account_public() -> Result<()> {
#[test]
fn test_wallet_ffi_get_public_account_keys() -> Result<()> {
let ctx = BlockingTestContext::new()?;
let account_id: AccountId = ACC_SENDER.parse().unwrap();
let account_id: AccountId = ctx.ctx.existing_public_accounts()[0];
let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx);
let mut out_key = FfiPublicAccountKey::default();
@ -409,7 +406,7 @@ fn test_wallet_ffi_get_public_account_keys() -> Result<()> {
let private_key = ctx
.ctx
.wallet()
.get_account_public_signing_key(&account_id)
.get_account_public_signing_key(account_id)
.unwrap();
PublicKey::new_from_private_key(private_key)
};
@ -428,7 +425,7 @@ fn test_wallet_ffi_get_public_account_keys() -> Result<()> {
#[test]
fn test_wallet_ffi_get_private_account_keys() -> Result<()> {
let ctx = BlockingTestContext::new()?;
let account_id: AccountId = ACC_SENDER_PRIVATE.parse().unwrap();
let account_id: AccountId = ctx.ctx.existing_public_accounts()[0];
let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx);
let mut keys = FfiPrivateAccountKeys::default();
@ -446,7 +443,7 @@ fn test_wallet_ffi_get_private_account_keys() -> Result<()> {
.wallet()
.storage()
.user_data
.get_private_account(&account_id)
.get_private_account(account_id)
.unwrap()
.0;
@ -468,14 +465,15 @@ fn test_wallet_ffi_get_private_account_keys() -> Result<()> {
#[test]
fn test_wallet_ffi_account_id_to_base58() {
let account_id_str = ACC_SENDER;
let account_id: AccountId = account_id_str.parse().unwrap();
let private_key = PrivateKey::new_os_random();
let public_key = PublicKey::new_from_private_key(&private_key);
let account_id = AccountId::from(&public_key);
let ffi_bytes: FfiBytes32 = (&account_id).into();
let ptr = unsafe { wallet_ffi_account_id_to_base58((&ffi_bytes) as *const FfiBytes32) };
let ffi_result = unsafe { CStr::from_ptr(ptr).to_str().unwrap() };
assert_eq!(account_id_str, ffi_result);
assert_eq!(account_id.to_string(), ffi_result);
unsafe {
wallet_ffi_free_string(ptr);
@ -484,8 +482,11 @@ fn test_wallet_ffi_account_id_to_base58() {
#[test]
fn test_wallet_ffi_base58_to_account_id() {
let account_id_str = ACC_SENDER;
let account_id_c_str = CString::new(account_id_str).unwrap();
let private_key = PrivateKey::new_os_random();
let public_key = PublicKey::new_from_private_key(&private_key);
let account_id = AccountId::from(&public_key);
let account_id_str = account_id.to_string();
let account_id_c_str = CString::new(account_id_str.clone()).unwrap();
let account_id: AccountId = unsafe {
let mut out_account_id_bytes = FfiBytes32::default();
wallet_ffi_account_id_from_base58(
@ -566,8 +567,8 @@ fn test_wallet_ffi_init_public_account_auth_transfer() -> Result<()> {
fn test_wallet_ffi_transfer_public() -> Result<()> {
let ctx = BlockingTestContext::new().unwrap();
let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx);
let from: FfiBytes32 = (&ACC_SENDER.parse::<AccountId>().unwrap()).into();
let to: FfiBytes32 = (&ACC_RECEIVER.parse::<AccountId>().unwrap()).into();
let from: FfiBytes32 = (&ctx.ctx.existing_public_accounts()[0]).into();
let to: FfiBytes32 = (&ctx.ctx.existing_public_accounts()[1]).into();
let amount: [u8; 16] = 100u128.to_le_bytes();
let mut transfer_result = FfiTransferResult::default();

View File

@ -22,4 +22,4 @@ aes-gcm.workspace = true
bip39.workspace = true
hmac-sha512.workspace = true
thiserror.workspace = true
itertools.workspace = true
itertools.workspace = true

View File

@ -272,7 +272,7 @@ impl KeyTree<ChildKeysPublic> {
while let Some(curr_id) = id_stack.pop() {
if let Some(node) = self.key_map.get(&curr_id) {
let address = node.account_id();
let node_acc = client.get_account(address.to_string()).await?.account;
let node_acc = client.get_account(address).await?.account;
if node_acc == nssa::Account::default() && curr_id != ChainIndex::root() {
self.remove(address);
@ -307,7 +307,7 @@ impl KeyTree<ChildKeysPublic> {
for id in ChainIndex::chain_ids_at_depth(i) {
if let Some(node) = self.key_map.get(&id) {
let address = node.account_id();
let node_acc = client.get_account(address.to_string()).await?.account;
let node_acc = client.get_account(address).await?.account;
if node_acc == nssa::Account::default() {
let addr = node.account_id();

View File

@ -66,11 +66,11 @@ impl SeedHolder {
}
// Safe unwrap
*hash.first_chunk::<32>().unwrap()
HashType(*hash.first_chunk::<32>().unwrap())
}
pub fn produce_top_secret_key_holder(&self) -> SecretSpendingKey {
SecretSpendingKey(self.generate_secret_spending_key_hash())
SecretSpendingKey(self.generate_secret_spending_key_hash().into())
}
}
@ -94,7 +94,7 @@ impl SecretSpendingKey {
hasher.update([2u8]);
hasher.update([0u8; 22]);
<HashType>::from(hasher.finalize_fixed())
hasher.finalize_fixed().into()
}
pub fn generate_outgoing_viewing_secret_key(&self) -> OutgoingViewingSecretKey {
@ -105,7 +105,7 @@ impl SecretSpendingKey {
hasher.update([3u8]);
hasher.update([0u8; 22]);
<HashType>::from(hasher.finalize_fixed())
hasher.finalize_fixed().into()
}
pub fn produce_private_key_holder(&self) -> PrivateKeyHolder {

View File

@ -1,4 +1,4 @@
use std::collections::HashMap;
use std::collections::BTreeMap;
use anyhow::Result;
use k256::AffinePoint;
@ -15,10 +15,10 @@ pub type PublicKey = AffinePoint;
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct NSSAUserData {
/// Default public accounts
pub default_pub_account_signing_keys: HashMap<nssa::AccountId, nssa::PrivateKey>,
pub default_pub_account_signing_keys: BTreeMap<nssa::AccountId, nssa::PrivateKey>,
/// Default private accounts
pub default_user_private_accounts:
HashMap<nssa::AccountId, (KeyChain, nssa_core::account::Account)>,
BTreeMap<nssa::AccountId, (KeyChain, nssa_core::account::Account)>,
/// Tree of public keys
pub public_key_tree: KeyTreePublic,
/// Tree of private keys
@ -27,7 +27,7 @@ pub struct NSSAUserData {
impl NSSAUserData {
fn valid_public_key_transaction_pairing_check(
accounts_keys_map: &HashMap<nssa::AccountId, nssa::PrivateKey>,
accounts_keys_map: &BTreeMap<nssa::AccountId, nssa::PrivateKey>,
) -> bool {
let mut check_res = true;
for (account_id, key) in accounts_keys_map {
@ -42,7 +42,7 @@ impl NSSAUserData {
}
fn valid_private_key_transaction_pairing_check(
accounts_keys_map: &HashMap<nssa::AccountId, (KeyChain, nssa_core::account::Account)>,
accounts_keys_map: &BTreeMap<nssa::AccountId, (KeyChain, nssa_core::account::Account)>,
) -> bool {
let mut check_res = true;
for (account_id, (key, _)) in accounts_keys_map {
@ -56,8 +56,8 @@ impl NSSAUserData {
}
pub fn new_with_accounts(
default_accounts_keys: HashMap<nssa::AccountId, nssa::PrivateKey>,
default_accounts_key_chains: HashMap<
default_accounts_keys: BTreeMap<nssa::AccountId, nssa::PrivateKey>,
default_accounts_key_chains: BTreeMap<
nssa::AccountId,
(KeyChain, nssa_core::account::Account),
>,
@ -106,14 +106,14 @@ impl NSSAUserData {
/// Returns the signing key for public transaction signatures
pub fn get_pub_account_signing_key(
&self,
account_id: &nssa::AccountId,
account_id: nssa::AccountId,
) -> Option<&nssa::PrivateKey> {
// First seek in defaults
if let Some(key) = self.default_pub_account_signing_keys.get(account_id) {
if let Some(key) = self.default_pub_account_signing_keys.get(&account_id) {
Some(key)
// Then seek in tree
} else {
self.public_key_tree.get_node(*account_id).map(Into::into)
self.public_key_tree.get_node(account_id).map(Into::into)
}
}
@ -139,14 +139,14 @@ impl NSSAUserData {
/// Returns the signing key for public transaction signatures
pub fn get_private_account(
&self,
account_id: &nssa::AccountId,
account_id: nssa::AccountId,
) -> Option<&(KeyChain, nssa_core::account::Account)> {
// First seek in defaults
if let Some(key) = self.default_user_private_accounts.get(account_id) {
if let Some(key) = self.default_user_private_accounts.get(&account_id) {
Some(key)
// Then seek in tree
} else {
self.private_key_tree.get_node(*account_id).map(Into::into)
self.private_key_tree.get_node(account_id).map(Into::into)
}
}
@ -166,20 +166,30 @@ impl NSSAUserData {
}
}
pub fn account_ids(&self) -> impl Iterator<Item = &nssa::AccountId> {
pub fn account_ids(&self) -> impl Iterator<Item = nssa::AccountId> {
self.public_account_ids().chain(self.private_account_ids())
}
pub fn public_account_ids(&self) -> impl Iterator<Item = nssa::AccountId> {
self.default_pub_account_signing_keys
.keys()
.chain(self.public_key_tree.account_id_map.keys())
.chain(self.default_user_private_accounts.keys())
.chain(self.private_key_tree.account_id_map.keys())
.copied()
.chain(self.public_key_tree.account_id_map.keys().copied())
}
pub fn private_account_ids(&self) -> impl Iterator<Item = nssa::AccountId> {
self.default_user_private_accounts
.keys()
.copied()
.chain(self.private_key_tree.account_id_map.keys().copied())
}
}
impl Default for NSSAUserData {
fn default() -> Self {
Self::new_with_accounts(
HashMap::new(),
HashMap::new(),
BTreeMap::new(),
BTreeMap::new(),
KeyTreePublic::new(&SeedHolder::new_mnemonic("default".to_string())),
KeyTreePrivate::new(&SeedHolder::new_mnemonic("default".to_string())),
)
@ -198,16 +208,13 @@ mod tests {
let (account_id_private, _) = user_data
.generate_new_privacy_preserving_transaction_key_chain(Some(ChainIndex::root()));
let is_key_chain_generated = user_data.get_private_account(&account_id_private).is_some();
let is_key_chain_generated = user_data.get_private_account(account_id_private).is_some();
assert!(is_key_chain_generated);
let account_id_private_str = account_id_private.to_string();
println!("{account_id_private_str:#?}");
let key_chain = &user_data
.get_private_account(&account_id_private)
.unwrap()
.0;
let key_chain = &user_data.get_private_account(account_id_private).unwrap().0;
println!("{key_chain:#?}");
}
}

View File

@ -8,12 +8,11 @@ license = { workspace = true }
risc0-zkvm.workspace = true
borsh.workspace = true
serde.workspace = true
serde_with.workspace = true
thiserror.workspace = true
bytemuck.workspace = true
base58.workspace = true
k256 = { workspace = true, optional = true }
base58 = { workspace = true, optional = true }
anyhow = { workspace = true, optional = true }
chacha20 = { version = "0.9", default-features = false }
[dev-dependencies]
@ -21,4 +20,4 @@ serde_json.workspace = true
[features]
default = []
host = ["dep:k256", "dep:base58", "dep:anyhow"]
host = ["dep:k256"]

View File

@ -1,11 +1,10 @@
#[cfg(feature = "host")]
use std::{fmt::Display, str::FromStr};
#[cfg(feature = "host")]
use base58::{FromBase58, ToBase58};
use borsh::{BorshDeserialize, BorshSerialize};
pub use data::Data;
use serde::{Deserialize, Serialize};
use serde_with::{DeserializeFromStr, SerializeDisplay};
use crate::program::ProgramId;
@ -47,8 +46,8 @@ impl AccountWithMetadata {
Default,
Copy,
Clone,
Serialize,
Deserialize,
SerializeDisplay,
DeserializeFromStr,
PartialEq,
Eq,
Hash,
@ -80,23 +79,19 @@ impl AsRef<[u8]> for AccountId {
}
}
#[cfg(feature = "host")]
#[derive(Debug, thiserror::Error)]
pub enum AccountIdError {
#[error("invalid base58")]
InvalidBase58(#[from] anyhow::Error),
#[error("invalid base58: {0:?}")]
InvalidBase58(base58::FromBase58Error),
#[error("invalid length: expected 32 bytes, got {0}")]
InvalidLength(usize),
}
#[cfg(feature = "host")]
impl FromStr for AccountId {
type Err = AccountIdError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let bytes = s
.from_base58()
.map_err(|err| anyhow::anyhow!("Invalid base58 err {err:?}"))?;
let bytes = s.from_base58().map_err(AccountIdError::InvalidBase58)?;
if bytes.len() != 32 {
return Err(AccountIdError::InvalidLength(bytes.len()));
}
@ -106,7 +101,6 @@ impl FromStr for AccountId {
}
}
#[cfg(feature = "host")]
impl Display for AccountId {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.value.to_base58())

View File

@ -81,7 +81,7 @@ impl PrivacyPreservingTransaction {
let signer_account_ids = self.signer_account_ids();
// Check nonces corresponds to the current nonces on the public state.
for (account_id, nonce) in signer_account_ids.iter().zip(&message.nonces) {
let current_nonce = state.get_account_by_id(account_id).nonce;
let current_nonce = state.get_account_by_id(*account_id).nonce;
if current_nonce != *nonce {
return Err(NssaError::InvalidInput("Nonce mismatch".into()));
}
@ -93,7 +93,7 @@ impl PrivacyPreservingTransaction {
.iter()
.map(|account_id| {
AccountWithMetadata::new(
state.get_account_by_id(account_id),
state.get_account_by_id(*account_id),
signer_account_ids.contains(account_id),
*account_id,
)

Some files were not shown because too many files have changed in this diff Show More