Mirror of https://github.com/logos-blockchain/lssa.git (synced 2026-02-17 03:43:10 +00:00)

Merge branch 'main' into marvin/private_keys
Commit 943efd1433
32 .github/workflows/ci.yml (vendored)

@@ -100,7 +100,7 @@ jobs:
        RISC0_SKIP_BUILD: "1"
      run: cargo clippy -p "*programs" -- -D warnings

  tests:
  unit-tests:
    runs-on: ubuntu-latest
    timeout-minutes: 60
    steps:
@@ -126,7 +126,35 @@ jobs:
      env:
        RISC0_DEV_MODE: "1"
        RUST_LOG: "info"
      run: cargo nextest run --no-fail-fast -- --skip tps_test
      run: cargo nextest run --workspace --exclude integration_tests

  integration-tests:
    runs-on: ubuntu-latest
    timeout-minutes: 60
    steps:
      - uses: actions/checkout@v5
        with:
          ref: ${{ github.head_ref }}

      - uses: ./.github/actions/install-system-deps

      - uses: ./.github/actions/install-risc0

      - uses: ./.github/actions/install-logos-blockchain-circuits
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}

      - name: Install active toolchain
        run: rustup install

      - name: Install nextest
        run: cargo install --locked cargo-nextest

      - name: Run tests
        env:
          RISC0_DEV_MODE: "1"
          RUST_LOG: "info"
        run: cargo nextest run -p integration_tests -- --skip tps_test

  valid-proof-test:
    runs-on: ubuntu-latest
23 .github/workflows/deploy.yml (vendored)

@@ -1,23 +0,0 @@
name: Deploy Sequencer

on:
  workflow_dispatch:

jobs:
  deploy:
    runs-on: ubuntu-latest
    permissions:
      contents: read

    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      - name: Deploy to server
        uses: appleboy/ssh-action@v1.2.4
        with:
          host: ${{ secrets.DEPLOY_SSH_HOST }}
          username: ${{ secrets.DEPLOY_SSH_USERNAME }}
          key: ${{ secrets.DEPLOY_SSH_KEY }}
          envs: GITHUB_ACTOR
          script_path: ci_scripts/deploy.sh
15 .github/workflows/publish_image.yml (vendored)

@@ -1,4 +1,4 @@
name: Publish Sequencer Runner Image
name: Publish Docker Images

on:
  workflow_dispatch:
@@ -6,6 +6,15 @@ on:
jobs:
  publish:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        include:
          - name: sequencer_runner
            dockerfile: ./sequencer_runner/Dockerfile
          - name: indexer_service
            dockerfile: ./indexer/service/Dockerfile
          - name: explorer_service
            dockerfile: ./explorer_service/Dockerfile
    steps:
      - uses: actions/checkout@v5

@@ -23,7 +32,7 @@ jobs:
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ secrets.DOCKER_REGISTRY }}/${{ github.repository }}/sequencer_runner
          images: ${{ secrets.DOCKER_REGISTRY }}/${{ github.repository }}/${{ matrix.name }}
          tags: |
            type=ref,event=branch
            type=ref,event=pr
@@ -36,7 +45,7 @@ jobs:
        uses: docker/build-push-action@v5
        with:
          context: .
          file: ./sequencer_runner/Dockerfile
          file: ${{ matrix.dockerfile }}
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
1 .gitignore (vendored)

@@ -9,3 +9,4 @@ rocksdb
sequencer_runner/data/
storage.json
result
wallet-ffi/wallet_ffi.h
568 Cargo.lock (generated)
@ -69,7 +69,7 @@ dependencies = [
|
||||
"actix-rt",
|
||||
"actix-service",
|
||||
"actix-utils",
|
||||
"base64",
|
||||
"base64 0.22.1",
|
||||
"bitflags 2.10.0",
|
||||
"bytes",
|
||||
"bytestring",
|
||||
@ -302,6 +302,25 @@ version = "0.2.21"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923"
|
||||
|
||||
[[package]]
|
||||
name = "amm_core"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"borsh",
|
||||
"nssa_core",
|
||||
"risc0-zkvm",
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "amm_program"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"amm_core",
|
||||
"nssa_core",
|
||||
"token_core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "android_system_properties"
|
||||
version = "0.1.5"
|
||||
@ -379,6 +398,15 @@ version = "1.0.100"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61"
|
||||
|
||||
[[package]]
|
||||
name = "arc-swap"
|
||||
version = "1.8.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9ded5f9a03ac8f24d1b8a25101ee812cd32cdc8c50a4c50237de2c4915850e73"
|
||||
dependencies = [
|
||||
"rustversion",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "archery"
|
||||
version = "1.2.2"
|
||||
@ -794,6 +822,22 @@ version = "0.7.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50"
|
||||
|
||||
[[package]]
|
||||
name = "astral-tokio-tar"
|
||||
version = "0.5.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ec179a06c1769b1e42e1e2cbe74c7dcdb3d6383c838454d063eaac5bbb7ebbe5"
|
||||
dependencies = [
|
||||
"filetime",
|
||||
"futures-core",
|
||||
"libc",
|
||||
"portable-atomic",
|
||||
"rustc-hash",
|
||||
"tokio",
|
||||
"tokio-stream",
|
||||
"xattr",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "async-lock"
|
||||
version = "3.4.2"
|
||||
@ -927,7 +971,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8b52af3cb4058c895d37317bb27508dccc8e5f2d39454016b297bf4a400597b8"
|
||||
dependencies = [
|
||||
"axum-core 0.5.6",
|
||||
"base64",
|
||||
"base64 0.22.1",
|
||||
"bytes",
|
||||
"form_urlencoded",
|
||||
"futures-util",
|
||||
@ -1031,6 +1075,12 @@ version = "0.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6107fe1be6682a68940da878d9e9f5e90ca5745b3dec9fd1bb393c8777d4f581"
|
||||
|
||||
[[package]]
|
||||
name = "base64"
|
||||
version = "0.21.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567"
|
||||
|
||||
[[package]]
|
||||
name = "base64"
|
||||
version = "0.22.1"
|
||||
@ -1048,6 +1098,7 @@ name = "bedrock_client"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"common",
|
||||
"futures",
|
||||
"log",
|
||||
"logos-blockchain-chain-broadcast-service",
|
||||
@ -1154,6 +1205,80 @@ dependencies = [
|
||||
"generic-array 0.14.7",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bollard"
|
||||
version = "0.20.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "227aa051deec8d16bd9c34605e7aaf153f240e35483dd42f6f78903847934738"
|
||||
dependencies = [
|
||||
"async-stream",
|
||||
"base64 0.22.1",
|
||||
"bitflags 2.10.0",
|
||||
"bollard-buildkit-proto",
|
||||
"bollard-stubs",
|
||||
"bytes",
|
||||
"futures-core",
|
||||
"futures-util",
|
||||
"hex",
|
||||
"home",
|
||||
"http 1.4.0",
|
||||
"http-body-util",
|
||||
"hyper",
|
||||
"hyper-named-pipe",
|
||||
"hyper-rustls",
|
||||
"hyper-util",
|
||||
"hyperlocal",
|
||||
"log",
|
||||
"num",
|
||||
"pin-project-lite",
|
||||
"rand 0.9.2",
|
||||
"rustls",
|
||||
"rustls-native-certs",
|
||||
"rustls-pki-types",
|
||||
"serde",
|
||||
"serde_derive",
|
||||
"serde_json",
|
||||
"serde_urlencoded",
|
||||
"thiserror 2.0.17",
|
||||
"time",
|
||||
"tokio",
|
||||
"tokio-stream",
|
||||
"tokio-util",
|
||||
"tonic",
|
||||
"tower-service",
|
||||
"url",
|
||||
"winapi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bollard-buildkit-proto"
|
||||
version = "0.7.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "85a885520bf6249ab931a764ffdb87b0ceef48e6e7d807cfdb21b751e086e1ad"
|
||||
dependencies = [
|
||||
"prost 0.14.3",
|
||||
"prost-types",
|
||||
"tonic",
|
||||
"tonic-prost",
|
||||
"ureq",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bollard-stubs"
|
||||
version = "1.52.1-rc.29.1.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0f0a8ca8799131c1837d1282c3f81f31e76ceb0ce426e04a7fe1ccee3287c066"
|
||||
dependencies = [
|
||||
"base64 0.22.1",
|
||||
"bollard-buildkit-proto",
|
||||
"bytes",
|
||||
"prost 0.14.3",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"serde_repr",
|
||||
"time",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bonsai-sdk"
|
||||
version = "1.4.1"
|
||||
@ -1493,7 +1618,7 @@ name = "common"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"base64",
|
||||
"base64 0.22.1",
|
||||
"borsh",
|
||||
"hex",
|
||||
"log",
|
||||
@ -1503,6 +1628,7 @@ dependencies = [
|
||||
"reqwest",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"serde_with",
|
||||
"sha2",
|
||||
"thiserror 2.0.17",
|
||||
"url",
|
||||
@ -1908,7 +2034,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8d162beedaa69905488a8da94f5ac3edb4dd4788b732fadb7bd120b2625c1976"
|
||||
dependencies = [
|
||||
"data-encoding",
|
||||
"syn 1.0.109",
|
||||
"syn 2.0.111",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@ -2065,12 +2191,35 @@ dependencies = [
|
||||
"syn 2.0.111",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "docker-compose-types"
|
||||
version = "0.22.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7edb75a85449fd9c34d9fb3376c6208ec4115d2ca43b965175a52d71349ecab8"
|
||||
dependencies = [
|
||||
"derive_builder",
|
||||
"indexmap 2.12.1",
|
||||
"serde",
|
||||
"serde_yaml",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "docker-generate"
|
||||
version = "0.1.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ccf673e0848ef09fa4aeeba78e681cf651c0c7d35f76ee38cec8e55bc32fa111"
|
||||
|
||||
[[package]]
|
||||
name = "docker_credential"
|
||||
version = "1.3.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1d89dfcba45b4afad7450a99b39e751590463e45c04728cf555d36bb66940de8"
|
||||
dependencies = [
|
||||
"base64 0.21.7",
|
||||
"serde",
|
||||
"serde_json",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "downcast-rs"
|
||||
version = "1.2.1"
|
||||
@ -2277,6 +2426,16 @@ dependencies = [
|
||||
"windows-sys 0.61.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "etcetera"
|
||||
version = "0.11.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "de48cc4d1c1d97a20fd819def54b890cadde72ed3ad0c614822a0a433361be96"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"windows-sys 0.61.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "event-listener"
|
||||
version = "5.4.1"
|
||||
@ -2348,6 +2507,17 @@ version = "2.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be"
|
||||
|
||||
[[package]]
|
||||
name = "ferroid"
|
||||
version = "0.8.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bb330bbd4cb7a5b9f559427f06f98a4f853a137c8298f3bd3f8ca57663e21986"
|
||||
dependencies = [
|
||||
"portable-atomic",
|
||||
"rand 0.9.2",
|
||||
"web-time",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ff"
|
||||
version = "0.13.1"
|
||||
@ -2364,6 +2534,17 @@ version = "0.2.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d"
|
||||
|
||||
[[package]]
|
||||
name = "filetime"
|
||||
version = "0.2.27"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f98844151eee8917efc50bd9e8318cb963ae8b297431495d3f758616ea5c57db"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"libc",
|
||||
"libredox",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "find-msvc-tools"
|
||||
version = "0.1.5"
|
||||
@ -2838,6 +3019,15 @@ version = "1.1.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e89e8d20b3799fa526152a5301a771eaaad80857f83e01b23216ceaafb2d9280"
|
||||
|
||||
[[package]]
|
||||
name = "home"
|
||||
version = "0.5.12"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cc627f471c528ff0c4a49e1d5e60450c8f6461dd6d10ba9dcd3a61d3dff7728d"
|
||||
dependencies = [
|
||||
"windows-sys 0.61.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "html-escape"
|
||||
version = "0.2.13"
|
||||
@ -2954,6 +3144,21 @@ dependencies = [
|
||||
"want",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hyper-named-pipe"
|
||||
version = "0.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "73b7d8abf35697b81a825e386fc151e0d503e8cb5fcb93cc8669c376dfd6f278"
|
||||
dependencies = [
|
||||
"hex",
|
||||
"hyper",
|
||||
"hyper-util",
|
||||
"pin-project-lite",
|
||||
"tokio",
|
||||
"tower-service",
|
||||
"winapi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hyper-rustls"
|
||||
version = "0.27.7"
|
||||
@ -2972,6 +3177,19 @@ dependencies = [
|
||||
"webpki-roots",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hyper-timeout"
|
||||
version = "0.5.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0"
|
||||
dependencies = [
|
||||
"hyper",
|
||||
"hyper-util",
|
||||
"pin-project-lite",
|
||||
"tokio",
|
||||
"tower-service",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hyper-tls"
|
||||
version = "0.6.0"
|
||||
@ -2994,7 +3212,7 @@ version = "0.1.19"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "727805d60e7938b76b826a6ef209eb70eaa1812794f9424d4a4e2d740662df5f"
|
||||
dependencies = [
|
||||
"base64",
|
||||
"base64 0.22.1",
|
||||
"bytes",
|
||||
"futures-channel",
|
||||
"futures-core",
|
||||
@ -3014,6 +3232,21 @@ dependencies = [
|
||||
"windows-registry",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hyperlocal"
|
||||
version = "0.9.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "986c5ce3b994526b3cd75578e62554abd09f0899d6206de48b3e96ab34ccc8c7"
|
||||
dependencies = [
|
||||
"hex",
|
||||
"http-body-util",
|
||||
"hyper",
|
||||
"hyper-util",
|
||||
"pin-project-lite",
|
||||
"tokio",
|
||||
"tower-service",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "iana-time-zone"
|
||||
version = "0.1.64"
|
||||
@ -3157,6 +3390,7 @@ name = "indexer_core"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"async-stream",
|
||||
"bedrock_client",
|
||||
"borsh",
|
||||
"common",
|
||||
@ -3174,13 +3408,17 @@ name = "indexer_service"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"arc-swap",
|
||||
"async-trait",
|
||||
"clap",
|
||||
"env_logger",
|
||||
"futures",
|
||||
"indexer_core",
|
||||
"indexer_service_protocol",
|
||||
"indexer_service_rpc",
|
||||
"jsonrpsee",
|
||||
"log",
|
||||
"serde_json",
|
||||
"tokio",
|
||||
"tokio-util",
|
||||
]
|
||||
@ -3189,8 +3427,7 @@ dependencies = [
|
||||
name = "indexer_service_protocol"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"base64",
|
||||
"borsh",
|
||||
"base64 0.22.1",
|
||||
"common",
|
||||
"nssa",
|
||||
"nssa_core",
|
||||
@ -3258,15 +3495,14 @@ dependencies = [
|
||||
name = "integration_tests"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"actix-web",
|
||||
"anyhow",
|
||||
"base64",
|
||||
"base64 0.22.1",
|
||||
"borsh",
|
||||
"common",
|
||||
"env_logger",
|
||||
"futures",
|
||||
"hex",
|
||||
"indexer_core",
|
||||
"indexer_service",
|
||||
"key_protocol",
|
||||
"log",
|
||||
"nssa",
|
||||
@ -3275,6 +3511,7 @@ dependencies = [
|
||||
"sequencer_runner",
|
||||
"serde_json",
|
||||
"tempfile",
|
||||
"testcontainers",
|
||||
"token_core",
|
||||
"tokio",
|
||||
"url",
|
||||
@ -3454,7 +3691,7 @@ version = "0.26.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cf36eb27f8e13fa93dcb50ccb44c417e25b818cfa1a481b5470cd07b19c60b98"
|
||||
dependencies = [
|
||||
"base64",
|
||||
"base64 0.22.1",
|
||||
"futures-channel",
|
||||
"futures-util",
|
||||
"gloo-net",
|
||||
@ -3507,7 +3744,7 @@ version = "0.26.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "790bedefcec85321e007ff3af84b4e417540d5c87b3c9779b9e247d1bcc3dab8"
|
||||
dependencies = [
|
||||
"base64",
|
||||
"base64 0.22.1",
|
||||
"http-body",
|
||||
"hyper",
|
||||
"hyper-rustls",
|
||||
@ -3693,7 +3930,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5f9569fc37575a5d64c0512145af7630bf651007237ef67a8a77328199d315bb"
|
||||
dependencies = [
|
||||
"any_spawner",
|
||||
"base64",
|
||||
"base64 0.22.1",
|
||||
"cfg-if",
|
||||
"either_of",
|
||||
"futures",
|
||||
@ -3895,7 +4132,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dbf1045af93050bf3388d1c138426393fc131f6d9e46a65519da884c033ed730"
|
||||
dependencies = [
|
||||
"any_spawner",
|
||||
"base64",
|
||||
"base64 0.22.1",
|
||||
"codee",
|
||||
"futures",
|
||||
"hydration_context",
|
||||
@ -3952,6 +4189,7 @@ checksum = "df15f6eac291ed1cf25865b1ee60399f57e7c227e7f51bdbd4c5270396a9ed50"
|
||||
dependencies = [
|
||||
"bitflags 2.10.0",
|
||||
"libc",
|
||||
"redox_syscall 0.6.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@ -4808,8 +5046,8 @@ checksum = "38bf9645c8b145698bb0b18a4637dcacbc421ea49bef2317e4fd8065a387cf21"
|
||||
name = "nssa"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"amm_core",
|
||||
"borsh",
|
||||
"bytemuck",
|
||||
"env_logger",
|
||||
"hex",
|
||||
"hex-literal 1.1.0",
|
||||
@ -4832,7 +5070,6 @@ dependencies = [
|
||||
name = "nssa_core"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"base58",
|
||||
"borsh",
|
||||
"bytemuck",
|
||||
@ -4841,9 +5078,24 @@ dependencies = [
|
||||
"risc0-zkvm",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"serde_with",
|
||||
"thiserror 2.0.17",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "num"
|
||||
version = "0.4.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "35bd024e8b2ff75562e5f34e7f4905839deb4b22955ef5e73d2fea1b9813cb23"
|
||||
dependencies = [
|
||||
"num-bigint",
|
||||
"num-complex",
|
||||
"num-integer",
|
||||
"num-iter",
|
||||
"num-rational",
|
||||
"num-traits",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "num-bigint"
|
||||
version = "0.4.6"
|
||||
@ -4870,6 +5122,15 @@ dependencies = [
|
||||
"zeroize",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "num-complex"
|
||||
version = "0.4.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "73f88a1307638156682bada9d7604135552957b7818057dcef22705b4d509495"
|
||||
dependencies = [
|
||||
"num-traits",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "num-conv"
|
||||
version = "0.2.0"
|
||||
@ -4896,6 +5157,17 @@ dependencies = [
|
||||
"num-traits",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "num-rational"
|
||||
version = "0.4.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f83d14da390562dca69fc84082e73e548e1ad308d24accdedd2720017cb37824"
|
||||
dependencies = [
|
||||
"num-bigint",
|
||||
"num-integer",
|
||||
"num-traits",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "num-traits"
|
||||
version = "0.2.19"
|
||||
@ -5098,11 +5370,36 @@ checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"libc",
|
||||
"redox_syscall",
|
||||
"redox_syscall 0.5.18",
|
||||
"smallvec",
|
||||
"windows-link",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "parse-display"
|
||||
version = "0.9.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "914a1c2265c98e2446911282c6ac86d8524f495792c38c5bd884f80499c7538a"
|
||||
dependencies = [
|
||||
"parse-display-derive",
|
||||
"regex",
|
||||
"regex-syntax",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "parse-display-derive"
|
||||
version = "0.9.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2ae7800a4c974efd12df917266338e79a7a74415173caf7e70aa0a0707345281"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"regex",
|
||||
"regex-syntax",
|
||||
"structmeta",
|
||||
"syn 2.0.111",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "paste"
|
||||
version = "1.0.15"
|
||||
@ -5203,9 +5500,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "portable-atomic"
|
||||
version = "1.11.1"
|
||||
version = "1.13.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483"
|
||||
checksum = "c33a9471896f1c69cecef8d20cbe2f7accd12527ce60845ff44c153bb2a21b49"
|
||||
|
||||
[[package]]
|
||||
name = "postcard"
|
||||
@ -5339,6 +5636,8 @@ dependencies = [
|
||||
name = "programs"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"amm_core",
|
||||
"amm_program",
|
||||
"nssa_core",
|
||||
"risc0-zkvm",
|
||||
"serde",
|
||||
@ -5367,7 +5666,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2796faa41db3ec313a31f7624d9286acf277b52de526150b7e69f3debf891ee5"
|
||||
dependencies = [
|
||||
"bytes",
|
||||
"prost-derive",
|
||||
"prost-derive 0.13.5",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "prost"
|
||||
version = "0.14.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d2ea70524a2f82d518bce41317d0fae74151505651af45faf1ffbd6fd33f0568"
|
||||
dependencies = [
|
||||
"bytes",
|
||||
"prost-derive 0.14.3",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@ -5383,6 +5692,28 @@ dependencies = [
|
||||
"syn 2.0.111",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "prost-derive"
|
||||
version = "0.14.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "27c6023962132f4b30eb4c172c91ce92d933da334c59c23cddee82358ddafb0b"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"itertools 0.14.0",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.111",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "prost-types"
|
||||
version = "0.14.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8991c4cbdb8bc5b11f0b074ffe286c30e523de90fee5ba8132f1399f23cb3dd7"
|
||||
dependencies = [
|
||||
"prost 0.14.3",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "quanta"
|
||||
version = "0.12.6"
|
||||
@ -5630,6 +5961,15 @@ dependencies = [
|
||||
"bitflags 2.10.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "redox_syscall"
|
||||
version = "0.6.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ec96166dafa0886eb81fe1c0a388bece180fbef2135f97c1e2cf8302e74b43b5"
|
||||
dependencies = [
|
||||
"bitflags 2.10.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "redox_users"
|
||||
version = "0.5.2"
|
||||
@ -5702,7 +6042,7 @@ version = "0.12.26"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3b4c14b2d9afca6a60277086b0cc6a6ae0b568f6f7916c943a8cdc79f8be240f"
|
||||
dependencies = [
|
||||
"base64",
|
||||
"base64 0.22.1",
|
||||
"bytes",
|
||||
"encoding_rs",
|
||||
"futures-channel",
|
||||
@ -5945,7 +6285,7 @@ dependencies = [
|
||||
"derive_more 2.1.0",
|
||||
"hex",
|
||||
"lazy-regex",
|
||||
"prost",
|
||||
"prost 0.13.5",
|
||||
"risc0-binfmt",
|
||||
"risc0-build",
|
||||
"risc0-circuit-keccak",
|
||||
@ -6396,6 +6736,7 @@ dependencies = [
|
||||
"chrono",
|
||||
"common",
|
||||
"futures",
|
||||
"jsonrpsee",
|
||||
"log",
|
||||
"logos-blockchain-core",
|
||||
"logos-blockchain-key-management-system-service",
|
||||
@ -6403,12 +6744,12 @@ dependencies = [
|
||||
"nssa",
|
||||
"nssa_core",
|
||||
"rand 0.8.5",
|
||||
"reqwest",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"storage",
|
||||
"tempfile",
|
||||
"tokio",
|
||||
"url",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@ -6419,7 +6760,7 @@ dependencies = [
|
||||
"actix-web",
|
||||
"anyhow",
|
||||
"base58",
|
||||
"base64",
|
||||
"base64 0.22.1",
|
||||
"borsh",
|
||||
"common",
|
||||
"futures",
|
||||
@ -6445,6 +6786,8 @@ dependencies = [
|
||||
"clap",
|
||||
"common",
|
||||
"env_logger",
|
||||
"futures",
|
||||
"indexer_service_rpc",
|
||||
"log",
|
||||
"sequencer_core",
|
||||
"sequencer_rpc",
|
||||
@ -6536,6 +6879,17 @@ dependencies = [
|
||||
"thiserror 2.0.17",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_repr"
|
||||
version = "0.1.20"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "175ee3e80ae9982737ca543e96133087cbd9a485eecc3bc4de9c1a37b47ea59c"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.111",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_spanned"
|
||||
version = "0.6.9"
|
||||
@ -6572,7 +6926,7 @@ version = "3.16.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4fa237f2807440d238e0364a218270b98f767a00d3dada77b1c53ae88940e2e7"
|
||||
dependencies = [
|
||||
"base64",
|
||||
"base64 0.22.1",
|
||||
"chrono",
|
||||
"hex",
|
||||
"indexmap 1.9.3",
|
||||
@ -6597,6 +6951,19 @@ dependencies = [
|
||||
"syn 2.0.111",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_yaml"
|
||||
version = "0.9.33"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a0623d197252096520c6f2a5e1171ee436e5af99a5d7caa2891e55e61950e6d9"
|
||||
dependencies = [
|
||||
"indexmap 2.12.1",
|
||||
"itoa",
|
||||
"ryu",
|
||||
"serde",
|
||||
"unsafe-libyaml",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serdect"
|
||||
version = "0.2.0"
|
||||
@ -6614,7 +6981,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "353d02fa2886cd8dae0b8da0965289fa8f2ecc7df633d1ce965f62fdf9644d29"
|
||||
dependencies = [
|
||||
"axum 0.8.8",
|
||||
"base64",
|
||||
"base64 0.22.1",
|
||||
"bytes",
|
||||
"const-str 0.7.1",
|
||||
"const_format",
|
||||
@ -6786,7 +7153,7 @@ version = "0.8.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2e859df029d160cb88608f5d7df7fb4753fd20fdfb4de5644f3d8b8440841721"
|
||||
dependencies = [
|
||||
"base64",
|
||||
"base64 0.22.1",
|
||||
"bytes",
|
||||
"futures",
|
||||
"http 1.4.0",
|
||||
@ -6860,6 +7227,29 @@ version = "0.11.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
|
||||
|
||||
[[package]]
|
||||
name = "structmeta"
|
||||
version = "0.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2e1575d8d40908d70f6fd05537266b90ae71b15dbbe7a8b7dffa2b759306d329"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"structmeta-derive",
|
||||
"syn 2.0.111",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "structmeta-derive"
|
||||
version = "0.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "152a0b65a590ff6c3da95cabe2353ee04e6167c896b28e3b14478c2636c922fc"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.111",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "strum"
|
||||
version = "0.27.2"
|
||||
@ -7066,6 +7456,39 @@ dependencies = [
|
||||
"risc0-zkvm",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "testcontainers"
|
||||
version = "0.27.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c3fdcea723c64cc08dbc533b3761e345a15bf1222cbe6cb611de09b43f17a168"
|
||||
dependencies = [
|
||||
"astral-tokio-tar",
|
||||
"async-trait",
|
||||
"bollard",
|
||||
"bytes",
|
||||
"docker-compose-types",
|
||||
"docker_credential",
|
||||
"either",
|
||||
"etcetera",
|
||||
"ferroid",
|
||||
"futures",
|
||||
"http 1.4.0",
|
||||
"itertools 0.14.0",
|
||||
"log",
|
||||
"memchr",
|
||||
"parse-display",
|
||||
"pin-project-lite",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"serde_with",
|
||||
"thiserror 2.0.17",
|
||||
"tokio",
|
||||
"tokio-stream",
|
||||
"tokio-util",
|
||||
"url",
|
||||
"uuid",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "thiserror"
|
||||
version = "1.0.69"
|
||||
@ -7386,6 +7809,46 @@ version = "1.0.6+spec-1.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ab16f14aed21ee8bfd8ec22513f7287cd4a91aa92e44edfe2c17ddd004e92607"
|
||||
|
||||
[[package]]
|
||||
name = "tonic"
|
||||
version = "0.14.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a286e33f82f8a1ee2df63f4fa35c0becf4a85a0cb03091a15fd7bf0b402dc94a"
|
||||
dependencies = [
|
||||
"async-trait",
|
||||
"axum 0.8.8",
|
||||
"base64 0.22.1",
|
||||
"bytes",
|
||||
"h2 0.4.13",
|
||||
"http 1.4.0",
|
||||
"http-body",
|
||||
"http-body-util",
|
||||
"hyper",
|
||||
"hyper-timeout",
|
||||
"hyper-util",
|
||||
"percent-encoding",
|
||||
"pin-project",
|
||||
"socket2 0.6.1",
|
||||
"sync_wrapper",
|
||||
"tokio",
|
||||
"tokio-stream",
|
||||
"tower 0.5.2",
|
||||
"tower-layer",
|
||||
"tower-service",
|
||||
"tracing",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tonic-prost"
|
||||
version = "0.14.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d6c55a2d6a14174563de34409c9f92ff981d006f56da9c6ecd40d9d4a31500b0"
|
||||
dependencies = [
|
||||
"bytes",
|
||||
"prost 0.14.3",
|
||||
"tonic",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tower"
|
||||
version = "0.4.13"
|
||||
@ -7405,9 +7868,12 @@ checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9"
|
||||
dependencies = [
|
||||
"futures-core",
|
||||
"futures-util",
|
||||
"indexmap 2.12.1",
|
||||
"pin-project-lite",
|
||||
"slab",
|
||||
"sync_wrapper",
|
||||
"tokio",
|
||||
"tokio-util",
|
||||
"tower-layer",
|
||||
"tower-service",
|
||||
"tracing",
|
||||
@ -7657,6 +8123,12 @@ dependencies = [
|
||||
"subtle",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "unsafe-libyaml"
|
||||
version = "0.2.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861"
|
||||
|
||||
[[package]]
|
||||
name = "unsigned-varint"
|
||||
version = "0.8.0"
|
||||
@ -7669,6 +8141,34 @@ version = "0.9.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1"
|
||||
|
||||
[[package]]
|
||||
name = "ureq"
|
||||
version = "3.1.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d39cb1dbab692d82a977c0392ffac19e188bd9186a9f32806f0aaa859d75585a"
|
||||
dependencies = [
|
||||
"base64 0.22.1",
|
||||
"log",
|
||||
"percent-encoding",
|
||||
"rustls",
|
||||
"rustls-pki-types",
|
||||
"ureq-proto",
|
||||
"utf-8",
|
||||
"webpki-roots",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ureq-proto"
|
||||
version = "0.5.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d81f9efa9df032be5934a46a068815a10a042b494b6a58cb0a1a97bb5467ed6f"
|
||||
dependencies = [
|
||||
"base64 0.22.1",
|
||||
"http 1.4.0",
|
||||
"httparse",
|
||||
"log",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "url"
|
||||
version = "2.5.7"
|
||||
@ -7754,10 +8254,11 @@ dependencies = [
|
||||
name = "wallet"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"amm_core",
|
||||
"anyhow",
|
||||
"async-stream",
|
||||
"base58",
|
||||
"base64",
|
||||
"base64 0.22.1",
|
||||
"borsh",
|
||||
"bytemuck",
|
||||
"clap",
|
||||
@ -7773,7 +8274,6 @@ dependencies = [
|
||||
"nssa_core",
|
||||
"optfield",
|
||||
"rand 0.8.5",
|
||||
"risc0-zkvm",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"sha2",
|
||||
@ -8315,6 +8815,16 @@ dependencies = [
|
||||
"zeroize",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "xattr"
|
||||
version = "1.6.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "32e45ad4206f6d2479085147f02bc2ef834ac85886624a23575ae137c8aa8156"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"rustix",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "xxhash-rust"
|
||||
version = "0.8.15"
|
||||
|
||||
24 Cargo.toml

@@ -13,12 +13,17 @@ members = [
    "common",
    "nssa",
    "nssa/core",
    "programs/amm/core",
    "programs/amm",
    "programs/token/core",
    "programs/token",
    "sequencer_core",
    "sequencer_rpc",
    "sequencer_runner",
    "indexer_service",
    "indexer_service/protocol",
    "indexer_service/rpc",
    "indexer/core",
    "indexer/service",
    "indexer/service/protocol",
    "indexer/service/rpc",
    "explorer_service",
    "programs/token/core",
    "programs/token",
@@ -30,7 +35,6 @@ members = [
    "examples/program_deployment/methods",
    "examples/program_deployment/methods/guest",
    "bedrock_client",
    "indexer_core",
]

[workspace.dependencies]
@@ -43,16 +47,18 @@ key_protocol = { path = "key_protocol" }
sequencer_core = { path = "sequencer_core" }
sequencer_rpc = { path = "sequencer_rpc" }
sequencer_runner = { path = "sequencer_runner" }
indexer_service = { path = "indexer_service" }
indexer_service_protocol = { path = "indexer_service/protocol" }
indexer_service_rpc = { path = "indexer_service/rpc" }
indexer_core = { path = "indexer/core" }
indexer_service = { path = "indexer/service" }
indexer_service_protocol = { path = "indexer/service/protocol" }
indexer_service_rpc = { path = "indexer/service/rpc" }
wallet = { path = "wallet" }
wallet-ffi = { path = "wallet-ffi" }
token_core = { path = "programs/token/core" }
token_program = { path = "programs/token" }
amm_core = { path = "programs/amm/core" }
amm_program = { path = "programs/amm" }
test_program_methods = { path = "test_program_methods" }
bedrock_client = { path = "bedrock_client" }
indexer_core = { path = "indexer_core" }

tokio = { version = "1.28.2", features = [
    "net",
@@ -69,6 +75,7 @@ openssl = { version = "0.10", features = ["vendored"] }
openssl-probe = { version = "0.1.2" }
serde = { version = "1.0.60", default-features = false, features = ["derive"] }
serde_json = "1.0.81"
serde_with = "3.16.1"
actix = "0.13.0"
actix-cors = "0.6.1"
jsonrpsee = "0.26.0"
@@ -100,6 +107,7 @@ itertools = "0.14.0"
url = { version = "2.5.4", features = ["serde"] }
tokio-retry = "0.3.0"
schemars = "1.2.0"
async-stream = "0.3.6"

logos-blockchain-common-http-client = { git = "https://github.com/logos-blockchain/logos-blockchain.git" }
logos-blockchain-key-management-system-service = { git = "https://github.com/logos-blockchain/logos-blockchain.git" }
23 README.md

@@ -43,7 +43,7 @@ To our knowledge, this design is unique to LEZ. Other privacy-focused programmab
- Execution is handled fully on-chain without ZKPs.
- Alice’s and Charlie’s public balances are updated.

### Key points:
- The same token program is used in every execution.
- The only difference is execution mode: public execution updates visible state on-chain, while private execution relies on ZKPs.
@@ -127,6 +127,9 @@ RUST_LOG=info RISC0_DEV_MODE=1 cargo run $(pwd)/configs/debug all

# Run the sequencer and node

## Running Manually

The sequencer and node can be run locally:

1. On one terminal go to the `logos-blockchain/logos-blockchain` repo and run a local logos blockchain node:
@@ -138,10 +141,22 @@ The sequencer and node can be run locally:
   - `./target/debug/logos-blockchain-node nodes/node/config-one-node.yaml`

2. On another terminal go to the `logos-blockchain/lssa` repo and run the indexer service:
   - `git checkout schouhy/full-bedrock-integration`
   - `RUST_LOG=info cargo run --release -p indexer_service $(pwd)/integration_tests/configs/indexer/indexer_config.json`
   - `RUST_LOG=info cargo run --release -p indexer_service indexer/service/configs/indexer_config.json`

3. On another terminal go to the `logos-blockchain/lssa` repo and run the sequencer:
   - `git checkout schouhy/full-bedrock-integration`
   - `RUST_LOG=info RISC0_DEV_MODE=1 cargo run --release -p sequencer_runner sequencer_runner/configs/debug`

## Running with Docker

You can run the whole setup with Docker:

```bash
docker compose up
```

With that, you can send transactions from a local wallet to the sequencer running inside Docker using `wallet/configs/debug`, as well as explore blocks by opening `http://localhost:8080`.

## Caution for local image builds

If you're going to build the sequencer image locally, you should adjust the default Docker settings and set `defaultKeepStorage` to at least `25GB` so that layers can be kept properly cached.
32 bedrock/README.md (new file)

@@ -0,0 +1,32 @@
# Bedrock Configuration Files for the All-in-One Run and Integration Tests

## How to update

- `docker-compose.yml` file.

  Compare with `https://github.com/logos-blockchain/logos-blockchain/blob/master/compose.static.yml` and update the file accordingly; don't bring in unneeded services such as Grafana.
  Replace the `sha` hash with the hash of the latest `testnet` tag.

- `scripts` folder.

  ```bash
  curl https://raw.githubusercontent.com/logos-blockchain/logos-blockchain/master/testnet/scripts/run_cfgsync.sh >> scripts/run_cfgsync.sh
  curl https://raw.githubusercontent.com/logos-blockchain/logos-blockchain/master/testnet/scripts/run_logos_blockchain_node.sh >> scripts/run_logos_blockchain_node.sh
  chmod +x scripts/*
  ```

  Then in `scripts/run_logos_blockchain_node.sh` update `cfgsync-client` to `logos-blockchain-cfgsync-client`, and in `scripts/run_cfgsync.sh` update `cfgsync-server` to `logos-blockchain-cfgsync-server`, if it hasn't been fixed already; see <https://github.com/logos-blockchain/logos-blockchain/pull/2092>.

- `cfgsync.yaml` file.

  ```bash
  curl -O https://raw.githubusercontent.com/logos-blockchain/logos-blockchain/master/testnet/cfgsync.yaml
  ```

  Set `logger`, `tracing`, and `metrics` to `None`.

- `kzgrs_test_params` file.

  ```bash
  curl -O https://raw.githubusercontent.com/logos-blockchain/logos-blockchain/master/tests/kzgrs/kzgrs_test_params
  ```
12 bedrock/cfgsync.yaml (new file)

@@ -0,0 +1,12 @@
port: 4400
n_hosts: 4
timeout: 10

# Tracing
tracing_settings:
  logger: Stdout
  tracing: None
  filter: None
  metrics: None
  console: None
  level: DEBUG
47 bedrock/docker-compose.yml (new file)

@@ -0,0 +1,47 @@
services:

  cfgsync:
    image: ghcr.io/logos-blockchain/logos-blockchain@sha256:000982e751dfd346ca5346b8025c685fc3abc585079c59cde3bde7fd63100657
    volumes:
      - ./scripts:/etc/logos-blockchain/scripts
      - ./cfgsync.yaml:/etc/logos-blockchain/cfgsync.yaml:z
    entrypoint: /etc/logos-blockchain/scripts/run_cfgsync.sh

  logos-blockchain-node-0:
    image: ghcr.io/logos-blockchain/logos-blockchain@sha256:000982e751dfd346ca5346b8025c685fc3abc585079c59cde3bde7fd63100657
    ports:
      # Map 0 port so that multiple instances can run on the same host
      - "0:18080/tcp"
    volumes:
      - ./scripts:/etc/logos-blockchain/scripts
      - ./kzgrs_test_params:/kzgrs_test_params:z
    depends_on:
      - cfgsync
    entrypoint: /etc/logos-blockchain/scripts/run_logos_blockchain_node.sh

  logos-blockchain-node-1:
    image: ghcr.io/logos-blockchain/logos-blockchain@sha256:000982e751dfd346ca5346b8025c685fc3abc585079c59cde3bde7fd63100657
    volumes:
      - ./scripts:/etc/logos-blockchain/scripts
      - ./kzgrs_test_params:/kzgrs_test_params:z
    depends_on:
      - cfgsync
    entrypoint: /etc/logos-blockchain/scripts/run_logos_blockchain_node.sh

  logos-blockchain-node-2:
    image: ghcr.io/logos-blockchain/logos-blockchain@sha256:000982e751dfd346ca5346b8025c685fc3abc585079c59cde3bde7fd63100657
    volumes:
      - ./scripts:/etc/logos-blockchain/scripts
      - ./kzgrs_test_params:/kzgrs_test_params:z
    depends_on:
      - cfgsync
    entrypoint: /etc/logos-blockchain/scripts/run_logos_blockchain_node.sh

  logos-blockchain-node-3:
    image: ghcr.io/logos-blockchain/logos-blockchain@sha256:000982e751dfd346ca5346b8025c685fc3abc585079c59cde3bde7fd63100657
    volumes:
      - ./scripts:/etc/logos-blockchain/scripts
      - ./kzgrs_test_params:/kzgrs_test_params:z
    depends_on:
      - cfgsync
    entrypoint: /etc/logos-blockchain/scripts/run_logos_blockchain_node.sh

BIN bedrock/kzgrs_test_params (new file): binary file not shown.
5 bedrock/scripts/run_cfgsync.sh (new executable file)

@@ -0,0 +1,5 @@
#!/bin/sh

set -e

exec /usr/bin/logos-blockchain-cfgsync-server /etc/logos-blockchain/cfgsync.yaml

13 bedrock/scripts/run_logos_blockchain_node.sh (new executable file)

@@ -0,0 +1,13 @@
#!/bin/sh

set -e

export CFG_FILE_PATH="/config.yaml" \
  CFG_SERVER_ADDR="http://cfgsync:4400" \
  CFG_HOST_IP=$(hostname -i) \
  CFG_HOST_IDENTIFIER="validator-$(hostname -i)" \
  LOG_LEVEL="INFO" \
  POL_PROOF_DEV_MODE=true

/usr/bin/logos-blockchain-cfgsync-client && \
  exec /usr/bin/logos-blockchain-node /config.yaml
@@ -5,6 +5,8 @@ edition = "2024"
license = { workspace = true }

[dependencies]
common.workspace = true

reqwest.workspace = true
anyhow.workspace = true
tokio-retry.workspace = true
@@ -1,20 +1,32 @@
use anyhow::Result;
use std::time::Duration;

use anyhow::{Context as _, Result};
use common::config::BasicAuth;
use futures::{Stream, TryFutureExt};
use log::warn;
use log::{info, warn};
pub use logos_blockchain_chain_broadcast_service::BlockInfo;
pub use logos_blockchain_common_http_client::{BasicAuthCredentials, CommonHttpClient, Error};
pub use logos_blockchain_common_http_client::{CommonHttpClient, Error};
pub use logos_blockchain_core::{block::Block, header::HeaderId, mantle::SignedMantleTx};
use reqwest::{Client, Url};
use serde::{Deserialize, Serialize};
use tokio_retry::Retry;

/// Fibonacci backoff retry strategy configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
#[derive(Debug, Copy, Clone, Serialize, Deserialize)]
pub struct BackoffConfig {
    pub start_delay_millis: u64,
    pub max_retries: usize,
}

impl Default for BackoffConfig {
    fn default() -> Self {
        Self {
            start_delay_millis: 100,
            max_retries: 5,
        }
    }
}

// Simple wrapper
// maybe extend in the future for our purposes
// `Clone` is cheap because `CommonHttpClient` is internally reference counted (`Arc`).
@@ -22,26 +34,37 @@ pub struct BackoffConfig {
pub struct BedrockClient {
    http_client: CommonHttpClient,
    node_url: Url,
    backoff: BackoffConfig,
}

impl BedrockClient {
    pub fn new(auth: Option<BasicAuthCredentials>, node_url: Url) -> Result<Self> {
    pub fn new(backoff: BackoffConfig, node_url: Url, auth: Option<BasicAuth>) -> Result<Self> {
        info!("Creating Bedrock client with node URL {node_url}");
        let client = Client::builder()
            //Add more fields if needed
            .timeout(std::time::Duration::from_secs(60))
            .build()?;
            .build()
            .context("Failed to build HTTP client")?;

        let auth = auth.map(|a| {
            logos_blockchain_common_http_client::BasicAuthCredentials::new(a.username, a.password)
        });

        let http_client = CommonHttpClient::new_with_client(client, auth);
        Ok(Self {
            http_client,
            node_url,
            backoff,
        })
    }

    pub async fn post_transaction(&self, tx: SignedMantleTx) -> Result<(), Error> {
        self.http_client
            .post_transaction(self.node_url.clone(), tx)
            .await
        Retry::spawn(self.backoff_strategy(), || {
            self.http_client
                .post_transaction(self.node_url.clone(), tx.clone())
                .inspect_err(|err| warn!("Transaction posting failed with error: {err:#}"))
        })
        .await
    }

    pub async fn get_lib_stream(&self) -> Result<impl Stream<Item = BlockInfo>, Error> {
@@ -51,17 +74,17 @@ impl BedrockClient {
    pub async fn get_block_by_id(
        &self,
        header_id: HeaderId,
        backoff: &BackoffConfig,
    ) -> Result<Option<Block<SignedMantleTx>>, Error> {
        let strategy =
            tokio_retry::strategy::FibonacciBackoff::from_millis(backoff.start_delay_millis)
                .take(backoff.max_retries);

        Retry::spawn(strategy, || {
        Retry::spawn(self.backoff_strategy(), || {
            self.http_client
                .get_block_by_id(self.node_url.clone(), header_id)
                .inspect_err(|err| warn!("Block fetching failed with err: {err:#?}"))
                .inspect_err(|err| warn!("Block fetching failed with error: {err:#}"))
        })
        .await
    }

    fn backoff_strategy(&self) -> impl Iterator<Item = Duration> {
        tokio_retry::strategy::FibonacciBackoff::from_millis(self.backoff.start_delay_millis)
            .take(self.backoff.max_retries)
    }
}
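The change above funnels every Bedrock RPC call through `Retry::spawn` with the Fibonacci strategy built by `backoff_strategy`, so the retry policy is configured once on the client instead of per call. A minimal, self-contained sketch of the same pattern, assuming `tokio` and `tokio_retry` 0.3; `Backoff` and `flaky_call` are illustrative stand-ins, not types from this repository:

```rust
use std::sync::atomic::{AtomicU32, Ordering};
use std::time::Duration;

use tokio_retry::{Retry, strategy::FibonacciBackoff};

/// Mirrors BackoffConfig from the diff: an initial delay plus a retry cap.
struct Backoff {
    start_delay_millis: u64,
    max_retries: usize,
}

impl Backoff {
    /// Fibonacci-spaced delays (100ms, 100ms, 200ms, 300ms, ...), capped at `max_retries` attempts.
    fn strategy(&self) -> impl Iterator<Item = Duration> {
        FibonacciBackoff::from_millis(self.start_delay_millis).take(self.max_retries)
    }
}

/// Hypothetical fallible call standing in for `post_transaction` / `get_block_by_id`:
/// fails twice, then succeeds.
async fn flaky_call(attempts: &AtomicU32) -> Result<&'static str, &'static str> {
    if attempts.fetch_add(1, Ordering::SeqCst) < 2 {
        Err("transient network error")
    } else {
        Ok("ok")
    }
}

#[tokio::main]
async fn main() {
    let backoff = Backoff { start_delay_millis: 100, max_retries: 5 };
    let attempts = AtomicU32::new(0);

    // Retry::spawn re-invokes the closure until it returns Ok or the strategy is exhausted.
    let result = Retry::spawn(backoff.strategy(), || flaky_call(&attempts)).await;
    assert_eq!(result, Ok("ok"));
}
```

With the policy stored on `BedrockClient`, callers such as `get_block_by_id` no longer need to thread a `&BackoffConfig` argument through every call site.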
@@ -1,84 +0,0 @@
#!/usr/bin/env bash
set -e

# Base directory for deployment
LSSA_DIR="/home/arjentix/test_deploy/lssa"

# Expect GITHUB_ACTOR to be passed as first argument or environment variable
GITHUB_ACTOR="${1:-${GITHUB_ACTOR:-unknown}}"

# Function to log messages with timestamp
log_deploy() {
  echo "[$(date '+%Y-%m-%d %H:%M:%S %Z')] $1" >> "${LSSA_DIR}/deploy.log"
}

# Error handler
handle_error() {
  echo "✗ Deployment failed by: ${GITHUB_ACTOR}"
  log_deploy "Deployment failed by: ${GITHUB_ACTOR}"
  exit 1
}

find_sequencer_runner_pids() {
  pgrep -f "sequencer_runner" | grep -v $$
}

# Set trap to catch any errors
trap 'handle_error' ERR

# Log deployment info
log_deploy "Deployment initiated by: ${GITHUB_ACTOR}"

# Navigate to code directory
if [ ! -d "${LSSA_DIR}/code" ]; then
  mkdir -p "${LSSA_DIR}/code"
fi
cd "${LSSA_DIR}/code"

# Stop current sequencer if running
if find_sequencer_runner_pids > /dev/null; then
  echo "Stopping current sequencer..."
  find_sequencer_runner_pids | xargs -r kill -SIGINT || true
  sleep 2
  # Force kill if still running
  find_sequencer_runner_pids | grep -v $$ | xargs -r kill -9 || true
fi

# Clone or update repository
if [ -d ".git" ]; then
  echo "Updating existing repository..."
  git fetch origin
  git checkout main
  git reset --hard origin/main
else
  echo "Cloning repository..."
  git clone https://github.com/logos-blockchain/lssa.git .
  git checkout main
fi

# Build sequencer_runner and wallet in release mode
echo "Building sequencer_runner"
# That could be just `cargo build --release --bin sequencer_runner --bin wallet`
# but we have `no_docker` feature bug, see issue #179
cd sequencer_runner
cargo build --release
cd ../wallet
cargo build --release
cd ..

# Run sequencer_runner with config
echo "Starting sequencer_runner..."
export RUST_LOG=info
nohup ./target/release/sequencer_runner "${LSSA_DIR}/configs/sequencer" > "${LSSA_DIR}/sequencer.log" 2>&1 &

# Wait 5 seconds and check health using wallet
sleep 5
if ./target/release/wallet check-health; then
  echo "✓ Sequencer started successfully and is healthy"
  log_deploy "Deployment completed successfully by: ${GITHUB_ACTOR}"
  exit 0
else
  echo "✗ Sequencer failed health check"
  tail -n 50 "${LSSA_DIR}/sequencer.log"
  handle_error
fi
@@ -12,6 +12,7 @@ anyhow.workspace = true
thiserror.workspace = true
serde_json.workspace = true
serde.workspace = true
serde_with.workspace = true
reqwest.workspace = true
sha2.workspace = true
log.workspace = true
@@ -1,9 +1,8 @@
use borsh::{BorshDeserialize, BorshSerialize};
use sha2::{Digest, Sha256, digest::FixedOutput};

use crate::transaction::EncodedTransaction;
use crate::{HashType, transaction::NSSATransaction};

pub type HashType = [u8; 32];
pub type MantleMsgId = [u8; 32];

#[derive(Debug, Clone)]
@@ -16,11 +15,11 @@ impl OwnHasher {
        let mut hasher = Sha256::new();

        hasher.update(data);
        <HashType>::from(hasher.finalize_fixed())
        HashType(<[u8; 32]>::from(hasher.finalize_fixed()))
    }
}

pub type BlockHash = [u8; 32];
pub type BlockHash = HashType;
pub type BlockId = u64;
pub type TimeStamp = u64;

@@ -35,7 +34,7 @@ pub struct BlockHeader {

#[derive(Debug, Clone, BorshSerialize, BorshDeserialize)]
pub struct BlockBody {
    pub transactions: Vec<EncodedTransaction>,
    pub transactions: Vec<NSSATransaction>,
}

#[derive(Debug, Clone, BorshSerialize, BorshDeserialize)]
@@ -58,7 +57,7 @@ pub struct HashableBlockData {
    pub block_id: BlockId,
    pub prev_block_hash: BlockHash,
    pub timestamp: TimeStamp,
    pub transactions: Vec<EncodedTransaction>,
    pub transactions: Vec<NSSATransaction>,
}

impl HashableBlockData {
@@ -104,12 +103,12 @@ impl From<Block> for HashableBlockData {

#[cfg(test)]
mod tests {
    use crate::{block::HashableBlockData, test_utils};
    use crate::{HashType, block::HashableBlockData, test_utils};

    #[test]
    fn test_encoding_roundtrip() {
        let transactions = vec![test_utils::produce_dummy_empty_transaction()];
        let block = test_utils::produce_dummy_block(1, Some([1; 32]), transactions);
        let block = test_utils::produce_dummy_block(1, Some(HashType([1; 32])), transactions);
        let hashable = HashableBlockData::from(block);
        let bytes = borsh::to_vec(&hashable).unwrap();
        let block_from_bytes = borsh::from_slice::<HashableBlockData>(&bytes).unwrap();
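For context on the `OwnHasher` change above: the digest is still plain SHA-256; it is now just wrapped in the `HashType` newtype instead of returned as a bare `[u8; 32]`. A small sketch of that conversion, assuming the `sha2` and `hex` crates and a simplified local `HashType` (not the crate's own type):

```rust
use sha2::{Digest, Sha256};

/// Simplified local stand-in for the `HashType` newtype introduced in this merge.
#[derive(Debug, PartialEq, Eq)]
pub struct HashType(pub [u8; 32]);

/// Hash arbitrary bytes and wrap the 32-byte digest, mirroring `OwnHasher::hash`.
fn hash_bytes(data: &[u8]) -> HashType {
    let mut hasher = Sha256::new();
    hasher.update(data);
    HashType(hasher.finalize().into())
}

fn main() {
    let digest = hash_bytes(b"block bytes");
    // The inner array stays accessible for code that needs the raw digest.
    assert_eq!(digest.0.len(), 32);
    println!("{}", hex::encode(digest.0));
}
```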
@@ -1,6 +0,0 @@
use serde::{Deserialize, Serialize};

#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum Message {
    L2BlockFinalized { l2_block_height: u64 },
}
@@ -1 +0,0 @@
pub mod indexer;
55 common/src/config.rs (new file)

@@ -0,0 +1,55 @@
//! Common configuration structures and utilities.

use std::str::FromStr;

use logos_blockchain_common_http_client::BasicAuthCredentials;
use serde::{Deserialize, Serialize};

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BasicAuth {
    pub username: String,
    pub password: Option<String>,
}

impl std::fmt::Display for BasicAuth {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.username)?;
        if let Some(password) = &self.password {
            write!(f, ":{password}")?;
        }

        Ok(())
    }
}

impl FromStr for BasicAuth {
    type Err = anyhow::Error;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let parse = || {
            let mut parts = s.splitn(2, ':');
            let username = parts.next()?;
            let password = parts.next().filter(|p| !p.is_empty());
            if parts.next().is_some() {
                return None;
            }

            Some((username, password))
        };

        let (username, password) = parse().ok_or_else(|| {
            anyhow::anyhow!("Invalid auth format. Expected 'user' or 'user:password'")
        })?;

        Ok(Self {
            username: username.to_string(),
            password: password.map(|p| p.to_string()),
        })
    }
}

impl From<BasicAuth> for BasicAuthCredentials {
    fn from(value: BasicAuth) -> Self {
        BasicAuthCredentials::new(value.username, value.password)
    }
}
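The `FromStr` implementation above accepts either `user` or `user:password` and treats an empty password as absent. A hedged usage sketch, assuming the workspace `common` crate and `anyhow` are available; the surrounding `main` and the sample credentials are illustrative only:

```rust
use common::config::BasicAuth;

fn main() -> anyhow::Result<()> {
    // "user:password" form: both parts are captured.
    let with_password: BasicAuth = "alice:secret".parse()?;
    assert_eq!(with_password.username, "alice");
    assert_eq!(with_password.password.as_deref(), Some("secret"));

    // A bare username, or a trailing ':', yields no password.
    let without_password: BasicAuth = "alice:".parse()?;
    assert!(without_password.password.is_none());

    // Display round-trips back to the "user:password" form used in config files and CLI flags.
    assert_eq!(with_password.to_string(), "alice:secret");
    Ok(())
}
```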
@@ -1,5 +1,10 @@
use std::{fmt::Display, str::FromStr};

use borsh::{BorshDeserialize, BorshSerialize};
use serde_with::{DeserializeFromStr, SerializeDisplay};

pub mod block;
pub mod communication;
pub mod config;
pub mod error;
pub mod rpc_primitives;
pub mod sequencer_client;
@@ -8,6 +13,81 @@ pub mod transaction;
// Module for tests utility functions
// TODO: Compile only for tests
pub mod test_utils;
pub type HashType = [u8; 32];

pub const PINATA_BASE58: &str = "EfQhKQAkX2FJiwNii2WFQsGndjvF1Mzd7RuVe7QdPLw7";

#[derive(
    Debug,
    Default,
    Copy,
    Clone,
    PartialEq,
    Eq,
    Hash,
    SerializeDisplay,
    DeserializeFromStr,
    BorshSerialize,
    BorshDeserialize,
)]
pub struct HashType(pub [u8; 32]);

impl Display for HashType {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", hex::encode(self.0))
    }
}

impl FromStr for HashType {
    type Err = hex::FromHexError;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let mut bytes = [0u8; 32];
        hex::decode_to_slice(s, &mut bytes)?;
        Ok(HashType(bytes))
    }
}

impl AsRef<[u8]> for HashType {
    fn as_ref(&self) -> &[u8] {
        &self.0
    }
}

impl From<HashType> for [u8; 32] {
    fn from(hash: HashType) -> Self {
        hash.0
    }
}

impl From<[u8; 32]> for HashType {
    fn from(bytes: [u8; 32]) -> Self {
        HashType(bytes)
    }
}

impl TryFrom<Vec<u8>> for HashType {
    type Error = <[u8; 32] as TryFrom<Vec<u8>>>::Error;

    fn try_from(value: Vec<u8>) -> Result<Self, Self::Error> {
        Ok(HashType(value.try_into()?))
    }
}

impl From<HashType> for Vec<u8> {
    fn from(hash: HashType) -> Self {
        hash.0.to_vec()
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn serialization_roundtrip() {
        let original = HashType([1u8; 32]);
        let serialized = original.to_string();
        let deserialized = HashType::from_str(&serialized).unwrap();
        assert_eq!(original, deserialized);
    }
}
@ -1,5 +1,6 @@
|
||||
use std::collections::HashMap;
|
||||
|
||||
use nssa::AccountId;
|
||||
use nssa_core::program::ProgramId;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::Value;
|
||||
@ -8,7 +9,7 @@ use super::{
|
||||
errors::RpcParseError,
|
||||
parser::{RpcRequest, parse_params},
|
||||
};
|
||||
use crate::parse_request;
|
||||
use crate::{HashType, parse_request};
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
pub struct HelloRequest {}
|
||||
@ -47,22 +48,22 @@ pub struct GetInitialTestnetAccountsRequest {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
pub struct GetAccountBalanceRequest {
|
||||
pub account_id: String,
|
||||
pub account_id: AccountId,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
pub struct GetTransactionByHashRequest {
|
||||
pub hash: String,
|
||||
pub hash: HashType,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
pub struct GetAccountsNoncesRequest {
|
||||
pub account_ids: Vec<String>,
|
||||
pub account_ids: Vec<AccountId>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
pub struct GetAccountRequest {
|
||||
pub account_id: String,
|
||||
pub account_id: AccountId,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
@ -73,11 +74,6 @@ pub struct GetProofForCommitmentRequest {
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
pub struct GetProgramIdsRequest {}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
pub struct PostIndexerMessageRequest {
|
||||
pub message: crate::communication::indexer::Message,
|
||||
}
|
||||
|
||||
parse_request!(HelloRequest);
|
||||
parse_request!(RegisterAccountRequest);
|
||||
parse_request!(SendTxRequest);
|
||||
@ -92,7 +88,6 @@ parse_request!(GetAccountsNoncesRequest);
|
||||
parse_request!(GetProofForCommitmentRequest);
|
||||
parse_request!(GetAccountRequest);
|
||||
parse_request!(GetProgramIdsRequest);
|
||||
parse_request!(PostIndexerMessageRequest);
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
pub struct HelloResponse {
|
||||
@ -107,7 +102,7 @@ pub struct RegisterAccountResponse {
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
pub struct SendTxResponse {
|
||||
pub status: String,
|
||||
pub tx_hash: String,
|
||||
pub tx_hash: HashType,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
@ -222,8 +217,3 @@ pub struct GetInitialTestnetAccountsResponse {
|
||||
pub account_id: String,
|
||||
pub balance: u64,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
pub struct PostIndexerMessageResponse {
|
||||
pub status: String,
|
||||
}
|
||||
|
||||
@ -1,10 +1,10 @@
|
||||
use std::{collections::HashMap, ops::RangeInclusive, str::FromStr};
|
||||
use std::{collections::HashMap, ops::RangeInclusive};
|
||||
|
||||
use anyhow::Result;
|
||||
use logos_blockchain_common_http_client::BasicAuthCredentials;
|
||||
use nssa::AccountId;
|
||||
use nssa_core::program::ProgramId;
|
||||
use reqwest::Client;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde::Deserialize;
|
||||
use serde_json::Value;
|
||||
use url::Url;
|
||||
|
||||
@ -13,6 +13,8 @@ use super::rpc_primitives::requests::{
|
||||
GetGenesisIdRequest, GetGenesisIdResponse, GetInitialTestnetAccountsRequest,
|
||||
};
|
||||
use crate::{
|
||||
HashType,
|
||||
config::BasicAuth,
|
||||
error::{SequencerClientError, SequencerRpcError},
|
||||
rpc_primitives::{
|
||||
self,
|
||||
@ -22,62 +24,12 @@ use crate::{
|
||||
GetInitialTestnetAccountsResponse, GetLastBlockRequest, GetLastBlockResponse,
|
||||
GetProgramIdsRequest, GetProgramIdsResponse, GetProofForCommitmentRequest,
|
||||
GetProofForCommitmentResponse, GetTransactionByHashRequest,
|
||||
GetTransactionByHashResponse, PostIndexerMessageRequest, PostIndexerMessageResponse,
|
||||
SendTxRequest, SendTxResponse,
|
||||
GetTransactionByHashResponse, SendTxRequest, SendTxResponse,
|
||||
},
|
||||
},
|
||||
transaction::{EncodedTransaction, NSSATransaction},
|
||||
transaction::NSSATransaction,
|
||||
};
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct BasicAuth {
|
||||
pub username: String,
|
||||
pub password: Option<String>,
|
||||
}
|
||||
|
||||
impl std::fmt::Display for BasicAuth {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}", self.username)?;
|
||||
if let Some(password) = &self.password {
|
||||
write!(f, ":{password}")?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for BasicAuth {
|
||||
type Err = anyhow::Error;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
let parse = || {
|
||||
let mut parts = s.splitn(2, ':');
|
||||
let username = parts.next()?;
|
||||
let password = parts.next().filter(|p| !p.is_empty());
|
||||
if parts.next().is_some() {
|
||||
return None;
|
||||
}
|
||||
|
||||
Some((username, password))
|
||||
};
|
||||
|
||||
let (username, password) = parse().ok_or_else(|| {
|
||||
anyhow::anyhow!("Invalid auth format. Expected 'user' or 'user:password'")
|
||||
})?;
|
||||
|
||||
Ok(Self {
|
||||
username: username.to_string(),
|
||||
password: password.map(|p| p.to_string()),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl From<BasicAuth> for BasicAuthCredentials {
|
||||
fn from(value: BasicAuth) -> Self {
|
||||
BasicAuthCredentials::new(value.username, value.password)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct SequencerClient {
|
||||
pub client: reqwest::Client,
|
||||
@ -196,7 +148,7 @@ impl SequencerClient {
|
||||
/// bytes.
|
||||
pub async fn get_account_balance(
|
||||
&self,
|
||||
account_id: String,
|
||||
account_id: AccountId,
|
||||
) -> Result<GetAccountBalanceResponse, SequencerClientError> {
|
||||
let block_req = GetAccountBalanceRequest { account_id };
|
||||
|
||||
@ -215,7 +167,7 @@ impl SequencerClient {
|
||||
/// 32 bytes.
|
||||
pub async fn get_accounts_nonces(
|
||||
&self,
|
||||
account_ids: Vec<String>,
|
||||
account_ids: Vec<AccountId>,
|
||||
) -> Result<GetAccountsNoncesResponse, SequencerClientError> {
|
||||
let block_req = GetAccountsNoncesRequest { account_ids };
|
||||
|
||||
@ -232,7 +184,7 @@ impl SequencerClient {
|
||||
|
||||
pub async fn get_account(
|
||||
&self,
|
||||
account_id: String,
|
||||
account_id: AccountId,
|
||||
) -> Result<GetAccountResponse, SequencerClientError> {
|
||||
let block_req = GetAccountRequest { account_id };
|
||||
|
||||
@ -248,7 +200,7 @@ impl SequencerClient {
|
||||
/// Get transaction details for `hash`.
|
||||
pub async fn get_transaction_by_hash(
|
||||
&self,
|
||||
hash: String,
|
||||
hash: HashType,
|
||||
) -> Result<GetTransactionByHashResponse, SequencerClientError> {
|
||||
let block_req = GetTransactionByHashRequest { hash };
|
||||
|
||||
@ -268,7 +220,7 @@ impl SequencerClient {
|
||||
&self,
|
||||
transaction: nssa::PublicTransaction,
|
||||
) -> Result<SendTxResponse, SequencerClientError> {
|
||||
let transaction = EncodedTransaction::from(NSSATransaction::Public(transaction));
|
||||
let transaction = NSSATransaction::Public(transaction);
|
||||
|
||||
let tx_req = SendTxRequest {
|
||||
transaction: borsh::to_vec(&transaction).unwrap(),
|
||||
@ -288,7 +240,7 @@ impl SequencerClient {
|
||||
&self,
|
||||
transaction: nssa::PrivacyPreservingTransaction,
|
||||
) -> Result<SendTxResponse, SequencerClientError> {
|
||||
let transaction = EncodedTransaction::from(NSSATransaction::PrivacyPreserving(transaction));
|
||||
let transaction = NSSATransaction::PrivacyPreserving(transaction);
|
||||
|
||||
let tx_req = SendTxRequest {
|
||||
transaction: borsh::to_vec(&transaction).unwrap(),
|
||||
@ -362,7 +314,7 @@ impl SequencerClient {
|
||||
&self,
|
||||
transaction: nssa::ProgramDeploymentTransaction,
|
||||
) -> Result<SendTxResponse, SequencerClientError> {
|
||||
let transaction = EncodedTransaction::from(NSSATransaction::ProgramDeployment(transaction));
|
||||
let transaction = NSSATransaction::ProgramDeployment(transaction);
|
||||
|
||||
let tx_req = SendTxRequest {
|
||||
transaction: borsh::to_vec(&transaction).unwrap(),
|
||||
@ -396,23 +348,4 @@ impl SequencerClient {
|
||||
|
||||
Ok(resp_deser)
|
||||
}
|
||||
|
||||
/// Post indexer into sequencer
|
||||
pub async fn post_indexer_message(
|
||||
&self,
|
||||
message: crate::communication::indexer::Message,
|
||||
) -> Result<PostIndexerMessageResponse, SequencerClientError> {
|
||||
let last_req = PostIndexerMessageRequest { message };
|
||||
|
||||
let req = serde_json::to_value(last_req).unwrap();
|
||||
|
||||
let resp = self
|
||||
.call_method_with_payload("post_indexer_message", req)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let resp_deser = serde_json::from_value(resp).unwrap();
|
||||
|
||||
Ok(resp_deser)
|
||||
}
|
||||
}
|
||||
|
||||
@ -1,6 +1,9 @@
|
||||
use nssa::AccountId;
|
||||
|
||||
use crate::{
|
||||
HashType,
|
||||
block::{Block, HashableBlockData},
|
||||
transaction::{EncodedTransaction, NSSATransaction},
|
||||
transaction::NSSATransaction,
|
||||
};
|
||||
|
||||
// Helpers
|
||||
@ -20,8 +23,8 @@ pub fn sequencer_sign_key_for_testing() -> nssa::PrivateKey {
|
||||
/// `transactions` - vector of `NSSATransaction` objects
|
||||
pub fn produce_dummy_block(
|
||||
id: u64,
|
||||
prev_hash: Option<[u8; 32]>,
|
||||
transactions: Vec<EncodedTransaction>,
|
||||
prev_hash: Option<HashType>,
|
||||
transactions: Vec<NSSATransaction>,
|
||||
) -> Block {
|
||||
let block_data = HashableBlockData {
|
||||
block_id: id,
|
||||
@ -33,7 +36,7 @@ pub fn produce_dummy_block(
|
||||
block_data.into_pending_block(&sequencer_sign_key_for_testing(), [0; 32])
|
||||
}
|
||||
|
||||
pub fn produce_dummy_empty_transaction() -> EncodedTransaction {
|
||||
pub fn produce_dummy_empty_transaction() -> NSSATransaction {
|
||||
let program_id = nssa::program::Program::authenticated_transfer_program().id();
|
||||
let account_ids = vec![];
|
||||
let nonces = vec![];
|
||||
@ -50,17 +53,17 @@ pub fn produce_dummy_empty_transaction() -> EncodedTransaction {
|
||||
|
||||
let nssa_tx = nssa::PublicTransaction::new(message, witness_set);
|
||||
|
||||
EncodedTransaction::from(NSSATransaction::Public(nssa_tx))
|
||||
NSSATransaction::Public(nssa_tx)
|
||||
}
|
||||
|
||||
pub fn create_transaction_native_token_transfer(
|
||||
from: [u8; 32],
|
||||
from: AccountId,
|
||||
nonce: u128,
|
||||
to: [u8; 32],
|
||||
to: AccountId,
|
||||
balance_to_move: u128,
|
||||
signing_key: nssa::PrivateKey,
|
||||
) -> EncodedTransaction {
|
||||
let account_ids = vec![nssa::AccountId::new(from), nssa::AccountId::new(to)];
|
||||
) -> NSSATransaction {
|
||||
let account_ids = vec![from, to];
|
||||
let nonces = vec![nonce];
|
||||
let program_id = nssa::program::Program::authenticated_transfer_program().id();
|
||||
let message = nssa::public_transaction::Message::try_new(
|
||||
@ -74,5 +77,5 @@ pub fn create_transaction_native_token_transfer(
|
||||
|
||||
let nssa_tx = nssa::PublicTransaction::new(message, witness_set);
|
||||
|
||||
EncodedTransaction::from(NSSATransaction::Public(nssa_tx))
|
||||
NSSATransaction::Public(nssa_tx)
|
||||
}
|
||||
|
||||
@ -1,17 +1,25 @@
|
||||
use borsh::{BorshDeserialize, BorshSerialize};
|
||||
use log::info;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use sha2::{Digest, digest::FixedOutput};
|
||||
|
||||
pub type HashType = [u8; 32];
|
||||
use crate::HashType;
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
#[derive(Debug, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)]
|
||||
pub enum NSSATransaction {
|
||||
Public(nssa::PublicTransaction),
|
||||
PrivacyPreserving(nssa::PrivacyPreservingTransaction),
|
||||
ProgramDeployment(nssa::ProgramDeploymentTransaction),
|
||||
}
|
||||
|
||||
impl NSSATransaction {
|
||||
pub fn hash(&self) -> HashType {
|
||||
HashType(match self {
|
||||
NSSATransaction::Public(tx) => tx.hash(),
|
||||
NSSATransaction::PrivacyPreserving(tx) => tx.hash(),
|
||||
NSSATransaction::ProgramDeployment(tx) => tx.hash(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl From<nssa::PublicTransaction> for NSSATransaction {
|
||||
fn from(value: nssa::PublicTransaction) -> Self {
|
||||
Self::Public(value)
|
||||
@ -38,106 +46,3 @@ pub enum TxKind {
|
||||
PrivacyPreserving,
|
||||
ProgramDeployment,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)]
|
||||
/// General transaction object
|
||||
pub struct EncodedTransaction {
|
||||
pub tx_kind: TxKind,
|
||||
/// Encoded blobs of data
|
||||
pub encoded_transaction_data: Vec<u8>,
|
||||
}
|
||||
|
||||
impl From<NSSATransaction> for EncodedTransaction {
|
||||
fn from(value: NSSATransaction) -> Self {
|
||||
match value {
|
||||
NSSATransaction::Public(tx) => Self {
|
||||
tx_kind: TxKind::Public,
|
||||
encoded_transaction_data: tx.to_bytes(),
|
||||
},
|
||||
NSSATransaction::PrivacyPreserving(tx) => Self {
|
||||
tx_kind: TxKind::PrivacyPreserving,
|
||||
encoded_transaction_data: tx.to_bytes(),
|
||||
},
|
||||
NSSATransaction::ProgramDeployment(tx) => Self {
|
||||
tx_kind: TxKind::ProgramDeployment,
|
||||
encoded_transaction_data: tx.to_bytes(),
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<&EncodedTransaction> for NSSATransaction {
|
||||
type Error = nssa::error::NssaError;
|
||||
|
||||
fn try_from(value: &EncodedTransaction) -> Result<Self, Self::Error> {
|
||||
match value.tx_kind {
|
||||
TxKind::Public => nssa::PublicTransaction::from_bytes(&value.encoded_transaction_data)
|
||||
.map(|tx| tx.into()),
|
||||
TxKind::PrivacyPreserving => {
|
||||
nssa::PrivacyPreservingTransaction::from_bytes(&value.encoded_transaction_data)
|
||||
.map(|tx| tx.into())
|
||||
}
|
||||
TxKind::ProgramDeployment => {
|
||||
nssa::ProgramDeploymentTransaction::from_bytes(&value.encoded_transaction_data)
|
||||
.map(|tx| tx.into())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl EncodedTransaction {
|
||||
/// Computes and returns the SHA-256 hash of the JSON-serialized representation of `self`.
|
||||
pub fn hash(&self) -> HashType {
|
||||
let bytes_to_hash = borsh::to_vec(&self).unwrap();
|
||||
let mut hasher = sha2::Sha256::new();
|
||||
hasher.update(&bytes_to_hash);
|
||||
HashType::from(hasher.finalize_fixed())
|
||||
}
|
||||
|
||||
pub fn log(&self) {
|
||||
info!("Transaction hash is {:?}", hex::encode(self.hash()));
|
||||
info!("Transaction tx_kind is {:?}", self.tx_kind);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use sha2::{Digest, digest::FixedOutput};
|
||||
|
||||
use crate::{
|
||||
HashType,
|
||||
transaction::{EncodedTransaction, TxKind},
|
||||
};
|
||||
|
||||
fn test_transaction_body() -> EncodedTransaction {
|
||||
EncodedTransaction {
|
||||
tx_kind: TxKind::Public,
|
||||
encoded_transaction_data: vec![1, 2, 3, 4],
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_transaction_hash_is_sha256_of_json_bytes() {
|
||||
let body = test_transaction_body();
|
||||
let expected_hash = {
|
||||
let data = borsh::to_vec(&body).unwrap();
|
||||
let mut hasher = sha2::Sha256::new();
|
||||
hasher.update(&data);
|
||||
HashType::from(hasher.finalize_fixed())
|
||||
};
|
||||
|
||||
let hash = body.hash();
|
||||
|
||||
assert_eq!(expected_hash, hash);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_to_bytes_from_bytes() {
|
||||
let body = test_transaction_body();
|
||||
|
||||
let body_bytes = borsh::to_vec(&body).unwrap();
|
||||
let body_new = borsh::from_slice::<EncodedTransaction>(&body_bytes).unwrap();
|
||||
|
||||
assert_eq!(body, body_new);
|
||||
}
|
||||
}
|
||||
|
||||
11
configs/docker-all-in-one/indexer/indexer_config.json
Normal file
@ -0,0 +1,11 @@
{
"resubscribe_interval_millis": 1000,
"bedrock_client_config": {
"addr": "http://logos-blockchain-node-0:18080",
"backoff": {
"start_delay_millis": 100,
"max_retries": 5
}
},
"channel_id": "0101010101010101010101010101010101010101010101010101010101010101"
}
@ -1,20 +1,29 @@
{
"home": "",
"home": "/var/lib/sequencer_runner",
"override_rust_log": null,
"genesis_id": 1,
"is_genesis_random": true,
"max_num_tx_in_block": 20,
"mempool_max_size": 10000,
"block_create_timeout_millis": 10000,
"retry_pending_blocks_timeout_millis": 240000,
"port": 0,
"retry_pending_blocks_timeout_millis": 7000,
"port": 3040,
"bedrock_config": {
"backoff": {
"start_delay_millis": 100,
"max_retries": 5
},
"channel_id": "0101010101010101010101010101010101010101010101010101010101010101",
"node_url": "http://logos-blockchain-node-0:18080"
},
"indexer_rpc_url": "ws://indexer_service:8779",
"initial_accounts": [
{
"account_id": "6iArKUXxhUJqS7kCaPNhwMWt3ro71PDyBj7jwAyE2VQV",
"account_id": "BLgCRDXYdQPMMWVHYRFGQZbgeHx9frkipa8GtpG2Syqy",
"balance": 10000
},
{
"account_id": "7wHg9sbJwc6h3NP1S9bekfAzB8CHifEcxKswCKUt3YQo",
"account_id": "Gj1mJy5W7J5pfmLRujmQaLfLMWidNxQ6uwnhb666ZwHw",
"balance": 20000
}
],
@ -90,4 +99,4 @@
37,
37
]
}
}
27
docker-compose.override.yml
Normal file
@ -0,0 +1,27 @@
# This file is automatically applied on top of docker-compose.yml when running `docker compose` commands.

services:
logos-blockchain-node-0:
ports: !override
- "18080:18080/tcp"
environment:
- RUST_LOG=error

sequencer_runner:
depends_on:
- logos-blockchain-node-0
- indexer_service
volumes: !override
- ./configs/docker-all-in-one/sequencer:/etc/sequencer_runner

indexer_service:
depends_on:
- logos-blockchain-node-0
volumes:
- ./configs/docker-all-in-one/indexer/indexer_config.json:/etc/indexer_service/indexer_config.json

explorer_service:
depends_on:
- indexer_service
environment:
- INDEXER_RPC_URL=http://indexer_service:8779
13
docker-compose.yml
Normal file
@ -0,0 +1,13 @@
# All-in-one docker compose configuration.
# It runs all services from this repo and the bedrock nodes in a single docker network.
# This is useful for development and testing purposes.

include:
- path:
bedrock/docker-compose.yml
- path:
sequencer_runner/docker-compose.yml
- path:
indexer/service/docker-compose.yml
- path:
explorer_service/docker-compose.yml
@ -49,7 +49,7 @@ async fn main() {
|
||||
let signing_key = wallet_core
|
||||
.storage()
|
||||
.user_data
|
||||
.get_pub_account_signing_key(&account_id)
|
||||
.get_pub_account_signing_key(account_id)
|
||||
.expect("Input account should be a self owned public account");
|
||||
|
||||
// Define the desired greeting in ASCII
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
use indexer_service_protocol::{Account, AccountId, Block, BlockId, Hash, Transaction};
|
||||
use indexer_service_protocol::{Account, AccountId, Block, BlockId, HashType, Transaction};
|
||||
use leptos::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
@ -7,7 +7,7 @@ use serde::{Deserialize, Serialize};
|
||||
pub struct SearchResults {
|
||||
pub blocks: Vec<Block>,
|
||||
pub transactions: Vec<Transaction>,
|
||||
pub accounts: Vec<(AccountId, Option<Account>)>,
|
||||
pub accounts: Vec<(AccountId, Account)>,
|
||||
}
|
||||
|
||||
/// RPC client type
|
||||
@ -46,7 +46,7 @@ pub async fn search(query: String) -> Result<SearchResults, ServerFnError> {
|
||||
if let Some(bytes) = parse_hex(&query)
|
||||
&& let Ok(hash_array) = <[u8; 32]>::try_from(bytes)
|
||||
{
|
||||
let hash = Hash(hash_array);
|
||||
let hash = HashType(hash_array);
|
||||
|
||||
// Try as block hash
|
||||
if let Ok(block) = client.get_block_by_hash(hash).await {
|
||||
@ -60,14 +60,8 @@ pub async fn search(query: String) -> Result<SearchResults, ServerFnError> {
|
||||
|
||||
// Try as account ID
|
||||
let account_id = AccountId { value: hash_array };
|
||||
match client.get_account(account_id).await {
|
||||
Ok(account) => {
|
||||
accounts.push((account_id, Some(account)));
|
||||
}
|
||||
Err(_) => {
|
||||
// Account might not exist yet, still add it to results
|
||||
accounts.push((account_id, None));
|
||||
}
|
||||
if let Ok(account) = client.get_account(account_id).await {
|
||||
accounts.push((account_id, account));
|
||||
}
|
||||
}
|
||||
|
||||
@ -98,7 +92,7 @@ pub async fn get_block_by_id(block_id: BlockId) -> Result<Block, ServerFnError>
|
||||
|
||||
/// Get block by hash
|
||||
#[server]
|
||||
pub async fn get_block_by_hash(block_hash: Hash) -> Result<Block, ServerFnError> {
|
||||
pub async fn get_block_by_hash(block_hash: HashType) -> Result<Block, ServerFnError> {
|
||||
use indexer_service_rpc::RpcClient as _;
|
||||
let client = expect_context::<IndexerRpcClient>();
|
||||
client
|
||||
@ -109,7 +103,7 @@ pub async fn get_block_by_hash(block_hash: Hash) -> Result<Block, ServerFnError>
|
||||
|
||||
/// Get transaction by hash
|
||||
#[server]
|
||||
pub async fn get_transaction(tx_hash: Hash) -> Result<Transaction, ServerFnError> {
|
||||
pub async fn get_transaction(tx_hash: HashType) -> Result<Transaction, ServerFnError> {
|
||||
use indexer_service_rpc::RpcClient as _;
|
||||
let client = expect_context::<IndexerRpcClient>();
|
||||
client
|
||||
|
||||
@ -6,7 +6,7 @@ use crate::format_utils;
|
||||
|
||||
/// Account preview component
|
||||
#[component]
|
||||
pub fn AccountPreview(account_id: AccountId, account: Option<Account>) -> impl IntoView {
|
||||
pub fn AccountPreview(account_id: AccountId, account: Account) -> impl IntoView {
|
||||
let account_id_str = format_utils::format_account_id(&account_id);
|
||||
|
||||
view! {
|
||||
@ -19,42 +19,31 @@ pub fn AccountPreview(account_id: AccountId, account: Option<Account>) -> impl I
|
||||
</div>
|
||||
</div>
|
||||
{move || {
|
||||
account
|
||||
.as_ref()
|
||||
.map(|Account { program_owner, balance, data, nonce }| {
|
||||
let program_id = format_utils::format_program_id(program_owner);
|
||||
view! {
|
||||
<div class="account-preview-body">
|
||||
<div class="account-field">
|
||||
<span class="field-label">"Balance: "</span>
|
||||
<span class="field-value">{balance.to_string()}</span>
|
||||
</div>
|
||||
<div class="account-field">
|
||||
<span class="field-label">"Program: "</span>
|
||||
<span class="field-value hash">{program_id}</span>
|
||||
</div>
|
||||
<div class="account-field">
|
||||
<span class="field-label">"Nonce: "</span>
|
||||
<span class="field-value">{nonce.to_string()}</span>
|
||||
</div>
|
||||
<div class="account-field">
|
||||
<span class="field-label">"Data: "</span>
|
||||
<span class="field-value">
|
||||
{format!("{} bytes", data.0.len())}
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
}
|
||||
.into_any()
|
||||
})
|
||||
.unwrap_or_else(|| {
|
||||
view! {
|
||||
<div class="account-preview-body">
|
||||
<div class="account-not-found">"Account not found"</div>
|
||||
</div>
|
||||
}
|
||||
.into_any()
|
||||
})
|
||||
let Account { program_owner, balance, data, nonce } = &account;
|
||||
let program_id = format_utils::format_program_id(program_owner);
|
||||
view! {
|
||||
<div class="account-preview-body">
|
||||
<div class="account-field">
|
||||
<span class="field-label">"Balance: "</span>
|
||||
<span class="field-value">{balance.to_string()}</span>
|
||||
</div>
|
||||
<div class="account-field">
|
||||
<span class="field-label">"Program: "</span>
|
||||
<span class="field-value hash">{program_id}</span>
|
||||
</div>
|
||||
<div class="account-field">
|
||||
<span class="field-label">"Nonce: "</span>
|
||||
<span class="field-value">{nonce.to_string()}</span>
|
||||
</div>
|
||||
<div class="account-field">
|
||||
<span class="field-label">"Data: "</span>
|
||||
<span class="field-value">
|
||||
{format!("{} bytes", data.0.len())}
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
}
|
||||
.into_any()
|
||||
}}
|
||||
|
||||
</A>
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
use indexer_service_protocol::{BedrockStatus, Block, BlockBody, BlockHeader, BlockId, Hash};
|
||||
use indexer_service_protocol::{BedrockStatus, Block, BlockBody, BlockHeader, BlockId, HashType};
|
||||
use leptos::prelude::*;
|
||||
use leptos_router::{components::A, hooks::use_params_map};
|
||||
|
||||
@ -7,7 +7,7 @@ use crate::{api, components::TransactionPreview, format_utils};
|
||||
#[derive(Clone, PartialEq, Eq)]
|
||||
enum BlockIdOrHash {
|
||||
BlockId(BlockId),
|
||||
Hash(Hash),
|
||||
Hash(HashType),
|
||||
}
|
||||
|
||||
/// Block page component
|
||||
@ -29,7 +29,7 @@ pub fn BlockPage() -> impl IntoView {
|
||||
if let Some(bytes) = format_utils::parse_hex(id_str)
|
||||
&& let Ok(hash_array) = <[u8; 32]>::try_from(bytes)
|
||||
{
|
||||
return Some(BlockIdOrHash::Hash(Hash(hash_array)));
|
||||
return Some(BlockIdOrHash::Hash(HashType(hash_array)));
|
||||
}
|
||||
|
||||
None
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
use indexer_service_protocol::{
|
||||
Hash, PrivacyPreservingMessage, PrivacyPreservingTransaction, ProgramDeploymentMessage,
|
||||
HashType, PrivacyPreservingMessage, PrivacyPreservingTransaction, ProgramDeploymentMessage,
|
||||
ProgramDeploymentTransaction, PublicMessage, PublicTransaction, Transaction, WitnessSet,
|
||||
};
|
||||
use leptos::prelude::*;
|
||||
@ -18,7 +18,7 @@ pub fn TransactionPage() -> impl IntoView {
|
||||
format_utils::parse_hex(&tx_hash_str).and_then(|bytes| {
|
||||
if bytes.len() == 32 {
|
||||
let hash_array: [u8; 32] = bytes.try_into().ok()?;
|
||||
Some(Hash(hash_array))
|
||||
Some(HashType(hash_array))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
|
||||
@ -17,3 +17,4 @@ futures.workspace = true
|
||||
url.workspace = true
|
||||
logos-blockchain-core.workspace = true
|
||||
serde_json.workspace = true
|
||||
async-stream.workspace = true
|
||||
@ -1,36 +1,35 @@
|
||||
use std::{fs::File, io::BufReader, path::Path};
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use bedrock_client::BackoffConfig;
|
||||
use common::sequencer_client::BasicAuth;
|
||||
use logos_blockchain_core::mantle::ops::channel::ChannelId;
|
||||
use anyhow::{Context as _, Result};
|
||||
pub use bedrock_client::BackoffConfig;
|
||||
use common::config::BasicAuth;
|
||||
pub use logos_blockchain_core::mantle::ops::channel::ChannelId;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use url::Url;
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
/// ToDo: Expand if necessary
|
||||
pub struct ClientConfig {
|
||||
pub struct BedrockClientConfig {
|
||||
/// For individual RPC requests we use Fibonacci backoff retry strategy.
|
||||
pub backoff: BackoffConfig,
|
||||
pub addr: Url,
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
pub auth: Option<BasicAuth>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
/// Note: For individual RPC requests we use Fibonacci backoff retry strategy
|
||||
pub struct IndexerConfig {
|
||||
pub resubscribe_interval_millis: u64,
|
||||
pub backoff: BackoffConfig,
|
||||
pub bedrock_client_config: ClientConfig,
|
||||
pub sequencer_client_config: ClientConfig,
|
||||
pub bedrock_client_config: BedrockClientConfig,
|
||||
pub channel_id: ChannelId,
|
||||
}
|
||||
|
||||
impl IndexerConfig {
|
||||
pub fn from_path(config_home: &Path) -> Result<IndexerConfig> {
|
||||
let file = File::open(config_home)
|
||||
.with_context(|| format!("Failed to open indexer config at {config_home:?}"))?;
|
||||
pub fn from_path(config_path: &Path) -> Result<IndexerConfig> {
|
||||
let file = File::open(config_path)
|
||||
.with_context(|| format!("Failed to open indexer config at {config_path:?}"))?;
|
||||
let reader = BufReader::new(file);
|
||||
|
||||
serde_json::from_reader(reader)
|
||||
.with_context(|| format!("Failed to parse indexer config at {config_home:?}"))
|
||||
.with_context(|| format!("Failed to parse indexer config at {config_path:?}"))
|
||||
}
|
||||
}
|
||||
110
indexer/core/src/lib.rs
Normal file
@ -0,0 +1,110 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::{Context as _, Result};
|
||||
use bedrock_client::BedrockClient;
|
||||
use common::block::Block;
|
||||
use futures::StreamExt;
|
||||
use log::{debug, info};
|
||||
use logos_blockchain_core::mantle::{
|
||||
Op, SignedMantleTx,
|
||||
ops::channel::{ChannelId, inscribe::InscriptionOp},
|
||||
};
|
||||
use tokio::sync::RwLock;
|
||||
|
||||
use crate::{config::IndexerConfig, state::IndexerState};
|
||||
|
||||
pub mod config;
|
||||
pub mod state;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct IndexerCore {
|
||||
bedrock_client: BedrockClient,
|
||||
config: IndexerConfig,
|
||||
state: IndexerState,
|
||||
}
|
||||
|
||||
impl IndexerCore {
|
||||
pub fn new(config: IndexerConfig) -> Result<Self> {
|
||||
Ok(Self {
|
||||
bedrock_client: BedrockClient::new(
|
||||
config.bedrock_client_config.backoff,
|
||||
config.bedrock_client_config.addr.clone(),
|
||||
config.bedrock_client_config.auth.clone(),
|
||||
)
|
||||
.context("Failed to create Bedrock client")?,
|
||||
config,
|
||||
// No state setup for now, future task.
|
||||
state: IndexerState {
|
||||
latest_seen_block: Arc::new(RwLock::new(0)),
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn subscribe_parse_block_stream(&self) -> impl futures::Stream<Item = Result<Block>> {
|
||||
debug!("Subscribing to Bedrock block stream");
|
||||
async_stream::stream! {
|
||||
loop {
|
||||
let mut stream_pinned = Box::pin(self.bedrock_client.get_lib_stream().await?);
|
||||
|
||||
info!("Block stream joined");
|
||||
|
||||
while let Some(block_info) = stream_pinned.next().await {
|
||||
let header_id = block_info.header_id;
|
||||
|
||||
info!("Observed L1 block at height {}", block_info.height);
|
||||
|
||||
if let Some(l1_block) = self
|
||||
.bedrock_client
|
||||
.get_block_by_id(header_id)
|
||||
.await?
|
||||
{
|
||||
info!("Extracted L1 block at height {}", block_info.height);
|
||||
|
||||
let l2_blocks_parsed = parse_blocks(
|
||||
l1_block.into_transactions().into_iter(),
|
||||
&self.config.channel_id,
|
||||
).collect::<Vec<_>>();
|
||||
|
||||
info!("Parsed {} L2 blocks", l2_blocks_parsed.len());
|
||||
|
||||
for l2_block in l2_blocks_parsed {
|
||||
// State modification, will be updated in future
|
||||
{
|
||||
let mut guard = self.state.latest_seen_block.write().await;
|
||||
if l2_block.header.block_id > *guard {
|
||||
*guard = l2_block.header.block_id;
|
||||
}
|
||||
}
|
||||
|
||||
yield Ok(l2_block);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Refetch stream after delay
|
||||
tokio::time::sleep(std::time::Duration::from_millis(
|
||||
self.config.resubscribe_interval_millis,
|
||||
))
|
||||
.await;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_blocks(
|
||||
block_txs: impl Iterator<Item = SignedMantleTx>,
|
||||
decoded_channel_id: &ChannelId,
|
||||
) -> impl Iterator<Item = Block> {
|
||||
block_txs.flat_map(|tx| {
|
||||
tx.mantle_tx.ops.into_iter().filter_map(|op| match op {
|
||||
Op::ChannelInscribe(InscriptionOp {
|
||||
channel_id,
|
||||
inscription,
|
||||
..
|
||||
}) if channel_id == *decoded_channel_id => {
|
||||
borsh::from_slice::<Block>(&inscription).ok()
|
||||
}
|
||||
_ => None,
|
||||
})
|
||||
})
|
||||
}
|
||||
@ -5,8 +5,9 @@ edition = "2024"
|
||||
license = { workspace = true }
|
||||
|
||||
[dependencies]
|
||||
indexer_service_protocol.workspace = true
|
||||
indexer_service_protocol = { workspace = true, features = ["convert"] }
|
||||
indexer_service_rpc = { workspace = true, features = ["server"] }
|
||||
indexer_core.workspace = true
|
||||
|
||||
clap = { workspace = true, features = ["derive"] }
|
||||
anyhow.workspace = true
|
||||
@ -15,7 +16,10 @@ tokio-util.workspace = true
|
||||
env_logger.workspace = true
|
||||
log.workspace = true
|
||||
jsonrpsee.workspace = true
|
||||
serde_json.workspace = true
|
||||
futures.workspace = true
|
||||
async-trait = "0.1.89"
|
||||
arc-swap = "1.8.1"
|
||||
|
||||
[features]
|
||||
# Return mock responses with generated data for testing purposes
|
||||
@ -36,7 +36,9 @@ RUN strip /indexer_service/target/release/indexer_service
|
||||
FROM debian:trixie-slim
|
||||
|
||||
# Create non-root user for security
|
||||
RUN useradd -m -u 1000 -s /bin/bash indexer_service_user
|
||||
RUN useradd -m -u 1000 -s /bin/bash indexer_service_user && \
|
||||
mkdir -p /indexer_service /etc/indexer_service && \
|
||||
chown -R indexer_service_user:indexer_service_user /indexer_service /etc/indexer_service
|
||||
|
||||
# Copy binary from builder
|
||||
COPY --from=builder --chown=indexer_service_user:indexer_service_user /indexer_service/target/release/indexer_service /usr/local/bin/indexer_service
|
||||
@ -61,4 +63,4 @@ ENV RUST_LOG=info
|
||||
USER indexer_service_user
|
||||
|
||||
WORKDIR /indexer_service
|
||||
CMD ["indexer_service"]
|
||||
CMD ["indexer_service", "/etc/indexer_service/indexer_config.json"]
|
||||
11
indexer/service/configs/indexer_config.json
Normal file
@ -0,0 +1,11 @@
{
"resubscribe_interval_millis": 1000,
"bedrock_client_config": {
"addr": "http://localhost:18080",
"backoff": {
"start_delay_millis": 100,
"max_retries": 5
}
},
"channel_id": "0101010101010101010101010101010101010101010101010101010101010101"
}
12
indexer/service/docker-compose.yml
Normal file
@ -0,0 +1,12 @@
services:
indexer_service:
image: lssa/indexer_service
build:
context: ../..
dockerfile: indexer/service/Dockerfile
container_name: indexer_service
ports:
- "8779:8779"
volumes:
# Mount configuration
- ./configs/indexer_config.json:/etc/indexer_service/indexer_config.json
@ -12,8 +12,7 @@ common = { workspace = true, optional = true }
|
||||
serde = { workspace = true, features = ["derive"] }
|
||||
schemars.workspace = true
|
||||
base64.workspace = true
|
||||
borsh = { workspace = true, optional = true }
|
||||
|
||||
[features]
|
||||
# Enable conversion to/from NSSA core types
|
||||
convert = ["dep:nssa_core", "dep:nssa", "dep:common", "dep:borsh"]
|
||||
convert = ["dep:nssa_core", "dep:nssa", "dep:common"]
|
||||
@ -381,7 +381,7 @@ impl TryFrom<WitnessSet> for nssa::privacy_preserving_transaction::witness_set::
|
||||
|
||||
impl From<nssa::PublicTransaction> for PublicTransaction {
|
||||
fn from(value: nssa::PublicTransaction) -> Self {
|
||||
let hash = Hash(value.hash());
|
||||
let hash = HashType(value.hash());
|
||||
let nssa::PublicTransaction {
|
||||
message,
|
||||
witness_set,
|
||||
@ -430,7 +430,7 @@ impl TryFrom<PublicTransaction> for nssa::PublicTransaction {
|
||||
|
||||
impl From<nssa::PrivacyPreservingTransaction> for PrivacyPreservingTransaction {
|
||||
fn from(value: nssa::PrivacyPreservingTransaction) -> Self {
|
||||
let hash = Hash(value.hash());
|
||||
let hash = HashType(value.hash());
|
||||
let nssa::PrivacyPreservingTransaction {
|
||||
message,
|
||||
witness_set,
|
||||
@ -467,7 +467,7 @@ impl TryFrom<PrivacyPreservingTransaction> for nssa::PrivacyPreservingTransactio
|
||||
|
||||
impl From<nssa::ProgramDeploymentTransaction> for ProgramDeploymentTransaction {
|
||||
fn from(value: nssa::ProgramDeploymentTransaction) -> Self {
|
||||
let hash = Hash(value.hash());
|
||||
let hash = HashType(value.hash());
|
||||
let nssa::ProgramDeploymentTransaction { message } = value;
|
||||
|
||||
Self {
|
||||
@ -531,8 +531,8 @@ impl From<common::block::BlockHeader> for BlockHeader {
|
||||
} = value;
|
||||
Self {
|
||||
block_id,
|
||||
prev_block_hash: Hash(prev_block_hash),
|
||||
hash: Hash(hash),
|
||||
prev_block_hash: prev_block_hash.into(),
|
||||
hash: hash.into(),
|
||||
timestamp,
|
||||
signature: signature.into(),
|
||||
}
|
||||
@ -552,47 +552,32 @@ impl TryFrom<BlockHeader> for common::block::BlockHeader {
|
||||
} = value;
|
||||
Ok(Self {
|
||||
block_id,
|
||||
prev_block_hash: prev_block_hash.0,
|
||||
hash: hash.0,
|
||||
prev_block_hash: prev_block_hash.into(),
|
||||
hash: hash.into(),
|
||||
timestamp,
|
||||
signature: signature.into(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<common::block::BlockBody> for BlockBody {
|
||||
type Error = std::io::Error;
|
||||
|
||||
fn try_from(value: common::block::BlockBody) -> Result<Self, Self::Error> {
|
||||
// Note: EncodedTransaction doesn't have a direct conversion to NSSATransaction
|
||||
// This conversion will decode and re-encode the transactions
|
||||
use borsh::BorshDeserialize as _;
|
||||
|
||||
impl From<common::block::BlockBody> for BlockBody {
|
||||
fn from(value: common::block::BlockBody) -> Self {
|
||||
let common::block::BlockBody { transactions } = value;
|
||||
|
||||
let transactions = transactions
|
||||
.into_iter()
|
||||
.map(|encoded_tx| match encoded_tx.tx_kind {
|
||||
common::transaction::TxKind::Public => {
|
||||
nssa::PublicTransaction::try_from_slice(&encoded_tx.encoded_transaction_data)
|
||||
.map(|tx| Transaction::Public(tx.into()))
|
||||
.map(|tx| match tx {
|
||||
common::transaction::NSSATransaction::Public(tx) => Transaction::Public(tx.into()),
|
||||
common::transaction::NSSATransaction::PrivacyPreserving(tx) => {
|
||||
Transaction::PrivacyPreserving(tx.into())
|
||||
}
|
||||
common::transaction::TxKind::PrivacyPreserving => {
|
||||
nssa::PrivacyPreservingTransaction::try_from_slice(
|
||||
&encoded_tx.encoded_transaction_data,
|
||||
)
|
||||
.map(|tx| Transaction::PrivacyPreserving(tx.into()))
|
||||
}
|
||||
common::transaction::TxKind::ProgramDeployment => {
|
||||
nssa::ProgramDeploymentTransaction::try_from_slice(
|
||||
&encoded_tx.encoded_transaction_data,
|
||||
)
|
||||
.map(|tx| Transaction::ProgramDeployment(tx.into()))
|
||||
common::transaction::NSSATransaction::ProgramDeployment(tx) => {
|
||||
Transaction::ProgramDeployment(tx.into())
|
||||
}
|
||||
})
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
.collect();
|
||||
|
||||
Ok(Self { transactions })
|
||||
Self { transactions }
|
||||
}
|
||||
}
|
||||
|
||||
@ -606,7 +591,7 @@ impl TryFrom<BlockBody> for common::block::BlockBody {
|
||||
.into_iter()
|
||||
.map(|tx| {
|
||||
let nssa_tx: common::transaction::NSSATransaction = tx.try_into()?;
|
||||
Ok::<_, nssa::error::NssaError>(nssa_tx.into())
|
||||
Ok::<_, nssa::error::NssaError>(nssa_tx)
|
||||
})
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
|
||||
@ -614,10 +599,8 @@ impl TryFrom<BlockBody> for common::block::BlockBody {
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFrom<common::block::Block> for Block {
|
||||
type Error = std::io::Error;
|
||||
|
||||
fn try_from(value: common::block::Block) -> Result<Self, Self::Error> {
|
||||
impl From<common::block::Block> for Block {
|
||||
fn from(value: common::block::Block) -> Self {
|
||||
let common::block::Block {
|
||||
header,
|
||||
body,
|
||||
@ -625,12 +608,12 @@ impl TryFrom<common::block::Block> for Block {
|
||||
bedrock_parent_id,
|
||||
} = value;
|
||||
|
||||
Ok(Self {
|
||||
Self {
|
||||
header: header.into(),
|
||||
body: body.try_into()?,
|
||||
body: body.into(),
|
||||
bedrock_status: bedrock_status.into(),
|
||||
bedrock_parent_id: MantleMsgId(bedrock_parent_id),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -673,3 +656,15 @@ impl From<BedrockStatus> for common::block::BedrockStatus {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<common::HashType> for HashType {
|
||||
fn from(value: common::HashType) -> Self {
|
||||
Self(value.0)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<HashType> for common::HashType {
|
||||
fn from(value: HashType) -> Self {
|
||||
common::HashType(value.0)
|
||||
}
|
||||
}
|
||||
@ -42,8 +42,8 @@ pub struct Block {
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct BlockHeader {
|
||||
pub block_id: BlockId,
|
||||
pub prev_block_hash: Hash,
|
||||
pub hash: Hash,
|
||||
pub prev_block_hash: HashType,
|
||||
pub hash: HashType,
|
||||
pub timestamp: TimeStamp,
|
||||
pub signature: Signature,
|
||||
}
|
||||
@ -69,7 +69,7 @@ pub enum Transaction {
|
||||
|
||||
impl Transaction {
|
||||
/// Get the hash of the transaction
|
||||
pub fn hash(&self) -> &self::Hash {
|
||||
pub fn hash(&self) -> &self::HashType {
|
||||
match self {
|
||||
Transaction::Public(tx) => &tx.hash,
|
||||
Transaction::PrivacyPreserving(tx) => &tx.hash,
|
||||
@ -80,14 +80,14 @@ impl Transaction {
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct PublicTransaction {
|
||||
pub hash: Hash,
|
||||
pub hash: HashType,
|
||||
pub message: PublicMessage,
|
||||
pub witness_set: WitnessSet,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct PrivacyPreservingTransaction {
|
||||
pub hash: Hash,
|
||||
pub hash: HashType,
|
||||
pub message: PrivacyPreservingMessage,
|
||||
pub witness_set: WitnessSet,
|
||||
}
|
||||
@ -134,7 +134,7 @@ pub struct EncryptedAccountData {
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct ProgramDeploymentTransaction {
|
||||
pub hash: Hash,
|
||||
pub hash: HashType,
|
||||
pub message: ProgramDeploymentMessage,
|
||||
}
|
||||
|
||||
@ -197,7 +197,7 @@ pub struct Data(
|
||||
);
|
||||
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct Hash(
|
||||
pub struct HashType(
|
||||
#[serde(with = "base64::arr")]
|
||||
#[schemars(with = "String", description = "base64-encoded hash")]
|
||||
pub [u8; 32],
|
||||
@ -1,4 +1,4 @@
|
||||
use indexer_service_protocol::{Account, AccountId, Block, BlockId, Hash, Transaction};
|
||||
use indexer_service_protocol::{Account, AccountId, Block, BlockId, HashType, Transaction};
|
||||
use jsonrpsee::proc_macros::rpc;
|
||||
#[cfg(feature = "server")]
|
||||
use jsonrpsee::{core::SubscriptionResult, types::ErrorObjectOwned};
|
||||
@ -23,23 +23,20 @@ pub trait Rpc {
|
||||
Ok(serde_json::to_value(block_schema).expect("Schema serialization should not fail"))
|
||||
}
|
||||
|
||||
#[subscription(name = "subscribeToBlocks", item = Vec<Block>)]
|
||||
async fn subscribe_to_blocks(&self, from: BlockId) -> SubscriptionResult;
|
||||
#[subscription(name = "subscribeToFinalizedBlocks", item = BlockId)]
|
||||
async fn subscribe_to_finalized_blocks(&self) -> SubscriptionResult;
|
||||
|
||||
#[method(name = "getBlockById")]
|
||||
async fn get_block_by_id(&self, block_id: BlockId) -> Result<Block, ErrorObjectOwned>;
|
||||
|
||||
#[method(name = "getBlockByHash")]
|
||||
async fn get_block_by_hash(&self, block_hash: Hash) -> Result<Block, ErrorObjectOwned>;
|
||||
|
||||
#[method(name = "getLastBlockId")]
|
||||
async fn get_last_block_id(&self) -> Result<BlockId, ErrorObjectOwned>;
|
||||
async fn get_block_by_hash(&self, block_hash: HashType) -> Result<Block, ErrorObjectOwned>;
|
||||
|
||||
#[method(name = "getAccount")]
|
||||
async fn get_account(&self, account_id: AccountId) -> Result<Account, ErrorObjectOwned>;
|
||||
|
||||
#[method(name = "getTransaction")]
|
||||
async fn get_transaction(&self, tx_hash: Hash) -> Result<Transaction, ErrorObjectOwned>;
|
||||
async fn get_transaction(&self, tx_hash: HashType) -> Result<Transaction, ErrorObjectOwned>;
|
||||
|
||||
#[method(name = "getBlocks")]
|
||||
async fn get_blocks(&self, offset: u32, limit: u32) -> Result<Vec<Block>, ErrorObjectOwned>;
|
||||
88
indexer/service/src/lib.rs
Normal file
@ -0,0 +1,88 @@
|
||||
use std::net::SocketAddr;
|
||||
|
||||
use anyhow::{Context as _, Result};
|
||||
pub use indexer_core::config::*;
|
||||
use indexer_service_rpc::RpcServer as _;
|
||||
use jsonrpsee::server::Server;
|
||||
use log::{error, info};
|
||||
|
||||
pub mod service;
|
||||
|
||||
#[cfg(feature = "mock-responses")]
|
||||
pub mod mock_service;
|
||||
|
||||
pub struct IndexerHandle {
|
||||
addr: SocketAddr,
|
||||
server_handle: Option<jsonrpsee::server::ServerHandle>,
|
||||
}
|
||||
impl IndexerHandle {
|
||||
fn new(addr: SocketAddr, server_handle: jsonrpsee::server::ServerHandle) -> Self {
|
||||
Self {
|
||||
addr,
|
||||
server_handle: Some(server_handle),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn addr(&self) -> SocketAddr {
|
||||
self.addr
|
||||
}
|
||||
|
||||
pub async fn stopped(mut self) {
|
||||
let handle = self
|
||||
.server_handle
|
||||
.take()
|
||||
.expect("Indexer server handle is set");
|
||||
|
||||
handle.stopped().await
|
||||
}
|
||||
|
||||
pub fn is_stopped(&self) -> bool {
|
||||
self.server_handle
|
||||
.as_ref()
|
||||
.is_none_or(|handle| handle.is_stopped())
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for IndexerHandle {
|
||||
fn drop(&mut self) {
|
||||
let Self {
|
||||
addr: _,
|
||||
server_handle,
|
||||
} = self;
|
||||
|
||||
let Some(handle) = server_handle else {
|
||||
return;
|
||||
};
|
||||
|
||||
if let Err(err) = handle.stop() {
|
||||
error!("An error occurred while stopping Indexer RPC server: {err}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn run_server(config: IndexerConfig, port: u16) -> Result<IndexerHandle> {
|
||||
#[cfg(feature = "mock-responses")]
|
||||
let _ = config;
|
||||
|
||||
let server = Server::builder()
|
||||
.build(SocketAddr::from(([0, 0, 0, 0], port)))
|
||||
.await
|
||||
.context("Failed to build RPC server")?;
|
||||
|
||||
let addr = server
|
||||
.local_addr()
|
||||
.context("Failed to get local address of RPC server")?;
|
||||
|
||||
info!("Starting Indexer Service RPC server on {addr}");
|
||||
|
||||
#[cfg(not(feature = "mock-responses"))]
|
||||
let handle = {
|
||||
let service =
|
||||
service::IndexerService::new(config).context("Failed to initialize indexer service")?;
|
||||
server.start(service.into_rpc())
|
||||
};
|
||||
#[cfg(feature = "mock-responses")]
|
||||
let handle = server.start(mock_service::MockIndexerService::new_with_mock_blocks().into_rpc());
|
||||
|
||||
Ok(IndexerHandle::new(addr, handle))
|
||||
}
|
||||
@ -1,15 +1,15 @@
|
||||
use std::net::SocketAddr;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use anyhow::{Context as _, Result};
|
||||
use anyhow::Result;
|
||||
use clap::Parser;
|
||||
use indexer_service_rpc::RpcServer as _;
|
||||
use jsonrpsee::server::Server;
|
||||
use log::{error, info};
|
||||
use tokio_util::sync::CancellationToken;
|
||||
|
||||
#[derive(Debug, Parser)]
|
||||
#[clap(version)]
|
||||
struct Args {
|
||||
#[clap(name = "config")]
|
||||
config_path: PathBuf,
|
||||
#[clap(short, long, default_value = "8779")]
|
||||
port: u16,
|
||||
}
|
||||
@ -18,18 +18,18 @@ struct Args {
|
||||
async fn main() -> Result<()> {
|
||||
env_logger::init();
|
||||
|
||||
let args = Args::parse();
|
||||
let Args { config_path, port } = Args::parse();
|
||||
|
||||
let cancellation_token = listen_for_shutdown_signal();
|
||||
|
||||
let handle = run_server(args.port).await?;
|
||||
let handle_clone = handle.clone();
|
||||
let config = indexer_service::IndexerConfig::from_path(&config_path)?;
|
||||
let indexer_handle = indexer_service::run_server(config, port).await?;
|
||||
|
||||
tokio::select! {
|
||||
_ = cancellation_token.cancelled() => {
|
||||
info!("Shutting down server...");
|
||||
}
|
||||
_ = handle_clone.stopped() => {
|
||||
_ = indexer_handle.stopped() => {
|
||||
error!("Server stopped unexpectedly");
|
||||
}
|
||||
}
|
||||
@ -39,28 +39,6 @@ async fn main() -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn run_server(port: u16) -> Result<jsonrpsee::server::ServerHandle> {
|
||||
let server = Server::builder()
|
||||
.build(SocketAddr::from(([0, 0, 0, 0], port)))
|
||||
.await
|
||||
.context("Failed to build RPC server")?;
|
||||
|
||||
let addr = server
|
||||
.local_addr()
|
||||
.context("Failed to get local address of RPC server")?;
|
||||
|
||||
info!("Starting Indexer Service RPC server on {addr}");
|
||||
|
||||
#[cfg(not(feature = "mock-responses"))]
|
||||
let handle = server.start(indexer_service::service::IndexerService.into_rpc());
|
||||
#[cfg(feature = "mock-responses")]
|
||||
let handle = server.start(
|
||||
indexer_service::mock_service::MockIndexerService::new_with_mock_blocks().into_rpc(),
|
||||
);
|
||||
|
||||
Ok(handle)
|
||||
}
|
||||
|
||||
fn listen_for_shutdown_signal() -> CancellationToken {
|
||||
let cancellation_token = CancellationToken::new();
|
||||
let cancellation_token_clone = cancellation_token.clone();
|
||||
@ -2,9 +2,10 @@ use std::collections::HashMap;
|
||||
|
||||
use indexer_service_protocol::{
|
||||
Account, AccountId, BedrockStatus, Block, BlockBody, BlockHeader, BlockId, Commitment,
|
||||
CommitmentSetDigest, Data, EncryptedAccountData, Hash, MantleMsgId, PrivacyPreservingMessage,
|
||||
PrivacyPreservingTransaction, ProgramDeploymentMessage, ProgramDeploymentTransaction,
|
||||
PublicMessage, PublicTransaction, Signature, Transaction, WitnessSet,
|
||||
CommitmentSetDigest, Data, EncryptedAccountData, HashType, MantleMsgId,
|
||||
PrivacyPreservingMessage, PrivacyPreservingTransaction, ProgramDeploymentMessage,
|
||||
ProgramDeploymentTransaction, PublicMessage, PublicTransaction, Signature, Transaction,
|
||||
WitnessSet,
|
||||
};
|
||||
use jsonrpsee::{core::SubscriptionResult, types::ErrorObjectOwned};
|
||||
|
||||
@ -12,7 +13,7 @@ use jsonrpsee::{core::SubscriptionResult, types::ErrorObjectOwned};
|
||||
pub struct MockIndexerService {
|
||||
blocks: Vec<Block>,
|
||||
accounts: HashMap<AccountId, Account>,
|
||||
transactions: HashMap<Hash, (Transaction, BlockId)>,
|
||||
transactions: HashMap<HashType, (Transaction, BlockId)>,
|
||||
}
|
||||
|
||||
impl MockIndexerService {
|
||||
@ -43,14 +44,14 @@ impl MockIndexerService {
|
||||
}
|
||||
|
||||
// Create 10 blocks with transactions
|
||||
let mut prev_hash = Hash([0u8; 32]);
|
||||
let mut prev_hash = HashType([0u8; 32]);
|
||||
|
||||
for block_id in 0..10 {
|
||||
let block_hash = {
|
||||
let mut hash = [0u8; 32];
|
||||
hash[0] = block_id as u8;
|
||||
hash[1] = 0xff;
|
||||
Hash(hash)
|
||||
HashType(hash)
|
||||
};
|
||||
|
||||
// Create 2-4 transactions per block (mix of Public, PrivacyPreserving, and
|
||||
@ -63,7 +64,7 @@ impl MockIndexerService {
|
||||
let mut hash = [0u8; 32];
|
||||
hash[0] = block_id as u8;
|
||||
hash[1] = tx_idx as u8;
|
||||
Hash(hash)
|
||||
HashType(hash)
|
||||
};
|
||||
|
||||
// Vary transaction types: Public, PrivacyPreserving, or ProgramDeployment
|
||||
@ -161,16 +162,22 @@ impl MockIndexerService {
|
||||
}
|
||||
}
|
||||
|
||||
// `async_trait` is required by `jsonrpsee`
|
||||
#[async_trait::async_trait]
|
||||
impl indexer_service_rpc::RpcServer for MockIndexerService {
|
||||
async fn subscribe_to_blocks(
|
||||
async fn subscribe_to_finalized_blocks(
|
||||
&self,
|
||||
_subscription_sink: jsonrpsee::PendingSubscriptionSink,
|
||||
_from: BlockId,
|
||||
subscription_sink: jsonrpsee::PendingSubscriptionSink,
|
||||
) -> SubscriptionResult {
|
||||
// Subscription not implemented for mock service
|
||||
Err("Subscriptions not supported in mock service".into())
|
||||
let sink = subscription_sink.accept().await?;
|
||||
for block in self
|
||||
.blocks
|
||||
.iter()
|
||||
.filter(|b| b.bedrock_status == BedrockStatus::Finalized)
|
||||
{
|
||||
let json = serde_json::value::to_raw_value(block).unwrap();
|
||||
sink.send(json).await?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn get_block_by_id(&self, block_id: BlockId) -> Result<Block, ErrorObjectOwned> {
|
||||
@ -187,7 +194,7 @@ impl indexer_service_rpc::RpcServer for MockIndexerService {
|
||||
})
|
||||
}
|
||||
|
||||
async fn get_block_by_hash(&self, block_hash: Hash) -> Result<Block, ErrorObjectOwned> {
|
||||
async fn get_block_by_hash(&self, block_hash: HashType) -> Result<Block, ErrorObjectOwned> {
|
||||
self.blocks
|
||||
.iter()
|
||||
.find(|b| b.header.hash == block_hash)
|
||||
@ -195,13 +202,6 @@ impl indexer_service_rpc::RpcServer for MockIndexerService {
|
||||
.ok_or_else(|| ErrorObjectOwned::owned(-32001, "Block with hash not found", None::<()>))
|
||||
}
|
||||
|
||||
async fn get_last_block_id(&self) -> Result<BlockId, ErrorObjectOwned> {
|
||||
self.blocks
|
||||
.last()
|
||||
.map(|b| b.header.block_id)
|
||||
.ok_or_else(|| ErrorObjectOwned::owned(-32001, "No blocks available", None::<()>))
|
||||
}
|
||||
|
||||
async fn get_account(&self, account_id: AccountId) -> Result<Account, ErrorObjectOwned> {
|
||||
self.accounts
|
||||
.get(&account_id)
|
||||
@ -209,7 +209,7 @@ impl indexer_service_rpc::RpcServer for MockIndexerService {
|
||||
.ok_or_else(|| ErrorObjectOwned::owned(-32001, "Account not found", None::<()>))
|
||||
}
|
||||
|
||||
async fn get_transaction(&self, tx_hash: Hash) -> Result<Transaction, ErrorObjectOwned> {
|
||||
async fn get_transaction(&self, tx_hash: HashType) -> Result<Transaction, ErrorObjectOwned> {
|
||||
self.transactions
|
||||
.get(&tx_hash)
|
||||
.map(|(tx, _)| tx.clone())
|
||||
228
indexer/service/src/service.rs
Normal file
@ -0,0 +1,228 @@
use std::{pin::pin, sync::Arc};

use anyhow::{Context as _, Result, bail};
use arc_swap::ArcSwap;
use futures::{StreamExt as _, never::Never};
use indexer_core::{IndexerCore, config::IndexerConfig};
use indexer_service_protocol::{Account, AccountId, Block, BlockId, HashType, Transaction};
use jsonrpsee::{
    SubscriptionSink,
    core::{Serialize, SubscriptionResult},
    types::{ErrorCode, ErrorObject, ErrorObjectOwned},
};
use log::{debug, error, info, warn};
use tokio::sync::mpsc::UnboundedSender;

pub struct IndexerService {
    subscription_service: SubscriptionService,

    #[expect(
        dead_code,
        reason = "Will be used in future implementations of RPC methods"
    )]
    indexer: IndexerCore,
}

impl IndexerService {
    pub fn new(config: IndexerConfig) -> Result<Self> {
        let indexer = IndexerCore::new(config)?;
        let subscription_service = SubscriptionService::spawn_new(indexer.clone());

        Ok(Self {
            subscription_service,
            indexer,
        })
    }
}

#[async_trait::async_trait]
impl indexer_service_rpc::RpcServer for IndexerService {
    async fn subscribe_to_finalized_blocks(
        &self,
        subscription_sink: jsonrpsee::PendingSubscriptionSink,
    ) -> SubscriptionResult {
        let sink = subscription_sink.accept().await?;
        info!(
            "Accepted new subscription to finalized blocks with ID {:?}",
            sink.subscription_id()
        );
        self.subscription_service
            .add_subscription(Subscription::new(sink))
            .await?;

        Ok(())
    }

    async fn get_block_by_id(&self, _block_id: BlockId) -> Result<Block, ErrorObjectOwned> {
        Err(not_yet_implemented_error())
    }

    async fn get_block_by_hash(&self, _block_hash: HashType) -> Result<Block, ErrorObjectOwned> {
        Err(not_yet_implemented_error())
    }

    async fn get_account(&self, _account_id: AccountId) -> Result<Account, ErrorObjectOwned> {
        Err(not_yet_implemented_error())
    }

    async fn get_transaction(&self, _tx_hash: HashType) -> Result<Transaction, ErrorObjectOwned> {
        Err(not_yet_implemented_error())
    }

    async fn get_blocks(&self, _offset: u32, _limit: u32) -> Result<Vec<Block>, ErrorObjectOwned> {
        Err(not_yet_implemented_error())
    }

    async fn get_transactions_by_account(
        &self,
        _account_id: AccountId,
        _limit: u32,
        _offset: u32,
    ) -> Result<Vec<Transaction>, ErrorObjectOwned> {
        Err(not_yet_implemented_error())
    }
}

struct SubscriptionService {
    parts: ArcSwap<SubscriptionLoopParts>,
    indexer: IndexerCore,
}

impl SubscriptionService {
    pub fn spawn_new(indexer: IndexerCore) -> Self {
        let parts = Self::spawn_respond_subscribers_loop(indexer.clone());

        Self {
            parts: ArcSwap::new(Arc::new(parts)),
            indexer,
        }
    }

    pub async fn add_subscription(&self, subscription: Subscription<BlockId>) -> Result<()> {
        let guard = self.parts.load();
        if let Err(err) = guard.new_subscription_sender.send(subscription) {
            error!("Failed to send new subscription to subscription service with error: {err:#?}");

            // Respawn the subscription service loop if it has finished (either with error or panic)
            if guard.handle.is_finished() {
                drop(guard);
                let new_parts = Self::spawn_respond_subscribers_loop(self.indexer.clone());
                let old_handle_and_sender = self.parts.swap(Arc::new(new_parts));
                let old_parts = Arc::into_inner(old_handle_and_sender)
                    .expect("There should be no other references to the old handle and sender");

                match old_parts.handle.await {
                    Ok(Err(err)) => {
                        error!(
                            "Subscription service loop has unexpectedly finished with error: {err:#}"
                        );
                    }
                    Err(err) => {
                        error!("Subscription service loop has panicked with err: {err:#}");
                    }
                }
            }

            bail!(err);
        };

        Ok(())
    }

    fn spawn_respond_subscribers_loop(indexer: IndexerCore) -> SubscriptionLoopParts {
        let (new_subscription_sender, mut sub_receiver) =
            tokio::sync::mpsc::unbounded_channel::<Subscription<BlockId>>();

        let handle = tokio::spawn(async move {
            let mut subscribers = Vec::new();

            let mut block_stream = pin!(indexer.subscribe_parse_block_stream().await);

            loop {
                tokio::select! {
                    sub = sub_receiver.recv() => {
                        let Some(subscription) = sub else {
                            bail!("Subscription receiver closed unexpectedly");
                        };
                        info!("Added new subscription with ID {:?}", subscription.sink.subscription_id());
                        subscribers.push(subscription);
                    }
                    block_opt = block_stream.next() => {
                        debug!("Got new block from block stream");
                        let Some(block) = block_opt else {
                            bail!("Block stream ended unexpectedly");
                        };
                        let block = block.context("Failed to get L2 block data")?;
                        let block: indexer_service_protocol::Block = block.into();

                        for sub in &mut subscribers {
                            if let Err(err) = sub.try_send(&block.header.block_id) {
                                warn!(
                                    "Failed to send block ID {:?} to subscription ID {:?} with error: {err:#?}",
                                    block.header.block_id,
                                    sub.sink.subscription_id(),
                                );
                            }
                        }
                    }
                }
            }
        });
        SubscriptionLoopParts {
            handle,
            new_subscription_sender,
        }
    }
}

impl Drop for SubscriptionService {
    fn drop(&mut self) {
        self.parts.load().handle.abort();
    }
}

struct SubscriptionLoopParts {
    handle: tokio::task::JoinHandle<Result<Never>>,
    new_subscription_sender: UnboundedSender<Subscription<BlockId>>,
}

struct Subscription<T> {
    sink: SubscriptionSink,
    _marker: std::marker::PhantomData<T>,
}

impl<T> Subscription<T> {
    fn new(sink: SubscriptionSink) -> Self {
        Self {
            sink,
            _marker: std::marker::PhantomData,
        }
    }

    fn try_send(&mut self, item: &T) -> Result<()>
    where
        T: Serialize,
    {
        let json = serde_json::value::to_raw_value(item)
            .context("Failed to serialize item for subscription")?;
        self.sink.try_send(json)?;
        Ok(())
    }
}

impl<T> Drop for Subscription<T> {
    fn drop(&mut self) {
        info!(
            "Subscription with ID {:?} is being dropped",
            self.sink.subscription_id()
        );
    }
}

fn not_yet_implemented_error() -> ErrorObjectOwned {
    ErrorObject::owned(
        ErrorCode::InternalError.code(),
        "Not yet implemented",
        Option::<String>::None,
    )
}
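Not part of the diff: a minimal sketch of how this new service might be started, using only items that appear elsewhere in this commit (`IndexerConfig::from_path` and `indexer_service::run_server`, both used by the integration tests further below); the config path is a placeholder and the API surface is an assumption.

// Illustrative sketch only, not taken from the commit.
let config_path = std::path::PathBuf::from("indexer_config.json"); // placeholder path
let config = indexer_core::config::IndexerConfig::from_path(&config_path)?;
// Port 0 asks the OS for a free port; the returned handle exposes the bound address.
let indexer_handle = indexer_service::run_server(config, 0).await?;
log::info!("Indexer RPC listening on {}", indexer_handle.addr());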
@ -1,124 +0,0 @@
use std::sync::Arc;

use anyhow::Result;
use bedrock_client::BedrockClient;
use common::{
    block::HashableBlockData, communication::indexer::Message,
    rpc_primitives::requests::PostIndexerMessageResponse, sequencer_client::SequencerClient,
};
use futures::StreamExt;
use log::info;
use logos_blockchain_core::mantle::{
    Op, SignedMantleTx,
    ops::channel::{ChannelId, inscribe::InscriptionOp},
};
use tokio::sync::RwLock;

use crate::{config::IndexerConfig, state::IndexerState};

pub mod config;
pub mod state;

pub struct IndexerCore {
    pub bedrock_client: BedrockClient,
    pub sequencer_client: SequencerClient,
    pub config: IndexerConfig,
    pub state: IndexerState,
}

impl IndexerCore {
    pub fn new(config: IndexerConfig) -> Result<Self> {
        Ok(Self {
            bedrock_client: BedrockClient::new(
                config.bedrock_client_config.auth.clone().map(Into::into),
                config.bedrock_client_config.addr.clone(),
            )?,
            sequencer_client: SequencerClient::new_with_auth(
                config.sequencer_client_config.addr.clone(),
                config.sequencer_client_config.auth.clone(),
            )?,
            config,
            // No state setup for now, future task.
            state: IndexerState {
                latest_seen_block: Arc::new(RwLock::new(0)),
            },
        })
    }

    pub async fn subscribe_parse_block_stream(&self) -> Result<()> {
        loop {
            let mut stream_pinned = Box::pin(self.bedrock_client.get_lib_stream().await?);

            info!("Block stream joined");

            while let Some(block_info) = stream_pinned.next().await {
                let header_id = block_info.header_id;

                info!("Observed L1 block at height {}", block_info.height);

                if let Some(l1_block) = self
                    .bedrock_client
                    .get_block_by_id(header_id, &self.config.backoff)
                    .await?
                {
                    info!("Extracted L1 block at height {}", block_info.height);

                    let l2_blocks_parsed = parse_blocks(
                        l1_block.into_transactions().into_iter(),
                        &self.config.channel_id,
                    );

                    for l2_block in l2_blocks_parsed {
                        // State modification, will be updated in future
                        {
                            let mut guard = self.state.latest_seen_block.write().await;
                            if l2_block.block_id > *guard {
                                *guard = l2_block.block_id;
                            }
                        }

                        // Sending data into sequencer, may need to be expanded.
                        let message = Message::L2BlockFinalized {
                            l2_block_height: l2_block.block_id,
                        };

                        let status = self.send_message_to_sequencer(message.clone()).await?;

                        info!("Sent message {message:#?} to sequencer; status {status:#?}");
                    }
                }
            }

            // Refetch stream after delay
            tokio::time::sleep(std::time::Duration::from_millis(
                self.config.resubscribe_interval_millis,
            ))
            .await;
        }
    }

    pub async fn send_message_to_sequencer(
        &self,
        message: Message,
    ) -> Result<PostIndexerMessageResponse> {
        Ok(self.sequencer_client.post_indexer_message(message).await?)
    }
}

fn parse_blocks(
    block_txs: impl Iterator<Item = SignedMantleTx>,
    decoded_channel_id: &ChannelId,
) -> impl Iterator<Item = HashableBlockData> {
    block_txs.flat_map(|tx| {
        tx.mantle_tx.ops.into_iter().filter_map(|op| match op {
            Op::ChannelInscribe(InscriptionOp {
                channel_id,
                inscription,
                ..
            }) if channel_id == *decoded_channel_id => {
                borsh::from_slice::<HashableBlockData>(&inscription).ok()
            }
            _ => None,
        })
    })
}
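For context, not part of the commit: before this change the loop above was driven from a spawned task in the integration tests (see the removed setup further below); roughly:

// Sketch of the pre-change usage, mirroring the removed test setup; not new API.
let indexer_core = IndexerCore::new(indexer_config)?;
let indexer_loop_handle = tokio::spawn(async move {
    indexer_core.subscribe_parse_block_stream().await
});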
@ -1,9 +0,0 @@
services:
  indexer_service:
    image: lssa/indexer_service
    build:
      context: ..
      dockerfile: indexer_service/Dockerfile
    container_name: indexer_service
    ports:
      - "8779:8779"
@ -1,4 +0,0 @@
pub mod service;

#[cfg(feature = "mock-responses")]
pub mod mock_service;
@ -1,49 +0,0 @@
use indexer_service_protocol::{Account, AccountId, Block, BlockId, Hash, Transaction};
use jsonrpsee::{core::SubscriptionResult, types::ErrorObjectOwned};

pub struct IndexerService;

// `async_trait` is required by `jsonrpsee`
#[async_trait::async_trait]
impl indexer_service_rpc::RpcServer for IndexerService {
    async fn subscribe_to_blocks(
        &self,
        _subscription_sink: jsonrpsee::PendingSubscriptionSink,
        _from: BlockId,
    ) -> SubscriptionResult {
        todo!()
    }

    async fn get_block_by_id(&self, _block_id: BlockId) -> Result<Block, ErrorObjectOwned> {
        todo!()
    }

    async fn get_block_by_hash(&self, _block_hash: Hash) -> Result<Block, ErrorObjectOwned> {
        todo!()
    }

    async fn get_last_block_id(&self) -> Result<BlockId, ErrorObjectOwned> {
        todo!()
    }

    async fn get_account(&self, _account_id: AccountId) -> Result<Account, ErrorObjectOwned> {
        todo!()
    }

    async fn get_transaction(&self, _tx_hash: Hash) -> Result<Transaction, ErrorObjectOwned> {
        todo!()
    }

    async fn get_blocks(&self, _offset: u32, _limit: u32) -> Result<Vec<Block>, ErrorObjectOwned> {
        todo!()
    }

    async fn get_transactions_by_account(
        &self,
        _account_id: AccountId,
        _limit: u32,
        _offset: u32,
    ) -> Result<Vec<Transaction>, ErrorObjectOwned> {
        todo!()
    }
}
@ -7,24 +7,24 @@ license = { workspace = true }
[dependencies]
nssa_core = { workspace = true, features = ["host"] }
nssa.workspace = true
sequencer_core = { workspace = true, features = ["testnet"] }
sequencer_core = { workspace = true, features = ["default", "testnet"] }
sequencer_runner.workspace = true
wallet.workspace = true
common.workspace = true
key_protocol.workspace = true
indexer_core.workspace = true
wallet-ffi.workspace = true
serde_json.workspace = true
token_core.workspace = true
indexer_service.workspace = true

url.workspace = true
anyhow.workspace = true
env_logger.workspace = true
log.workspace = true
actix-web.workspace = true
serde_json.workspace = true
base64.workspace = true
tokio = { workspace = true, features = ["rt-multi-thread", "macros"] }
hex.workspace = true
tempfile.workspace = true
borsh.workspace = true
futures.workspace = true
testcontainers = { version = "0.27.0", features = ["docker-compose"] }

@ -1,17 +0,0 @@
{
    "bedrock_client_config": {
        "addr": "http://127.0.0.1:8080",
        "auth": {
            "username": "user"
        }
    },
    "channel_id": "0101010101010101010101010101010101010101010101010101010101010101",
    "backoff": {
        "max_retries": 10,
        "start_delay_millis": 100
    },
    "resubscribe_interval_millis": 1000,
    "sequencer_client_config": {
        "addr": "will_be_replaced_in_runtime"
    }
}
@ -1,165 +0,0 @@
{
    "home": "",
    "override_rust_log": null,
    "genesis_id": 1,
    "is_genesis_random": true,
    "max_num_tx_in_block": 20,
    "mempool_max_size": 10000,
    "block_create_timeout_millis": 10000,
    "port": 0,
    "initial_accounts": [
        { "account_id": "BLgCRDXYdQPMMWVHYRFGQZbgeHx9frkipa8GtpG2Syqy", "balance": 10000 },
        { "account_id": "Gj1mJy5W7J5pfmLRujmQaLfLMWidNxQ6uwnhb666ZwHw", "balance": 20000 }
    ],
    "initial_commitments": [
        {
            "npk": [63, 202, 178, 231, 183, 82, 237, 212, 216, 221, 215, 255, 153, 101, 177, 161, 254, 210, 128, 122, 54, 190, 230, 151, 183, 64, 225, 229, 113, 1, 228, 97],
            "account": { "program_owner": [0, 0, 0, 0, 0, 0, 0, 0], "balance": 10000, "data": [], "nonce": 0 }
        },
        {
            "npk": [192, 251, 166, 243, 167, 236, 84, 249, 35, 136, 130, 172, 219, 225, 161, 139, 229, 89, 243, 125, 194, 213, 209, 30, 23, 174, 100, 244, 124, 74, 140, 47],
            "account": { "program_owner": [0, 0, 0, 0, 0, 0, 0, 0], "balance": 20000, "data": [], "nonce": 0 }
        }
    ],
    "signing_key": [37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37, 37],
    "bedrock_config": {
        "channel_id": "0101010101010101010101010101010101010101010101010101010101010101",
        "node_url": "http://127.0.0.1:8080",
        "auth": {
            "username": "user"
        }
    }
}
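Note, not part of the diff: the static sequencer config removed above is now generated by the `sequencer_config` helper added in `integration_tests/src/config.rs` below; a hedged sketch of the call, with all inputs as placeholders:

// Illustrative only; `temp_dir`, `bedrock_addr`, `indexer_addr` and `initial_data`
// stand in for values the tests create at runtime.
let cfg = config::sequencer_config(
    config::SequencerPartialConfig::default(),
    temp_dir.path().to_owned(),
    bedrock_addr,
    indexer_addr,
    &initial_data,
)?;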
255
integration_tests/src/config.rs
Normal file
@ -0,0 +1,255 @@
use std::{net::SocketAddr, path::PathBuf};

use anyhow::{Context, Result};
use indexer_service::{BackoffConfig, BedrockClientConfig, ChannelId, IndexerConfig};
use key_protocol::key_management::KeyChain;
use nssa::{Account, AccountId, PrivateKey, PublicKey};
use nssa_core::{account::Data, program::DEFAULT_PROGRAM_ID};
use sequencer_core::config::{
    AccountInitialData, BedrockConfig, CommitmentsInitialData, SequencerConfig,
};
use url::Url;
use wallet::config::{
    InitialAccountData, InitialAccountDataPrivate, InitialAccountDataPublic, WalletConfig,
};

pub fn indexer_config(bedrock_addr: SocketAddr) -> Result<IndexerConfig> {
    Ok(IndexerConfig {
        resubscribe_interval_millis: 1000,
        bedrock_client_config: BedrockClientConfig {
            addr: addr_to_url(UrlProtocol::Http, bedrock_addr)
                .context("Failed to convert bedrock addr to URL")?,
            auth: None,
            backoff: BackoffConfig {
                start_delay_millis: 100,
                max_retries: 10,
            },
        },
        channel_id: bedrock_channel_id(),
    })
}

/// Sequencer config options available for custom changes in integration tests.
pub struct SequencerPartialConfig {
    pub max_num_tx_in_block: usize,
    pub mempool_max_size: usize,
    pub block_create_timeout_millis: u64,
}

impl Default for SequencerPartialConfig {
    fn default() -> Self {
        Self {
            max_num_tx_in_block: 20,
            mempool_max_size: 10_000,
            block_create_timeout_millis: 10_000,
        }
    }
}

pub fn sequencer_config(
    partial: SequencerPartialConfig,
    home: PathBuf,
    bedrock_addr: SocketAddr,
    indexer_addr: SocketAddr,
    initial_data: &InitialData,
) -> Result<SequencerConfig> {
    let SequencerPartialConfig {
        max_num_tx_in_block,
        mempool_max_size,
        block_create_timeout_millis,
    } = partial;

    Ok(SequencerConfig {
        home,
        override_rust_log: None,
        genesis_id: 1,
        is_genesis_random: true,
        max_num_tx_in_block,
        mempool_max_size,
        block_create_timeout_millis,
        retry_pending_blocks_timeout_millis: 240_000,
        port: 0,
        initial_accounts: initial_data.sequencer_initial_accounts(),
        initial_commitments: initial_data.sequencer_initial_commitments(),
        signing_key: [37; 32],
        bedrock_config: BedrockConfig {
            backoff: BackoffConfig {
                start_delay_millis: 100,
                max_retries: 5,
            },
            channel_id: bedrock_channel_id(),
            node_url: addr_to_url(UrlProtocol::Http, bedrock_addr)
                .context("Failed to convert bedrock addr to URL")?,
            auth: None,
        },
        indexer_rpc_url: addr_to_url(UrlProtocol::Ws, indexer_addr)
            .context("Failed to convert indexer addr to URL")?,
    })
}

pub fn wallet_config(
    sequencer_addr: SocketAddr,
    initial_data: &InitialData,
) -> Result<WalletConfig> {
    Ok(WalletConfig {
        override_rust_log: None,
        sequencer_addr: addr_to_url(UrlProtocol::Http, sequencer_addr)
            .context("Failed to convert sequencer addr to URL")?,
        seq_poll_timeout_millis: 30_000,
        seq_tx_poll_max_blocks: 15,
        seq_poll_max_retries: 10,
        seq_block_poll_max_amount: 100,
        initial_accounts: initial_data.wallet_initial_accounts(),
        basic_auth: None,
    })
}

pub struct InitialData {
    pub public_accounts: Vec<(PrivateKey, u128)>,
    pub private_accounts: Vec<(KeyChain, Account)>,
}

impl InitialData {
    pub fn with_two_public_and_two_private_initialized_accounts() -> Self {
        let mut public_alice_private_key = PrivateKey::new_os_random();
        let mut public_alice_public_key =
            PublicKey::new_from_private_key(&public_alice_private_key);
        let mut public_alice_account_id = AccountId::from(&public_alice_public_key);

        let mut public_bob_private_key = PrivateKey::new_os_random();
        let mut public_bob_public_key = PublicKey::new_from_private_key(&public_bob_private_key);
        let mut public_bob_account_id = AccountId::from(&public_bob_public_key);

        // Ensure consistent ordering
        if public_alice_account_id > public_bob_account_id {
            std::mem::swap(&mut public_alice_private_key, &mut public_bob_private_key);
            std::mem::swap(&mut public_alice_public_key, &mut public_bob_public_key);
            std::mem::swap(&mut public_alice_account_id, &mut public_bob_account_id);
        }

        let mut private_charlie_key_chain = KeyChain::new_os_random();
        let mut private_charlie_account_id =
            AccountId::from(&private_charlie_key_chain.nullifer_public_key);

        let mut private_david_key_chain = KeyChain::new_os_random();
        let mut private_david_account_id =
            AccountId::from(&private_david_key_chain.nullifer_public_key);

        // Ensure consistent ordering
        if private_charlie_account_id > private_david_account_id {
            std::mem::swap(&mut private_charlie_key_chain, &mut private_david_key_chain);
            std::mem::swap(
                &mut private_charlie_account_id,
                &mut private_david_account_id,
            );
        }

        Self {
            public_accounts: vec![
                (public_alice_private_key, 10_000),
                (public_bob_private_key, 20_000),
            ],
            private_accounts: vec![
                (
                    private_charlie_key_chain,
                    Account {
                        balance: 10_000,
                        data: Data::default(),
                        program_owner: DEFAULT_PROGRAM_ID,
                        nonce: 0,
                    },
                ),
                (
                    private_david_key_chain,
                    Account {
                        balance: 20_000,
                        data: Data::default(),
                        program_owner: DEFAULT_PROGRAM_ID,
                        nonce: 0,
                    },
                ),
            ],
        }
    }

    fn sequencer_initial_accounts(&self) -> Vec<AccountInitialData> {
        self.public_accounts
            .iter()
            .map(|(priv_key, balance)| {
                let pub_key = PublicKey::new_from_private_key(priv_key);
                let account_id = AccountId::from(&pub_key);
                AccountInitialData {
                    account_id,
                    balance: *balance,
                }
            })
            .collect()
    }

    fn sequencer_initial_commitments(&self) -> Vec<CommitmentsInitialData> {
        self.private_accounts
            .iter()
            .map(|(key_chain, account)| CommitmentsInitialData {
                npk: key_chain.nullifer_public_key.clone(),
                account: account.clone(),
            })
            .collect()
    }

    fn wallet_initial_accounts(&self) -> Vec<InitialAccountData> {
        self.public_accounts
            .iter()
            .map(|(priv_key, _)| {
                let pub_key = PublicKey::new_from_private_key(priv_key);
                let account_id = AccountId::from(&pub_key);
                InitialAccountData::Public(InitialAccountDataPublic {
                    account_id,
                    pub_sign_key: priv_key.clone(),
                })
            })
            .chain(self.private_accounts.iter().map(|(key_chain, account)| {
                let account_id = AccountId::from(&key_chain.nullifer_public_key);
                InitialAccountData::Private(InitialAccountDataPrivate {
                    account_id,
                    account: account.clone(),
                    key_chain: key_chain.clone(),
                })
            }))
            .collect()
    }
}

pub enum UrlProtocol {
    Http,
    Ws,
}

impl std::fmt::Display for UrlProtocol {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            UrlProtocol::Http => write!(f, "http"),
            UrlProtocol::Ws => write!(f, "ws"),
        }
    }
}

pub fn addr_to_url(protocol: UrlProtocol, addr: SocketAddr) -> Result<Url> {
    // Convert 0.0.0.0 to 127.0.0.1 for client connections
    // When binding to port 0, the server binds to 0.0.0.0:<random_port>
    // but clients need to connect to 127.0.0.1:<port> to work reliably
    let url_string = if addr.ip().is_unspecified() {
        format!("{protocol}://127.0.0.1:{}", addr.port())
    } else {
        format!("{protocol}://{addr}")
    };

    url_string.parse().map_err(Into::into)
}

fn bedrock_channel_id() -> ChannelId {
    let channel_id: [u8; 32] = [0u8, 1]
        .repeat(16)
        .try_into()
        .unwrap_or_else(|_| unreachable!());
    ChannelId::from(channel_id)
}
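A hedged usage sketch of the helpers above; the addresses are placeholders, not values from the commit.

// Illustrative only: wiring the config helpers together for a local setup.
let bedrock_addr: std::net::SocketAddr = "127.0.0.1:18080".parse()?;
let indexer_cfg = indexer_config(bedrock_addr)?; // uses bedrock_channel_id() internally

// addr_to_url rewrites the unspecified address so clients can connect.
let url = addr_to_url(UrlProtocol::Ws, "0.0.0.0:8080".parse()?)?;
assert!(url.as_str().starts_with("ws://127.0.0.1:8080"));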
@ -2,173 +2,180 @@
|
||||
|
||||
use std::{net::SocketAddr, path::PathBuf, sync::LazyLock};
|
||||
|
||||
use actix_web::dev::ServerHandle;
|
||||
use anyhow::{Context, Result};
|
||||
use anyhow::{Context, Result, bail};
|
||||
use base64::{Engine, engine::general_purpose::STANDARD as BASE64};
|
||||
use common::{
|
||||
sequencer_client::SequencerClient,
|
||||
transaction::{EncodedTransaction, NSSATransaction},
|
||||
};
|
||||
use common::{HashType, sequencer_client::SequencerClient, transaction::NSSATransaction};
|
||||
use futures::FutureExt as _;
|
||||
use indexer_core::{IndexerCore, config::IndexerConfig};
|
||||
use log::debug;
|
||||
use nssa::PrivacyPreservingTransaction;
|
||||
use indexer_service::IndexerHandle;
|
||||
use log::{debug, error, warn};
|
||||
use nssa::{AccountId, PrivacyPreservingTransaction};
|
||||
use nssa_core::Commitment;
|
||||
use sequencer_core::config::SequencerConfig;
|
||||
use sequencer_runner::SequencerHandle;
|
||||
use tempfile::TempDir;
|
||||
use tokio::task::JoinHandle;
|
||||
use url::Url;
|
||||
use testcontainers::compose::DockerCompose;
|
||||
use wallet::{WalletCore, config::WalletConfigOverrides};
|
||||
|
||||
pub mod config;
|
||||
|
||||
// TODO: Remove this and control time from tests
|
||||
pub const TIME_TO_WAIT_FOR_BLOCK_SECONDS: u64 = 12;
|
||||
|
||||
pub const ACC_SENDER: &str = "6iArKUXxhUJqS7kCaPNhwMWt3ro71PDyBj7jwAyE2VQV";
|
||||
pub const ACC_RECEIVER: &str = "7wHg9sbJwc6h3NP1S9bekfAzB8CHifEcxKswCKUt3YQo";
|
||||
|
||||
pub const ACC_SENDER_PRIVATE: &str = "2ECgkFTaXzwjJBXR7ZKmXYQtpHbvTTHK9Auma4NL9AUo";
|
||||
pub const ACC_RECEIVER_PRIVATE: &str = "E8HwiTyQe4H9HK7icTvn95HQMnzx49mP9A2ddtMLpNaN";
|
||||
|
||||
pub const NSSA_PROGRAM_FOR_TEST_DATA_CHANGER: &str = "data_changer.bin";
|
||||
|
||||
const BEDROCK_SERVICE_WITH_OPEN_PORT: &str = "logos-blockchain-node-0";
|
||||
const BEDROCK_SERVICE_PORT: u16 = 18080;
|
||||
|
||||
static LOGGER: LazyLock<()> = LazyLock::new(env_logger::init);
|
||||
|
||||
/// Test context which sets up a sequencer and a wallet for integration tests.
|
||||
///
|
||||
/// It's memory and logically safe to create multiple instances of this struct in parallel tests,
|
||||
/// as each instance uses its own temporary directories for sequencer and wallet data.
|
||||
// NOTE: Order of fields is important for proper drop order.
|
||||
pub struct TestContext {
|
||||
sequencer_server_handle: ServerHandle,
|
||||
sequencer_loop_handle: JoinHandle<Result<()>>,
|
||||
sequencer_retry_pending_blocks_handle: JoinHandle<Result<()>>,
|
||||
indexer_loop_handle: Option<JoinHandle<Result<()>>>,
|
||||
sequencer_client: SequencerClient,
|
||||
wallet: WalletCore,
|
||||
wallet_password: String,
|
||||
sequencer_handle: SequencerHandle,
|
||||
indexer_handle: IndexerHandle,
|
||||
bedrock_compose: DockerCompose,
|
||||
_temp_sequencer_dir: TempDir,
|
||||
_temp_wallet_dir: TempDir,
|
||||
}
|
||||
|
||||
impl TestContext {
|
||||
/// Create new test context in detached mode. Default.
|
||||
/// Create new test context.
|
||||
pub async fn new() -> Result<Self> {
|
||||
let manifest_dir = env!("CARGO_MANIFEST_DIR");
|
||||
|
||||
let sequencer_config_path =
|
||||
PathBuf::from(manifest_dir).join("configs/sequencer/detached/sequencer_config.json");
|
||||
|
||||
let sequencer_config = SequencerConfig::from_path(&sequencer_config_path)
|
||||
.context("Failed to create sequencer config from file")?;
|
||||
|
||||
Self::new_with_sequencer_and_maybe_indexer_configs(sequencer_config, None).await
|
||||
Self::builder().build().await
|
||||
}
|
||||
|
||||
/// Create new test context in local bedrock node attached mode.
|
||||
pub async fn new_bedrock_local_attached() -> Result<Self> {
|
||||
let manifest_dir = env!("CARGO_MANIFEST_DIR");
|
||||
|
||||
let sequencer_config_path = PathBuf::from(manifest_dir)
|
||||
.join("configs/sequencer/bedrock_local_attached/sequencer_config.json");
|
||||
|
||||
let sequencer_config = SequencerConfig::from_path(&sequencer_config_path)
|
||||
.context("Failed to create sequencer config from file")?;
|
||||
|
||||
let indexer_config_path =
|
||||
PathBuf::from(manifest_dir).join("configs/indexer/indexer_config.json");
|
||||
|
||||
let indexer_config = IndexerConfig::from_path(&indexer_config_path)
|
||||
.context("Failed to create indexer config from file")?;
|
||||
|
||||
Self::new_with_sequencer_and_maybe_indexer_configs(sequencer_config, Some(indexer_config))
|
||||
.await
|
||||
pub fn builder() -> TestContextBuilder {
|
||||
TestContextBuilder::new()
|
||||
}
|
||||
|
||||
/// Create new test context with custom sequencer config and maybe indexer config.
|
||||
///
|
||||
/// `home` and `port` fields of the provided config will be overridden to meet tests parallelism
|
||||
/// requirements.
|
||||
pub async fn new_with_sequencer_and_maybe_indexer_configs(
|
||||
sequencer_config: SequencerConfig,
|
||||
indexer_config: Option<IndexerConfig>,
|
||||
async fn new_configured(
|
||||
sequencer_partial_config: config::SequencerPartialConfig,
|
||||
initial_data: config::InitialData,
|
||||
) -> Result<Self> {
|
||||
// Ensure logger is initialized only once
|
||||
*LOGGER;
|
||||
|
||||
debug!("Test context setup");
|
||||
|
||||
let (
|
||||
sequencer_server_handle,
|
||||
sequencer_addr,
|
||||
sequencer_loop_handle,
|
||||
sequencer_retry_pending_blocks_handle,
|
||||
temp_sequencer_dir,
|
||||
) = Self::setup_sequencer(sequencer_config)
|
||||
.await
|
||||
.context("Failed to setup sequencer")?;
|
||||
let (bedrock_compose, bedrock_addr) = Self::setup_bedrock_node().await?;
|
||||
|
||||
// Convert 0.0.0.0 to 127.0.0.1 for client connections
|
||||
// When binding to port 0, the server binds to 0.0.0.0:<random_port>
|
||||
// but clients need to connect to 127.0.0.1:<port> to work reliably
|
||||
let sequencer_addr = if sequencer_addr.ip().is_unspecified() {
|
||||
format!("http://127.0.0.1:{}", sequencer_addr.port())
|
||||
} else {
|
||||
format!("http://{sequencer_addr}")
|
||||
let indexer_handle = Self::setup_indexer(bedrock_addr)
|
||||
.await
|
||||
.context("Failed to setup Indexer")?;
|
||||
|
||||
let (sequencer_handle, temp_sequencer_dir) = Self::setup_sequencer(
|
||||
sequencer_partial_config,
|
||||
bedrock_addr,
|
||||
indexer_handle.addr(),
|
||||
&initial_data,
|
||||
)
|
||||
.await
|
||||
.context("Failed to setup Sequencer")?;
|
||||
|
||||
let (wallet, temp_wallet_dir, wallet_password) =
|
||||
Self::setup_wallet(sequencer_handle.addr(), &initial_data)
|
||||
.await
|
||||
.context("Failed to setup wallet")?;
|
||||
|
||||
let sequencer_url = config::addr_to_url(config::UrlProtocol::Http, sequencer_handle.addr())
|
||||
.context("Failed to convert sequencer addr to URL")?;
|
||||
let sequencer_client =
|
||||
SequencerClient::new(sequencer_url).context("Failed to create sequencer client")?;
|
||||
|
||||
Ok(Self {
|
||||
sequencer_client,
|
||||
wallet,
|
||||
wallet_password,
|
||||
bedrock_compose,
|
||||
sequencer_handle,
|
||||
indexer_handle,
|
||||
_temp_sequencer_dir: temp_sequencer_dir,
|
||||
_temp_wallet_dir: temp_wallet_dir,
|
||||
})
|
||||
}
|
||||
|
||||
async fn setup_bedrock_node() -> Result<(DockerCompose, SocketAddr)> {
|
||||
let manifest_dir = env!("CARGO_MANIFEST_DIR");
|
||||
let bedrock_compose_path =
|
||||
PathBuf::from(manifest_dir).join("../bedrock/docker-compose.yml");
|
||||
|
||||
let mut compose = DockerCompose::with_auto_client(&[bedrock_compose_path])
|
||||
.await
|
||||
.context("Failed to setup docker compose for Bedrock")?;
|
||||
|
||||
async fn up_and_retrieve_port(compose: &mut DockerCompose) -> Result<u16> {
|
||||
compose
|
||||
.up()
|
||||
.await
|
||||
.context("Failed to bring up Bedrock services")?;
|
||||
let container = compose
|
||||
.service(BEDROCK_SERVICE_WITH_OPEN_PORT)
|
||||
.with_context(|| {
|
||||
format!(
|
||||
"Failed to get Bedrock service container `{BEDROCK_SERVICE_WITH_OPEN_PORT}`"
|
||||
)
|
||||
})?;
|
||||
|
||||
let ports = container.ports().await.with_context(|| {
|
||||
format!(
|
||||
"Failed to get ports for Bedrock service container `{}`",
|
||||
container.id()
|
||||
)
|
||||
})?;
|
||||
ports
|
||||
.map_to_host_port_ipv4(BEDROCK_SERVICE_PORT)
|
||||
.with_context(|| {
|
||||
format!(
|
||||
"Failed to retrieve host port of {BEDROCK_SERVICE_PORT} container \
|
||||
port for container `{}`, existing ports: {ports:?}",
|
||||
container.id()
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
let mut port = None;
|
||||
let mut attempt = 0;
|
||||
let max_attempts = 5;
|
||||
while port.is_none() && attempt < max_attempts {
|
||||
attempt += 1;
|
||||
match up_and_retrieve_port(&mut compose).await {
|
||||
Ok(p) => {
|
||||
port = Some(p);
|
||||
}
|
||||
Err(err) => {
|
||||
warn!(
|
||||
"Failed to bring up Bedrock services: {err:?}, attempt {attempt}/{max_attempts}"
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
let Some(port) = port else {
|
||||
bail!("Failed to bring up Bedrock services after {max_attempts} attempts");
|
||||
};
|
||||
|
||||
let (wallet, temp_wallet_dir, wallet_password) = Self::setup_wallet(sequencer_addr.clone())
|
||||
let addr = SocketAddr::from(([127, 0, 0, 1], port));
|
||||
Ok((compose, addr))
|
||||
}
|
||||
|
||||
async fn setup_indexer(bedrock_addr: SocketAddr) -> Result<IndexerHandle> {
|
||||
let indexer_config =
|
||||
config::indexer_config(bedrock_addr).context("Failed to create Indexer config")?;
|
||||
|
||||
indexer_service::run_server(indexer_config, 0)
|
||||
.await
|
||||
.context("Failed to setup wallet")?;
|
||||
|
||||
let sequencer_client = SequencerClient::new(
|
||||
Url::parse(&sequencer_addr).context("Failed to parse sequencer addr")?,
|
||||
)
|
||||
.context("Failed to create sequencer client")?;
|
||||
|
||||
if let Some(mut indexer_config) = indexer_config {
|
||||
indexer_config.sequencer_client_config.addr =
|
||||
Url::parse(&sequencer_addr).context("Failed to parse sequencer addr")?;
|
||||
|
||||
let indexer_core = IndexerCore::new(indexer_config)?;
|
||||
|
||||
let indexer_loop_handle = Some(tokio::spawn(async move {
|
||||
indexer_core.subscribe_parse_block_stream().await
|
||||
}));
|
||||
|
||||
Ok(Self {
|
||||
sequencer_server_handle,
|
||||
sequencer_loop_handle,
|
||||
sequencer_retry_pending_blocks_handle,
|
||||
indexer_loop_handle,
|
||||
sequencer_client,
|
||||
wallet,
|
||||
_temp_sequencer_dir: temp_sequencer_dir,
|
||||
_temp_wallet_dir: temp_wallet_dir,
|
||||
wallet_password,
|
||||
})
|
||||
} else {
|
||||
Ok(Self {
|
||||
sequencer_server_handle,
|
||||
sequencer_loop_handle,
|
||||
sequencer_retry_pending_blocks_handle,
|
||||
indexer_loop_handle: None,
|
||||
sequencer_client,
|
||||
wallet,
|
||||
_temp_sequencer_dir: temp_sequencer_dir,
|
||||
_temp_wallet_dir: temp_wallet_dir,
|
||||
wallet_password,
|
||||
})
|
||||
}
|
||||
.context("Failed to run Indexer Service")
|
||||
}
|
||||
|
||||
async fn setup_sequencer(
|
||||
mut config: SequencerConfig,
|
||||
) -> Result<(
|
||||
ServerHandle,
|
||||
SocketAddr,
|
||||
JoinHandle<Result<()>>,
|
||||
JoinHandle<Result<()>>,
|
||||
TempDir,
|
||||
)> {
|
||||
partial: config::SequencerPartialConfig,
|
||||
bedrock_addr: SocketAddr,
|
||||
indexer_addr: SocketAddr,
|
||||
initial_data: &config::InitialData,
|
||||
) -> Result<(SequencerHandle, TempDir)> {
|
||||
let temp_sequencer_dir =
|
||||
tempfile::tempdir().context("Failed to create temp dir for sequencer home")?;
|
||||
|
||||
@ -176,43 +183,39 @@ impl TestContext {
|
||||
"Using temp sequencer home at {:?}",
|
||||
temp_sequencer_dir.path()
|
||||
);
|
||||
config.home = temp_sequencer_dir.path().to_owned();
|
||||
// Setting port to 0 lets the OS choose a free port for us
|
||||
config.port = 0;
|
||||
|
||||
let (
|
||||
sequencer_server_handle,
|
||||
sequencer_addr,
|
||||
sequencer_loop_handle,
|
||||
sequencer_retry_pending_blocks_handle,
|
||||
) = sequencer_runner::startup_sequencer(config).await?;
|
||||
let config = config::sequencer_config(
|
||||
partial,
|
||||
temp_sequencer_dir.path().to_owned(),
|
||||
bedrock_addr,
|
||||
indexer_addr,
|
||||
initial_data,
|
||||
)
|
||||
.context("Failed to create Sequencer config")?;
|
||||
|
||||
Ok((
|
||||
sequencer_server_handle,
|
||||
sequencer_addr,
|
||||
sequencer_loop_handle,
|
||||
sequencer_retry_pending_blocks_handle,
|
||||
temp_sequencer_dir,
|
||||
))
|
||||
let sequencer_handle = sequencer_runner::startup_sequencer(config).await?;
|
||||
|
||||
Ok((sequencer_handle, temp_sequencer_dir))
|
||||
}
|
||||
|
||||
async fn setup_wallet(sequencer_addr: String) -> Result<(WalletCore, TempDir, String)> {
|
||||
let manifest_dir = env!("CARGO_MANIFEST_DIR");
|
||||
let wallet_config_source_path =
|
||||
PathBuf::from(manifest_dir).join("configs/wallet/wallet_config.json");
|
||||
async fn setup_wallet(
|
||||
sequencer_addr: SocketAddr,
|
||||
initial_data: &config::InitialData,
|
||||
) -> Result<(WalletCore, TempDir, String)> {
|
||||
let config = config::wallet_config(sequencer_addr, initial_data)
|
||||
.context("Failed to create Wallet config")?;
|
||||
let config_serialized =
|
||||
serde_json::to_string_pretty(&config).context("Failed to serialize Wallet config")?;
|
||||
|
||||
let temp_wallet_dir =
|
||||
tempfile::tempdir().context("Failed to create temp dir for wallet home")?;
|
||||
|
||||
let config_path = temp_wallet_dir.path().join("wallet_config.json");
|
||||
std::fs::copy(&wallet_config_source_path, &config_path)
|
||||
.context("Failed to copy wallet config to temp dir")?;
|
||||
std::fs::write(&config_path, config_serialized)
|
||||
.context("Failed to write wallet config in temp dir")?;
|
||||
|
||||
let storage_path = temp_wallet_dir.path().join("storage.json");
|
||||
let config_overrides = WalletConfigOverrides {
|
||||
sequencer_addr: Some(sequencer_addr),
|
||||
..Default::default()
|
||||
};
|
||||
let config_overrides = WalletConfigOverrides::default();
|
||||
|
||||
let wallet_password = "test_pass".to_owned();
|
||||
let wallet = WalletCore::new_init_storage(
|
||||
@ -248,32 +251,71 @@ impl TestContext {
|
||||
pub fn sequencer_client(&self) -> &SequencerClient {
|
||||
&self.sequencer_client
|
||||
}
|
||||
|
||||
/// Get existing public account IDs in the wallet.
|
||||
pub fn existing_public_accounts(&self) -> Vec<AccountId> {
|
||||
self.wallet
|
||||
.storage()
|
||||
.user_data
|
||||
.public_account_ids()
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Get existing private account IDs in the wallet.
|
||||
pub fn existing_private_accounts(&self) -> Vec<AccountId> {
|
||||
self.wallet
|
||||
.storage()
|
||||
.user_data
|
||||
.private_account_ids()
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for TestContext {
|
||||
fn drop(&mut self) {
|
||||
debug!("Test context cleanup");
|
||||
|
||||
let Self {
|
||||
sequencer_server_handle,
|
||||
sequencer_loop_handle,
|
||||
sequencer_retry_pending_blocks_handle,
|
||||
indexer_loop_handle,
|
||||
sequencer_handle,
|
||||
indexer_handle,
|
||||
bedrock_compose,
|
||||
_temp_sequencer_dir: _,
|
||||
_temp_wallet_dir: _,
|
||||
sequencer_client: _,
|
||||
wallet: _,
|
||||
_temp_sequencer_dir,
|
||||
_temp_wallet_dir,
|
||||
wallet_password: _,
|
||||
} = self;
|
||||
|
||||
sequencer_loop_handle.abort();
|
||||
sequencer_retry_pending_blocks_handle.abort();
|
||||
if let Some(indexer_loop_handle) = indexer_loop_handle {
|
||||
indexer_loop_handle.abort();
|
||||
if sequencer_handle.is_finished() {
|
||||
let Err(err) = self
|
||||
.sequencer_handle
|
||||
.run_forever()
|
||||
.now_or_never()
|
||||
.expect("Future is finished and should be ready");
|
||||
error!(
|
||||
"Sequencer handle has unexpectedly finished before TestContext drop with error: {err:#}"
|
||||
);
|
||||
}
|
||||
|
||||
// Can't wait here as Drop can't be async, but anyway stop signal should be sent
|
||||
sequencer_server_handle.stop(true).now_or_never();
|
||||
if indexer_handle.is_stopped() {
|
||||
error!("Indexer handle has unexpectedly stopped before TestContext drop");
|
||||
}
|
||||
|
||||
let container = bedrock_compose
|
||||
.service(BEDROCK_SERVICE_WITH_OPEN_PORT)
|
||||
.unwrap_or_else(|| {
|
||||
panic!("Failed to get Bedrock service container `{BEDROCK_SERVICE_WITH_OPEN_PORT}`")
|
||||
});
|
||||
let output = std::process::Command::new("docker")
|
||||
.args(["inspect", "-f", "{{.State.Running}}", container.id()])
|
||||
.output()
|
||||
.expect("Failed to execute docker inspect command to check if Bedrock container is still running");
|
||||
let stdout = String::from_utf8(output.stdout)
|
||||
.expect("Failed to parse docker inspect output as String");
|
||||
if stdout.trim() != "true" {
|
||||
error!(
|
||||
"Bedrock container `{}` is not running during TestContext drop, docker inspect output: {stdout}",
|
||||
container.id()
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -291,31 +333,65 @@ impl BlockingTestContext {
    }
}

pub fn format_public_account_id(account_id: &str) -> String {
pub struct TestContextBuilder {
    initial_data: Option<config::InitialData>,
    sequencer_partial_config: Option<config::SequencerPartialConfig>,
}

impl TestContextBuilder {
    fn new() -> Self {
        Self {
            initial_data: None,
            sequencer_partial_config: None,
        }
    }

    pub fn with_initial_data(mut self, initial_data: config::InitialData) -> Self {
        self.initial_data = Some(initial_data);
        self
    }

    pub fn with_sequencer_partial_config(
        mut self,
        sequencer_partial_config: config::SequencerPartialConfig,
    ) -> Self {
        self.sequencer_partial_config = Some(sequencer_partial_config);
        self
    }

    pub async fn build(self) -> Result<TestContext> {
        TestContext::new_configured(
            self.sequencer_partial_config.unwrap_or_default(),
            self.initial_data.unwrap_or_else(|| {
                config::InitialData::with_two_public_and_two_private_initialized_accounts()
            }),
        )
        .await
    }
}

pub fn format_public_account_id(account_id: AccountId) -> String {
    format!("Public/{account_id}")
}

pub fn format_private_account_id(account_id: &str) -> String {
pub fn format_private_account_id(account_id: AccountId) -> String {
    format!("Private/{account_id}")
}

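A hedged sketch of the builder introduced above; the explicit values simply restate `SequencerPartialConfig::default()`, and the default initial data is the two-public/two-private set.

// Illustrative only; not taken verbatim from the tests in this commit.
let ctx = TestContext::builder()
    .with_sequencer_partial_config(config::SequencerPartialConfig {
        max_num_tx_in_block: 20,
        mempool_max_size: 10_000,
        block_create_timeout_millis: 10_000,
    })
    .build()
    .await?;
let sender = ctx.existing_public_accounts()[0];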
pub async fn fetch_privacy_preserving_tx(
|
||||
seq_client: &SequencerClient,
|
||||
tx_hash: String,
|
||||
tx_hash: HashType,
|
||||
) -> PrivacyPreservingTransaction {
|
||||
let transaction_encoded = seq_client
|
||||
.get_transaction_by_hash(tx_hash.clone())
|
||||
.get_transaction_by_hash(tx_hash)
|
||||
.await
|
||||
.unwrap()
|
||||
.transaction
|
||||
.unwrap();
|
||||
|
||||
let tx_base64_decode = BASE64.decode(transaction_encoded).unwrap();
|
||||
match NSSATransaction::try_from(
|
||||
&borsh::from_slice::<EncodedTransaction>(&tx_base64_decode).unwrap(),
|
||||
)
|
||||
.unwrap()
|
||||
{
|
||||
let tx_bytes = BASE64.decode(transaction_encoded).unwrap();
|
||||
let tx = borsh::from_slice(&tx_bytes).unwrap();
|
||||
match tx {
|
||||
NSSATransaction::PrivacyPreserving(privacy_preserving_transaction) => {
|
||||
privacy_preserving_transaction
|
||||
}
|
||||
@ -332,20 +408,3 @@ pub async fn verify_commitment_is_in_state(
|
||||
Ok(Some(_))
|
||||
)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::{format_private_account_id, format_public_account_id};
|
||||
|
||||
#[test]
|
||||
fn correct_account_id_from_prefix() {
|
||||
let account_id1 = "cafecafe";
|
||||
let account_id2 = "deadbeaf";
|
||||
|
||||
let account_id1_pub = format_public_account_id(account_id1);
|
||||
let account_id2_priv = format_private_account_id(account_id2);
|
||||
|
||||
assert_eq!(account_id1_pub, "Public/cafecafe".to_string());
|
||||
assert_eq!(account_id2_priv, "Private/deadbeaf".to_string());
|
||||
}
|
||||
}
|
||||
|
||||
@ -1,5 +1,5 @@
|
||||
use anyhow::Result;
|
||||
use integration_tests::{ACC_SENDER, TestContext};
|
||||
use integration_tests::TestContext;
|
||||
use log::info;
|
||||
use nssa::program::Program;
|
||||
use tokio::test;
|
||||
@ -10,7 +10,7 @@ async fn get_existing_account() -> Result<()> {
|
||||
|
||||
let account = ctx
|
||||
.sequencer_client()
|
||||
.get_account(ACC_SENDER.to_string())
|
||||
.get_account(ctx.existing_public_accounts()[0])
|
||||
.await?
|
||||
.account;
|
||||
|
||||
|
||||
@ -88,8 +88,8 @@ async fn amm_public() -> Result<()> {
|
||||
|
||||
// Create new token
|
||||
let subcommand = TokenProgramAgnosticSubcommand::New {
|
||||
definition_account_id: format_public_account_id(&definition_account_id_1.to_string()),
|
||||
supply_account_id: format_public_account_id(&supply_account_id_1.to_string()),
|
||||
definition_account_id: format_public_account_id(definition_account_id_1),
|
||||
supply_account_id: format_public_account_id(supply_account_id_1),
|
||||
name: "A NAM1".to_string(),
|
||||
total_supply: 37,
|
||||
};
|
||||
@ -99,10 +99,8 @@ async fn amm_public() -> Result<()> {
|
||||
|
||||
// Transfer 7 tokens from `supply_acc` to the account at account_id `recipient_account_id_1`
|
||||
let subcommand = TokenProgramAgnosticSubcommand::Send {
|
||||
from: format_public_account_id(&supply_account_id_1.to_string()),
|
||||
to: Some(format_public_account_id(
|
||||
&recipient_account_id_1.to_string(),
|
||||
)),
|
||||
from: format_public_account_id(supply_account_id_1),
|
||||
to: Some(format_public_account_id(recipient_account_id_1)),
|
||||
to_npk: None,
|
||||
to_vpk: None,
|
||||
amount: 7,
|
||||
@ -114,8 +112,8 @@ async fn amm_public() -> Result<()> {
|
||||
|
||||
// Create new token
|
||||
let subcommand = TokenProgramAgnosticSubcommand::New {
|
||||
definition_account_id: format_public_account_id(&definition_account_id_2.to_string()),
|
||||
supply_account_id: format_public_account_id(&supply_account_id_2.to_string()),
|
||||
definition_account_id: format_public_account_id(definition_account_id_2),
|
||||
supply_account_id: format_public_account_id(supply_account_id_2),
|
||||
name: "A NAM2".to_string(),
|
||||
total_supply: 37,
|
||||
};
|
||||
@ -125,10 +123,8 @@ async fn amm_public() -> Result<()> {
|
||||
|
||||
// Transfer 7 tokens from `supply_acc` to the account at account_id `recipient_account_id_2`
|
||||
let subcommand = TokenProgramAgnosticSubcommand::Send {
|
||||
from: format_public_account_id(&supply_account_id_2.to_string()),
|
||||
to: Some(format_public_account_id(
|
||||
&recipient_account_id_2.to_string(),
|
||||
)),
|
||||
from: format_public_account_id(supply_account_id_2),
|
||||
to: Some(format_public_account_id(recipient_account_id_2)),
|
||||
to_npk: None,
|
||||
to_vpk: None,
|
||||
amount: 7,
|
||||
@ -157,9 +153,9 @@ async fn amm_public() -> Result<()> {
|
||||
|
||||
// Send creation tx
|
||||
let subcommand = AmmProgramAgnosticSubcommand::New {
|
||||
user_holding_a: format_public_account_id(&recipient_account_id_1.to_string()),
|
||||
user_holding_b: format_public_account_id(&recipient_account_id_2.to_string()),
|
||||
user_holding_lp: format_public_account_id(&user_holding_lp.to_string()),
|
||||
user_holding_a: format_public_account_id(recipient_account_id_1),
|
||||
user_holding_b: format_public_account_id(recipient_account_id_2),
|
||||
user_holding_lp: format_public_account_id(user_holding_lp),
|
||||
balance_a: 3,
|
||||
balance_b: 3,
|
||||
};
|
||||
@ -170,19 +166,19 @@ async fn amm_public() -> Result<()> {
|
||||
|
||||
let user_holding_a_acc = ctx
|
||||
.sequencer_client()
|
||||
.get_account(recipient_account_id_1.to_string())
|
||||
.get_account(recipient_account_id_1)
|
||||
.await?
|
||||
.account;
|
||||
|
||||
let user_holding_b_acc = ctx
|
||||
.sequencer_client()
|
||||
.get_account(recipient_account_id_2.to_string())
|
||||
.get_account(recipient_account_id_2)
|
||||
.await?
|
||||
.account;
|
||||
|
||||
let user_holding_lp_acc = ctx
|
||||
.sequencer_client()
|
||||
.get_account(user_holding_lp.to_string())
|
||||
.get_account(user_holding_lp)
|
||||
.await?
|
||||
.account;
|
||||
|
||||
@ -206,8 +202,8 @@ async fn amm_public() -> Result<()> {
|
||||
// Make swap
|
||||
|
||||
let subcommand = AmmProgramAgnosticSubcommand::Swap {
|
||||
user_holding_a: format_public_account_id(&recipient_account_id_1.to_string()),
|
||||
user_holding_b: format_public_account_id(&recipient_account_id_2.to_string()),
|
||||
user_holding_a: format_public_account_id(recipient_account_id_1),
|
||||
user_holding_b: format_public_account_id(recipient_account_id_2),
|
||||
amount_in: 2,
|
||||
min_amount_out: 1,
|
||||
token_definition: definition_account_id_1.to_string(),
|
||||
@ -219,19 +215,19 @@ async fn amm_public() -> Result<()> {
|
||||
|
||||
let user_holding_a_acc = ctx
|
||||
.sequencer_client()
|
||||
.get_account(recipient_account_id_1.to_string())
|
||||
.get_account(recipient_account_id_1)
|
||||
.await?
|
||||
.account;
|
||||
|
||||
let user_holding_b_acc = ctx
|
||||
.sequencer_client()
|
||||
.get_account(recipient_account_id_2.to_string())
|
||||
.get_account(recipient_account_id_2)
|
||||
.await?
|
||||
.account;
|
||||
|
||||
let user_holding_lp_acc = ctx
|
||||
.sequencer_client()
|
||||
.get_account(user_holding_lp.to_string())
|
||||
.get_account(user_holding_lp)
|
||||
.await?
|
||||
.account;
|
||||
|
||||
@ -255,8 +251,8 @@ async fn amm_public() -> Result<()> {
|
||||
// Make swap
|
||||
|
||||
let subcommand = AmmProgramAgnosticSubcommand::Swap {
|
||||
user_holding_a: format_public_account_id(&recipient_account_id_1.to_string()),
|
||||
user_holding_b: format_public_account_id(&recipient_account_id_2.to_string()),
|
||||
user_holding_a: format_public_account_id(recipient_account_id_1),
|
||||
user_holding_b: format_public_account_id(recipient_account_id_2),
|
||||
amount_in: 2,
|
||||
min_amount_out: 1,
|
||||
token_definition: definition_account_id_2.to_string(),
|
||||
@ -268,19 +264,19 @@ async fn amm_public() -> Result<()> {
|
||||
|
||||
let user_holding_a_acc = ctx
|
||||
.sequencer_client()
|
||||
.get_account(recipient_account_id_1.to_string())
|
||||
.get_account(recipient_account_id_1)
|
||||
.await?
|
||||
.account;
|
||||
|
||||
let user_holding_b_acc = ctx
|
||||
.sequencer_client()
|
||||
.get_account(recipient_account_id_2.to_string())
|
||||
.get_account(recipient_account_id_2)
|
||||
.await?
|
||||
.account;
|
||||
|
||||
let user_holding_lp_acc = ctx
|
||||
.sequencer_client()
|
||||
.get_account(user_holding_lp.to_string())
|
||||
.get_account(user_holding_lp)
|
||||
.await?
|
||||
.account;
|
||||
|
||||
@ -304,9 +300,9 @@ async fn amm_public() -> Result<()> {
|
||||
// Add liquidity
|
||||
|
||||
let subcommand = AmmProgramAgnosticSubcommand::AddLiquidity {
|
||||
user_holding_a: format_public_account_id(&recipient_account_id_1.to_string()),
|
||||
user_holding_b: format_public_account_id(&recipient_account_id_2.to_string()),
|
||||
user_holding_lp: format_public_account_id(&user_holding_lp.to_string()),
|
||||
user_holding_a: format_public_account_id(recipient_account_id_1),
|
||||
user_holding_b: format_public_account_id(recipient_account_id_2),
|
||||
user_holding_lp: format_public_account_id(user_holding_lp),
|
||||
min_amount_lp: 1,
|
||||
max_amount_a: 2,
|
||||
max_amount_b: 2,
|
||||
@ -318,19 +314,19 @@ async fn amm_public() -> Result<()> {
|
||||
|
||||
let user_holding_a_acc = ctx
|
||||
.sequencer_client()
|
||||
.get_account(recipient_account_id_1.to_string())
|
||||
.get_account(recipient_account_id_1)
|
||||
.await?
|
||||
.account;
|
||||
|
||||
let user_holding_b_acc = ctx
|
||||
.sequencer_client()
|
||||
.get_account(recipient_account_id_2.to_string())
|
||||
.get_account(recipient_account_id_2)
|
||||
.await?
|
||||
.account;
|
||||
|
||||
let user_holding_lp_acc = ctx
|
||||
.sequencer_client()
|
||||
.get_account(user_holding_lp.to_string())
|
||||
.get_account(user_holding_lp)
|
||||
.await?
|
||||
.account;
|
||||
|
||||
@ -354,9 +350,9 @@ async fn amm_public() -> Result<()> {
|
||||
// Remove liquidity
|
||||
|
||||
let subcommand = AmmProgramAgnosticSubcommand::RemoveLiquidity {
|
||||
user_holding_a: format_public_account_id(&recipient_account_id_1.to_string()),
|
||||
user_holding_b: format_public_account_id(&recipient_account_id_2.to_string()),
|
||||
user_holding_lp: format_public_account_id(&user_holding_lp.to_string()),
|
||||
user_holding_a: format_public_account_id(recipient_account_id_1),
|
||||
user_holding_b: format_public_account_id(recipient_account_id_2),
|
||||
user_holding_lp: format_public_account_id(user_holding_lp),
|
||||
balance_lp: 2,
|
||||
min_amount_a: 1,
|
||||
min_amount_b: 1,
|
||||
@ -368,19 +364,19 @@ async fn amm_public() -> Result<()> {
|
||||
|
||||
let user_holding_a_acc = ctx
|
||||
.sequencer_client()
|
||||
.get_account(recipient_account_id_1.to_string())
|
||||
.get_account(recipient_account_id_1)
|
||||
.await?
|
||||
.account;
|
||||
|
||||
let user_holding_b_acc = ctx
|
||||
.sequencer_client()
|
||||
.get_account(recipient_account_id_2.to_string())
|
||||
.get_account(recipient_account_id_2)
|
||||
.await?
|
||||
.account;
|
||||
|
||||
let user_holding_lp_acc = ctx
|
||||
.sequencer_client()
|
||||
.get_account(user_holding_lp.to_string())
|
||||
.get_account(user_holding_lp)
|
||||
.await?
|
||||
.account;
|
||||
|
||||
|
||||
@ -2,7 +2,6 @@ use std::time::Duration;
|
||||
|
||||
use anyhow::{Context as _, Result};
|
||||
use integration_tests::{
|
||||
ACC_RECEIVER, ACC_RECEIVER_PRIVATE, ACC_SENDER, ACC_SENDER_PRIVATE,
|
||||
TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, fetch_privacy_preserving_tx,
|
||||
format_private_account_id, format_public_account_id, verify_commitment_is_in_state,
|
||||
};
|
||||
@ -20,12 +19,12 @@ use wallet::cli::{
|
||||
async fn private_transfer_to_owned_account() -> Result<()> {
|
||||
let mut ctx = TestContext::new().await?;
|
||||
|
||||
let from: AccountId = ACC_SENDER_PRIVATE.parse()?;
|
||||
let to: AccountId = ACC_RECEIVER_PRIVATE.parse()?;
|
||||
let from: AccountId = ctx.existing_private_accounts()[0];
|
||||
let to: AccountId = ctx.existing_private_accounts()[1];
|
||||
|
||||
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
|
||||
from: format_private_account_id(&from.to_string()),
|
||||
to: Some(format_private_account_id(&to.to_string())),
|
||||
from: format_private_account_id(from),
|
||||
to: Some(format_private_account_id(to)),
|
||||
to_npk: None,
|
||||
to_vpk: None,
|
||||
amount: 100,
|
||||
@ -38,13 +37,13 @@ async fn private_transfer_to_owned_account() -> Result<()> {
|
||||
|
||||
let new_commitment1 = ctx
|
||||
.wallet()
|
||||
.get_private_account_commitment(&from)
|
||||
.get_private_account_commitment(from)
|
||||
.context("Failed to get private account commitment for sender")?;
|
||||
assert!(verify_commitment_is_in_state(new_commitment1, ctx.sequencer_client()).await);
|
||||
|
||||
let new_commitment2 = ctx
|
||||
.wallet()
|
||||
.get_private_account_commitment(&to)
|
||||
.get_private_account_commitment(to)
|
||||
.context("Failed to get private account commitment for receiver")?;
|
||||
assert!(verify_commitment_is_in_state(new_commitment2, ctx.sequencer_client()).await);
|
||||
|
||||
@ -57,13 +56,13 @@ async fn private_transfer_to_owned_account() -> Result<()> {
|
||||
async fn private_transfer_to_foreign_account() -> Result<()> {
|
||||
let mut ctx = TestContext::new().await?;
|
||||
|
||||
let from: AccountId = ACC_SENDER_PRIVATE.parse()?;
|
||||
let from: AccountId = ctx.existing_private_accounts()[0];
|
||||
let to_npk = NullifierPublicKey([42; 32]);
|
||||
let to_npk_string = hex::encode(to_npk.0);
|
||||
let to_vpk = Secp256k1Point::from_scalar(to_npk.0);
|
||||
|
||||
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
|
||||
from: format_private_account_id(&from.to_string()),
|
||||
from: format_private_account_id(from),
|
||||
to: None,
|
||||
to_npk: Some(to_npk_string),
|
||||
to_vpk: Some(hex::encode(to_vpk.0)),
|
||||
@ -80,10 +79,10 @@ async fn private_transfer_to_foreign_account() -> Result<()> {
|
||||
|
||||
let new_commitment1 = ctx
|
||||
.wallet()
|
||||
.get_private_account_commitment(&from)
|
||||
.get_private_account_commitment(from)
|
||||
.context("Failed to get private account commitment for sender")?;
|
||||
|
||||
let tx = fetch_privacy_preserving_tx(ctx.sequencer_client(), tx_hash.clone()).await;
|
||||
let tx = fetch_privacy_preserving_tx(ctx.sequencer_client(), tx_hash).await;
|
||||
assert_eq!(tx.message.new_commitments[0], new_commitment1);
|
||||
|
||||
assert_eq!(tx.message.new_commitments.len(), 2);
|
||||
@ -100,19 +99,19 @@ async fn private_transfer_to_foreign_account() -> Result<()> {
|
||||
async fn deshielded_transfer_to_public_account() -> Result<()> {
|
||||
let mut ctx = TestContext::new().await?;
|
||||
|
||||
let from: AccountId = ACC_SENDER_PRIVATE.parse()?;
|
||||
let to: AccountId = ACC_RECEIVER.parse()?;
|
||||
let from: AccountId = ctx.existing_private_accounts()[0];
|
||||
let to: AccountId = ctx.existing_public_accounts()[1];
|
||||
|
||||
// Check initial balance of the private sender
|
||||
let from_acc = ctx
|
||||
.wallet()
|
||||
.get_account_private(&from)
|
||||
.get_account_private(from)
|
||||
.context("Failed to get sender's private account")?;
|
||||
assert_eq!(from_acc.balance, 10000);
|
||||
|
||||
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
|
||||
from: format_private_account_id(&from.to_string()),
|
||||
to: Some(format_public_account_id(&to.to_string())),
|
||||
from: format_private_account_id(from),
|
||||
to: Some(format_public_account_id(to)),
|
||||
to_npk: None,
|
||||
to_vpk: None,
|
||||
amount: 100,
|
||||
@ -125,18 +124,15 @@ async fn deshielded_transfer_to_public_account() -> Result<()> {
|
||||
|
||||
let from_acc = ctx
|
||||
.wallet()
|
||||
.get_account_private(&from)
|
||||
.get_account_private(from)
|
||||
.context("Failed to get sender's private account")?;
|
||||
let new_commitment = ctx
|
||||
.wallet()
|
||||
.get_private_account_commitment(&from)
|
||||
.get_private_account_commitment(from)
|
||||
.context("Failed to get private account commitment")?;
|
||||
assert!(verify_commitment_is_in_state(new_commitment, ctx.sequencer_client()).await);
|
||||
|
||||
let acc_2_balance = ctx
|
||||
.sequencer_client()
|
||||
.get_account_balance(to.to_string())
|
||||
.await?;
|
||||
let acc_2_balance = ctx.sequencer_client().get_account_balance(to).await?;
|
||||
|
||||
assert_eq!(from_acc.balance, 9900);
|
||||
assert_eq!(acc_2_balance.balance, 20100);
|
||||
@ -150,7 +146,7 @@ async fn deshielded_transfer_to_public_account() -> Result<()> {
|
||||
async fn private_transfer_to_owned_account_using_claiming_path() -> Result<()> {
|
||||
let mut ctx = TestContext::new().await?;
|
||||
|
||||
let from: AccountId = ACC_SENDER_PRIVATE.parse()?;
|
||||
let from: AccountId = ctx.existing_private_accounts()[0];
|
||||
|
||||
// Create a new private account
|
||||
let command = Command::Account(AccountSubcommand::New(NewSubcommand::Private { cci: None }));
|
||||
@ -168,13 +164,13 @@ async fn private_transfer_to_owned_account_using_claiming_path() -> Result<()> {
|
||||
.wallet()
|
||||
.storage()
|
||||
.user_data
|
||||
.get_private_account(&to_account_id)
|
||||
.get_private_account(to_account_id)
|
||||
.cloned()
|
||||
.context("Failed to get private account")?;
|
||||
|
||||
// Send to this account using claiming path (using npk and vpk instead of account ID)
|
||||
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
|
||||
from: format_private_account_id(&from.to_string()),
|
||||
from: format_private_account_id(from),
|
||||
to: None,
|
||||
to_npk: Some(hex::encode(to_keys.nullifer_public_key.0)),
|
||||
to_vpk: Some(hex::encode(to_keys.viewing_public_key.0)),
|
||||
@ -186,7 +182,7 @@ async fn private_transfer_to_owned_account_using_claiming_path() -> Result<()> {
|
||||
anyhow::bail!("Expected PrivacyPreservingTransfer return value");
|
||||
};
|
||||
|
||||
let tx = fetch_privacy_preserving_tx(ctx.sequencer_client(), tx_hash.clone()).await;
|
||||
let tx = fetch_privacy_preserving_tx(ctx.sequencer_client(), tx_hash).await;
|
||||
|
||||
// Sync the wallet to claim the new account
|
||||
let command = Command::Account(AccountSubcommand::SyncPrivate {});
|
||||
@ -194,7 +190,7 @@ async fn private_transfer_to_owned_account_using_claiming_path() -> Result<()> {
|
||||
|
||||
let new_commitment1 = ctx
|
||||
.wallet()
|
||||
.get_private_account_commitment(&from)
|
||||
.get_private_account_commitment(from)
|
||||
.context("Failed to get private account commitment for sender")?;
|
||||
assert_eq!(tx.message.new_commitments[0], new_commitment1);
|
||||
|
||||
@ -205,7 +201,7 @@ async fn private_transfer_to_owned_account_using_claiming_path() -> Result<()> {
|
||||
|
||||
let to_res_acc = ctx
|
||||
.wallet()
|
||||
.get_account_private(&to_account_id)
|
||||
.get_account_private(to_account_id)
|
||||
.context("Failed to get recipient's private account")?;
|
||||
assert_eq!(to_res_acc.balance, 100);
|
||||
|
||||
@ -218,12 +214,12 @@ async fn private_transfer_to_owned_account_using_claiming_path() -> Result<()> {
|
||||
async fn shielded_transfer_to_owned_private_account() -> Result<()> {
|
||||
let mut ctx = TestContext::new().await?;
|
||||
|
||||
let from: AccountId = ACC_SENDER.parse()?;
|
||||
let to: AccountId = ACC_RECEIVER_PRIVATE.parse()?;
|
||||
let from: AccountId = ctx.existing_public_accounts()[0];
|
||||
let to: AccountId = ctx.existing_private_accounts()[1];
|
||||
|
||||
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
|
||||
from: format_public_account_id(&from.to_string()),
|
||||
to: Some(format_private_account_id(&to.to_string())),
|
||||
from: format_public_account_id(from),
|
||||
to: Some(format_private_account_id(to)),
|
||||
to_npk: None,
|
||||
to_vpk: None,
|
||||
amount: 100,
|
||||
@ -236,18 +232,15 @@ async fn shielded_transfer_to_owned_private_account() -> Result<()> {
|
||||
|
||||
let acc_to = ctx
|
||||
.wallet()
|
||||
.get_account_private(&to)
|
||||
.get_account_private(to)
|
||||
.context("Failed to get receiver's private account")?;
|
||||
let new_commitment = ctx
|
||||
.wallet()
|
||||
.get_private_account_commitment(&to)
|
||||
.get_private_account_commitment(to)
|
||||
.context("Failed to get receiver's commitment")?;
|
||||
assert!(verify_commitment_is_in_state(new_commitment, ctx.sequencer_client()).await);
|
||||
|
||||
let acc_from_balance = ctx
|
||||
.sequencer_client()
|
||||
.get_account_balance(from.to_string())
|
||||
.await?;
|
||||
let acc_from_balance = ctx.sequencer_client().get_account_balance(from).await?;
|
||||
|
||||
assert_eq!(acc_from_balance.balance, 9900);
|
||||
assert_eq!(acc_to.balance, 20100);
|
||||
@ -264,10 +257,10 @@ async fn shielded_transfer_to_foreign_account() -> Result<()> {
|
||||
let to_npk = NullifierPublicKey([42; 32]);
|
||||
let to_npk_string = hex::encode(to_npk.0);
|
||||
let to_vpk = Secp256k1Point::from_scalar(to_npk.0);
|
||||
let from: AccountId = ACC_SENDER.parse()?;
|
||||
let from: AccountId = ctx.existing_public_accounts()[0];
|
||||
|
||||
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
|
||||
from: format_public_account_id(&from.to_string()),
|
||||
from: format_public_account_id(from),
|
||||
to: None,
|
||||
to_npk: Some(to_npk_string),
|
||||
to_vpk: Some(hex::encode(to_vpk.0)),
|
||||
@ -284,10 +277,7 @@ async fn shielded_transfer_to_foreign_account() -> Result<()> {
|
||||
|
||||
let tx = fetch_privacy_preserving_tx(ctx.sequencer_client(), tx_hash).await;
|
||||
|
||||
let acc_1_balance = ctx
|
||||
.sequencer_client()
|
||||
.get_account_balance(from.to_string())
|
||||
.await?;
|
||||
let acc_1_balance = ctx.sequencer_client().get_account_balance(from).await?;
|
||||
|
||||
assert!(
|
||||
verify_commitment_is_in_state(
|
||||
@ -313,7 +303,7 @@ async fn private_transfer_to_owned_account_continuous_run_path() -> Result<()> {
// The original implementation spawned wallet::cli::execute_continuous_run() in background
// but this conflicts with TestContext's wallet management

let from: AccountId = ACC_SENDER_PRIVATE.parse()?;
let from: AccountId = ctx.existing_private_accounts()[0];

// Create a new private account
let command = Command::Account(AccountSubcommand::New(NewSubcommand::Private { cci: None }));
@ -331,13 +321,13 @@ async fn private_transfer_to_owned_account_continuous_run_path() -> Result<()> {
|
||||
.wallet()
|
||||
.storage()
|
||||
.user_data
|
||||
.get_private_account(&to_account_id)
|
||||
.get_private_account(to_account_id)
|
||||
.cloned()
|
||||
.context("Failed to get private account")?;
|
||||
|
||||
// Send transfer using nullifier and viewing public keys
|
||||
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
|
||||
from: format_private_account_id(&from.to_string()),
|
||||
from: format_private_account_id(from),
|
||||
to: None,
|
||||
to_npk: Some(hex::encode(to_keys.nullifer_public_key.0)),
|
||||
to_vpk: Some(hex::encode(to_keys.viewing_public_key.0)),
|
||||
@ -349,7 +339,7 @@ async fn private_transfer_to_owned_account_continuous_run_path() -> Result<()> {
|
||||
anyhow::bail!("Failed to send transaction");
|
||||
};
|
||||
|
||||
let tx = fetch_privacy_preserving_tx(ctx.sequencer_client(), tx_hash.clone()).await;
|
||||
let tx = fetch_privacy_preserving_tx(ctx.sequencer_client(), tx_hash).await;
|
||||
|
||||
info!("Waiting for next blocks to check if continuous run fetches account");
|
||||
tokio::time::sleep(Duration::from_secs(TIME_TO_WAIT_FOR_BLOCK_SECONDS)).await;
|
||||
@ -364,7 +354,7 @@ async fn private_transfer_to_owned_account_continuous_run_path() -> Result<()> {
|
||||
// Verify receiver account balance
|
||||
let to_res_acc = ctx
|
||||
.wallet()
|
||||
.get_account_private(&to_account_id)
|
||||
.get_account_private(to_account_id)
|
||||
.context("Failed to get receiver account")?;
|
||||
|
||||
assert_eq!(to_res_acc.balance, 100);
|
||||
@ -383,7 +373,7 @@ async fn initialize_private_account() -> Result<()> {
};

let command = Command::AuthTransfer(AuthTransferSubcommand::Init {
account_id: format_private_account_id(&account_id.to_string()),
account_id: format_private_account_id(account_id),
});
wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?;

@ -395,13 +385,13 @@ async fn initialize_private_account() -> Result<()> {
|
||||
|
||||
let new_commitment = ctx
|
||||
.wallet()
|
||||
.get_private_account_commitment(&account_id)
|
||||
.get_private_account_commitment(account_id)
|
||||
.context("Failed to get private account commitment")?;
|
||||
assert!(verify_commitment_is_in_state(new_commitment, ctx.sequencer_client()).await);
|
||||
|
||||
let account = ctx
|
||||
.wallet()
|
||||
.get_account_private(&account_id)
|
||||
.get_account_private(account_id)
|
||||
.context("Failed to get private account")?;
|
||||
|
||||
assert_eq!(
|
||||
|
||||
@ -1,9 +1,7 @@
use std::time::Duration;

use anyhow::Result;
use integration_tests::{
ACC_RECEIVER, ACC_SENDER, TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, format_public_account_id,
};
use integration_tests::{TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, format_public_account_id};
use log::info;
use nssa::program::Program;
use tokio::test;
@ -18,8 +16,8 @@ async fn successful_transfer_to_existing_account() -> Result<()> {
|
||||
let mut ctx = TestContext::new().await?;
|
||||
|
||||
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
|
||||
from: format_public_account_id(ACC_SENDER),
|
||||
to: Some(format_public_account_id(ACC_RECEIVER)),
|
||||
from: format_public_account_id(ctx.existing_public_accounts()[0]),
|
||||
to: Some(format_public_account_id(ctx.existing_public_accounts()[1])),
|
||||
to_npk: None,
|
||||
to_vpk: None,
|
||||
amount: 100,
|
||||
@ -33,11 +31,11 @@ async fn successful_transfer_to_existing_account() -> Result<()> {
|
||||
info!("Checking correct balance move");
|
||||
let acc_1_balance = ctx
|
||||
.sequencer_client()
|
||||
.get_account_balance(ACC_SENDER.to_string())
|
||||
.get_account_balance(ctx.existing_public_accounts()[0])
|
||||
.await?;
|
||||
let acc_2_balance = ctx
|
||||
.sequencer_client()
|
||||
.get_account_balance(ACC_RECEIVER.to_string())
|
||||
.get_account_balance(ctx.existing_public_accounts()[1])
|
||||
.await?;
|
||||
|
||||
info!("Balance of sender: {acc_1_balance:#?}");
|
||||
@ -64,17 +62,15 @@ pub async fn successful_transfer_to_new_account() -> Result<()> {
|
||||
.storage()
|
||||
.user_data
|
||||
.account_ids()
|
||||
.map(ToString::to_string)
|
||||
.find(|acc_id| acc_id != ACC_SENDER && acc_id != ACC_RECEIVER)
|
||||
.find(|acc_id| {
|
||||
*acc_id != ctx.existing_public_accounts()[0]
|
||||
&& *acc_id != ctx.existing_public_accounts()[1]
|
||||
})
|
||||
.expect("Failed to find newly created account in the wallet storage");
|
||||
|
||||
if new_persistent_account_id == String::new() {
|
||||
panic!("Failed to produce new account, not present in persistent accounts");
|
||||
}
|
||||
|
||||
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
|
||||
from: format_public_account_id(ACC_SENDER),
|
||||
to: Some(format_public_account_id(&new_persistent_account_id)),
|
||||
from: format_public_account_id(ctx.existing_public_accounts()[0]),
|
||||
to: Some(format_public_account_id(new_persistent_account_id)),
|
||||
to_npk: None,
|
||||
to_vpk: None,
|
||||
amount: 100,
|
||||
@ -88,7 +84,7 @@ pub async fn successful_transfer_to_new_account() -> Result<()> {
|
||||
info!("Checking correct balance move");
|
||||
let acc_1_balance = ctx
|
||||
.sequencer_client()
|
||||
.get_account_balance(ACC_SENDER.to_string())
|
||||
.get_account_balance(ctx.existing_public_accounts()[0])
|
||||
.await?;
|
||||
let acc_2_balance = ctx
|
||||
.sequencer_client()
|
||||
@ -109,8 +105,8 @@ async fn failed_transfer_with_insufficient_balance() -> Result<()> {
|
||||
let mut ctx = TestContext::new().await?;
|
||||
|
||||
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
|
||||
from: format_public_account_id(ACC_SENDER),
|
||||
to: Some(format_public_account_id(ACC_RECEIVER)),
|
||||
from: format_public_account_id(ctx.existing_public_accounts()[0]),
|
||||
to: Some(format_public_account_id(ctx.existing_public_accounts()[1])),
|
||||
to_npk: None,
|
||||
to_vpk: None,
|
||||
amount: 1000000,
|
||||
@ -125,11 +121,11 @@ async fn failed_transfer_with_insufficient_balance() -> Result<()> {
|
||||
info!("Checking balances unchanged");
|
||||
let acc_1_balance = ctx
|
||||
.sequencer_client()
|
||||
.get_account_balance(ACC_SENDER.to_string())
|
||||
.get_account_balance(ctx.existing_public_accounts()[0])
|
||||
.await?;
|
||||
let acc_2_balance = ctx
|
||||
.sequencer_client()
|
||||
.get_account_balance(ACC_RECEIVER.to_string())
|
||||
.get_account_balance(ctx.existing_public_accounts()[1])
|
||||
.await?;
|
||||
|
||||
info!("Balance of sender: {acc_1_balance:#?}");
|
||||
@ -147,8 +143,8 @@ async fn two_consecutive_successful_transfers() -> Result<()> {
|
||||
|
||||
// First transfer
|
||||
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
|
||||
from: format_public_account_id(ACC_SENDER),
|
||||
to: Some(format_public_account_id(ACC_RECEIVER)),
|
||||
from: format_public_account_id(ctx.existing_public_accounts()[0]),
|
||||
to: Some(format_public_account_id(ctx.existing_public_accounts()[1])),
|
||||
to_npk: None,
|
||||
to_vpk: None,
|
||||
amount: 100,
|
||||
@ -162,11 +158,11 @@ async fn two_consecutive_successful_transfers() -> Result<()> {
|
||||
info!("Checking correct balance move after first transfer");
|
||||
let acc_1_balance = ctx
|
||||
.sequencer_client()
|
||||
.get_account_balance(ACC_SENDER.to_string())
|
||||
.get_account_balance(ctx.existing_public_accounts()[0])
|
||||
.await?;
|
||||
let acc_2_balance = ctx
|
||||
.sequencer_client()
|
||||
.get_account_balance(ACC_RECEIVER.to_string())
|
||||
.get_account_balance(ctx.existing_public_accounts()[1])
|
||||
.await?;
|
||||
|
||||
info!("Balance of sender: {acc_1_balance:#?}");
|
||||
@ -179,8 +175,8 @@ async fn two_consecutive_successful_transfers() -> Result<()> {
|
||||
|
||||
// Second transfer
|
||||
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
|
||||
from: format_public_account_id(ACC_SENDER),
|
||||
to: Some(format_public_account_id(ACC_RECEIVER)),
|
||||
from: format_public_account_id(ctx.existing_public_accounts()[0]),
|
||||
to: Some(format_public_account_id(ctx.existing_public_accounts()[1])),
|
||||
to_npk: None,
|
||||
to_vpk: None,
|
||||
amount: 100,
|
||||
@ -194,11 +190,11 @@ async fn two_consecutive_successful_transfers() -> Result<()> {
|
||||
info!("Checking correct balance move after second transfer");
|
||||
let acc_1_balance = ctx
|
||||
.sequencer_client()
|
||||
.get_account_balance(ACC_SENDER.to_string())
|
||||
.get_account_balance(ctx.existing_public_accounts()[0])
|
||||
.await?;
|
||||
let acc_2_balance = ctx
|
||||
.sequencer_client()
|
||||
.get_account_balance(ACC_RECEIVER.to_string())
|
||||
.get_account_balance(ctx.existing_public_accounts()[1])
|
||||
.await?;
|
||||
|
||||
info!("Balance of sender: {acc_1_balance:#?}");
|
||||
@ -223,14 +219,14 @@ async fn initialize_public_account() -> Result<()> {
|
||||
};
|
||||
|
||||
let command = Command::AuthTransfer(AuthTransferSubcommand::Init {
|
||||
account_id: format_public_account_id(&account_id.to_string()),
|
||||
account_id: format_public_account_id(account_id),
|
||||
});
|
||||
wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?;
|
||||
|
||||
info!("Checking correct execution");
|
||||
let account = ctx
|
||||
.sequencer_client()
|
||||
.get_account(account_id.to_string())
|
||||
.get_account(account_id)
|
||||
.await?
|
||||
.account;
|
||||
|
||||
|
||||
@ -1,23 +0,0 @@
use anyhow::Result;
use integration_tests::TestContext;
use log::info;
use tokio::test;

#[ignore = "needs complicated setup"]
#[test]
// To run this test properly, you need nomos node running in the background.
// For instructions in building nomos node, refer to [this](https://github.com/logos-blockchain/logos-blockchain?tab=readme-ov-file#running-a-logos-blockchain-node).
//
// Recommended to run node locally from build binary.
async fn indexer_run_local_node() -> Result<()> {
let _ctx = TestContext::new_bedrock_local_attached().await?;

info!("Let's observe behaviour");

tokio::time::sleep(std::time::Duration::from_secs(180)).await;

// No way to check state of indexer now
// When it will be a service, then it will become possible.

Ok(())
}
@ -2,8 +2,8 @@ use std::{str::FromStr, time::Duration};

use anyhow::Result;
use integration_tests::{
ACC_SENDER, ACC_SENDER_PRIVATE, TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext,
format_private_account_id, format_public_account_id, verify_commitment_is_in_state,
TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, format_private_account_id,
format_public_account_id, verify_commitment_is_in_state,
};
use key_protocol::key_management::key_tree::chain_index::ChainIndex;
use log::info;
@ -19,7 +19,7 @@ use wallet::cli::{
|
||||
async fn restore_keys_from_seed() -> Result<()> {
|
||||
let mut ctx = TestContext::new().await?;
|
||||
|
||||
let from: AccountId = ACC_SENDER_PRIVATE.parse()?;
|
||||
let from: AccountId = ctx.existing_private_accounts()[0];
|
||||
|
||||
// Create first private account at root
|
||||
let command = Command::Account(AccountSubcommand::New(NewSubcommand::Private {
|
||||
@ -47,8 +47,8 @@ async fn restore_keys_from_seed() -> Result<()> {
|
||||
|
||||
// Send to first private account
|
||||
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
|
||||
from: format_private_account_id(&from.to_string()),
|
||||
to: Some(format_private_account_id(&to_account_id1.to_string())),
|
||||
from: format_private_account_id(from),
|
||||
to: Some(format_private_account_id(to_account_id1)),
|
||||
to_npk: None,
|
||||
to_vpk: None,
|
||||
amount: 100,
|
||||
@ -57,15 +57,15 @@ async fn restore_keys_from_seed() -> Result<()> {
|
||||
|
||||
// Send to second private account
|
||||
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
|
||||
from: format_private_account_id(&from.to_string()),
|
||||
to: Some(format_private_account_id(&to_account_id2.to_string())),
|
||||
from: format_private_account_id(from),
|
||||
to: Some(format_private_account_id(to_account_id2)),
|
||||
to_npk: None,
|
||||
to_vpk: None,
|
||||
amount: 101,
|
||||
});
|
||||
wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?;
|
||||
|
||||
let from: AccountId = ACC_SENDER.parse()?;
|
||||
let from: AccountId = ctx.existing_public_accounts()[0];
|
||||
|
||||
// Create first public account at root
|
||||
let command = Command::Account(AccountSubcommand::New(NewSubcommand::Public {
|
||||
@ -93,8 +93,8 @@ async fn restore_keys_from_seed() -> Result<()> {
|
||||
|
||||
// Send to first public account
|
||||
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
|
||||
from: format_public_account_id(&from.to_string()),
|
||||
to: Some(format_public_account_id(&to_account_id3.to_string())),
|
||||
from: format_public_account_id(from),
|
||||
to: Some(format_public_account_id(to_account_id3)),
|
||||
to_npk: None,
|
||||
to_vpk: None,
|
||||
amount: 102,
|
||||
@ -103,8 +103,8 @@ async fn restore_keys_from_seed() -> Result<()> {
|
||||
|
||||
// Send to second public account
|
||||
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
|
||||
from: format_public_account_id(&from.to_string()),
|
||||
to: Some(format_public_account_id(&to_account_id4.to_string())),
|
||||
from: format_public_account_id(from),
|
||||
to: Some(format_public_account_id(to_account_id4)),
|
||||
to_npk: None,
|
||||
to_vpk: None,
|
||||
amount: 103,
|
||||
@ -166,8 +166,8 @@ async fn restore_keys_from_seed() -> Result<()> {
|
||||
|
||||
// Test that restored accounts can send transactions
|
||||
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
|
||||
from: format_private_account_id(&to_account_id1.to_string()),
|
||||
to: Some(format_private_account_id(&to_account_id2.to_string())),
|
||||
from: format_private_account_id(to_account_id1),
|
||||
to: Some(format_private_account_id(to_account_id2)),
|
||||
to_npk: None,
|
||||
to_vpk: None,
|
||||
amount: 10,
|
||||
@ -175,8 +175,8 @@ async fn restore_keys_from_seed() -> Result<()> {
|
||||
wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?;
|
||||
|
||||
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
|
||||
from: format_public_account_id(&to_account_id3.to_string()),
|
||||
to: Some(format_public_account_id(&to_account_id4.to_string())),
|
||||
from: format_public_account_id(to_account_id3),
|
||||
to: Some(format_public_account_id(to_account_id4)),
|
||||
to_npk: None,
|
||||
to_vpk: None,
|
||||
amount: 11,
|
||||
@ -188,11 +188,11 @@ async fn restore_keys_from_seed() -> Result<()> {
|
||||
// Verify commitments exist for private accounts
|
||||
let comm1 = ctx
|
||||
.wallet()
|
||||
.get_private_account_commitment(&to_account_id1)
|
||||
.get_private_account_commitment(to_account_id1)
|
||||
.expect("Acc 1 commitment should exist");
|
||||
let comm2 = ctx
|
||||
.wallet()
|
||||
.get_private_account_commitment(&to_account_id2)
|
||||
.get_private_account_commitment(to_account_id2)
|
||||
.expect("Acc 2 commitment should exist");
|
||||
|
||||
assert!(verify_commitment_is_in_state(comm1, ctx.sequencer_client()).await);
|
||||
@ -201,11 +201,11 @@ async fn restore_keys_from_seed() -> Result<()> {
|
||||
// Verify public account balances
|
||||
let acc3 = ctx
|
||||
.sequencer_client()
|
||||
.get_account_balance(to_account_id3.to_string())
|
||||
.get_account_balance(to_account_id3)
|
||||
.await?;
|
||||
let acc4 = ctx
|
||||
.sequencer_client()
|
||||
.get_account_balance(to_account_id4.to_string())
|
||||
.get_account_balance(to_account_id4)
|
||||
.await?;
|
||||
|
||||
assert_eq!(acc3.balance, 91); // 102 - 11
|
||||
|
||||
@ -3,8 +3,8 @@ use std::time::Duration;
use anyhow::{Context as _, Result};
use common::PINATA_BASE58;
use integration_tests::{
ACC_SENDER, ACC_SENDER_PRIVATE, TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext,
format_private_account_id, format_public_account_id, verify_commitment_is_in_state,
TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, format_private_account_id,
format_public_account_id, verify_commitment_is_in_state,
};
use log::info;
use tokio::test;
@ -22,12 +22,12 @@ async fn claim_pinata_to_existing_public_account() -> Result<()> {
|
||||
|
||||
let pinata_prize = 150;
|
||||
let command = Command::Pinata(PinataProgramAgnosticSubcommand::Claim {
|
||||
to: format_public_account_id(ACC_SENDER),
|
||||
to: format_public_account_id(ctx.existing_public_accounts()[0]),
|
||||
});
|
||||
|
||||
let pinata_balance_pre = ctx
|
||||
.sequencer_client()
|
||||
.get_account_balance(PINATA_BASE58.to_string())
|
||||
.get_account_balance(PINATA_BASE58.parse().unwrap())
|
||||
.await?
|
||||
.balance;
|
||||
|
||||
@ -39,13 +39,13 @@ async fn claim_pinata_to_existing_public_account() -> Result<()> {
|
||||
info!("Checking correct balance move");
|
||||
let pinata_balance_post = ctx
|
||||
.sequencer_client()
|
||||
.get_account_balance(PINATA_BASE58.to_string())
|
||||
.get_account_balance(PINATA_BASE58.parse().unwrap())
|
||||
.await?
|
||||
.balance;
|
||||
|
||||
let winner_balance_post = ctx
|
||||
.sequencer_client()
|
||||
.get_account_balance(ACC_SENDER.to_string())
|
||||
.get_account_balance(ctx.existing_public_accounts()[0])
|
||||
.await?
|
||||
.balance;
|
||||
|
||||
@ -63,12 +63,12 @@ async fn claim_pinata_to_existing_private_account() -> Result<()> {
|
||||
|
||||
let pinata_prize = 150;
|
||||
let command = Command::Pinata(PinataProgramAgnosticSubcommand::Claim {
|
||||
to: format_private_account_id(ACC_SENDER_PRIVATE),
|
||||
to: format_private_account_id(ctx.existing_private_accounts()[0]),
|
||||
});
|
||||
|
||||
let pinata_balance_pre = ctx
|
||||
.sequencer_client()
|
||||
.get_account_balance(PINATA_BASE58.to_string())
|
||||
.get_account_balance(PINATA_BASE58.parse().unwrap())
|
||||
.await?
|
||||
.balance;
|
||||
|
||||
@ -86,13 +86,13 @@ async fn claim_pinata_to_existing_private_account() -> Result<()> {
|
||||
|
||||
let new_commitment = ctx
|
||||
.wallet()
|
||||
.get_private_account_commitment(&ACC_SENDER_PRIVATE.parse()?)
|
||||
.get_private_account_commitment(ctx.existing_private_accounts()[0])
|
||||
.context("Failed to get private account commitment")?;
|
||||
assert!(verify_commitment_is_in_state(new_commitment, ctx.sequencer_client()).await);
|
||||
|
||||
let pinata_balance_post = ctx
|
||||
.sequencer_client()
|
||||
.get_account_balance(PINATA_BASE58.to_string())
|
||||
.get_account_balance(PINATA_BASE58.parse().unwrap())
|
||||
.await?
|
||||
.balance;
|
||||
|
||||
@ -122,7 +122,7 @@ async fn claim_pinata_to_new_private_account() -> Result<()> {
|
||||
anyhow::bail!("Expected RegisterAccount return value");
|
||||
};
|
||||
|
||||
let winner_account_id_formatted = format_private_account_id(&winner_account_id.to_string());
|
||||
let winner_account_id_formatted = format_private_account_id(winner_account_id);
|
||||
|
||||
// Initialize account under auth transfer program
|
||||
let command = Command::AuthTransfer(AuthTransferSubcommand::Init {
|
||||
@ -135,7 +135,7 @@ async fn claim_pinata_to_new_private_account() -> Result<()> {
|
||||
|
||||
let new_commitment = ctx
|
||||
.wallet()
|
||||
.get_private_account_commitment(&winner_account_id)
|
||||
.get_private_account_commitment(winner_account_id)
|
||||
.context("Failed to get private account commitment")?;
|
||||
assert!(verify_commitment_is_in_state(new_commitment, ctx.sequencer_client()).await);
|
||||
|
||||
@ -146,7 +146,7 @@ async fn claim_pinata_to_new_private_account() -> Result<()> {
|
||||
|
||||
let pinata_balance_pre = ctx
|
||||
.sequencer_client()
|
||||
.get_account_balance(PINATA_BASE58.to_string())
|
||||
.get_account_balance(PINATA_BASE58.parse().unwrap())
|
||||
.await?
|
||||
.balance;
|
||||
|
||||
@ -157,13 +157,13 @@ async fn claim_pinata_to_new_private_account() -> Result<()> {
|
||||
|
||||
let new_commitment = ctx
|
||||
.wallet()
|
||||
.get_private_account_commitment(&winner_account_id)
|
||||
.get_private_account_commitment(winner_account_id)
|
||||
.context("Failed to get private account commitment")?;
|
||||
assert!(verify_commitment_is_in_state(new_commitment, ctx.sequencer_client()).await);
|
||||
|
||||
let pinata_balance_post = ctx
|
||||
.sequencer_client()
|
||||
.get_account_balance(PINATA_BASE58.to_string())
|
||||
.get_account_balance(PINATA_BASE58.parse().unwrap())
|
||||
.await?
|
||||
.balance;
|
||||
|
||||
|
||||
@ -45,11 +45,13 @@ async fn deploy_and_execute_program() -> Result<()> {
let _response = ctx.sequencer_client().send_tx_public(transaction).await?;

info!("Waiting for next block creation");
tokio::time::sleep(Duration::from_secs(TIME_TO_WAIT_FOR_BLOCK_SECONDS)).await;
// Waiting for long time as it may take some time for such a big transaction to be included in a
// block
tokio::time::sleep(Duration::from_secs(2 * TIME_TO_WAIT_FOR_BLOCK_SECONDS)).await;

let post_state_account = ctx
.sequencer_client()
.get_account(account_id.to_string())
.get_account(account_id)
.await?
.account;

@ -63,8 +63,8 @@ async fn create_and_transfer_public_token() -> Result<()> {
let name = "A NAME".to_string();
let total_supply = 37;
let subcommand = TokenProgramAgnosticSubcommand::New {
definition_account_id: format_public_account_id(&definition_account_id.to_string()),
supply_account_id: format_public_account_id(&supply_account_id.to_string()),
definition_account_id: format_public_account_id(definition_account_id),
supply_account_id: format_public_account_id(supply_account_id),
name: name.clone(),
total_supply,
};
@ -76,7 +76,7 @@ async fn create_and_transfer_public_token() -> Result<()> {
|
||||
// Check the status of the token definition account
|
||||
let definition_acc = ctx
|
||||
.sequencer_client()
|
||||
.get_account(definition_account_id.to_string())
|
||||
.get_account(definition_account_id)
|
||||
.await?
|
||||
.account;
|
||||
let token_definition = TokenDefinition::try_from(&definition_acc.data)?;
|
||||
@ -94,7 +94,7 @@ async fn create_and_transfer_public_token() -> Result<()> {
|
||||
// Check the status of the token holding account with the total supply
|
||||
let supply_acc = ctx
|
||||
.sequencer_client()
|
||||
.get_account(supply_account_id.to_string())
|
||||
.get_account(supply_account_id)
|
||||
.await?
|
||||
.account;
|
||||
|
||||
@ -112,8 +112,8 @@ async fn create_and_transfer_public_token() -> Result<()> {
|
||||
// Transfer 7 tokens from supply_acc to recipient_account_id
|
||||
let transfer_amount = 7;
|
||||
let subcommand = TokenProgramAgnosticSubcommand::Send {
|
||||
from: format_public_account_id(&supply_account_id.to_string()),
|
||||
to: Some(format_public_account_id(&recipient_account_id.to_string())),
|
||||
from: format_public_account_id(supply_account_id),
|
||||
to: Some(format_public_account_id(recipient_account_id)),
|
||||
to_npk: None,
|
||||
to_vpk: None,
|
||||
amount: transfer_amount,
|
||||
@ -127,7 +127,7 @@ async fn create_and_transfer_public_token() -> Result<()> {
|
||||
// Check the status of the supply account after transfer
|
||||
let supply_acc = ctx
|
||||
.sequencer_client()
|
||||
.get_account(supply_account_id.to_string())
|
||||
.get_account(supply_account_id)
|
||||
.await?
|
||||
.account;
|
||||
assert_eq!(supply_acc.program_owner, Program::token().id());
|
||||
@ -143,7 +143,7 @@ async fn create_and_transfer_public_token() -> Result<()> {
|
||||
// Check the status of the recipient account after transfer
|
||||
let recipient_acc = ctx
|
||||
.sequencer_client()
|
||||
.get_account(recipient_account_id.to_string())
|
||||
.get_account(recipient_account_id)
|
||||
.await?
|
||||
.account;
|
||||
assert_eq!(recipient_acc.program_owner, Program::token().id());
|
||||
@ -159,8 +159,8 @@ async fn create_and_transfer_public_token() -> Result<()> {
|
||||
// Burn 3 tokens from recipient_acc
|
||||
let burn_amount = 3;
|
||||
let subcommand = TokenProgramAgnosticSubcommand::Burn {
|
||||
definition: format_public_account_id(&definition_account_id.to_string()),
|
||||
holder: format_public_account_id(&recipient_account_id.to_string()),
|
||||
definition: format_public_account_id(definition_account_id),
|
||||
holder: format_public_account_id(recipient_account_id),
|
||||
amount: burn_amount,
|
||||
};
|
||||
|
||||
@ -172,7 +172,7 @@ async fn create_and_transfer_public_token() -> Result<()> {
|
||||
// Check the status of the token definition account after burn
|
||||
let definition_acc = ctx
|
||||
.sequencer_client()
|
||||
.get_account(definition_account_id.to_string())
|
||||
.get_account(definition_account_id)
|
||||
.await?
|
||||
.account;
|
||||
let token_definition = TokenDefinition::try_from(&definition_acc.data)?;
|
||||
@ -189,7 +189,7 @@ async fn create_and_transfer_public_token() -> Result<()> {
|
||||
// Check the status of the recipient account after burn
|
||||
let recipient_acc = ctx
|
||||
.sequencer_client()
|
||||
.get_account(recipient_account_id.to_string())
|
||||
.get_account(recipient_account_id)
|
||||
.await?
|
||||
.account;
|
||||
let token_holding = TokenHolding::try_from(&recipient_acc.data)?;
|
||||
@ -205,8 +205,8 @@ async fn create_and_transfer_public_token() -> Result<()> {
|
||||
// Mint 10 tokens at recipient_acc
|
||||
let mint_amount = 10;
|
||||
let subcommand = TokenProgramAgnosticSubcommand::Mint {
|
||||
definition: format_public_account_id(&definition_account_id.to_string()),
|
||||
holder: Some(format_public_account_id(&recipient_account_id.to_string())),
|
||||
definition: format_public_account_id(definition_account_id),
|
||||
holder: Some(format_public_account_id(recipient_account_id)),
|
||||
holder_npk: None,
|
||||
holder_vpk: None,
|
||||
amount: mint_amount,
|
||||
@ -220,7 +220,7 @@ async fn create_and_transfer_public_token() -> Result<()> {
|
||||
// Check the status of the token definition account after mint
|
||||
let definition_acc = ctx
|
||||
.sequencer_client()
|
||||
.get_account(definition_account_id.to_string())
|
||||
.get_account(definition_account_id)
|
||||
.await?
|
||||
.account;
|
||||
let token_definition = TokenDefinition::try_from(&definition_acc.data)?;
|
||||
@ -237,7 +237,7 @@ async fn create_and_transfer_public_token() -> Result<()> {
|
||||
// Check the status of the recipient account after mint
|
||||
let recipient_acc = ctx
|
||||
.sequencer_client()
|
||||
.get_account(recipient_account_id.to_string())
|
||||
.get_account(recipient_account_id)
|
||||
.await?
|
||||
.account;
|
||||
let token_holding = TokenHolding::try_from(&recipient_acc.data)?;
|
||||
@ -302,8 +302,8 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> {
|
||||
let name = "A NAME".to_string();
|
||||
let total_supply = 37;
|
||||
let subcommand = TokenProgramAgnosticSubcommand::New {
|
||||
definition_account_id: format_public_account_id(&definition_account_id.to_string()),
|
||||
supply_account_id: format_private_account_id(&supply_account_id.to_string()),
|
||||
definition_account_id: format_public_account_id(definition_account_id),
|
||||
supply_account_id: format_private_account_id(supply_account_id),
|
||||
name: name.clone(),
|
||||
total_supply,
|
||||
};
|
||||
@ -316,7 +316,7 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> {
|
||||
// Check the status of the token definition account
|
||||
let definition_acc = ctx
|
||||
.sequencer_client()
|
||||
.get_account(definition_account_id.to_string())
|
||||
.get_account(definition_account_id)
|
||||
.await?
|
||||
.account;
|
||||
let token_definition = TokenDefinition::try_from(&definition_acc.data)?;
|
||||
@ -333,15 +333,15 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> {
|
||||
|
||||
let new_commitment1 = ctx
|
||||
.wallet()
|
||||
.get_private_account_commitment(&supply_account_id)
|
||||
.get_private_account_commitment(supply_account_id)
|
||||
.context("Failed to get supply account commitment")?;
|
||||
assert!(verify_commitment_is_in_state(new_commitment1, ctx.sequencer_client()).await);
|
||||
|
||||
// Transfer 7 tokens from supply_acc to recipient_account_id
|
||||
let transfer_amount = 7;
|
||||
let subcommand = TokenProgramAgnosticSubcommand::Send {
|
||||
from: format_private_account_id(&supply_account_id.to_string()),
|
||||
to: Some(format_private_account_id(&recipient_account_id.to_string())),
|
||||
from: format_private_account_id(supply_account_id),
|
||||
to: Some(format_private_account_id(recipient_account_id)),
|
||||
to_npk: None,
|
||||
to_vpk: None,
|
||||
amount: transfer_amount,
|
||||
@ -354,21 +354,21 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> {
|
||||
|
||||
let new_commitment1 = ctx
|
||||
.wallet()
|
||||
.get_private_account_commitment(&supply_account_id)
|
||||
.get_private_account_commitment(supply_account_id)
|
||||
.context("Failed to get supply account commitment")?;
|
||||
assert!(verify_commitment_is_in_state(new_commitment1, ctx.sequencer_client()).await);
|
||||
|
||||
let new_commitment2 = ctx
|
||||
.wallet()
|
||||
.get_private_account_commitment(&recipient_account_id)
|
||||
.get_private_account_commitment(recipient_account_id)
|
||||
.context("Failed to get recipient account commitment")?;
|
||||
assert!(verify_commitment_is_in_state(new_commitment2, ctx.sequencer_client()).await);
|
||||
|
||||
// Burn 3 tokens from recipient_acc
|
||||
let burn_amount = 3;
|
||||
let subcommand = TokenProgramAgnosticSubcommand::Burn {
|
||||
definition: format_public_account_id(&definition_account_id.to_string()),
|
||||
holder: format_private_account_id(&recipient_account_id.to_string()),
|
||||
definition: format_public_account_id(definition_account_id),
|
||||
holder: format_private_account_id(recipient_account_id),
|
||||
amount: burn_amount,
|
||||
};
|
||||
|
||||
@ -380,7 +380,7 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> {
|
||||
// Check the token definition account after burn
|
||||
let definition_acc = ctx
|
||||
.sequencer_client()
|
||||
.get_account(definition_account_id.to_string())
|
||||
.get_account(definition_account_id)
|
||||
.await?
|
||||
.account;
|
||||
let token_definition = TokenDefinition::try_from(&definition_acc.data)?;
|
||||
@ -396,14 +396,14 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> {
|
||||
|
||||
let new_commitment2 = ctx
|
||||
.wallet()
|
||||
.get_private_account_commitment(&recipient_account_id)
|
||||
.get_private_account_commitment(recipient_account_id)
|
||||
.context("Failed to get recipient account commitment")?;
|
||||
assert!(verify_commitment_is_in_state(new_commitment2, ctx.sequencer_client()).await);
|
||||
|
||||
// Check the recipient account balance after burn
|
||||
let recipient_acc = ctx
|
||||
.wallet()
|
||||
.get_account_private(&recipient_account_id)
|
||||
.get_account_private(recipient_account_id)
|
||||
.context("Failed to get recipient account")?;
|
||||
let token_holding = TokenHolding::try_from(&recipient_acc.data)?;
|
||||
|
||||
@ -458,8 +458,8 @@ async fn create_token_with_private_definition() -> Result<()> {
|
||||
let name = "A NAME".to_string();
|
||||
let total_supply = 37;
|
||||
let subcommand = TokenProgramAgnosticSubcommand::New {
|
||||
definition_account_id: format_private_account_id(&definition_account_id.to_string()),
|
||||
supply_account_id: format_public_account_id(&supply_account_id.to_string()),
|
||||
definition_account_id: format_private_account_id(definition_account_id),
|
||||
supply_account_id: format_public_account_id(supply_account_id),
|
||||
name: name.clone(),
|
||||
total_supply,
|
||||
};
|
||||
@ -472,14 +472,14 @@ async fn create_token_with_private_definition() -> Result<()> {
|
||||
// Verify private definition commitment
|
||||
let new_commitment = ctx
|
||||
.wallet()
|
||||
.get_private_account_commitment(&definition_account_id)
|
||||
.get_private_account_commitment(definition_account_id)
|
||||
.context("Failed to get definition commitment")?;
|
||||
assert!(verify_commitment_is_in_state(new_commitment, ctx.sequencer_client()).await);
|
||||
|
||||
// Verify supply account
|
||||
let supply_acc = ctx
|
||||
.sequencer_client()
|
||||
.get_account(supply_account_id.to_string())
|
||||
.get_account(supply_account_id)
|
||||
.await?
|
||||
.account;
|
||||
|
||||
@ -522,10 +522,8 @@ async fn create_token_with_private_definition() -> Result<()> {
|
||||
// Mint to public account
|
||||
let mint_amount_public = 10;
|
||||
let subcommand = TokenProgramAgnosticSubcommand::Mint {
|
||||
definition: format_private_account_id(&definition_account_id.to_string()),
|
||||
holder: Some(format_public_account_id(
|
||||
&recipient_account_id_public.to_string(),
|
||||
)),
|
||||
definition: format_private_account_id(definition_account_id),
|
||||
holder: Some(format_public_account_id(recipient_account_id_public)),
|
||||
holder_npk: None,
|
||||
holder_vpk: None,
|
||||
amount: mint_amount_public,
|
||||
@ -539,7 +537,7 @@ async fn create_token_with_private_definition() -> Result<()> {
|
||||
// Verify definition account has updated supply
|
||||
let definition_acc = ctx
|
||||
.wallet()
|
||||
.get_account_private(&definition_account_id)
|
||||
.get_account_private(definition_account_id)
|
||||
.context("Failed to get definition account")?;
|
||||
let token_definition = TokenDefinition::try_from(&definition_acc.data)?;
|
||||
|
||||
@ -555,7 +553,7 @@ async fn create_token_with_private_definition() -> Result<()> {
|
||||
// Verify public recipient received tokens
|
||||
let recipient_acc = ctx
|
||||
.sequencer_client()
|
||||
.get_account(recipient_account_id_public.to_string())
|
||||
.get_account(recipient_account_id_public)
|
||||
.await?
|
||||
.account;
|
||||
let token_holding = TokenHolding::try_from(&recipient_acc.data)?;
|
||||
@ -571,10 +569,8 @@ async fn create_token_with_private_definition() -> Result<()> {
|
||||
// Mint to private account
|
||||
let mint_amount_private = 5;
|
||||
let subcommand = TokenProgramAgnosticSubcommand::Mint {
|
||||
definition: format_private_account_id(&definition_account_id.to_string()),
|
||||
holder: Some(format_private_account_id(
|
||||
&recipient_account_id_private.to_string(),
|
||||
)),
|
||||
definition: format_private_account_id(definition_account_id),
|
||||
holder: Some(format_private_account_id(recipient_account_id_private)),
|
||||
holder_npk: None,
|
||||
holder_vpk: None,
|
||||
amount: mint_amount_private,
|
||||
@ -588,14 +584,14 @@ async fn create_token_with_private_definition() -> Result<()> {
|
||||
// Verify private recipient commitment
|
||||
let new_commitment = ctx
|
||||
.wallet()
|
||||
.get_private_account_commitment(&recipient_account_id_private)
|
||||
.get_private_account_commitment(recipient_account_id_private)
|
||||
.context("Failed to get recipient commitment")?;
|
||||
assert!(verify_commitment_is_in_state(new_commitment, ctx.sequencer_client()).await);
|
||||
|
||||
// Verify private recipient balance
|
||||
let recipient_acc_private = ctx
|
||||
.wallet()
|
||||
.get_account_private(&recipient_account_id_private)
|
||||
.get_account_private(recipient_account_id_private)
|
||||
.context("Failed to get private recipient account")?;
|
||||
let token_holding = TokenHolding::try_from(&recipient_acc_private.data)?;
|
||||
|
||||
@ -646,8 +642,8 @@ async fn create_token_with_private_definition_and_supply() -> Result<()> {
|
||||
let name = "A NAME".to_string();
|
||||
let total_supply = 37;
|
||||
let subcommand = TokenProgramAgnosticSubcommand::New {
|
||||
definition_account_id: format_private_account_id(&definition_account_id.to_string()),
|
||||
supply_account_id: format_private_account_id(&supply_account_id.to_string()),
|
||||
definition_account_id: format_private_account_id(definition_account_id),
|
||||
supply_account_id: format_private_account_id(supply_account_id),
|
||||
name,
|
||||
total_supply,
|
||||
};
|
||||
@ -660,21 +656,21 @@ async fn create_token_with_private_definition_and_supply() -> Result<()> {
|
||||
// Verify definition commitment
|
||||
let definition_commitment = ctx
|
||||
.wallet()
|
||||
.get_private_account_commitment(&definition_account_id)
|
||||
.get_private_account_commitment(definition_account_id)
|
||||
.context("Failed to get definition commitment")?;
|
||||
assert!(verify_commitment_is_in_state(definition_commitment, ctx.sequencer_client()).await);
|
||||
|
||||
// Verify supply commitment
|
||||
let supply_commitment = ctx
|
||||
.wallet()
|
||||
.get_private_account_commitment(&supply_account_id)
|
||||
.get_private_account_commitment(supply_account_id)
|
||||
.context("Failed to get supply commitment")?;
|
||||
assert!(verify_commitment_is_in_state(supply_commitment, ctx.sequencer_client()).await);
|
||||
|
||||
// Verify supply balance
|
||||
let supply_acc = ctx
|
||||
.wallet()
|
||||
.get_account_private(&supply_account_id)
|
||||
.get_account_private(supply_account_id)
|
||||
.context("Failed to get supply account")?;
|
||||
let token_holding = TokenHolding::try_from(&supply_acc.data)?;
|
||||
|
||||
@ -702,8 +698,8 @@ async fn create_token_with_private_definition_and_supply() -> Result<()> {
|
||||
// Transfer tokens
|
||||
let transfer_amount = 7;
|
||||
let subcommand = TokenProgramAgnosticSubcommand::Send {
|
||||
from: format_private_account_id(&supply_account_id.to_string()),
|
||||
to: Some(format_private_account_id(&recipient_account_id.to_string())),
|
||||
from: format_private_account_id(supply_account_id),
|
||||
to: Some(format_private_account_id(recipient_account_id)),
|
||||
to_npk: None,
|
||||
to_vpk: None,
|
||||
amount: transfer_amount,
|
||||
@ -717,20 +713,20 @@ async fn create_token_with_private_definition_and_supply() -> Result<()> {
|
||||
// Verify both commitments updated
|
||||
let supply_commitment = ctx
|
||||
.wallet()
|
||||
.get_private_account_commitment(&supply_account_id)
|
||||
.get_private_account_commitment(supply_account_id)
|
||||
.context("Failed to get supply commitment")?;
|
||||
assert!(verify_commitment_is_in_state(supply_commitment, ctx.sequencer_client()).await);
|
||||
|
||||
let recipient_commitment = ctx
|
||||
.wallet()
|
||||
.get_private_account_commitment(&recipient_account_id)
|
||||
.get_private_account_commitment(recipient_account_id)
|
||||
.context("Failed to get recipient commitment")?;
|
||||
assert!(verify_commitment_is_in_state(recipient_commitment, ctx.sequencer_client()).await);
|
||||
|
||||
// Verify balances
|
||||
let supply_acc = ctx
|
||||
.wallet()
|
||||
.get_account_private(&supply_account_id)
|
||||
.get_account_private(supply_account_id)
|
||||
.context("Failed to get supply account")?;
|
||||
let token_holding = TokenHolding::try_from(&supply_acc.data)?;
|
||||
assert_eq!(
|
||||
@ -743,7 +739,7 @@ async fn create_token_with_private_definition_and_supply() -> Result<()> {
|
||||
|
||||
let recipient_acc = ctx
|
||||
.wallet()
|
||||
.get_account_private(&recipient_account_id)
|
||||
.get_account_private(recipient_account_id)
|
||||
.context("Failed to get recipient account")?;
|
||||
let token_holding = TokenHolding::try_from(&recipient_acc.data)?;
|
||||
assert_eq!(
|
||||
@ -806,8 +802,8 @@ async fn shielded_token_transfer() -> Result<()> {
|
||||
let name = "A NAME".to_string();
|
||||
let total_supply = 37;
|
||||
let subcommand = TokenProgramAgnosticSubcommand::New {
|
||||
definition_account_id: format_public_account_id(&definition_account_id.to_string()),
|
||||
supply_account_id: format_public_account_id(&supply_account_id.to_string()),
|
||||
definition_account_id: format_public_account_id(definition_account_id),
|
||||
supply_account_id: format_public_account_id(supply_account_id),
|
||||
name,
|
||||
total_supply,
|
||||
};
|
||||
@ -820,8 +816,8 @@ async fn shielded_token_transfer() -> Result<()> {
|
||||
// Perform shielded transfer: public supply -> private recipient
|
||||
let transfer_amount = 7;
|
||||
let subcommand = TokenProgramAgnosticSubcommand::Send {
|
||||
from: format_public_account_id(&supply_account_id.to_string()),
|
||||
to: Some(format_private_account_id(&recipient_account_id.to_string())),
|
||||
from: format_public_account_id(supply_account_id),
|
||||
to: Some(format_private_account_id(recipient_account_id)),
|
||||
to_npk: None,
|
||||
to_vpk: None,
|
||||
amount: transfer_amount,
|
||||
@ -835,7 +831,7 @@ async fn shielded_token_transfer() -> Result<()> {
|
||||
// Verify supply account balance
|
||||
let supply_acc = ctx
|
||||
.sequencer_client()
|
||||
.get_account(supply_account_id.to_string())
|
||||
.get_account(supply_account_id)
|
||||
.await?
|
||||
.account;
|
||||
let token_holding = TokenHolding::try_from(&supply_acc.data)?;
|
||||
@ -850,14 +846,14 @@ async fn shielded_token_transfer() -> Result<()> {
|
||||
// Verify recipient commitment exists
|
||||
let new_commitment = ctx
|
||||
.wallet()
|
||||
.get_private_account_commitment(&recipient_account_id)
|
||||
.get_private_account_commitment(recipient_account_id)
|
||||
.context("Failed to get recipient commitment")?;
|
||||
assert!(verify_commitment_is_in_state(new_commitment, ctx.sequencer_client()).await);
|
||||
|
||||
// Verify recipient balance
|
||||
let recipient_acc = ctx
|
||||
.wallet()
|
||||
.get_account_private(&recipient_account_id)
|
||||
.get_account_private(recipient_account_id)
|
||||
.context("Failed to get recipient account")?;
|
||||
let token_holding = TokenHolding::try_from(&recipient_acc.data)?;
|
||||
assert_eq!(
|
||||
@ -920,8 +916,8 @@ async fn deshielded_token_transfer() -> Result<()> {
|
||||
let name = "A NAME".to_string();
|
||||
let total_supply = 37;
|
||||
let subcommand = TokenProgramAgnosticSubcommand::New {
|
||||
definition_account_id: format_public_account_id(&definition_account_id.to_string()),
|
||||
supply_account_id: format_private_account_id(&supply_account_id.to_string()),
|
||||
definition_account_id: format_public_account_id(definition_account_id),
|
||||
supply_account_id: format_private_account_id(supply_account_id),
|
||||
name,
|
||||
total_supply,
|
||||
};
|
||||
@ -934,8 +930,8 @@ async fn deshielded_token_transfer() -> Result<()> {
|
||||
// Perform deshielded transfer: private supply -> public recipient
|
||||
let transfer_amount = 7;
|
||||
let subcommand = TokenProgramAgnosticSubcommand::Send {
|
||||
from: format_private_account_id(&supply_account_id.to_string()),
|
||||
to: Some(format_public_account_id(&recipient_account_id.to_string())),
|
||||
from: format_private_account_id(supply_account_id),
|
||||
to: Some(format_public_account_id(recipient_account_id)),
|
||||
to_npk: None,
|
||||
to_vpk: None,
|
||||
amount: transfer_amount,
|
||||
@ -949,14 +945,14 @@ async fn deshielded_token_transfer() -> Result<()> {
|
||||
// Verify supply account commitment exists
|
||||
let new_commitment = ctx
|
||||
.wallet()
|
||||
.get_private_account_commitment(&supply_account_id)
|
||||
.get_private_account_commitment(supply_account_id)
|
||||
.context("Failed to get supply commitment")?;
|
||||
assert!(verify_commitment_is_in_state(new_commitment, ctx.sequencer_client()).await);
|
||||
|
||||
// Verify supply balance
|
||||
let supply_acc = ctx
|
||||
.wallet()
|
||||
.get_account_private(&supply_account_id)
|
||||
.get_account_private(supply_account_id)
|
||||
.context("Failed to get supply account")?;
|
||||
let token_holding = TokenHolding::try_from(&supply_acc.data)?;
|
||||
assert_eq!(
|
||||
@ -970,7 +966,7 @@ async fn deshielded_token_transfer() -> Result<()> {
|
||||
// Verify recipient balance
|
||||
let recipient_acc = ctx
|
||||
.sequencer_client()
|
||||
.get_account(recipient_account_id.to_string())
|
||||
.get_account(recipient_account_id)
|
||||
.await?
|
||||
.account;
|
||||
let token_holding = TokenHolding::try_from(&recipient_acc.data)?;
|
||||
@ -1021,8 +1017,8 @@ async fn token_claiming_path_with_private_accounts() -> Result<()> {
|
||||
let name = "A NAME".to_string();
|
||||
let total_supply = 37;
|
||||
let subcommand = TokenProgramAgnosticSubcommand::New {
|
||||
definition_account_id: format_private_account_id(&definition_account_id.to_string()),
|
||||
supply_account_id: format_private_account_id(&supply_account_id.to_string()),
|
||||
definition_account_id: format_private_account_id(definition_account_id),
|
||||
supply_account_id: format_private_account_id(supply_account_id),
|
||||
name,
|
||||
total_supply,
|
||||
};
|
||||
@ -1050,14 +1046,14 @@ async fn token_claiming_path_with_private_accounts() -> Result<()> {
|
||||
.wallet()
|
||||
.storage()
|
||||
.user_data
|
||||
.get_private_account(&recipient_account_id)
|
||||
.get_private_account(recipient_account_id)
|
||||
.cloned()
|
||||
.context("Failed to get private account keys")?;
|
||||
|
||||
// Mint using claiming path (foreign account)
|
||||
let mint_amount = 9;
|
||||
let subcommand = TokenProgramAgnosticSubcommand::Mint {
|
||||
definition: format_private_account_id(&definition_account_id.to_string()),
|
||||
definition: format_private_account_id(definition_account_id),
|
||||
holder: None,
|
||||
holder_npk: Some(hex::encode(holder_keys.nullifer_public_key.0)),
|
||||
holder_vpk: Some(hex::encode(holder_keys.viewing_public_key.0)),
|
||||
@ -1076,14 +1072,14 @@ async fn token_claiming_path_with_private_accounts() -> Result<()> {
|
||||
// Verify commitment exists
|
||||
let recipient_commitment = ctx
|
||||
.wallet()
|
||||
.get_private_account_commitment(&recipient_account_id)
|
||||
.get_private_account_commitment(recipient_account_id)
|
||||
.context("Failed to get recipient commitment")?;
|
||||
assert!(verify_commitment_is_in_state(recipient_commitment, ctx.sequencer_client()).await);
|
||||
|
||||
// Verify balance
|
||||
let recipient_acc = ctx
|
||||
.wallet()
|
||||
.get_account_private(&recipient_account_id)
|
||||
.get_account_private(recipient_account_id)
|
||||
.context("Failed to get recipient account")?;
|
||||
let token_holding = TokenHolding::try_from(&recipient_acc.data)?;
|
||||
assert_eq!(
|
||||
|
||||
@ -1,8 +1,11 @@
use std::time::{Duration, Instant};

use anyhow::Result;
use integration_tests::TestContext;
use key_protocol::key_management::ephemeral_key_holder::EphemeralKeyHolder;
use integration_tests::{
TestContext,
config::{InitialData, SequencerPartialConfig},
};
use key_protocol::key_management::{KeyChain, ephemeral_key_holder::EphemeralKeyHolder};
use log::info;
use nssa::{
Account, AccountId, PrivacyPreservingTransaction, PrivateKey, PublicKey, PublicTransaction,
@ -15,21 +18,20 @@ use nssa_core::{
account::{AccountWithMetadata, data::Data},
encryption::ViewingPublicKey,
};
use sequencer_core::config::{AccountInitialData, CommitmentsInitialData, SequencerConfig};
use tokio::test;

// TODO: Make a proper benchmark instead of an ad-hoc test
#[test]
pub async fn tps_test() -> Result<()> {
let num_transactions = 300 * 5;
let target_tps = 12;
let target_tps = 8;

let tps_test = TpsTestManager::new(target_tps, num_transactions);
let ctx = TestContext::new_with_sequencer_and_maybe_indexer_configs(
tps_test.generate_sequencer_config(),
None,
)
.await?;
let ctx = TestContext::builder()
.with_sequencer_partial_config(TpsTestManager::generate_sequencer_partial_config())
.with_initial_data(tps_test.generate_initial_data())
.build()
.await?;

let target_time = tps_test.target_time();
info!(
@ -59,12 +61,10 @@ pub async fn tps_test() -> Result<()> {

let tx_obj = ctx
.sequencer_client()
.get_transaction_by_hash(tx_hash.clone())
.get_transaction_by_hash(*tx_hash)
.await
.inspect_err(|err| {
log::warn!(
"Failed to get transaction by hash {tx_hash:#?} with error: {err:#?}"
)
log::warn!("Failed to get transaction by hash {tx_hash} with error: {err:#?}")
});

if let Ok(tx_obj) = tx_obj
@ -151,46 +151,35 @@ impl TpsTestManager {
/// Generates a sequencer configuration with initial balance in a number of public accounts.
/// The transactions generated with the function `build_public_txs` will be valid in a node
/// started with the config from this method.
pub(crate) fn generate_sequencer_config(&self) -> SequencerConfig {
fn generate_initial_data(&self) -> InitialData {
// Create public public keypairs
let initial_public_accounts = self
let public_accounts = self
.public_keypairs
.iter()
.map(|(_, account_id)| AccountInitialData {
account_id: account_id.to_string(),
balance: 10,
})
.map(|(key, _)| (key.clone(), 10))
.collect();

// Generate an initial commitment to be used with the privacy preserving transaction
// created with the `build_privacy_transaction` function.
let sender_nsk = [1; 32];
let sender_npk = NullifierPublicKey::from(&sender_nsk);
let key_chain = KeyChain::new_os_random();
let account = Account {
balance: 100,
nonce: 0xdeadbeef,
program_owner: Program::authenticated_transfer_program().id(),
data: Data::default(),
};
let initial_commitment = CommitmentsInitialData {
npk: sender_npk,
account,
};

SequencerConfig {
home: ".".into(),
override_rust_log: None,
genesis_id: 1,
is_genesis_random: true,
InitialData {
public_accounts,
private_accounts: vec![(key_chain, account)],
}
}

fn generate_sequencer_partial_config() -> SequencerPartialConfig {
SequencerPartialConfig {
max_num_tx_in_block: 300,
mempool_max_size: 10000,
block_create_timeout_millis: 12000,
port: 3040,
initial_accounts: initial_public_accounts,
initial_commitments: vec![initial_commitment],
signing_key: [37; 32],
bedrock_config: None,
retry_pending_blocks_timeout_millis: 1000 * 60 * 4,
mempool_max_size: 10_000,
block_create_timeout_millis: 12_000,
}
}
}

@ -6,12 +6,9 @@ use std::{
};

use anyhow::Result;
use integration_tests::{
ACC_RECEIVER, ACC_SENDER, ACC_SENDER_PRIVATE, BlockingTestContext,
TIME_TO_WAIT_FOR_BLOCK_SECONDS,
};
use integration_tests::{BlockingTestContext, TIME_TO_WAIT_FOR_BLOCK_SECONDS};
use log::info;
use nssa::{Account, AccountId, PublicKey, program::Program};
use nssa::{Account, AccountId, PrivateKey, PublicKey, program::Program};
use nssa_core::program::DEFAULT_PROGRAM_ID;
use tempfile::tempdir;
use wallet::WalletCore;
@ -328,7 +325,7 @@ fn test_wallet_ffi_list_accounts() {
#[test]
fn test_wallet_ffi_get_balance_public() -> Result<()> {
let ctx = BlockingTestContext::new()?;
let account_id: AccountId = ACC_SENDER.parse().unwrap();
let account_id: AccountId = ctx.ctx.existing_public_accounts()[0];
let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx);

let balance = unsafe {
@ -356,7 +353,7 @@ fn test_wallet_ffi_get_balance_public() -> Result<()> {
#[test]
fn test_wallet_ffi_get_account_public() -> Result<()> {
let ctx = BlockingTestContext::new()?;
let account_id: AccountId = ACC_SENDER.parse().unwrap();
let account_id: AccountId = ctx.ctx.existing_public_accounts()[0];
let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx);
let mut out_account = FfiAccount::default();

@ -391,7 +388,7 @@ fn test_wallet_ffi_get_account_public() -> Result<()> {
#[test]
fn test_wallet_ffi_get_public_account_keys() -> Result<()> {
let ctx = BlockingTestContext::new()?;
let account_id: AccountId = ACC_SENDER.parse().unwrap();
let account_id: AccountId = ctx.ctx.existing_public_accounts()[0];
let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx);
let mut out_key = FfiPublicAccountKey::default();

@ -409,7 +406,7 @@ fn test_wallet_ffi_get_public_account_keys() -> Result<()> {
let private_key = ctx
.ctx
.wallet()
.get_account_public_signing_key(&account_id)
.get_account_public_signing_key(account_id)
.unwrap();
PublicKey::new_from_private_key(private_key)
};
@ -428,7 +425,7 @@ fn test_wallet_ffi_get_public_account_keys() -> Result<()> {
#[test]
fn test_wallet_ffi_get_private_account_keys() -> Result<()> {
let ctx = BlockingTestContext::new()?;
let account_id: AccountId = ACC_SENDER_PRIVATE.parse().unwrap();
let account_id: AccountId = ctx.ctx.existing_public_accounts()[0];
let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx);
let mut keys = FfiPrivateAccountKeys::default();

@ -446,7 +443,7 @@ fn test_wallet_ffi_get_private_account_keys() -> Result<()> {
.wallet()
.storage()
.user_data
.get_private_account(&account_id)
.get_private_account(account_id)
.unwrap()
.0;

@ -468,14 +465,15 @@ fn test_wallet_ffi_get_private_account_keys() -> Result<()> {

#[test]
fn test_wallet_ffi_account_id_to_base58() {
let account_id_str = ACC_SENDER;
let account_id: AccountId = account_id_str.parse().unwrap();
let private_key = PrivateKey::new_os_random();
let public_key = PublicKey::new_from_private_key(&private_key);
let account_id = AccountId::from(&public_key);
let ffi_bytes: FfiBytes32 = (&account_id).into();
let ptr = unsafe { wallet_ffi_account_id_to_base58((&ffi_bytes) as *const FfiBytes32) };

let ffi_result = unsafe { CStr::from_ptr(ptr).to_str().unwrap() };

assert_eq!(account_id_str, ffi_result);
assert_eq!(account_id.to_string(), ffi_result);

unsafe {
wallet_ffi_free_string(ptr);
@ -484,8 +482,11 @@ fn test_wallet_ffi_account_id_to_base58() {

#[test]
fn test_wallet_ffi_base58_to_account_id() {
let account_id_str = ACC_SENDER;
let account_id_c_str = CString::new(account_id_str).unwrap();
let private_key = PrivateKey::new_os_random();
let public_key = PublicKey::new_from_private_key(&private_key);
let account_id = AccountId::from(&public_key);
let account_id_str = account_id.to_string();
let account_id_c_str = CString::new(account_id_str.clone()).unwrap();
let account_id: AccountId = unsafe {
let mut out_account_id_bytes = FfiBytes32::default();
wallet_ffi_account_id_from_base58(
@ -566,8 +567,8 @@ fn test_wallet_ffi_init_public_account_auth_transfer() -> Result<()> {
fn test_wallet_ffi_transfer_public() -> Result<()> {
let ctx = BlockingTestContext::new().unwrap();
let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx);
let from: FfiBytes32 = (&ACC_SENDER.parse::<AccountId>().unwrap()).into();
let to: FfiBytes32 = (&ACC_RECEIVER.parse::<AccountId>().unwrap()).into();
let from: FfiBytes32 = (&ctx.ctx.existing_public_accounts()[0]).into();
let to: FfiBytes32 = (&ctx.ctx.existing_public_accounts()[1]).into();
let amount: [u8; 16] = 100u128.to_le_bytes();

let mut transfer_result = FfiTransferResult::default();

@ -22,4 +22,4 @@ aes-gcm.workspace = true
bip39.workspace = true
hmac-sha512.workspace = true
thiserror.workspace = true
itertools.workspace = true
itertools.workspace = true

@ -272,7 +272,7 @@ impl KeyTree<ChildKeysPublic> {
while let Some(curr_id) = id_stack.pop() {
if let Some(node) = self.key_map.get(&curr_id) {
let address = node.account_id();
let node_acc = client.get_account(address.to_string()).await?.account;
let node_acc = client.get_account(address).await?.account;

if node_acc == nssa::Account::default() && curr_id != ChainIndex::root() {
self.remove(address);
@ -307,7 +307,7 @@ impl KeyTree<ChildKeysPublic> {
for id in ChainIndex::chain_ids_at_depth(i) {
if let Some(node) = self.key_map.get(&id) {
let address = node.account_id();
let node_acc = client.get_account(address.to_string()).await?.account;
let node_acc = client.get_account(address).await?.account;

if node_acc == nssa::Account::default() {
let addr = node.account_id();

@ -64,11 +64,11 @@ impl SeedHolder {
}

// Safe unwrap
*hash.first_chunk::<32>().unwrap()
HashType(*hash.first_chunk::<32>().unwrap())
}

pub fn produce_top_secret_key_holder(&self) -> SecretSpendingKey {
SecretSpendingKey(self.generate_secret_spending_key_hash())
SecretSpendingKey(self.generate_secret_spending_key_hash().into())
}
}

@ -109,10 +109,9 @@ impl SecretSpendingKey {
hasher.update(index.to_le_bytes());
hasher.update(SUFFIX_2);

<HashType>::from(hasher.finalize_fixed())
hasher.finalize_fixed().into()
}

// TODO: this should use index
pub fn produce_private_key_holder(&self, index: Option<u32>) -> PrivateKeyHolder {
PrivateKeyHolder {
nullifier_secret_key: self.generate_nullifier_secret_key(index),

@ -1,4 +1,4 @@
use std::collections::HashMap;
use std::collections::BTreeMap;

use anyhow::Result;
use k256::AffinePoint;
@ -15,10 +15,10 @@ pub type PublicKey = AffinePoint;
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct NSSAUserData {
/// Default public accounts
pub default_pub_account_signing_keys: HashMap<nssa::AccountId, nssa::PrivateKey>,
pub default_pub_account_signing_keys: BTreeMap<nssa::AccountId, nssa::PrivateKey>,
/// Default private accounts
pub default_user_private_accounts:
HashMap<nssa::AccountId, (KeyChain, nssa_core::account::Account)>,
BTreeMap<nssa::AccountId, (KeyChain, nssa_core::account::Account)>,
/// Tree of public keys
pub public_key_tree: KeyTreePublic,
/// Tree of private keys
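The switch from HashMap to BTreeMap in these fields gives the wallet data a deterministic key order; the motivation is not stated in this hunk, so the sketch below only illustrates that ordering property under that assumption (the helper itself is not part of the crate):

use std::collections::BTreeMap;

// BTreeMap iterates keys in sorted order, so anything derived from iteration
// (serialized storage, account listings) is stable across runs, unlike HashMap.
fn ordered_account_ids(
    keys: &BTreeMap<nssa::AccountId, nssa::PrivateKey>,
) -> Vec<nssa::AccountId> {
    keys.keys().copied().collect()
}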
@ -27,7 +27,7 @@ pub struct NSSAUserData {

impl NSSAUserData {
fn valid_public_key_transaction_pairing_check(
accounts_keys_map: &HashMap<nssa::AccountId, nssa::PrivateKey>,
accounts_keys_map: &BTreeMap<nssa::AccountId, nssa::PrivateKey>,
) -> bool {
let mut check_res = true;
for (account_id, key) in accounts_keys_map {
@ -42,7 +42,7 @@ impl NSSAUserData {
}

fn valid_private_key_transaction_pairing_check(
accounts_keys_map: &HashMap<nssa::AccountId, (KeyChain, nssa_core::account::Account)>,
accounts_keys_map: &BTreeMap<nssa::AccountId, (KeyChain, nssa_core::account::Account)>,
) -> bool {
let mut check_res = true;
for (account_id, (key, _)) in accounts_keys_map {
@ -56,8 +56,8 @@ impl NSSAUserData {
}

pub fn new_with_accounts(
default_accounts_keys: HashMap<nssa::AccountId, nssa::PrivateKey>,
default_accounts_key_chains: HashMap<
default_accounts_keys: BTreeMap<nssa::AccountId, nssa::PrivateKey>,
default_accounts_key_chains: BTreeMap<
nssa::AccountId,
(KeyChain, nssa_core::account::Account),
>,
@ -106,14 +106,14 @@ impl NSSAUserData {
/// Returns the signing key for public transaction signatures
pub fn get_pub_account_signing_key(
&self,
account_id: &nssa::AccountId,
account_id: nssa::AccountId,
) -> Option<&nssa::PrivateKey> {
// First seek in defaults
if let Some(key) = self.default_pub_account_signing_keys.get(account_id) {
if let Some(key) = self.default_pub_account_signing_keys.get(&account_id) {
Some(key)
// Then seek in tree
} else {
self.public_key_tree.get_node(*account_id).map(Into::into)
self.public_key_tree.get_node(account_id).map(Into::into)
}
}

@ -139,14 +139,14 @@ impl NSSAUserData {
/// Returns the signing key for public transaction signatures
pub fn get_private_account(
&self,
account_id: &nssa::AccountId,
account_id: nssa::AccountId,
) -> Option<&(KeyChain, nssa_core::account::Account)> {
// First seek in defaults
if let Some(key) = self.default_user_private_accounts.get(account_id) {
if let Some(key) = self.default_user_private_accounts.get(&account_id) {
Some(key)
// Then seek in tree
} else {
self.private_key_tree.get_node(*account_id).map(Into::into)
self.private_key_tree.get_node(account_id).map(Into::into)
}
}

@ -166,20 +166,30 @@ impl NSSAUserData {
}
}

pub fn account_ids(&self) -> impl Iterator<Item = &nssa::AccountId> {
pub fn account_ids(&self) -> impl Iterator<Item = nssa::AccountId> {
self.public_account_ids().chain(self.private_account_ids())
}

pub fn public_account_ids(&self) -> impl Iterator<Item = nssa::AccountId> {
self.default_pub_account_signing_keys
.keys()
.chain(self.public_key_tree.account_id_map.keys())
.chain(self.default_user_private_accounts.keys())
.chain(self.private_key_tree.account_id_map.keys())
.copied()
.chain(self.public_key_tree.account_id_map.keys().copied())
}

pub fn private_account_ids(&self) -> impl Iterator<Item = nssa::AccountId> {
self.default_user_private_accounts
.keys()
.copied()
.chain(self.private_key_tree.account_id_map.keys().copied())
}
}

impl Default for NSSAUserData {
fn default() -> Self {
Self::new_with_accounts(
HashMap::new(),
HashMap::new(),
BTreeMap::new(),
BTreeMap::new(),
KeyTreePublic::new(&SeedHolder::new_mnemonic("default".to_string())),
KeyTreePrivate::new(&SeedHolder::new_mnemonic("default".to_string())),
)
@ -198,16 +208,13 @@ mod tests {
let (account_id_private, _) = user_data
.generate_new_privacy_preserving_transaction_key_chain(Some(ChainIndex::root()));

let is_key_chain_generated = user_data.get_private_account(&account_id_private).is_some();
let is_key_chain_generated = user_data.get_private_account(account_id_private).is_some();

assert!(is_key_chain_generated);

let account_id_private_str = account_id_private.to_string();
println!("{account_id_private_str:#?}");
let key_chain = &user_data
.get_private_account(&account_id_private)
.unwrap()
.0;
let key_chain = &user_data.get_private_account(account_id_private).unwrap().0;
println!("{key_chain:#?}");
}
}

@ -16,7 +16,6 @@ borsh.workspace = true
hex.workspace = true
secp256k1 = "0.31.1"
risc0-binfmt = "3.0.2"
bytemuck = "1.24.0"
log.workspace = true

[build-dependencies]
@ -25,6 +24,7 @@ risc0-binfmt = "3.0.2"

[dev-dependencies]
token_core.workspace = true
amm_core.workspace = true
test_program_methods.workspace = true

env_logger.workspace = true

@ -8,12 +8,9 @@ license = { workspace = true }
risc0-zkvm.workspace = true
borsh.workspace = true
serde.workspace = true
serde_with.workspace = true
thiserror.workspace = true
bytemuck.workspace = true
base58.workspace = true
k256 = { workspace = true, optional = true }
base58 = { workspace = true, optional = true }
anyhow = { workspace = true, optional = true }

chacha20 = { version = "0.9", default-features = false }

[dev-dependencies]
@ -21,4 +20,4 @@ serde_json.workspace = true

[features]
default = []
host = ["dep:k256", "dep:base58", "dep:anyhow"]
host = ["dep:k256"]

@ -1,11 +1,10 @@
#[cfg(feature = "host")]
use std::{fmt::Display, str::FromStr};

#[cfg(feature = "host")]
use base58::{FromBase58, ToBase58};
use borsh::{BorshDeserialize, BorshSerialize};
pub use data::Data;
use serde::{Deserialize, Serialize};
use serde_with::{DeserializeFromStr, SerializeDisplay};

use crate::program::ProgramId;

@ -47,8 +46,8 @@ impl AccountWithMetadata {
Default,
Copy,
Clone,
Serialize,
Deserialize,
SerializeDisplay,
DeserializeFromStr,
PartialEq,
Eq,
Hash,
@ -80,23 +79,19 @@ impl AsRef<[u8]> for AccountId {
}
}

#[cfg(feature = "host")]
#[derive(Debug, thiserror::Error)]
pub enum AccountIdError {
#[error("invalid base58")]
InvalidBase58(#[from] anyhow::Error),
#[error("invalid base58: {0:?}")]
InvalidBase58(base58::FromBase58Error),
#[error("invalid length: expected 32 bytes, got {0}")]
InvalidLength(usize),
}

#[cfg(feature = "host")]
impl FromStr for AccountId {
type Err = AccountIdError;

fn from_str(s: &str) -> Result<Self, Self::Err> {
let bytes = s
.from_base58()
.map_err(|err| anyhow::anyhow!("Invalid base58 err {err:?}"))?;
let bytes = s.from_base58().map_err(AccountIdError::InvalidBase58)?;
if bytes.len() != 32 {
return Err(AccountIdError::InvalidLength(bytes.len()));
}
@ -106,7 +101,6 @@ impl FromStr for AccountId {
}
}

#[cfg(feature = "host")]
impl Display for AccountId {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.value.to_base58())

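Together, the Display and FromStr implementations above make the 32-byte id round-trippable through base58 with typed errors; a minimal usage sketch under the host feature (the helper itself is illustrative, not part of the crate):

#[cfg(feature = "host")]
fn base58_round_trip(id: AccountId) -> Result<AccountId, AccountIdError> {
    // Display renders the id as base58; FromStr parses it back, rejecting
    // malformed input or anything that is not exactly 32 bytes.
    let encoded = id.to_string();
    encoded.parse::<AccountId>()
}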
@ -81,7 +81,7 @@ impl PrivacyPreservingTransaction {
let signer_account_ids = self.signer_account_ids();
// Check nonces correspond to the current nonces on the public state.
for (account_id, nonce) in signer_account_ids.iter().zip(&message.nonces) {
let current_nonce = state.get_account_by_id(account_id).nonce;
let current_nonce = state.get_account_by_id(*account_id).nonce;
if current_nonce != *nonce {
return Err(NssaError::InvalidInput("Nonce mismatch".into()));
}
@ -93,7 +93,7 @@ impl PrivacyPreservingTransaction {
.iter()
.map(|account_id| {
AccountWithMetadata::new(
state.get_account_by_id(account_id),
state.get_account_by_id(*account_id),
signer_account_ids.contains(account_id),
*account_id,
)

@ -83,7 +83,7 @@ impl PublicTransaction {
let signer_account_ids = self.signer_account_ids();
// Check nonces correspond to the current nonces on the public state.
for (account_id, nonce) in signer_account_ids.iter().zip(&message.nonces) {
let current_nonce = state.get_account_by_id(account_id).nonce;
let current_nonce = state.get_account_by_id(*account_id).nonce;
if current_nonce != *nonce {
return Err(NssaError::InvalidInput("Nonce mismatch".into()));
}
@ -95,7 +95,7 @@ impl PublicTransaction {
.iter()
.map(|account_id| {
AccountWithMetadata::new(
state.get_account_by_id(account_id),
state.get_account_by_id(*account_id),
signer_account_ids.contains(account_id),
*account_id,
)
@ -147,7 +147,7 @@ impl PublicTransaction {
let expected_pre = state_diff
.get(&account_id)
.cloned()
.unwrap_or_else(|| state.get_account_by_id(&account_id));
.unwrap_or_else(|| state.get_account_by_id(account_id));
if pre.account != expected_pre {
return Err(NssaError::InvalidProgramBehavior);
}
@ -202,7 +202,7 @@ impl PublicTransaction {

// Check that all modified uninitialized accounts were claimed
for post in state_diff.iter().filter_map(|(account_id, post)| {
let pre = state.get_account_by_id(account_id);
let pre = state.get_account_by_id(*account_id);
if pre.program_owner != DEFAULT_PROGRAM_ID {
return None;
}

@ -221,9 +221,9 @@ impl V02State {
self.public_state.entry(account_id).or_default()
}

pub fn get_account_by_id(&self, account_id: &AccountId) -> Account {
pub fn get_account_by_id(&self, account_id: AccountId) -> Account {
self.public_state
.get(account_id)
.get(&account_id)
.cloned()
.unwrap_or(Account::default())
}
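Because AccountId derives Copy (see the derive list earlier in this diff), the accessor now takes the id by value and every call site below simply drops the leading `&`. A small illustrative sketch of the calling pattern (state and id are placeholders from the surrounding tests):

let id = AccountId::new([1; 32]);
let before = state.get_account_by_id(id); // id is Copy, so it is not moved
let after = state.get_account_by_id(id);  // and can be reused freely
assert_eq!(before, after);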
@ -311,6 +311,7 @@ pub mod tests {

use std::collections::HashMap;

use amm_core::PoolDefinition;
use nssa_core::{
Commitment, Nullifier, NullifierPublicKey, NullifierSecretKey, SharedSecretKey,
account::{Account, AccountId, AccountWithMetadata, Nonce, data::Data},
@ -416,7 +417,7 @@ pub mod tests {
let state = V02State::new_with_genesis_accounts(&initial_data, &[]);
let expected_account = state.public_state.get(&account_id).unwrap();

let account = state.get_account_by_id(&account_id);
let account = state.get_account_by_id(account_id);

assert_eq!(&account, expected_account);
}
@ -427,7 +428,7 @@ pub mod tests {
let state = V02State::new_with_genesis_accounts(&[], &[]);
let expected_account = Account::default();

let account = state.get_account_by_id(&addr2);
let account = state.get_account_by_id(addr2);

assert_eq!(account, expected_account);
}
@ -449,16 +450,16 @@ pub mod tests {
let mut state = V02State::new_with_genesis_accounts(&initial_data, &[]);
let from = account_id;
let to = AccountId::new([2; 32]);
assert_eq!(state.get_account_by_id(&to), Account::default());
assert_eq!(state.get_account_by_id(to), Account::default());
let balance_to_move = 5;

let tx = transfer_transaction(from, key, 0, to, balance_to_move);
state.transition_from_public_transaction(&tx).unwrap();

assert_eq!(state.get_account_by_id(&from).balance, 95);
assert_eq!(state.get_account_by_id(&to).balance, 5);
assert_eq!(state.get_account_by_id(&from).nonce, 1);
assert_eq!(state.get_account_by_id(&to).nonce, 0);
assert_eq!(state.get_account_by_id(from).balance, 95);
assert_eq!(state.get_account_by_id(to).balance, 5);
assert_eq!(state.get_account_by_id(from).nonce, 1);
assert_eq!(state.get_account_by_id(to).nonce, 0);
}

#[test]
@ -471,16 +472,16 @@ pub mod tests {
let from_key = key;
let to = AccountId::new([2; 32]);
let balance_to_move = 101;
assert!(state.get_account_by_id(&from).balance < balance_to_move);
assert!(state.get_account_by_id(from).balance < balance_to_move);

let tx = transfer_transaction(from, from_key, 0, to, balance_to_move);
let result = state.transition_from_public_transaction(&tx);

assert!(matches!(result, Err(NssaError::ProgramExecutionFailed(_))));
assert_eq!(state.get_account_by_id(&from).balance, 100);
assert_eq!(state.get_account_by_id(&to).balance, 0);
assert_eq!(state.get_account_by_id(&from).nonce, 0);
assert_eq!(state.get_account_by_id(&to).nonce, 0);
assert_eq!(state.get_account_by_id(from).balance, 100);
assert_eq!(state.get_account_by_id(to).balance, 0);
assert_eq!(state.get_account_by_id(from).nonce, 0);
assert_eq!(state.get_account_by_id(to).nonce, 0);
}

#[test]
@ -494,16 +495,16 @@ pub mod tests {
let from = account_id2;
let from_key = key2;
let to = account_id1;
assert_ne!(state.get_account_by_id(&to), Account::default());
assert_ne!(state.get_account_by_id(to), Account::default());
let balance_to_move = 8;

let tx = transfer_transaction(from, from_key, 0, to, balance_to_move);
state.transition_from_public_transaction(&tx).unwrap();

assert_eq!(state.get_account_by_id(&from).balance, 192);
assert_eq!(state.get_account_by_id(&to).balance, 108);
assert_eq!(state.get_account_by_id(&from).nonce, 1);
assert_eq!(state.get_account_by_id(&to).nonce, 0);
assert_eq!(state.get_account_by_id(from).balance, 192);
assert_eq!(state.get_account_by_id(to).balance, 108);
assert_eq!(state.get_account_by_id(from).nonce, 1);
assert_eq!(state.get_account_by_id(to).nonce, 0);
}

#[test]
@ -523,12 +524,12 @@ pub mod tests {
let tx = transfer_transaction(account_id2, key2, 0, account_id3, balance_to_move);
state.transition_from_public_transaction(&tx).unwrap();

assert_eq!(state.get_account_by_id(&account_id1).balance, 95);
assert_eq!(state.get_account_by_id(&account_id2).balance, 2);
assert_eq!(state.get_account_by_id(&account_id3).balance, 3);
assert_eq!(state.get_account_by_id(&account_id1).nonce, 1);
assert_eq!(state.get_account_by_id(&account_id2).nonce, 1);
assert_eq!(state.get_account_by_id(&account_id3).nonce, 0);
assert_eq!(state.get_account_by_id(account_id1).balance, 95);
assert_eq!(state.get_account_by_id(account_id2).balance, 2);
assert_eq!(state.get_account_by_id(account_id3).balance, 3);
assert_eq!(state.get_account_by_id(account_id1).nonce, 1);
assert_eq!(state.get_account_by_id(account_id2).nonce, 1);
assert_eq!(state.get_account_by_id(account_id3).nonce, 0);
}

impl V02State {
@ -655,7 +656,7 @@ pub mod tests {
let mut state =
V02State::new_with_genesis_accounts(&initial_data, &[]).with_test_programs();
let account_id = AccountId::new([1; 32]);
let account = state.get_account_by_id(&account_id);
let account = state.get_account_by_id(account_id);
// Assert the target account only differs from the default account in the program owner
// field
assert_ne!(account.program_owner, Account::default().program_owner);
@ -680,7 +681,7 @@ pub mod tests {
.with_test_programs()
.with_non_default_accounts_but_default_program_owners();
let account_id = AccountId::new([255; 32]);
let account = state.get_account_by_id(&account_id);
let account = state.get_account_by_id(account_id);
// Assert the target account only differs from the default account in balance field
assert_eq!(account.program_owner, Account::default().program_owner);
assert_ne!(account.balance, Account::default().balance);
@ -704,7 +705,7 @@ pub mod tests {
.with_test_programs()
.with_non_default_accounts_but_default_program_owners();
let account_id = AccountId::new([254; 32]);
let account = state.get_account_by_id(&account_id);
let account = state.get_account_by_id(account_id);
// Assert the target account only differs from the default account in nonce field
assert_eq!(account.program_owner, Account::default().program_owner);
assert_eq!(account.balance, Account::default().balance);
@ -728,7 +729,7 @@ pub mod tests {
.with_test_programs()
.with_non_default_accounts_but_default_program_owners();
let account_id = AccountId::new([253; 32]);
let account = state.get_account_by_id(&account_id);
let account = state.get_account_by_id(account_id);
// Assert the target account only differs from the default account in data field
assert_eq!(account.program_owner, Account::default().program_owner);
assert_eq!(account.balance, Account::default().balance);
@ -755,7 +756,7 @@ pub mod tests {
let balance_to_move: u128 = 1;
let program_id = Program::simple_balance_transfer().id();
assert_ne!(
state.get_account_by_id(&sender_account_id).program_owner,
state.get_account_by_id(sender_account_id).program_owner,
program_id
);
let message = public_transaction::Message::try_new(
@ -782,9 +783,9 @@ pub mod tests {
let account_id = AccountId::new([255; 32]);
let program_id = Program::data_changer().id();

assert_ne!(state.get_account_by_id(&account_id), Account::default());
assert_ne!(state.get_account_by_id(account_id), Account::default());
assert_ne!(
state.get_account_by_id(&account_id).program_owner,
state.get_account_by_id(account_id).program_owner,
program_id
);
let message =
@ -825,11 +826,11 @@ pub mod tests {
let program_id = Program::burner().id();
let account_id = AccountId::new([252; 32]);
assert_eq!(
state.get_account_by_id(&account_id).program_owner,
state.get_account_by_id(account_id).program_owner,
program_id
);
let balance_to_burn: u128 = 1;
assert!(state.get_account_by_id(&account_id).balance > balance_to_burn);
assert!(state.get_account_by_id(account_id).balance > balance_to_burn);

let message = public_transaction::Message::try_new(
program_id,
@ -897,7 +898,7 @@ pub mod tests {
state: &V02State,
) -> PrivacyPreservingTransaction {
let sender = AccountWithMetadata::new(
state.get_account_by_id(&sender_keys.account_id()),
state.get_account_by_id(sender_keys.account_id()),
true,
sender_keys.account_id(),
);
@ -1001,7 +1002,7 @@ pub mod tests {
let sender_pre =
AccountWithMetadata::new(sender_private_account.clone(), true, &sender_keys.npk());
let recipient_pre = AccountWithMetadata::new(
state.get_account_by_id(recipient_account_id),
state.get_account_by_id(*recipient_account_id),
false,
*recipient_account_id,
);
@ -1053,7 +1054,7 @@ pub mod tests {
);

let expected_sender_post = {
let mut this = state.get_account_by_id(&sender_keys.account_id());
let mut this = state.get_account_by_id(sender_keys.account_id());
this.balance -= balance_to_move;
this.nonce += 1;
this
@ -1066,12 +1067,12 @@ pub mod tests {
.transition_from_privacy_preserving_transaction(&tx)
.unwrap();

let sender_post = state.get_account_by_id(&sender_keys.account_id());
let sender_post = state.get_account_by_id(sender_keys.account_id());
assert_eq!(sender_post, expected_sender_post);
assert!(state.private_state.0.contains(&expected_new_commitment));

assert_eq!(
state.get_account_by_id(&sender_keys.account_id()).balance,
state.get_account_by_id(sender_keys.account_id()).balance,
200 - balance_to_move
);
}
@ -1162,7 +1163,7 @@ pub mod tests {
let balance_to_move = 37;

let expected_recipient_post = {
let mut this = state.get_account_by_id(&recipient_keys.account_id());
let mut this = state.get_account_by_id(recipient_keys.account_id());
this.balance += balance_to_move;
this
};
@ -1198,15 +1199,13 @@ pub mod tests {
.transition_from_privacy_preserving_transaction(&tx)
.unwrap();

let recipient_post = state.get_account_by_id(&recipient_keys.account_id());
let recipient_post = state.get_account_by_id(recipient_keys.account_id());
assert_eq!(recipient_post, expected_recipient_post);
assert!(state.private_state.0.contains(&sender_pre_commitment));
assert!(state.private_state.0.contains(&expected_new_commitment));
assert!(state.private_state.1.contains(&expected_new_nullifier));
assert_eq!(
state
.get_account_by_id(&recipient_keys.account_id())
.balance,
state.get_account_by_id(recipient_keys.account_id()).balance,
recipient_initial_balance + balance_to_move
);
}
@ -2226,7 +2225,7 @@ pub mod tests {
let amount: u128 = 37;

// Check the recipient is an uninitialized account
assert_eq!(state.get_account_by_id(&to), Account::default());
assert_eq!(state.get_account_by_id(to), Account::default());

let expected_recipient_post = Account {
program_owner: program.id(),
@ -2242,7 +2241,7 @@ pub mod tests {

state.transition_from_public_transaction(&tx).unwrap();

let recipient_post = state.get_account_by_id(&to);
let recipient_post = state.get_account_by_id(to);

assert_eq!(recipient_post, expected_recipient_post);
}
@ -2285,8 +2284,8 @@ pub mod tests {

state.transition_from_public_transaction(&tx).unwrap();

let from_post = state.get_account_by_id(&from);
let to_post = state.get_account_by_id(&to);
let from_post = state.get_account_by_id(from);
let to_post = state.get_account_by_id(to);
// The `chain_caller` program calls the program twice
assert_eq!(from_post.balance, initial_balance - 2 * amount);
assert_eq!(to_post, expected_to_post);
@ -2329,137 +2328,6 @@ pub mod tests {
));
}

// TODO repeated code should ultimately be removed;
fn compute_pool_pda(
amm_program_id: ProgramId,
definition_token_a_id: AccountId,
definition_token_b_id: AccountId,
) -> AccountId {
AccountId::from((
&amm_program_id,
&compute_pool_pda_seed(definition_token_a_id, definition_token_b_id),
))
}

fn compute_pool_pda_seed(
definition_token_a_id: AccountId,
definition_token_b_id: AccountId,
) -> PdaSeed {
use risc0_zkvm::sha::{Impl, Sha256};

let mut i: usize = 0;
let (token_1, token_2) = loop {
if definition_token_a_id.value()[i] > definition_token_b_id.value()[i] {
let token_1 = definition_token_a_id;
let token_2 = definition_token_b_id;
break (token_1, token_2);
} else if definition_token_a_id.value()[i] < definition_token_b_id.value()[i] {
let token_1 = definition_token_b_id;
let token_2 = definition_token_a_id;
break (token_1, token_2);
}

if i == 32 {
panic!("Definitions match");
} else {
i += 1;
}
};

let mut bytes = [0; 64];
bytes[0..32].copy_from_slice(&token_1.to_bytes());
bytes[32..].copy_from_slice(&token_2.to_bytes());

PdaSeed::new(
Impl::hash_bytes(&bytes)
.as_bytes()
.try_into()
.expect("Hash output must be exactly 32 bytes long"),
)
}

fn compute_vault_pda(
amm_program_id: ProgramId,
pool_id: AccountId,
definition_token_id: AccountId,
) -> AccountId {
AccountId::from((
&amm_program_id,
&compute_vault_pda_seed(pool_id, definition_token_id),
))
}

fn compute_vault_pda_seed(pool_id: AccountId, definition_token_id: AccountId) -> PdaSeed {
use risc0_zkvm::sha::{Impl, Sha256};

let mut bytes = [0; 64];
bytes[0..32].copy_from_slice(&pool_id.to_bytes());
bytes[32..].copy_from_slice(&definition_token_id.to_bytes());

PdaSeed::new(
Impl::hash_bytes(&bytes)
.as_bytes()
.try_into()
.expect("Hash output must be exactly 32 bytes long"),
)
}

fn compute_liquidity_token_pda(amm_program_id: ProgramId, pool_id: AccountId) -> AccountId {
AccountId::from((&amm_program_id, &compute_liquidity_token_pda_seed(pool_id)))
}

fn compute_liquidity_token_pda_seed(pool_id: AccountId) -> PdaSeed {
use risc0_zkvm::sha::{Impl, Sha256};

let mut bytes = [0; 64];
bytes[0..32].copy_from_slice(&pool_id.to_bytes());
bytes[32..].copy_from_slice(&[0; 32]);

PdaSeed::new(
Impl::hash_bytes(&bytes)
.as_bytes()
.try_into()
.expect("Hash output must be exactly 32 bytes long"),
)
}

const POOL_DEFINITION_DATA_SIZE: usize = 225;

#[derive(Default)]
struct PoolDefinition {
definition_token_a_id: AccountId,
definition_token_b_id: AccountId,
vault_a_id: AccountId,
vault_b_id: AccountId,
liquidity_pool_id: AccountId,
liquidity_pool_supply: u128,
reserve_a: u128,
reserve_b: u128,
fees: u128,
active: bool,
}

impl PoolDefinition {
fn into_data(self) -> Data {
let mut bytes = [0; POOL_DEFINITION_DATA_SIZE];
bytes[0..32].copy_from_slice(&self.definition_token_a_id.to_bytes());
bytes[32..64].copy_from_slice(&self.definition_token_b_id.to_bytes());
bytes[64..96].copy_from_slice(&self.vault_a_id.to_bytes());
bytes[96..128].copy_from_slice(&self.vault_b_id.to_bytes());
bytes[128..160].copy_from_slice(&self.liquidity_pool_id.to_bytes());
bytes[160..176].copy_from_slice(&self.liquidity_pool_supply.to_le_bytes());
bytes[176..192].copy_from_slice(&self.reserve_a.to_le_bytes());
bytes[192..208].copy_from_slice(&self.reserve_b.to_le_bytes());
bytes[208..224].copy_from_slice(&self.fees.to_le_bytes());
bytes[224] = self.active as u8;

bytes
.to_vec()
.try_into()
.expect("225 bytes should fit into Data")
}
}

struct PrivateKeysForTests;

impl PrivateKeysForTests {
@ -2640,7 +2508,7 @@ pub mod tests {

impl IdForTests {
fn pool_definition_id() -> AccountId {
compute_pool_pda(
amm_core::compute_pool_pda(
Program::amm().id(),
IdForTests::token_a_definition_id(),
IdForTests::token_b_definition_id(),
@ -2648,7 +2516,10 @@ pub mod tests {
}

fn token_lp_definition_id() -> AccountId {
compute_liquidity_token_pda(Program::amm().id(), IdForTests::pool_definition_id())
amm_core::compute_liquidity_token_pda(
Program::amm().id(),
IdForTests::pool_definition_id(),
)
}

fn token_a_definition_id() -> AccountId {
@ -2678,7 +2549,7 @@ pub mod tests {
}

fn vault_a_id() -> AccountId {
compute_vault_pda(
amm_core::compute_vault_pda(
Program::amm().id(),
IdForTests::pool_definition_id(),
IdForTests::token_a_definition_id(),
@ -2686,7 +2557,7 @@ pub mod tests {
}

fn vault_b_id() -> AccountId {
compute_vault_pda(
amm_core::compute_vault_pda(
Program::amm().id(),
IdForTests::pool_definition_id(),
IdForTests::token_b_definition_id(),
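The local PDA helpers removed earlier in this diff now live in amm_core, as the calls above show. A hedged sketch of deriving the related PDAs for one pool through that API (only the functions and their argument order are taken from this diff; the wrapper itself is illustrative):

fn pdas_for_pool(token_a: AccountId, token_b: AccountId) -> (AccountId, AccountId, AccountId) {
    let amm = Program::amm().id();
    // The pool account is derived from the two token definitions...
    let pool = amm_core::compute_pool_pda(amm, token_a, token_b);
    // ...and each vault from the pool plus one token definition.
    let vault_a = amm_core::compute_vault_pda(amm, pool, token_a);
    let vault_b = amm_core::compute_vault_pda(amm, pool, token_b);
    (pool, vault_a, vault_b)
}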
@ -2725,7 +2596,7 @@ pub mod tests {
Account {
program_owner: Program::amm().id(),
balance: 0u128,
data: PoolDefinition::into_data(PoolDefinition {
data: Data::from(&PoolDefinition {
definition_token_a_id: IdForTests::token_a_definition_id(),
definition_token_b_id: IdForTests::token_b_definition_id(),
vault_a_id: IdForTests::vault_a_id(),
@ -2844,7 +2715,7 @@ pub mod tests {
Account {
program_owner: Program::amm().id(),
balance: 0u128,
data: PoolDefinition::into_data(PoolDefinition {
data: Data::from(&PoolDefinition {
definition_token_a_id: IdForTests::token_a_definition_id(),
definition_token_b_id: IdForTests::token_b_definition_id(),
vault_a_id: IdForTests::vault_a_id(),
@ -2912,7 +2783,7 @@ pub mod tests {
Account {
program_owner: Program::amm().id(),
balance: 0u128,
data: PoolDefinition::into_data(PoolDefinition {
data: Data::from(&PoolDefinition {
definition_token_a_id: IdForTests::token_a_definition_id(),
definition_token_b_id: IdForTests::token_b_definition_id(),
vault_a_id: IdForTests::vault_a_id(),
@ -2980,7 +2851,7 @@ pub mod tests {
Account {
program_owner: Program::amm().id(),
balance: 0u128,
data: PoolDefinition::into_data(PoolDefinition {
data: Data::from(&PoolDefinition {
definition_token_a_id: IdForTests::token_a_definition_id(),
definition_token_b_id: IdForTests::token_b_definition_id(),
vault_a_id: IdForTests::vault_a_id(),
@ -3073,7 +2944,7 @@ pub mod tests {
Account {
program_owner: Program::amm().id(),
balance: 0u128,
data: PoolDefinition::into_data(PoolDefinition {
data: Data::from(&PoolDefinition {
definition_token_a_id: IdForTests::token_a_definition_id(),
definition_token_b_id: IdForTests::token_b_definition_id(),
vault_a_id: IdForTests::vault_a_id(),
@ -3179,7 +3050,7 @@ pub mod tests {
Account {
program_owner: Program::amm().id(),
balance: 0u128,
data: PoolDefinition::into_data(PoolDefinition {
data: Data::from(&PoolDefinition {
definition_token_a_id: IdForTests::token_a_definition_id(),
definition_token_b_id: IdForTests::token_b_definition_id(),
vault_a_id: IdForTests::vault_a_id(),
@ -3248,7 +3119,7 @@ pub mod tests {
Account {
program_owner: Program::amm().id(),
balance: 0u128,
data: PoolDefinition::into_data(PoolDefinition {
data: Data::from(&PoolDefinition {
definition_token_a_id: IdForTests::token_a_definition_id(),
definition_token_b_id: IdForTests::token_b_definition_id(),
vault_a_id: IdForTests::vault_a_id(),
@ -3277,11 +3148,6 @@ pub mod tests {
}
}

const AMM_NEW_DEFINITION: u8 = 0;
const AMM_SWAP: u8 = 1;
const AMM_ADD_LIQUIDITY: u8 = 2;
const AMM_REMOVE_LIQUIDITY: u8 = 3;

fn state_for_amm_tests() -> V02State {
let initial_data = [];
let mut state =
@ -3347,11 +3213,11 @@ pub mod tests {
fn test_simple_amm_remove() {
let mut state = state_for_amm_tests();

let mut instruction: Vec<u8> = Vec::new();
instruction.push(AMM_REMOVE_LIQUIDITY);
instruction.extend_from_slice(&BalanceForTests::remove_lp().to_le_bytes());
instruction.extend_from_slice(&BalanceForTests::remove_min_amount_a().to_le_bytes());
instruction.extend_from_slice(&BalanceForTests::remove_min_amount_b().to_le_bytes());
let instruction = amm_core::Instruction::RemoveLiquidity {
remove_liquidity_amount: BalanceForTests::remove_lp(),
min_amount_to_remove_token_a: BalanceForTests::remove_min_amount_a(),
min_amount_to_remove_token_b: BalanceForTests::remove_min_amount_b(),
};
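Throughout the AMM tests in this diff, hand-rolled Vec<u8> instruction encoding is replaced with typed amm_core::Instruction values like the one above. The enum itself is not shown in these hunks; the sketch below reconstructs its assumed shape from the variant and field names used in the tests (field types are guesses based on the old to_le_bytes encoding):

// Assumed shape, for orientation only; the real definition lives in amm_core.
pub enum Instruction {
    NewDefinition { token_a_amount: u128, token_b_amount: u128, amm_program_id: ProgramId },
    Swap { swap_amount_in: u128, min_amount_out: u128, token_definition_id_in: AccountId },
    AddLiquidity { min_amount_liquidity: u128, max_amount_to_add_token_a: u128, max_amount_to_add_token_b: u128 },
    RemoveLiquidity { remove_liquidity_amount: u128, min_amount_to_remove_token_a: u128, min_amount_to_remove_token_b: u128 },
}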
|
||||
let message = public_transaction::Message::try_new(
|
||||
Program::amm().id(),
|
||||
@ -3377,13 +3243,13 @@ pub mod tests {
|
||||
let tx = PublicTransaction::new(message, witness_set);
|
||||
state.transition_from_public_transaction(&tx).unwrap();
|
||||
|
||||
let pool_post = state.get_account_by_id(&IdForTests::pool_definition_id());
|
||||
let vault_a_post = state.get_account_by_id(&IdForTests::vault_a_id());
|
||||
let vault_b_post = state.get_account_by_id(&IdForTests::vault_b_id());
|
||||
let token_lp_post = state.get_account_by_id(&IdForTests::token_lp_definition_id());
|
||||
let user_token_a_post = state.get_account_by_id(&IdForTests::user_token_a_id());
|
||||
let user_token_b_post = state.get_account_by_id(&IdForTests::user_token_b_id());
|
||||
let user_token_lp_post = state.get_account_by_id(&IdForTests::user_token_lp_id());
|
||||
let pool_post = state.get_account_by_id(IdForTests::pool_definition_id());
|
||||
let vault_a_post = state.get_account_by_id(IdForTests::vault_a_id());
|
||||
let vault_b_post = state.get_account_by_id(IdForTests::vault_b_id());
|
||||
let token_lp_post = state.get_account_by_id(IdForTests::token_lp_definition_id());
|
||||
let user_token_a_post = state.get_account_by_id(IdForTests::user_token_a_id());
|
||||
let user_token_b_post = state.get_account_by_id(IdForTests::user_token_b_id());
|
||||
let user_token_lp_post = state.get_account_by_id(IdForTests::user_token_lp_id());
|
||||
|
||||
let expected_pool = AccountForTests::pool_definition_remove();
|
||||
let expected_vault_a = AccountForTests::vault_a_remove();
|
||||
@ -3424,12 +3290,11 @@ pub mod tests {
|
||||
AccountForTests::token_lp_definition_init_inactive(),
|
||||
);
|
||||
|
||||
let mut instruction: Vec<u8> = Vec::new();
|
||||
instruction.push(AMM_NEW_DEFINITION);
|
||||
instruction.extend_from_slice(&BalanceForTests::vault_a_balance_init().to_le_bytes());
|
||||
instruction.extend_from_slice(&BalanceForTests::vault_b_balance_init().to_le_bytes());
|
||||
let amm_program_u8: [u8; 32] = bytemuck::cast(Program::amm().id());
|
||||
instruction.extend_from_slice(&amm_program_u8);
|
||||
let instruction = amm_core::Instruction::NewDefinition {
|
||||
token_a_amount: BalanceForTests::vault_a_balance_init(),
|
||||
token_b_amount: BalanceForTests::vault_b_balance_init(),
|
||||
amm_program_id: Program::amm().id(),
|
||||
};
|
||||
|
||||
let message = public_transaction::Message::try_new(
|
||||
Program::amm().id(),
|
||||
@ -3458,13 +3323,13 @@ pub mod tests {
|
||||
let tx = PublicTransaction::new(message, witness_set);
|
||||
state.transition_from_public_transaction(&tx).unwrap();
|
||||
|
||||
let pool_post = state.get_account_by_id(&IdForTests::pool_definition_id());
|
||||
let vault_a_post = state.get_account_by_id(&IdForTests::vault_a_id());
|
||||
let vault_b_post = state.get_account_by_id(&IdForTests::vault_b_id());
|
||||
let token_lp_post = state.get_account_by_id(&IdForTests::token_lp_definition_id());
|
||||
let user_token_a_post = state.get_account_by_id(&IdForTests::user_token_a_id());
|
||||
let user_token_b_post = state.get_account_by_id(&IdForTests::user_token_b_id());
|
||||
let user_token_lp_post = state.get_account_by_id(&IdForTests::user_token_lp_id());
|
||||
let pool_post = state.get_account_by_id(IdForTests::pool_definition_id());
|
||||
let vault_a_post = state.get_account_by_id(IdForTests::vault_a_id());
|
||||
let vault_b_post = state.get_account_by_id(IdForTests::vault_b_id());
|
||||
let token_lp_post = state.get_account_by_id(IdForTests::token_lp_definition_id());
|
||||
let user_token_a_post = state.get_account_by_id(IdForTests::user_token_a_id());
|
||||
let user_token_b_post = state.get_account_by_id(IdForTests::user_token_b_id());
|
||||
let user_token_lp_post = state.get_account_by_id(IdForTests::user_token_lp_id());
|
||||
|
||||
let expected_pool = AccountForTests::pool_definition_new_init();
|
||||
let expected_vault_a = AccountForTests::vault_a_init();
|
||||
@ -3509,12 +3374,11 @@ pub mod tests {
|
||||
AccountForTests::user_token_lp_holding_init_zero(),
|
||||
);
|
||||
|
||||
let mut instruction: Vec<u8> = Vec::new();
|
||||
instruction.push(AMM_NEW_DEFINITION);
|
||||
instruction.extend_from_slice(&BalanceForTests::vault_a_balance_init().to_le_bytes());
|
||||
instruction.extend_from_slice(&BalanceForTests::vault_b_balance_init().to_le_bytes());
|
||||
let amm_program_u8: [u8; 32] = bytemuck::cast(Program::amm().id());
|
||||
instruction.extend_from_slice(&amm_program_u8);
|
||||
let instruction = amm_core::Instruction::NewDefinition {
|
||||
token_a_amount: BalanceForTests::vault_a_balance_init(),
|
||||
token_b_amount: BalanceForTests::vault_b_balance_init(),
|
||||
amm_program_id: Program::amm().id(),
|
||||
};
|
||||
|
||||
let message = public_transaction::Message::try_new(
|
||||
Program::amm().id(),
|
||||
@ -3543,13 +3407,13 @@ pub mod tests {
|
||||
let tx = PublicTransaction::new(message, witness_set);
|
||||
state.transition_from_public_transaction(&tx).unwrap();
|
||||
|
||||
let pool_post = state.get_account_by_id(&IdForTests::pool_definition_id());
|
||||
let vault_a_post = state.get_account_by_id(&IdForTests::vault_a_id());
|
||||
let vault_b_post = state.get_account_by_id(&IdForTests::vault_b_id());
|
||||
let token_lp_post = state.get_account_by_id(&IdForTests::token_lp_definition_id());
|
||||
let user_token_a_post = state.get_account_by_id(&IdForTests::user_token_a_id());
|
||||
let user_token_b_post = state.get_account_by_id(&IdForTests::user_token_b_id());
|
||||
let user_token_lp_post = state.get_account_by_id(&IdForTests::user_token_lp_id());
|
||||
let pool_post = state.get_account_by_id(IdForTests::pool_definition_id());
|
||||
let vault_a_post = state.get_account_by_id(IdForTests::vault_a_id());
|
||||
let vault_b_post = state.get_account_by_id(IdForTests::vault_b_id());
|
||||
let token_lp_post = state.get_account_by_id(IdForTests::token_lp_definition_id());
|
||||
let user_token_a_post = state.get_account_by_id(IdForTests::user_token_a_id());
|
||||
let user_token_b_post = state.get_account_by_id(IdForTests::user_token_b_id());
|
||||
let user_token_lp_post = state.get_account_by_id(IdForTests::user_token_lp_id());
|
||||
|
||||
let expected_pool = AccountForTests::pool_definition_init();
|
||||
let expected_vault_a = AccountForTests::vault_a_init();
|
||||
@ -3582,12 +3446,11 @@ pub mod tests {
|
||||
AccountForTests::vault_b_init_inactive(),
|
||||
);
|
||||
|
||||
let mut instruction: Vec<u8> = Vec::new();
|
||||
instruction.push(AMM_NEW_DEFINITION);
|
||||
instruction.extend_from_slice(&BalanceForTests::vault_a_balance_init().to_le_bytes());
|
||||
instruction.extend_from_slice(&BalanceForTests::vault_b_balance_init().to_le_bytes());
|
||||
let amm_program_u8: [u8; 32] = bytemuck::cast(Program::amm().id());
|
||||
instruction.extend_from_slice(&amm_program_u8);
|
||||
let instruction = amm_core::Instruction::NewDefinition {
|
||||
token_a_amount: BalanceForTests::vault_a_balance_init(),
|
||||
token_b_amount: BalanceForTests::vault_b_balance_init(),
|
||||
amm_program_id: Program::amm().id(),
|
||||
};
|
||||
|
||||
let message = public_transaction::Message::try_new(
|
||||
Program::amm().id(),
|
||||
@ -3616,13 +3479,13 @@ pub mod tests {
|
||||
let tx = PublicTransaction::new(message, witness_set);
|
||||
state.transition_from_public_transaction(&tx).unwrap();
|
||||
|
||||
let pool_post = state.get_account_by_id(&IdForTests::pool_definition_id());
|
||||
let vault_a_post = state.get_account_by_id(&IdForTests::vault_a_id());
|
||||
let vault_b_post = state.get_account_by_id(&IdForTests::vault_b_id());
|
||||
let token_lp_post = state.get_account_by_id(&IdForTests::token_lp_definition_id());
|
||||
let user_token_a_post = state.get_account_by_id(&IdForTests::user_token_a_id());
|
||||
let user_token_b_post = state.get_account_by_id(&IdForTests::user_token_b_id());
|
||||
let user_token_lp_post = state.get_account_by_id(&IdForTests::user_token_lp_id());
|
||||
let pool_post = state.get_account_by_id(IdForTests::pool_definition_id());
|
||||
let vault_a_post = state.get_account_by_id(IdForTests::vault_a_id());
|
||||
let vault_b_post = state.get_account_by_id(IdForTests::vault_b_id());
|
||||
let token_lp_post = state.get_account_by_id(IdForTests::token_lp_definition_id());
|
||||
let user_token_a_post = state.get_account_by_id(IdForTests::user_token_a_id());
|
||||
let user_token_b_post = state.get_account_by_id(IdForTests::user_token_b_id());
|
||||
let user_token_lp_post = state.get_account_by_id(IdForTests::user_token_lp_id());
|
||||
|
||||
let expected_pool = AccountForTests::pool_definition_new_init();
|
||||
let expected_vault_a = AccountForTests::vault_a_init();
|
||||
@ -3646,11 +3509,11 @@ pub mod tests {
|
||||
env_logger::init();
|
||||
let mut state = state_for_amm_tests();
|
||||
|
||||
let mut instruction: Vec<u8> = Vec::new();
|
||||
instruction.push(AMM_ADD_LIQUIDITY);
|
||||
instruction.extend_from_slice(&BalanceForTests::add_min_amount_lp().to_le_bytes());
|
||||
instruction.extend_from_slice(&BalanceForTests::add_max_amount_a().to_le_bytes());
|
||||
instruction.extend_from_slice(&BalanceForTests::add_max_amount_b().to_le_bytes());
|
||||
let instruction = amm_core::Instruction::AddLiquidity {
|
||||
min_amount_liquidity: BalanceForTests::add_min_amount_lp(),
|
||||
max_amount_to_add_token_a: BalanceForTests::add_max_amount_a(),
|
||||
max_amount_to_add_token_b: BalanceForTests::add_max_amount_b(),
|
||||
};
|
||||
|
||||
let message = public_transaction::Message::try_new(
|
||||
Program::amm().id(),
|
||||
@ -3679,13 +3542,13 @@ pub mod tests {
|
||||
let tx = PublicTransaction::new(message, witness_set);
|
||||
state.transition_from_public_transaction(&tx).unwrap();
|
||||
|
||||
let pool_post = state.get_account_by_id(&IdForTests::pool_definition_id());
|
||||
let vault_a_post = state.get_account_by_id(&IdForTests::vault_a_id());
|
||||
let vault_b_post = state.get_account_by_id(&IdForTests::vault_b_id());
|
||||
let token_lp_post = state.get_account_by_id(&IdForTests::token_lp_definition_id());
|
||||
let user_token_a_post = state.get_account_by_id(&IdForTests::user_token_a_id());
|
||||
let user_token_b_post = state.get_account_by_id(&IdForTests::user_token_b_id());
|
||||
let user_token_lp_post = state.get_account_by_id(&IdForTests::user_token_lp_id());
|
||||
let pool_post = state.get_account_by_id(IdForTests::pool_definition_id());
|
||||
let vault_a_post = state.get_account_by_id(IdForTests::vault_a_id());
|
||||
let vault_b_post = state.get_account_by_id(IdForTests::vault_b_id());
|
||||
let token_lp_post = state.get_account_by_id(IdForTests::token_lp_definition_id());
|
||||
let user_token_a_post = state.get_account_by_id(IdForTests::user_token_a_id());
|
||||
let user_token_b_post = state.get_account_by_id(IdForTests::user_token_b_id());
|
||||
let user_token_lp_post = state.get_account_by_id(IdForTests::user_token_lp_id());
|
||||
|
||||
let expected_pool = AccountForTests::pool_definition_add();
|
||||
let expected_vault_a = AccountForTests::vault_a_add();
|
||||
@ -3708,11 +3571,11 @@ pub mod tests {
|
||||
fn test_simple_amm_swap_1() {
|
||||
let mut state = state_for_amm_tests();
|
||||
|
||||
let mut instruction: Vec<u8> = Vec::new();
|
||||
instruction.push(AMM_SWAP);
|
||||
instruction.extend_from_slice(&BalanceForTests::swap_amount_in().to_le_bytes());
|
||||
instruction.extend_from_slice(&BalanceForTests::swap_min_amount_out().to_le_bytes());
|
||||
instruction.extend_from_slice(&IdForTests::token_b_definition_id().to_bytes());
|
||||
let instruction = amm_core::Instruction::Swap {
|
||||
swap_amount_in: BalanceForTests::swap_amount_in(),
|
||||
min_amount_out: BalanceForTests::swap_min_amount_out(),
|
||||
token_definition_id_in: IdForTests::token_b_definition_id(),
|
||||
};
|
||||
|
||||
let message = public_transaction::Message::try_new(
|
||||
Program::amm().id(),
|
||||
@ -3736,11 +3599,11 @@ pub mod tests {
|
||||
let tx = PublicTransaction::new(message, witness_set);
state.transition_from_public_transaction(&tx).unwrap();

let pool_post = state.get_account_by_id(&IdForTests::pool_definition_id());
let vault_a_post = state.get_account_by_id(&IdForTests::vault_a_id());
let vault_b_post = state.get_account_by_id(&IdForTests::vault_b_id());
let user_token_a_post = state.get_account_by_id(&IdForTests::user_token_a_id());
let user_token_b_post = state.get_account_by_id(&IdForTests::user_token_b_id());
let pool_post = state.get_account_by_id(IdForTests::pool_definition_id());
let vault_a_post = state.get_account_by_id(IdForTests::vault_a_id());
let vault_b_post = state.get_account_by_id(IdForTests::vault_b_id());
let user_token_a_post = state.get_account_by_id(IdForTests::user_token_a_id());
let user_token_b_post = state.get_account_by_id(IdForTests::user_token_b_id());

let expected_pool = AccountForTests::pool_definition_swap_1();
let expected_vault_a = AccountForTests::vault_a_swap_1();
@ -3759,12 +3622,11 @@ pub mod tests {
fn test_simple_amm_swap_2() {
let mut state = state_for_amm_tests();

let mut instruction: Vec<u8> = Vec::new();
instruction.push(AMM_SWAP);
instruction.extend_from_slice(&BalanceForTests::swap_amount_in().to_le_bytes());
instruction.extend_from_slice(&BalanceForTests::swap_min_amount_out().to_le_bytes());
instruction.extend_from_slice(&IdForTests::token_a_definition_id().to_bytes());

let instruction = amm_core::Instruction::Swap {
swap_amount_in: BalanceForTests::swap_amount_in(),
min_amount_out: BalanceForTests::swap_min_amount_out(),
token_definition_id_in: IdForTests::token_a_definition_id(),
};
let message = public_transaction::Message::try_new(
Program::amm().id(),
vec![
@ -3787,11 +3649,11 @@ pub mod tests {
let tx = PublicTransaction::new(message, witness_set);
state.transition_from_public_transaction(&tx).unwrap();

let pool_post = state.get_account_by_id(&IdForTests::pool_definition_id());
let vault_a_post = state.get_account_by_id(&IdForTests::vault_a_id());
let vault_b_post = state.get_account_by_id(&IdForTests::vault_b_id());
let user_token_a_post = state.get_account_by_id(&IdForTests::user_token_a_id());
let user_token_b_post = state.get_account_by_id(&IdForTests::user_token_b_id());
let pool_post = state.get_account_by_id(IdForTests::pool_definition_id());
let vault_a_post = state.get_account_by_id(IdForTests::vault_a_id());
let vault_b_post = state.get_account_by_id(IdForTests::vault_b_id());
let user_token_a_post = state.get_account_by_id(IdForTests::user_token_a_id());
let user_token_b_post = state.get_account_by_id(IdForTests::user_token_b_id());

let expected_pool = AccountForTests::pool_definition_swap_2();
let expected_vault_a = AccountForTests::vault_a_swap_2();
@ -3842,8 +3704,8 @@ pub mod tests {

state.transition_from_public_transaction(&tx).unwrap();

let from_post = state.get_account_by_id(&from);
let to_post = state.get_account_by_id(&to);
let from_post = state.get_account_by_id(from);
let to_post = state.get_account_by_id(to);
assert_eq!(from_post.balance, initial_balance - amount);
assert_eq!(to_post, expected_to_post);
}
@ -3868,7 +3730,7 @@ pub mod tests {
let amount: u128 = 37;

// Check the recipient is an uninitialized account
assert_eq!(state.get_account_by_id(&to), Account::default());
assert_eq!(state.get_account_by_id(to), Account::default());

let expected_to_post = Account {
// The expected program owner is the authenticated transfer program
@ -3898,8 +3760,8 @@ pub mod tests {

state.transition_from_public_transaction(&tx).unwrap();

let from_post = state.get_account_by_id(&from);
let to_post = state.get_account_by_id(&to);
let from_post = state.get_account_by_id(from);
let to_post = state.get_account_by_id(to);
assert_eq!(from_post.balance, initial_balance - amount);
assert_eq!(to_post, expected_to_post);
}
@ -4094,7 +3956,7 @@ pub mod tests {
let tx = PublicTransaction::new(message, witness_set);
state.transition_from_public_transaction(&tx).unwrap();

let winner_token_holding_post = state.get_account_by_id(&winner_token_holding_id);
let winner_token_holding_post = state.get_account_by_id(winner_token_holding_id);
assert_eq!(
winner_token_holding_post,
expected_winner_token_holding_post
@ -4151,13 +4013,12 @@ pub mod tests {

let balance_to_move: u128 = 4;

let sender =
AccountWithMetadata::new(state.get_account_by_id(&sender_id.clone()), true, sender_id);
let sender = AccountWithMetadata::new(state.get_account_by_id(sender_id), true, sender_id);

let sender_nonce = sender.account.nonce;

let _recipient =
AccountWithMetadata::new(state.get_account_by_id(&recipient_id), false, sender_id);
AccountWithMetadata::new(state.get_account_by_id(recipient_id), false, sender_id);

let message = public_transaction::Message::try_new(
Program::modified_transfer_program().id(),
@ -4172,18 +4033,18 @@ pub mod tests {
let res = state.transition_from_public_transaction(&tx);
assert!(matches!(res, Err(NssaError::InvalidProgramBehavior)));

let sender_post = state.get_account_by_id(&sender_id);
let recipient_post = state.get_account_by_id(&recipient_id);
let sender_post = state.get_account_by_id(sender_id);
let recipient_post = state.get_account_by_id(recipient_id);

let expected_sender_post = {
let mut this = state.get_account_by_id(&sender_id);
let mut this = state.get_account_by_id(sender_id);
this.balance = sender_init_balance;
this.nonce = 0;
this
};

let expected_recipient_post = {
let mut this = state.get_account_by_id(&sender_id);
let mut this = state.get_account_by_id(sender_id);
this.balance = recipient_init_balance;
this.nonce = 0;
this
@ -4353,7 +4214,7 @@ pub mod tests {
// Should succeed - no changes made, no claim needed
assert!(result.is_ok());
// Account should remain default/unclaimed
assert_eq!(state.get_account_by_id(&account_id), Account::default());
assert_eq!(state.get_account_by_id(account_id), Account::default());
}

#[test]

@ -8,5 +8,7 @@ license = { workspace = true }
nssa_core.workspace = true
token_core.workspace = true
token_program.workspace = true
amm_core.workspace = true
amm_program.workspace = true
risc0-zkvm.workspace = true
serde = { workspace = true, default-features = false }

File diff suppressed because it is too large
10 programs/amm/Cargo.toml Normal file
@ -0,0 +1,10 @@
[package]
name = "amm_program"
version = "0.1.0"
edition = "2024"
license = { workspace = true }

[dependencies]
nssa_core.workspace = true
token_core.workspace = true
amm_core.workspace = true
11 programs/amm/core/Cargo.toml Normal file
@ -0,0 +1,11 @@
[package]
name = "amm_core"
version = "0.1.0"
edition = "2024"
license = { workspace = true }

[dependencies]
nssa_core.workspace = true
serde.workspace = true
risc0-zkvm.workspace = true
borsh.workspace = true
197 programs/amm/core/src/lib.rs Normal file
@ -0,0 +1,197 @@
|
||||
//! This crate contains core data structures and utilities for the AMM Program.
|
||||
|
||||
use borsh::{BorshDeserialize, BorshSerialize};
|
||||
use nssa_core::{
|
||||
account::{AccountId, Data},
|
||||
program::{PdaSeed, ProgramId},
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// AMM Program Instruction.
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub enum Instruction {
|
||||
/// Initializes a new Pool (or re-initializes an inactive Pool).
|
||||
///
|
||||
/// Required accounts:
|
||||
/// - AMM Pool
|
||||
/// - Vault Holding Account for Token A
|
||||
/// - Vault Holding Account for Token B
|
||||
/// - Pool Liquidity Token Definition
|
||||
/// - User Holding Account for Token A (authorized)
|
||||
/// - User Holding Account for Token B (authorized)
|
||||
/// - User Holding Account for Pool Liquidity
|
||||
NewDefinition {
|
||||
token_a_amount: u128,
|
||||
token_b_amount: u128,
|
||||
amm_program_id: ProgramId,
|
||||
},
|
||||
|
||||
/// Adds liquidity to the Pool
|
||||
///
|
||||
/// Required accounts:
|
||||
/// - AMM Pool (initialized)
|
||||
/// - Vault Holding Account for Token A (initialized)
|
||||
/// - Vault Holding Account for Token B (initialized)
|
||||
/// - Pool Liquidity Token Definition (initialized)
|
||||
/// - User Holding Account for Token A (authorized)
|
||||
/// - User Holding Account for Token B (authorized)
|
||||
/// - User Holding Account for Pool Liquidity
|
||||
AddLiquidity {
|
||||
min_amount_liquidity: u128,
|
||||
max_amount_to_add_token_a: u128,
|
||||
max_amount_to_add_token_b: u128,
|
||||
},
|
||||
|
||||
/// Removes liquidity from the Pool
|
||||
///
|
||||
/// Required accounts:
|
||||
/// - AMM Pool (initialized)
|
||||
/// - Vault Holding Account for Token A (initialized)
|
||||
/// - Vault Holding Account for Token B (initialized)
|
||||
/// - Pool Liquidity Token Definition (initialized)
|
||||
/// - User Holding Account for Token A (initialized)
|
||||
/// - User Holding Account for Token B (initialized)
|
||||
/// - User Holding Account for Pool Liquidity (authorized)
|
||||
RemoveLiquidity {
|
||||
remove_liquidity_amount: u128,
|
||||
min_amount_to_remove_token_a: u128,
|
||||
min_amount_to_remove_token_b: u128,
|
||||
},
|
||||
|
||||
/// Swap some quantity of Tokens (either Token A or Token B)
|
||||
/// while maintaining the Pool constant product.
|
||||
///
|
||||
/// Required accounts:
|
||||
/// - AMM Pool (initialized)
|
||||
/// - Vault Holding Account for Token A (initialized)
|
||||
/// - Vault Holding Account for Token B (initialized)
|
||||
/// - User Holding Account for Token A
|
||||
/// - User Holding Account for Token B Either User Holding Account for Token A or Token B is
|
||||
/// authorized.
|
||||
Swap {
|
||||
swap_amount_in: u128,
|
||||
min_amount_out: u128,
|
||||
token_definition_id_in: AccountId,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Clone, Default, Serialize, Deserialize, BorshSerialize, BorshDeserialize)]
|
||||
pub struct PoolDefinition {
|
||||
pub definition_token_a_id: AccountId,
|
||||
pub definition_token_b_id: AccountId,
|
||||
pub vault_a_id: AccountId,
|
||||
pub vault_b_id: AccountId,
|
||||
pub liquidity_pool_id: AccountId,
|
||||
pub liquidity_pool_supply: u128,
|
||||
pub reserve_a: u128,
|
||||
pub reserve_b: u128,
|
||||
/// Fees are currently not used
|
||||
pub fees: u128,
|
||||
/// A pool becomes inactive (active = false)
|
||||
/// once all of its liquidity has been removed (e.g., reserves are emptied and
|
||||
/// liquidity_pool_supply = 0)
|
||||
pub active: bool,
|
||||
}
|
||||
|
||||
impl TryFrom<&Data> for PoolDefinition {
|
||||
type Error = std::io::Error;
|
||||
|
||||
fn try_from(data: &Data) -> Result<Self, Self::Error> {
|
||||
PoolDefinition::try_from_slice(data.as_ref())
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&PoolDefinition> for Data {
|
||||
fn from(definition: &PoolDefinition) -> Self {
|
||||
// Using size_of_val as size hint for Vec allocation
|
||||
let mut data = Vec::with_capacity(std::mem::size_of_val(definition));
|
||||
|
||||
BorshSerialize::serialize(definition, &mut data)
|
||||
.expect("Serialization to Vec should not fail");
|
||||
|
||||
Data::try_from(data).expect("Token definition encoded data should fit into Data")
|
||||
}
|
||||
}
|
||||
|
||||
pub fn compute_pool_pda(
|
||||
amm_program_id: ProgramId,
|
||||
definition_token_a_id: AccountId,
|
||||
definition_token_b_id: AccountId,
|
||||
) -> AccountId {
|
||||
AccountId::from((
|
||||
&amm_program_id,
|
||||
&compute_pool_pda_seed(definition_token_a_id, definition_token_b_id),
|
||||
))
|
||||
}
|
||||
|
||||
pub fn compute_pool_pda_seed(
|
||||
definition_token_a_id: AccountId,
|
||||
definition_token_b_id: AccountId,
|
||||
) -> PdaSeed {
|
||||
use risc0_zkvm::sha::{Impl, Sha256};
|
||||
|
||||
let (token_1, token_2) = match definition_token_a_id
|
||||
.value()
|
||||
.cmp(definition_token_b_id.value())
|
||||
{
|
||||
std::cmp::Ordering::Less => (definition_token_b_id, definition_token_a_id),
|
||||
std::cmp::Ordering::Greater => (definition_token_a_id, definition_token_b_id),
|
||||
std::cmp::Ordering::Equal => panic!("Definitions match"),
|
||||
};
|
||||
|
||||
let mut bytes = [0; 64];
|
||||
bytes[0..32].copy_from_slice(&token_1.to_bytes());
|
||||
bytes[32..].copy_from_slice(&token_2.to_bytes());
|
||||
|
||||
PdaSeed::new(
|
||||
Impl::hash_bytes(&bytes)
|
||||
.as_bytes()
|
||||
.try_into()
|
||||
.expect("Hash output must be exactly 32 bytes long"),
|
||||
)
|
||||
}
|
||||
|
||||
pub fn compute_vault_pda(
|
||||
amm_program_id: ProgramId,
|
||||
pool_id: AccountId,
|
||||
definition_token_id: AccountId,
|
||||
) -> AccountId {
|
||||
AccountId::from((
|
||||
&amm_program_id,
|
||||
&compute_vault_pda_seed(pool_id, definition_token_id),
|
||||
))
|
||||
}
|
||||
|
||||
pub fn compute_vault_pda_seed(pool_id: AccountId, definition_token_id: AccountId) -> PdaSeed {
|
||||
use risc0_zkvm::sha::{Impl, Sha256};
|
||||
|
||||
let mut bytes = [0; 64];
|
||||
bytes[0..32].copy_from_slice(&pool_id.to_bytes());
|
||||
bytes[32..].copy_from_slice(&definition_token_id.to_bytes());
|
||||
|
||||
PdaSeed::new(
|
||||
Impl::hash_bytes(&bytes)
|
||||
.as_bytes()
|
||||
.try_into()
|
||||
.expect("Hash output must be exactly 32 bytes long"),
|
||||
)
|
||||
}
|
||||
|
||||
pub fn compute_liquidity_token_pda(amm_program_id: ProgramId, pool_id: AccountId) -> AccountId {
|
||||
AccountId::from((&amm_program_id, &compute_liquidity_token_pda_seed(pool_id)))
|
||||
}
|
||||
|
||||
pub fn compute_liquidity_token_pda_seed(pool_id: AccountId) -> PdaSeed {
|
||||
use risc0_zkvm::sha::{Impl, Sha256};
|
||||
|
||||
let mut bytes = [0; 64];
|
||||
bytes[0..32].copy_from_slice(&pool_id.to_bytes());
|
||||
bytes[32..].copy_from_slice(&[0; 32]);
|
||||
|
||||
PdaSeed::new(
|
||||
Impl::hash_bytes(&bytes)
|
||||
.as_bytes()
|
||||
.try_into()
|
||||
.expect("Hash output must be exactly 32 bytes long"),
|
||||
)
|
||||
}
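Taken together, the PDA helpers above pin down the whole account layout of a pool: the pool PDA is derived from the (order-independent) pair of token definition IDs, each vault PDA from the pool plus the token it holds, and the LP token definition PDA from the pool alone. A minimal sketch of how a caller might derive all of these IDs; the AmmAccounts struct and derive_amm_accounts helper are illustrative and not part of the crate:

use amm_core::{compute_liquidity_token_pda, compute_pool_pda, compute_vault_pda};
use nssa_core::{account::AccountId, program::ProgramId};

/// Hypothetical helper bundling the derived account IDs for one pool.
struct AmmAccounts {
    pool: AccountId,
    vault_a: AccountId,
    vault_b: AccountId,
    lp_definition: AccountId,
}

fn derive_amm_accounts(
    amm_program_id: ProgramId,
    token_a_definition: AccountId,
    token_b_definition: AccountId,
) -> AmmAccounts {
    // The pool PDA depends only on the sorted token pair, so both orderings map to the same pool.
    let pool = compute_pool_pda(amm_program_id, token_a_definition, token_b_definition);
    AmmAccounts {
        pool,
        // Each vault is keyed by the pool it belongs to and the token it holds.
        vault_a: compute_vault_pda(amm_program_id, pool, token_a_definition),
        vault_b: compute_vault_pda(amm_program_id, pool, token_b_definition),
        // The LP token definition is keyed by the pool alone.
        lp_definition: compute_liquidity_token_pda(amm_program_id, pool),
    }
}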
178 programs/amm/src/add.rs Normal file
@ -0,0 +1,178 @@
|
||||
use std::num::NonZeroU128;
|
||||
|
||||
use amm_core::{PoolDefinition, compute_liquidity_token_pda_seed};
|
||||
use nssa_core::{
|
||||
account::{AccountWithMetadata, Data},
|
||||
program::{AccountPostState, ChainedCall},
|
||||
};
|
||||
|
||||
#[expect(clippy::too_many_arguments, reason = "TODO: Fix later")]
|
||||
pub fn add_liquidity(
|
||||
pool: AccountWithMetadata,
|
||||
vault_a: AccountWithMetadata,
|
||||
vault_b: AccountWithMetadata,
|
||||
pool_definition_lp: AccountWithMetadata,
|
||||
user_holding_a: AccountWithMetadata,
|
||||
user_holding_b: AccountWithMetadata,
|
||||
user_holding_lp: AccountWithMetadata,
|
||||
min_amount_liquidity: NonZeroU128,
|
||||
max_amount_to_add_token_a: u128,
|
||||
max_amount_to_add_token_b: u128,
|
||||
) -> (Vec<AccountPostState>, Vec<ChainedCall>) {
|
||||
// 1. Fetch Pool state
|
||||
let pool_def_data = PoolDefinition::try_from(&pool.account.data)
|
||||
.expect("Add liquidity: AMM Program expects valid Pool Definition Account");
|
||||
|
||||
assert_eq!(
|
||||
vault_a.account_id, pool_def_data.vault_a_id,
|
||||
"Vault A was not provided"
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
pool_def_data.liquidity_pool_id, pool_definition_lp.account_id,
|
||||
"LP definition mismatch"
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
vault_b.account_id, pool_def_data.vault_b_id,
|
||||
"Vault B was not provided"
|
||||
);
|
||||
|
||||
assert!(
|
||||
max_amount_to_add_token_a != 0 && max_amount_to_add_token_b != 0,
|
||||
"Both max-balances must be nonzero"
|
||||
);
|
||||
|
||||
// 2. Determine deposit amount
|
||||
let vault_b_token_holding = token_core::TokenHolding::try_from(&vault_b.account.data)
|
||||
.expect("Add liquidity: AMM Program expects valid Token Holding Account for Vault B");
|
||||
let token_core::TokenHolding::Fungible {
|
||||
definition_id: _,
|
||||
balance: vault_b_balance,
|
||||
} = vault_b_token_holding
|
||||
else {
|
||||
panic!(
|
||||
"Add liquidity: AMM Program expects valid Fungible Token Holding Account for Vault B"
|
||||
);
|
||||
};
|
||||
|
||||
let vault_a_token_holding = token_core::TokenHolding::try_from(&vault_a.account.data)
|
||||
.expect("Add liquidity: AMM Program expects valid Token Holding Account for Vault A");
|
||||
let token_core::TokenHolding::Fungible {
|
||||
definition_id: _,
|
||||
balance: vault_a_balance,
|
||||
} = vault_a_token_holding
|
||||
else {
|
||||
panic!(
|
||||
"Add liquidity: AMM Program expects valid Fungible Token Holding Account for Vault A"
|
||||
);
|
||||
};
|
||||
|
||||
assert!(pool_def_data.reserve_a != 0, "Reserves must be nonzero");
|
||||
assert!(pool_def_data.reserve_b != 0, "Reserves must be nonzero");
|
||||
assert!(
|
||||
vault_a_balance >= pool_def_data.reserve_a,
|
||||
"Vaults' balances must be at least the reserve amounts"
|
||||
);
|
||||
assert!(
|
||||
vault_b_balance >= pool_def_data.reserve_b,
|
||||
"Vaults' balances must be at least the reserve amounts"
|
||||
);
|
||||
|
||||
// Calculate actual_amounts
|
||||
let ideal_a: u128 =
|
||||
(pool_def_data.reserve_a * max_amount_to_add_token_b) / pool_def_data.reserve_b;
|
||||
let ideal_b: u128 =
|
||||
(pool_def_data.reserve_b * max_amount_to_add_token_a) / pool_def_data.reserve_a;
|
||||
|
||||
let actual_amount_a = if ideal_a > max_amount_to_add_token_a {
|
||||
max_amount_to_add_token_a
|
||||
} else {
|
||||
ideal_a
|
||||
};
|
||||
let actual_amount_b = if ideal_b > max_amount_to_add_token_b {
|
||||
max_amount_to_add_token_b
|
||||
} else {
|
||||
ideal_b
|
||||
};
|
||||
|
||||
// 3. Validate amounts
|
||||
assert!(
|
||||
max_amount_to_add_token_a >= actual_amount_a,
|
||||
"Actual trade amounts cannot exceed max_amounts"
|
||||
);
|
||||
assert!(
|
||||
max_amount_to_add_token_b >= actual_amount_b,
|
||||
"Actual trade amounts cannot exceed max_amounts"
|
||||
);
|
||||
|
||||
assert!(actual_amount_a != 0, "A trade amount is 0");
|
||||
assert!(actual_amount_b != 0, "A trade amount is 0");
|
||||
|
||||
// 4. Calculate LP to mint
|
||||
let delta_lp = std::cmp::min(
|
||||
pool_def_data.liquidity_pool_supply * actual_amount_a / pool_def_data.reserve_a,
|
||||
pool_def_data.liquidity_pool_supply * actual_amount_b / pool_def_data.reserve_b,
|
||||
);
|
||||
|
||||
assert!(delta_lp != 0, "Payable LP must be nonzero");
|
||||
|
||||
assert!(
|
||||
delta_lp >= min_amount_liquidity.into(),
|
||||
"Payable LP is less than provided minimum LP amount"
|
||||
);
|
||||
|
||||
// 5. Update pool account
|
||||
let mut pool_post = pool.account.clone();
|
||||
let pool_post_definition = PoolDefinition {
|
||||
liquidity_pool_supply: pool_def_data.liquidity_pool_supply + delta_lp,
|
||||
reserve_a: pool_def_data.reserve_a + actual_amount_a,
|
||||
reserve_b: pool_def_data.reserve_b + actual_amount_b,
|
||||
..pool_def_data
|
||||
};
|
||||
|
||||
pool_post.data = Data::from(&pool_post_definition);
|
||||
let token_program_id = user_holding_a.account.program_owner;
|
||||
|
||||
// Chain call for Token A (UserHoldingA -> Vault_A)
|
||||
let call_token_a = ChainedCall::new(
|
||||
token_program_id,
|
||||
vec![user_holding_a.clone(), vault_a.clone()],
|
||||
&token_core::Instruction::Transfer {
|
||||
amount_to_transfer: actual_amount_a,
|
||||
},
|
||||
);
|
||||
// Chain call for Token B (UserHoldingB -> Vault_B)
|
||||
let call_token_b = ChainedCall::new(
|
||||
token_program_id,
|
||||
vec![user_holding_b.clone(), vault_b.clone()],
|
||||
&token_core::Instruction::Transfer {
|
||||
amount_to_transfer: actual_amount_b,
|
||||
},
|
||||
);
|
||||
// Chain call for LP (mint new tokens for user_holding_lp)
|
||||
let mut pool_definition_lp_auth = pool_definition_lp.clone();
|
||||
pool_definition_lp_auth.is_authorized = true;
|
||||
let call_token_lp = ChainedCall::new(
|
||||
token_program_id,
|
||||
vec![pool_definition_lp_auth.clone(), user_holding_lp.clone()],
|
||||
&token_core::Instruction::Mint {
|
||||
amount_to_mint: delta_lp,
|
||||
},
|
||||
)
|
||||
.with_pda_seeds(vec![compute_liquidity_token_pda_seed(pool.account_id)]);
|
||||
|
||||
let chained_calls = vec![call_token_lp, call_token_b, call_token_a];
|
||||
|
||||
let post_states = vec![
|
||||
AccountPostState::new(pool_post),
|
||||
AccountPostState::new(vault_a.account.clone()),
|
||||
AccountPostState::new(vault_b.account.clone()),
|
||||
AccountPostState::new(pool_definition_lp.account.clone()),
|
||||
AccountPostState::new(user_holding_a.account.clone()),
|
||||
AccountPostState::new(user_holding_b.account.clone()),
|
||||
AccountPostState::new(user_holding_lp.account.clone()),
|
||||
];
|
||||
|
||||
(post_states, chained_calls)
|
||||
}
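A hedged sanity check of the proportional-deposit math in add_liquidity above; the numbers are hypothetical and not taken from the repository's test fixtures, and .min stands in for the if/else selection used in the function:

#[test]
fn add_liquidity_arithmetic_example() {
    // Pool reserves (1_000, 4_000), LP supply 100; caller offers at most (100, 300).
    let (reserve_a, reserve_b, lp_supply) = (1_000u128, 4_000u128, 100u128);
    let (max_a, max_b) = (100u128, 300u128);
    let ideal_a = reserve_a * max_b / reserve_b; // 75
    let ideal_b = reserve_b * max_a / reserve_a; // 400
    let actual_a = ideal_a.min(max_a); // 75: the ideal amount fits under the cap
    let actual_b = ideal_b.min(max_b); // 300: capped by max_b
    let delta_lp = (lp_supply * actual_a / reserve_a).min(lp_supply * actual_b / reserve_b);
    assert_eq!(delta_lp, 7); // both ratios floor to 7, so 7 LP tokens would be minted
}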
10 programs/amm/src/lib.rs Normal file
@ -0,0 +1,10 @@
//! The AMM Program implementation.

pub use amm_core as core;

pub mod add;
pub mod new_definition;
pub mod remove;
pub mod swap;

mod tests;
158 programs/amm/src/new_definition.rs Normal file
@ -0,0 +1,158 @@
|
||||
use std::num::NonZeroU128;
|
||||
|
||||
use amm_core::{
|
||||
PoolDefinition, compute_liquidity_token_pda, compute_liquidity_token_pda_seed,
|
||||
compute_pool_pda, compute_vault_pda,
|
||||
};
|
||||
use nssa_core::{
|
||||
account::{Account, AccountWithMetadata, Data},
|
||||
program::{AccountPostState, ChainedCall, ProgramId},
|
||||
};
|
||||
|
||||
#[expect(clippy::too_many_arguments, reason = "TODO: Fix later")]
|
||||
pub fn new_definition(
|
||||
pool: AccountWithMetadata,
|
||||
vault_a: AccountWithMetadata,
|
||||
vault_b: AccountWithMetadata,
|
||||
pool_definition_lp: AccountWithMetadata,
|
||||
user_holding_a: AccountWithMetadata,
|
||||
user_holding_b: AccountWithMetadata,
|
||||
user_holding_lp: AccountWithMetadata,
|
||||
token_a_amount: NonZeroU128,
|
||||
token_b_amount: NonZeroU128,
|
||||
amm_program_id: ProgramId,
|
||||
) -> (Vec<AccountPostState>, Vec<ChainedCall>) {
|
||||
// Verify token_a and token_b are different
|
||||
let definition_token_a_id = token_core::TokenHolding::try_from(&user_holding_a.account.data)
|
||||
.expect("New definition: AMM Program expects valid Token Holding account for Token A")
|
||||
.definition_id();
|
||||
let definition_token_b_id = token_core::TokenHolding::try_from(&user_holding_b.account.data)
|
||||
.expect("New definition: AMM Program expects valid Token Holding account for Token B")
|
||||
.definition_id();
|
||||
|
||||
// both instances of the same token program
|
||||
let token_program = user_holding_a.account.program_owner;
|
||||
|
||||
assert_eq!(
|
||||
user_holding_b.account.program_owner, token_program,
|
||||
"User Token holdings must use the same Token Program"
|
||||
);
|
||||
assert!(
|
||||
definition_token_a_id != definition_token_b_id,
|
||||
"Cannot set up a swap for a token with itself"
|
||||
);
|
||||
assert_eq!(
|
||||
pool.account_id,
|
||||
compute_pool_pda(amm_program_id, definition_token_a_id, definition_token_b_id),
|
||||
"Pool Definition Account ID does not match PDA"
|
||||
);
|
||||
assert_eq!(
|
||||
vault_a.account_id,
|
||||
compute_vault_pda(amm_program_id, pool.account_id, definition_token_a_id),
|
||||
"Vault ID does not match PDA"
|
||||
);
|
||||
assert_eq!(
|
||||
vault_b.account_id,
|
||||
compute_vault_pda(amm_program_id, pool.account_id, definition_token_b_id),
|
||||
"Vault ID does not match PDA"
|
||||
);
|
||||
assert_eq!(
|
||||
pool_definition_lp.account_id,
|
||||
compute_liquidity_token_pda(amm_program_id, pool.account_id),
|
||||
"Liquidity pool Token Definition Account ID does not match PDA"
|
||||
);
|
||||
|
||||
// TODO: return here
|
||||
// Verify that Pool Account is not active
|
||||
let pool_account_data = if pool.account == Account::default() {
|
||||
PoolDefinition::default()
|
||||
} else {
|
||||
PoolDefinition::try_from(&pool.account.data)
|
||||
.expect("AMM program expects a valid Pool account")
|
||||
};
|
||||
|
||||
assert!(
|
||||
!pool_account_data.active,
|
||||
"Cannot initialize an active Pool Definition"
|
||||
);
|
||||
|
||||
// LP Token minting calculation
|
||||
// We assume LP is based on the initial deposit amount for Token_A.
|
||||
|
||||
// Update pool account
|
||||
let mut pool_post = pool.account.clone();
|
||||
let pool_post_definition = PoolDefinition {
|
||||
definition_token_a_id,
|
||||
definition_token_b_id,
|
||||
vault_a_id: vault_a.account_id,
|
||||
vault_b_id: vault_b.account_id,
|
||||
liquidity_pool_id: pool_definition_lp.account_id,
|
||||
liquidity_pool_supply: token_a_amount.into(),
|
||||
reserve_a: token_a_amount.into(),
|
||||
reserve_b: token_b_amount.into(),
|
||||
fees: 0u128, // TODO: we assume all fees are 0 for now.
|
||||
active: true,
|
||||
};
|
||||
|
||||
pool_post.data = Data::from(&pool_post_definition);
|
||||
let pool_post: AccountPostState = if pool.account == Account::default() {
|
||||
AccountPostState::new_claimed(pool_post.clone())
|
||||
} else {
|
||||
AccountPostState::new(pool_post.clone())
|
||||
};
|
||||
|
||||
let token_program_id = user_holding_a.account.program_owner;
|
||||
|
||||
// Chain call for Token A (user_holding_a -> Vault_A)
|
||||
let call_token_a = ChainedCall::new(
|
||||
token_program_id,
|
||||
vec![user_holding_a.clone(), vault_a.clone()],
|
||||
&token_core::Instruction::Transfer {
|
||||
amount_to_transfer: token_a_amount.into(),
|
||||
},
|
||||
);
|
||||
// Chain call for Token B (user_holding_b -> Vault_B)
|
||||
let call_token_b = ChainedCall::new(
|
||||
token_program_id,
|
||||
vec![user_holding_b.clone(), vault_b.clone()],
|
||||
&token_core::Instruction::Transfer {
|
||||
amount_to_transfer: token_b_amount.into(),
|
||||
},
|
||||
);
|
||||
|
||||
// Chain call for liquidity token (TokenLP definition -> User LP Holding)
|
||||
let instruction = if pool.account == Account::default() {
|
||||
token_core::Instruction::NewFungibleDefinition {
|
||||
name: String::from("LP Token"),
|
||||
total_supply: token_a_amount.into(),
|
||||
}
|
||||
} else {
|
||||
token_core::Instruction::Mint {
|
||||
amount_to_mint: token_a_amount.into(),
|
||||
}
|
||||
};
|
||||
|
||||
let mut pool_lp_auth = pool_definition_lp.clone();
|
||||
pool_lp_auth.is_authorized = true;
|
||||
|
||||
let call_token_lp = ChainedCall::new(
|
||||
token_program_id,
|
||||
vec![pool_lp_auth.clone(), user_holding_lp.clone()],
|
||||
&instruction,
|
||||
)
|
||||
.with_pda_seeds(vec![compute_liquidity_token_pda_seed(pool.account_id)]);
|
||||
|
||||
let chained_calls = vec![call_token_lp, call_token_b, call_token_a];
|
||||
|
||||
let post_states = vec![
|
||||
pool_post.clone(),
|
||||
AccountPostState::new(vault_a.account.clone()),
|
||||
AccountPostState::new(vault_b.account.clone()),
|
||||
AccountPostState::new(pool_definition_lp.account.clone()),
|
||||
AccountPostState::new(user_holding_a.account.clone()),
|
||||
AccountPostState::new(user_holding_b.account.clone()),
|
||||
AccountPostState::new(user_holding_lp.account.clone()),
|
||||
];
|
||||
|
||||
(post_states.clone(), chained_calls)
|
||||
}
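One detail of new_definition above worth calling out: on a fresh pool the LP holding is created through a chained NewFungibleDefinition call whose total_supply equals the Token A deposit, while re-initialising an inactive pool reuses the existing LP definition and mints instead; either way liquidity_pool_supply starts at token_a_amount. A small sketch of that branch, with illustrative amounts:

// Mirrors the LP-issuance branch in new_definition; `pool_is_fresh` and the amount are hypothetical.
let pool_is_fresh = true; // i.e. the pool account was still Account::default()
let token_a_amount = 1_000u128;
let lp_instruction = if pool_is_fresh {
    token_core::Instruction::NewFungibleDefinition {
        name: String::from("LP Token"),
        total_supply: token_a_amount,
    }
} else {
    token_core::Instruction::Mint { amount_to_mint: token_a_amount }
};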
166 programs/amm/src/remove.rs Normal file
@ -0,0 +1,166 @@
|
||||
use std::num::NonZeroU128;
|
||||
|
||||
use amm_core::{PoolDefinition, compute_liquidity_token_pda_seed, compute_vault_pda_seed};
|
||||
use nssa_core::{
|
||||
account::{AccountWithMetadata, Data},
|
||||
program::{AccountPostState, ChainedCall},
|
||||
};
|
||||
|
||||
#[expect(clippy::too_many_arguments, reason = "TODO: Fix later")]
|
||||
pub fn remove_liquidity(
|
||||
pool: AccountWithMetadata,
|
||||
vault_a: AccountWithMetadata,
|
||||
vault_b: AccountWithMetadata,
|
||||
pool_definition_lp: AccountWithMetadata,
|
||||
user_holding_a: AccountWithMetadata,
|
||||
user_holding_b: AccountWithMetadata,
|
||||
user_holding_lp: AccountWithMetadata,
|
||||
remove_liquidity_amount: NonZeroU128,
|
||||
min_amount_to_remove_token_a: u128,
|
||||
min_amount_to_remove_token_b: u128,
|
||||
) -> (Vec<AccountPostState>, Vec<ChainedCall>) {
|
||||
let remove_liquidity_amount: u128 = remove_liquidity_amount.into();
|
||||
|
||||
// 1. Fetch Pool state
|
||||
let pool_def_data = PoolDefinition::try_from(&pool.account.data)
|
||||
.expect("Remove liquidity: AMM Program expects a valid Pool Definition Account");
|
||||
|
||||
assert!(pool_def_data.active, "Pool is inactive");
|
||||
assert_eq!(
|
||||
pool_def_data.liquidity_pool_id, pool_definition_lp.account_id,
|
||||
"LP definition mismatch"
|
||||
);
|
||||
assert_eq!(
|
||||
vault_a.account_id, pool_def_data.vault_a_id,
|
||||
"Vault A was not provided"
|
||||
);
|
||||
assert_eq!(
|
||||
vault_b.account_id, pool_def_data.vault_b_id,
|
||||
"Vault B was not provided"
|
||||
);
|
||||
|
||||
// Vault addresses do not need to be checked with PDA
|
||||
// calculation for setting authorization since stored
|
||||
// in the Pool Definition.
|
||||
let mut running_vault_a = vault_a.clone();
|
||||
let mut running_vault_b = vault_b.clone();
|
||||
running_vault_a.is_authorized = true;
|
||||
running_vault_b.is_authorized = true;
|
||||
|
||||
assert!(
|
||||
min_amount_to_remove_token_a != 0,
|
||||
"Minimum withdraw amount must be nonzero"
|
||||
);
|
||||
assert!(
|
||||
min_amount_to_remove_token_b != 0,
|
||||
"Minimum withdraw amount must be nonzero"
|
||||
);
|
||||
|
||||
// 2. Compute withdrawal amounts
|
||||
let user_holding_lp_data = token_core::TokenHolding::try_from(&user_holding_lp.account.data)
|
||||
.expect("Remove liquidity: AMM Program expects a valid Token Account for liquidity token");
|
||||
let token_core::TokenHolding::Fungible {
|
||||
definition_id: _,
|
||||
balance: user_lp_balance,
|
||||
} = user_holding_lp_data
|
||||
else {
|
||||
panic!(
|
||||
"Remove liquidity: AMM Program expects a valid Fungible Token Holding Account for liquidity token"
|
||||
);
|
||||
};
|
||||
|
||||
assert!(
|
||||
user_lp_balance <= pool_def_data.liquidity_pool_supply,
|
||||
"Invalid liquidity account provided"
|
||||
);
|
||||
assert_eq!(
|
||||
user_holding_lp_data.definition_id(),
|
||||
pool_def_data.liquidity_pool_id,
|
||||
"Invalid liquidity account provided"
|
||||
);
|
||||
|
||||
let withdraw_amount_a =
|
||||
(pool_def_data.reserve_a * remove_liquidity_amount) / pool_def_data.liquidity_pool_supply;
|
||||
let withdraw_amount_b =
|
||||
(pool_def_data.reserve_b * remove_liquidity_amount) / pool_def_data.liquidity_pool_supply;
|
||||
|
||||
// 3. Validate and slippage check
|
||||
assert!(
|
||||
withdraw_amount_a >= min_amount_to_remove_token_a,
|
||||
"Insufficient minimal withdraw amount (Token A) provided for liquidity amount"
|
||||
);
|
||||
assert!(
|
||||
withdraw_amount_b >= min_amount_to_remove_token_b,
|
||||
"Insufficient minimal withdraw amount (Token B) provided for liquidity amount"
|
||||
);
|
||||
|
||||
// 4. Calculate LP to reduce cap by
|
||||
let delta_lp: u128 = (pool_def_data.liquidity_pool_supply * remove_liquidity_amount)
|
||||
/ pool_def_data.liquidity_pool_supply;
|
||||
|
||||
let active: bool = pool_def_data.liquidity_pool_supply - delta_lp != 0;
|
||||
|
||||
// 5. Update pool account
|
||||
let mut pool_post = pool.account.clone();
|
||||
let pool_post_definition = PoolDefinition {
|
||||
liquidity_pool_supply: pool_def_data.liquidity_pool_supply - delta_lp,
|
||||
reserve_a: pool_def_data.reserve_a - withdraw_amount_a,
|
||||
reserve_b: pool_def_data.reserve_b - withdraw_amount_b,
|
||||
active,
|
||||
..pool_def_data.clone()
|
||||
};
|
||||
|
||||
pool_post.data = Data::from(&pool_post_definition);
|
||||
|
||||
let token_program_id = user_holding_a.account.program_owner;
|
||||
|
||||
// Chaincall for Token A withdraw
|
||||
let call_token_a = ChainedCall::new(
|
||||
token_program_id,
|
||||
vec![running_vault_a, user_holding_a.clone()],
|
||||
&token_core::Instruction::Transfer {
|
||||
amount_to_transfer: withdraw_amount_a,
|
||||
},
|
||||
)
|
||||
.with_pda_seeds(vec![compute_vault_pda_seed(
|
||||
pool.account_id,
|
||||
pool_def_data.definition_token_a_id,
|
||||
)]);
|
||||
// Chaincall for Token B withdraw
|
||||
let call_token_b = ChainedCall::new(
|
||||
token_program_id,
|
||||
vec![running_vault_b, user_holding_b.clone()],
|
||||
&token_core::Instruction::Transfer {
|
||||
amount_to_transfer: withdraw_amount_b,
|
||||
},
|
||||
)
|
||||
.with_pda_seeds(vec![compute_vault_pda_seed(
|
||||
pool.account_id,
|
||||
pool_def_data.definition_token_b_id,
|
||||
)]);
|
||||
// Chaincall for LP adjustment
|
||||
let mut pool_definition_lp_auth = pool_definition_lp.clone();
|
||||
pool_definition_lp_auth.is_authorized = true;
|
||||
let call_token_lp = ChainedCall::new(
|
||||
token_program_id,
|
||||
vec![pool_definition_lp_auth, user_holding_lp.clone()],
|
||||
&token_core::Instruction::Burn {
|
||||
amount_to_burn: delta_lp,
|
||||
},
|
||||
)
|
||||
.with_pda_seeds(vec![compute_liquidity_token_pda_seed(pool.account_id)]);
|
||||
|
||||
let chained_calls = vec![call_token_lp, call_token_b, call_token_a];
|
||||
|
||||
let post_states = vec![
|
||||
AccountPostState::new(pool_post.clone()),
|
||||
AccountPostState::new(vault_a.account.clone()),
|
||||
AccountPostState::new(vault_b.account.clone()),
|
||||
AccountPostState::new(pool_definition_lp.account.clone()),
|
||||
AccountPostState::new(user_holding_a.account.clone()),
|
||||
AccountPostState::new(user_holding_b.account.clone()),
|
||||
AccountPostState::new(user_holding_lp.account.clone()),
|
||||
];
|
||||
|
||||
(post_states, chained_calls)
|
||||
}
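The withdrawal amounts in remove_liquidity above are pro-rata shares of the reserves, and, barring overflow, delta_lp simplifies to the requested remove_liquidity_amount since supply * L / supply == L. A worked example with hypothetical numbers:

#[test]
fn remove_liquidity_arithmetic_example() {
    let (reserve_a, reserve_b, lp_supply) = (1_000u128, 4_000u128, 100u128);
    let remove_liquidity_amount = 25u128;
    let withdraw_a = reserve_a * remove_liquidity_amount / lp_supply; // 250
    let withdraw_b = reserve_b * remove_liquidity_amount / lp_supply; // 1_000
    let delta_lp = lp_supply * remove_liquidity_amount / lp_supply; // 25, i.e. the requested amount
    assert_eq!((withdraw_a, withdraw_b, delta_lp), (250, 1_000, 25));
    assert!(lp_supply - delta_lp != 0); // 75 LP remain outstanding, so the pool stays active
}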
176 programs/amm/src/swap.rs Normal file
@ -0,0 +1,176 @@
|
||||
pub use amm_core::{PoolDefinition, compute_liquidity_token_pda_seed, compute_vault_pda_seed};
|
||||
use nssa_core::{
|
||||
account::{AccountId, AccountWithMetadata, Data},
|
||||
program::{AccountPostState, ChainedCall},
|
||||
};
|
||||
|
||||
#[expect(clippy::too_many_arguments, reason = "TODO: Fix later")]
|
||||
pub fn swap(
|
||||
pool: AccountWithMetadata,
|
||||
vault_a: AccountWithMetadata,
|
||||
vault_b: AccountWithMetadata,
|
||||
user_holding_a: AccountWithMetadata,
|
||||
user_holding_b: AccountWithMetadata,
|
||||
swap_amount_in: u128,
|
||||
min_amount_out: u128,
|
||||
token_in_id: AccountId,
|
||||
) -> (Vec<AccountPostState>, Vec<ChainedCall>) {
|
||||
// Verify vaults are in fact vaults
|
||||
let pool_def_data = PoolDefinition::try_from(&pool.account.data)
|
||||
.expect("Swap: AMM Program expects a valid Pool Definition Account");
|
||||
|
||||
assert!(pool_def_data.active, "Pool is inactive");
|
||||
assert_eq!(
|
||||
vault_a.account_id, pool_def_data.vault_a_id,
|
||||
"Vault A was not provided"
|
||||
);
|
||||
assert_eq!(
|
||||
vault_b.account_id, pool_def_data.vault_b_id,
|
||||
"Vault B was not provided"
|
||||
);
|
||||
|
||||
// fetch pool reserves
|
||||
// validates reserves is at least the vaults' balances
|
||||
let vault_a_token_holding = token_core::TokenHolding::try_from(&vault_a.account.data)
|
||||
.expect("Swap: AMM Program expects a valid Token Holding Account for Vault A");
|
||||
let token_core::TokenHolding::Fungible {
|
||||
definition_id: _,
|
||||
balance: vault_a_balance,
|
||||
} = vault_a_token_holding
|
||||
else {
|
||||
panic!("Swap: AMM Program expects a valid Fungible Token Holding Account for Vault A");
|
||||
};
|
||||
|
||||
assert!(
|
||||
vault_a_balance >= pool_def_data.reserve_a,
|
||||
"Reserve for Token A exceeds vault balance"
|
||||
);
|
||||
|
||||
let vault_b_token_holding = token_core::TokenHolding::try_from(&vault_b.account.data)
|
||||
.expect("Swap: AMM Program expects a valid Token Holding Account for Vault B");
|
||||
let token_core::TokenHolding::Fungible {
|
||||
definition_id: _,
|
||||
balance: vault_b_balance,
|
||||
} = vault_b_token_holding
|
||||
else {
|
||||
panic!("Swap: AMM Program expects a valid Fungible Token Holding Account for Vault B");
|
||||
};
|
||||
|
||||
assert!(
|
||||
vault_b_balance >= pool_def_data.reserve_b,
|
||||
"Reserve for Token B exceeds vault balance"
|
||||
);
|
||||
|
||||
let (chained_calls, [deposit_a, withdraw_a], [deposit_b, withdraw_b]) =
|
||||
if token_in_id == pool_def_data.definition_token_a_id {
|
||||
let (chained_calls, deposit_a, withdraw_b) = swap_logic(
|
||||
user_holding_a.clone(),
|
||||
vault_a.clone(),
|
||||
vault_b.clone(),
|
||||
user_holding_b.clone(),
|
||||
swap_amount_in,
|
||||
min_amount_out,
|
||||
pool_def_data.reserve_a,
|
||||
pool_def_data.reserve_b,
|
||||
pool.account_id,
|
||||
);
|
||||
|
||||
(chained_calls, [deposit_a, 0], [0, withdraw_b])
|
||||
} else if token_in_id == pool_def_data.definition_token_b_id {
|
||||
let (chained_calls, deposit_b, withdraw_a) = swap_logic(
|
||||
user_holding_b.clone(),
|
||||
vault_b.clone(),
|
||||
vault_a.clone(),
|
||||
user_holding_a.clone(),
|
||||
swap_amount_in,
|
||||
min_amount_out,
|
||||
pool_def_data.reserve_b,
|
||||
pool_def_data.reserve_a,
|
||||
pool.account_id,
|
||||
);
|
||||
|
||||
(chained_calls, [0, withdraw_a], [deposit_b, 0])
|
||||
} else {
|
||||
panic!("AccountId is not a token type for the pool");
|
||||
};
|
||||
|
||||
// Update pool account
|
||||
let mut pool_post = pool.account.clone();
|
||||
let pool_post_definition = PoolDefinition {
|
||||
reserve_a: pool_def_data.reserve_a + deposit_a - withdraw_a,
|
||||
reserve_b: pool_def_data.reserve_b + deposit_b - withdraw_b,
|
||||
..pool_def_data
|
||||
};
|
||||
|
||||
pool_post.data = Data::from(&pool_post_definition);
|
||||
|
||||
let post_states = vec![
|
||||
AccountPostState::new(pool_post.clone()),
|
||||
AccountPostState::new(vault_a.account.clone()),
|
||||
AccountPostState::new(vault_b.account.clone()),
|
||||
AccountPostState::new(user_holding_a.account.clone()),
|
||||
AccountPostState::new(user_holding_b.account.clone()),
|
||||
];
|
||||
|
||||
(post_states, chained_calls)
|
||||
}
|
||||
|
||||
#[expect(clippy::too_many_arguments, reason = "TODO: Fix later")]
|
||||
fn swap_logic(
|
||||
user_deposit: AccountWithMetadata,
|
||||
vault_deposit: AccountWithMetadata,
|
||||
vault_withdraw: AccountWithMetadata,
|
||||
user_withdraw: AccountWithMetadata,
|
||||
swap_amount_in: u128,
|
||||
min_amount_out: u128,
|
||||
reserve_deposit_vault_amount: u128,
|
||||
reserve_withdraw_vault_amount: u128,
|
||||
pool_id: AccountId,
|
||||
) -> (Vec<ChainedCall>, u128, u128) {
|
||||
// Compute withdraw amount
|
||||
// Maintains pool constant product
|
||||
// k = pool_def_data.reserve_a * pool_def_data.reserve_b;
|
||||
let withdraw_amount = (reserve_withdraw_vault_amount * swap_amount_in)
|
||||
/ (reserve_deposit_vault_amount + swap_amount_in);
|
||||
|
||||
// Slippage check
|
||||
assert!(
|
||||
min_amount_out <= withdraw_amount,
|
||||
"Withdraw amount is less than minimal amount out"
|
||||
);
|
||||
assert!(withdraw_amount != 0, "Withdraw amount should be nonzero");
|
||||
|
||||
let token_program_id = user_deposit.account.program_owner;
|
||||
|
||||
let mut chained_calls = Vec::new();
|
||||
chained_calls.push(ChainedCall::new(
|
||||
token_program_id,
|
||||
vec![user_deposit, vault_deposit],
|
||||
&token_core::Instruction::Transfer {
|
||||
amount_to_transfer: swap_amount_in,
|
||||
},
|
||||
));
|
||||
|
||||
let mut vault_withdraw = vault_withdraw.clone();
|
||||
vault_withdraw.is_authorized = true;
|
||||
|
||||
let pda_seed = compute_vault_pda_seed(
|
||||
pool_id,
|
||||
token_core::TokenHolding::try_from(&vault_withdraw.account.data)
|
||||
.expect("Swap Logic: AMM Program expects valid token data")
|
||||
.definition_id(),
|
||||
);
|
||||
|
||||
chained_calls.push(
|
||||
ChainedCall::new(
|
||||
token_program_id,
|
||||
vec![vault_withdraw, user_withdraw],
|
||||
&token_core::Instruction::Transfer {
|
||||
amount_to_transfer: withdraw_amount,
|
||||
},
|
||||
)
|
||||
.with_pda_seeds(vec![pda_seed]),
|
||||
);
|
||||
|
||||
(chained_calls, swap_amount_in, withdraw_amount)
|
||||
}
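The withdraw formula in swap_logic above is the standard constant-product rule: with input reserve R_in, output reserve R_out and deposit dx, the pool pays out dy = R_out * dx / (R_in + dx), which keeps R_in * R_out from decreasing (up to integer rounding). A worked example with hypothetical reserves:

#[test]
fn constant_product_swap_example() {
    // Hypothetical reserves; k = reserve_in * reserve_out = 4_000_000.
    let (reserve_in, reserve_out) = (1_000u128, 4_000u128);
    let swap_amount_in = 250u128;
    let withdraw = reserve_out * swap_amount_in / (reserve_in + swap_amount_in);
    assert_eq!(withdraw, 800);
    // Post-swap reserves are (1_250, 3_200) and 1_250 * 3_200 = 4_000_000, so the
    // constant product is preserved exactly here; in general, flooring the payout
    // can only leave k unchanged or slightly larger.
}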
1719 programs/amm/src/tests.rs Normal file
File diff suppressed because it is too large
@ -1,3 +1,5 @@
# Should be kept in sync with Dockerfiles

[toolchain]
channel = "1.91.1"
profile = "default"

@ -10,6 +10,7 @@ nssa_core.workspace = true
common.workspace = true
storage.workspace = true
mempool.workspace = true
bedrock_client.workspace = true

base58.workspace = true
anyhow.workspace = true
@ -19,16 +20,18 @@ tempfile.workspace = true
chrono.workspace = true
log.workspace = true
tokio = { workspace = true, features = ["rt-multi-thread", "macros"] }
bedrock_client.workspace = true
logos-blockchain-key-management-system-service.workspace = true
logos-blockchain-core.workspace = true
rand.workspace = true
reqwest.workspace = true
borsh.workspace = true
url.workspace = true
jsonrpsee = { workspace = true, features = ["ws-client"] }

[features]
default = []
testnet = []
# Generate mock external clients implementations for testing
mock = []

[dev-dependencies]
futures.workspace = true

@ -1,51 +1,39 @@
|
||||
use std::{fs, path::Path, str::FromStr};
|
||||
|
||||
use anyhow::{Context, Result, anyhow};
|
||||
use anyhow::{Context, Result};
|
||||
use bedrock_client::BedrockClient;
|
||||
use common::block::Block;
|
||||
pub use common::block::Block;
|
||||
pub use logos_blockchain_core::mantle::{MantleTx, SignedMantleTx, ops::channel::MsgId};
|
||||
use logos_blockchain_core::mantle::{
|
||||
MantleTx, Op, OpProof, SignedMantleTx, Transaction, TxHash, ledger,
|
||||
ops::channel::{ChannelId, MsgId, inscribe::InscriptionOp},
|
||||
Op, OpProof, Transaction, TxHash, ledger,
|
||||
ops::channel::{ChannelId, inscribe::InscriptionOp},
|
||||
};
|
||||
use logos_blockchain_key_management_system_service::keys::{
|
||||
ED25519_SECRET_KEY_SIZE, Ed25519Key, Ed25519PublicKey,
|
||||
};
|
||||
use reqwest::Url;
|
||||
pub use logos_blockchain_key_management_system_service::keys::Ed25519Key;
|
||||
use logos_blockchain_key_management_system_service::keys::Ed25519PublicKey;
|
||||
|
||||
use crate::config::BedrockConfig;
|
||||
|
||||
/// A component that posts block data to logos blockchain
|
||||
#[derive(Clone)]
|
||||
pub struct BlockSettlementClient {
|
||||
bedrock_client: BedrockClient,
|
||||
bedrock_signing_key: Ed25519Key,
|
||||
bedrock_channel_id: ChannelId,
|
||||
}
|
||||
#[expect(async_fn_in_trait, reason = "We don't care about Send/Sync here")]
|
||||
pub trait BlockSettlementClientTrait: Clone {
|
||||
/// Create a new client.
|
||||
fn new(config: &BedrockConfig, bedrock_signing_key: Ed25519Key) -> Result<Self>;
|
||||
|
||||
impl BlockSettlementClient {
|
||||
pub fn try_new(home: &Path, config: &BedrockConfig) -> Result<Self> {
|
||||
let bedrock_signing_key = load_or_create_signing_key(&home.join("bedrock_signing_key"))
|
||||
.context("Failed to load or create signing key")?;
|
||||
let bedrock_url = Url::from_str(config.node_url.as_ref())
|
||||
.context("Bedrock node address is not a valid url")?;
|
||||
let bedrock_client =
|
||||
BedrockClient::new(None, bedrock_url).context("Failed to initialize bedrock client")?;
|
||||
Ok(Self {
|
||||
bedrock_client,
|
||||
bedrock_signing_key,
|
||||
bedrock_channel_id: config.channel_id,
|
||||
})
|
||||
}
|
||||
/// Get the bedrock channel ID used by this client.
|
||||
fn bedrock_channel_id(&self) -> ChannelId;
|
||||
|
||||
/// Create and sign a transaction for inscribing data
|
||||
pub fn create_inscribe_tx(&self, block: &Block) -> Result<(SignedMantleTx, MsgId)> {
|
||||
/// Get the bedrock signing key used by this client.
|
||||
fn bedrock_signing_key(&self) -> &Ed25519Key;
|
||||
|
||||
/// Post a transaction to the node.
|
||||
async fn submit_block_to_bedrock(&self, block: &Block) -> Result<MsgId>;
|
||||
|
||||
/// Create and sign a transaction for inscribing data.
|
||||
fn create_inscribe_tx(&self, block: &Block) -> Result<(SignedMantleTx, MsgId)> {
|
||||
let inscription_data = borsh::to_vec(block)?;
|
||||
let verifying_key_bytes = self.bedrock_signing_key.public_key().to_bytes();
|
||||
let verifying_key_bytes = self.bedrock_signing_key().public_key().to_bytes();
|
||||
let verifying_key =
|
||||
Ed25519PublicKey::from_bytes(&verifying_key_bytes).expect("valid ed25519 public key");
|
||||
|
||||
let inscribe_op = InscriptionOp {
|
||||
channel_id: self.bedrock_channel_id,
|
||||
channel_id: self.bedrock_channel_id(),
|
||||
inscription: inscription_data,
|
||||
parent: block.bedrock_parent_id.into(),
|
||||
signer: verifying_key,
|
||||
@ -64,7 +52,7 @@ impl BlockSettlementClient {
|
||||
|
||||
let tx_hash = inscribe_tx.hash();
|
||||
let signature_bytes = self
|
||||
.bedrock_signing_key
|
||||
.bedrock_signing_key()
|
||||
.sign_payload(tx_hash.as_signing_bytes().as_ref())
|
||||
.to_bytes();
|
||||
let signature =
|
||||
@ -79,31 +67,46 @@ impl BlockSettlementClient {
|
||||
};
|
||||
Ok((signed_mantle_tx, inscribe_op_id))
|
||||
}
|
||||
}
|
||||
|
||||
/// Post a transaction to the node
|
||||
pub async fn submit_block_to_bedrock(&self, block: &Block) -> Result<MsgId> {
|
||||
/// A component that posts block data to logos blockchain
|
||||
#[derive(Clone)]
|
||||
pub struct BlockSettlementClient {
|
||||
bedrock_client: BedrockClient,
|
||||
bedrock_signing_key: Ed25519Key,
|
||||
bedrock_channel_id: ChannelId,
|
||||
}
|
||||
|
||||
impl BlockSettlementClientTrait for BlockSettlementClient {
|
||||
fn new(config: &BedrockConfig, bedrock_signing_key: Ed25519Key) -> Result<Self> {
|
||||
let bedrock_client =
|
||||
BedrockClient::new(config.backoff, config.node_url.clone(), config.auth.clone())
|
||||
.context("Failed to initialize bedrock client")?;
|
||||
Ok(Self {
|
||||
bedrock_client,
|
||||
bedrock_signing_key,
|
||||
bedrock_channel_id: config.channel_id,
|
||||
})
|
||||
}
|
||||
|
||||
async fn submit_block_to_bedrock(&self, block: &Block) -> Result<MsgId> {
|
||||
let (tx, new_msg_id) = self.create_inscribe_tx(block)?;
|
||||
|
||||
// Post the transaction
|
||||
self.bedrock_client.post_transaction(tx).await?;
|
||||
self.bedrock_client
|
||||
.post_transaction(tx)
|
||||
.await
|
||||
.context("Failed to post transaction to Bedrock")?;
|
||||
|
||||
Ok(new_msg_id)
|
||||
}
|
||||
}
|
||||
|
||||
/// Load signing key from file or generate a new one if it doesn't exist
|
||||
fn load_or_create_signing_key(path: &Path) -> Result<Ed25519Key> {
|
||||
if path.exists() {
|
||||
let key_bytes = fs::read(path)?;
|
||||
let key_array: [u8; ED25519_SECRET_KEY_SIZE] = key_bytes
|
||||
.try_into()
|
||||
.map_err(|_| anyhow!("Found key with incorrect length"))?;
|
||||
Ok(Ed25519Key::from_bytes(&key_array))
|
||||
} else {
|
||||
let mut key_bytes = [0u8; ED25519_SECRET_KEY_SIZE];
|
||||
rand::RngCore::fill_bytes(&mut rand::thread_rng(), &mut key_bytes);
|
||||
fs::write(path, key_bytes)?;
|
||||
Ok(Ed25519Key::from_bytes(&key_bytes))
|
||||
fn bedrock_channel_id(&self) -> ChannelId {
|
||||
self.bedrock_channel_id
|
||||
}
|
||||
|
||||
fn bedrock_signing_key(&self) -> &Ed25519Key {
|
||||
&self.bedrock_signing_key
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -1,7 +1,7 @@
use std::{collections::HashMap, path::Path};

use anyhow::Result;
use common::{HashType, block::Block, transaction::EncodedTransaction};
use common::{HashType, block::Block, transaction::NSSATransaction};
use nssa::V02State;
use storage::RocksDBIO;

@ -20,7 +20,7 @@ impl SequencerStore {
/// ATTENTION: Will overwrite genesis block.
pub fn open_db_with_genesis(
location: &Path,
genesis_block: Option<Block>,
genesis_block: Option<&Block>,
signing_key: nssa::PrivateKey,
) -> Result<Self> {
let tx_hash_to_block_map = if let Some(block) = &genesis_block {
@ -55,7 +55,7 @@ impl SequencerStore {
}

/// Returns the transaction corresponding to the given hash, if it exists in the blockchain.
pub fn get_transaction_by_hash(&self, hash: HashType) -> Option<EncodedTransaction> {
pub fn get_transaction_by_hash(&self, hash: HashType) -> Option<NSSATransaction> {
let block_id = self.tx_hash_to_block_map.get(&hash);
let block = block_id.map(|&id| self.get_block_at_id(id));
if let Some(Ok(block)) = block {
@ -68,7 +68,7 @@ impl SequencerStore {
None
}

pub fn insert(&mut self, tx: &EncodedTransaction, block_id: u64) {
pub fn insert(&mut self, tx: &NSSATransaction, block_id: u64) {
self.tx_hash_to_block_map.insert(tx.hash(), block_id);
}

@ -84,8 +84,8 @@ impl SequencerStore {
self.dbio.get_all_blocks().map(|res| Ok(res?))
}

pub(crate) fn update(&mut self, block: Block, state: &V02State) -> Result<()> {
let new_transactions_map = block_to_transactions_map(&block);
pub(crate) fn update(&mut self, block: &Block, state: &V02State) -> Result<()> {
let new_transactions_map = block_to_transactions_map(block);
self.dbio.atomic_update(block, state)?;
self.tx_hash_to_block_map.extend(new_transactions_map);
Ok(())
@ -121,7 +121,7 @@ mod tests {

let genesis_block_hashable_data = HashableBlockData {
block_id: 0,
prev_block_hash: [0; 32],
prev_block_hash: HashType([0; 32]),
timestamp: 0,
transactions: vec![],
};
@ -129,7 +129,7 @@ mod tests {
let genesis_block = genesis_block_hashable_data.into_pending_block(&signing_key, [0; 32]);
// Start an empty node store
let mut node_store =
SequencerStore::open_db_with_genesis(path, Some(genesis_block), signing_key).unwrap();
SequencerStore::open_db_with_genesis(path, Some(&genesis_block), signing_key).unwrap();

let tx = common::test_utils::produce_dummy_empty_transaction();
let block = common::test_utils::produce_dummy_block(1, None, vec![tx.clone()]);
@ -139,7 +139,7 @@ mod tests {
assert_eq!(None, retrieved_tx);
// Add the block with the transaction
let dummy_state = V02State::new_with_genesis_accounts(&[], &[]);
node_store.update(block, &dummy_state).unwrap();
node_store.update(&block, &dummy_state).unwrap();
// Try again
let retrieved_tx = node_store.get_transaction_by_hash(tx.hash());
assert_eq!(Some(tx), retrieved_tx);

@ -5,15 +5,17 @@ use std::{
};

use anyhow::Result;
use common::sequencer_client::BasicAuth;
pub use bedrock_client::BackoffConfig;
use common::config::BasicAuth;
use logos_blockchain_core::mantle::ops::channel::ChannelId;
use nssa::AccountId;
use serde::{Deserialize, Serialize};
use url::Url;

#[derive(Debug, Serialize, Deserialize, Clone)]
/// Helper struct for account serialization
pub struct AccountInitialData {
/// Hex encoded account id
pub account_id: String,
pub account_id: AccountId,
pub balance: u128,
}

@ -52,15 +54,20 @@ pub struct SequencerConfig {
/// Sequencer own signing key
pub signing_key: [u8; 32],
/// Bedrock configuration options
pub bedrock_config: Option<BedrockConfig>,
pub bedrock_config: BedrockConfig,
/// Indexer RPC URL
pub indexer_rpc_url: Url,
}

#[derive(Clone, Serialize, Deserialize)]
pub struct BedrockConfig {
/// Fibonacci backoff retry strategy configuration
#[serde(default)]
pub backoff: BackoffConfig,
/// Bedrock channel ID
pub channel_id: ChannelId,
/// Bedrock URL
pub node_url: String,
pub node_url: Url,
/// Bedrock auth
pub auth: Option<BasicAuth>,
}

34 sequencer_core/src/indexer_client.rs Normal file
@ -0,0 +1,34 @@
use std::{ops::Deref, sync::Arc};

use anyhow::{Context as _, Result};
use log::info;
pub use url::Url;

#[expect(async_fn_in_trait, reason = "We don't care about Send/Sync here")]
pub trait IndexerClientTrait: Clone {
    async fn new(indexer_url: &Url) -> Result<Self>;
}

#[derive(Clone)]
pub struct IndexerClient(Arc<jsonrpsee::ws_client::WsClient>);

impl IndexerClientTrait for IndexerClient {
    async fn new(indexer_url: &Url) -> Result<Self> {
        info!("Connecting to Indexer at {indexer_url}");

        let client = jsonrpsee::ws_client::WsClientBuilder::default()
            .build(indexer_url)
            .await
            .context("Failed to create websocket client")?;

        Ok(Self(Arc::new(client)))
    }
}

impl Deref for IndexerClient {
    type Target = jsonrpsee::ws_client::WsClient;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
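A minimal sketch of constructing the client defined above; the module path assumes the crate is named sequencer_core after its directory, and the endpoint URL is purely illustrative (the real one comes from SequencerConfig::indexer_rpc_url). Deref then exposes the underlying jsonrpsee WsClient for RPC calls:

use sequencer_core::indexer_client::{IndexerClient, IndexerClientTrait, Url};

async fn connect_indexer() -> anyhow::Result<IndexerClient> {
    // Hypothetical endpoint for local testing only.
    let url = Url::parse("ws://127.0.0.1:8545")?;
    IndexerClient::new(&url).await
}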
@ -1,31 +1,43 @@
|
||||
use std::{fmt::Display, time::Instant};
|
||||
use std::{fmt::Display, path::Path, time::Instant};
|
||||
|
||||
use anyhow::Result;
|
||||
use anyhow::{Result, anyhow};
|
||||
#[cfg(feature = "testnet")]
|
||||
use common::PINATA_BASE58;
|
||||
use common::{
|
||||
HashType,
|
||||
block::{BedrockStatus, Block, HashableBlockData, MantleMsgId},
|
||||
transaction::{EncodedTransaction, NSSATransaction},
|
||||
transaction::NSSATransaction,
|
||||
};
|
||||
use config::SequencerConfig;
|
||||
use log::{info, warn};
|
||||
use log::{error, info, warn};
|
||||
use logos_blockchain_key_management_system_service::keys::{ED25519_SECRET_KEY_SIZE, Ed25519Key};
|
||||
use mempool::{MemPool, MemPoolHandle};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::{block_settlement_client::BlockSettlementClient, block_store::SequencerStore};
|
||||
use crate::{
|
||||
block_settlement_client::{BlockSettlementClient, BlockSettlementClientTrait},
|
||||
block_store::SequencerStore,
|
||||
indexer_client::{IndexerClient, IndexerClientTrait},
|
||||
};
|
||||
|
||||
mod block_settlement_client;
|
||||
pub mod block_settlement_client;
|
||||
pub mod block_store;
|
||||
pub mod config;
|
||||
pub mod indexer_client;
|
||||
#[cfg(feature = "mock")]
|
||||
pub mod mock;
|
||||
|
||||
pub struct SequencerCore {
|
||||
pub struct SequencerCore<
|
||||
BC: BlockSettlementClientTrait = BlockSettlementClient,
|
||||
IC: IndexerClientTrait = IndexerClient,
|
||||
> {
|
||||
state: nssa::V02State,
|
||||
store: SequencerStore,
|
||||
mempool: MemPool<EncodedTransaction>,
|
||||
mempool: MemPool<NSSATransaction>,
|
||||
sequencer_config: SequencerConfig,
|
||||
chain_height: u64,
|
||||
block_settlement_client: Option<BlockSettlementClient>,
|
||||
block_settlement_client: BC,
|
||||
indexer_client: IC,
|
||||
last_bedrock_msg_id: MantleMsgId,
|
||||
}
|
||||
|
||||
@ -43,33 +55,36 @@ impl Display for TransactionMalformationError {
|
||||
|
||||
impl std::error::Error for TransactionMalformationError {}
|
||||
|
||||
impl SequencerCore {
|
||||
impl<BC: BlockSettlementClientTrait, IC: IndexerClientTrait> SequencerCore<BC, IC> {
|
||||
/// Starts the sequencer using the provided configuration.
|
||||
/// If an existing database is found, the sequencer state is loaded from it and
|
||||
/// assumed to represent the correct latest state consistent with Bedrock-finalized data.
|
||||
/// If no database is found, the sequencer performs a fresh start from genesis,
|
||||
/// initializing its state with the accounts defined in the configuration file.
|
||||
pub fn start_from_config(config: SequencerConfig) -> (Self, MemPoolHandle<EncodedTransaction>) {
|
||||
pub async fn start_from_config(
|
||||
config: SequencerConfig,
|
||||
) -> (Self, MemPoolHandle<NSSATransaction>) {
|
||||
let hashable_data = HashableBlockData {
|
||||
block_id: config.genesis_id,
|
||||
transactions: vec![],
|
||||
prev_block_hash: [0; 32],
|
||||
prev_block_hash: HashType([0; 32]),
|
||||
timestamp: 0,
|
||||
};
|
||||
|
||||
let signing_key = nssa::PrivateKey::try_new(config.signing_key).unwrap();
|
||||
let channel_genesis_msg_id = [0; 32];
|
||||
let genesis_block = hashable_data.into_pending_block(&signing_key, channel_genesis_msg_id);
|
||||
let genesis_parent_msg_id = [0; 32];
|
||||
let genesis_block = hashable_data.into_pending_block(&signing_key, genesis_parent_msg_id);
|
||||
|
||||
// Sequencer should panic if unable to open db,
|
||||
// as fixing this issue may require actions non-native to program scope
|
||||
let store = SequencerStore::open_db_with_genesis(
|
||||
&config.home.join("rocksdb"),
|
||||
Some(genesis_block),
|
||||
Some(&genesis_block),
|
||||
signing_key,
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
#[cfg_attr(not(feature = "testnet"), allow(unused_mut))]
|
||||
let mut state = match store.get_nssa_state() {
|
||||
Some(state) => {
|
||||
info!("Found local database. Loading state and pending blocks from it.");
|
||||
@ -97,7 +112,7 @@ impl SequencerCore {
|
||||
let init_accs: Vec<(nssa::AccountId, u128)> = config
|
||||
.initial_accounts
|
||||
.iter()
|
||||
.map(|acc_data| (acc_data.account_id.parse().unwrap(), acc_data.balance))
|
||||
.map(|acc_data| (acc_data.account_id, acc_data.balance))
|
||||
.collect();
|
||||
|
||||
nssa::V02State::new_with_genesis_accounts(&init_accs, &initial_commitments)
|
||||
@ -108,10 +123,15 @@ impl SequencerCore {
|
||||
state.add_pinata_program(PINATA_BASE58.parse().unwrap());
|
||||
|
||||
let (mempool, mempool_handle) = MemPool::new(config.mempool_max_size);
|
||||
let block_settlement_client = config.bedrock_config.as_ref().map(|bedrock_config| {
|
||||
BlockSettlementClient::try_new(&config.home, bedrock_config)
|
||||
.expect("Block settlement client should be constructible")
|
||||
});
|
||||
let bedrock_signing_key =
|
||||
load_or_create_signing_key(&config.home.join("bedrock_signing_key"))
|
||||
.expect("Failed to load or create signing key");
|
||||
let block_settlement_client = BC::new(&config.bedrock_config, bedrock_signing_key)
|
||||
.expect("Failed to initialize Block Settlement Client");
|
||||
|
||||
let indexer_client = IC::new(&config.indexer_rpc_url)
|
||||
.await
|
||||
.expect("Failed to create Indexer Client");
|
||||
|
||||
let sequencer_core = Self {
|
||||
state,
|
||||
@ -120,7 +140,8 @@ impl SequencerCore {
|
||||
chain_height: config.genesis_id,
|
||||
sequencer_config: config,
|
||||
block_settlement_client,
|
||||
last_bedrock_msg_id: channel_genesis_msg_id,
|
||||
indexer_client,
|
||||
last_bedrock_msg_id: genesis_parent_msg_id,
|
||||
};
|
||||
|
||||
(sequencer_core, mempool_handle)
|
||||
@@ -145,21 +166,28 @@ impl SequencerCore {
    }

    pub async fn produce_new_block_and_post_to_settlement_layer(&mut self) -> Result<u64> {
        let block_data = self.produce_new_block_with_mempool_transactions()?;

        if let Some(client) = self.block_settlement_client.as_mut() {
            let block =
                block_data.into_pending_block(self.store.signing_key(), self.last_bedrock_msg_id);
            let msg_id = client.submit_block_to_bedrock(&block).await?;
            self.last_bedrock_msg_id = msg_id.into();
            log::info!("Posted block data to Bedrock");
        {
            let block = self.produce_new_block_with_mempool_transactions()?;
            match self
                .block_settlement_client
                .submit_block_to_bedrock(&block)
                .await
            {
                Ok(msg_id) => {
                    self.last_bedrock_msg_id = msg_id.into();
                    info!("Posted block data to Bedrock, msg_id: {msg_id:?}");
                }
                Err(err) => {
                    error!("Failed to post block data to Bedrock with error: {err:#}");
                }
            }
        }

        Ok(self.chain_height)
    }

    /// Produces new block from transactions in mempool
    pub fn produce_new_block_with_mempool_transactions(&mut self) -> Result<HashableBlockData> {
    pub fn produce_new_block_with_mempool_transactions(&mut self) -> Result<Block> {
        let now = Instant::now();

        let new_block_height = self.chain_height + 1;
@@ -167,17 +195,22 @@ impl SequencerCore {
        let mut valid_transactions = vec![];

        while let Some(tx) = self.mempool.pop() {
            let nssa_transaction = NSSATransaction::try_from(&tx)
                .map_err(|_| TransactionMalformationError::FailedToDecode { tx: tx.hash() })?;
            let tx_hash = tx.hash();
            match self.execute_check_transaction_on_state(tx) {
                Ok(valid_tx) => {
                    info!("Validated transaction with hash {tx_hash}, including it in block",);
                    valid_transactions.push(valid_tx);

            if let Ok(valid_tx) = self.execute_check_transaction_on_state(nssa_transaction) {
                valid_transactions.push(valid_tx.into());

                if valid_transactions.len() >= self.sequencer_config.max_num_tx_in_block {
                    break;
                    if valid_transactions.len() >= self.sequencer_config.max_num_tx_in_block {
                        break;
                    }
                }
                Err(err) => {
                    error!(
                        "Transaction with hash {tx_hash} failed execution check with error: {err:#?}, skipping it",
                    );
                    // TODO: Probably need to handle unsuccessful transaction execution?
                }
            } else {
                // Probably need to handle unsuccessful transaction execution?
            }
        }

@@ -196,7 +229,7 @@ impl SequencerCore {
            .clone()
            .into_pending_block(self.store.signing_key(), self.last_bedrock_msg_id);

        self.store.update(block, &self.state)?;
        self.store.update(&block, &self.state)?;

        self.chain_height = new_block_height;

@@ -215,7 +248,7 @@ impl SequencerCore {
            hashable_data.transactions.len(),
            now.elapsed().as_secs()
        );
        Ok(hashable_data)
        Ok(block)
    }

    pub fn state(&self) -> &nssa::V02State {
@@ -245,6 +278,10 @@ impl SequencerCore {
            .map(|block| block.header.block_id)
            .min()
        {
            info!(
                "Clearing pending blocks up to id: {}",
                last_finalized_block_id
            );
            (first_pending_block_id..=last_finalized_block_id)
                .try_for_each(|id| self.store.delete_block_at_id(id))
        } else {
@@ -263,9 +300,13 @@ impl SequencerCore {
            .collect())
    }

    pub fn block_settlement_client(&self) -> Option<BlockSettlementClient> {
    pub fn block_settlement_client(&self) -> BC {
        self.block_settlement_client.clone()
    }

    pub fn indexer_client(&self) -> IC {
        self.indexer_client.clone()
    }
}

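Because settlement failures are now logged and swallowed instead of being returned to the caller, blocks that never reached Bedrock remain in the store as pending. A background retry task along the following lines could drain them later; this is only a sketch — the loop, the field access and the concrete core type (generic parameters elided) are assumptions, while `get_pending_blocks`, `block_settlement_client` and `submit_block_to_bedrock` appear in the code above.

// Illustrative only, not part of this commit.
async fn retry_pending_blocks(core: &mut SequencerCore) -> anyhow::Result<()> {
    // `retry_pending_blocks_timeout_millis` is the knob visible in the test config below;
    // its type is assumed here to be milliseconds as u64.
    let pause = std::time::Duration::from_millis(
        core.sequencer_config.retry_pending_blocks_timeout_millis,
    );
    loop {
        tokio::time::sleep(pause).await;
        let mut client = core.block_settlement_client();
        for block in core.get_pending_blocks()? {
            if let Err(err) = client.submit_block_to_bedrock(&block).await {
                error!("Retrying pending block failed: {err:#}");
            }
        }
    }
}
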
// TODO: Introduce type-safe wrapper around checked transaction, e.g. AuthenticatedTransaction
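
A sketch of the wrapper the TODO above hints at; the name, shape and constructor discipline below are illustrative only and not part of this commit.

// Hypothetical newtype: only the pre-check path can construct it, so later stages
// can require "signature already verified" at the type level.
pub struct AuthenticatedTransaction(NSSATransaction);

impl AuthenticatedTransaction {
    /// To be called only after `transaction_pre_check` has accepted the transaction.
    fn from_prechecked(tx: NSSATransaction) -> Self {
        Self(tx)
    }

    pub fn inner(&self) -> &NSSATransaction {
        &self.0
    }
}
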
@@ -292,22 +333,38 @@ pub fn transaction_pre_check(
    }
}

#[cfg(test)]
mod tests {
    use std::pin::pin;

    use base58::{FromBase58, ToBase58};
    use common::test_utils::sequencer_sign_key_for_testing;
    use nssa::PrivateKey;

    use super::*;
    use crate::config::AccountInitialData;

    fn parse_unwrap_tx_body_into_nssa_tx(tx_body: EncodedTransaction) -> NSSATransaction {
        NSSATransaction::try_from(&tx_body)
            .map_err(|_| TransactionMalformationError::FailedToDecode { tx: tx_body.hash() })
            .unwrap()
/// Load signing key from file or generate a new one if it doesn't exist
fn load_or_create_signing_key(path: &Path) -> Result<Ed25519Key> {
    if path.exists() {
        let key_bytes = std::fs::read(path)?;
        let key_array: [u8; ED25519_SECRET_KEY_SIZE] = key_bytes
            .try_into()
            .map_err(|_| anyhow!("Found key with incorrect length"))?;
        Ok(Ed25519Key::from_bytes(&key_array))
    } else {
        let mut key_bytes = [0u8; ED25519_SECRET_KEY_SIZE];
        rand::RngCore::fill_bytes(&mut rand::thread_rng(), &mut key_bytes);
        std::fs::write(path, key_bytes)?;
        Ok(Ed25519Key::from_bytes(&key_bytes))
    }
}

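An illustrative check, not part of this diff, of the helper's round-trip behaviour: the first call generates and persists a key, later calls read the same bytes back. The temp-file path and the test scaffolding are assumptions.

#[cfg(test)]
mod load_or_create_signing_key_sketch {
    use super::*;

    #[test]
    fn key_file_is_created_once_and_reused() {
        let path = std::env::temp_dir().join("bedrock_signing_key_sketch");
        let _ = std::fs::remove_file(&path);

        // First call: no file yet, so a key is generated and written to disk.
        let _first = load_or_create_signing_key(&path).unwrap();
        let stored = std::fs::read(&path).unwrap();
        assert_eq!(stored.len(), ED25519_SECRET_KEY_SIZE);

        // Second call: the existing file is read back, leaving it untouched.
        let _second = load_or_create_signing_key(&path).unwrap();
        assert_eq!(std::fs::read(&path).unwrap(), stored);
    }
}
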
#[cfg(all(test, feature = "mock"))]
mod tests {
    use std::{pin::pin, str::FromStr as _};

    use base58::ToBase58;
    use bedrock_client::BackoffConfig;
    use common::{test_utils::sequencer_sign_key_for_testing, transaction::NSSATransaction};
    use logos_blockchain_core::mantle::ops::channel::ChannelId;
    use mempool::MemPoolHandle;
    use nssa::{AccountId, PrivateKey};

    use crate::{
        config::{AccountInitialData, BedrockConfig, SequencerConfig},
        mock::SequencerCoreWithMockClients,
        transaction_pre_check,
    };

    fn setup_sequencer_config_variable_initial_accounts(
        initial_accounts: Vec<AccountInitialData>,
@@ -327,8 +384,17 @@ mod tests {
            initial_accounts,
            initial_commitments: vec![],
            signing_key: *sequencer_sign_key_for_testing().value(),
            bedrock_config: None,
            bedrock_config: BedrockConfig {
                backoff: BackoffConfig {
                    start_delay_millis: 100,
                    max_retries: 5,
                },
                channel_id: ChannelId::from([0; 32]),
                node_url: "http://not-used-in-unit-tests".parse().unwrap(),
                auth: None,
            },
            retry_pending_blocks_timeout_millis: 1000 * 60 * 4,
            indexer_rpc_url: "ws://localhost:8779".parse().unwrap(),
        }
    }

@@ -344,12 +410,12 @@ mod tests {
        ];

        let initial_acc1 = AccountInitialData {
            account_id: acc1_account_id.to_base58(),
            account_id: AccountId::from_str(&acc1_account_id.to_base58()).unwrap(),
            balance: 10000,
        };

        let initial_acc2 = AccountInitialData {
            account_id: acc2_account_id.to_base58(),
            account_id: AccountId::from_str(&acc2_account_id.to_base58()).unwrap(),
            balance: 20000,
        };

@@ -366,15 +432,16 @@ mod tests {
        nssa::PrivateKey::try_new([2; 32]).unwrap()
    }

    async fn common_setup() -> (SequencerCore, MemPoolHandle<EncodedTransaction>) {
    async fn common_setup() -> (SequencerCoreWithMockClients, MemPoolHandle<NSSATransaction>) {
        let config = setup_sequencer_config();
        common_setup_with_config(config).await
    }

    async fn common_setup_with_config(
        config: SequencerConfig,
    ) -> (SequencerCore, MemPoolHandle<EncodedTransaction>) {
        let (mut sequencer, mempool_handle) = SequencerCore::start_from_config(config);
    ) -> (SequencerCoreWithMockClients, MemPoolHandle<NSSATransaction>) {
        let (mut sequencer, mempool_handle) =
            SequencerCoreWithMockClients::start_from_config(config).await;

        let tx = common::test_utils::produce_dummy_empty_transaction();
        mempool_handle.push(tx).await.unwrap();
@@ -386,45 +453,28 @@ mod tests {
        (sequencer, mempool_handle)
    }

    #[test]
    fn test_start_from_config() {
    #[tokio::test]
    async fn test_start_from_config() {
        let config = setup_sequencer_config();
        let (sequencer, _mempool_handle) = SequencerCore::start_from_config(config.clone());
        let (sequencer, _mempool_handle) =
            SequencerCoreWithMockClients::start_from_config(config.clone()).await;

        assert_eq!(sequencer.chain_height, config.genesis_id);
        assert_eq!(sequencer.sequencer_config.max_num_tx_in_block, 10);
        assert_eq!(sequencer.sequencer_config.port, 8080);

        let acc1_account_id = config.initial_accounts[0]
            .account_id
            .clone()
            .from_base58()
            .unwrap()
            .try_into()
            .unwrap();
        let acc2_account_id = config.initial_accounts[1]
            .account_id
            .clone()
            .from_base58()
            .unwrap()
            .try_into()
            .unwrap();
        let acc1_account_id = config.initial_accounts[0].account_id;
        let acc2_account_id = config.initial_accounts[1].account_id;

        let balance_acc_1 = sequencer
            .state
            .get_account_by_id(&nssa::AccountId::new(acc1_account_id))
            .balance;
        let balance_acc_2 = sequencer
            .state
            .get_account_by_id(&nssa::AccountId::new(acc2_account_id))
            .balance;
        let balance_acc_1 = sequencer.state.get_account_by_id(acc1_account_id).balance;
        let balance_acc_2 = sequencer.state.get_account_by_id(acc2_account_id).balance;

        assert_eq!(10000, balance_acc_1);
        assert_eq!(20000, balance_acc_2);
    }

    #[test]
    fn test_start_different_intial_accounts_balances() {
    #[tokio::test]
    async fn test_start_different_intial_accounts_balances() {
        let acc1_account_id: Vec<u8> = vec![
            27, 132, 197, 86, 123, 18, 100, 64, 153, 93, 62, 213, 170, 186, 5, 101, 215, 30, 24,
            52, 96, 72, 25, 255, 156, 23, 245, 233, 213, 221, 7, 143,
@@ -436,55 +486,38 @@ mod tests {
        ];

        let initial_acc1 = AccountInitialData {
            account_id: acc1_account_id.to_base58(),
            account_id: AccountId::from_str(&acc1_account_id.to_base58()).unwrap(),
            balance: 10000,
        };

        let initial_acc2 = AccountInitialData {
            account_id: acc2_account_id.to_base58(),
            account_id: AccountId::from_str(&acc2_account_id.to_base58()).unwrap(),
            balance: 20000,
        };

        let initial_accounts = vec![initial_acc1, initial_acc2];

        let config = setup_sequencer_config_variable_initial_accounts(initial_accounts);
        let (sequencer, _mempool_handle) = SequencerCore::start_from_config(config.clone());
        let (sequencer, _mempool_handle) =
            SequencerCoreWithMockClients::start_from_config(config.clone()).await;

        let acc1_account_id = config.initial_accounts[0]
            .account_id
            .clone()
            .from_base58()
            .unwrap()
            .try_into()
            .unwrap();
        let acc2_account_id = config.initial_accounts[1]
            .account_id
            .clone()
            .from_base58()
            .unwrap()
            .try_into()
            .unwrap();
        let acc1_account_id = config.initial_accounts[0].account_id;
        let acc2_account_id = config.initial_accounts[1].account_id;

        assert_eq!(
            10000,
            sequencer
                .state
                .get_account_by_id(&nssa::AccountId::new(acc1_account_id))
                .balance
            sequencer.state.get_account_by_id(acc1_account_id).balance
        );
        assert_eq!(
            20000,
            sequencer
                .state
                .get_account_by_id(&nssa::AccountId::new(acc2_account_id))
                .balance
            sequencer.state.get_account_by_id(acc2_account_id).balance
        );
    }

    #[test]
    fn test_transaction_pre_check_pass() {
        let tx = common::test_utils::produce_dummy_empty_transaction();
        let result = transaction_pre_check(parse_unwrap_tx_body_into_nssa_tx(tx));
        let result = transaction_pre_check(tx);

        assert!(result.is_ok());
    }
@@ -493,27 +526,15 @@
    async fn test_transaction_pre_check_native_transfer_valid() {
        let (sequencer, _mempool_handle) = common_setup().await;

        let acc1 = sequencer.sequencer_config.initial_accounts[0]
            .account_id
            .clone()
            .from_base58()
            .unwrap()
            .try_into()
            .unwrap();
        let acc2 = sequencer.sequencer_config.initial_accounts[1]
            .account_id
            .clone()
            .from_base58()
            .unwrap()
            .try_into()
            .unwrap();
        let acc1 = sequencer.sequencer_config.initial_accounts[0].account_id;
        let acc2 = sequencer.sequencer_config.initial_accounts[1].account_id;

        let sign_key1 = create_signing_key_for_account1();

        let tx = common::test_utils::create_transaction_native_token_transfer(
            acc1, 0, acc2, 10, sign_key1,
        );
        let result = transaction_pre_check(parse_unwrap_tx_body_into_nssa_tx(tx));
        let result = transaction_pre_check(tx);

        assert!(result.is_ok());
    }
@@ -522,20 +543,8 @@
    async fn test_transaction_pre_check_native_transfer_other_signature() {
        let (mut sequencer, _mempool_handle) = common_setup().await;

        let acc1 = sequencer.sequencer_config.initial_accounts[0]
            .account_id
            .clone()
            .from_base58()
            .unwrap()
            .try_into()
            .unwrap();
        let acc2 = sequencer.sequencer_config.initial_accounts[1]
            .account_id
            .clone()
            .from_base58()
            .unwrap()
            .try_into()
            .unwrap();
        let acc1 = sequencer.sequencer_config.initial_accounts[0].account_id;
        let acc2 = sequencer.sequencer_config.initial_accounts[1].account_id;

        let sign_key2 = create_signing_key_for_account2();

@@ -544,7 +553,7 @@
        );

        // Signature is valid, stateless check pass
        let tx = transaction_pre_check(parse_unwrap_tx_body_into_nssa_tx(tx)).unwrap();
        let tx = transaction_pre_check(tx).unwrap();

        // Signature is not from sender. Execution fails
        let result = sequencer.execute_check_transaction_on_state(tx);
@@ -559,20 +568,8 @@
    async fn test_transaction_pre_check_native_transfer_sent_too_much() {
        let (mut sequencer, _mempool_handle) = common_setup().await;

        let acc1 = sequencer.sequencer_config.initial_accounts[0]
            .account_id
            .clone()
            .from_base58()
            .unwrap()
            .try_into()
            .unwrap();
        let acc2 = sequencer.sequencer_config.initial_accounts[1]
            .account_id
            .clone()
            .from_base58()
            .unwrap()
            .try_into()
            .unwrap();
        let acc1 = sequencer.sequencer_config.initial_accounts[0].account_id;
        let acc2 = sequencer.sequencer_config.initial_accounts[1].account_id;

        let sign_key1 = create_signing_key_for_account1();

@@ -580,7 +577,7 @@
            acc1, 0, acc2, 10000000, sign_key1,
        );

        let result = transaction_pre_check(parse_unwrap_tx_body_into_nssa_tx(tx));
        let result = transaction_pre_check(tx);

        // Passed pre-check
        assert!(result.is_ok());
@@ -598,20 +595,8 @@
    async fn test_transaction_execute_native_transfer() {
        let (mut sequencer, _mempool_handle) = common_setup().await;

        let acc1 = sequencer.sequencer_config.initial_accounts[0]
            .account_id
            .clone()
            .from_base58()
            .unwrap()
            .try_into()
            .unwrap();
        let acc2 = sequencer.sequencer_config.initial_accounts[1]
            .account_id
            .clone()
            .from_base58()
            .unwrap()
            .try_into()
            .unwrap();
        let acc1 = sequencer.sequencer_config.initial_accounts[0].account_id;
        let acc2 = sequencer.sequencer_config.initial_accounts[1].account_id;

        let sign_key1 = create_signing_key_for_account1();

@@ -619,18 +604,10 @@
            acc1, 0, acc2, 100, sign_key1,
        );

        sequencer
            .execute_check_transaction_on_state(parse_unwrap_tx_body_into_nssa_tx(tx))
            .unwrap();
        sequencer.execute_check_transaction_on_state(tx).unwrap();

        let bal_from = sequencer
            .state
            .get_account_by_id(&nssa::AccountId::new(acc1))
            .balance;
        let bal_to = sequencer
            .state
            .get_account_by_id(&nssa::AccountId::new(acc2))
            .balance;
        let bal_from = sequencer.state.get_account_by_id(acc1).balance;
        let bal_to = sequencer.state.get_account_by_id(acc2).balance;

        assert_eq!(bal_from, 9900);
        assert_eq!(bal_to, 20100);
@@ -673,27 +650,15 @@

        let block = sequencer.produce_new_block_with_mempool_transactions();
        assert!(block.is_ok());
        assert_eq!(block.unwrap().block_id, genesis_height + 1);
        assert_eq!(block.unwrap().header.block_id, genesis_height + 1);
    }

    #[tokio::test]
    async fn test_replay_transactions_are_rejected_in_the_same_block() {
        let (mut sequencer, mempool_handle) = common_setup().await;

        let acc1 = sequencer.sequencer_config.initial_accounts[0]
            .account_id
            .clone()
            .from_base58()
            .unwrap()
            .try_into()
            .unwrap();
        let acc2 = sequencer.sequencer_config.initial_accounts[1]
            .account_id
            .clone()
            .from_base58()
            .unwrap()
            .try_into()
            .unwrap();
        let acc1 = sequencer.sequencer_config.initial_accounts[0].account_id;
        let acc2 = sequencer.sequencer_config.initial_accounts[1].account_id;

        let sign_key1 = create_signing_key_for_account1();

@@ -711,6 +676,7 @@
        let current_height = sequencer
            .produce_new_block_with_mempool_transactions()
            .unwrap()
            .header
            .block_id;
        let block = sequencer.store.get_block_at_id(current_height).unwrap();

@@ -722,20 +688,8 @@
    async fn test_replay_transactions_are_rejected_in_different_blocks() {
        let (mut sequencer, mempool_handle) = common_setup().await;

        let acc1 = sequencer.sequencer_config.initial_accounts[0]
            .account_id
            .clone()
            .from_base58()
            .unwrap()
            .try_into()
            .unwrap();
        let acc2 = sequencer.sequencer_config.initial_accounts[1]
            .account_id
            .clone()
            .from_base58()
            .unwrap()
            .try_into()
            .unwrap();
        let acc1 = sequencer.sequencer_config.initial_accounts[0].account_id;
        let acc2 = sequencer.sequencer_config.initial_accounts[1].account_id;

        let sign_key1 = create_signing_key_for_account1();

@@ -748,6 +702,7 @@
        let current_height = sequencer
            .produce_new_block_with_mempool_transactions()
            .unwrap()
            .header
            .block_id;
        let block = sequencer.store.get_block_at_id(current_height).unwrap();
        assert_eq!(block.body.transactions, vec![tx.clone()]);
@@ -757,6 +712,7 @@
        let current_height = sequencer
            .produce_new_block_with_mempool_transactions()
            .unwrap()
            .header
            .block_id;
        let block = sequencer.store.get_block_at_id(current_height).unwrap();
        assert!(block.body.transactions.is_empty());
@@ -765,23 +721,22 @@
    #[tokio::test]
    async fn test_restart_from_storage() {
        let config = setup_sequencer_config();
        let acc1_account_id: nssa::AccountId =
            config.initial_accounts[0].account_id.parse().unwrap();
        let acc2_account_id: nssa::AccountId =
            config.initial_accounts[1].account_id.parse().unwrap();
        let acc1_account_id = config.initial_accounts[0].account_id;
        let acc2_account_id = config.initial_accounts[1].account_id;
        let balance_to_move = 13;

        // In the following code block a transaction will be processed that moves `balance_to_move`
        // from `acc_1` to `acc_2`. The block created with that transaction will be kept stored in
        // the temporary directory for the block storage of this test.
        {
            let (mut sequencer, mempool_handle) = SequencerCore::start_from_config(config.clone());
            let (mut sequencer, mempool_handle) =
                SequencerCoreWithMockClients::start_from_config(config.clone()).await;
            let signing_key = PrivateKey::try_new([1; 32]).unwrap();

            let tx = common::test_utils::create_transaction_native_token_transfer(
                *acc1_account_id.value(),
                acc1_account_id,
                0,
                *acc2_account_id.value(),
                acc2_account_id,
                balance_to_move,
                signing_key,
            );
@@ -790,6 +745,7 @@
            let current_height = sequencer
                .produce_new_block_with_mempool_transactions()
                .unwrap()
                .header
                .block_id;
            let block = sequencer.store.get_block_at_id(current_height).unwrap();
            assert_eq!(block.body.transactions, vec![tx.clone()]);
@@ -797,9 +753,10 @@

        // Instantiating a new sequencer from the same config. This should load the existing block
        // with the above transaction and update the state to reflect that.
        let (sequencer, _mempool_handle) = SequencerCore::start_from_config(config.clone());
        let balance_acc_1 = sequencer.state.get_account_by_id(&acc1_account_id).balance;
        let balance_acc_2 = sequencer.state.get_account_by_id(&acc2_account_id).balance;
        let (sequencer, _mempool_handle) =
            SequencerCoreWithMockClients::start_from_config(config.clone()).await;
        let balance_acc_1 = sequencer.state.get_account_by_id(acc1_account_id).balance;
        let balance_acc_2 = sequencer.state.get_account_by_id(acc2_account_id).balance;

        // Balances should be consistent with the stored block
        assert_eq!(
@@ -812,10 +769,11 @@
        );
    }

    #[test]
    fn test_get_pending_blocks() {
    #[tokio::test]
    async fn test_get_pending_blocks() {
        let config = setup_sequencer_config();
        let (mut sequencer, _mempool_handle) = SequencerCore::start_from_config(config);
        let (mut sequencer, _mempool_handle) =
            SequencerCoreWithMockClients::start_from_config(config).await;
        sequencer
            .produce_new_block_with_mempool_transactions()
            .unwrap();
@@ -828,10 +786,11 @@
        assert_eq!(sequencer.get_pending_blocks().unwrap().len(), 4);
    }

    #[test]
    fn test_delete_blocks() {
    #[tokio::test]
    async fn test_delete_blocks() {
        let config = setup_sequencer_config();
        let (mut sequencer, _mempool_handle) = SequencerCore::start_from_config(config);
        let (mut sequencer, _mempool_handle) =
            SequencerCoreWithMockClients::start_from_config(config).await;
        sequencer
            .produce_new_block_with_mempool_transactions()
            .unwrap();
