Merge pull request #248 from logos-blockchain/arjentix/build-docker-image

Publish sequencer_runner Docker image with CI
Daniil Polyakov 2025-12-19 21:05:28 +03:00 committed by GitHub
commit 9b37a2a8b7
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
85 changed files with 3432 additions and 4304 deletions
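
For orientation, a rough sketch of how the published image is meant to be consumed once the new workflow has pushed it. The registry host, repository path, and tag below are placeholders; the real values come from the DOCKER_REGISTRY secret and the metadata-action tags, and the mount paths mirror the docker-compose.yml added in this commit.

    # hypothetical image reference -- substitute the actual registry/repository/tag
    docker pull <registry>/<owner>/<repo>/sequencer_runner:latest
    docker run --rm -p 3040:3040 \
      -v "$PWD/sequencer_runner/configs/docker:/etc/sequencer_runner" \
      -v "$PWD/sequencer_runner/data:/var/lib/sequencer_runner" \
      <registry>/<owner>/<repo>/sequencer_runner:latest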

.dockerignore (new file, 36 lines)

@ -0,0 +1,36 @@
# Build artifacts
target/
**/target/
# RocksDB data
rocksdb/
**/rocksdb/
# Git
.git/
.gitignore
# IDE
.vscode/
.idea/
*.swp
*.swo
*~
# OS
.DS_Store
Thumbs.db
# CI/CD
.github/
ci_scripts/
# Documentation
*.md
!README.md
# Configs (copy selectively if needed)
configs/
# License
LICENSE


@ -0,0 +1,10 @@
name: Install risc0
description: Installs risc0 in the environment
runs:
using: "composite"
steps:
- name: Install risc0
run: |
curl -L https://risczero.com/install | bash
/home/runner/.risc0/bin/rzup install
shell: bash


@ -0,0 +1,10 @@
name: Install system dependencies
description: Installs system dependencies in the environment
runs:
using: "composite"
steps:
- name: Install system dependencies
run: |
sudo apt-get update
sudo apt-get install -y build-essential clang libclang-dev libssl-dev pkg-config
shell: bash


@ -14,25 +14,164 @@ on:
name: General
jobs:
ubuntu-latest-pipeline:
fmt-rs:
runs-on: ubuntu-latest
timeout-minutes: 120
name: ubuntu-latest-pipeline
steps:
- uses: actions/checkout@v3
- name: Install system dependencies
run: sudo apt-get update && sudo apt-get install -y build-essential clang libclang-dev libssl-dev pkg-config
- name: Install active toolchain
run: rustup install
- uses: actions/checkout@v5
with:
ref: ${{ github.head_ref }}
- name: Install nightly toolchain for rustfmt
run: rustup install nightly --profile minimal --component rustfmt
- name: lint - ubuntu-latest
run: chmod 777 ./ci_scripts/lint-ubuntu.sh && ./ci_scripts/lint-ubuntu.sh
- name: test ubuntu-latest
if: success() || failure()
run: chmod 777 ./ci_scripts/test-ubuntu.sh && ./ci_scripts/test-ubuntu.sh
- name: Check Rust files are formatted
run: cargo +nightly fmt --check
fmt-toml:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
with:
ref: ${{ github.head_ref }}
- name: Install taplo-cli
run: cargo install --locked taplo-cli
- name: Check TOML files are formatted
run: taplo fmt --check .
machete:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
with:
ref: ${{ github.head_ref }}
- name: Install active toolchain
run: rustup install
- name: Install cargo-machete
run: cargo install cargo-machete
- name: Check for unused dependencies
run: cargo machete
lint:
runs-on: ubuntu-latest
timeout-minutes: 60
name: lint
steps:
- uses: actions/checkout@v5
with:
ref: ${{ github.head_ref }}
- uses: ./.github/actions/install-system-deps
- uses: ./.github/actions/install-risc0
- name: Install active toolchain
run: rustup install
- name: Lint workspace
env:
RISC0_SKIP_BUILD: "1"
run: cargo clippy --workspace --all-targets --all-features -- -D warnings
- name: Lint programs
env:
RISC0_SKIP_BUILD: "1"
run: cargo clippy -p "*programs" -- -D warnings
unit-tests:
runs-on: ubuntu-latest
timeout-minutes: 60
steps:
- uses: actions/checkout@v5
with:
ref: ${{ github.head_ref }}
- uses: ./.github/actions/install-system-deps
- uses: ./.github/actions/install-risc0
- name: Install active toolchain
run: rustup install
- name: Install nextest
run: cargo install cargo-nextest
- name: Run unit tests
env:
RISC0_DEV_MODE: "1"
run: cargo nextest run --no-fail-fast
valid-proof-test:
runs-on: ubuntu-latest
timeout-minutes: 60
steps:
- uses: actions/checkout@v5
with:
ref: ${{ github.head_ref }}
- uses: ./.github/actions/install-system-deps
- uses: ./.github/actions/install-risc0
- name: Install active toolchain
run: rustup install
- name: Test valid proof
env:
NSSA_WALLET_HOME_DIR: ./integration_tests/configs/debug/wallet
RUST_LOG: "info"
run: cargo run --bin integration_tests -- ./integration_tests/configs/debug/ test_success_private_transfer_to_another_owned_account
integration-tests:
runs-on: ubuntu-latest
timeout-minutes: 120
steps:
- uses: actions/checkout@v5
with:
ref: ${{ github.head_ref }}
- uses: ./.github/actions/install-system-deps
- uses: ./.github/actions/install-risc0
- name: Install active toolchain
run: rustup install
- name: Run integration tests
env:
NSSA_WALLET_HOME_DIR: ./integration_tests/configs/debug/wallet
RUST_LOG: "info"
RISC0_DEV_MODE: "1"
run: cargo run --bin integration_tests -- ./integration_tests/configs/debug/ all
artifacts:
runs-on: ubuntu-latest
timeout-minutes: 60
name: artifacts
steps:
- uses: actions/checkout@v5
with:
ref: ${{ github.head_ref }}
- uses: ./.github/actions/install-risc0
- name: Install just
run: cargo install just
- name: Build artifacts
run: just build-artifacts
- name: Check if artifacts match repository
run: |
if ! git diff --exit-code artifacts/; then
echo "❌ Artifacts in the repository are out of date!"
echo "Please run 'just build-artifacts' and commit the changes."
exit 1
fi
echo "✅ Artifacts are up to date"

.github/workflows/publish_image.yml (new file, 44 lines)

@ -0,0 +1,44 @@
name: Publish Sequencer Runner Image
on:
workflow_dispatch:
jobs:
publish:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Log in to registry
uses: docker/login-action@v3
with:
registry: ${{ secrets.DOCKER_REGISTRY }}
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Extract metadata
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ secrets.DOCKER_REGISTRY }}/${{ github.repository }}/sequencer_runner
tags: |
type=ref,event=branch
type=ref,event=pr
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
type=sha,prefix={{branch}}-
type=raw,value=latest,enable={{is_default_branch}}
- name: Build and push Docker image
uses: docker/build-push-action@v5
with:
context: .
file: ./sequencer_runner/Dockerfile
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
cache-from: type=gha
cache-to: type=gha,mode=max
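
Because the workflow only exposes workflow_dispatch, publishing is a deliberate manual step. A minimal sketch of triggering it with the GitHub CLI; the branch name is an assumption, and the Docker registry secrets must already be configured in the repository:

    # requires the GitHub CLI and write access to the repository
    gh workflow run "Publish Sequencer Runner Image" --ref main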

.gitignore (3 lines changed)

@ -6,4 +6,5 @@ data/
.idea/
.vscode/
rocksdb
Cargo.lock
sequencer_runner/data/
storage.json

File diff suppressed because it is too large.


@ -12,15 +12,42 @@ members = [
"common",
"nssa",
"nssa/core",
"program_methods",
"program_methods/guest",
"test_program_methods",
"test_program_methods/guest",
"integration_tests/proc_macro_test_attribute",
"examples/program_deployment",
"examples/program_deployment/methods",
"examples/program_deployment/methods/guest",
]
[workspace.dependencies]
nssa = { path = "nssa" }
nssa_core = { path = "nssa/core" }
common = { path = "common" }
mempool = { path = "mempool" }
storage = { path = "storage" }
key_protocol = { path = "key_protocol" }
sequencer_core = { path = "sequencer_core" }
sequencer_rpc = { path = "sequencer_rpc" }
sequencer_runner = { path = "sequencer_runner" }
wallet = { path = "wallet" }
test_program_methods = { path = "test_program_methods" }
tokio = { version = "1.28.2", features = [
"net",
"rt-multi-thread",
"sync",
"fs",
] }
risc0-zkvm = { version = "3.0.3", features = ['std'] }
risc0-build = "3.0.3"
anyhow = "1.0.98"
num_cpus = "1.13.1"
openssl = { version = "0.10", features = ["vendored"] }
openssl-probe = { version = "0.1.2" }
serde = { version = "1.0.60", default-features = false, features = ["derive"] }
serde_json = "1.0.81"
actix = "0.13.0"
actix-cors = "0.6.1"
@ -33,9 +60,9 @@ lru = "0.7.8"
thiserror = "2.0.12"
sha2 = "0.10.8"
hex = "0.4.3"
bytemuck = "1.24.0"
aes-gcm = "0.10.3"
toml = "0.7.4"
secp256k1-zkp = "0.11.0"
bincode = "1.3.3"
tempfile = "3.14.0"
light-poseidon = "0.3.0"
@ -54,43 +81,17 @@ rocksdb = { version = "0.24.0", default-features = false, features = [
"snappy",
"bindgen-runtime",
] }
[workspace.dependencies.rand]
features = ["std", "std_rng", "getrandom"]
version = "0.8.5"
[workspace.dependencies.k256]
features = ["ecdsa-core", "arithmetic", "expose-field", "serde", "pem"]
version = "0.13.3"
[workspace.dependencies.elliptic-curve]
features = ["arithmetic"]
version = "0.13.8"
[workspace.dependencies.serde]
features = ["derive"]
version = "1.0.60"
[workspace.dependencies.actix-web]
default-features = false
features = ["macros"]
version = "=4.1.0"
[workspace.dependencies.clap]
features = ["derive", "env"]
version = "4.5.42"
[workspace.dependencies.tokio-retry]
version = "0.3.0"
[workspace.dependencies.reqwest]
features = ["json"]
version = "0.11.16"
[workspace.dependencies.tokio]
features = ["net", "rt-multi-thread", "sync", "fs"]
version = "1.28.2"
[workspace.dependencies.tracing]
features = ["std"]
version = "0.1.13"
rand = { version = "0.8.5", features = ["std", "std_rng", "getrandom"] }
k256 = { version = "0.13.3", features = [
"ecdsa-core",
"arithmetic",
"expose-field",
"serde",
"pem",
] }
elliptic-curve = { version = "0.13.8", features = ["arithmetic"] }
actix-web = { version = "=4.1.0", default-features = false, features = [
"macros",
] }
clap = { version = "4.5.42", features = ["derive", "env"] }
reqwest = { version = "0.11.16", features = ["json"] }

Justfile (new file, 19 lines)

@ -0,0 +1,19 @@
set shell := ["bash", "-eu", "-o", "pipefail", "-c"]
default:
@just --list
# ---- Configuration ----
METHODS_PATH := "program_methods"
TEST_METHODS_PATH := "test_program_methods"
ARTIFACTS := "artifacts"
# ---- Artifacts build ----
build-artifacts:
@echo "🔨 Building artifacts"
@for methods_path in {{METHODS_PATH}} {{TEST_METHODS_PATH}}; do \
echo "Building artifacts for $methods_path"; \
CARGO_TARGET_DIR=target/$methods_path cargo risczero build --manifest-path $methods_path/guest/Cargo.toml; \
mkdir -p {{ARTIFACTS}}/$methods_path; \
cp target/$methods_path/riscv32im-risc0-zkvm-elf/docker/*.bin {{ARTIFACTS}}/$methods_path; \
done
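
A hedged sketch of the local flow this recipe is meant for, mirroring the install-risc0 action and the artifacts CI job above; it assumes the risc0 installer puts rzup on PATH and that Docker is available, since cargo risczero build produces the deterministic guest binaries inside a container:

    # one-time setup
    curl -L https://risczero.com/install | bash
    rzup install
    cargo install just
    # rebuild the committed guest binaries and check they are current
    just build-artifacts
    git diff --exit-code artifacts/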

Binary files not shown (14 files).


@ -1,4 +0,0 @@
set -e
curl -L https://risczero.com/install | bash
/Users/runner/.risc0/bin/rzup install
RUSTFLAGS="-D warnings" cargo build


@ -1,4 +0,0 @@
set -e
curl -L https://risczero.com/install | bash
/home/runner/.risc0/bin/rzup install
RUSTFLAGS="-D warnings" cargo build


@ -1,8 +0,0 @@
set -e
cargo +nightly fmt -- --check
cargo install taplo-cli --locked
taplo fmt --check
RISC0_SKIP_BUILD=1 cargo clippy --workspace --all-targets -- -D warnings


@ -1,17 +0,0 @@
set -e
curl -L https://risczero.com/install | bash
/home/runner/.risc0/bin/rzup install
RISC0_DEV_MODE=1 cargo test --release --features no_docker
cd integration_tests
export NSSA_WALLET_HOME_DIR=$(pwd)/configs/debug/wallet/
export RUST_LOG=info
echo "Try test valid proof at least once"
cargo run $(pwd)/configs/debug test_success_private_transfer_to_another_owned_account
echo "Continuing in dev mode"
RISC0_DEV_MODE=1 cargo run $(pwd)/configs/debug all
cd ..
cd nssa/program_methods/guest && cargo test --release


@ -4,18 +4,16 @@ version = "0.1.0"
edition = "2024"
[dependencies]
nssa.workspace = true
nssa_core.workspace = true
anyhow.workspace = true
thiserror.workspace = true
serde_json.workspace = true
serde.workspace = true
reqwest.workspace = true
sha2.workspace = true
log.workspace = true
hex.workspace = true
nssa-core = { path = "../nssa/core", features = ["host"] }
borsh.workspace = true
base64.workspace = true
[dependencies.nssa]
path = "../nssa"


@ -4,10 +4,8 @@ version = "0.1.0"
edition = "2024"
[dependencies]
nssa.workspace = true
wallet.workspace = true
tokio = { workspace = true, features = ["macros"] }
wallet = { path = "../../wallet" }
nssa-core = { path = "../../nssa/core" }
nssa = { path = "../../nssa" }
key_protocol = { path = "../../key_protocol/" }
clap = "4.5.53"
serde = "1.0.228"
clap.workspace = true


@ -1,10 +1,10 @@
[package]
name = "test-program-methods"
name = "example_program_deployment_methods"
version = "0.1.0"
edition = "2024"
[build-dependencies]
risc0-build = { version = "3.0.3" }
risc0-build.workspace = true
[package.metadata.risc0]
methods = ["guest"]


@ -1,13 +1,11 @@
[package]
name = "programs"
name = "example_program_deployment_programs"
version = "0.1.0"
edition = "2024"
[workspace]
[dependencies]
risc0-zkvm = { version = "3.0.3", features = ['std'] }
nssa-core = { path = "../../../../nssa/core" }
serde = { version = "1.0.219", default-features = false }
hex = "0.4.3"
bytemuck = "1.24.0"
nssa_core.workspace = true
hex.workspace = true
bytemuck.workspace = true
risc0-zkvm.workspace = true


@ -9,7 +9,6 @@ use nssa_core::program::{
// It reads a single account, emits it unchanged, and then triggers a tail call
// to the Hello World program with a fixed greeting.
/// This needs to be set to the ID of the Hello world program.
/// To get the ID run **from the root directory of the repository**:
/// `cargo risczero build --manifest-path examples/program_deployment/methods/guest/Cargo.toml`


@ -4,6 +4,16 @@ version = "0.1.0"
edition = "2024"
[dependencies]
nssa_core = { workspace = true, features = ["host"] }
nssa.workspace = true
sequencer_core = { workspace = true, features = ["testnet"] }
sequencer_runner.workspace = true
wallet.workspace = true
common.workspace = true
key_protocol.workspace = true
proc_macro_test_attribute = { path = "./proc_macro_test_attribute" }
clap = { workspace = true, features = ["derive", "env"] }
anyhow.workspace = true
env_logger.workspace = true
log.workspace = true
@ -14,31 +24,3 @@ tokio.workspace = true
hex.workspace = true
tempfile.workspace = true
borsh.workspace = true
nssa-core = { path = "../nssa/core", features = ["host"] }
proc_macro_test_attribute = { path = "./proc_macro_test_attribute" }
[dependencies.clap]
features = ["derive", "env"]
workspace = true
[dependencies.sequencer_core]
path = "../sequencer_core"
features = ["testnet"]
[dependencies.sequencer_runner]
path = "../sequencer_runner"
[dependencies.wallet]
path = "../wallet"
[dependencies.common]
path = "../common"
[dependencies.key_protocol]
path = "../key_protocol"
[dependencies.nssa]
path = "../nssa"
features = ["no_docker"]

Binary file not shown.


@ -1596,7 +1596,10 @@ pub fn prepare_function_map() -> HashMap<String, TestFunction> {
pub async fn test_program_deployment() {
info!("########## test program deployment ##########");
let binary_filepath: PathBuf = NSSA_PROGRAM_FOR_TEST_DATA_CHANGER.parse().unwrap();
let manifest_dir = env!("CARGO_MANIFEST_DIR");
let binary_filepath: PathBuf = PathBuf::from(manifest_dir)
.join("../artifacts/test_program_methods")
.join(NSSA_PROGRAM_FOR_TEST_DATA_CHANGER);
let command = Command::DeployProgram {
binary_filepath: binary_filepath.clone(),


@ -4,22 +4,19 @@ version = "0.1.0"
edition = "2024"
[dependencies]
nssa.workspace = true
nssa_core.workspace = true
common.workspace = true
anyhow.workspace = true
serde.workspace = true
k256.workspace = true
sha2.workspace = true
rand.workspace = true
base58.workspace = true
hex = "0.4.3"
hex.workspace = true
aes-gcm.workspace = true
bip39.workspace = true
hmac-sha512.workspace = true
thiserror.workspace = true
nssa-core = { path = "../nssa/core", features = ["host"] }
itertools.workspace = true
[dependencies.common]
path = "../common"
[dependencies.nssa]
path = "../nssa"


@ -4,26 +4,26 @@ version = "0.1.0"
edition = "2024"
[dependencies]
thiserror = "2.0.12"
risc0-zkvm = { version = "3.0.3", features = ['std'] }
nssa-core = { path = "core", features = ["host"] }
program-methods = { path = "program_methods", optional = true }
serde = "1.0.219"
sha2 = "0.10.9"
nssa_core = { workspace = true, features = ["host"] }
thiserror.workspace = true
risc0-zkvm.workspace = true
serde.workspace = true
sha2.workspace = true
rand.workspace = true
borsh.workspace = true
hex.workspace = true
secp256k1 = "0.31.1"
rand = "0.8"
borsh = "1.5.7"
hex = "0.4.3"
risc0-binfmt = "3.0.2"
log.workspace = true
[build-dependencies]
risc0-build = "3.0.3"
risc0-binfmt = "3.0.2"
[dev-dependencies]
test-program-methods = { path = "test_program_methods" }
test_program_methods.workspace = true
hex-literal = "1.0.0"
[features]
default = []
no_docker = ["program-methods"]


@ -1,43 +1,21 @@
fn main() {
if cfg!(feature = "no_docker") {
println!("cargo:warning=NO_DOCKER feature enabled deterministic build skipped");
return;
}
build_deterministic().expect("Deterministic build failed");
}
fn build_deterministic() -> Result<(), Box<dyn std::error::Error>> {
use std::{env, fs, path::PathBuf, process::Command};
use std::{env, fs, path::PathBuf};
fn main() -> Result<(), Box<dyn std::error::Error>> {
let manifest_dir = PathBuf::from(env::var("CARGO_MANIFEST_DIR")?);
let out_dir = PathBuf::from(env::var("OUT_DIR")?);
let mod_dir = out_dir.join("program_methods");
let mod_file = mod_dir.join("mod.rs");
let program_methods_dir = manifest_dir.join("../artifacts/program_methods/");
println!("cargo:rerun-if-changed=program_methods/guest/src");
println!("cargo:rerun-if-changed=program_methods/guest/Cargo.toml");
println!("cargo:rerun-if-changed={}", program_methods_dir.display());
let guest_manifest = manifest_dir.join("program_methods/guest/Cargo.toml");
let status = Command::new("cargo")
.args(["risczero", "build", "--manifest-path"])
.arg(&guest_manifest)
.status()?;
if !status.success() {
return Err("Risc0 deterministic build failed".into());
}
let target_dir =
manifest_dir.join("program_methods/guest/target/riscv32im-risc0-zkvm-elf/docker/");
let bins = fs::read_dir(&target_dir)?
let bins = fs::read_dir(&program_methods_dir)?
.filter_map(Result::ok)
.filter(|e| e.path().extension().is_some_and(|ext| ext == "bin"))
.collect::<Vec<_>>();
if bins.is_empty() {
return Err(format!("No .bin files found in {:?}", target_dir).into());
return Err(format!("No .bin files found in {:?}", program_methods_dir).into());
}
fs::create_dir_all(&mod_dir)?;
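
With this change the build script no longer shells out to cargo risczero build; it only embeds whatever prebuilt .bin files it finds under artifacts/program_methods/. A small sketch of the expected layout and how to refresh it when the guest code changes (paths per the build script above and the Justfile):

    # from the repository root
    ls artifacts/program_methods/*.bin    # binaries the nssa build script will embed
    just build-artifacts                  # regenerate them deterministically via cargo risczero build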


@ -1,21 +1,21 @@
[package]
name = "nssa-core"
name = "nssa_core"
version = "0.1.0"
edition = "2024"
[dependencies]
risc0-zkvm = { version = "3.0.3", features = ['std'] }
serde = { version = "1.0", default-features = false }
thiserror = { version = "2.0.12" }
bytemuck = { version = "1.13", optional = true }
risc0-zkvm.workspace = true
borsh.workspace = true
serde = { workspace = true }
thiserror.workspace = true
chacha20 = { version = "0.9", default-features = false }
k256 = { version = "0.13.3", optional = true }
base58 = { version = "0.2.0", optional = true }
anyhow = { version = "1.0.98", optional = true }
borsh = "1.5.7"
bytemuck = { workspace = true, optional = true }
k256 = { workspace = true, optional = true }
base58 = { workspace = true, optional = true }
anyhow = { workspace = true, optional = true }
[dev-dependencies]
serde_json = "1.0.81"
serde_json.workspace = true
[features]
default = []


@ -44,11 +44,10 @@ impl AccountWithMetadata {
}
}
#[derive(Serialize, Deserialize, Clone, PartialEq, Eq, Hash, BorshSerialize, BorshDeserialize)]
#[cfg_attr(
any(feature = "host", test),
derive(Debug, Copy, PartialOrd, Ord, Default)
#[derive(
Copy, Clone, Serialize, Deserialize, PartialEq, Eq, Hash, BorshSerialize, BorshDeserialize,
)]
#[cfg_attr(any(feature = "host", test), derive(Debug, PartialOrd, Ord, Default))]
pub struct AccountId {
value: [u8; 32],
}


@ -1,11 +0,0 @@
[package]
name = "programs"
version = "0.1.0"
edition = "2024"
[workspace]
[dependencies]
risc0-zkvm = { version = "3.0.3", features = ['std'] }
nssa-core = { path = "../../core" }
serde = { version = "1.0.219", default-features = false }


@ -1,12 +1,7 @@
#[cfg(not(feature = "no_docker"))]
pub mod program_methods {
include!(concat!(env!("OUT_DIR"), "/program_methods/mod.rs"));
}
#[cfg(feature = "no_docker")]
#[allow(clippy::single_component_path_imports)]
use program_methods;
pub mod encoding;
pub mod error;
mod merkle_tree;


@ -1,6 +1,7 @@
use std::collections::{HashMap, HashSet, VecDeque};
use borsh::{BorshDeserialize, BorshSerialize};
use log::debug;
use nssa_core::{
account::{Account, AccountId, AccountWithMetadata},
program::{ChainedCall, DEFAULT_PROGRAM_ID, PdaSeed, ProgramId, validate_execution},
@ -123,8 +124,16 @@ impl PublicTransaction {
return Err(NssaError::InvalidInput("Unknown program".into()));
};
debug!(
"Program {:?} pre_states: {:?}, instruction_data: {:?}",
chained_call.program_id, chained_call.pre_states, chained_call.instruction_data
);
let mut program_output =
program.execute(&chained_call.pre_states, &chained_call.instruction_data)?;
debug!(
"Program {:?} output: {:?}",
chained_call.program_id, program_output
);
let authorized_pdas =
self.compute_authorized_pdas(&caller_program_id, &chained_call.pda_seeds);

File diff suppressed because it is too large.


@ -1,11 +0,0 @@
[package]
name = "programs"
version = "0.1.0"
edition = "2024"
[workspace]
[dependencies]
risc0-zkvm = { version = "3.0.3", features = ['std'] }
nssa-core = { path = "../../core" }
serde = { version = "1.0.219", default-features = false }


@ -1,18 +0,0 @@
use nssa_core::program::{read_nssa_inputs, write_nssa_outputs, AccountPostState, ProgramInput};
type Instruction = ();
fn main() {
let (ProgramInput { pre_states, .. } , instruction_words) = read_nssa_inputs::<Instruction>();
let [pre] = match pre_states.try_into() {
Ok(array) => array,
Err(_) => return,
};
let account_pre = &pre.account;
let mut account_post = account_pre.clone();
account_post.nonce += 1;
write_nssa_outputs(instruction_words ,vec![pre], vec![AccountPostState::new(account_post)]);
}


@ -1,12 +0,0 @@
use nssa_core::program::{read_nssa_inputs, write_nssa_outputs, ProgramInput, AccountPostState};
type Instruction = ();
fn main() {
let (ProgramInput { pre_states, .. }, instruction_words) = read_nssa_inputs::<Instruction>();
let post_states = pre_states.iter().map(|account| {
AccountPostState::new(account.account.clone())
}).collect();
write_nssa_outputs(instruction_words, pre_states, post_states);
}


@ -1,10 +1,10 @@
[package]
name = "program-methods"
name = "program_methods"
version = "0.1.0"
edition = "2024"
[build-dependencies]
risc0-build = { version = "3.0.3" }
risc0-build.workspace = true
[package.metadata.risc0]
methods = ["guest"]


@ -0,0 +1,10 @@
[package]
name = "programs"
version = "0.1.0"
edition = "2024"
[dependencies]
nssa_core.workspace = true
risc0-zkvm.workspace = true
serde = { workspace = true, default-features = false }


@ -107,7 +107,7 @@ fn main() {
} else {
pre_states.push(pre.clone());
}
state_diff.insert(pre.account_id.clone(), post.account().clone());
state_diff.insert(pre.account_id, post.account().clone());
}
// TODO: Modify when multi-chain calls are supported in the circuit


@ -6,36 +6,35 @@ use nssa_core::{
};
// The token program has three functions:
// 1. New token definition.
// Arguments to this function are:
// * Two **default** accounts: [definition_account, holding_account].
// The first default account will be initialized with the token definition account values. The second account will
// be initialized to a token holding account for the new token, holding the entire total supply.
// * An instruction data of 23-bytes, indicating the total supply and the token name, with
// the following layout:
// [0x00 || total_supply (little-endian 16 bytes) || name (6 bytes)]
// The name cannot be equal to [0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
// 2. Token transfer
// Arguments to this function are:
// 1. New token definition. Arguments to this function are:
// * Two **default** accounts: [definition_account, holding_account]. The first default account
// will be initialized with the token definition account values. The second account will be
// initialized to a token holding account for the new token, holding the entire total supply.
// * An instruction data of 23-bytes, indicating the total supply and the token name, with the
// following layout: [0x00 || total_supply (little-endian 16 bytes) || name (6 bytes)] The
// name cannot be equal to [0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
// 2. Token transfer Arguments to this function are:
// * Two accounts: [sender_account, recipient_account].
// * An instruction data byte string of length 23, indicating the total supply with the following layout
// [0x01 || amount (little-endian 16 bytes) || 0x00 || 0x00 || 0x00 || 0x00 || 0x00 || 0x00].
// 3. Initialize account with zero balance
// Arguments to this function are:
// * An instruction data byte string of length 23, indicating the total supply with the
// following layout [0x01 || amount (little-endian 16 bytes) || 0x00 || 0x00 || 0x00 || 0x00
// || 0x00 || 0x00].
// 3. Initialize account with zero balance Arguments to this function are:
// * Two accounts: [definition_account, account_to_initialize].
* A dummy byte string of length 23, with the following layout
[0x02 || 0x00 || 0x00 || 0x00 || ... || 0x00 || 0x00].
// 4. Burn tokens from a Token Holding account (thus lowering total supply)
// Arguments to this function are:
* A dummy byte string of length 23, with the following layout [0x02 || 0x00 || 0x00 || 0x00
// || ... || 0x00 || 0x00].
// 4. Burn tokens from a Token Holding account (thus lowering total supply) Arguments to this
// function are:
// * Two accounts: [definition_account, holding_account].
// * Authorization required: holding_account
* An instruction data byte string of length 23, indicating the balance to burn with the following layout
* An instruction data byte string of length 23, indicating the balance to burn with the
following layout
// [0x03 || amount (little-endian 16 bytes) || 0x00 || 0x00 || 0x00 || 0x00 || 0x00 || 0x00].
// 5. Mint additional supply of tokens to a Token Holding account (thus increasing total supply)
// Arguments to this function are:
// 5. Mint additional supply of tokens to a Token Holding account (thus increasing total
supply) Arguments to this function are:
* Two accounts: [definition_account, holding_account].
* Authorization required: definition_account
* An instruction data byte string of length 23, indicating the balance to mint with the following layout
* An instruction data byte string of length 23, indicating the balance to mint with the
following layout
// [0x04 || amount (little-endian 16 bytes) || 0x00 || 0x00 || 0x00 || 0x00 || 0x00 || 0x00].
const TOKEN_DEFINITION_TYPE: u8 = 0;
@ -89,10 +88,10 @@ impl TokenDefinition {
}
impl TokenHolding {
fn new(definition_id: &AccountId) -> Self {
fn new(definition_id: AccountId) -> Self {
Self {
account_type: TOKEN_HOLDING_TYPE,
definition_id: definition_id.clone(),
definition_id,
balance: 0,
}
}
@ -142,7 +141,7 @@ fn transfer(pre_states: &[AccountWithMetadata], balance_to_move: u128) -> Vec<Ac
let mut sender_holding =
TokenHolding::parse(&sender.account.data).expect("Invalid sender data");
let mut recipient_holding = if recipient.account == Account::default() {
TokenHolding::new(&sender_holding.definition_id)
TokenHolding::new(sender_holding.definition_id)
} else {
TokenHolding::parse(&recipient.account.data).expect("Invalid recipient data")
};
@ -213,7 +212,7 @@ fn new_definition(
let token_holding = TokenHolding {
account_type: TOKEN_HOLDING_TYPE,
definition_id: definition_target_account.account_id.clone(),
definition_id: definition_target_account.account_id,
balance: total_supply,
};
@ -247,7 +246,7 @@ fn initialize_account(pre_states: &[AccountWithMetadata]) -> Vec<AccountPostStat
// Check definition account is valid
let _definition_values =
TokenDefinition::parse(&definition.account.data).expect("Definition account must be valid");
let holding_values = TokenHolding::new(&definition.account_id);
let holding_values = TokenHolding::new(definition.account_id);
let definition_post = definition.account.clone();
let mut account_to_initialize = account_to_initialize.account.clone();
@ -330,7 +329,7 @@ fn mint_additional_supply(
TokenDefinition::parse(&definition.account.data).expect("Definition account must be valid");
let token_holding_values: TokenHolding = if token_holding.account == Account::default() {
TokenHolding::new(&definition.account_id)
TokenHolding::new(definition.account_id)
} else {
TokenHolding::parse(&token_holding.account.data).expect("Holding account must be valid")
};
@ -1078,7 +1077,6 @@ mod tests {
is_authorized: true,
account_id: helper_id_constructor(IdEnum::PoolDefinitionId),
},
_ => panic!("Invalid selection"),
}
}
@ -1093,8 +1091,7 @@ mod tests {
BalanceEnum::MintSuccess => 50_000,
BalanceEnum::InitSupplyMint => 150_000,
BalanceEnum::HoldingBalanceMint => 51_000,
BalanceEnum::MintOverflow => (2 as u128).pow(128) - 40_000,
_ => panic!("Invalid selection"),
BalanceEnum::MintOverflow => u128::MAX - 40_000,
}
}
@ -1290,7 +1287,7 @@ mod tests {
assert!(
*holding_post.account() == helper_account_constructor(AccountsEnum::InitMint).account
);
assert!(holding_post.requires_claim() == true);
assert!(holding_post.requires_claim());
}
#[test]


@ -4,26 +4,18 @@ version = "0.1.0"
edition = "2024"
[dependencies]
nssa.workspace = true
nssa_core.workspace = true
common.workspace = true
storage.workspace = true
mempool.workspace = true
base58.workspace = true
anyhow.workspace = true
serde.workspace = true
rand.workspace = true
tempfile.workspace = true
chrono.workspace = true
log.workspace = true
nssa-core = { path = "../nssa/core", features = ["host"] }
[dependencies.storage]
path = "../storage"
[dependencies.mempool]
path = "../mempool"
[dependencies.common]
path = "../common"
[dependencies.nssa]
path = "../nssa"
[features]
default = []


@ -4,6 +4,11 @@ version = "0.1.0"
edition = "2024"
[dependencies]
nssa.workspace = true
common.workspace = true
mempool.workspace = true
sequencer_core.workspace = true
anyhow.workspace = true
serde_json.workspace = true
log.workspace = true
@ -11,25 +16,10 @@ serde.workspace = true
actix-cors.workspace = true
futures.workspace = true
base58.workspace = true
hex = "0.4.3"
hex.workspace = true
tempfile.workspace = true
base64.workspace = true
itertools.workspace = true
actix-web.workspace = true
tokio.workspace = true
borsh.workspace = true
# TODO: Move to workspace
[dependencies.sequencer_core]
path = "../sequencer_core"
[dependencies.common]
path = "../common"
[dependencies.nssa]
path = "../nssa"
[dependencies.mempool]
path = "../mempool"


@ -4,25 +4,15 @@ version = "0.1.0"
edition = "2024"
[dependencies]
common.workspace = true
sequencer_core = { workspace = true, features = ["testnet"] }
sequencer_rpc.workspace = true
clap = { workspace = true, features = ["derive", "env"] }
anyhow.workspace = true
serde_json.workspace = true
env_logger.workspace = true
log.workspace = true
actix.workspace = true
actix-web.workspace = true
tokio.workspace = true
[dependencies.clap]
features = ["derive", "env"]
workspace = true
[dependencies.sequencer_rpc]
path = "../sequencer_rpc"
[dependencies.sequencer_core]
path = "../sequencer_core"
features = ["testnet"]
[dependencies.common]
path = "../common"


@ -0,0 +1,79 @@
# Chef stage - uses pre-built cargo-chef image
FROM lukemathwalker/cargo-chef:latest-rust-1.91.1-slim-trixie AS chef
# Install build dependencies
RUN apt-get update && apt-get install -y \
pkg-config \
libssl-dev \
libclang-dev \
clang \
&& rm -rf /var/lib/apt/lists/*
WORKDIR /sequencer_runner
# Planner stage - generates dependency recipe
FROM chef AS planner
COPY . .
RUN cargo chef prepare --bin sequencer_runner --recipe-path recipe.json
# Builder stage - builds dependencies and application
FROM chef AS builder
COPY --from=planner /sequencer_runner/recipe.json recipe.json
# Build dependencies only (this layer will be cached)
RUN cargo chef cook --bin sequencer_runner --release --recipe-path recipe.json
# Copy source code
COPY . .
# Build the actual application
RUN cargo build --release --bin sequencer_runner
# Strip debug symbols to reduce binary size
RUN strip /sequencer_runner/target/release/sequencer_runner
# Runtime stage - minimal image
FROM debian:trixie-slim
# Install runtime dependencies
RUN apt-get update \
&& apt-get install -y gosu jq curl \
&& rm -rf /var/lib/apt/lists/*
# Create non-root user for security
RUN useradd -m -u 1000 -s /bin/bash sequencer_user && \
mkdir -p /sequencer_runner /etc/sequencer_runner && \
chown -R sequencer_user:sequencer_user /sequencer_runner /etc/sequencer_runner
# Copy binary from builder
COPY --from=builder --chown=sequencer_user:sequencer_user /sequencer_runner/target/release/sequencer_runner /usr/local/bin/sequencer_runner
# Copy entrypoint script
COPY sequencer_runner/docker-entrypoint.sh /docker-entrypoint.sh
RUN chmod +x /docker-entrypoint.sh
# Volume for configuration directory
VOLUME ["/etc/sequencer_runner"]
# Expose default port
EXPOSE 3040
# Health check (TODO #244: Replace when a real health endpoint is available)
HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \
CMD curl http://localhost:3040 \
-H "Content-Type: application/json" \
-d "{ \
\"jsonrpc\": \"2.0\", \
\"method\": \"hello\", \
\"params\": {}, \
\"id\": 1 \
}" || exit 1
# Run the application
ENV RUST_LOG=info
USER root
ENTRYPOINT ["/docker-entrypoint.sh"]
WORKDIR /sequencer_runner
CMD ["sequencer_runner", "/etc/sequencer_runner"]


@ -0,0 +1,158 @@
{
"home": "/var/lib/sequencer_runner",
"override_rust_log": null,
"genesis_id": 1,
"is_genesis_random": true,
"max_num_tx_in_block": 20,
"mempool_max_size": 10000,
"block_create_timeout_millis": 10000,
"port": 3040,
"initial_accounts": [
{
"account_id": "BLgCRDXYdQPMMWVHYRFGQZbgeHx9frkipa8GtpG2Syqy",
"balance": 10000
},
{
"account_id": "Gj1mJy5W7J5pfmLRujmQaLfLMWidNxQ6uwnhb666ZwHw",
"balance": 20000
}
],
"initial_commitments": [
{
"npk": [
63,
202,
178,
231,
183,
82,
237,
212,
216,
221,
215,
255,
153,
101,
177,
161,
254,
210,
128,
122,
54,
190,
230,
151,
183,
64,
225,
229,
113,
1,
228,
97
],
"account": {
"program_owner": [
0,
0,
0,
0,
0,
0,
0,
0
],
"balance": 10000,
"data": [],
"nonce": 0
}
},
{
"npk": [
192,
251,
166,
243,
167,
236,
84,
249,
35,
136,
130,
172,
219,
225,
161,
139,
229,
89,
243,
125,
194,
213,
209,
30,
23,
174,
100,
244,
124,
74,
140,
47
],
"account": {
"program_owner": [
0,
0,
0,
0,
0,
0,
0,
0
],
"balance": 20000,
"data": [],
"nonce": 0
}
}
],
"signing_key": [
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37
]
}


@ -0,0 +1,14 @@
services:
sequencer_runner:
image: lssa/sequencer_runner
build:
context: ..
dockerfile: sequencer_runner/Dockerfile
container_name: sequencer_runner
ports:
- "3040:3040"
volumes:
# Mount configuration folder
- ./configs/docker:/etc/sequencer_runner
# Mount data folder
- ./data:/var/lib/sequencer_runner
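
A brief usage sketch for this compose file; it has to be run from the sequencer_runner/ directory because the build context points one level up, and the mounted host folders are taken from the volume entries above:

    cd sequencer_runner
    docker compose up --build -d          # builds lssa/sequencer_runner and starts it on port 3040
    docker compose logs -f sequencer_runner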


@ -0,0 +1,29 @@
#!/bin/sh
# This is the entrypoint script for the sequencer_runner Docker container;
# it's not meant to be executed outside of the container.
set -e
CONFIG="/etc/sequencer_runner/sequencer_config.json"
# Check config file exists
if [ ! -f "$CONFIG" ]; then
echo "Config file not found: $CONFIG" >&2
exit 1
fi
# Parse home dir
HOME_DIR=$(jq -r '.home' "$CONFIG")
if [ -z "$HOME_DIR" ] || [ "$HOME_DIR" = "null" ]; then
echo "'home' key missing in config" >&2
exit 1
fi
# Give permissions to the data directory and switch to non-root user
if [ "$(id -u)" = "0" ]; then
mkdir -p "$HOME_DIR"
chown -R sequencer_user:sequencer_user "$HOME_DIR"
exec gosu sequencer_user "$@"
fi
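
For reference, the same 'home' lookup the entrypoint performs can be run against the config on the host; the host-side path is an assumption based on the docker-compose.yml mount above:

    # expected output: /var/lib/sequencer_runner
    jq -r '.home' sequencer_runner/configs/docker/sequencer_config.json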


@ -4,10 +4,8 @@ version = "0.1.0"
edition = "2024"
[dependencies]
common.workspace = true
thiserror.workspace = true
borsh.workspace = true
rocksdb.workspace = true
[dependencies.common]
path = "../common"


@ -1,10 +1,10 @@
[package]
name = "test-program-methods"
name = "test_program_methods"
version = "0.1.0"
edition = "2024"
[build-dependencies]
risc0-build = { version = "3.0.3" }
risc0-build.workspace = true
[package.metadata.risc0]
methods = ["guest"]


@ -0,0 +1,9 @@
[package]
name = "test_programs"
version = "0.1.0"
edition = "2024"
[dependencies]
nssa_core.workspace = true
risc0-zkvm.workspace = true


@ -20,5 +20,9 @@ fn main() {
let mut account_post = account_pre.clone();
account_post.balance -= balance_to_burn;
write_nssa_outputs(instruction_words, vec![pre], vec![AccountPostState::new(account_post)]);
write_nssa_outputs(
instruction_words,
vec![pre],
vec![AccountPostState::new(account_post)],
);
}


@ -13,9 +13,9 @@ fn main() {
let (
ProgramInput {
pre_states,
instruction: (balance, auth_transfer_id, num_chain_calls, pda_seed),
instruction: (balance, auth_transfer_id, num_chain_calls, pda_seed),
},
instruction_words
instruction_words,
) = read_nssa_inputs::<Instruction>();
let [recipient_pre, sender_pre] = match pre_states.try_into() {
@ -37,7 +37,7 @@ fn main() {
let new_chained_call = ChainedCall {
program_id: auth_transfer_id,
instruction_data: instruction_data.clone(),
pre_states: vec![running_sender_pre.clone(), running_recipient_pre.clone()], // <- Account order permutation here
pre_states: vec![running_sender_pre.clone(), running_recipient_pre.clone()], /* <- Account order permutation here */
pda_seeds: pda_seed.iter().cloned().collect(),
};
chained_calls.push(new_chained_call);


@ -4,7 +4,13 @@ type Instruction = Vec<u8>;
/// A program that modifies the account data by setting bytes sent in instruction.
fn main() {
let (ProgramInput { pre_states, instruction: data }, instruction_words) = read_nssa_inputs::<Instruction>();
let (
ProgramInput {
pre_states,
instruction: data,
},
instruction_words,
) = read_nssa_inputs::<Instruction>();
let [pre] = match pre_states.try_into() {
Ok(array) => array,
@ -13,7 +19,9 @@ fn main() {
let account_pre = &pre.account;
let mut account_post = account_pre.clone();
account_post.data = data.try_into().expect("provided data should fit into data limit");
account_post.data = data
.try_into()
.expect("provided data should fit into data limit");
write_nssa_outputs(
instruction_words,


@ -1,4 +1,4 @@
use nssa_core::program::{read_nssa_inputs, write_nssa_outputs, AccountPostState, ProgramInput};
use nssa_core::program::{AccountPostState, ProgramInput, read_nssa_inputs, write_nssa_outputs};
type Instruction = ();
@ -14,5 +14,9 @@ fn main() {
let mut account_post = account_pre.clone();
account_post.balance += 1;
write_nssa_outputs(instruction_words, vec![pre], vec![AccountPostState::new(account_post)]);
write_nssa_outputs(
instruction_words,
vec![pre],
vec![AccountPostState::new(account_post)],
);
}


@ -0,0 +1,22 @@
use nssa_core::program::{AccountPostState, ProgramInput, read_nssa_inputs, write_nssa_outputs};
type Instruction = ();
fn main() {
let (ProgramInput { pre_states, .. }, instruction_words) = read_nssa_inputs::<Instruction>();
let [pre] = match pre_states.try_into() {
Ok(array) => array,
Err(_) => return,
};
let account_pre = &pre.account;
let mut account_post = account_pre.clone();
account_post.nonce += 1;
write_nssa_outputs(
instruction_words,
vec![pre],
vec![AccountPostState::new(account_post)],
);
}


@ -0,0 +1,13 @@
use nssa_core::program::{AccountPostState, ProgramInput, read_nssa_inputs, write_nssa_outputs};
type Instruction = ();
fn main() {
let (ProgramInput { pre_states, .. }, instruction_words) = read_nssa_inputs::<Instruction>();
let post_states = pre_states
.iter()
.map(|account| AccountPostState::new(account.account.clone()))
.collect();
write_nssa_outputs(instruction_words, pre_states, post_states);
}


@ -1,4 +1,4 @@
use nssa_core::program::{read_nssa_inputs, write_nssa_outputs, AccountPostState, ProgramInput};
use nssa_core::program::{AccountPostState, ProgramInput, read_nssa_inputs, write_nssa_outputs};
type Instruction = ();
@ -14,5 +14,9 @@ fn main() {
let mut account_post = account_pre.clone();
account_post.program_owner = [0, 1, 2, 3, 4, 5, 6, 7];
write_nssa_outputs(instruction_words, vec![pre], vec![AccountPostState::new(account_post)]);
write_nssa_outputs(
instruction_words,
vec![pre],
vec![AccountPostState::new(account_post)],
);
}


@ -4,32 +4,26 @@ version = "0.1.0"
edition = "2024"
[dependencies]
nssa_core.workspace = true
nssa.workspace = true
common.workspace = true
key_protocol.workspace = true
anyhow.workspace = true
serde_json.workspace = true
env_logger.workspace = true
log.workspace = true
serde.workspace = true
tokio.workspace = true
tempfile.workspace = true
clap.workspace = true
nssa-core = { path = "../nssa/core" }
base64.workspace = true
bytemuck = "1.23.2"
bytemuck.workspace = true
borsh.workspace = true
base58.workspace = true
hex = "0.4.3"
hex.workspace = true
rand.workspace = true
itertools.workspace = true
sha2.workspace = true
futures.workspace = true
async-stream = "0.3.6"
indicatif = { version = "0.18.3", features = ["improved_unicode"] }
[dependencies.key_protocol]
path = "../key_protocol"
[dependencies.nssa]
path = "../nssa"
[dependencies.common]
path = "../common"


@ -0,0 +1,547 @@
{
"override_rust_log": null,
"sequencer_addr": "http://127.0.0.1:3040",
"seq_poll_timeout_millis": 12000,
"seq_tx_poll_max_blocks": 5,
"seq_poll_max_retries": 5,
"seq_block_poll_max_amount": 100,
"initial_accounts": [
{
"Public": {
"account_id": "BLgCRDXYdQPMMWVHYRFGQZbgeHx9frkipa8GtpG2Syqy",
"pub_sign_key": [
16,
162,
106,
154,
236,
125,
52,
184,
35,
100,
238,
174,
69,
197,
41,
77,
187,
10,
118,
75,
0,
11,
148,
238,
185,
181,
133,
17,
220,
72,
124,
77
]
}
},
{
"Public": {
"account_id": "Gj1mJy5W7J5pfmLRujmQaLfLMWidNxQ6uwnhb666ZwHw",
"pub_sign_key": [
113,
121,
64,
177,
204,
85,
229,
214,
178,
6,
109,
191,
29,
154,
63,
38,
242,
18,
244,
219,
8,
208,
35,
136,
23,
127,
207,
237,
216,
169,
190,
27
]
}
},
{
"Private": {
"account_id": "3oCG8gqdKLMegw4rRfyaMQvuPHpcASt7xwttsmnZLSkw",
"account": {
"program_owner": [
0,
0,
0,
0,
0,
0,
0,
0
],
"balance": 10000,
"data": [],
"nonce": 0
},
"key_chain": {
"secret_spending_key": [
251,
82,
235,
1,
146,
96,
30,
81,
162,
234,
33,
15,
123,
129,
116,
0,
84,
136,
176,
70,
190,
224,
161,
54,
134,
142,
154,
1,
18,
251,
242,
189
],
"private_key_holder": {
"nullifier_secret_key": [
29,
250,
10,
187,
35,
123,
180,
250,
246,
97,
216,
153,
44,
156,
16,
93,
241,
26,
174,
219,
72,
84,
34,
247,
112,
101,
217,
243,
189,
173,
75,
20
],
"incoming_viewing_secret_key": [
251,
201,
22,
154,
100,
165,
218,
108,
163,
190,
135,
91,
145,
84,
69,
241,
46,
117,
217,
110,
197,
248,
91,
193,
14,
104,
88,
103,
67,
153,
182,
158
],
"outgoing_viewing_secret_key": [
25,
67,
121,
76,
175,
100,
30,
198,
105,
123,
49,
169,
75,
178,
75,
210,
100,
143,
210,
243,
228,
243,
21,
18,
36,
84,
164,
186,
139,
113,
214,
12
]
},
"nullifer_public_key": [
63,
202,
178,
231,
183,
82,
237,
212,
216,
221,
215,
255,
153,
101,
177,
161,
254,
210,
128,
122,
54,
190,
230,
151,
183,
64,
225,
229,
113,
1,
228,
97
],
"incoming_viewing_public_key": [
3,
235,
139,
131,
237,
177,
122,
189,
6,
177,
167,
178,
202,
117,
246,
58,
28,
65,
132,
79,
220,
139,
119,
243,
187,
160,
212,
121,
61,
247,
116,
72,
205
]
}
}
},
{
"Private": {
"account_id": "AKTcXgJ1xoynta1Ec7y6Jso1z1JQtHqd7aPQ1h9er6xX",
"account": {
"program_owner": [
0,
0,
0,
0,
0,
0,
0,
0
],
"balance": 20000,
"data": [],
"nonce": 0
},
"key_chain": {
"secret_spending_key": [
238,
171,
241,
69,
111,
217,
85,
64,
19,
82,
18,
189,
32,
91,
78,
175,
107,
7,
109,
60,
52,
44,
243,
230,
72,
244,
192,
92,
137,
33,
118,
254
],
"private_key_holder": {
"nullifier_secret_key": [
25,
211,
215,
119,
57,
223,
247,
37,
245,
144,
122,
29,
118,
245,
83,
228,
23,
9,
101,
120,
88,
33,
238,
207,
128,
61,
110,
2,
89,
62,
164,
13
],
"incoming_viewing_secret_key": [
193,
181,
14,
196,
142,
84,
15,
65,
128,
101,
70,
196,
241,
47,
130,
221,
23,
146,
161,
237,
221,
40,
19,
126,
59,
15,
169,
236,
25,
105,
104,
231
],
"outgoing_viewing_secret_key": [
20,
170,
220,
108,
41,
23,
155,
217,
247,
190,
175,
168,
247,
34,
105,
134,
114,
74,
104,
91,
211,
62,
126,
13,
130,
100,
241,
214,
250,
236,
38,
150
]
},
"nullifer_public_key": [
192,
251,
166,
243,
167,
236,
84,
249,
35,
136,
130,
172,
219,
225,
161,
139,
229,
89,
243,
125,
194,
213,
209,
30,
23,
174,
100,
244,
124,
74,
140,
47
],
"incoming_viewing_public_key": [
2,
181,
98,
93,
216,
241,
241,
110,
58,
198,
119,
174,
250,
184,
1,
204,
200,
173,
44,
238,
37,
247,
170,
156,
100,
254,
116,
242,
28,
183,
187,
77,
255
]
}
}
}
],
"basic_auth": null
}