Merge branch 'main' into simple_amm

jonesmarvin8 2025-12-23 11:03:11 -05:00
commit 8289c07b28
98 changed files with 3756 additions and 4318 deletions

.dockerignore (new file, 36 lines)

@ -0,0 +1,36 @@
# Build artifacts
target/
**/target/
# RocksDB data
rocksdb/
**/rocksdb/
# Git
.git/
.gitignore
# IDE
.vscode/
.idea/
*.swp
*.swo
*~
# OS
.DS_Store
Thumbs.db
# CI/CD
.github/
ci_scripts/
# Documentation
*.md
!README.md
# Configs (copy selectively if needed)
configs/
# License
LICENSE


@ -0,0 +1,10 @@
name: Install risc0
description: Installs risc0 in the environment
runs:
using: "composite"
steps:
- name: Install risc0
run: |
curl -L https://risczero.com/install | bash
/home/runner/.risc0/bin/rzup install
shell: bash


@ -0,0 +1,10 @@
name: Install system dependencies
description: Installs system dependencies in the environment
runs:
using: "composite"
steps:
- name: Install system dependencies
run: |
sudo apt-get update
sudo apt-get install -y build-essential clang libclang-dev libssl-dev pkg-config
shell: bash


@ -14,25 +14,164 @@ on:
name: General
jobs:
ubuntu-latest-pipeline:
fmt-rs:
runs-on: ubuntu-latest
timeout-minutes: 120
name: ubuntu-latest-pipeline
steps:
- uses: actions/checkout@v3
- name: Install system dependencies
run: sudo apt-get update && sudo apt-get install -y build-essential clang libclang-dev libssl-dev pkg-config
- name: Install active toolchain
run: rustup install
- uses: actions/checkout@v5
with:
ref: ${{ github.head_ref }}
- name: Install nightly toolchain for rustfmt
run: rustup install nightly --profile minimal --component rustfmt
- name: lint - ubuntu-latest
run: chmod 777 ./ci_scripts/lint-ubuntu.sh && ./ci_scripts/lint-ubuntu.sh
- name: test ubuntu-latest
if: success() || failure()
run: chmod 777 ./ci_scripts/test-ubuntu.sh && ./ci_scripts/test-ubuntu.sh
- name: Check Rust files are formatted
run: cargo +nightly fmt --check
fmt-toml:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
with:
ref: ${{ github.head_ref }}
- name: Install taplo-cli
run: cargo install --locked taplo-cli
- name: Check TOML files are formatted
run: taplo fmt --check .
machete:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
with:
ref: ${{ github.head_ref }}
- name: Install active toolchain
run: rustup install
- name: Install cargo-machete
run: cargo install cargo-machete
- name: Check for unused dependencies
run: cargo machete
lint:
runs-on: ubuntu-latest
timeout-minutes: 60
name: lint
steps:
- uses: actions/checkout@v5
with:
ref: ${{ github.head_ref }}
- uses: ./.github/actions/install-system-deps
- uses: ./.github/actions/install-risc0
- name: Install active toolchain
run: rustup install
- name: Lint workspace
env:
RISC0_SKIP_BUILD: "1"
run: cargo clippy --workspace --all-targets --all-features -- -D warnings
- name: Lint programs
env:
RISC0_SKIP_BUILD: "1"
run: cargo clippy -p "*programs" -- -D warnings
unit-tests:
runs-on: ubuntu-latest
timeout-minutes: 60
steps:
- uses: actions/checkout@v5
with:
ref: ${{ github.head_ref }}
- uses: ./.github/actions/install-system-deps
- uses: ./.github/actions/install-risc0
- name: Install active toolchain
run: rustup install
- name: Install nextest
run: cargo install cargo-nextest
- name: Run unit tests
env:
RISC0_DEV_MODE: "1"
run: cargo nextest run --no-fail-fast
valid-proof-test:
runs-on: ubuntu-latest
timeout-minutes: 60
steps:
- uses: actions/checkout@v5
with:
ref: ${{ github.head_ref }}
- uses: ./.github/actions/install-system-deps
- uses: ./.github/actions/install-risc0
- name: Install active toolchain
run: rustup install
- name: Test valid proof
env:
NSSA_WALLET_HOME_DIR: ./integration_tests/configs/debug/wallet
RUST_LOG: "info"
run: cargo run --bin integration_tests -- ./integration_tests/configs/debug/ test_success_private_transfer_to_another_owned_account
integration-tests:
runs-on: ubuntu-latest
timeout-minutes: 120
steps:
- uses: actions/checkout@v5
with:
ref: ${{ github.head_ref }}
- uses: ./.github/actions/install-system-deps
- uses: ./.github/actions/install-risc0
- name: Install active toolchain
run: rustup install
- name: Run integration tests
env:
NSSA_WALLET_HOME_DIR: ./integration_tests/configs/debug/wallet
RUST_LOG: "info"
RISC0_DEV_MODE: "1"
run: cargo run --bin integration_tests -- ./integration_tests/configs/debug/ all
artifacts:
runs-on: ubuntu-latest
timeout-minutes: 60
name: artifacts
steps:
- uses: actions/checkout@v5
with:
ref: ${{ github.head_ref }}
- uses: ./.github/actions/install-risc0
- name: Install just
run: cargo install just
- name: Build artifacts
run: just build-artifacts
- name: Check if artifacts match repository
run: |
if ! git diff --exit-code artifacts/; then
echo "❌ Artifacts in the repository are out of date!"
echo "Please run 'just build-artifacts' and commit the changes."
exit 1
fi
echo "✅ Artifacts are up to date"

.github/workflows/publish_image.yml (new file, 44 lines)

@ -0,0 +1,44 @@
name: Publish Sequencer Runner Image
on:
workflow_dispatch:
jobs:
publish:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Log in to registry
uses: docker/login-action@v3
with:
registry: ${{ secrets.DOCKER_REGISTRY }}
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Extract metadata
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ secrets.DOCKER_REGISTRY }}/${{ github.repository }}/sequencer_runner
tags: |
type=ref,event=branch
type=ref,event=pr
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
type=sha,prefix={{branch}}-
type=raw,value=latest,enable={{is_default_branch}}
- name: Build and push Docker image
uses: docker/build-push-action@v5
with:
context: .
file: ./sequencer_runner/Dockerfile
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
cache-from: type=gha
cache-to: type=gha,mode=max

.gitignore (3 lines changed)

@ -6,4 +6,5 @@ data/
.idea/
.vscode/
rocksdb
Cargo.lock
sequencer_runner/data/
storage.json

File diff suppressed because it is too large


@ -12,15 +12,42 @@ members = [
"common",
"nssa",
"nssa/core",
"program_methods",
"program_methods/guest",
"test_program_methods",
"test_program_methods/guest",
"integration_tests/proc_macro_test_attribute",
"examples/program_deployment",
"examples/program_deployment/methods",
"examples/program_deployment/methods/guest",
]
[workspace.dependencies]
nssa = { path = "nssa" }
nssa_core = { path = "nssa/core" }
common = { path = "common" }
mempool = { path = "mempool" }
storage = { path = "storage" }
key_protocol = { path = "key_protocol" }
sequencer_core = { path = "sequencer_core" }
sequencer_rpc = { path = "sequencer_rpc" }
sequencer_runner = { path = "sequencer_runner" }
wallet = { path = "wallet" }
test_program_methods = { path = "test_program_methods" }
tokio = { version = "1.28.2", features = [
"net",
"rt-multi-thread",
"sync",
"fs",
] }
risc0-zkvm = { version = "3.0.3", features = ['std'] }
risc0-build = "3.0.3"
anyhow = "1.0.98"
num_cpus = "1.13.1"
openssl = { version = "0.10", features = ["vendored"] }
openssl-probe = { version = "0.1.2" }
serde = { version = "1.0.60", default-features = false, features = ["derive"] }
serde_json = "1.0.81"
actix = "0.13.0"
actix-cors = "0.6.1"
@ -33,9 +60,9 @@ lru = "0.7.8"
thiserror = "2.0.12"
sha2 = "0.10.8"
hex = "0.4.3"
bytemuck = "1.24.0"
aes-gcm = "0.10.3"
toml = "0.7.4"
secp256k1-zkp = "0.11.0"
bincode = "1.3.3"
tempfile = "3.14.0"
light-poseidon = "0.3.0"
@ -54,43 +81,17 @@ rocksdb = { version = "0.24.0", default-features = false, features = [
"snappy",
"bindgen-runtime",
] }
[workspace.dependencies.rand]
features = ["std", "std_rng", "getrandom"]
version = "0.8.5"
[workspace.dependencies.k256]
features = ["ecdsa-core", "arithmetic", "expose-field", "serde", "pem"]
version = "0.13.3"
[workspace.dependencies.elliptic-curve]
features = ["arithmetic"]
version = "0.13.8"
[workspace.dependencies.serde]
features = ["derive"]
version = "1.0.60"
[workspace.dependencies.actix-web]
default-features = false
features = ["macros"]
version = "=4.1.0"
[workspace.dependencies.clap]
features = ["derive", "env"]
version = "4.5.42"
[workspace.dependencies.tokio-retry]
version = "0.3.0"
[workspace.dependencies.reqwest]
features = ["json"]
version = "0.11.16"
[workspace.dependencies.tokio]
features = ["net", "rt-multi-thread", "sync", "fs"]
version = "1.28.2"
[workspace.dependencies.tracing]
features = ["std"]
version = "0.1.13"
rand = { version = "0.8.5", features = ["std", "std_rng", "getrandom"] }
k256 = { version = "0.13.3", features = [
"ecdsa-core",
"arithmetic",
"expose-field",
"serde",
"pem",
] }
elliptic-curve = { version = "0.13.8", features = ["arithmetic"] }
actix-web = { version = "=4.1.0", default-features = false, features = [
"macros",
] }
clap = { version = "4.5.42", features = ["derive", "env"] }
reqwest = { version = "0.11.16", features = ["json"] }

Justfile (new file, 19 lines)

@ -0,0 +1,19 @@
set shell := ["bash", "-eu", "-o", "pipefail", "-c"]
default:
@just --list
# ---- Configuration ----
METHODS_PATH := "program_methods"
TEST_METHODS_PATH := "test_program_methods"
ARTIFACTS := "artifacts"
# ---- Artifacts build ----
build-artifacts:
@echo "🔨 Building artifacts"
@for methods_path in {{METHODS_PATH}} {{TEST_METHODS_PATH}}; do \
echo "Building artifacts for $methods_path"; \
CARGO_TARGET_DIR=target/$methods_path cargo risczero build --manifest-path $methods_path/guest/Cargo.toml; \
mkdir -p {{ARTIFACTS}}/$methods_path; \
cp target/$methods_path/riscv32im-risc0-zkvm-elf/docker/*.bin {{ARTIFACTS}}/$methods_path; \
done

14 binary files not shown.


@ -1,4 +0,0 @@
set -e
curl -L https://risczero.com/install | bash
/Users/runner/.risc0/bin/rzup install
RUSTFLAGS="-D warnings" cargo build


@ -1,4 +0,0 @@
set -e
curl -L https://risczero.com/install | bash
/home/runner/.risc0/bin/rzup install
RUSTFLAGS="-D warnings" cargo build


@ -1,8 +0,0 @@
set -e
cargo +nightly fmt -- --check
cargo install taplo-cli --locked
taplo fmt --check
RISC0_SKIP_BUILD=1 cargo clippy --workspace --all-targets -- -D warnings


@ -1,17 +0,0 @@
set -e
curl -L https://risczero.com/install | bash
/home/runner/.risc0/bin/rzup install
RISC0_DEV_MODE=1 cargo test --release --features no_docker
cd integration_tests
export NSSA_WALLET_HOME_DIR=$(pwd)/configs/debug/wallet/
export RUST_LOG=info
echo "Try test valid proof at least once"
cargo run $(pwd)/configs/debug test_success_private_transfer_to_another_owned_account
echo "Continuing in dev mode"
RISC0_DEV_MODE=1 cargo run $(pwd)/configs/debug all
cd ..
cd nssa/program_methods/guest && cargo test --release


@ -4,18 +4,16 @@ version = "0.1.0"
edition = "2024"
[dependencies]
nssa.workspace = true
nssa_core.workspace = true
anyhow.workspace = true
thiserror.workspace = true
serde_json.workspace = true
serde.workspace = true
reqwest.workspace = true
sha2.workspace = true
log.workspace = true
hex.workspace = true
nssa-core = { path = "../nssa/core", features = ["host"] }
borsh.workspace = true
base64.workspace = true
[dependencies.nssa]
path = "../nssa"


@ -4,10 +4,8 @@ version = "0.1.0"
edition = "2024"
[dependencies]
nssa.workspace = true
wallet.workspace = true
tokio = { workspace = true, features = ["macros"] }
wallet = { path = "../../wallet" }
nssa-core = { path = "../../nssa/core" }
nssa = { path = "../../nssa" }
key_protocol = { path = "../../key_protocol/" }
clap = "4.5.53"
serde = "1.0.228"
clap.workspace = true


@ -340,7 +340,7 @@ Luckily all that complexity is hidden behind the `wallet_core.send_privacy_prese
.send_privacy_preserving_tx(
accounts,
&Program::serialize_instruction(greeting).unwrap(),
&program,
&program.into(),
)
.await
.unwrap();
@ -568,4 +568,94 @@ Output:
```
Hola mundo!Hello from tail call
```
## Private tail-calls
There's support for tail calls in privacy preserving executions too. The `run_hello_world_through_tail_call_private.rs` runner walks you through the process of invoking such an execution.
The only difference is that, since the execution is local, the runner needs both programs: `simple_tail_call` and its dependency `hello_world`.
Let's use our existing private account with id `8vzkK7vsdrS2gdPhLk72La8X4FJkgJ5kJLUBRbEVkReU`. This one is already owned by the `hello_world` program.
You can test the privacy tail calls with
```bash
cargo run --bin run_hello_world_through_tail_call_private \
$EXAMPLE_PROGRAMS_BUILD_DIR/simple_tail_call.bin \
$EXAMPLE_PROGRAMS_BUILD_DIR/hello_world.bin \
8vzkK7vsdrS2gdPhLk72La8X4FJkgJ5kJLUBRbEVkReU
```
>[!NOTE]
> The above command may take longer than the previous privacy-preserving executions because it needs to generate proofs of execution for both the `simple_tail_call` and the `hello_world` programs.
Once it finishes, run the following to see the changes:
```bash
wallet account sync-private
wallet account get --account-id Private/8vzkK7vsdrS2gdPhLk72La8X4FJkgJ5kJLUBRbEVkReU
```
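For reference, this is essentially what the runner does under the hood: it loads both binaries, registers `hello_world` as a dependency of `simple_tail_call` via `ProgramWithDependencies`, and submits the privacy-preserving transaction. The sketch below is condensed from `run_hello_world_through_tail_call_private.rs` (shown later in this diff); the binary paths are placeholders, since the real runner takes them as command-line arguments.
```rust
use std::collections::HashMap;

use nssa::{
    AccountId, ProgramId, privacy_preserving_transaction::circuit::ProgramWithDependencies,
    program::Program,
};
use wallet::{PrivacyPreservingAccount, WalletCore, helperfunctions::fetch_config};

#[tokio::main]
async fn main() {
    // Start the wallet from its config, as the example runners do.
    let wallet_core = WalletCore::start_from_config_update_chain(fetch_config().await.unwrap())
        .await
        .unwrap();

    // Load the main program and the program it tail-calls into
    // (paths here are placeholders for the compiled .bin files).
    let simple_tail_call = Program::new(std::fs::read("simple_tail_call.bin").unwrap()).unwrap();
    let hello_world = Program::new(std::fs::read("hello_world.bin").unwrap()).unwrap();

    // The dependency map lets the local prover resolve the chained call to `hello_world`.
    let dependencies: HashMap<ProgramId, Program> =
        [(hello_world.id(), hello_world)].into_iter().collect();
    let program = ProgramWithDependencies::new(simple_tail_call, dependencies);

    // Submit the privacy-preserving transaction for the private account.
    let account_id: AccountId = "8vzkK7vsdrS2gdPhLk72La8X4FJkgJ5kJLUBRbEVkReU"
        .parse()
        .unwrap();
    wallet_core
        .send_privacy_preserving_tx(
            vec![PrivacyPreservingAccount::PrivateOwned(account_id)],
            &Program::serialize_instruction(()).unwrap(),
            &program,
        )
        .await
        .unwrap();
}
```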
# 13. Program derived accounts: authorizing accounts through tail calls
## Digression: account authority vs account program ownership
In NSSA there are two distinct concepts that control who can modify an account:
**Program Ownership:** Each account has a field: `program_owner: ProgramId`.
This indicates which program is allowed to update the account's state during execution.
- If a program is the program_owner of an account, it can freely mutate its fields.
- If the account is uninitialized (`program_owner = DEFAULT_PROGRAM_ID`), a program may claim it and become its owner.
- If a program is not the owner and the account is not claimable, any attempt to modify it will cause the transition to fail.
Program ownership is about mutation rights during program execution.
**Account authority**: Independent of program ownership, each account also has an authority: the entity that is allowed to set `is_authorized = true`. This flag indicates that the account has been authorized for use in a transaction.
Who can act as authority?
- User-defined accounts: The user is the authority. They can mark an account as authorized by:
- Signing the transaction (public accounts)
- Providing a valid nullifier secret key ownership proof (private accounts)
- Program derived accounts: Programs are automatically the authority of a dedicated namespace of public accounts.
Each program owns its own space of 2^256 **public** account IDs, which does not overlap with:
- User accounts (public or private)
- Other programs' PDAs
> [!NOTE]
> Currently PDAs are restricted to the public state.
A program can be the authority of an account owned by another program, which is the most common case.
During a chained call, a program can mark its PDA accounts as `is_authorized=true` without requiring any user signatures or nullifier secret keys. This enables programs to safely authorize accounts during program composition. Importantly, these flags can only be set to true for PDA accounts through an execution of the program that is their authority. No user and no other program can execute any transition that requires authorization of PDA accounts belonging to a different program.
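To make the namespace concrete: a program-derived Account ID is computed deterministically from the program's ID and a 32-byte seed, and only an execution of that program can flip `is_authorized` on the resulting account. A minimal sketch of the client-side derivation, using the `PdaSeed` and `AccountId` types as the example programs in this tutorial use them (the seed value is the fixed one from `tail_call_with_pda`):
```rust
use nssa::{AccountId, ProgramId};
use nssa_core::program::PdaSeed;

// The fixed seed used by the `tail_call_with_pda` example program.
const PDA_SEED: PdaSeed = PdaSeed::new([37; 32]);

// (program ID, seed) -> public account ID in that program's PDA namespace.
fn derive_pda(program_id: &ProgramId) -> AccountId {
    AccountId::from((program_id, &PDA_SEED))
}
```
Inside the guest, the same seed goes into `ChainedCall::pda_seeds`, which is what allows the program to set `is_authorized = true` on that account for the call it chains into.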
## Running the example
This tutorial includes an example of PDA usage in `methods/guest/src/bin/tail_call_with_pda.rs`. That program's sole purpose is to forward one of its own PDA accounts, an account for which it is the authority, to the "Hello World with authorization" program via a chained call. The Hello World program will then claim the account and become its program owner, but the `tail_call_with_pda` program remains the authority. This means it is still the only entity capable of marking that account as `is_authorized=true`.
Deploy the program:
```bash
wallet deploy-program $EXAMPLE_PROGRAMS_BUILD_DIR/tail_call_with_pda.bin
```
There is no need to create a new account for this example, because we simply use one of the PDA accounts belonging to the `tail_call_with_pda` program.
Execute the program:
```bash
cargo run --bin run_hello_world_with_authorization_through_tail_call_with_pda $EXAMPLE_PROGRAMS_BUILD_DIR/tail_call_with_pda.bin
```
You'll see an output like the following:
```bash
The program derived account ID is: 3tfTPPuxj3eSE1cLVuNBEk8eSHzpnYS1oqEdeH3Nfsks
```
Then check the status of that account:
```bash
wallet account get --account-id Public/3tfTPPuxj3eSE1cLVuNBEk8eSHzpnYS1oqEdeH3Nfsks
```
Output:
```bash
{
"balance":0,
"program_owner_b64":"HZXHYRaKf6YusVo8x00/B15uyY5sGsJb1bzH4KlCY5g=",
"data_b64": "SGVsbG8gZnJvbSB0YWlsIGNhbGwgd2l0aCBQcm9ncmFtIERlcml2ZWQgQWNjb3VudCBJRA==",
"nonce":0"
}
```


@ -1,10 +1,10 @@
[package]
name = "test-program-methods"
name = "example_program_deployment_methods"
version = "0.1.0"
edition = "2024"
[build-dependencies]
risc0-build = { version = "3.0.3" }
risc0-build.workspace = true
[package.metadata.risc0]
methods = ["guest"]


@ -1,13 +1,11 @@
[package]
name = "programs"
name = "example_program_deployment_programs"
version = "0.1.0"
edition = "2024"
[workspace]
[dependencies]
risc0-zkvm = { version = "3.0.3", features = ['std'] }
nssa-core = { path = "../../../../nssa/core" }
serde = { version = "1.0.219", default-features = false }
hex = "0.4.3"
bytemuck = "1.24.0"
nssa_core.workspace = true
hex.workspace = true
bytemuck.workspace = true
risc0-zkvm.workspace = true


@ -9,13 +9,12 @@ use nssa_core::program::{
// It reads a single account, emits it unchanged, and then triggers a tail call
// to the Hello World program with a fixed greeting.
/// This needs to be set to the ID of the Hello world program.
/// To get the ID run **from the root directory of the repository**:
/// `cargo risczero build --manifest-path examples/program_deployment/methods/guest/Cargo.toml`
/// This compiles the programs and outputs their IDs in hex, which can be copied here.
const HELLO_WORLD_PROGRAM_ID_HEX: &str =
"7e99d6e2d158f4dea59597011da5d1c2eef17beed6667657f515b387035b935a";
"e9dfc5a5d03c9afa732adae6e0edfce4bbb44c7a2afb9f148f4309917eb2de6f";
fn hello_world_program_id() -> ProgramId {
let hello_world_program_id_bytes: [u8; 32] = hex::decode(HELLO_WORLD_PROGRAM_ID_HEX)


@ -0,0 +1,76 @@
use nssa_core::program::{
AccountPostState, ChainedCall, PdaSeed, ProgramId, ProgramInput, read_nssa_inputs,
write_nssa_outputs_with_chained_call,
};
// Tail Call with PDA example program.
//
// Demonstrates how to chain execution to another program using `ChainedCall`
// while authorizing program-derived accounts.
//
// Expects a single input account whose Account ID is derived from this
// program's ID and the fixed PDA seed below (as defined by the
// `<AccountId as From<(&ProgramId, &PdaSeed)>>` implementation).
//
// Emits this account unchanged, then performs a tail call to the
// Hello-World-with-Authorization program with a fixed greeting. The same
// account is passed along but marked with `is_authorized = true`.
const HELLO_WORLD_WITH_AUTHORIZATION_PROGRAM_ID_HEX: &str =
"1d95c761168a7fa62eb15a3cc74d3f075e6ec98e6c1ac25bd5bcc7e0a9426398";
const PDA_SEED: PdaSeed = PdaSeed::new([37; 32]);
fn hello_world_program_id() -> ProgramId {
let hello_world_program_id_bytes: [u8; 32] =
hex::decode(HELLO_WORLD_WITH_AUTHORIZATION_PROGRAM_ID_HEX)
.unwrap()
.try_into()
.unwrap();
bytemuck::cast(hello_world_program_id_bytes)
}
fn main() {
// Read inputs
let (
ProgramInput {
pre_states,
instruction: _,
},
instruction_data,
) = read_nssa_inputs::<()>();
// Unpack the input account pre state
let [pre_state] = pre_states
.clone()
.try_into()
.unwrap_or_else(|_| panic!("Input pre states should consist of a single account"));
// Create the (unchanged) post state
let post_state = AccountPostState::new(pre_state.account.clone());
// Create the chained call
let chained_call_greeting: Vec<u8> =
b"Hello from tail call with Program Derived Account ID".to_vec();
let chained_call_instruction_data = risc0_zkvm::serde::to_vec(&chained_call_greeting).unwrap();
// Flip the `is_authorized` flag to true
let pre_state_for_chained_call = {
let mut this = pre_state.clone();
this.is_authorized = true;
this
};
let chained_call = ChainedCall {
program_id: hello_world_program_id(),
instruction_data: chained_call_instruction_data,
pre_states: vec![pre_state_for_chained_call],
pda_seeds: vec![PDA_SEED],
};
// Write the outputs
write_nssa_outputs_with_chained_call(
instruction_data,
vec![pre_state],
vec![post_state],
vec![chained_call],
);
}


@ -54,7 +54,7 @@ async fn main() {
.send_privacy_preserving_tx(
accounts,
&Program::serialize_instruction(greeting).unwrap(),
&program,
&program.into(),
)
.await
.unwrap();


@ -0,0 +1,69 @@
use std::collections::HashMap;
use nssa::{
AccountId, ProgramId, privacy_preserving_transaction::circuit::ProgramWithDependencies,
program::Program,
};
use wallet::{PrivacyPreservingAccount, WalletCore, helperfunctions::fetch_config};
// Before running this example, compile the `simple_tail_call.rs` guest program with:
//
// cargo risczero build --manifest-path examples/program_deployment/methods/guest/Cargo.toml
//
// Note: you must run the above command from the root of the `lssa` repository.
// Note: The compiled binary file is stored in
// methods/guest/target/riscv32im-risc0-zkvm-elf/docker/simple_tail_call.bin
//
//
// Usage:
// cargo run --bin run_hello_world_through_tail_call_private /path/to/simple_tail_call.bin /path/to/hello_world.bin <account_id>
//
// Example:
// cargo run --bin run_hello_world_through_tail_call_private \
//     methods/guest/target/riscv32im-risc0-zkvm-elf/docker/simple_tail_call.bin \
//     methods/guest/target/riscv32im-risc0-zkvm-elf/docker/hello_world.bin \
//     Ds8q5PjLcKwwV97Zi7duhRVF9uwA2PuYMoLL7FwCzsXE
#[tokio::main]
async fn main() {
// Load wallet config and storage
let wallet_config = fetch_config().await.unwrap();
let wallet_core = WalletCore::start_from_config_update_chain(wallet_config)
.await
.unwrap();
// Parse arguments
// First argument is the path to the simple_tail_call program binary
let simple_tail_call_path = std::env::args_os().nth(1).unwrap().into_string().unwrap();
// Second argument is the path to the hello_world program binary
let hello_world_path = std::env::args_os().nth(2).unwrap().into_string().unwrap();
// Third argument is the account_id
let account_id: AccountId = std::env::args_os()
.nth(3)
.unwrap()
.into_string()
.unwrap()
.parse()
.unwrap();
// Load the program and its dependency (the hello world program)
let simple_tail_call_bytecode: Vec<u8> = std::fs::read(simple_tail_call_path).unwrap();
let simple_tail_call = Program::new(simple_tail_call_bytecode).unwrap();
let hello_world_bytecode: Vec<u8> = std::fs::read(hello_world_path).unwrap();
let hello_world = Program::new(hello_world_bytecode).unwrap();
let dependencies: HashMap<ProgramId, Program> =
[(hello_world.id(), hello_world)].into_iter().collect();
let program_with_dependencies = ProgramWithDependencies::new(simple_tail_call, dependencies);
let accounts = vec![PrivacyPreservingAccount::PrivateOwned(account_id)];
// Construct and submit the privacy-preserving transaction
let instruction = ();
wallet_core
.send_privacy_preserving_tx(
accounts,
&Program::serialize_instruction(instruction).unwrap(),
&program_with_dependencies,
)
.await
.unwrap();
}


@ -0,0 +1,62 @@
use nssa::{
AccountId, PublicTransaction,
program::Program,
public_transaction::{Message, WitnessSet},
};
use nssa_core::program::PdaSeed;
use wallet::{WalletCore, helperfunctions::fetch_config};
// Before running this example, compile the `tail_call_with_pda.rs` guest program with:
//
// cargo risczero build --manifest-path examples/program_deployment/methods/guest/Cargo.toml
//
// Note: you must run the above command from the root of the `lssa` repository.
// Note: The compiled binary file is stored in
// methods/guest/target/riscv32im-risc0-zkvm-elf/docker/tail_call_with_pda.bin
//
//
// Usage:
// cargo run --bin run_hello_world_with_authorization_through_tail_call_with_pda
// /path/to/guest/binary
//
// Example:
// cargo run --bin run_hello_world_with_authorization_through_tail_call_with_pda \
// methods/guest/target/riscv32im-risc0-zkvm-elf/docker/tail_call_with_pda.bin
const PDA_SEED: PdaSeed = PdaSeed::new([37; 32]);
#[tokio::main]
async fn main() {
// Load wallet config and storage
let wallet_config = fetch_config().await.unwrap();
let wallet_core = WalletCore::start_from_config_update_chain(wallet_config)
.await
.unwrap();
// Parse arguments
// First argument is the path to the program binary
let program_path = std::env::args_os().nth(1).unwrap().into_string().unwrap();
// Load the program
let bytecode: Vec<u8> = std::fs::read(program_path).unwrap();
let program = Program::new(bytecode).unwrap();
// Compute the PDA to pass it as input account to the public execution
let pda = AccountId::from((&program.id(), &PDA_SEED));
let account_ids = vec![pda];
let instruction_data = ();
let nonces = vec![];
let signing_keys = [];
let message = Message::try_new(program.id(), account_ids, nonces, instruction_data).unwrap();
let witness_set = WitnessSet::for_message(&message, &signing_keys);
let tx = PublicTransaction::new(message, witness_set);
// Submit the transaction
let _response = wallet_core
.sequencer_client
.send_tx_public(tx)
.await
.unwrap();
println!("The program derived account id is: {pda}");
}


@ -105,7 +105,7 @@ async fn main() {
.send_privacy_preserving_tx(
accounts,
&Program::serialize_instruction(instruction).unwrap(),
&program,
&program.into(),
)
.await
.unwrap();
@ -146,7 +146,7 @@ async fn main() {
.send_privacy_preserving_tx(
accounts,
&Program::serialize_instruction(instruction).unwrap(),
&program,
&program.into(),
)
.await
.unwrap();


@ -4,6 +4,16 @@ version = "0.1.0"
edition = "2024"
[dependencies]
nssa_core = { workspace = true, features = ["host"] }
nssa.workspace = true
sequencer_core = { workspace = true, features = ["testnet"] }
sequencer_runner.workspace = true
wallet.workspace = true
common.workspace = true
key_protocol.workspace = true
proc_macro_test_attribute = { path = "./proc_macro_test_attribute" }
clap = { workspace = true, features = ["derive", "env"] }
anyhow.workspace = true
env_logger.workspace = true
log.workspace = true
@ -14,31 +24,3 @@ tokio.workspace = true
hex.workspace = true
tempfile.workspace = true
borsh.workspace = true
nssa-core = { path = "../nssa/core", features = ["host"] }
proc_macro_test_attribute = { path = "./proc_macro_test_attribute" }
[dependencies.clap]
features = ["derive", "env"]
workspace = true
[dependencies.sequencer_core]
path = "../sequencer_core"
features = ["testnet"]
[dependencies.sequencer_runner]
path = "../sequencer_runner"
[dependencies.wallet]
path = "../wallet"
[dependencies.common]
path = "../common"
[dependencies.key_protocol]
path = "../key_protocol"
[dependencies.nssa]
path = "../nssa"
features = ["no_docker"]

Binary file not shown.


@ -1596,7 +1596,10 @@ pub fn prepare_function_map() -> HashMap<String, TestFunction> {
pub async fn test_program_deployment() {
info!("########## test program deployment ##########");
let binary_filepath: PathBuf = NSSA_PROGRAM_FOR_TEST_DATA_CHANGER.parse().unwrap();
let manifest_dir = env!("CARGO_MANIFEST_DIR");
let binary_filepath: PathBuf = PathBuf::from(manifest_dir)
.join("../artifacts/test_program_methods")
.join(NSSA_PROGRAM_FOR_TEST_DATA_CHANGER);
let command = Command::DeployProgram {
binary_filepath: binary_filepath.clone(),


@ -4,22 +4,19 @@ version = "0.1.0"
edition = "2024"
[dependencies]
nssa.workspace = true
nssa_core.workspace = true
common.workspace = true
anyhow.workspace = true
serde.workspace = true
k256.workspace = true
sha2.workspace = true
rand.workspace = true
base58.workspace = true
hex = "0.4.3"
hex.workspace = true
aes-gcm.workspace = true
bip39.workspace = true
hmac-sha512.workspace = true
thiserror.workspace = true
nssa-core = { path = "../nssa/core", features = ["host"] }
itertools.workspace = true
[dependencies.common]
path = "../common"
[dependencies.nssa]
path = "../nssa"


@ -4,27 +4,27 @@ version = "0.1.0"
edition = "2024"
[dependencies]
thiserror = "2.0.12"
risc0-zkvm = { version = "3.0.3", features = ['std'] }
nssa-core = { path = "core", features = ["host"] }
program-methods = { path = "program_methods", optional = true }
serde = "1.0.219"
sha2 = "0.10.9"
nssa_core = { workspace = true, features = ["host"] }
thiserror.workspace = true
risc0-zkvm.workspace = true
serde.workspace = true
sha2.workspace = true
rand.workspace = true
borsh.workspace = true
hex.workspace = true
secp256k1 = "0.31.1"
rand = "0.8"
borsh = "1.5.7"
hex = "0.4.3"
risc0-binfmt = "3.0.2"
bytemuck = "1.24.0"
log.workspace = true
[build-dependencies]
risc0-build = "3.0.3"
risc0-binfmt = "3.0.2"
[dev-dependencies]
test-program-methods = { path = "test_program_methods" }
test_program_methods.workspace = true
hex-literal = "1.0.0"
[features]
default = []
no_docker = ["program-methods"]


@ -1,43 +1,21 @@
fn main() {
if cfg!(feature = "no_docker") {
println!("cargo:warning=NO_DOCKER feature enabled deterministic build skipped");
return;
}
build_deterministic().expect("Deterministic build failed");
}
fn build_deterministic() -> Result<(), Box<dyn std::error::Error>> {
use std::{env, fs, path::PathBuf, process::Command};
use std::{env, fs, path::PathBuf};
fn main() -> Result<(), Box<dyn std::error::Error>> {
let manifest_dir = PathBuf::from(env::var("CARGO_MANIFEST_DIR")?);
let out_dir = PathBuf::from(env::var("OUT_DIR")?);
let mod_dir = out_dir.join("program_methods");
let mod_file = mod_dir.join("mod.rs");
let program_methods_dir = manifest_dir.join("../artifacts/program_methods/");
println!("cargo:rerun-if-changed=program_methods/guest/src");
println!("cargo:rerun-if-changed=program_methods/guest/Cargo.toml");
println!("cargo:rerun-if-changed={}", program_methods_dir.display());
let guest_manifest = manifest_dir.join("program_methods/guest/Cargo.toml");
let status = Command::new("cargo")
.args(["risczero", "build", "--manifest-path"])
.arg(&guest_manifest)
.status()?;
if !status.success() {
return Err("Risc0 deterministic build failed".into());
}
let target_dir =
manifest_dir.join("program_methods/guest/target/riscv32im-risc0-zkvm-elf/docker/");
let bins = fs::read_dir(&target_dir)?
let bins = fs::read_dir(&program_methods_dir)?
.filter_map(Result::ok)
.filter(|e| e.path().extension().is_some_and(|ext| ext == "bin"))
.collect::<Vec<_>>();
if bins.is_empty() {
return Err(format!("No .bin files found in {:?}", target_dir).into());
return Err(format!("No .bin files found in {:?}", program_methods_dir).into());
}
fs::create_dir_all(&mod_dir)?;


@ -1,21 +1,21 @@
[package]
name = "nssa-core"
name = "nssa_core"
version = "0.1.0"
edition = "2024"
[dependencies]
risc0-zkvm = { version = "3.0.3", features = ['std'] }
serde = { version = "1.0", default-features = false }
thiserror = { version = "2.0.12" }
bytemuck = "1.13"
risc0-zkvm.workspace = true
borsh.workspace = true
serde = { workspace = true }
thiserror.workspace = true
chacha20 = { version = "0.9", default-features = false }
k256 = { version = "0.13.3", optional = true }
base58 = { version = "0.2.0", optional = true }
anyhow = { version = "1.0.98", optional = true }
borsh = "1.5.7"
bytemuck = { workspace = true, optional = true }
k256 = { workspace = true, optional = true }
base58 = { workspace = true, optional = true }
anyhow = { workspace = true, optional = true }
[dev-dependencies]
serde_json = "1.0.81"
serde_json.workspace = true
[features]
default = []


@ -44,11 +44,10 @@ impl AccountWithMetadata {
}
}
#[derive(Default, Clone, Eq, Hash, PartialEq, Serialize, Deserialize, BorshSerialize, BorshDeserialize)]
#[cfg_attr(
any(feature = "host", test),
derive(Debug, Copy, PartialOrd, Ord)
#[derive(
Copy, Clone, Default, Serialize, Deserialize, PartialEq, Eq, Hash, BorshSerialize, BorshDeserialize,
)]
#[cfg_attr(any(feature = "host", test), derive(Debug, PartialOrd, Ord, Default))]
pub struct AccountId {
value: [u8; 32],
}


@ -27,7 +27,7 @@ pub struct ProgramInput<T> {
pub struct PdaSeed([u8; 32]);
impl PdaSeed {
pub fn new(value: [u8; 32]) -> Self {
pub const fn new(value: [u8; 32]) -> Self {
Self(value)
}
}


@ -1,12 +1,7 @@
#[cfg(not(feature = "no_docker"))]
pub mod program_methods {
include!(concat!(env!("OUT_DIR"), "/program_methods/mod.rs"));
}
#[cfg(feature = "no_docker")]
#[allow(clippy::single_component_path_imports)]
use program_methods;
pub mod encoding;
pub mod error;
mod merkle_tree;


@ -1,6 +1,7 @@
use std::collections::{HashMap, HashSet, VecDeque};
use borsh::{BorshDeserialize, BorshSerialize};
use log::debug;
use nssa_core::{
account::{Account, AccountId, AccountWithMetadata},
program::{ChainedCall, DEFAULT_PROGRAM_ID, PdaSeed, ProgramId, validate_execution},
@ -123,8 +124,16 @@ impl PublicTransaction {
return Err(NssaError::InvalidInput("Unknown program".into()));
};
debug!(
"Program {:?} pre_states: {:?}, instruction_data: {:?}",
chained_call.program_id, chained_call.pre_states, chained_call.instruction_data
);
let mut program_output =
program.execute(&chained_call.pre_states, &chained_call.instruction_data)?;
debug!(
"Program {:?} output: {:?}",
chained_call.program_id, program_output
);
let authorized_pdas =
self.compute_authorized_pdas(&caller_program_id, &chained_call.pda_seeds);

File diff suppressed because it is too large


@ -1,11 +0,0 @@
[package]
name = "programs"
version = "0.1.0"
edition = "2024"
[workspace]
[dependencies]
risc0-zkvm = { version = "3.0.3", features = ['std'] }
nssa-core = { path = "../../core" }
serde = { version = "1.0.219", default-features = false }


@ -1,18 +0,0 @@
use nssa_core::program::{read_nssa_inputs, write_nssa_outputs, AccountPostState, ProgramInput};
type Instruction = ();
fn main() {
let (ProgramInput { pre_states, .. } , instruction_words) = read_nssa_inputs::<Instruction>();
let [pre] = match pre_states.try_into() {
Ok(array) => array,
Err(_) => return,
};
let account_pre = &pre.account;
let mut account_post = account_pre.clone();
account_post.nonce += 1;
write_nssa_outputs(instruction_words ,vec![pre], vec![AccountPostState::new(account_post)]);
}


@ -1,12 +0,0 @@
use nssa_core::program::{read_nssa_inputs, write_nssa_outputs, ProgramInput, AccountPostState};
type Instruction = ();
fn main() {
let (ProgramInput { pre_states, .. }, instruction_words) = read_nssa_inputs::<Instruction>();
let post_states = pre_states.iter().map(|account| {
AccountPostState::new(account.account.clone())
}).collect();
write_nssa_outputs(instruction_words, pre_states, post_states);
}


@ -1,10 +1,10 @@
[package]
name = "program-methods"
name = "program_methods"
version = "0.1.0"
edition = "2024"
[build-dependencies]
risc0-build = { version = "3.0.3" }
risc0-build.workspace = true
[package.metadata.risc0]
methods = ["guest"]


@ -0,0 +1,10 @@
[package]
name = "programs"
version = "0.1.0"
edition = "2024"
[dependencies]
nssa_core.workspace = true
risc0-zkvm.workspace = true
serde = { workspace = true, default-features = false }


@ -107,7 +107,7 @@ fn main() {
} else {
pre_states.push(pre.clone());
}
state_diff.insert(pre.account_id.clone(), post.account().clone());
state_diff.insert(pre.account_id, post.account().clone());
}
// TODO: Modify when multi-chain calls are supported in the circuit


@ -6,36 +6,35 @@ use nssa_core::{
};
// The token program has three functions:
// 1. New token definition.
// Arguments to this function are:
// * Two **default** accounts: [definition_account, holding_account].
// The first default account will be initialized with the token definition account values. The second account will
// be initialized to a token holding account for the new token, holding the entire total supply.
// * An instruction data of 23-bytes, indicating the total supply and the token name, with
// the following layout:
// [0x00 || total_supply (little-endian 16 bytes) || name (6 bytes)]
// The name cannot be equal to [0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
// 2. Token transfer
// Arguments to this function are:
// 1. New token definition. Arguments to this function are:
// * Two **default** accounts: [definition_account, holding_account]. The first default account
// will be initialized with the token definition account values. The second account will be
// initialized to a token holding account for the new token, holding the entire total supply.
// * An instruction data of 23-bytes, indicating the total supply and the token name, with the
// following layout: [0x00 || total_supply (little-endian 16 bytes) || name (6 bytes)] The
// name cannot be equal to [0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
// 2. Token transfer Arguments to this function are:
// * Two accounts: [sender_account, recipient_account].
// * An instruction data byte string of length 23, indicating the total supply with the following layout
// [0x01 || amount (little-endian 16 bytes) || 0x00 || 0x00 || 0x00 || 0x00 || 0x00 || 0x00].
// 3. Initialize account with zero balance
// Arguments to this function are:
// * An instruction data byte string of length 23, indicating the total supply with the
// following layout [0x01 || amount (little-endian 16 bytes) || 0x00 || 0x00 || 0x00 || 0x00
// || 0x00 || 0x00].
// 3. Initialize account with zero balance Arguments to this function are:
// * Two accounts: [definition_account, account_to_initialize].
// * A dummy byte string of length 23, with the following layout
// [0x02 || 0x00 || 0x00 || 0x00 || ... || 0x00 || 0x00].
// 4. Burn tokens from a Token Holding account (thus lowering total supply)
// Arguments to this function are:
// * A dummy byte string of length 23, with the following layout [0x02 || 0x00 || 0x00 || 0x00
// || ... || 0x00 || 0x00].
// 4. Burn tokens from a Token Holding account (thus lowering total supply) Arguments to this
// function are:
// * Two accounts: [definition_account, holding_account].
// * Authorization required: holding_account
// * An instruction data byte string of length 23, indicating the balance to burn with the following layout
// * An instruction data byte string of length 23, indicating the balance to burn with the
// following layout
// [0x03 || amount (little-endian 16 bytes) || 0x00 || 0x00 || 0x00 || 0x00 || 0x00 || 0x00].
// 5. Mint additional supply of tokens to a Token Holding account (thus increasing total supply)
// Arguments to this function are:
// 5. Mint additional supply of tokens to a Token Holding account (thus increasing total
// supply) Arguments to this function are:
// * Two accounts: [definition_account, holding_account].
// * Authorization required: definition_account
// * An instruction data byte string of length 23, indicating the balance to mint with the folloiwng layout
// * An instruction data byte string of length 23, indicating the balance to mint with the
// following layout
// [0x04 || amount (little-endian 16 bytes) || 0x00 || 0x00 || 0x00 || 0x00 || 0x00 || 0x00].
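// Illustrative sketch (not part of this diff): packing the 23-byte transfer
// instruction data described above: selector byte 0x01, then the amount as a
// little-endian u128, then six zero padding bytes. The helper name is
// hypothetical.
#[allow(dead_code)]
fn example_transfer_instruction_data(amount: u128) -> [u8; 23] {
    let mut data = [0u8; 23];
    data[0] = 0x01; // function selector: token transfer
    data[1..17].copy_from_slice(&amount.to_le_bytes()); // 16-byte little-endian amount
    data // the remaining six bytes stay 0x00
}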
const TOKEN_DEFINITION_TYPE: u8 = 0;
@ -89,10 +88,10 @@ impl TokenDefinition {
}
impl TokenHolding {
fn new(definition_id: &AccountId) -> Self {
fn new(definition_id: AccountId) -> Self {
Self {
account_type: TOKEN_HOLDING_TYPE,
definition_id: definition_id.clone(),
definition_id,
balance: 0,
}
}
@ -142,7 +141,7 @@ fn transfer(pre_states: &[AccountWithMetadata], balance_to_move: u128) -> Vec<Ac
let mut sender_holding =
TokenHolding::parse(&sender.account.data).expect("Invalid sender data");
let mut recipient_holding = if recipient.account == Account::default() {
TokenHolding::new(&sender_holding.definition_id)
TokenHolding::new(sender_holding.definition_id)
} else {
TokenHolding::parse(&recipient.account.data).expect("Invalid recipient data")
};
@ -213,7 +212,7 @@ fn new_definition(
let token_holding = TokenHolding {
account_type: TOKEN_HOLDING_TYPE,
definition_id: definition_target_account.account_id.clone(),
definition_id: definition_target_account.account_id,
balance: total_supply,
};
@ -247,7 +246,7 @@ fn initialize_account(pre_states: &[AccountWithMetadata]) -> Vec<AccountPostStat
// Check definition account is valid
let _definition_values =
TokenDefinition::parse(&definition.account.data).expect("Definition account must be valid");
let holding_values = TokenHolding::new(&definition.account_id);
let holding_values = TokenHolding::new(definition.account_id);
let definition_post = definition.account.clone();
let mut account_to_initialize = account_to_initialize.account.clone();
@ -330,7 +329,7 @@ fn mint_additional_supply(
TokenDefinition::parse(&definition.account.data).expect("Definition account must be valid");
let token_holding_values: TokenHolding = if token_holding.account == Account::default() {
TokenHolding::new(&definition.account_id)
TokenHolding::new(definition.account_id)
} else {
TokenHolding::parse(&token_holding.account.data).expect("Holding account must be valid")
};
@ -1078,7 +1077,6 @@ mod tests {
is_authorized: true,
account_id: helper_id_constructor(IdEnum::PoolDefinitionId),
},
_ => panic!("Invalid selection"),
}
}
@ -1093,8 +1091,7 @@ mod tests {
BalanceEnum::MintSuccess => 50_000,
BalanceEnum::InitSupplyMint => 150_000,
BalanceEnum::HoldingBalanceMint => 51_000,
BalanceEnum::MintOverflow => (2 as u128).pow(128) - 40_000,
_ => panic!("Invalid selection"),
BalanceEnum::MintOverflow => u128::MAX - 40_000,
}
}
@ -1290,7 +1287,7 @@ mod tests {
assert!(
*holding_post.account() == helper_account_constructor(AccountsEnum::InitMint).account
);
assert!(holding_post.requires_claim() == true);
assert!(holding_post.requires_claim());
}
#[test]


@ -4,26 +4,18 @@ version = "0.1.0"
edition = "2024"
[dependencies]
nssa.workspace = true
nssa_core.workspace = true
common.workspace = true
storage.workspace = true
mempool.workspace = true
base58.workspace = true
anyhow.workspace = true
serde.workspace = true
rand.workspace = true
tempfile.workspace = true
chrono.workspace = true
log.workspace = true
nssa-core = { path = "../nssa/core", features = ["host"] }
[dependencies.storage]
path = "../storage"
[dependencies.mempool]
path = "../mempool"
[dependencies.common]
path = "../common"
[dependencies.nssa]
path = "../nssa"
[features]
default = []


@ -4,6 +4,11 @@ version = "0.1.0"
edition = "2024"
[dependencies]
nssa.workspace = true
common.workspace = true
mempool.workspace = true
sequencer_core.workspace = true
anyhow.workspace = true
serde_json.workspace = true
log.workspace = true
@ -11,25 +16,10 @@ serde.workspace = true
actix-cors.workspace = true
futures.workspace = true
base58.workspace = true
hex = "0.4.3"
hex.workspace = true
tempfile.workspace = true
base64.workspace = true
itertools.workspace = true
actix-web.workspace = true
tokio.workspace = true
borsh.workspace = true
# TODO: Move to workspace
[dependencies.sequencer_core]
path = "../sequencer_core"
[dependencies.common]
path = "../common"
[dependencies.nssa]
path = "../nssa"
[dependencies.mempool]
path = "../mempool"


@ -4,25 +4,15 @@ version = "0.1.0"
edition = "2024"
[dependencies]
common.workspace = true
sequencer_core = { workspace = true, features = ["testnet"] }
sequencer_rpc.workspace = true
clap = { workspace = true, features = ["derive", "env"] }
anyhow.workspace = true
serde_json.workspace = true
env_logger.workspace = true
log.workspace = true
actix.workspace = true
actix-web.workspace = true
tokio.workspace = true
[dependencies.clap]
features = ["derive", "env"]
workspace = true
[dependencies.sequencer_rpc]
path = "../sequencer_rpc"
[dependencies.sequencer_core]
path = "../sequencer_core"
features = ["testnet"]
[dependencies.common]
path = "../common"


@ -0,0 +1,79 @@
# Chef stage - uses pre-built cargo-chef image
FROM lukemathwalker/cargo-chef:latest-rust-1.91.1-slim-trixie AS chef
# Install build dependencies
RUN apt-get update && apt-get install -y \
pkg-config \
libssl-dev \
libclang-dev \
clang \
&& rm -rf /var/lib/apt/lists/*
WORKDIR /sequencer_runner
# Planner stage - generates dependency recipe
FROM chef AS planner
COPY . .
RUN cargo chef prepare --bin sequencer_runner --recipe-path recipe.json
# Builder stage - builds dependencies and application
FROM chef AS builder
COPY --from=planner /sequencer_runner/recipe.json recipe.json
# Build dependencies only (this layer will be cached)
RUN cargo chef cook --bin sequencer_runner --release --recipe-path recipe.json
# Copy source code
COPY . .
# Build the actual application
RUN cargo build --release --bin sequencer_runner
# Strip debug symbols to reduce binary size
RUN strip /sequencer_runner/target/release/sequencer_runner
# Runtime stage - minimal image
FROM debian:trixie-slim
# Install runtime dependencies
RUN apt-get update \
&& apt-get install -y gosu jq curl \
&& rm -rf /var/lib/apt/lists/*
# Create non-root user for security
RUN useradd -m -u 1000 -s /bin/bash sequencer_user && \
mkdir -p /sequencer_runner /etc/sequencer_runner && \
chown -R sequencer_user:sequencer_user /sequencer_runner /etc/sequencer_runner
# Copy binary from builder
COPY --from=builder --chown=sequencer_user:sequencer_user /sequencer_runner/target/release/sequencer_runner /usr/local/bin/sequencer_runner
# Copy entrypoint script
COPY sequencer_runner/docker-entrypoint.sh /docker-entrypoint.sh
RUN chmod +x /docker-entrypoint.sh
# Volume for configuration directory
VOLUME ["/etc/sequencer_runner"]
# Expose default port
EXPOSE 3040
# Health check (TODO #244: Replace when a real health endpoint is available)
HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \
CMD curl http://localhost:3040 \
-H "Content-Type: application/json" \
-d "{ \
\"jsonrpc\": \"2.0\", \
\"method\": \"hello\", \
\"params\": {}, \
\"id\": 1 \
}" || exit 1
# Run the application
ENV RUST_LOG=info
USER root
ENTRYPOINT ["/docker-entrypoint.sh"]
WORKDIR /sequencer_runner
CMD ["sequencer_runner", "/etc/sequencer_runner"]


@ -0,0 +1,158 @@
{
"home": "/var/lib/sequencer_runner",
"override_rust_log": null,
"genesis_id": 1,
"is_genesis_random": true,
"max_num_tx_in_block": 20,
"mempool_max_size": 10000,
"block_create_timeout_millis": 10000,
"port": 3040,
"initial_accounts": [
{
"account_id": "BLgCRDXYdQPMMWVHYRFGQZbgeHx9frkipa8GtpG2Syqy",
"balance": 10000
},
{
"account_id": "Gj1mJy5W7J5pfmLRujmQaLfLMWidNxQ6uwnhb666ZwHw",
"balance": 20000
}
],
"initial_commitments": [
{
"npk": [
63,
202,
178,
231,
183,
82,
237,
212,
216,
221,
215,
255,
153,
101,
177,
161,
254,
210,
128,
122,
54,
190,
230,
151,
183,
64,
225,
229,
113,
1,
228,
97
],
"account": {
"program_owner": [
0,
0,
0,
0,
0,
0,
0,
0
],
"balance": 10000,
"data": [],
"nonce": 0
}
},
{
"npk": [
192,
251,
166,
243,
167,
236,
84,
249,
35,
136,
130,
172,
219,
225,
161,
139,
229,
89,
243,
125,
194,
213,
209,
30,
23,
174,
100,
244,
124,
74,
140,
47
],
"account": {
"program_owner": [
0,
0,
0,
0,
0,
0,
0,
0
],
"balance": 20000,
"data": [],
"nonce": 0
}
}
],
"signing_key": [
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37
]
}


@ -0,0 +1,14 @@
services:
sequencer_runner:
image: lssa/sequencer_runner
build:
context: ..
dockerfile: sequencer_runner/Dockerfile
container_name: sequencer_runner
ports:
- "3040:3040"
volumes:
# Mount configuration folder
- ./configs/docker:/etc/sequencer_runner
# Mount data folder
- ./data:/var/lib/sequencer_runner


@ -0,0 +1,29 @@
#!/bin/sh
# This is an entrypoint script for the sequencer_runner Docker container,
# it's not meant to be executed outside of the container.
set -e
CONFIG="/etc/sequencer_runner/sequencer_config.json"
# Check config file exists
if [ ! -f "$CONFIG" ]; then
echo "Config file not found: $CONFIG" >&2
exit 1
fi
# Parse home dir
HOME_DIR=$(jq -r '.home' "$CONFIG")
if [ -z "$HOME_DIR" ] || [ "$HOME_DIR" = "null" ]; then
echo "'home' key missing in config" >&2
exit 1
fi
# Give permissions to the data directory and switch to non-root user
if [ "$(id -u)" = "0" ]; then
mkdir -p "$HOME_DIR"
chown -R sequencer_user:sequencer_user "$HOME_DIR"
exec gosu sequencer_user "$@"
fi
# Not running as root: execute the command directly
exec "$@"


@ -4,10 +4,8 @@ version = "0.1.0"
edition = "2024"
[dependencies]
common.workspace = true
thiserror.workspace = true
borsh.workspace = true
rocksdb.workspace = true
[dependencies.common]
path = "../common"


@ -1,10 +1,10 @@
[package]
name = "test-program-methods"
name = "test_program_methods"
version = "0.1.0"
edition = "2024"
[build-dependencies]
risc0-build = { version = "3.0.3" }
risc0-build.workspace = true
[package.metadata.risc0]
methods = ["guest"]


@ -0,0 +1,9 @@
[package]
name = "test_programs"
version = "0.1.0"
edition = "2024"
[dependencies]
nssa_core.workspace = true
risc0-zkvm.workspace = true


@ -20,5 +20,9 @@ fn main() {
let mut account_post = account_pre.clone();
account_post.balance -= balance_to_burn;
write_nssa_outputs(instruction_words, vec![pre], vec![AccountPostState::new(account_post)]);
write_nssa_outputs(
instruction_words,
vec![pre],
vec![AccountPostState::new(account_post)],
);
}


@ -15,7 +15,7 @@ fn main() {
pre_states,
instruction: (balance, auth_transfer_id, num_chain_calls, pda_seed),
},
instruction_words
instruction_words,
) = read_nssa_inputs::<Instruction>();
let [recipient_pre, sender_pre] = match pre_states.try_into() {
@ -37,7 +37,7 @@ fn main() {
let new_chained_call = ChainedCall {
program_id: auth_transfer_id,
instruction_data: instruction_data.clone(),
pre_states: vec![running_sender_pre.clone(), running_recipient_pre.clone()], // <- Account order permutation here
pre_states: vec![running_sender_pre.clone(), running_recipient_pre.clone()], /* <- Account order permutation here */
pda_seeds: pda_seed.iter().cloned().collect(),
};
chained_calls.push(new_chained_call);


@ -4,7 +4,13 @@ type Instruction = Vec<u8>;
/// A program that modifies the account data by setting bytes sent in instruction.
fn main() {
let (ProgramInput { pre_states, instruction: data }, instruction_words) = read_nssa_inputs::<Instruction>();
let (
ProgramInput {
pre_states,
instruction: data,
},
instruction_words,
) = read_nssa_inputs::<Instruction>();
let [pre] = match pre_states.try_into() {
Ok(array) => array,
@ -13,7 +19,9 @@ fn main() {
let account_pre = &pre.account;
let mut account_post = account_pre.clone();
account_post.data = data.try_into().expect("provided data should fit into data limit");
account_post.data = data
.try_into()
.expect("provided data should fit into data limit");
write_nssa_outputs(
instruction_words,


@ -1,4 +1,4 @@
use nssa_core::program::{read_nssa_inputs, write_nssa_outputs, AccountPostState, ProgramInput};
use nssa_core::program::{AccountPostState, ProgramInput, read_nssa_inputs, write_nssa_outputs};
type Instruction = ();
@ -14,5 +14,9 @@ fn main() {
let mut account_post = account_pre.clone();
account_post.balance += 1;
write_nssa_outputs(instruction_words, vec![pre], vec![AccountPostState::new(account_post)]);
write_nssa_outputs(
instruction_words,
vec![pre],
vec![AccountPostState::new(account_post)],
);
}


@ -0,0 +1,22 @@
use nssa_core::program::{AccountPostState, ProgramInput, read_nssa_inputs, write_nssa_outputs};
type Instruction = ();
fn main() {
let (ProgramInput { pre_states, .. }, instruction_words) = read_nssa_inputs::<Instruction>();
let [pre] = match pre_states.try_into() {
Ok(array) => array,
Err(_) => return,
};
let account_pre = &pre.account;
let mut account_post = account_pre.clone();
account_post.nonce += 1;
write_nssa_outputs(
instruction_words,
vec![pre],
vec![AccountPostState::new(account_post)],
);
}


@ -0,0 +1,13 @@
use nssa_core::program::{AccountPostState, ProgramInput, read_nssa_inputs, write_nssa_outputs};
type Instruction = ();
fn main() {
let (ProgramInput { pre_states, .. }, instruction_words) = read_nssa_inputs::<Instruction>();
let post_states = pre_states
.iter()
.map(|account| AccountPostState::new(account.account.clone()))
.collect();
write_nssa_outputs(instruction_words, pre_states, post_states);
}


@ -1,4 +1,4 @@
use nssa_core::program::{read_nssa_inputs, write_nssa_outputs, AccountPostState, ProgramInput};
use nssa_core::program::{AccountPostState, ProgramInput, read_nssa_inputs, write_nssa_outputs};
type Instruction = ();
@ -14,5 +14,9 @@ fn main() {
let mut account_post = account_pre.clone();
account_post.program_owner = [0, 1, 2, 3, 4, 5, 6, 7];
write_nssa_outputs(instruction_words, vec![pre], vec![AccountPostState::new(account_post)]);
write_nssa_outputs(
instruction_words,
vec![pre],
vec![AccountPostState::new(account_post)],
);
}


@ -4,32 +4,26 @@ version = "0.1.0"
edition = "2024"
[dependencies]
nssa_core.workspace = true
nssa.workspace = true
common.workspace = true
key_protocol.workspace = true
anyhow.workspace = true
serde_json.workspace = true
env_logger.workspace = true
log.workspace = true
serde.workspace = true
tokio.workspace = true
tempfile.workspace = true
clap.workspace = true
nssa-core = { path = "../nssa/core" }
base64.workspace = true
bytemuck = "1.23.2"
bytemuck.workspace = true
borsh.workspace = true
base58.workspace = true
hex = "0.4.3"
hex.workspace = true
rand.workspace = true
itertools.workspace = true
sha2.workspace = true
futures.workspace = true
async-stream = "0.3.6"
indicatif = { version = "0.18.3", features = ["improved_unicode"] }
[dependencies.key_protocol]
path = "../key_protocol"
[dependencies.nssa]
path = "../nssa"
[dependencies.common]
path = "../common"


@ -0,0 +1,547 @@
{
"override_rust_log": null,
"sequencer_addr": "http://127.0.0.1:3040",
"seq_poll_timeout_millis": 12000,
"seq_tx_poll_max_blocks": 5,
"seq_poll_max_retries": 5,
"seq_block_poll_max_amount": 100,
"initial_accounts": [
{
"Public": {
"account_id": "BLgCRDXYdQPMMWVHYRFGQZbgeHx9frkipa8GtpG2Syqy",
"pub_sign_key": [
16,
162,
106,
154,
236,
125,
52,
184,
35,
100,
238,
174,
69,
197,
41,
77,
187,
10,
118,
75,
0,
11,
148,
238,
185,
181,
133,
17,
220,
72,
124,
77
]
}
},
{
"Public": {
"account_id": "Gj1mJy5W7J5pfmLRujmQaLfLMWidNxQ6uwnhb666ZwHw",
"pub_sign_key": [
113,
121,
64,
177,
204,
85,
229,
214,
178,
6,
109,
191,
29,
154,
63,
38,
242,
18,
244,
219,
8,
208,
35,
136,
23,
127,
207,
237,
216,
169,
190,
27
]
}
},
{
"Private": {
"account_id": "3oCG8gqdKLMegw4rRfyaMQvuPHpcASt7xwttsmnZLSkw",
"account": {
"program_owner": [
0,
0,
0,
0,
0,
0,
0,
0
],
"balance": 10000,
"data": [],
"nonce": 0
},
"key_chain": {
"secret_spending_key": [
251,
82,
235,
1,
146,
96,
30,
81,
162,
234,
33,
15,
123,
129,
116,
0,
84,
136,
176,
70,
190,
224,
161,
54,
134,
142,
154,
1,
18,
251,
242,
189
],
"private_key_holder": {
"nullifier_secret_key": [
29,
250,
10,
187,
35,
123,
180,
250,
246,
97,
216,
153,
44,
156,
16,
93,
241,
26,
174,
219,
72,
84,
34,
247,
112,
101,
217,
243,
189,
173,
75,
20
],
"incoming_viewing_secret_key": [
251,
201,
22,
154,
100,
165,
218,
108,
163,
190,
135,
91,
145,
84,
69,
241,
46,
117,
217,
110,
197,
248,
91,
193,
14,
104,
88,
103,
67,
153,
182,
158
],
"outgoing_viewing_secret_key": [
25,
67,
121,
76,
175,
100,
30,
198,
105,
123,
49,
169,
75,
178,
75,
210,
100,
143,
210,
243,
228,
243,
21,
18,
36,
84,
164,
186,
139,
113,
214,
12
]
},
"nullifer_public_key": [
63,
202,
178,
231,
183,
82,
237,
212,
216,
221,
215,
255,
153,
101,
177,
161,
254,
210,
128,
122,
54,
190,
230,
151,
183,
64,
225,
229,
113,
1,
228,
97
],
"incoming_viewing_public_key": [
3,
235,
139,
131,
237,
177,
122,
189,
6,
177,
167,
178,
202,
117,
246,
58,
28,
65,
132,
79,
220,
139,
119,
243,
187,
160,
212,
121,
61,
247,
116,
72,
205
]
}
}
},
{
"Private": {
"account_id": "AKTcXgJ1xoynta1Ec7y6Jso1z1JQtHqd7aPQ1h9er6xX",
"account": {
"program_owner": [
0,
0,
0,
0,
0,
0,
0,
0
],
"balance": 20000,
"data": [],
"nonce": 0
},
"key_chain": {
"secret_spending_key": [
238,
171,
241,
69,
111,
217,
85,
64,
19,
82,
18,
189,
32,
91,
78,
175,
107,
7,
109,
60,
52,
44,
243,
230,
72,
244,
192,
92,
137,
33,
118,
254
],
"private_key_holder": {
"nullifier_secret_key": [
25,
211,
215,
119,
57,
223,
247,
37,
245,
144,
122,
29,
118,
245,
83,
228,
23,
9,
101,
120,
88,
33,
238,
207,
128,
61,
110,
2,
89,
62,
164,
13
],
"incoming_viewing_secret_key": [
193,
181,
14,
196,
142,
84,
15,
65,
128,
101,
70,
196,
241,
47,
130,
221,
23,
146,
161,
237,
221,
40,
19,
126,
59,
15,
169,
236,
25,
105,
104,
231
],
"outgoing_viewing_secret_key": [
20,
170,
220,
108,
41,
23,
155,
217,
247,
190,
175,
168,
247,
34,
105,
134,
114,
74,
104,
91,
211,
62,
126,
13,
130,
100,
241,
214,
250,
236,
38,
150
]
},
"nullifer_public_key": [
192,
251,
166,
243,
167,
236,
84,
249,
35,
136,
130,
172,
219,
225,
161,
139,
229,
89,
243,
125,
194,
213,
209,
30,
23,
174,
100,
244,
124,
74,
140,
47
],
"incoming_viewing_public_key": [
2,
181,
98,
93,
216,
241,
241,
110,
58,
198,
119,
174,
250,
184,
1,
204,
200,
173,
44,
238,
37,
247,
170,
156,
100,
254,
116,
242,
28,
183,
187,
77,
255
]
}
}
}
],
"basic_auth": null
}


@ -14,7 +14,9 @@ use key_protocol::key_management::key_tree::{chain_index::ChainIndex, traits::Ke
use log::info;
use nssa::{
Account, AccountId, PrivacyPreservingTransaction,
privacy_preserving_transaction::message::EncryptedAccountData, program::Program,
privacy_preserving_transaction::{
circuit::ProgramWithDependencies, message::EncryptedAccountData,
},
};
use nssa_core::{Commitment, MembershipProof, SharedSecretKey, program::InstructionData};
pub use privacy_preserving_tx::PrivacyPreservingAccount;
@ -247,7 +249,7 @@ impl WalletCore {
&self,
accounts: Vec<PrivacyPreservingAccount>,
instruction_data: &InstructionData,
program: &Program,
program: &ProgramWithDependencies,
) -> Result<(SendTxResponse, Vec<SharedSecretKey>), ExecutionFailureKind> {
self.send_privacy_preserving_tx_with_pre_check(accounts, instruction_data, program, |_| {
Ok(())
@ -259,7 +261,7 @@ impl WalletCore {
&self,
accounts: Vec<PrivacyPreservingAccount>,
instruction_data: &InstructionData,
program: &Program,
program: &ProgramWithDependencies,
tx_pre_check: impl FnOnce(&[&Account]) -> Result<(), ExecutionFailureKind>,
) -> Result<(SendTxResponse, Vec<SharedSecretKey>), ExecutionFailureKind> {
let acc_manager = privacy_preserving_tx::AccountManager::new(self, accounts).await?;
@ -284,7 +286,7 @@ impl WalletCore {
.collect::<Vec<_>>(),
&acc_manager.private_account_auth(),
&acc_manager.private_account_membership_proofs(),
&program.to_owned().into(),
&program.to_owned(),
)
.unwrap();


@ -20,7 +20,7 @@ impl NativeTokenTransfer<'_> {
PrivacyPreservingAccount::Public(to),
],
&instruction_data,
&program,
&program.into(),
tx_pre_check,
)
.await


@ -18,7 +18,7 @@ impl NativeTokenTransfer<'_> {
.send_privacy_preserving_tx_with_pre_check(
vec![PrivacyPreservingAccount::PrivateOwned(from)],
&Program::serialize_instruction(instruction).unwrap(),
&Program::authenticated_transfer_program(),
&Program::authenticated_transfer_program().into(),
|_| Ok(()),
)
.await
@ -48,7 +48,7 @@ impl NativeTokenTransfer<'_> {
},
],
&instruction_data,
&program,
&program.into(),
tx_pre_check,
)
.await
@ -75,7 +75,7 @@ impl NativeTokenTransfer<'_> {
PrivacyPreservingAccount::PrivateOwned(to),
],
&instruction_data,
&program,
&program.into(),
tx_pre_check,
)
.await


@ -21,7 +21,7 @@ impl NativeTokenTransfer<'_> {
PrivacyPreservingAccount::PrivateOwned(to),
],
&instruction_data,
&program,
&program.into(),
tx_pre_check,
)
.await
@ -53,7 +53,7 @@ impl NativeTokenTransfer<'_> {
},
],
&instruction_data,
&program,
&program.into(),
tx_pre_check,
)
.await


@ -38,7 +38,7 @@ impl Pinata<'_> {
PrivacyPreservingAccount::PrivateOwned(winner_account_id),
],
&nssa::program::Program::serialize_instruction(solution).unwrap(),
&nssa::program::Program::pinata(),
&nssa::program::Program::pinata().into(),
)
.await
.map(|(resp, secrets)| {


@ -54,7 +54,7 @@ impl Token<'_> {
PrivacyPreservingAccount::PrivateOwned(supply_account_id),
],
&instruction_data,
&program,
&program.into(),
)
.await
.map(|(resp, secrets)| {
@ -82,7 +82,7 @@ impl Token<'_> {
PrivacyPreservingAccount::Public(supply_account_id),
],
&instruction_data,
&program,
&program.into(),
)
.await
.map(|(resp, secrets)| {
@ -110,7 +110,7 @@ impl Token<'_> {
PrivacyPreservingAccount::PrivateOwned(supply_account_id),
],
&instruction_data,
&program,
&program.into(),
)
.await
.map(|(resp, secrets)| {
@ -176,7 +176,7 @@ impl Token<'_> {
PrivacyPreservingAccount::PrivateOwned(recipient_account_id),
],
&instruction_data,
&program,
&program.into(),
)
.await
.map(|(resp, secrets)| {
@ -206,7 +206,7 @@ impl Token<'_> {
},
],
&instruction_data,
&program,
&program.into(),
)
.await
.map(|(resp, secrets)| {
@ -232,7 +232,7 @@ impl Token<'_> {
PrivacyPreservingAccount::Public(recipient_account_id),
],
&instruction_data,
&program,
&program.into(),
)
.await
.map(|(resp, secrets)| {
@ -259,7 +259,7 @@ impl Token<'_> {
PrivacyPreservingAccount::PrivateOwned(recipient_account_id),
],
&instruction_data,
&program,
&program.into(),
)
.await
.map(|(resp, secrets)| {
@ -290,7 +290,7 @@ impl Token<'_> {
},
],
&instruction_data,
&program,
&program.into(),
)
.await
.map(|(resp, secrets)| {