Merge branch 'main' into Pravdyvy/amm-wallet-integration

This commit is contained in:
Pravdyvy 2025-12-22 05:00:50 +02:00
commit 22f6d92ca6
109 changed files with 4258 additions and 4422 deletions

36
.dockerignore Normal file
View File

@ -0,0 +1,36 @@
# Build artifacts
target/
**/target/
# RocksDB data
rocksdb/
**/rocksdb/
# Git
.git/
.gitignore
# IDE
.vscode/
.idea/
*.swp
*.swo
*~
# OS
.DS_Store
Thumbs.db
# CI/CD
.github/
ci_scripts/
# Documentation
*.md
!README.md
# Configs (copy selectively if needed)
configs/
# License
LICENSE

View File

@ -0,0 +1,10 @@
name: Install risc0
description: Installs risc0 in the environment
runs:
using: "composite"
steps:
- name: Install risc0
run: |
curl -L https://risczero.com/install | bash
/home/runner/.risc0/bin/rzup install
shell: bash

View File

@ -0,0 +1,10 @@
name: Install system dependencies
description: Installs system dependencies in the environment
runs:
using: "composite"
steps:
- name: Install system dependencies
run: |
sudo apt-get update
sudo apt-get install -y build-essential clang libclang-dev libssl-dev pkg-config
shell: bash

View File

@ -10,10 +10,10 @@ TO COMPLETE
TO COMPLETE
[ ] Change ...
[ ] Add ...
[ ] Fix ...
[ ] ...
- [ ] Change ...
- [ ] Add ...
- [ ] Fix ...
- [ ] ...
## 🧪 How to Test
@ -37,7 +37,7 @@ TO COMPLETE IF APPLICABLE
*Mark only completed items. A complete PR should have all boxes ticked.*
[ ] Complete PR description
[ ] Implement the core functionality
[ ] Add/update tests
[ ] Add/update documentation and inline comments
- [ ] Complete PR description
- [ ] Implement the core functionality
- [ ] Add/update tests
- [ ] Add/update documentation and inline comments

View File

@ -14,22 +14,164 @@ on:
name: General
jobs:
ubuntu-latest-pipeline:
fmt-rs:
runs-on: ubuntu-latest
timeout-minutes: 120
name: ubuntu-latest-pipeline
steps:
- uses: actions/checkout@v3
- name: Install active toolchain
run: rustup install
- uses: actions/checkout@v5
with:
ref: ${{ github.head_ref }}
- name: Install nightly toolchain for rustfmt
run: rustup install nightly --profile minimal --component rustfmt
- name: lint - ubuntu-latest
run: chmod 777 ./ci_scripts/lint-ubuntu.sh && ./ci_scripts/lint-ubuntu.sh
- name: test ubuntu-latest
if: success() || failure()
run: chmod 777 ./ci_scripts/test-ubuntu.sh && ./ci_scripts/test-ubuntu.sh
- name: Check Rust files are formatted
run: cargo +nightly fmt --check
fmt-toml:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
with:
ref: ${{ github.head_ref }}
- name: Install taplo-cli
run: cargo install --locked taplo-cli
- name: Check TOML files are formatted
run: taplo fmt --check .
machete:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
with:
ref: ${{ github.head_ref }}
- name: Install active toolchain
run: rustup install
- name: Install cargo-machete
run: cargo install cargo-machete
- name: Check for unused dependencies
run: cargo machete
lint:
runs-on: ubuntu-latest
timeout-minutes: 60
name: lint
steps:
- uses: actions/checkout@v5
with:
ref: ${{ github.head_ref }}
- uses: ./.github/actions/install-system-deps
- uses: ./.github/actions/install-risc0
- name: Install active toolchain
run: rustup install
- name: Lint workspace
env:
RISC0_SKIP_BUILD: "1"
run: cargo clippy --workspace --all-targets --all-features -- -D warnings
- name: Lint programs
env:
RISC0_SKIP_BUILD: "1"
run: cargo clippy -p "*programs" -- -D warnings
unit-tests:
runs-on: ubuntu-latest
timeout-minutes: 60
steps:
- uses: actions/checkout@v5
with:
ref: ${{ github.head_ref }}
- uses: ./.github/actions/install-system-deps
- uses: ./.github/actions/install-risc0
- name: Install active toolchain
run: rustup install
- name: Install nextest
run: cargo install cargo-nextest
- name: Run unit tests
env:
RISC0_DEV_MODE: "1"
run: cargo nextest run --no-fail-fast
valid-proof-test:
runs-on: ubuntu-latest
timeout-minutes: 60
steps:
- uses: actions/checkout@v5
with:
ref: ${{ github.head_ref }}
- uses: ./.github/actions/install-system-deps
- uses: ./.github/actions/install-risc0
- name: Install active toolchain
run: rustup install
- name: Test valid proof
env:
NSSA_WALLET_HOME_DIR: ./integration_tests/configs/debug/wallet
RUST_LOG: "info"
run: cargo run --bin integration_tests -- ./integration_tests/configs/debug/ test_success_private_transfer_to_another_owned_account
integration-tests:
runs-on: ubuntu-latest
timeout-minutes: 120
steps:
- uses: actions/checkout@v5
with:
ref: ${{ github.head_ref }}
- uses: ./.github/actions/install-system-deps
- uses: ./.github/actions/install-risc0
- name: Install active toolchain
run: rustup install
- name: Run integration tests
env:
NSSA_WALLET_HOME_DIR: ./integration_tests/configs/debug/wallet
RUST_LOG: "info"
RISC0_DEV_MODE: "1"
run: cargo run --bin integration_tests -- ./integration_tests/configs/debug/ all
artifacts:
runs-on: ubuntu-latest
timeout-minutes: 60
name: artifacts
steps:
- uses: actions/checkout@v5
with:
ref: ${{ github.head_ref }}
- uses: ./.github/actions/install-risc0
- name: Install just
run: cargo install just
- name: Build artifacts
run: just build-artifacts
- name: Check if artifacts match repository
run: |
if ! git diff --exit-code artifacts/; then
echo "❌ Artifacts in the repository are out of date!"
echo "Please run 'just build-artifacts' and commit the changes."
exit 1
fi
echo "✅ Artifacts are up to date"

23
.github/workflows/deploy.yml vendored Normal file
View File

@ -0,0 +1,23 @@
name: Deploy Sequencer
on:
workflow_dispatch:
jobs:
deploy:
runs-on: ubuntu-latest
permissions:
contents: read
steps:
- name: Checkout repository
uses: actions/checkout@v4
- name: Deploy to server
uses: appleboy/ssh-action@v1.2.4
with:
host: ${{ secrets.DEPLOY_SSH_HOST }}
username: ${{ secrets.DEPLOY_SSH_USERNAME }}
key: ${{ secrets.DEPLOY_SSH_KEY }}
envs: GITHUB_ACTOR
script_path: ci_scripts/deploy.sh

44
.github/workflows/publish_image.yml vendored Normal file
View File

@ -0,0 +1,44 @@
name: Publish Sequencer Runner Image
on:
workflow_dispatch:
jobs:
publish:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Log in to registry
uses: docker/login-action@v3
with:
registry: ${{ secrets.DOCKER_REGISTRY }}
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_PASSWORD }}
- name: Extract metadata
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ secrets.DOCKER_REGISTRY }}/${{ github.repository }}/sequencer_runner
tags: |
type=ref,event=branch
type=ref,event=pr
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
type=sha,prefix={{branch}}-
type=raw,value=latest,enable={{is_default_branch}}
- name: Build and push Docker image
uses: docker/build-push-action@v5
with:
context: .
file: ./sequencer_runner/Dockerfile
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
cache-from: type=gha
cache-to: type=gha,mode=max

3
.gitignore vendored
View File

@ -6,4 +6,5 @@ data/
.idea/
.vscode/
rocksdb
Cargo.lock
sequencer_runner/data/
storage.json

File diff suppressed because it is too large

View File

@ -12,15 +12,42 @@ members = [
"common",
"nssa",
"nssa/core",
"program_methods",
"program_methods/guest",
"test_program_methods",
"test_program_methods/guest",
"integration_tests/proc_macro_test_attribute",
"examples/program_deployment",
"examples/program_deployment/methods",
"examples/program_deployment/methods/guest",
]
[workspace.dependencies]
nssa = { path = "nssa" }
nssa_core = { path = "nssa/core" }
common = { path = "common" }
mempool = { path = "mempool" }
storage = { path = "storage" }
key_protocol = { path = "key_protocol" }
sequencer_core = { path = "sequencer_core" }
sequencer_rpc = { path = "sequencer_rpc" }
sequencer_runner = { path = "sequencer_runner" }
wallet = { path = "wallet" }
test_program_methods = { path = "test_program_methods" }
tokio = { version = "1.28.2", features = [
"net",
"rt-multi-thread",
"sync",
"fs",
] }
risc0-zkvm = { version = "3.0.3", features = ['std'] }
risc0-build = "3.0.3"
anyhow = "1.0.98"
num_cpus = "1.13.1"
openssl = { version = "0.10", features = ["vendored"] }
openssl-probe = { version = "0.1.2" }
serde = { version = "1.0.60", default-features = false, features = ["derive"] }
serde_json = "1.0.81"
actix = "0.13.0"
actix-cors = "0.6.1"
@ -33,9 +60,9 @@ lru = "0.7.8"
thiserror = "2.0.12"
sha2 = "0.10.8"
hex = "0.4.3"
bytemuck = "1.24.0"
aes-gcm = "0.10.3"
toml = "0.7.4"
secp256k1-zkp = "0.11.0"
bincode = "1.3.3"
tempfile = "3.14.0"
light-poseidon = "0.3.0"
@ -50,46 +77,21 @@ borsh = "1.5.7"
base58 = "0.2.0"
itertools = "0.14.0"
rocksdb = { version = "0.21.0", default-features = false, features = [
rocksdb = { version = "0.24.0", default-features = false, features = [
"snappy",
"bindgen-runtime",
] }
[workspace.dependencies.rand]
features = ["std", "std_rng", "getrandom"]
version = "0.8.5"
[workspace.dependencies.k256]
features = ["ecdsa-core", "arithmetic", "expose-field", "serde", "pem"]
version = "0.13.3"
[workspace.dependencies.elliptic-curve]
features = ["arithmetic"]
version = "0.13.8"
[workspace.dependencies.serde]
features = ["derive"]
version = "1.0.60"
[workspace.dependencies.actix-web]
default-features = false
features = ["macros"]
version = "=4.1.0"
[workspace.dependencies.clap]
features = ["derive", "env"]
version = "4.5.42"
[workspace.dependencies.tokio-retry]
version = "0.3.0"
[workspace.dependencies.reqwest]
features = ["json"]
version = "0.11.16"
[workspace.dependencies.tokio]
features = ["net", "rt-multi-thread", "sync", "fs"]
version = "1.28.2"
[workspace.dependencies.tracing]
features = ["std"]
version = "0.1.13"
rand = { version = "0.8.5", features = ["std", "std_rng", "getrandom"] }
k256 = { version = "0.13.3", features = [
"ecdsa-core",
"arithmetic",
"expose-field",
"serde",
"pem",
] }
elliptic-curve = { version = "0.13.8", features = ["arithmetic"] }
actix-web = { version = "=4.1.0", default-features = false, features = [
"macros",
] }
clap = { version = "4.5.42", features = ["derive", "env"] }
reqwest = { version = "0.11.16", features = ["json"] }

19
Justfile Normal file
View File

@ -0,0 +1,19 @@
set shell := ["bash", "-eu", "-o", "pipefail", "-c"]
default:
@just --list
# ---- Configuration ----
METHODS_PATH := "program_methods"
TEST_METHODS_PATH := "test_program_methods"
ARTIFACTS := "artifacts"
# ---- Artifacts build ----
build-artifacts:
@echo "🔨 Building artifacts"
@for methods_path in {{METHODS_PATH}} {{TEST_METHODS_PATH}}; do \
echo "Building artifacts for $methods_path"; \
CARGO_TARGET_DIR=target/$methods_path cargo risczero build --manifest-path $methods_path/guest/Cargo.toml; \
mkdir -p {{ARTIFACTS}}/$methods_path; \
cp target/$methods_path/riscv32im-risc0-zkvm-elf/docker/*.bin {{ARTIFACTS}}/$methods_path; \
done

View File

@ -69,16 +69,14 @@ Install build dependencies
- On Linux
Ubuntu / Debian
```sh
apt install build-essential clang libssl-dev pkg-config
apt install build-essential clang libclang-dev libssl-dev pkg-config
```
Fedora
```sh
sudo dnf install clang openssl-devel pkgconf llvm
sudo dnf install clang clang-devel openssl-devel pkgconf
```
> **Note for Fedora 41+ users:** GCC 14+ has stricter C++ standard library headers that cause build failures with the bundled RocksDB. You must set `CXXFLAGS="-include cstdint"` when running cargo commands. See the [Run tests](#run-tests) section for examples.
- On Mac
```sh
xcode-select --install
@ -110,9 +108,6 @@ The NSSA repository includes both unit and integration test suites.
```bash
# RISC0_DEV_MODE=1 is used to skip proof generation and reduce test runtime overhead
RISC0_DEV_MODE=1 cargo test --release
# On Fedora 41+ (GCC 14+), prefix with CXXFLAGS to fix RocksDB build:
CXXFLAGS="-include cstdint" RISC0_DEV_MODE=1 cargo test --release
```
### Integration tests
@ -122,9 +117,6 @@ export NSSA_WALLET_HOME_DIR=$(pwd)/integration_tests/configs/debug/wallet/
cd integration_tests
# RISC0_DEV_MODE=1 skips proof generation; RUST_LOG=info enables runtime logs
RUST_LOG=info RISC0_DEV_MODE=1 cargo run $(pwd)/configs/debug all
# On Fedora 41+ (GCC 14+), prefix with CXXFLAGS to fix RocksDB build:
CXXFLAGS="-include cstdint" RUST_LOG=info RISC0_DEV_MODE=1 cargo run $(pwd)/configs/debug all
```
# Run the sequencer
@ -134,9 +126,6 @@ The sequencer can be run locally:
```bash
cd sequencer_runner
RUST_LOG=info cargo run --release configs/debug
# On Fedora 41+ (GCC 14+), prefix with CXXFLAGS to fix RocksDB build:
CXXFLAGS="-include cstdint" RUST_LOG=info cargo run --release configs/debug
```
If everything went well you should see an output similar to this:
@ -162,9 +151,6 @@ This repository includes a CLI for interacting with the Nescience sequencer. To
```bash
cargo install --path wallet --force
# On Fedora 41+ (GCC 14+), prefix with CXXFLAGS to fix RocksDB build:
CXXFLAGS="-include cstdint" cargo install --path wallet --force
```
Run `wallet help` to check everything went well.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@ -1,4 +0,0 @@
set -e
curl -L https://risczero.com/install | bash
/Users/runner/.risc0/bin/rzup install
RUSTFLAGS="-D warnings" cargo build

View File

@ -1,4 +0,0 @@
set -e
curl -L https://risczero.com/install | bash
/home/runner/.risc0/bin/rzup install
RUSTFLAGS="-D warnings" cargo build

84
ci_scripts/deploy.sh Normal file
View File

@ -0,0 +1,84 @@
#!/usr/bin/env bash
set -e
# Base directory for deployment
LSSA_DIR="/home/arjentix/test_deploy/lssa"
# Expect GITHUB_ACTOR to be passed as first argument or environment variable
GITHUB_ACTOR="${1:-${GITHUB_ACTOR:-unknown}}"
# Function to log messages with timestamp
log_deploy() {
echo "[$(date '+%Y-%m-%d %H:%M:%S %Z')] $1" >> "${LSSA_DIR}/deploy.log"
}
# Error handler
handle_error() {
echo "✗ Deployment failed by: ${GITHUB_ACTOR}"
log_deploy "Deployment failed by: ${GITHUB_ACTOR}"
exit 1
}
find_sequencer_runner_pids() {
pgrep -f "sequencer_runner" | grep -v $$
}
# Set trap to catch any errors
trap 'handle_error' ERR
# Log deployment info
log_deploy "Deployment initiated by: ${GITHUB_ACTOR}"
# Navigate to code directory
if [ ! -d "${LSSA_DIR}/code" ]; then
mkdir -p "${LSSA_DIR}/code"
fi
cd "${LSSA_DIR}/code"
# Stop current sequencer if running
if find_sequencer_runner_pids > /dev/null; then
echo "Stopping current sequencer..."
find_sequencer_runner_pids | xargs -r kill -SIGINT || true
sleep 2
# Force kill if still running
find_sequencer_runner_pids | grep -v $$ | xargs -r kill -9 || true
fi
# Clone or update repository
if [ -d ".git" ]; then
echo "Updating existing repository..."
git fetch origin
git checkout main
git reset --hard origin/main
else
echo "Cloning repository..."
git clone https://github.com/vacp2p/nescience-testnet.git .
git checkout main
fi
# Build sequencer_runner and wallet in release mode
echo "Building sequencer_runner"
# That could be just `cargo build --release --bin sequencer_runner --bin wallet`
# but we have a `no_docker` feature bug, see issue #179
cd sequencer_runner
cargo build --release
cd ../wallet
cargo build --release
cd ..
# Run sequencer_runner with config
echo "Starting sequencer_runner..."
export RUST_LOG=info
nohup ./target/release/sequencer_runner "${LSSA_DIR}/configs/sequencer" > "${LSSA_DIR}/sequencer.log" 2>&1 &
# Wait 5 seconds and check health using wallet
sleep 5
if ./target/release/wallet check-health; then
echo "✓ Sequencer started successfully and is healthy"
log_deploy "Deployment completed successfully by: ${GITHUB_ACTOR}"
exit 0
else
echo "✗ Sequencer failed health check"
tail -n 50 "${LSSA_DIR}/sequencer.log"
handle_error
fi

View File

@ -1,8 +0,0 @@
set -e
cargo +nightly fmt -- --check
cargo install taplo-cli --locked
taplo fmt --check
RISC0_SKIP_BUILD=1 cargo clippy --workspace --all-targets -- -D warnings

View File

@ -1,17 +0,0 @@
set -e
curl -L https://risczero.com/install | bash
/home/runner/.risc0/bin/rzup install
RISC0_DEV_MODE=1 cargo test --release --features no_docker
cd integration_tests
export NSSA_WALLET_HOME_DIR=$(pwd)/configs/debug/wallet/
export RUST_LOG=info
echo "Try test valid proof at least once"
cargo run $(pwd)/configs/debug test_success_private_transfer_to_another_owned_account
echo "Continuing in dev mode"
RISC0_DEV_MODE=1 cargo run $(pwd)/configs/debug all
cd ..
cd nssa/program_methods/guest && cargo test --release

View File

@ -4,18 +4,16 @@ version = "0.1.0"
edition = "2024"
[dependencies]
nssa.workspace = true
nssa_core.workspace = true
anyhow.workspace = true
thiserror.workspace = true
serde_json.workspace = true
serde.workspace = true
reqwest.workspace = true
sha2.workspace = true
log.workspace = true
hex.workspace = true
nssa-core = { path = "../nssa/core", features = ["host"] }
borsh.workspace = true
base64.workspace = true
[dependencies.nssa]
path = "../nssa"

View File

@ -4,10 +4,8 @@ version = "0.1.0"
edition = "2024"
[dependencies]
nssa.workspace = true
wallet.workspace = true
tokio = { workspace = true, features = ["macros"] }
wallet = { path = "../../wallet" }
nssa-core = { path = "../../nssa/core" }
nssa = { path = "../../nssa" }
key_protocol = { path = "../../key_protocol/" }
clap = "4.5.53"
serde = "1.0.228"
clap.workspace = true

View File

@ -340,7 +340,7 @@ Luckily all that complexity is hidden behind the `wallet_core.send_privacy_prese
.send_privacy_preserving_tx(
accounts,
&Program::serialize_instruction(greeting).unwrap(),
&program,
&program.into(),
)
.await
.unwrap();
@ -568,4 +568,94 @@ Output:
```
Hola mundo!Hello from tail call
```
## Private tail-calls
There's support for tail calls in privacy preserving executions too. The `run_hello_world_through_tail_call_private.rs` runner walks you through the process of invoking such an execution.
The only difference is that, since the execution is local, the runner will need both programs: the `simple_tail_call` and its dependency `hello_world`.
Let's use our existing private account with id `8vzkK7vsdrS2gdPhLk72La8X4FJkgJ5kJLUBRbEVkReU`. This one is already owned by the `hello_world` program.
You can test the private tail calls with:
```bash
cargo run --bin run_hello_world_through_tail_call_private \
$EXAMPLE_PROGRAMS_BUILD_DIR/simple_tail_call.bin \
$EXAMPLE_PROGRAMS_BUILD_DIR/hello_world.bin \
8vzkK7vsdrS2gdPhLk72La8X4FJkgJ5kJLUBRbEVkReU
```
>[!NOTE]
> The above command may take longer than the previous privacy-preserving executions because it needs to generate proofs of execution for both the `simple_tail_call` and the `hello_world` programs.
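Under the hood, the runner loads both guest binaries and registers `hello_world` as a dependency of `simple_tail_call` before proving. A condensed sketch of that step (the `load_programs` helper is illustrative; the types come from the `nssa` crate):
```rust
use std::collections::HashMap;

use nssa::{
    ProgramId, privacy_preserving_transaction::circuit::ProgramWithDependencies,
    program::Program,
};

// Illustrative helper: bundle the entry program with the program it tail-calls into.
fn load_programs(simple_tail_call_path: &str, hello_world_path: &str) -> ProgramWithDependencies {
    let simple_tail_call = Program::new(std::fs::read(simple_tail_call_path).unwrap()).unwrap();
    let hello_world = Program::new(std::fs::read(hello_world_path).unwrap()).unwrap();

    // The dependency is keyed by its program ID so the local prover can resolve
    // the chained call made by `simple_tail_call`.
    let dependencies: HashMap<ProgramId, Program> =
        [(hello_world.id(), hello_world)].into_iter().collect();
    ProgramWithDependencies::new(simple_tail_call, dependencies)
}
```
The resulting `ProgramWithDependencies` is then passed to `send_privacy_preserving_tx` just like a single program.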
Once it finishes, run the following to see the changes:
```bash
wallet account sync-private
wallet account get --account-id Private/8vzkK7vsdrS2gdPhLk72La8X4FJkgJ5kJLUBRbEVkReU
```
# 13. Program derived accounts: authorizing accounts through tail calls
## Digression: account authority vs account program ownership
In NSSA there are two distinct concepts that control who can modify an account:
**Program Ownership:** Each account has a field `program_owner: ProgramId`.
This indicates which program is allowed to update the account's state during execution.
- If a program is the `program_owner` of an account, it can freely mutate its fields.
- If the account is uninitialized (`program_owner = DEFAULT_PROGRAM_ID`), a program may claim it and become its owner.
- If a program is not the owner and the account is not claimable, any attempt to modify it will cause the transition to fail.
Program ownership is about mutation rights during program execution.
**Account authority**: Independent of program ownership, each account also has an authority: the entity that is allowed to set `is_authorized = true`. This flag indicates that the account has been authorized for use in a transaction.
Who can act as authority?
- User-defined accounts: The user is the authority. They can mark an account as authorized by:
- Signing the transaction (public accounts)
- Providing a valid nullifier secret key ownership proof (private accounts)
- Program derived accounts: Programs are automatically the authority of a dedicated namespace of public accounts.
Each program owns a dedicated space of 2^256 **public** account IDs. These IDs do not overlap with:
- User accounts (public or private)
- Other programs' PDAs
> [!NOTE]
> Currently PDAs are restricted to the public state.
A program can be the authority of an account owned by another program, which is the most common case.
During a chained call, a program can mark its PDA accounts as `is_authorized=true` without requiring any user signatures or nullifier secret keys. This enables programs to safely authorize accounts during program composition. Importantly, these flags can only be set to true for PDA accounts through an execution of the program that is their authority. No user and no other program can execute any transition that requires authorization of PDA accounts belonging to a different program.
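To make this concrete, a PDA's account ID is derived deterministically from the owning program's ID and a fixed seed, so the wallet and the program always agree on which account is being authorized. A minimal sketch of the derivation (the `derive_pda` helper is illustrative; the conversion itself is the `<AccountId as From<(&ProgramId, &PdaSeed)>>` implementation):
```rust
use nssa::AccountId;
use nssa_core::program::{PdaSeed, ProgramId};

// Fixed seed; the guest program and the wallet-side runner must use the same value.
const PDA_SEED: PdaSeed = PdaSeed::new([37; 32]);

// Deterministic derivation: (program ID, seed) -> PDA account ID.
fn derive_pda(program_id: &ProgramId) -> AccountId {
    AccountId::from((program_id, &PDA_SEED))
}
```
During the chained call, the program forwards this account with `is_authorized` set to `true`, which is exactly what the `tail_call_with_pda` example described below does.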
## Running the example
This tutorial includes an example of PDA usage in `methods/guest/src/bin/tail_call_with_pda.rs`. That program's sole purpose is to forward one of its own PDA accounts, an account for which it is the authority, to the "Hello World with authorization" program via a chained call. The Hello World program will then claim the account and become its program owner, but the `tail_call_with_pda` program remains the authority. This means it is still the only entity capable of marking that account as `is_authorized=true`.
Deploy the program:
```bash
wallet deploy-program $EXAMPLE_PROGRAMS_BUILD_DIR/tail_call_with_pda.bin
```
There is no need to create a new account for this example, because we simply use one of the PDA accounts belonging to the `tail_call_with_pda` program.
Execute the program
```bash
cargo run --bin run_hello_world_with_authorization_through_tail_call_with_pda $EXAMPLE_PROGRAMS_BUILD_DIR/tail_call_with_pda.bin
```
You'll see an output like the following:
```bash
The program derived account ID is: 3tfTPPuxj3eSE1cLVuNBEk8eSHzpnYS1oqEdeH3Nfsks
```
Then check the status of that account
```bash
wallet account get --account-id Public/3tfTPPuxj3eSE1cLVuNBEk8eSHzpnYS1oqEdeH3Nfsks
```
Output:
```bash
{
"balance":0,
"program_owner_b64":"HZXHYRaKf6YusVo8x00/B15uyY5sGsJb1bzH4KlCY5g=",
"data_b64": "SGVsbG8gZnJvbSB0YWlsIGNhbGwgd2l0aCBQcm9ncmFtIERlcml2ZWQgQWNjb3VudCBJRA==",
"nonce":0"
}
```

View File

@ -1,10 +1,10 @@
[package]
name = "test-program-methods"
name = "example_program_deployment_methods"
version = "0.1.0"
edition = "2024"
[build-dependencies]
risc0-build = { version = "3.0.3" }
risc0-build.workspace = true
[package.metadata.risc0]
methods = ["guest"]

View File

@ -1,13 +1,11 @@
[package]
name = "programs"
name = "example_program_deployment_programs"
version = "0.1.0"
edition = "2024"
[workspace]
[dependencies]
risc0-zkvm = { version = "3.0.3", features = ['std'] }
nssa-core = { path = "../../../../nssa/core" }
serde = { version = "1.0.219", default-features = false }
hex = "0.4.3"
bytemuck = "1.24.0"
nssa_core.workspace = true
hex.workspace = true
bytemuck.workspace = true
risc0-zkvm.workspace = true

View File

@ -9,13 +9,12 @@ use nssa_core::program::{
// It reads a single account, emits it unchanged, and then triggers a tail call
// to the Hello World program with a fixed greeting.
/// This needs to be set to the ID of the Hello World program.
/// To get the ID run **from the root directory of the repository**:
/// `cargo risczero build --manifest-path examples/program_deployment/methods/guest/Cargo.toml`
/// This compiles the programs and outputs their IDs in hex, which can be copied here.
const HELLO_WORLD_PROGRAM_ID_HEX: &str =
"7e99d6e2d158f4dea59597011da5d1c2eef17beed6667657f515b387035b935a";
"e9dfc5a5d03c9afa732adae6e0edfce4bbb44c7a2afb9f148f4309917eb2de6f";
fn hello_world_program_id() -> ProgramId {
let hello_world_program_id_bytes: [u8; 32] = hex::decode(HELLO_WORLD_PROGRAM_ID_HEX)

View File

@ -0,0 +1,76 @@
use nssa_core::program::{
AccountPostState, ChainedCall, PdaSeed, ProgramId, ProgramInput, read_nssa_inputs,
write_nssa_outputs_with_chained_call,
};
// Tail Call with PDA example program.
//
// Demonstrates how to chain execution to another program using `ChainedCall`
// while authorizing program-derived accounts.
//
// Expects a single input account whose Account ID is derived from this
// programs ID and the fixed PDA seed below (as defined by the
// `<AccountId as From<(&ProgramId, &PdaSeed)>>` implementation).
//
// Emits this account unchanged, then performs a tail call to the
// Hello-World-with-Authorization program with a fixed greeting. The same
// account is passed along but marked with `is_authorized = true`.
const HELLO_WORLD_WITH_AUTHORIZATION_PROGRAM_ID_HEX: &str =
"1d95c761168a7fa62eb15a3cc74d3f075e6ec98e6c1ac25bd5bcc7e0a9426398";
const PDA_SEED: PdaSeed = PdaSeed::new([37; 32]);
fn hello_world_program_id() -> ProgramId {
let hello_world_program_id_bytes: [u8; 32] =
hex::decode(HELLO_WORLD_WITH_AUTHORIZATION_PROGRAM_ID_HEX)
.unwrap()
.try_into()
.unwrap();
bytemuck::cast(hello_world_program_id_bytes)
}
fn main() {
// Read inputs
let (
ProgramInput {
pre_states,
instruction: _,
},
instruction_data,
) = read_nssa_inputs::<()>();
// Unpack the input account pre state
let [pre_state] = pre_states
.clone()
.try_into()
.unwrap_or_else(|_| panic!("Input pre states should consist of a single account"));
// Create the (unchanged) post state
let post_state = AccountPostState::new(pre_state.account.clone());
// Create the chained call
let chained_call_greeting: Vec<u8> =
b"Hello from tail call with Program Derived Account ID".to_vec();
let chained_call_instruction_data = risc0_zkvm::serde::to_vec(&chained_call_greeting).unwrap();
// Flip the `is_authorized` flag to true
let pre_state_for_chained_call = {
let mut this = pre_state.clone();
this.is_authorized = true;
this
};
let chained_call = ChainedCall {
program_id: hello_world_program_id(),
instruction_data: chained_call_instruction_data,
pre_states: vec![pre_state_for_chained_call],
pda_seeds: vec![PDA_SEED],
};
// Write the outputs
write_nssa_outputs_with_chained_call(
instruction_data,
vec![pre_state],
vec![post_state],
vec![chained_call],
);
}

View File

@ -54,7 +54,7 @@ async fn main() {
.send_privacy_preserving_tx(
accounts,
&Program::serialize_instruction(greeting).unwrap(),
&program,
&program.into(),
)
.await
.unwrap();

View File

@ -0,0 +1,69 @@
use std::collections::HashMap;
use nssa::{
AccountId, ProgramId, privacy_preserving_transaction::circuit::ProgramWithDependencies,
program::Program,
};
use wallet::{PrivacyPreservingAccount, WalletCore, helperfunctions::fetch_config};
// Before running this example, compile the `simple_tail_call.rs` guest program with:
//
// cargo risczero build --manifest-path examples/program_deployment/methods/guest/Cargo.toml
//
// Note: you must run the above command from the root of the `lssa` repository.
// Note: The compiled binary file is stored in
// methods/guest/target/riscv32im-risc0-zkvm-elf/docker/simple_tail_call.bin
//
//
// Usage:
// cargo run --bin run_hello_world_through_tail_call_private /path/to/simple_tail_call/binary /path/to/hello_world/binary <account_id>
//
// Example:
// cargo run --bin run_hello_world_through_tail_call_private \
// methods/guest/target/riscv32im-risc0-zkvm-elf/docker/simple_tail_call.bin \
// methods/guest/target/riscv32im-risc0-zkvm-elf/docker/hello_world.bin \
// Ds8q5PjLcKwwV97Zi7duhRVF9uwA2PuYMoLL7FwCzsXE
#[tokio::main]
async fn main() {
// Load wallet config and storage
let wallet_config = fetch_config().await.unwrap();
let wallet_core = WalletCore::start_from_config_update_chain(wallet_config)
.await
.unwrap();
// Parse arguments
// First argument is the path to the simple_tail_call program binary
let simple_tail_call_path = std::env::args_os().nth(1).unwrap().into_string().unwrap();
// Second argument is the path to the hello_world program binary
let hello_world_path = std::env::args_os().nth(2).unwrap().into_string().unwrap();
// Third argument is the account_id
let account_id: AccountId = std::env::args_os()
.nth(3)
.unwrap()
.into_string()
.unwrap()
.parse()
.unwrap();
// Load the program and its dependencies (the hello world program)
let simple_tail_call_bytecode: Vec<u8> = std::fs::read(simple_tail_call_path).unwrap();
let simple_tail_call = Program::new(simple_tail_call_bytecode).unwrap();
let hello_world_bytecode: Vec<u8> = std::fs::read(hello_world_path).unwrap();
let hello_world = Program::new(hello_world_bytecode).unwrap();
let dependencies: HashMap<ProgramId, Program> =
[(hello_world.id(), hello_world)].into_iter().collect();
let program_with_dependencies = ProgramWithDependencies::new(simple_tail_call, dependencies);
let accounts = vec![PrivacyPreservingAccount::PrivateOwned(account_id)];
// Construct and submit the privacy-preserving transaction
let instruction = ();
wallet_core
.send_privacy_preserving_tx(
accounts,
&Program::serialize_instruction(instruction).unwrap(),
&program_with_dependencies,
)
.await
.unwrap();
}

View File

@ -0,0 +1,62 @@
use nssa::{
AccountId, PublicTransaction,
program::Program,
public_transaction::{Message, WitnessSet},
};
use nssa_core::program::PdaSeed;
use wallet::{WalletCore, helperfunctions::fetch_config};
// Before running this example, compile the `tail_call_with_pda.rs` guest program with:
//
// cargo risczero build --manifest-path examples/program_deployment/methods/guest/Cargo.toml
//
// Note: you must run the above command from the root of the `lssa` repository.
// Note: The compiled binary file is stored in
// methods/guest/target/riscv32im-risc0-zkvm-elf/docker/tail_call_with_pda.bin
//
//
// Usage:
// cargo run --bin run_hello_world_with_authorization_through_tail_call_with_pda
// /path/to/guest/binary
//
// Example:
// cargo run --bin run_hello_world_with_authorization_through_tail_call_with_pda \
// methods/guest/target/riscv32im-risc0-zkvm-elf/docker/tail_call_with_pda.bin
const PDA_SEED: PdaSeed = PdaSeed::new([37; 32]);
#[tokio::main]
async fn main() {
// Load wallet config and storage
let wallet_config = fetch_config().await.unwrap();
let wallet_core = WalletCore::start_from_config_update_chain(wallet_config)
.await
.unwrap();
// Parse arguments
// First argument is the path to the program binary
let program_path = std::env::args_os().nth(1).unwrap().into_string().unwrap();
// Load the program
let bytecode: Vec<u8> = std::fs::read(program_path).unwrap();
let program = Program::new(bytecode).unwrap();
// Compute the PDA to pass it as input account to the public execution
let pda = AccountId::from((&program.id(), &PDA_SEED));
let account_ids = vec![pda];
let instruction_data = ();
let nonces = vec![];
let signing_keys = [];
let message = Message::try_new(program.id(), account_ids, nonces, instruction_data).unwrap();
let witness_set = WitnessSet::for_message(&message, &signing_keys);
let tx = PublicTransaction::new(message, witness_set);
// Submit the transaction
let _response = wallet_core
.sequencer_client
.send_tx_public(tx)
.await
.unwrap();
println!("The program derived account ID is: {pda}");
}

View File

@ -105,7 +105,7 @@ async fn main() {
.send_privacy_preserving_tx(
accounts,
&Program::serialize_instruction(instruction).unwrap(),
&program,
&program.into(),
)
.await
.unwrap();
@ -146,7 +146,7 @@ async fn main() {
.send_privacy_preserving_tx(
accounts,
&Program::serialize_instruction(instruction).unwrap(),
&program,
&program.into(),
)
.await
.unwrap();

View File

@ -4,6 +4,16 @@ version = "0.1.0"
edition = "2024"
[dependencies]
nssa_core = { workspace = true, features = ["host"] }
nssa.workspace = true
sequencer_core = { workspace = true, features = ["testnet"] }
sequencer_runner.workspace = true
wallet.workspace = true
common.workspace = true
key_protocol.workspace = true
proc_macro_test_attribute = { path = "./proc_macro_test_attribute" }
clap = { workspace = true, features = ["derive", "env"] }
anyhow.workspace = true
env_logger.workspace = true
log.workspace = true
@ -14,31 +24,3 @@ tokio.workspace = true
hex.workspace = true
tempfile.workspace = true
borsh.workspace = true
nssa-core = { path = "../nssa/core", features = ["host"] }
proc_macro_test_attribute = { path = "./proc_macro_test_attribute" }
[dependencies.clap]
features = ["derive", "env"]
workspace = true
[dependencies.sequencer_core]
path = "../sequencer_core"
features = ["testnet"]
[dependencies.sequencer_runner]
path = "../sequencer_runner"
[dependencies.wallet]
path = "../wallet"
[dependencies.common]
path = "../common"
[dependencies.key_protocol]
path = "../key_protocol"
[dependencies.nssa]
path = "../nssa"
features = ["no_docker"]

Binary file not shown.

View File

@ -1596,7 +1596,10 @@ pub fn prepare_function_map() -> HashMap<String, TestFunction> {
pub async fn test_program_deployment() {
info!("########## test program deployment ##########");
let binary_filepath: PathBuf = NSSA_PROGRAM_FOR_TEST_DATA_CHANGER.parse().unwrap();
let manifest_dir = env!("CARGO_MANIFEST_DIR");
let binary_filepath: PathBuf = PathBuf::from(manifest_dir)
.join("../artifacts/test_program_methods")
.join(NSSA_PROGRAM_FOR_TEST_DATA_CHANGER);
let command = Command::DeployProgram {
binary_filepath: binary_filepath.clone(),
@ -1681,6 +1684,49 @@ pub fn prepare_function_map() -> HashMap<String, TestFunction> {
info!("Success!");
}
#[nssa_integration_test]
pub async fn test_authenticated_transfer_initialize_function_private() {
info!("########## test initialize private account for authenticated transfer ##########");
let command =
Command::Account(AccountSubcommand::New(NewSubcommand::Private { cci: None }));
let SubcommandReturnValue::RegisterAccount { account_id } =
wallet::cli::execute_subcommand(command).await.unwrap()
else {
panic!("Error creating account");
};
let command = Command::AuthTransfer(AuthTransferSubcommand::Init {
account_id: make_private_account_input_from_str(&account_id.to_string()),
});
wallet::cli::execute_subcommand(command).await.unwrap();
tokio::time::sleep(Duration::from_secs(TIME_TO_WAIT_FOR_BLOCK_SECONDS)).await;
info!("Checking correct execution");
let command = Command::Account(AccountSubcommand::SyncPrivate {});
wallet::cli::execute_subcommand(command).await.unwrap();
let wallet_config = fetch_config().await.unwrap();
let seq_client = SequencerClient::new(wallet_config.sequencer_addr.clone()).unwrap();
let wallet_storage = WalletCore::start_from_config_update_chain(wallet_config)
.await
.unwrap();
let new_commitment1 = wallet_storage
.get_private_account_commitment(&account_id)
.unwrap();
assert!(verify_commitment_is_in_state(new_commitment1, &seq_client).await);
let account = wallet_storage.get_account_private(&account_id).unwrap();
let expected_program_owner = Program::authenticated_transfer_program().id();
let expected_balance = 0;
assert_eq!(account.program_owner, expected_program_owner);
assert_eq!(account.balance, expected_balance);
assert!(account.data.is_empty());
}
#[nssa_integration_test]
pub async fn test_pinata_private_receiver() {
info!("########## test_pinata_private_receiver ##########");

View File

@ -167,7 +167,8 @@ fn build_privacy_transaction() -> PrivacyPreservingTransaction {
(sender_npk.clone(), sender_ss),
(recipient_npk.clone(), recipient_ss),
],
&[(sender_nsk, proof)],
&[sender_nsk],
&[Some(proof)],
&program.into(),
)
.unwrap();

View File

@ -4,22 +4,19 @@ version = "0.1.0"
edition = "2024"
[dependencies]
nssa.workspace = true
nssa_core.workspace = true
common.workspace = true
anyhow.workspace = true
serde.workspace = true
k256.workspace = true
sha2.workspace = true
rand.workspace = true
base58.workspace = true
hex = "0.4.3"
hex.workspace = true
aes-gcm.workspace = true
bip39.workspace = true
hmac-sha512.workspace = true
thiserror.workspace = true
nssa-core = { path = "../nssa/core", features = ["host"] }
itertools.workspace = true
[dependencies.common]
path = "../common"
[dependencies.nssa]
path = "../nssa"

View File

@ -4,27 +4,27 @@ version = "0.1.0"
edition = "2024"
[dependencies]
thiserror = "2.0.12"
risc0-zkvm = { version = "3.0.3", features = ['std'] }
nssa-core = { path = "core", features = ["host"] }
program-methods = { path = "program_methods", optional = true }
serde = "1.0.219"
sha2 = "0.10.9"
nssa_core = { workspace = true, features = ["host"] }
thiserror.workspace = true
risc0-zkvm.workspace = true
serde.workspace = true
sha2.workspace = true
rand.workspace = true
borsh.workspace = true
hex.workspace = true
secp256k1 = "0.31.1"
rand = "0.8"
borsh = "1.5.7"
hex = "0.4.3"
risc0-binfmt = "3.0.2"
bytemuck = "1.24.0"
log.workspace = true
[build-dependencies]
risc0-build = "3.0.3"
risc0-binfmt = "3.0.2"
[dev-dependencies]
test-program-methods = { path = "test_program_methods" }
test_program_methods.workspace = true
hex-literal = "1.0.0"
[features]
default = []
no_docker = ["program-methods"]

View File

@ -1,43 +1,21 @@
fn main() {
if cfg!(feature = "no_docker") {
println!("cargo:warning=NO_DOCKER feature enabled deterministic build skipped");
return;
}
build_deterministic().expect("Deterministic build failed");
}
fn build_deterministic() -> Result<(), Box<dyn std::error::Error>> {
use std::{env, fs, path::PathBuf, process::Command};
use std::{env, fs, path::PathBuf};
fn main() -> Result<(), Box<dyn std::error::Error>> {
let manifest_dir = PathBuf::from(env::var("CARGO_MANIFEST_DIR")?);
let out_dir = PathBuf::from(env::var("OUT_DIR")?);
let mod_dir = out_dir.join("program_methods");
let mod_file = mod_dir.join("mod.rs");
let program_methods_dir = manifest_dir.join("../artifacts/program_methods/");
println!("cargo:rerun-if-changed=program_methods/guest/src");
println!("cargo:rerun-if-changed=program_methods/guest/Cargo.toml");
println!("cargo:rerun-if-changed={}", program_methods_dir.display());
let guest_manifest = manifest_dir.join("program_methods/guest/Cargo.toml");
let status = Command::new("cargo")
.args(["risczero", "build", "--manifest-path"])
.arg(&guest_manifest)
.status()?;
if !status.success() {
return Err("Risc0 deterministic build failed".into());
}
let target_dir =
manifest_dir.join("program_methods/guest/target/riscv32im-risc0-zkvm-elf/docker/");
let bins = fs::read_dir(&target_dir)?
let bins = fs::read_dir(&program_methods_dir)?
.filter_map(Result::ok)
.filter(|e| e.path().extension().is_some_and(|ext| ext == "bin"))
.collect::<Vec<_>>();
if bins.is_empty() {
return Err(format!("No .bin files found in {:?}", target_dir).into());
return Err(format!("No .bin files found in {:?}", program_methods_dir).into());
}
fs::create_dir_all(&mod_dir)?;

View File

@ -1,21 +1,21 @@
[package]
name = "nssa-core"
name = "nssa_core"
version = "0.1.0"
edition = "2024"
[dependencies]
risc0-zkvm = { version = "3.0.3", features = ['std'] }
serde = { version = "1.0", default-features = false }
thiserror = { version = "2.0.12" }
bytemuck = "1.24.0"
risc0-zkvm.workspace = true
borsh.workspace = true
serde = { workspace = true }
thiserror.workspace = true
chacha20 = { version = "0.9", default-features = false }
k256 = { version = "0.13.3", optional = true }
base58 = { version = "0.2.0", optional = true }
anyhow = { version = "1.0.98", optional = true }
borsh = "1.5.7"
bytemuck = { workspace = true, optional = true }
k256 = { workspace = true, optional = true }
base58 = { workspace = true, optional = true }
anyhow = { workspace = true, optional = true }
[dev-dependencies]
serde_json = "1.0.81"
serde_json.workspace = true
[features]
default = []

View File

@ -45,9 +45,9 @@ impl AccountWithMetadata {
}
#[derive(
Serialize, Deserialize, Clone, Default, PartialEq, Eq, Hash, BorshSerialize, BorshDeserialize,
Copy, Clone, Serialize, Deserialize, PartialEq, Eq, Hash, BorshSerialize, BorshDeserialize,
)]
#[cfg_attr(any(feature = "host", test), derive(Debug, Copy, PartialOrd, Ord))]
#[cfg_attr(any(feature = "host", test), derive(Debug, PartialOrd, Ord, Default))]
pub struct AccountId {
value: [u8; 32],
}

View File

@ -10,11 +10,23 @@ use crate::{
#[derive(Serialize, Deserialize)]
pub struct PrivacyPreservingCircuitInput {
/// Outputs of the program execution.
pub program_outputs: Vec<ProgramOutput>,
/// Visibility mask for accounts.
///
/// - `0` - public account
/// - `1` - private account with authentication
/// - `2` - private account without authentication
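///
/// For example, `[0, 1, 2]` describes three accounts, one entry per account in
/// input order: a public account, an authenticated private account, and an
/// unauthenticated private account.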
pub visibility_mask: Vec<u8>,
/// Nonces of private accounts.
pub private_account_nonces: Vec<Nonce>,
/// Public keys of private accounts.
pub private_account_keys: Vec<(NullifierPublicKey, SharedSecretKey)>,
pub private_account_auth: Vec<(NullifierSecretKey, MembershipProof)>,
/// Nullifier secret keys for authorized private accounts.
pub private_account_nsks: Vec<NullifierSecretKey>,
/// Membership proofs for private accounts. Can be [`None`] for uninitialized accounts.
pub private_account_membership_proofs: Vec<Option<MembershipProof>>,
/// Program ID.
pub program_id: ProgramId,
}

View File

@ -25,7 +25,7 @@ pub struct ProgramInput<T> {
pub struct PdaSeed([u8; 32]);
impl PdaSeed {
pub fn new(value: [u8; 32]) -> Self {
pub const fn new(value: [u8; 32]) -> Self {
Self(value)
}
}

View File

@ -1,11 +0,0 @@
[package]
name = "programs"
version = "0.1.0"
edition = "2024"
[workspace]
[dependencies]
risc0-zkvm = { version = "3.0.3", features = ['std'] }
nssa-core = { path = "../../core" }
serde = { version = "1.0.219", default-features = false }

View File

@ -1,12 +1,7 @@
#[cfg(not(feature = "no_docker"))]
pub mod program_methods {
include!(concat!(env!("OUT_DIR"), "/program_methods/mod.rs"));
}
#[cfg(feature = "no_docker")]
#[allow(clippy::single_component_path_imports)]
use program_methods;
pub mod encoding;
pub mod error;
mod merkle_tree;

View File

@ -44,13 +44,15 @@ impl From<Program> for ProgramWithDependencies {
/// Generates a proof of the execution of a NSSA program inside the privacy preserving execution
/// circuit
#[expect(clippy::too_many_arguments, reason = "TODO: fix later")]
pub fn execute_and_prove(
pre_states: &[AccountWithMetadata],
instruction_data: &InstructionData,
visibility_mask: &[u8],
private_account_nonces: &[u128],
private_account_keys: &[(NullifierPublicKey, SharedSecretKey)],
private_account_auth: &[(NullifierSecretKey, MembershipProof)],
private_account_nsks: &[NullifierSecretKey],
private_account_membership_proofs: &[Option<MembershipProof>],
program_with_dependencies: &ProgramWithDependencies,
) -> Result<(PrivacyPreservingCircuitOutput, Proof), NssaError> {
let mut program = &program_with_dependencies.program;
@ -105,7 +107,8 @@ pub fn execute_and_prove(
visibility_mask: visibility_mask.to_vec(),
private_account_nonces: private_account_nonces.to_vec(),
private_account_keys: private_account_keys.to_vec(),
private_account_auth: private_account_auth.to_vec(),
private_account_nsks: private_account_nsks.to_vec(),
private_account_membership_proofs: private_account_membership_proofs.to_vec(),
program_id: program_with_dependencies.program.id(),
};
@ -218,6 +221,7 @@ mod tests {
&[0xdeadbeef],
&[(recipient_keys.npk(), shared_secret)],
&[],
&[None],
&Program::authenticated_transfer_program().into(),
)
.unwrap();
@ -315,10 +319,8 @@ mod tests {
(sender_keys.npk(), shared_secret_1),
(recipient_keys.npk(), shared_secret_2),
],
&[(
sender_keys.nsk,
commitment_set.get_proof_for(&commitment_sender).unwrap(),
)],
&[sender_keys.nsk],
&[commitment_set.get_proof_for(&commitment_sender), None],
&program.into(),
)
.unwrap();

View File

@ -228,6 +228,15 @@ mod tests {
}
}
pub fn noop() -> Self {
use test_program_methods::{NOOP_ELF, NOOP_ID};
Program {
id: NOOP_ID,
elf: NOOP_ELF.to_vec(),
}
}
pub fn modified_transfer_program() -> Self {
use test_program_methods::MODIFIED_TRANSFER_ELF;
// This unwrap won't panic since the `MODIFIED_TRANSFER_ELF` comes from risc0 build of

View File

@ -1,6 +1,7 @@
use std::collections::{HashMap, HashSet, VecDeque};
use borsh::{BorshDeserialize, BorshSerialize};
use log::debug;
use nssa_core::{
account::{Account, AccountId, AccountWithMetadata},
program::{ChainedCall, DEFAULT_PROGRAM_ID, PdaSeed, ProgramId, validate_execution},
@ -123,8 +124,16 @@ impl PublicTransaction {
return Err(NssaError::InvalidInput("Unknown program".into()));
};
debug!(
"Program {:?} pre_states: {:?}, instruction_data: {:?}",
chained_call.program_id, chained_call.pre_states, chained_call.instruction_data
);
let mut program_output =
program.execute(&chained_call.pre_states, &chained_call.instruction_data)?;
debug!(
"Program {:?} output: {:?}",
chained_call.program_id, program_output
);
let authorized_pdas =
self.compute_authorized_pdas(&caller_program_id, &chained_call.pda_seeds);

View File

@ -871,6 +871,7 @@ pub mod tests {
&[0xdeadbeef],
&[(recipient_keys.npk(), shared_secret)],
&[],
&[None],
&Program::authenticated_transfer_program().into(),
)
.unwrap();
@ -919,10 +920,8 @@ pub mod tests {
(sender_keys.npk(), shared_secret_1),
(recipient_keys.npk(), shared_secret_2),
],
&[(
sender_keys.nsk,
state.get_proof_for_commitment(&sender_commitment).unwrap(),
)],
&[sender_keys.nsk],
&[state.get_proof_for_commitment(&sender_commitment), None],
&program.into(),
)
.unwrap();
@ -971,10 +970,8 @@ pub mod tests {
&[1, 0],
&[new_nonce],
&[(sender_keys.npk(), shared_secret)],
&[(
sender_keys.nsk,
state.get_proof_for_commitment(&sender_commitment).unwrap(),
)],
&[sender_keys.nsk],
&[state.get_proof_for_commitment(&sender_commitment)],
&program.into(),
)
.unwrap();
@ -1188,6 +1185,7 @@ pub mod tests {
&[],
&[],
&[],
&[],
&program.into(),
);
@ -1214,6 +1212,7 @@ pub mod tests {
&[],
&[],
&[],
&[],
&program.into(),
);
@ -1240,6 +1239,7 @@ pub mod tests {
&[],
&[],
&[],
&[],
&program.into(),
);
@ -1266,6 +1266,7 @@ pub mod tests {
&[],
&[],
&[],
&[],
&program.into(),
);
@ -1294,6 +1295,7 @@ pub mod tests {
&[],
&[],
&[],
&[],
&program.to_owned().into(),
);
@ -1320,6 +1322,7 @@ pub mod tests {
&[],
&[],
&[],
&[],
&program.into(),
);
@ -1355,6 +1358,7 @@ pub mod tests {
&[],
&[],
&[],
&[],
&program.into(),
);
@ -1381,6 +1385,7 @@ pub mod tests {
&[],
&[],
&[],
&[],
&program.into(),
);
@ -1416,6 +1421,7 @@ pub mod tests {
&[],
&[],
&[],
&[],
&program.into(),
);
@ -1453,6 +1459,7 @@ pub mod tests {
&[],
&[],
&[],
&[],
&program.into(),
);
@ -1493,7 +1500,8 @@ pub mod tests {
SharedSecretKey::new(&[56; 32], &recipient_keys.ivk()),
),
],
&[(sender_keys.nsk, (0, vec![]))],
&[sender_keys.nsk],
&[Some((0, vec![]))],
&program.into(),
);
@ -1527,7 +1535,50 @@ pub mod tests {
&[1, 2],
&[0xdeadbeef1, 0xdeadbeef2],
&private_account_keys,
&[(sender_keys.nsk, (0, vec![]))],
&[sender_keys.nsk],
&[Some((0, vec![]))],
&program.into(),
);
assert!(matches!(result, Err(NssaError::CircuitProvingError(_))));
}
#[test]
fn test_circuit_fails_if_insufficient_commitment_proofs_are_provided() {
let program = Program::simple_balance_transfer();
let sender_keys = test_private_account_keys_1();
let recipient_keys = test_private_account_keys_2();
let private_account_1 = AccountWithMetadata::new(
Account {
program_owner: program.id(),
balance: 100,
..Account::default()
},
true,
&sender_keys.npk(),
);
let private_account_2 =
AccountWithMetadata::new(Account::default(), false, &recipient_keys.npk());
// Setting no second commitment proof.
let private_account_membership_proofs = [Some((0, vec![]))];
let result = execute_and_prove(
&[private_account_1, private_account_2],
&Program::serialize_instruction(10u128).unwrap(),
&[1, 2],
&[0xdeadbeef1, 0xdeadbeef2],
&[
(
sender_keys.npk(),
SharedSecretKey::new(&[55; 32], &sender_keys.ivk()),
),
(
recipient_keys.npk(),
SharedSecretKey::new(&[56; 32], &recipient_keys.ivk()),
),
],
&[sender_keys.nsk],
&private_account_membership_proofs,
&program.into(),
);
@ -1552,7 +1603,7 @@ pub mod tests {
AccountWithMetadata::new(Account::default(), false, &recipient_keys.npk());
// Setting no auth key for an execution with one non-default private account.
let private_account_auth = [];
let private_account_nsks = [];
let result = execute_and_prove(
&[private_account_1, private_account_2],
&Program::serialize_instruction(10u128).unwrap(),
@ -1568,7 +1619,8 @@ pub mod tests {
SharedSecretKey::new(&[56; 32], &recipient_keys.ivk()),
),
],
&private_account_auth,
&private_account_nsks,
&[],
&program.into(),
);
@ -1604,19 +1656,20 @@ pub mod tests {
SharedSecretKey::new(&[56; 32], &recipient_keys.ivk()),
),
];
let private_account_auth = [
// Setting the recipient key to authorize the sender.
// This should be set to the sender private account in
// a normal circumstance. The recipient can't authorize this.
(recipient_keys.nsk, (0, vec![])),
];
let private_account_nsks = [recipient_keys.nsk];
let private_account_membership_proofs = [Some((0, vec![]))];
let result = execute_and_prove(
&[private_account_1, private_account_2],
&Program::serialize_instruction(10u128).unwrap(),
&[1, 2],
&[0xdeadbeef1, 0xdeadbeef2],
&private_account_keys,
&private_account_auth,
&private_account_nsks,
&private_account_membership_proofs,
&program.into(),
);
@ -1662,7 +1715,8 @@ pub mod tests {
SharedSecretKey::new(&[56; 32], &recipient_keys.ivk()),
),
],
&[(sender_keys.nsk, (0, vec![]))],
&[sender_keys.nsk],
&[Some((0, vec![]))],
&program.into(),
);
@ -1709,7 +1763,8 @@ pub mod tests {
SharedSecretKey::new(&[56; 32], &recipient_keys.ivk()),
),
],
&[(sender_keys.nsk, (0, vec![]))],
&[sender_keys.nsk],
&[Some((0, vec![]))],
&program.into(),
);
@ -1755,7 +1810,8 @@ pub mod tests {
SharedSecretKey::new(&[56; 32], &recipient_keys.ivk()),
),
],
&[(sender_keys.nsk, (0, vec![]))],
&[sender_keys.nsk],
&[Some((0, vec![]))],
&program.into(),
);
@ -1801,7 +1857,8 @@ pub mod tests {
SharedSecretKey::new(&[56; 32], &recipient_keys.ivk()),
),
],
&[(sender_keys.nsk, (0, vec![]))],
&[sender_keys.nsk],
&[Some((0, vec![]))],
&program.into(),
);
@ -1845,7 +1902,8 @@ pub mod tests {
SharedSecretKey::new(&[56; 32], &recipient_keys.ivk()),
),
],
&[(sender_keys.nsk, (0, vec![]))],
&[sender_keys.nsk],
&[Some((0, vec![]))],
&program.into(),
);
@ -1875,6 +1933,7 @@ pub mod tests {
&[],
&[],
&[],
&[],
&program.into(),
);
@ -1916,7 +1975,8 @@ pub mod tests {
SharedSecretKey::new(&[56; 32], &recipient_keys.ivk()),
),
],
&[(sender_keys.nsk, (0, vec![]))],
&[sender_keys.nsk],
&[Some((0, vec![]))],
&program.into(),
);
@ -1962,7 +2022,8 @@ pub mod tests {
&[1, 2],
&[0xdeadbeef1, 0xdeadbeef2],
&private_account_keys,
&[(sender_keys.nsk, (0, vec![]))],
&[sender_keys.nsk],
&[Some((0, vec![]))],
&program.into(),
);
@ -1989,10 +2050,8 @@ pub mod tests {
// Setting two private account keys for a circuit execution with only one non default
// private account (visibility mask equal to 1 means that auth keys are expected).
let visibility_mask = [1, 2];
let private_account_auth = [
(sender_keys.nsk, (0, vec![])),
(recipient_keys.nsk, (1, vec![])),
];
let private_account_nsks = [sender_keys.nsk, recipient_keys.nsk];
let private_account_membership_proofs = [Some((0, vec![])), Some((1, vec![]))];
let result = execute_and_prove(
&[private_account_1, private_account_2],
&Program::serialize_instruction(10u128).unwrap(),
@ -2008,7 +2067,8 @@ pub mod tests {
SharedSecretKey::new(&[56; 32], &recipient_keys.ivk()),
),
],
&private_account_auth,
&private_account_nsks,
&private_account_membership_proofs,
&program.into(),
);
@ -2085,10 +2145,8 @@ pub mod tests {
);
let visibility_mask = [1, 1];
let private_account_auth = [
(sender_keys.nsk, (1, vec![])),
(sender_keys.nsk, (1, vec![])),
];
let private_account_nsks = [sender_keys.nsk, sender_keys.nsk];
let private_account_membership_proofs = [Some((1, vec![])), Some((1, vec![]))];
let shared_secret = SharedSecretKey::new(&[55; 32], &sender_keys.ivk());
let result = execute_and_prove(
&[private_account_1.clone(), private_account_1],
@ -2099,7 +2157,8 @@ pub mod tests {
(sender_keys.npk(), shared_secret),
(sender_keys.npk(), shared_secret),
],
&private_account_auth,
&private_account_nsks,
&private_account_membership_proofs,
&program.into(),
);
@ -3928,15 +3987,10 @@ pub mod tests {
&[1, 1],
&[from_new_nonce, to_new_nonce],
&[(from_keys.npk(), to_ss), (to_keys.npk(), from_ss)],
&[from_keys.nsk, to_keys.nsk],
&[
(
from_keys.nsk,
state.get_proof_for_commitment(&from_commitment).unwrap(),
),
(
to_keys.nsk,
state.get_proof_for_commitment(&to_commitment).unwrap(),
),
state.get_proof_for_commitment(&from_commitment),
state.get_proof_for_commitment(&to_commitment),
],
&program_with_deps,
)
@ -4143,4 +4197,143 @@ pub mod tests {
assert!(expected_sender_post == sender_post);
assert!(expected_recipient_post == recipient_post);
}
#[test]
fn test_private_authorized_uninitialized_account() {
let mut state = V02State::new_with_genesis_accounts(&[], &[]);
// Set up keys for the authorized private account
let private_keys = test_private_account_keys_1();
// Create an authorized private account with default values (new account being initialized)
let authorized_account =
AccountWithMetadata::new(Account::default(), true, &private_keys.npk());
let program = Program::authenticated_transfer_program();
// Set up parameters for the new account
let esk = [3; 32];
let shared_secret = SharedSecretKey::new(&esk, &private_keys.ivk());
let epk = EphemeralPublicKey::from_scalar(esk);
// Balance to initialize the account with (0 for a new account)
let balance: u128 = 0;
let nonce = 0xdeadbeef1;
// Execute and prove the circuit with the authorized account but no commitment proof
let (output, proof) = execute_and_prove(
std::slice::from_ref(&authorized_account),
&Program::serialize_instruction(balance).unwrap(),
&[1],
&[nonce],
&[(private_keys.npk(), shared_secret)],
&[private_keys.nsk],
&[None],
&program.into(),
)
.unwrap();
// Create message from circuit output
let message = Message::try_from_circuit_output(
vec![],
vec![],
vec![(private_keys.npk(), private_keys.ivk(), epk)],
output,
)
.unwrap();
let witness_set = WitnessSet::for_message(&message, proof, &[]);
let tx = PrivacyPreservingTransaction::new(message, witness_set);
let result = state.transition_from_privacy_preserving_transaction(&tx);
assert!(result.is_ok());
let nullifier = Nullifier::for_account_initialization(&private_keys.npk());
assert!(state.private_state.1.contains(&nullifier));
}
#[test]
fn test_private_account_claimed_then_used_without_init_flag_should_fail() {
let mut state = V02State::new_with_genesis_accounts(&[], &[]).with_test_programs();
// Set up keys for the private account
let private_keys = test_private_account_keys_1();
// Step 1: Create a new private account with authorization
let authorized_account =
AccountWithMetadata::new(Account::default(), true, &private_keys.npk());
let claimer_program = Program::claimer();
// Set up parameters for claiming the new account
let esk = [3; 32];
let shared_secret = SharedSecretKey::new(&esk, &private_keys.ivk());
let epk = EphemeralPublicKey::from_scalar(esk);
let balance: u128 = 0;
let nonce = 0xdeadbeef1;
// Step 2: Execute claimer program to claim the account with authentication
let (output, proof) = execute_and_prove(
std::slice::from_ref(&authorized_account),
&Program::serialize_instruction(balance).unwrap(),
&[1],
&[nonce],
&[(private_keys.npk(), shared_secret)],
&[private_keys.nsk],
&[None],
&claimer_program.into(),
)
.unwrap();
let message = Message::try_from_circuit_output(
vec![],
vec![],
vec![(private_keys.npk(), private_keys.ivk(), epk)],
output,
)
.unwrap();
let witness_set = WitnessSet::for_message(&message, proof, &[]);
let tx = PrivacyPreservingTransaction::new(message, witness_set);
// Claim should succeed
assert!(
state
.transition_from_privacy_preserving_transaction(&tx)
.is_ok()
);
// Verify the account is now initialized (nullifier exists)
let nullifier = Nullifier::for_account_initialization(&private_keys.npk());
assert!(state.private_state.1.contains(&nullifier));
// Prepare new state of account
let account_metadata = {
let mut acc = authorized_account.clone();
acc.account.program_owner = Program::claimer().id();
acc
};
let noop_program = Program::noop();
let esk2 = [4; 32];
let shared_secret2 = SharedSecretKey::new(&esk2, &private_keys.ivk());
let nonce2 = 0xdeadbeef2;
// Step 3: Try to execute noop program with authentication but without initialization
let res = execute_and_prove(
std::slice::from_ref(&account_metadata),
&Program::serialize_instruction(()).unwrap(),
&[1],
&[nonce2],
&[(private_keys.npk(), shared_secret2)],
&[private_keys.nsk],
&[None],
&noop_program.into(),
);
assert!(matches!(res, Err(NssaError::CircuitProvingError(_))));
}
}

File diff suppressed because it is too large

View File

@ -1,11 +0,0 @@
[package]
name = "programs"
version = "0.1.0"
edition = "2024"
[workspace]
[dependencies]
risc0-zkvm = { version = "3.0.3", features = ['std'] }
nssa-core = { path = "../../core" }
serde = { version = "1.0.219", default-features = false }

View File

@ -1,18 +0,0 @@
use nssa_core::program::{read_nssa_inputs, write_nssa_outputs, AccountPostState, ProgramInput};
type Instruction = ();
fn main() {
let (ProgramInput { pre_states, .. } , instruction_words) = read_nssa_inputs::<Instruction>();
let [pre] = match pre_states.try_into() {
Ok(array) => array,
Err(_) => return,
};
let account_pre = &pre.account;
let mut account_post = account_pre.clone();
account_post.nonce += 1;
write_nssa_outputs(instruction_words ,vec![pre], vec![AccountPostState::new(account_post)]);
}

View File

@ -1,10 +1,10 @@
[package]
name = "program-methods"
name = "program_methods"
version = "0.1.0"
edition = "2024"
[build-dependencies]
risc0-build = { version = "3.0.3" }
risc0-build.workspace = true
[package.metadata.risc0]
methods = ["guest"]

View File

@ -0,0 +1,10 @@
[package]
name = "programs"
version = "0.1.0"
edition = "2024"
[dependencies]
nssa_core.workspace = true
risc0-zkvm.workspace = true
serde = { workspace = true, default-features = false }

View File

@ -17,7 +17,7 @@ fn initialize_account(pre_state: AccountWithMetadata) -> AccountPostState {
// Continue only if the owner authorized this operation
if !is_authorized {
panic!("Invalid input");
panic!("Account must be authorized");
}
account_to_claim
@ -31,12 +31,12 @@ fn transfer(
) -> Vec<AccountPostState> {
// Continue only if the sender has authorized this operation
if !sender.is_authorized {
panic!("Invalid input");
panic!("Sender must be authorized");
}
// Continue only if the sender has enough balance
if sender.account.balance < balance_to_move {
panic!("Invalid input");
panic!("Sender has insufficient balance");
}
// Create accounts post states, with updated balances

View File

@ -16,7 +16,8 @@ fn main() {
visibility_mask,
private_account_nonces,
private_account_keys,
private_account_auth,
private_account_nsks,
private_account_membership_proofs,
mut program_id,
} = env::read();
@ -63,7 +64,8 @@ fn main() {
for (i, program_output) in program_outputs.iter().enumerate() {
let mut program_output = program_output.clone();
// Check that `program_output` is consistent with the execution of the corresponding program.
// Check that `program_output` is consistent with the execution of the corresponding
// program.
let program_output_words =
&to_vec(&program_output).expect("program_output must be serializable");
env::verify(program_id, program_output_words)
@ -105,7 +107,7 @@ fn main() {
} else {
pre_states.push(pre.clone());
}
state_diff.insert(pre.account_id.clone(), post.account().clone());
state_diff.insert(pre.account_id, post.account().clone());
}
// TODO: Modify when multi-chain calls are supported in the circuit
@ -131,7 +133,8 @@ fn main() {
let mut private_nonces_iter = private_account_nonces.iter();
let mut private_keys_iter = private_account_keys.iter();
let mut private_auth_iter = private_account_auth.iter();
let mut private_nsks_iter = private_account_nsks.iter();
let mut private_membership_proofs_iter = private_account_membership_proofs.iter();
let mut output_index = 0;
for i in 0..n_accounts {
@ -158,8 +161,7 @@ fn main() {
if visibility_mask[i] == 1 {
// Private account with authentication
let (nsk, membership_proof) =
private_auth_iter.next().expect("Missing private auth");
let nsk = private_nsks_iter.next().expect("Missing nsk");
// Verify the nullifier public key
let expected_npk = NullifierPublicKey::from(nsk);
@ -167,19 +169,38 @@ fn main() {
panic!("Nullifier public key mismatch");
}
// Compute commitment set digest associated with provided auth path
let commitment_pre = Commitment::new(npk, &pre_states[i].account);
let set_digest = compute_digest_for_path(&commitment_pre, membership_proof);
// Check pre_state authorization
if !pre_states[i].is_authorized {
panic!("Pre-state not authorized");
}
let membership_proof_opt = private_membership_proofs_iter
.next()
.expect("Missing membership proof");
let (nullifier, set_digest) = membership_proof_opt
.as_ref()
.map(|membership_proof| {
// Compute commitment set digest associated with provided auth path
let commitment_pre = Commitment::new(npk, &pre_states[i].account);
let set_digest =
compute_digest_for_path(&commitment_pre, membership_proof);
// Compute update nullifier
let nullifier = Nullifier::for_account_update(&commitment_pre, nsk);
(nullifier, set_digest)
})
.unwrap_or_else(|| {
if pre_states[i].account != Account::default() {
panic!("Found new private account with non default values.");
}
// Compute initialization nullifier
let nullifier = Nullifier::for_account_initialization(npk);
(nullifier, DUMMY_COMMITMENT_HASH)
});
new_nullifiers.push((nullifier, set_digest));
} else {
// Private account without authentication
if pre_states[i].account != Account::default() {
panic!("Found new private account with non default values.");
}
@ -188,7 +209,13 @@ fn main() {
panic!("Found new private account marked as authorized.");
}
// Compute initialization nullifier
let membership_proof_opt = private_membership_proofs_iter
.next()
.expect("Missing membership proof");
assert!(
membership_proof_opt.is_none(),
"Membership proof must be None for unauthorized accounts"
);
let nullifier = Nullifier::for_account_initialization(npk);
new_nullifiers.push((nullifier, DUMMY_COMMITMENT_HASH));
}
@ -223,15 +250,19 @@ fn main() {
}
if private_nonces_iter.next().is_some() {
panic!("Too many nonces.");
panic!("Too many nonces");
}
if private_keys_iter.next().is_some() {
panic!("Too many private account keys.");
panic!("Too many private account keys");
}
if private_auth_iter.next().is_some() {
panic!("Too many private account authentication keys.");
if private_nsks_iter.next().is_some() {
panic!("Too many private account authentication keys");
}
if private_membership_proofs_iter.next().is_some() {
panic!("Too many private account membership proofs");
}
let output = PrivacyPreservingCircuitOutput {
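Condensed, the per-account nullifier selection added above boils down to the sketch below. Commitment, Nullifier, NullifierPublicKey, compute_digest_for_path, DUMMY_COMMITMENT_HASH, and Account are the items used by the guest; the secret-key, proof, and digest type names are assumptions, since this diff does not show their declarations.

// Sketch of the branch above; NullifierSecretKey, MembershipProof, and Digest
// are assumed names for types whose definitions are not shown in this diff.
fn select_nullifier(
    npk: &NullifierPublicKey,
    nsk: &NullifierSecretKey,
    pre_account: &Account,
    membership_proof: Option<&MembershipProof>,
) -> (Nullifier, Digest) {
    match membership_proof {
        // Existing account: bind the nullifier to its commitment and prove set membership.
        Some(proof) => {
            let commitment = Commitment::new(npk, pre_account);
            let set_digest = compute_digest_for_path(&commitment, proof);
            (Nullifier::for_account_update(&commitment, nsk), set_digest)
        }
        // Fresh account: must still be in the default state; it gets an
        // initialization nullifier and a dummy set digest.
        None => {
            assert_eq!(pre_account, &Account::default());
            (Nullifier::for_account_initialization(npk), DUMMY_COMMITMENT_HASH)
        }
    }
}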

View File

@ -6,36 +6,35 @@ use nssa_core::{
};
// The token program has three functions:
// 1. New token definition.
// Arguments to this function are:
// * Two **default** accounts: [definition_account, holding_account].
// The first default account will be initialized with the token definition account values. The second account will
// be initialized to a token holding account for the new token, holding the entire total supply.
// * Instruction data of 23 bytes, indicating the total supply and the token name, with
// the following layout:
// [0x00 || total_supply (little-endian 16 bytes) || name (6 bytes)]
// The name cannot be equal to [0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
// 2. Token transfer
// Arguments to this function are:
// 1. New token definition. Arguments to this function are:
// * Two **default** accounts: [definition_account, holding_account]. The first default account
// will be initialized with the token definition account values. The second account will be
// initialized to a token holding account for the new token, holding the entire total supply.
// * Instruction data of 23 bytes, indicating the total supply and the token name, with the
// following layout: [0x00 || total_supply (little-endian 16 bytes) || name (6 bytes)] The
// name cannot be equal to [0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
// 2. Token transfer. Arguments to this function are:
// * Two accounts: [sender_account, recipient_account].
// * An instruction data byte string of length 23, indicating the amount to transfer, with the following layout
// [0x01 || amount (little-endian 16 bytes) || 0x00 || 0x00 || 0x00 || 0x00 || 0x00 || 0x00].
// 3. Initialize account with zero balance
// Arguments to this function are:
// * An instruction data byte string of length 23, indicating the amount to transfer, with the
// following layout [0x01 || amount (little-endian 16 bytes) || 0x00 || 0x00 || 0x00 || 0x00
// || 0x00 || 0x00].
// 3. Initialize account with zero balance. Arguments to this function are:
// * Two accounts: [definition_account, account_to_initialize].
// * A dummy byte string of length 23, with the following layout
// [0x02 || 0x00 || 0x00 || 0x00 || ... || 0x00 || 0x00].
// 4. Burn tokens from a Token Holding account (thus lowering total supply)
// Arguments to this function are:
// * A dummy byte string of length 23, with the following layout [0x02 || 0x00 || 0x00 || 0x00
// || ... || 0x00 || 0x00].
// 4. Burn tokens from a Token Holding account (thus lowering total supply). Arguments to this
// function are:
// * Two accounts: [definition_account, holding_account].
// * Authorization required: holding_account
// * An instruction data byte string of length 23, indicating the balance to burn with the following layout
// * An instruction data byte string of length 23, indicating the balance to burn with the
// following layout
// [0x03 || amount (little-endian 16 bytes) || 0x00 || 0x00 || 0x00 || 0x00 || 0x00 || 0x00].
// 5. Mint additional supply of tokens to a Token Holding account (thus increasing total supply)
// Arguments to this function are:
// 5. Mint additional supply of tokens to a Token Holding account (thus increasing total
// supply). Arguments to this function are:
// * Two accounts: [definition_account, holding_account].
// * Authorization required: definition_account
// * An instruction data byte string of length 23, indicating the balance to mint with the following layout
// * An instruction data byte string of length 23, indicating the balance to mint with the
// following layout
// [0x04 || amount (little-endian 16 bytes) || 0x00 || 0x00 || 0x00 || 0x00 || 0x00 || 0x00].
const TOKEN_DEFINITION_TYPE: u8 = 0;
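To make the layouts above concrete, here is a minimal, self-contained sketch of how a host could assemble two of the 23-byte instruction strings. The helper names are illustrative only and are not part of this crate.

// Illustrative helpers for the instruction layouts documented above.
fn transfer_instruction(amount: u128) -> [u8; 23] {
    let mut data = [0u8; 23];
    data[0] = 0x01; // selector: token transfer
    data[1..17].copy_from_slice(&amount.to_le_bytes()); // amount, little-endian 16 bytes
    // bytes 17..23 stay 0x00 for a transfer
    data
}

fn new_definition_instruction(total_supply: u128, name: [u8; 6]) -> [u8; 23] {
    assert_ne!(name, [0u8; 6], "token name must not be all zeroes");
    let mut data = [0u8; 23];
    data[0] = 0x00; // selector: new token definition
    data[1..17].copy_from_slice(&total_supply.to_le_bytes());
    data[17..23].copy_from_slice(&name);
    data
}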
@ -89,10 +88,10 @@ impl TokenDefinition {
}
impl TokenHolding {
fn new(definition_id: &AccountId) -> Self {
fn new(definition_id: AccountId) -> Self {
Self {
account_type: TOKEN_HOLDING_TYPE,
definition_id: definition_id.clone(),
definition_id,
balance: 0,
}
}
@ -142,7 +141,7 @@ fn transfer(pre_states: &[AccountWithMetadata], balance_to_move: u128) -> Vec<Ac
let mut sender_holding =
TokenHolding::parse(&sender.account.data).expect("Invalid sender data");
let mut recipient_holding = if recipient.account == Account::default() {
TokenHolding::new(&sender_holding.definition_id)
TokenHolding::new(sender_holding.definition_id)
} else {
TokenHolding::parse(&recipient.account.data).expect("Invalid recipient data")
};
@ -213,7 +212,7 @@ fn new_definition(
let token_holding = TokenHolding {
account_type: TOKEN_HOLDING_TYPE,
definition_id: definition_target_account.account_id.clone(),
definition_id: definition_target_account.account_id,
balance: total_supply,
};
@ -247,7 +246,7 @@ fn initialize_account(pre_states: &[AccountWithMetadata]) -> Vec<AccountPostStat
// Check definition account is valid
let _definition_values =
TokenDefinition::parse(&definition.account.data).expect("Definition account must be valid");
let holding_values = TokenHolding::new(&definition.account_id);
let holding_values = TokenHolding::new(definition.account_id);
let definition_post = definition.account.clone();
let mut account_to_initialize = account_to_initialize.account.clone();
@ -330,7 +329,7 @@ fn mint_additional_supply(
TokenDefinition::parse(&definition.account.data).expect("Definition account must be valid");
let token_holding_values: TokenHolding = if token_holding.account == Account::default() {
TokenHolding::new(&definition.account_id)
TokenHolding::new(definition.account_id)
} else {
TokenHolding::parse(&token_holding.account.data).expect("Holding account must be valid")
};
@ -1078,7 +1077,6 @@ mod tests {
is_authorized: true,
account_id: helper_id_constructor(IdEnum::PoolDefinitionId),
},
_ => panic!("Invalid selection"),
}
}
@ -1093,8 +1091,7 @@ mod tests {
BalanceEnum::MintSuccess => 50_000,
BalanceEnum::InitSupplyMint => 150_000,
BalanceEnum::HoldingBalanceMint => 51_000,
BalanceEnum::MintOverflow => (2 as u128).pow(128) - 40_000,
_ => panic!("Invalid selection"),
BalanceEnum::MintOverflow => u128::MAX - 40_000,
}
}
@ -1290,7 +1287,7 @@ mod tests {
assert!(
*holding_post.account() == helper_account_constructor(AccountsEnum::InitMint).account
);
assert!(holding_post.requires_claim() == true);
assert!(holding_post.requires_claim());
}
#[test]

View File

@ -4,26 +4,18 @@ version = "0.1.0"
edition = "2024"
[dependencies]
nssa.workspace = true
nssa_core.workspace = true
common.workspace = true
storage.workspace = true
mempool.workspace = true
base58.workspace = true
anyhow.workspace = true
serde.workspace = true
rand.workspace = true
tempfile.workspace = true
chrono.workspace = true
log.workspace = true
nssa-core = { path = "../nssa/core", features = ["host"] }
[dependencies.storage]
path = "../storage"
[dependencies.mempool]
path = "../mempool"
[dependencies.common]
path = "../common"
[dependencies.nssa]
path = "../nssa"
[features]
default = []

View File

@ -4,6 +4,11 @@ version = "0.1.0"
edition = "2024"
[dependencies]
nssa.workspace = true
common.workspace = true
mempool.workspace = true
sequencer_core.workspace = true
anyhow.workspace = true
serde_json.workspace = true
log.workspace = true
@ -11,25 +16,10 @@ serde.workspace = true
actix-cors.workspace = true
futures.workspace = true
base58.workspace = true
hex = "0.4.3"
hex.workspace = true
tempfile.workspace = true
base64.workspace = true
itertools.workspace = true
actix-web.workspace = true
tokio.workspace = true
borsh.workspace = true
# TODO: Move to workspace
[dependencies.sequencer_core]
path = "../sequencer_core"
[dependencies.common]
path = "../common"
[dependencies.nssa]
path = "../nssa"
[dependencies.mempool]
path = "../mempool"

View File

@ -4,25 +4,15 @@ version = "0.1.0"
edition = "2024"
[dependencies]
common.workspace = true
sequencer_core = { workspace = true, features = ["testnet"] }
sequencer_rpc.workspace = true
clap = { workspace = true, features = ["derive", "env"] }
anyhow.workspace = true
serde_json.workspace = true
env_logger.workspace = true
log.workspace = true
actix.workspace = true
actix-web.workspace = true
tokio.workspace = true
[dependencies.clap]
features = ["derive", "env"]
workspace = true
[dependencies.sequencer_rpc]
path = "../sequencer_rpc"
[dependencies.sequencer_core]
path = "../sequencer_core"
features = ["testnet"]
[dependencies.common]
path = "../common"

View File

@ -0,0 +1,79 @@
# Chef stage - uses pre-built cargo-chef image
FROM lukemathwalker/cargo-chef:latest-rust-1.91.1-slim-trixie AS chef
# Install build dependencies
RUN apt-get update && apt-get install -y \
pkg-config \
libssl-dev \
libclang-dev \
clang \
&& rm -rf /var/lib/apt/lists/*
WORKDIR /sequencer_runner
# Planner stage - generates dependency recipe
FROM chef AS planner
COPY . .
RUN cargo chef prepare --bin sequencer_runner --recipe-path recipe.json
# Builder stage - builds dependencies and application
FROM chef AS builder
COPY --from=planner /sequencer_runner/recipe.json recipe.json
# Build dependencies only (this layer will be cached)
RUN cargo chef cook --bin sequencer_runner --release --recipe-path recipe.json
# Copy source code
COPY . .
# Build the actual application
RUN cargo build --release --bin sequencer_runner
# Strip debug symbols to reduce binary size
RUN strip /sequencer_runner/target/release/sequencer_runner
# Runtime stage - minimal image
FROM debian:trixie-slim
# Install runtime dependencies (curl is needed by the HEALTHCHECK below)
RUN apt-get update \
&& apt-get install -y gosu jq curl \
&& rm -rf /var/lib/apt/lists/*
# Create non-root user for security
RUN useradd -m -u 1000 -s /bin/bash sequencer_user && \
mkdir -p /sequencer_runner /etc/sequencer_runner && \
chown -R sequencer_user:sequencer_user /sequencer_runner /etc/sequencer_runner
# Copy binary from builder
COPY --from=builder --chown=sequencer_user:sequencer_user /sequencer_runner/target/release/sequencer_runner /usr/local/bin/sequencer_runner
# Copy entrypoint script
COPY sequencer_runner/docker-entrypoint.sh /docker-entrypoint.sh
RUN chmod +x /docker-entrypoint.sh
# Volume for configuration directory
VOLUME ["/etc/sequencer_runner"]
# Expose default port
EXPOSE 3040
# Health check (TODO #244: Replace when a real health endpoint is available)
HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \
CMD curl http://localhost:3040 \
-H "Content-Type: application/json" \
-d "{ \
\"jsonrpc\": \"2.0\", \
\"method\": \"hello\", \
\"params\": {}, \
\"id\": 1 \
}" || exit 1
# Run the application
ENV RUST_LOG=info
USER root
ENTRYPOINT ["/docker-entrypoint.sh"]
WORKDIR /sequencer_runner
CMD ["sequencer_runner", "/etc/sequencer_runner"]

View File

@ -0,0 +1,158 @@
{
"home": "/var/lib/sequencer_runner",
"override_rust_log": null,
"genesis_id": 1,
"is_genesis_random": true,
"max_num_tx_in_block": 20,
"mempool_max_size": 10000,
"block_create_timeout_millis": 10000,
"port": 3040,
"initial_accounts": [
{
"account_id": "BLgCRDXYdQPMMWVHYRFGQZbgeHx9frkipa8GtpG2Syqy",
"balance": 10000
},
{
"account_id": "Gj1mJy5W7J5pfmLRujmQaLfLMWidNxQ6uwnhb666ZwHw",
"balance": 20000
}
],
"initial_commitments": [
{
"npk": [
63,
202,
178,
231,
183,
82,
237,
212,
216,
221,
215,
255,
153,
101,
177,
161,
254,
210,
128,
122,
54,
190,
230,
151,
183,
64,
225,
229,
113,
1,
228,
97
],
"account": {
"program_owner": [
0,
0,
0,
0,
0,
0,
0,
0
],
"balance": 10000,
"data": [],
"nonce": 0
}
},
{
"npk": [
192,
251,
166,
243,
167,
236,
84,
249,
35,
136,
130,
172,
219,
225,
161,
139,
229,
89,
243,
125,
194,
213,
209,
30,
23,
174,
100,
244,
124,
74,
140,
47
],
"account": {
"program_owner": [
0,
0,
0,
0,
0,
0,
0,
0
],
"balance": 20000,
"data": [],
"nonce": 0
}
}
],
"signing_key": [
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37
]
}
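For readers of the sequencer code, the JSON above maps onto a serde structure roughly like the sketch below. The field names mirror the config file; the struct names and the exact integer widths are assumptions, not the definitions used by sequencer_core.

// Rough shape of sequencer_config.json; struct names and integer widths are assumed.
use serde::Deserialize;

#[derive(Deserialize)]
struct SequencerConfig {
    home: String,
    override_rust_log: Option<String>,
    genesis_id: u64,
    is_genesis_random: bool,
    max_num_tx_in_block: u64,
    mempool_max_size: u64,
    block_create_timeout_millis: u64,
    port: u16,
    initial_accounts: Vec<InitialAccount>,
    initial_commitments: Vec<InitialCommitment>,
    signing_key: [u8; 32],
}

#[derive(Deserialize)]
struct InitialAccount {
    account_id: String, // base58-encoded account id
    balance: u128,
}

#[derive(Deserialize)]
struct InitialCommitment {
    npk: [u8; 32],
    account: AccountConfig,
}

#[derive(Deserialize)]
struct AccountConfig {
    program_owner: [u32; 8], // element width assumed
    balance: u128,
    data: Vec<u8>,
    nonce: u64,
}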

View File

@ -0,0 +1,14 @@
services:
sequencer_runner:
image: lssa/sequencer_runner
build:
context: ..
dockerfile: sequencer_runner/Dockerfile
container_name: sequencer_runner
ports:
- "3040:3040"
volumes:
# Mount configuration folder
- ./configs/docker:/etc/sequencer_runner
# Mount data folder
- ./data:/var/lib/sequencer_runner

View File

@ -0,0 +1,29 @@
#!/bin/sh
# This is the entrypoint script for the sequencer_runner Docker container;
# it is not meant to be executed outside the container.
set -e
CONFIG="/etc/sequencer_runner/sequencer_config.json"
# Check config file exists
if [ ! -f "$CONFIG" ]; then
echo "Config file not found: $CONFIG" >&2
exit 1
fi
# Parse home dir
HOME_DIR=$(jq -r '.home' "$CONFIG")
if [ -z "$HOME_DIR" ] || [ "$HOME_DIR" = "null" ]; then
echo "'home' key missing in config" >&2
exit 1
fi
# Give permissions to the data directory and switch to non-root user
if [ "$(id -u)" = "0" ]; then
mkdir -p "$HOME_DIR"
chown -R sequencer_user:sequencer_user "$HOME_DIR"
exec gosu sequencer_user "$@"
fi

View File

@ -4,10 +4,8 @@ version = "0.1.0"
edition = "2024"
[dependencies]
common.workspace = true
thiserror.workspace = true
borsh.workspace = true
rocksdb.workspace = true
[dependencies.common]
path = "../common"

View File

@ -1,10 +1,10 @@
[package]
name = "test-program-methods"
name = "test_program_methods"
version = "0.1.0"
edition = "2024"
[build-dependencies]
risc0-build = { version = "3.0.3" }
risc0-build.workspace = true
[package.metadata.risc0]
methods = ["guest"]

View File

@ -0,0 +1,9 @@
[package]
name = "test_programs"
version = "0.1.0"
edition = "2024"
[dependencies]
nssa_core.workspace = true
risc0-zkvm.workspace = true

View File

@ -20,5 +20,9 @@ fn main() {
let mut account_post = account_pre.clone();
account_post.balance -= balance_to_burn;
write_nssa_outputs(instruction_words, vec![pre], vec![AccountPostState::new(account_post)]);
write_nssa_outputs(
instruction_words,
vec![pre],
vec![AccountPostState::new(account_post)],
);
}

View File

@ -15,7 +15,7 @@ fn main() {
pre_states,
instruction: (balance, auth_transfer_id, num_chain_calls, pda_seed),
},
instruction_words
instruction_words,
) = read_nssa_inputs::<Instruction>();
let [recipient_pre, sender_pre] = match pre_states.try_into() {
@ -37,7 +37,7 @@ fn main() {
let new_chained_call = ChainedCall {
program_id: auth_transfer_id,
instruction_data: instruction_data.clone(),
pre_states: vec![running_sender_pre.clone(), running_recipient_pre.clone()], // <- Account order permutation here
pre_states: vec![running_sender_pre.clone(), running_recipient_pre.clone()], /* <- Account order permutation here */
pda_seeds: pda_seed.iter().cloned().collect(),
};
chained_calls.push(new_chained_call);

View File

@ -4,7 +4,13 @@ type Instruction = Vec<u8>;
/// A program that modifies the account data by setting bytes sent in instruction.
fn main() {
let (ProgramInput { pre_states, instruction: data }, instruction_words) = read_nssa_inputs::<Instruction>();
let (
ProgramInput {
pre_states,
instruction: data,
},
instruction_words,
) = read_nssa_inputs::<Instruction>();
let [pre] = match pre_states.try_into() {
Ok(array) => array,
@ -13,7 +19,9 @@ fn main() {
let account_pre = &pre.account;
let mut account_post = account_pre.clone();
account_post.data = data.try_into().expect("provided data should fit into data limit");
account_post.data = data
.try_into()
.expect("provided data should fit into data limit");
write_nssa_outputs(
instruction_words,

View File

@ -1,4 +1,4 @@
use nssa_core::program::{read_nssa_inputs, write_nssa_outputs, AccountPostState, ProgramInput};
use nssa_core::program::{AccountPostState, ProgramInput, read_nssa_inputs, write_nssa_outputs};
type Instruction = ();
@ -14,5 +14,9 @@ fn main() {
let mut account_post = account_pre.clone();
account_post.balance += 1;
write_nssa_outputs(instruction_words, vec![pre], vec![AccountPostState::new(account_post)]);
write_nssa_outputs(
instruction_words,
vec![pre],
vec![AccountPostState::new(account_post)],
);
}

View File

@ -0,0 +1,22 @@
use nssa_core::program::{AccountPostState, ProgramInput, read_nssa_inputs, write_nssa_outputs};
type Instruction = ();
fn main() {
let (ProgramInput { pre_states, .. }, instruction_words) = read_nssa_inputs::<Instruction>();
let [pre] = match pre_states.try_into() {
Ok(array) => array,
Err(_) => return,
};
let account_pre = &pre.account;
let mut account_post = account_pre.clone();
account_post.nonce += 1;
write_nssa_outputs(
instruction_words,
vec![pre],
vec![AccountPostState::new(account_post)],
);
}

View File

@ -0,0 +1,13 @@
use nssa_core::program::{AccountPostState, ProgramInput, read_nssa_inputs, write_nssa_outputs};
type Instruction = ();
fn main() {
let (ProgramInput { pre_states, .. }, instruction_words) = read_nssa_inputs::<Instruction>();
let post_states = pre_states
.iter()
.map(|account| AccountPostState::new(account.account.clone()))
.collect();
write_nssa_outputs(instruction_words, pre_states, post_states);
}

View File

@ -1,4 +1,4 @@
use nssa_core::program::{read_nssa_inputs, write_nssa_outputs, AccountPostState, ProgramInput};
use nssa_core::program::{AccountPostState, ProgramInput, read_nssa_inputs, write_nssa_outputs};
type Instruction = ();
@ -14,5 +14,9 @@ fn main() {
let mut account_post = account_pre.clone();
account_post.program_owner = [0, 1, 2, 3, 4, 5, 6, 7];
write_nssa_outputs(instruction_words, vec![pre], vec![AccountPostState::new(account_post)]);
write_nssa_outputs(
instruction_words,
vec![pre],
vec![AccountPostState::new(account_post)],
);
}

View File

@ -4,33 +4,26 @@ version = "0.1.0"
edition = "2024"
[dependencies]
nssa_core.workspace = true
nssa.workspace = true
common.workspace = true
key_protocol.workspace = true
anyhow.workspace = true
serde_json.workspace = true
env_logger.workspace = true
log.workspace = true
serde.workspace = true
tokio.workspace = true
tempfile.workspace = true
clap.workspace = true
nssa-core = { path = "../nssa/core" }
base64.workspace = true
bytemuck = "1.24.0"
bytemuck.workspace = true
borsh.workspace = true
base58.workspace = true
hex = "0.4.3"
hex.workspace = true
rand.workspace = true
itertools.workspace = true
sha2.workspace = true
futures.workspace = true
async-stream = "0.3.6"
indicatif = { version = "0.18.3", features = ["improved_unicode"] }
risc0-zkvm = { version = "3.0.3", features = ['std'] }
[dependencies.key_protocol]
path = "../key_protocol"
[dependencies.nssa]
path = "../nssa"
[dependencies.common]
path = "../common"

Some files were not shown because too many files have changed in this diff