Merge branch 'main' into marvin/public_keys

jonesmarvin8 2026-02-10 09:19:45 -05:00
commit f891f57cab
143 changed files with 9066 additions and 4341 deletions

50
.deny.toml Normal file

@@ -0,0 +1,50 @@
# Config file reference can be found at https://embarkstudios.github.io/cargo-deny/checks/cfg.html.
[graph]
all-features = true
exclude-dev = true
no-default-features = true
[advisories]
ignore = [
{ id = "RUSTSEC-2023-0071", reason = "Marvin Attack: potential key recovery through timing sidechannels" },
{ id = "RUSTSEC-2024-0388", reason = "`derivative` is unmaintained; consider using an alternative. Use `cargo tree -p derivative -i > tmp.txt` to check the dependency tree." },
{ id = "RUSTSEC-2024-0436", reason = "`paste` has a security vulnerability; consider using an alternative. Use `cargo tree -p paste -i > tmp.txt` to check the dependency tree." },
{ id = "RUSTSEC-2025-0055", reason = "`tracing-subscriber` v0.2.25 pulled in by ark-relations v0.4.0 - will be addressed before mainnet" },
{ id = "RUSTSEC-2025-0141", reason = "`bincode` is unmaintained but continuing to use it." },
]
yanked = "deny"
unused-ignored-advisory = "deny"
[bans]
allow-wildcard-paths = false
multiple-versions = "allow"
[licenses]
allow = [
"Apache-2.0 WITH LLVM-exception",
"Apache-2.0",
"BSD-2-Clause",
"BSD-3-Clause",
"BSL-1.0",
"CC0-1.0",
"CDLA-Permissive-2.0",
"ISC",
"MIT",
"MPL-2.0",
"Unicode-3.0",
"Zlib",
]
private = { ignore = false }
unused-allowed-license = "deny"
[sources]
allow-git = [
"https://github.com/EspressoSystems/jellyfish.git",
"https://github.com/logos-blockchain/logos-blockchain.git",
]
unknown-git = "deny"
unknown-registry = "deny"
[sources.allow-org]
github = ["logos-co"]


@@ -0,0 +1,19 @@
name: Setup Logos Blockchain Circuits
description: Set up Logos Blockchain Circom Circuits, Rapidsnark prover and Rapidsnark verifier using the setup-logos-blockchain-circuits.sh script.
inputs:
github-token:
description: GitHub token for downloading releases
required: true
runs:
using: "composite"
steps:
- name: Setup logos-blockchain-circuits
shell: bash
working-directory: ${{ github.workspace }}
env:
GITHUB_TOKEN: ${{ inputs.github-token }}
run: |
curl -sSL https://raw.githubusercontent.com/logos-blockchain/logos-blockchain/main/scripts/setup-logos-blockchain-circuits.sh | bash


@@ -56,6 +56,19 @@ jobs:
- name: Check for unused dependencies
run: cargo machete
deny:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
with:
ref: ${{ github.head_ref }}
- name: Install cargo-deny
run: cargo install --locked cargo-deny
- name: Check licenses and advisories
run: cargo deny check
lint:
runs-on: ubuntu-latest
timeout-minutes: 60
@@ -70,6 +83,10 @@ jobs:
- uses: ./.github/actions/install-risc0
- uses: ./.github/actions/install-logos-blockchain-circuits
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Install active toolchain
run: rustup install
@@ -95,6 +112,10 @@ jobs:
- uses: ./.github/actions/install-risc0
- uses: ./.github/actions/install-logos-blockchain-circuits
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Install active toolchain
run: rustup install
@@ -119,6 +140,10 @@ jobs:
- uses: ./.github/actions/install-risc0
- uses: ./.github/actions/install-logos-blockchain-circuits
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
- name: Install active toolchain
run: rustup install

3
.gitignore vendored

@@ -7,4 +7,5 @@ data/
.vscode/
rocksdb
sequencer_runner/data/
storage.json
storage.json
result

682
Cargo.lock generated

File diff suppressed because it is too large


@@ -1,17 +1,26 @@
[workspace.package]
license = "MIT or Apache-2.0"
[workspace]
resolver = "3"
members = [
"integration_tests",
"sequencer_runner",
"storage",
"key_protocol",
"sequencer_rpc",
"mempool",
"wallet",
"sequencer_core",
"wallet-ffi",
"common",
"nssa",
"nssa/core",
"sequencer_core",
"sequencer_rpc",
"sequencer_runner",
"indexer_service",
"indexer_service/protocol",
"indexer_service/rpc",
"programs/token/core",
"programs/token",
"program_methods",
"program_methods/guest",
"test_program_methods",
@@ -20,6 +29,7 @@ members = [
"examples/program_deployment/methods",
"examples/program_deployment/methods/guest",
"bedrock_client",
"indexer_core",
]
[workspace.dependencies]
@@ -32,9 +42,16 @@ key_protocol = { path = "key_protocol" }
sequencer_core = { path = "sequencer_core" }
sequencer_rpc = { path = "sequencer_rpc" }
sequencer_runner = { path = "sequencer_runner" }
indexer_service = { path = "indexer_service" }
indexer_service_protocol = { path = "indexer_service/protocol" }
indexer_service_rpc = { path = "indexer_service/rpc" }
wallet = { path = "wallet" }
wallet-ffi = { path = "wallet-ffi" }
token_core = { path = "programs/token/core" }
token_program = { path = "programs/token" }
test_program_methods = { path = "test_program_methods" }
bedrock_client = { path = "bedrock_client" }
indexer_core = { path = "indexer_core" }
tokio = { version = "1.28.2", features = [
"net",
@@ -42,6 +59,7 @@ tokio = { version = "1.28.2", features = [
"sync",
"fs",
] }
tokio-util = "0.7.18"
risc0-zkvm = { version = "3.0.3", features = ['std'] }
risc0-build = "3.0.3"
anyhow = "1.0.98"
@@ -52,6 +70,7 @@ serde = { version = "1.0.60", default-features = false, features = ["derive"] }
serde_json = "1.0.81"
actix = "0.13.0"
actix-cors = "0.6.1"
jsonrpsee = "0.26.0"
futures = "0.3"
actix-rt = "*"
lazy_static = "1.5.0"
@@ -77,11 +96,14 @@ chrono = "0.4.41"
borsh = "1.5.7"
base58 = "0.2.0"
itertools = "0.14.0"
url = "2.5.4"
url = { version = "2.5.4", features = ["serde"] }
tokio-retry = "0.3.0"
schemars = "1.2.0"
logos-blockchain-common-http-client = { git = "https://github.com/logos-blockchain/logos-blockchain.git" }
logos-blockchain-key-management-system-service = { git = "https://github.com/logos-blockchain/logos-blockchain.git" }
logos-blockchain-core = { git = "https://github.com/logos-blockchain/logos-blockchain.git" }
logos-blockchain-chain-broadcast-service = { git = "https://github.com/logos-blockchain/logos-blockchain.git" }
rocksdb = { version = "0.24.0", default-features = false, features = [
"snappy",


@@ -0,0 +1,109 @@
# Automated Market Maker (AMM)
This tutorial covers the AMM program in LEZ. The AMM manages liquidity pools and enables swaps between custom tokens. By the end, you will have practiced:
1. Creating a liquidity pool for a token pair.
2. Swapping tokens.
3. Withdrawing liquidity from the pool.
4. Adding liquidity to the pool.
## 1. Creating a liquidity pool for a token pair
We start by creating a pool for the tokens created earlier. In return for providing liquidity, you receive liquidity provider (LP) tokens. LP tokens represent your share of the pool and are required to withdraw liquidity later.
> [!NOTE]
> The AMM does not currently charge swap fees or distribute rewards to liquidity providers. LP tokens therefore represent only a proportional share of the pool reserves. Fee support will be added in future versions.
### a. Create an LP holding account
```bash
wallet account new public
# Output:
Generated new account with account_id Public/FHgLW9jW4HXMV6egLWbwpTqVAGiCHw2vkg71KYSuimVf
```
### b. Initialize the pool
Deposit tokens A and B and specify the account that will receive LP tokens:
```bash
wallet amm new \
--user-holding-a Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw \
--user-holding-b Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6 \
--user-holding-lp Public/FHgLW9jW4HXMV6egLWbwpTqVAGiCHw2vkg71KYSuimVf \
--balance-a 100 \
--balance-b 200
```
> [!Important]
> The LP holding account is owned by the token program, so LP tokens are managed using the same token infrastructure as regular tokens.
```bash
wallet account get --account-id Public/FHgLW9jW4HXMV6egLWbwpTqVAGiCHw2vkg71KYSuimVf
# Output:
Holding account owned by token program
{"account_type":"Token holding","definition_id":"7BeDS3e28MA5Err7gBswmR1fUKdHXqmUpTefNPu3pJ9i","balance":100}
```
> [!Tip]
> If you inspect the `user-holding-a` and `user-holding-b` accounts, you will see that 100 and 200 tokens were deducted. Those tokens now reside in the pool and are available for swaps by any user.
## 2. Swapping
Use `wallet amm swap` to perform a token swap:
```bash
wallet amm swap \
--user-holding-a Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw \
--user-holding-b Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6 \
# The amount of tokens to swap
--amount-in 5 \
# The minimum number of tokens expected in return
--min-amount-out 8 \
# The definition ID of the token being provided to the swap
# In this case, we are swapping from TOKENA to TOKENB, and so this is the definition ID of TOKENA
--token-definition 4X9kAcnCZ1Ukkbm3nywW9xfCNPK8XaMWCk3zfs1sP4J7
```
Once executed, 5 tokens are deducted from the Token A holding account and the corresponding amount (computed by the pool's pricing function) is credited to the Token B holding account.
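The pricing function itself is not spelled out in this tutorial. As a rough sketch only, assuming a standard constant-product curve with no fees (consistent with the note above that the AMM currently charges no swap fees), the numbers for this swap work out as follows; the `swap_output` helper is illustrative and not part of the wallet or AMM code:
```rust
// Illustrative only: constant-product (x * y = k) output with no fees.
// The actual AMM pricing function may differ.
fn swap_output(reserve_in: u64, reserve_out: u64, amount_in: u64) -> u64 {
    let new_reserve_in = reserve_in + amount_in;
    // Keep the product constant: out = reserve_out - (reserve_in * reserve_out) / new_reserve_in
    reserve_out - (reserve_in * reserve_out) / new_reserve_in
}

fn main() {
    // Pool created above with 100 TOKENA and 200 TOKENB; we swap in 5 TOKENA.
    let out = swap_output(100, 200, 5);
    // 200 - 20000 / 105 = 200 - 190 = 10, which clears --min-amount-out 8.
    println!("expected TOKENB out: {out}");
}
```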
## 3. Withdrawing liquidity from the pool
Liquidity providers can withdraw assets by redeeming (burning) LP tokens. The amount received is proportional to the share of LP tokens redeemed relative to the total LP supply.
Use `wallet amm remove-liquidity`:
```bash
wallet amm remove-liquidity \
--user-holding-a Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw \
--user-holding-b Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6 \
--user-holding-lp Public/FHgLW9jW4HXMV6egLWbwpTqVAGiCHw2vkg71KYSuimVf \
--balance-lp 20 \
--min-amount-a 1 \
--min-amount-b 1
```
> [!Important]
> This burns `balance-lp` LP tokens from the user's LP holding account. In return, the AMM transfers tokens A and B from the pool vaults to the user's holding accounts, based on current reserves.
> The `min-amount-a` and `min-amount-b` parameters set the minimum acceptable outputs. If the computed amounts fall below either threshold, the instruction fails to protect against unfavorable pool changes.
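As a sketch of the proportional-share rule (illustrative only; the initial LP supply and the exact rounding behavior are implementation details not covered in this tutorial):
```rust
// Illustrative only: tokens returned when burning `lp_burned` out of `lp_supply`.
fn remove_liquidity(reserve_a: u64, reserve_b: u64, lp_supply: u64, lp_burned: u64) -> (u64, u64) {
    (
        reserve_a * lp_burned / lp_supply,
        reserve_b * lp_burned / lp_supply,
    )
}

fn main() {
    // Hypothetical figures: an LP supply of 100 and reserves of 105 A / 190 B.
    // Burning 20 LP tokens redeems a 20% share of each reserve.
    let (a, b) = remove_liquidity(105, 190, 100, 20);
    assert!(a >= 1 && b >= 1); // the --min-amount-a / --min-amount-b thresholds
    println!("received {a} TOKENA and {b} TOKENB");
}
```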
## 4. Adding liquidity to the pool
To add liquidity, deposit tokens A and B in the ratio implied by current pool reserves. In return, the AMM mints new LP tokens that represent your proportional share.
Use `wallet amm add-liquidity`:
```bash
wallet amm add-liquidity \
--user-holding-a Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw \
--user-holding-b Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6 \
--user-holding-lp Public/FHgLW9jW4HXMV6egLWbwpTqVAGiCHw2vkg71KYSuimVf \
--min-amount-lp 1 \
--max-amount-a 10 \
--max-amount-b 10
```
> [!Important]
> `max-amount-a` and `max-amount-b` cap how many tokens A and B can be taken from the user's accounts. The AMM computes the required amounts based on the pool's reserve ratio.
> `min-amount-lp` sets the minimum LP tokens to mint. If the computed LP amount falls below this threshold, the instruction fails.
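A sketch of how the deposit amounts and LP mint could be derived from those caps, again assuming simple pro-rata math rather than the program's exact implementation:
```rust
// Illustrative only: pick deposits matching the reserve ratio, capped by the
// user's maximums, and mint LP tokens pro rata. The real AMM logic may differ.
fn add_liquidity(
    reserve_a: u64,
    reserve_b: u64,
    lp_supply: u64,
    max_a: u64,
    max_b: u64,
) -> (u64, u64, u64) {
    // Token B needed to pair with max_a at the current reserve ratio.
    let need_b = max_a * reserve_b / reserve_a;
    let (use_a, use_b) = if need_b <= max_b {
        (max_a, need_b)
    } else {
        // Otherwise token B is the limiting side.
        (max_b * reserve_a / reserve_b, max_b)
    };
    let lp_minted = lp_supply * use_a / reserve_a;
    (use_a, use_b, lp_minted)
}

fn main() {
    // Hypothetical figures: reserves of 105 A / 190 B, LP supply 100, caps of 10 each.
    let (a, b, lp) = add_liquidity(105, 190, 100, 10, 10);
    assert!(lp >= 1); // the --min-amount-lp threshold
    println!("deposited {a} A and {b} B, minted {lp} LP");
}
```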


@@ -0,0 +1,159 @@
This tutorial focuses on custom tokens using the Token program. So far, you have used the authenticated-transfers program for native tokens; the Token program, by contrast, is for creating and managing custom tokens. By the end, you will have practiced:
1. Creating new tokens.
2. Transferring custom tokens.
> [!Important]
> The Token program is a single program that creates and manages all tokens, so you do not deploy a new program for each token.
> Token program accounts fall into two types:
> - Token definition accounts: store token metadata such as name and total supply. This account is the token's identifier.
> - Token holding accounts: store balances and the definition ID they belong to.
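For intuition, these two account types correspond to the JSON you will see from `wallet account get` later in this tutorial. A minimal sketch with hypothetical struct names (the actual on-chain layout used by the Token program is not shown here):
```rust
// Hypothetical shapes, inferred from the `wallet account get` output shown in
// this tutorial; the Token program's real account layout may differ.
struct TokenDefinition {
    name: String,      // e.g. "TOKENA"
    total_supply: u64, // e.g. 1337
}

struct TokenHolding {
    definition_id: String, // the token definition this balance belongs to
    balance: u64,
}

fn main() {
    let definition = TokenDefinition { name: "TOKENA".into(), total_supply: 1337 };
    let holding = TokenHolding { definition_id: "4X9kAc...P4J7".into(), balance: 1337 };
    println!("{} supply {}, held {}", definition.name, definition.total_supply, holding.balance);
}
```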
The CLI provides commands to execute the Token program. Run `wallet token` to see the options:
```bash
Commands:
new Produce a new token
send Send tokens from one account to another with variable privacy
help Print this message or the help of the given subcommand(s)
```
## 1. Creating new tokens
Use `wallet token new` to execute the `New` function of the Token program. The command expects:
- A token name.
- A total supply.
- Two uninitialized accounts:
- One for the token definition account.
- One for the token holding account that receives the initial supply.
### a. Public definition account and public supply account
1. Create two new public accounts:
```bash
wallet account new public
# Output:
Generated new account with account_id Public/4X9kAcnCZ1Ukkbm3nywW9xfCNPK8XaMWCk3zfs1sP4J7
```
```bash
wallet account new public
# Output:
Generated new account with account_id Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw
```
2. Create the token (Token A):
```bash
wallet token new \
--name TOKENA \
--total-supply 1337 \
--definition-account-id Public/4X9kAcnCZ1Ukkbm3nywW9xfCNPK8XaMWCk3zfs1sP4J7 \
--supply-account-id Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw
```
3. Inspect the initialized accounts:
```bash
wallet account get --account-id Public/4X9kAcnCZ1Ukkbm3nywW9xfCNPK8XaMWCk3zfs1sP4J7
# Output:
Definition account owned by token program
{"account_type":"Token definition","name":"TOKENA","total_supply":1337}
```
```bash
wallet account get --account-id Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw
# Output:
Holding account owned by token program
{"account_type":"Token holding","definition_id":"4X9kAcnCZ1Ukkbm3nywW9xfCNPK8XaMWCk3zfs1sP4J7","balance":1337}
```
### b. Public definition account and private supply account
1. Create fresh accounts for this example:
> [!Important]
> You cannot reuse the accounts from the previous example. Create new ones here.
```bash
wallet account new public
# Output:
Generated new account with account_id Public/GQ3C8rbprTtQUCvkuVBRu3v9wvUvjafCMFqoSPvTEVii
```
```bash
wallet account new private
# Output:
Generated new account with account_id Private/HMRHZdPw4pbyPVZHNGrV6K5AA95wACFsHTRST84fr3CF
With npk 6a2dfe433cf28e525aa0196d719be3c16146f7ee358ca39595323f94fde38f93
With ipk 03d59abf4bee974cc12ddb44641c19f0b5441fef39191f047c988c29a77252a577
```
2. Create the token (Token B):
```bash
wallet token new \
--name TOKENB \
--total-supply 7331 \
--definition-account-id Public/GQ3C8rbprTtQUCvkuVBRu3v9wvUvjafCMFqoSPvTEVii \
--supply-account-id Private/HMRHZdPw4pbyPVZHNGrV6K5AA95wACFsHTRST84fr3CF
```
3. Inspect the accounts:
```bash
wallet account get --account-id Public/GQ3C8rbprTtQUCvkuVBRu3v9wvUvjafCMFqoSPvTEVii
# Output:
Definition account owned by token program
{"account_type":"Token definition","name":"TOKENB","total_supply":7331}
```
```bash
wallet account get --account-id Private/HMRHZdPw4pbyPVZHNGrV6K5AA95wACFsHTRST84fr3CF
# Output:
Holding account owned by token program
{"account_type":"Token holding","definition_id":"GQ3C8rbprTtQUCvkuVBRu3v9wvUvjafCMFqoSPvTEVii","balance":7331}
```
> [!Important]
> As a private account, the supply account is visible only in your local wallet storage.
## 2. Custom token transfers
The Token program can move balances between token holding accounts. If the recipient account is uninitialized, the token program will automatically claim it. Use `wallet token send` to execute a transfer.
### a. Create a recipient account
```bash
wallet account new public
# Output:
Generated new account with account_id Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6
```
### b. Send 1000 TOKENB to the recipient
```bash
wallet token send \
--from Private/HMRHZdPw4pbyPVZHNGrV6K5AA95wACFsHTRST84fr3CF \
--to Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6 \
--amount 1000
```
### c. Inspect the recipient account
```bash
wallet account get --account-id Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6
# Output:
Holding account owned by token program
{"account_type":"Token holding","definition_id":"GQ3C8rbprTtQUCvkuVBRu3v9wvUvjafCMFqoSPvTEVii","balance":1000}
```


@@ -0,0 +1,250 @@
This tutorial walks through native token transfers between public and private accounts using the Authenticated-Transfers program. You will create and initialize accounts, fund them with the Pinata program, and run transfers across different privacy combinations. By the end, you will have practiced:
1. Public account creation and initialization.
2. Account funding through the Pinata program.
3. Native token transfers between public accounts.
4. Private account creation.
5. Native token transfer from a public account to a private account.
6. Native token transfer from a public account to a private account owned by someone else.
---
The CLI provides commands to manage accounts. Run `wallet account` to see the options available:
```bash
Commands:
get Get account data
new Produce new public or private account
sync-private Sync private accounts
help Print this message or the help of the given subcommand(s)
```
## 1. Public account creation and initialization
> [!Important]
> Public accounts live on-chain and are identified by a 32-byte Account ID. Running `wallet account new public` generates a fresh keypair for the signature scheme used in LEZ.
> The account ID is derived from the public key, and the private key signs transactions and authorizes program executions.
> The CLI can create both public and private accounts.
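A purely illustrative sketch of the ID derivation described above; the tutorial does not name the signature scheme or the exact derivation, so the hash and encoding below (SHA-256 plus base58 via the `sha2` and `bs58` crates) are assumptions chosen only to produce an ID of the same general shape:
```rust
use sha2::{Digest, Sha256};

// Purely illustrative: the tutorial only says the ID is "derived from the public
// key"; the actual signature scheme and derivation used in LEZ are not specified
// here. This sketch hashes a public key and base58-encodes the 32-byte digest,
// which gives an ID of the same general shape as the Public/... IDs above.
fn account_id_from_public_key(public_key: &[u8]) -> String {
    let digest = Sha256::digest(public_key);
    format!("Public/{}", bs58::encode(digest).into_string())
}

fn main() {
    let dummy_public_key = [0u8; 32]; // stand-in for a freshly generated key
    println!("{}", account_id_from_public_key(&dummy_public_key));
}
```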
### a. New public account creation
```bash
wallet account new public
# Output:
Generated new account with account_id Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ
```
> [!Tip]
> Save this account ID. You will use it in later commands.
### b. Account initialization
To query the account's current status, run:
```bash
# Replace the id with yours
wallet account get --account-id Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ
# Output:
Account is Uninitialized
```
In this example, we initialize the account for the authenticated-transfer program, which manages native token transfers and enforces authenticated debits.
1. Initialize the account:
```bash
# This command submits a public transaction executing the `init` function of the
# authenticated-transfer program. The wallet polls the sequencer until the
# transaction is included in a block, which may take several seconds.
wallet auth-transfer init --account-id Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ
```
2. Check the updated account status:
```bash
wallet account get --account-id Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ
# Output:
Account owned by authenticated-transfer program
{"balance":0}
```
> [!NOTE]
> New accounts start uninitialized, meaning no program owns them yet. Any program may claim an uninitialized account; once claimed, that program owns it.
> Owned accounts can only be modified through executions of the owning program. The only exception is native-token credits: any program may credit native tokens to any account.
> Debiting native tokens must always be performed by the owning program.
## 2. Account funding through the Piñata program
Now that the account is initialized under the authenticated-transfer program, fund it using the testnet Piñata program.
```bash
# Replace with your id
wallet pinata claim --to Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ
```
After the claim succeeds, the account is funded:
```bash
wallet account get --account-id Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ
# Output:
Account owned by authenticated-transfer program
{"balance":150}
```
## 3. Native token transfers between public accounts
LEZ includes a program for managing native tokens. Run `wallet auth-transfer` to see the available commands:
```bash
Commands:
init Initialize account under the authenticated-transfer program
send Send native tokens from one account to another with variable privacy
help Print this message or the help of the given subcommand(s)
```
We already used `init`. Now use `send` to execute a transfer.
### a. Create a recipient account
```bash
wallet account new public
# Output:
Generated new account with account_id Public/Ev1JprP9BmhbFVQyBcbznU8bAXcwrzwRoPTetXdQPAWS
```
> [!NOTE]
> The new account is uninitialized. The authenticated-transfer program will claim any uninitialized account used in a transfer, so manual initialization isn't required.
### b. Send 37 tokens to the new account
```bash
wallet auth-transfer send \
--from Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ \
--to Public/Ev1JprP9BmhbFVQyBcbznU8bAXcwrzwRoPTetXdQPAWS \
--amount 37
```
### c. Check both accounts
```bash
# Sender account (use your sender ID)
wallet account get --account-id Public/HrA8TVjBS8UVf9akV7LRhyh6k4c7F6PS7PvqgtPmKAT8
# Output:
Account owned by authenticated-transfer program
{"balance":113}
```
```bash
# Recipient account
wallet account get --account-id Public/Ev1JprP9BmhbFVQyBcbznU8bAXcwrzwRoPTetXdQPAWS
# Output:
Account owned by authenticated-transfer program
{"balance":37}
```
## 4. Private account creation
> [!Important]
> Private accounts are structurally identical to public accounts, but their values are stored off-chain. On-chain, only a 32-byte commitment is recorded.
> Transactions include encrypted private values so the owner can recover them, and the decryption keys are never shared.
> Private accounts use two keypairs: nullifier keys for privacy-preserving executions and viewing keys for encrypting and decrypting values.
> The private account ID is derived from the nullifier public key.
> Private accounts can be initialized by anyone, but once initialized they can only be modified by the owner's keys.
> Updates include a new commitment and a nullifier for the old state, which prevents linkage between versions.
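A minimal illustration of the commit-and-nullify pattern described above. The hash function, key derivation, and commitment scheme here are stand-ins, not the construction LEZ actually uses:
```rust
use sha2::{Digest, Sha256};

// Stand-in commitment: binds the private account state without revealing it.
fn commitment(account_state: &[u8], randomness: &[u8; 32]) -> [u8; 32] {
    Sha256::new()
        .chain_update(account_state)
        .chain_update(randomness)
        .finalize()
        .into()
}

// Stand-in nullifier: marks the previous version as spent. Without the secret
// key it cannot be linked back to the commitment it retires.
fn nullifier(nullifier_secret_key: &[u8; 32], old_commitment: &[u8; 32]) -> [u8; 32] {
    Sha256::new()
        .chain_update(nullifier_secret_key)
        .chain_update(old_commitment)
        .finalize()
        .into()
}

fn main() {
    let nsk = [7u8; 32]; // placeholder nullifier secret key
    let old = commitment(b"balance=0", &[1u8; 32]);
    // An update publishes the nullifier of the old state plus a fresh commitment.
    let spent = nullifier(&nsk, &old);
    let new = commitment(b"balance=17", &[2u8; 32]);
    println!("nullifier {:x?}..., new commitment {:x?}...", &spent[..4], &new[..4]);
}
```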
### a. Create a private account
```bash
wallet account new private
# Output:
Generated new account with account_id Private/HacPU3hakLYzWtSqUPw6TUr8fqoMieVWovsUR6sJf7cL
With npk e6366f79d026c8bd64ae6b3d601f0506832ec682ab54897f205fffe64ec0d951
With ipk 02ddc96d0eb56e00ce14994cfdaec5ae1f76244180a919545983156e3519940a17
```
> [!Tip]
> Focus on the account ID for now. The `npk` and `ipk` values are stored locally and used to build privacy-preserving transactions. The private account ID is derived from `npk`.
Just like public accounts, new private accounts start out uninitialized:
```bash
wallet account get --account-id Private/HacPU3hakLYzWtSqUPw6TUr8fqoMieVWovsUR6sJf7cL
# Output:
Account is Uninitialized
```
> [!Important]
> Private accounts are never visible to the network. They exist only in your local wallet storage.
## 5. Native token transfer from a public account to a private account
> [!Important]
> Sending tokens to an uninitialized private account causes the authenticated-transfer program to claim it, just like with public accounts. Program logic is the same regardless of account type.
### a. Send 17 tokens to the private account
> [!Note]
> The syntax matches public-to-public transfers, but the recipient is a private ID. This runs locally, generates a proof, and submits it to the sequencer. It may take 30 seconds to 4 minutes.
```bash
wallet auth-transfer send \
--from Public/Ev1JprP9BmhbFVQyBcbznU8bAXcwrzwRoPTetXdQPAWS \
--to Private/HacPU3hakLYzWtSqUPw6TUr8fqoMieVWovsUR6sJf7cL \
--amount 17
```
### b. Check both accounts
```bash
# Public sender account
wallet account get --account-id Public/Ev1JprP9BmhbFVQyBcbznU8bAXcwrzwRoPTetXdQPAWS
# Output:
Account owned by authenticated-transfer program
{"balance":20}
```
```bash
# Private recipient account
wallet account get --account-id Private/HacPU3hakLYzWtSqUPw6TUr8fqoMieVWovsUR6sJf7cL
# Output:
Account owned by authenticated-transfer program
{"balance":17}
```
> [!Note]
> The last command does not query the network. It works offline because private account data is stored locally. Other users cannot read your private balances.
> [!Caution]
> Private accounts can only be modified by their owner's keys. The exception is initialization: any user can initialize an uninitialized private account. This enables transfers to a private account owned by someone else, as long as that account is uninitialized.
## 6. Native token transfer from a public account to a private account owned by someone else
> [!Important]
> We'll simulate transferring to someone else by creating a new private account we own and treating it as if it belonged to another user.
### a. Create a new uninitialized private account
```bash
wallet account new private
# Output:
Generated new account with account_id Private/AukXPRBmrYVqoqEW2HTs7N3hvTn3qdNFDcxDHVr5hMm5
With npk 0c95ebc4b3830f53da77bb0b80a276a776cdcf6410932acc718dcdb3f788a00e
With ipk 039fd12a3674a880d3e917804129141e4170d419d1f9e28a3dcf979c1f2369cb72
```
> [!Tip]
> Ignore the private account ID here and use the `npk` and `ipk` values to send to a foreign private account.
```bash
wallet auth-transfer send \
--from Public/Ev1JprP9BmhbFVQyBcbznU8bAXcwrzwRoPTetXdQPAWS \
--to-npk 0c95ebc4b3830f53da77bb0b80a276a776cdcf6410932acc718dcdb3f788a00e \
--to-ipk 039fd12a3674a880d3e917804129141e4170d419d1f9e28a3dcf979c1f2369cb72 \
--amount 3
```
> [!Warning]
> This command creates a privacy-preserving transaction, which may take a few minutes. The updated values are encrypted and included in the transaction.
> Once accepted, the recipient must run `wallet account sync-private` to scan the chain for their encrypted updates and refresh local state.
> [!Note]
> You have seen transfers between two public accounts and from a public sender to a private recipient. Transfers from a private sender, whether to a public account or to another private account, follow the same pattern.


@@ -0,0 +1,26 @@
This repository includes a CLI for interacting with the Logos Blockchain. To install it, run the following command from the root of the repository:
```bash
cargo install --path wallet --force
```
To check that everything is working, run `wallet help`.
## Available Wallet Commands
| Command | Description |
|------------------------|-------------------------------------------------------------|
| `wallet auth-transfer` | Authenticated transfer (init, send) |
| `wallet chain-info` | Chain info queries (current-block-id, block, transaction) |
| `wallet account` | Account management (get, list, new, sync-private) |
| `wallet pinata` | Piñata faucet (claim) |
| `wallet token` | Token operations (new, send) |
| `wallet amm` | AMM operations (new, swap, add-liquidity, remove-liquidity) |
| `wallet check-health`  | Check that the wallet is connected to the node               |
| `wallet config`        | Config setup (get, set)                                      |
| `wallet restore-keys`  | Restore keys from a given password at a given `depth`        |
| `wallet deploy-program`| Program deployment |
| `wallet help` | Help |
Some completion scripts exist; see the [completions](./completions/README.md) folder.

740
README.md

@@ -1,70 +1,75 @@
# Nescience
# Logos Execution Zone (LEZ)
Logos Execution Zone (LEZ) is a programmable blockchain that cleanly separates public and private state while keeping them fully interoperable. Developers can build apps that operate across transparent and privacy-preserving accounts without changing their logic. Privacy is enforced by the protocol itself through zero-knowledge proofs (ZKPs), so it is always available and automatic.
Nescience State Separation Architecture (NSSA) is a programmable blockchain system that introduces a clean separation between public and private states, while keeping them fully interoperable. It lets developers build apps that can operate across both transparent and privacy-preserving accounts. Privacy is handled automatically by the protocol through zero-knowledge proofs (ZKPs). The result is a programmable blockchain where privacy comes built-in.
## Background
Typically, public blockchains maintain a fully transparent state, where the mapping from account IDs to account values is entirely visible. In NSSA, we introduce a parallel *private state*, a new layer of accounts that coexists with the public one. The public and private states can be viewed as a partition of the account ID space: accounts with public IDs are openly visible, while private accounts are accessible only to holders of the corresponding viewing keys. Consistency across both states is enforced through zero-knowledge proofs (ZKPs).
These features are provided by the Logos Execution Environment (LEE). Traditional public blockchains expose a fully transparent state: the mapping from account IDs to account values is entirely visible. LEE introduces a parallel *private state* that coexists with the public one. Together, public and private accounts form a partition of the account ID space: public IDs are visible on-chain, while private accounts are accessible only to holders of the corresponding viewing keys. Consistency across both states is enforced by ZKPs.
Public accounts are stored on-chain as a visible map from IDs to account states, and their values are updated in place. Private accounts are never stored on-chain in raw form. Each update produces a new commitment that binds the current value while keeping it hidden. Previous commitments remain on-chain, but a nullifier set marks old versions as spent, ensuring that only the most recent private state can be used in execution.
Public accounts are represented on-chain as a visible map from IDs to account states and are modified in-place when their values change. Private accounts, by contrast, are never stored in raw form on-chain. Each update creates a new commitment, which cryptographically binds the current value of the account while preserving privacy. Commitments of previous valid versions remain on-chain, but a nullifier set is maintained to mark old versions as spent, ensuring that only the most up-to-date version of each private account can be used in any execution.
### Programmability and selective privacy
Our goal is to enable full programmability within this hybrid model, matching the flexibility and composability of public blockchains. Developers write and deploy programs in NSSA just as they would on any other blockchain. Privacy, along with the ability to execute programs involving any combination of public and private accounts, is handled entirely at the protocol level and available out of the box for all programs. From the program's perspective, all accounts are indistinguishable. This abstraction allows developers to focus purely on business logic, while the system transparently enforces privacy and consistency guarantees.
LEZ aims to deliver full programmability in a hybrid public/private model, with the same flexibility and composability as public blockchains. Developers write and deploy programs in LEZ just as they would elsewhere. The protocol automatically supports executions that involve any combination of public and private accounts. From the program's perspective, all accounts look the same, and privacy is enforced transparently. This lets developers focus on business logic while the system guarantees privacy and correctness.
To the best of our knowledge, this approach is unique to Nescience. Other programmable blockchains with a focus on privacy typically adopt a developer-driven model for private execution, meaning that dApp logic must explicitly handle private inputs correctly. In contrast, Nescience handles privacy at the protocol level, so developers do not need to modify their programs—private and public accounts are treated uniformly, and privacy-preserving execution is available out of the box.
To our knowledge, this design is unique to LEZ. Other privacy-focused programmable blockchains often require developers to explicitly handle private inputs inside their app logic. In LEZ, privacy is protocol-level: programs do not change, accounts are treated uniformly, and private execution works out of the box.
### Example: creating and transferring tokens across states
---
## Example: Creating and transferring tokens across states
1. Token creation (public execution)
- Alice submits a transaction that executes the token program `New` function on-chain.
- A new public token definition account is created.
- The minted tokens are recorded on-chain in Alices public account.
1. Token creation (public execution):
- Alice submits a transaction to execute the token program `New` function on-chain.
- A new public token account is created, representing the token.
- The minted tokens are recorded on-chain and fully visible on Alice's public account.
2. Transfer from public to private (local / privacy-preserving execution)
- Alice executes the token program `Transfer` function locally, specifying Bob's private account as recipient.
- Alice runs the token program `Transfer` function locally, sending to Bob's private account.
- A ZKP of correct execution is generated.
- The proof is submitted to the blockchain, and validator nodes verify it.
- Alice's public account balance is modified accordingly.
- Bob's private account and balance remain hidden, while the transfer is provably valid.
- The proof is submitted to the blockchain and verified by validators.
- Alice's public balance is updated on-chain.
- Bob's private balance remains hidden, while the transfer is provably correct.
3. Transferring private to public (local / privacy-preserving execution)
- Bob executes the token program `Transfer` function locally, specifying Charlie's public account as recipient.
- Bob executes the token program `Transfer` function locally, sending to Charlie's public account.
- A ZKP of correct execution is generated.
- Bob's private account and balance still remain hidden.
- Charlie's public account is modified with the new tokens added.
4. Transferring public to public (public execution):
- Alice submits a transaction to execute the token program `Transfer` function on-chain, specifying Charlie's public account as recipient.
- The execution is handled on-chain without ZKPs involved.
- Alice's and Charlie's accounts are modified according to the transaction.
- Bob's private balance stays hidden.
- Charlie's public account is updated on-chain.
4. Transfer from public to public (public execution)
- Alice submits an on-chain transaction to run `Transfer`, sending to Charlie's public account.
- Execution is handled fully on-chain without ZKPs.
- Alice's and Charlie's public balances are updated.
#### Key points:
- The same token program is used in all executions.
- The difference lies in execution mode: public executions update visible accounts on-chain, while private executions rely on ZKPs.
- Validators only need to verify proofs for privacy-preserving transactions, keeping processing efficient.
### Key points:
- The same token program is used in every execution.
- The only difference is execution mode: public execution updates visible state on-chain, while private execution relies on ZKPs.
- Validators verify proofs only for privacy-preserving transactions, keeping processing efficient.
### The accounts model
---
To achieve both state separation and full programmability, NSSA adopts a stateless program model. Programs do not hold internal state. Instead, all persistent data resides in accounts explicitly passed to the program during execution. This design enables fine-grained control over access and visibility while maintaining composability across public and private states.
## The accounts model
To achieve both state separation and full programmability, LEZ uses a stateless program model. Programs hold no internal state. All persistent data is stored in accounts passed explicitly into each execution. This enables precise access control and visibility while preserving composability across public and private states.
### Execution types
Execution is divided into two fundamentally distinct types based on how they are processed: public execution, which is executed transparently on-chain, and private execution, which occurs off-chain. For private execution, the blockchain relies on ZKPs to verify the correctness of execution and ensure that all system invariants are preserved.
LEZ supports two execution types:
- Public execution runs transparently on-chain.
- Private execution runs off-chain and is verified on-chain with ZKPs.
Both public and private executions of the same program are enforced to use the same Risc0 VM bytecode. For public transactions, programs are executed directly on-chain like any standard RISC-V VM execution, without generating or verifying proofs. For privacy-preserving transactions, users generate Risc0 ZKPs of correct execution, and validator nodes only verify these proofs rather than re-executing the program. This design ensures that from a validator's perspective, public transactions are processed as quickly as any RISC-V-based VM, while verification of ZKPs keeps privacy-preserving transactions efficient as well. Additionally, the system naturally supports parallel execution similar to Solana, further increasing throughput. The main computational bottleneck for privacy-preserving transactions lies on the user side, in generating zk proofs.
Both public and private executions use the same Risc0 VM bytecode. Public transactions are executed directly on-chain like any standard RISC-V VM call, without proof generation. Private transactions are executed locally by users, who generate Risc0 proofs that validators verify instead of re-executing the program.
### Resources
- [IFT Research call](https://forum.vac.dev/t/ift-research-call-september-10th-2025-updates-on-the-development-of-nescience/566)
- [NSSA v0.2 specs](https://www.notion.so/NSSA-v0-2-specifications-2848f96fb65c800c9818e6f66d9be8f2)
- [Choice of VM/zkVM](https://www.notion.so/Conclusion-on-the-chosen-VM-and-zkVM-for-NSSA-2318f96fb65c806a810ed1300f56992d)
- [NSSA vs other privacy projects](https://www.notion.so/Privacy-projects-comparison-2688f96fb65c8096b694ecf7e4deca30)
- [NSSA state model](https://www.notion.so/Public-state-model-decision-2388f96fb65c80758b20c76de07b1fcc)
- [NSSA sequencer specs](https://www.notion.so/Sequencer-specs-2428f96fb65c802da2bfea7b0b214ecb)
- [NSSA sequencer code](https://www.notion.so/NSSA-sequencer-pseudocode-2508f96fb65c805e8859e047dffd6785)
- [NSSA Token program design](https://www.notion.so/Token-program-design-2538f96fb65c80a1b4bdc4fd9dd162d7)
- [NSSA cross program calls](https://www.notion.so/NSSA-cross-program-calls-Tail-call-model-proposal-extended-version-2838f96fb65c8096b3a2d390444193b6)
This design keeps public transactions as fast as any RISC-V-based VM and makes private transactions efficient for validators. It also supports parallel execution similar to Solana, improving throughput. The main computational cost for privacy-preserving transactions is on the user side, where ZK proofs are generated.
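To make the split concrete, here is a minimal sketch using the `risc0-zkvm` API pinned in this workspace. The `methods` crate and the `TRANSFER_ELF` / `TRANSFER_ID` constants are hypothetical placeholders for a guest program built with `risc0-build`:
```rust
use anyhow::Result;
use risc0_zkvm::{default_prover, ExecutorEnv};
// Hypothetical guest artifacts; risc0-build generates constants like these for
// each guest method in the workspace.
use methods::{TRANSFER_ELF, TRANSFER_ID};

fn main() -> Result<()> {
    // User side: execute the program locally and produce a proof of correct execution.
    let env = ExecutorEnv::builder()
        .write(&42u64)? // placeholder program input
        .build()?;
    let receipt = default_prover().prove(env, TRANSFER_ELF)?.receipt;

    // Validator side: verify the proof instead of re-executing the program.
    receipt.verify(TRANSFER_ID)?;
    Ok(())
}
```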
---
---
# Install dependencies
Install build dependencies
### Install build dependencies
- On Linux
Ubuntu / Debian
@@ -72,7 +77,7 @@ Ubuntu / Debian
apt install build-essential clang libclang-dev libssl-dev pkg-config
```
Fedora
- On Fedora
```sh
sudo dnf install clang clang-devel openssl-devel pkgconf
```
@@ -83,25 +88,26 @@ xcode-select --install
brew install pkg-config openssl
```
Install Rust
### Install Rust
```sh
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh
```
Install Risc0
### Install Risc0
```sh
curl -L https://risczero.com/install | bash
```
Then restart your shell and run
### Then restart your shell and run
```sh
rzup install
```
# Run tests
The NSSA repository includes both unit and integration test suites.
The LEZ repository includes both unit and integration test suites.
### Unit tests
@@ -119,635 +125,23 @@ cd integration_tests
RUST_LOG=info RISC0_DEV_MODE=1 cargo run $(pwd)/configs/debug all
```
# Run the sequencer
# Run the sequencer and node
The sequencer can be run locally:
The sequencer and node can be run locally:
```bash
cd sequencer_runner
RUST_LOG=info cargo run --release configs/debug
```
1. On one terminal go to the `logos-blockchain/logos-blockchain` repo and run a local logos blockchain node:
- `git checkout master; git pull`
- `cargo clean`
- `rm ~/.logos-blockchain-circuits`
- `./scripts/setup-logos-blockchain-circuits.sh`
- `cargo build --all-features`
- `./target/debug/logos-blockchain-node nodes/node/config-one-node.yaml`
If everything went well, you should see output similar to this:
```bash
[2025-11-13T19:50:29Z INFO sequencer_runner] Sequencer core set up
[2025-11-13T19:50:29Z INFO network] Starting http server at 0.0.0.0:3040
[2025-11-13T19:50:29Z INFO actix_server::builder] starting 8 workers
[2025-11-13T19:50:29Z INFO sequencer_runner] HTTP server started
[2025-11-13T19:50:29Z INFO sequencer_runner] Starting main sequencer loop
[2025-11-13T19:50:29Z INFO actix_server::server] Tokio runtime found; starting in existing Tokio runtime
[2025-11-13T19:50:29Z INFO actix_server::server] starting service: "actix-web-service-0.0.0.0:3040", workers: 8, listening on: 0.0.0.0:3040
[2025-11-13T19:50:39Z INFO sequencer_runner] Collecting transactions from mempool, block creation
[2025-11-13T19:50:39Z INFO sequencer_core] Created block with 0 transactions in 0 seconds
[2025-11-13T19:50:39Z INFO sequencer_runner] Block with id 2 created
[2025-11-13T19:50:39Z INFO sequencer_runner] Waiting for new transactions
```
2. On another terminal go to the `logos-blockchain/lssa` repo and run indexer service:
- `git checkout schouhy/full-bedrock-integration`
- `RUST_LOG=info cargo run --release -p indexer_service $(pwd)/integration_tests/configs/indexer/indexer_config.json`
# Try the Wallet CLI
## Install
This repository includes a CLI for interacting with the Nescience sequencer. To install it, run the following command from the root of the repository:
```bash
cargo install --path wallet --force
```
Run `wallet help` to check everything went well.
Some completion scripts exist; see the [completions](./completions/README.md) folder.
## Tutorial
This tutorial walks you through creating accounts and executing NSSA programs in both public and private contexts.
> [!NOTE]
> The NSSA state is split into two separate but interconnected components: the public state and the private state.
> The public state is an on-chain, publicly visible record of accounts indexed by their Account IDs
> The private state mirrors this, but the actual account values are stored locally by each account owner. On-chain, only a hidden commitment to each private account state is recorded. This allows the chain to enforce freshness (i.e., prevent the reuse of stale private states) while preserving privacy and unlinkability across executions and private accounts.
>
> Every piece of state in NSSA is stored in an account (public or private). Accounts are either uninitialized or are owned by a program, and programs can only modify the accounts they own.
>
> In NSSA, accounts can only be modified through program execution. A program is the sole mechanism that can change an account's value.
> Programs run publicly when all involved accounts are public, and privately when at least one private account participates.
### Health-check
Verify that the node is running and that the wallet can connect to it:
```bash
wallet check-health
```
You should see `✅ All looks good!`.
### The commands
The wallet provides several commands to interact with the node and query state. To see the full list, run `wallet help`:
```bash
Commands:
auth-transfer Authenticated transfer subcommand
chain-info Generic chain info subcommand
account Account view and sync subcommand
pinata Pinata program interaction subcommand
token Token program interaction subcommand
amm AMM program interaction subcommand
check-health Check the wallet can connect to the node and builtin local programs match the remote versions
```
### Accounts
> [!NOTE]
> Accounts are the basic unit of state in NSSA. They essentially hold native tokens and arbitrary data managed by some program.
The CLI provides commands to manage accounts. Run `wallet account` to see the options available:
```bash
Commands:
get Get account data
new Produce new public or private account
sync-private Sync private accounts
help Print this message or the help of the given subcommand(s)
```
#### Create a new public account
You can create both public and private accounts through the CLI. For example:
```bash
wallet account new public
# Output:
Generated new account with account_id Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ
```
This id is required when executing any program that interacts with the account.
> [!NOTE]
> Public accounts live on-chain and are identified by a 32-byte Account ID.
> Running `wallet account new public` generates a fresh keypair for the signature scheme used in NSSA.
> The account ID is derived from the public key. The private key is used to sign transactions and to authorize the account in program executions.
#### Account initialization
To query the account's current status, run:
```bash
# Replace the id with yours
wallet account get --account-id Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ
# Output:
Account is Uninitialized
```
> [!NOTE]
> New accounts begin in an uninitialized state, meaning they are not yet owned by any program. A program may claim an uninitialized account; once claimed, the account becomes owned by that program.
> Owned accounts can only be modified through executions of the owning program. The only exception is native-token credits: any program may credit native tokens to any account.
> However, debiting native tokens from an account must always be performed by its owning program.
In this example, we will initialize the account for the Authenticated transfer program, which securely manages native token transfers by requiring authentication for debits.
Initialize the account by running:
```bash
# This command submits a public transaction executing the `init` function of the
# Authenticated-transfer program. The wallet polls the sequencer until the
# transaction is included in a block, which may take several seconds.
wallet auth-transfer init --account-id Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ
```
After it completes, check the updated account status:
```bash
wallet account get --account-id Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ
# Output:
Account owned by authenticated transfer program
{"balance":0}
```
### Funding the account: executing the Piñata program
Now that we have a public account initialized by the authenticated transfer program, we need to fund it. For that, the testnet provides the Piñata program.
```bash
# Complete with your id
wallet pinata claim --to Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ
```
After the claim succeeds, the account will be funded with some tokens:
```bash
wallet account get --account-id Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ
# Output:
Account owned by authenticated transfer program
{"balance":150}
```
### Native token transfers: executing the Authenticated transfers program
NSSA comes with a program for managing and transferring native tokens. Run `wallet auth-transfer` to see the options available:
```bash
Commands:
init Initialize account under authenticated transfer program
send Send native tokens from one account to another with variable privacy
help Print this message or the help of the given subcommand(s)
```
We have already used the `init` command. The `send` command is used to execute the `Transfer` function of the authenticated-transfer program.
Let's try it. For that we need to create another account for the recipient of the transfer.
```bash
wallet account new public
# Output:
Generated new account with account_id Public/Ev1JprP9BmhbFVQyBcbznU8bAXcwrzwRoPTetXdQPAWS
```
> [!NOTE]
> The new account is uninitialized. The authenticated transfers program will claim any uninitialized account used in a transfer. So we don't need to manually initialize the recipient account.
Let's send 37 tokens to the new account.
```bash
wallet auth-transfer send \
--from Public/9ypzv6GGr3fwsgxY7EZezg5rz6zj52DPCkmf1vVujEiJ \
--to Public/Ev1JprP9BmhbFVQyBcbznU8bAXcwrzwRoPTetXdQPAWS \
--amount 37
```
Once that succeeds we can check the states.
```bash
# Sender account
wallet account get --account-id Public/HrA8TVjBS8UVf9akV7LRhyh6k4c7F6PS7PvqgtPmKAT8
# Output:
Account owned by authenticated transfer program
{"balance":113}
```
```bash
# Recipient account
wallet account get --account-id Public/Ev1JprP9BmhbFVQyBcbznU8bAXcwrzwRoPTetXdQPAWS
# Output:
Account owned by authenticated transfer program
{"balance":37}
```
#### Create a new private account
> [!NOTE]
> Private accounts are structurally identical to public accounts; they differ only in how their state is stored off-chain and represented on-chain.
> The raw values of a private account are never stored on-chain. Instead, the chain only holds a 32-byte commitment (a hash-like binding to the actual values). Transactions include encrypted versions of the private values so that users can recover them from the blockchain. The decryption keys are known only to the user and are never shared.
> Private accounts are not managed through the usual signature mechanism used for public accounts. Instead, each private account is associated with two keypairs:
> - *Nullifier keys*, for using the corresponding private account in privacy preserving executions.
> - *Viewing keys*, used for encrypting and decrypting the values included in transactions.
>
> Private accounts also have a 32-byte identifier, derived from the nullifier public key.
>
> Just like public accounts, private accounts can only be initialized once. Any user can initialize them without knowing the owner's secret keys. However, modifying an initialized private account through an off-chain program execution requires knowledge of the owner's secret keys.
>
> Transactions that modify the values of a private account include a commitment to the new values, which will be added to the on-chain commitment set. They also include a nullifier that marks the previous version as old.
> The nullifier is constructed so that it cannot be linked to any prior commitment, ensuring that updates to the same private account cannot be correlated.
Now lets switch to the private state and create a private account.
```bash
wallet account new private
# Output:
Generated new account with account_id Private/HacPU3hakLYzWtSqUPw6TUr8fqoMieVWovsUR6sJf7cL
With npk e6366f79d026c8bd64ae6b3d601f0506832ec682ab54897f205fffe64ec0d951
With ipk 02ddc96d0eb56e00ce14994cfdaec5ae1f76244180a919545983156e3519940a17
```
For now, focus only on the account id. Ignore the `npk` and `ipk` values. These are the Nullifier public key and the Viewing public key. They are stored locally in the wallet and are used internally to build privacy-preserving transactions.
Also, the account id for private accounts is derived from the `npk` value. But we won't need them now.
Just like public accounts, new private accounts start out uninitialized:
```bash
wallet account get --account-id Private/HacPU3hakLYzWtSqUPw6TUr8fqoMieVWovsUR6sJf7cL
# Output:
Account is Uninitialized
```
Unlike public accounts, private accounts are never visible to the network. They exist only in your local wallet storage.
#### Sending tokens from the public account to the private account
Sending tokens to an uninitialized private account causes the Authenticated-Transfers program to claim it, just like with public accounts.
This happens because program execution logic does not depend on whether the involved accounts are public or private.
Let's send 17 tokens to the new private account.
The syntax is identical to the public-to-public transfer; just set the private ID as the recipient.
This command will run the Authenticated-Transfer program locally, generate a proof, and submit it to the sequencer. Depending on your machine, this can take from 30 seconds to 4 minutes.
```bash
wallet auth-transfer send \
--from Public/Ev1JprP9BmhbFVQyBcbznU8bAXcwrzwRoPTetXdQPAWS \
--to Private/HacPU3hakLYzWtSqUPw6TUr8fqoMieVWovsUR6sJf7cL \
--amount 17
```
After it succeeds, check both accounts:
```bash
# Public sender account
wallet account get --account-id Public/Ev1JprP9BmhbFVQyBcbznU8bAXcwrzwRoPTetXdQPAWS
# Output:
Account owned by authenticated transfer program
{"balance":20}
```
```bash
# Private recipient account
wallet account get --account-id Private/HacPU3hakLYzWtSqUPw6TUr8fqoMieVWovsUR6sJf7cL
# Output:
Account owned by authenticated transfer program
{"balance":17}
```
> [!NOTE]
> The last command does not query the network.
> It works even offline because private account data lives only in your wallet storage. Other users cannot read your private balances.
#### Digression: modifying private accounts
As a general rule, private accounts can only be modified through a program execution performed by their owner, that is, the person who holds the private key for that account. There is one exception: an uninitialized private account may be initialized by any user, without requiring the private key. After initialization, only the owner can modify it.
This mechanism enables a common use case: transferring funds from any account (public or private) to a private account owned by someone else. For such transfers, the recipients private account must be uninitialized.
#### Sending tokens from the public account to a private account owned by someone else
For this tutorial, we'll simulate that scenario by creating a new private account that we own, but we'll treat it as if it belonged to someone else.
Let's create a new (uninitialized) private account like before:
```bash
wallet account new private
# Output:
Generated new account with account_id Private/AukXPRBmrYVqoqEW2HTs7N3hvTn3qdNFDcxDHVr5hMm5
With npk 0c95ebc4b3830f53da77bb0b80a276a776cdcf6410932acc718dcdb3f788a00e
With ipk 039fd12a3674a880d3e917804129141e4170d419d1f9e28a3dcf979c1f2369cb72
```
Now we'll ignore the private account ID and focus on the `npk` and `ipk` values. We'll need this to send tokens to a foreign private account. Syntax is very similar.
```bash
wallet auth-transfer send \
--from Public/Ev1JprP9BmhbFVQyBcbznU8bAXcwrzwRoPTetXdQPAWS \
--to-npk 0c95ebc4b3830f53da77bb0b80a276a776cdcf6410932acc718dcdb3f788a00e \
--to-ipk 039fd12a3674a880d3e917804129141e4170d419d1f9e28a3dcf979c1f2369cb72 \
--amount 3
```
The command above produces a privacy-preserving transaction, which may take a few minutes to complete. The updated values of the private account are encrypted and included in the transaction.
Once the transaction is accepted, the recipient must run `wallet account sync-private`. This command scans the chain for encrypted values that belong to their private accounts and updates the local versions accordingly.
#### Transfers in other combinations of public and private accounts
We've shown how to use the authenticated-transfers program for transfers between two public accounts, and for transfers from a public sender to a private recipient. Sending tokens from a private account (whether to a public account or to another private account) works in essentially the same way.
### The token program
So far, we've made transfers using the authenticated-transfers program, which handles native token transfers. The Token program, on the other hand, is used for creating and managing custom tokens.
> [!NOTE]
> The token program is a single program responsible for creating and managing all tokens. There is no need to deploy new programs to introduce new tokens. All token-related operations are performed by invoking the appropriate functions of the token program.
The CLI provides commands to execute the token program. To see the options available run `wallet token`:
```bash
Commands:
new Produce a new token
send Send tokens from one account to another with variable privacy
help Print this message or the help of the given subcommand(s)
```
> [!NOTE]
> The Token program manages its accounts in two categories. That is, all accounts owned by the Token program fall into one of these types.
> - Token definition accounts: these accounts store metadata about a token, such as its name, total supply, and other identifying properties. They act as the token's unique identifier.
> - Token holding accounts: these accounts hold actual token balances. In addition to the balance, they also record which token definition they belong to.
#### Creating a new token
To create a new token, simply run `wallet token new`. This will create a transaction to execute the `New` function of the token program.
The command expects a name, the desired total supply, and two uninitialized accounts:
- One that will be initialized as the token definition account for the new token.
- Another that will be initialized as a token holding account and receive the tokens entire initial supply.
##### New token with both definition and supply accounts set as public
For example, let's create two new (uninitialized) public accounts and then use them to create a new token.
```bash
wallet account new public
# Output:
Generated new account with account_id Public/4X9kAcnCZ1Ukkbm3nywW9xfCNPK8XaMWCk3zfs1sP4J7
```
```bash
wallet account new public
# Output:
Generated new account with account_id Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw
```
Now we use them to create a new token. Let's call it "Token A".
```bash
wallet token new \
--name TOKENA \
--total-supply 1337 \
--definition-account-id Public/4X9kAcnCZ1Ukkbm3nywW9xfCNPK8XaMWCk3zfs1sP4J7 \
--supply-account-id Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw
```
After it succeeds, we can inspect the two accounts to see how they were initialized.
```bash
wallet account get --account-id Public/4X9kAcnCZ1Ukkbm3nywW9xfCNPK8XaMWCk3zfs1sP4J7
# Output:
Definition account owned by token program
{"account_type":"Token definition","name":"TOKENA","total_supply":1337}
```
```bash
wallet account get --account-id Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw
# Output:
Holding account owned by token program
{"account_type":"Token holding","definition_id":"4X9kAcnCZ1Ukkbm3nywW9xfCNPK8XaMWCk3zfs1sP4J7","balance":1337}
```
##### New token with public account definition but private holding account for initial supply
Let's create a new token, but this time using a public definition account and a private holding account to store the entire supply.
Since we can't reuse the accounts from the previous example, we need to create fresh ones for this case.
```bash
wallet account new public
# Output:
Generated new account with account_id Public/GQ3C8rbprTtQUCvkuVBRu3v9wvUvjafCMFqoSPvTEVii
```
```bash
wallet account new private
# Output:
Generated new account with account_id Private/HMRHZdPw4pbyPVZHNGrV6K5AA95wACFsHTRST84fr3CF
With npk 6a2dfe433cf28e525aa0196d719be3c16146f7ee358ca39595323f94fde38f93
With ipk 03d59abf4bee974cc12ddb44641c19f0b5441fef39191f047c988c29a77252a577
```
Now we use them to create the token. Let's call it "Token B".
```bash
wallet token new \
--name TOKENB \
--total-supply 7331 \
--definition-account-id Public/GQ3C8rbprTtQUCvkuVBRu3v9wvUvjafCMFqoSPvTEVii \
--supply-account-id Private/HMRHZdPw4pbyPVZHNGrV6K5AA95wACFsHTRST84fr3CF
```
After it succeeds, we can check their values:
```bash
wallet account get --account-id Public/GQ3C8rbprTtQUCvkuVBRu3v9wvUvjafCMFqoSPvTEVii
# Output:
Definition account owned by token program
{"account_type":"Token definition","name":"TOKENB","total_supply":7331}
```
```bash
wallet account get --account-id Private/HMRHZdPw4pbyPVZHNGrV6K5AA95wACFsHTRST84fr3CF
# Output:
Holding account owned by token program
{"account_type":"Token holding","definition_id":"GQ3C8rbprTtQUCvkuVBRu3v9wvUvjafCMFqoSPvTEVii","balance":7331}
```
Like any other private account owned by us, it cannot be seen by other users.
#### Custom token transfers
The Token program has a transfer function to move funds from one token holding account to another. If the recipient account is uninitialized, it is automatically claimed by the token program and initialized as a holding account for the same token.
The transfer function can be executed with the `wallet token send` command.
Let's create a new public account for the recipient.
```bash
wallet account new public
# Output:
Generated new account with account_id Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6
```
Let's send 1000 Token B tokens to this new account, debiting them from the supply account used when the token was created.
```bash
wallet token send \
--from Private/HMRHZdPw4pbyPVZHNGrV6K5AA95wACFsHTRST84fr3CF \
--to Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6 \
--amount 1000
```
Let's inspect the public account:
```bash
wallet account get --account-id Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6
# Output:
Holding account owned by token program
{"account_type":"Token holding","definition_id":"GQ3C8rbprTtQUCvkuVBRu3v9wvUvjafCMFqoSPvTEVii","balance":1000}
```
### Chain information
The wallet provides some commands to query information about the chain. These are under the `wallet chain-info` command.
```bash
Commands:
current-block-id Get current block id from sequencer
block Get block at id from sequencer
transaction Get transaction at hash from sequencer
```
For example, run this to find the current block id.
```bash
wallet chain-info current-block-id
# Output:
Last block id is 65537
```
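The other two subcommands fetch a specific block or transaction. The sketch below is only a guess at the invocation; the flag names are assumptions, not confirmed by this guide, so check `wallet chain-info block --help` and `wallet chain-info transaction --help` for the actual options.
```bash
# Hypothetical invocations; the --block-id and --hash flags are assumptions
wallet chain-info block --block-id 65537
wallet chain-info transaction --hash <transaction-hash>
```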
### Automated Market Maker (AMM)
NSSA includes an AMM program that manages liquidity pools and enables swaps between custom tokens. To test this functionality, we first need to create a liquidity pool.
#### Creating a liquidity pool for a token pair
We start by creating a new pool for the tokens previously created. In return for providing liquidity, we will receive liquidity provider (LP) tokens, which represent our share of the pool and are required to withdraw liquidity later.
>[!NOTE]
> The AMM program does not currently charge swap fees or distribute rewards to liquidity providers. LP tokens therefore only represent a proportional share of the pool reserves and do not provide additional value from swap activity. Fee support for liquidity providers will be added in future versions of the AMM program.
To hold these LP tokens, we first create a new account:
```bash
wallet account new public
# Output:
Generated new account with account_id Public/FHgLW9jW4HXMV6egLWbwpTqVAGiCHw2vkg71KYSuimVf
```
Next, we initialize the liquidity pool by depositing tokens A and B and specifying the account that will receive the LP tokens:
```bash
wallet amm new \
--user-holding-a Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw \
--user-holding-b Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6 \
--user-holding-lp Public/FHgLW9jW4HXMV6egLWbwpTqVAGiCHw2vkg71KYSuimVf \
--balance-a 100 \
--balance-b 200
```
The newly created account is owned by the token program, meaning that LP tokens are managed by the same token infrastructure as regular tokens.
```bash
wallet account get --account-id Public/FHgLW9jW4HXMV6egLWbwpTqVAGiCHw2vkg71KYSuimVf
# Output:
Holding account owned by token program
{"account_type":"Token holding","definition_id":"7BeDS3e28MA5Err7gBswmR1fUKdHXqmUpTefNPu3pJ9i","balance":100}
```
If you inspect the `user-holding-a` and `user-holding-b` accounts passed to the `wallet amm new` command, you will see that 100 and 200 tokens were deducted, respectively. These tokens now reside in the liquidity pool and are available for swaps by any user.
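To verify, we can inspect both user holding accounts. The balances below are what we would expect given the earlier steps in this walkthrough:
```bash
wallet account get --account-id Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw
# Expected: Token A holding account with balance 1237 (1337 minus the 100 deposited)

wallet account get --account-id Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6
# Expected: Token B holding account with balance 800 (1000 minus the 200 deposited)
```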
#### Swapping
Token swaps can be performed using the `wallet amm swap` command:
```bash
# --amount-in: the amount of tokens to swap
# --min-amount-out: the minimum number of tokens expected in return
# --token-definition: the definition ID of the token being provided to the swap.
#   Here we swap from TOKENA to TOKENB, so this is TOKENA's definition ID.
wallet amm swap \
  --user-holding-a Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw \
  --user-holding-b Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6 \
  --amount-in 5 \
  --min-amount-out 8 \
  --token-definition 4X9kAcnCZ1Ukkbm3nywW9xfCNPK8XaMWCk3zfs1sP4J7
```
Once executed, 5 tokens are deducted from the Token A holding account and the corresponding amount (determined by the pool's pricing function) is credited to the Token B holding account.
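As a rough illustration only: assuming the pool uses a standard constant-product rule (reserve A × reserve B stays constant, which this guide does not confirm), swapping 5 Token A into a pool holding 100 A and 200 B would return about 200 − (100 × 200) / 105 ≈ 9.5 Token B, so `--min-amount-out 8` leaves some headroom against price movement.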
#### Withdrawing liquidity from the pool
Liquidity providers can withdraw assets from the pool by redeeming (burning) LP tokens. The amount of tokens received is proportional to the share of LP tokens being redeemed relative to the total LP supply.
This operation is performed using the `wallet amm remove-liquidity` command:
```bash
wallet amm remove-liquidity \
--user-holding-a Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw \
--user-holding-b Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6 \
--user-holding-lp Public/FHgLW9jW4HXMV6egLWbwpTqVAGiCHw2vkg71KYSuimVf \
--balance-lp 20 \
--min-amount-a 1 \
--min-amount-b 1
```
This instruction burns `balance-lp` LP tokens from the user's LP holding account. In exchange, the AMM transfers tokens A and B from the pool's vault accounts to the user's holding accounts, according to the current pool reserves.
The `min-amount-a` and `min-amount-b` parameters specify the minimum acceptable amounts of tokens A and B to be received. If the computed outputs fall below either threshold, the instruction fails, protecting the user against unfavorable pool state changes.
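For example, if the 100 LP tokens received at pool creation make up the entire LP supply (an assumption for illustration), burning 20 LP returns roughly 20% of each current reserve to the user's holding accounts.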
#### Adding liquidity to the pool
Additional liquidity can be added to an existing pool by depositing tokens A and B in the ratio implied by the current pool reserves. In return, new LP tokens are minted to represent the user's proportional share of the pool.
This is done using the `wallet amm add-liquidity` command:
```bash
wallet amm add-liquidity \
--user-holding-a Public/9RRSMm3w99uCD2Jp2Mqqf6dfc8me2tkFRE9HeU2DFftw \
--user-holding-b Public/88f2zeTgiv9LUthQwPJbrmufb9SiDfmpCs47B7vw6Gd6 \
--user-holding-lp Public/FHgLW9jW4HXMV6egLWbwpTqVAGiCHw2vkg71KYSuimVf \
--min-amount-lp 1 \
--max-amount-a 10 \
--max-amount-b 10
```
In this instruction, `max-amount-a` and `max-amount-b` define upper bounds on the number of tokens A and B that may be withdrawn from the user's accounts. The AMM computes the actual required amounts based on the pool's reserve ratio.
The `min-amount-lp` parameter specifies the minimum number of LP tokens that must be minted for the transaction to succeed. If the resulting LP token amount is below this threshold, the instruction fails.
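As an illustration only: if the reserves are still roughly in a 1:2 ratio of A to B (as they were right after pool creation), then with `--max-amount-a 10 --max-amount-b 10` the B side is the binding constraint, so the AMM would take about 5 A and 10 B and leave the rest untouched. The exact amounts depend on the reserve ratio at execution time.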
3. In another terminal, go to the `logos-blockchain/lssa` repo and run the sequencer:
- `git checkout schouhy/full-bedrock-integration`
- `RUST_LOG=info RISC0_DEV_MODE=1 cargo run --release -p sequencer_runner sequencer_runner/configs/debug`

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@ -2,9 +2,15 @@
name = "bedrock_client"
version = "0.1.0"
edition = "2024"
license = { workspace = true }
[dependencies]
reqwest.workspace = true
anyhow.workspace = true
tokio-retry.workspace = true
futures.workspace = true
log.workspace = true
serde.workspace = true
logos-blockchain-common-http-client.workspace = true
logos-blockchain-core.workspace = true
logos-blockchain-chain-broadcast-service.workspace = true

View File

@ -1,10 +1,24 @@
use anyhow::Result;
use futures::{Stream, TryFutureExt};
use log::warn;
pub use logos_blockchain_chain_broadcast_service::BlockInfo;
pub use logos_blockchain_common_http_client::{BasicAuthCredentials, CommonHttpClient, Error};
use logos_blockchain_core::mantle::SignedMantleTx;
pub use logos_blockchain_core::{block::Block, header::HeaderId, mantle::SignedMantleTx};
use reqwest::{Client, Url};
use serde::{Deserialize, Serialize};
use tokio_retry::Retry;
/// Fibonacci backoff retry strategy configuration
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BackoffConfig {
pub start_delay_millis: u64,
pub max_retries: usize,
}
// Simple wrapper
// maybe extend in the future for our purposes
// `Clone` is cheap because `CommonHttpClient` is internally reference counted (`Arc`).
#[derive(Clone)]
pub struct BedrockClient {
http_client: CommonHttpClient,
node_url: Url,
@ -29,4 +43,25 @@ impl BedrockClient {
.post_transaction(self.node_url.clone(), tx)
.await
}
pub async fn get_lib_stream(&self) -> Result<impl Stream<Item = BlockInfo>, Error> {
self.http_client.get_lib_stream(self.node_url.clone()).await
}
pub async fn get_block_by_id(
&self,
header_id: HeaderId,
backoff: &BackoffConfig,
) -> Result<Option<Block<SignedMantleTx>>, Error> {
let strategy =
tokio_retry::strategy::FibonacciBackoff::from_millis(backoff.start_delay_millis)
.take(backoff.max_retries);
Retry::spawn(strategy, || {
self.http_client
.get_block_by_id(self.node_url.clone(), header_id)
.inspect_err(|err| warn!("Block fetching failed with err: {err:#?}"))
})
.await
}
}

View File

@ -2,6 +2,7 @@
name = "common"
version = "0.1.0"
edition = "2024"
license = { workspace = true }
[dependencies]
nssa.workspace = true
@ -17,3 +18,5 @@ log.workspace = true
hex.workspace = true
borsh.workspace = true
base64.workspace = true
url.workspace = true
logos-blockchain-common-http-client.workspace = true

View File

@ -4,6 +4,7 @@ use sha2::{Digest, Sha256, digest::FixedOutput};
use crate::transaction::EncodedTransaction;
pub type HashType = [u8; 32];
pub type MantleMsgId = [u8; 32];
#[derive(Debug, Clone)]
/// Our own hasher.
@ -49,6 +50,7 @@ pub struct Block {
pub header: BlockHeader,
pub body: BlockBody,
pub bedrock_status: BedrockStatus,
pub bedrock_parent_id: MantleMsgId,
}
#[derive(Debug, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)]
@ -60,7 +62,11 @@ pub struct HashableBlockData {
}
impl HashableBlockData {
pub fn into_pending_block(self, signing_key: &nssa::PrivateKey) -> Block {
pub fn into_pending_block(
self,
signing_key: &nssa::PrivateKey,
bedrock_parent_id: MantleMsgId,
) -> Block {
let data_bytes = borsh::to_vec(&self).unwrap();
let signature = nssa::Signature::new(signing_key, &data_bytes);
let hash = OwnHasher::hash(&data_bytes);
@ -76,8 +82,13 @@ impl HashableBlockData {
transactions: self.transactions,
},
bedrock_status: BedrockStatus::Pending,
bedrock_parent_id,
}
}
pub fn block_hash(&self) -> BlockHash {
OwnHasher::hash(&borsh::to_vec(&self).unwrap())
}
}
impl From<Block> for HashableBlockData {

View File

@ -0,0 +1,6 @@
use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum Message {
L2BlockFinalized { l2_block_height: u64 },
}

View File

@ -0,0 +1 @@
pub mod indexer;

View File

@ -1,4 +1,5 @@
pub mod block;
pub mod communication;
pub mod error;
pub mod rpc_primitives;
pub mod sequencer_client;

View File

@ -73,6 +73,11 @@ pub struct GetProofForCommitmentRequest {
#[derive(Serialize, Deserialize, Debug)]
pub struct GetProgramIdsRequest {}
#[derive(Serialize, Deserialize, Debug)]
pub struct PostIndexerMessageRequest {
pub message: crate::communication::indexer::Message,
}
parse_request!(HelloRequest);
parse_request!(RegisterAccountRequest);
parse_request!(SendTxRequest);
@ -87,6 +92,7 @@ parse_request!(GetAccountsNoncesRequest);
parse_request!(GetProofForCommitmentRequest);
parse_request!(GetAccountRequest);
parse_request!(GetProgramIdsRequest);
parse_request!(PostIndexerMessageRequest);
#[derive(Serialize, Deserialize, Debug)]
pub struct HelloResponse {
@ -216,3 +222,8 @@ pub struct GetInitialTestnetAccountsResponse {
pub account_id: String,
pub balance: u64,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct PostIndexerMessageResponse {
pub status: String,
}

View File

@ -1,10 +1,12 @@
use std::{collections::HashMap, ops::RangeInclusive};
use std::{collections::HashMap, ops::RangeInclusive, str::FromStr};
use anyhow::Result;
use logos_blockchain_common_http_client::BasicAuthCredentials;
use nssa_core::program::ProgramId;
use reqwest::Client;
use serde::Deserialize;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use url::Url;
use super::rpc_primitives::requests::{
GetAccountBalanceRequest, GetAccountBalanceResponse, GetBlockDataRequest, GetBlockDataResponse,
@ -20,28 +22,75 @@ use crate::{
GetInitialTestnetAccountsResponse, GetLastBlockRequest, GetLastBlockResponse,
GetProgramIdsRequest, GetProgramIdsResponse, GetProofForCommitmentRequest,
GetProofForCommitmentResponse, GetTransactionByHashRequest,
GetTransactionByHashResponse, SendTxRequest, SendTxResponse,
GetTransactionByHashResponse, PostIndexerMessageRequest, PostIndexerMessageResponse,
SendTxRequest, SendTxResponse,
},
},
transaction::{EncodedTransaction, NSSATransaction},
};
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BasicAuth {
pub username: String,
pub password: Option<String>,
}
impl std::fmt::Display for BasicAuth {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.username)?;
if let Some(password) = &self.password {
write!(f, ":{password}")?;
}
Ok(())
}
}
impl FromStr for BasicAuth {
type Err = anyhow::Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let parse = || {
let mut parts = s.splitn(2, ':');
let username = parts.next()?;
let password = parts.next().filter(|p| !p.is_empty());
if parts.next().is_some() {
return None;
}
Some((username, password))
};
let (username, password) = parse().ok_or_else(|| {
anyhow::anyhow!("Invalid auth format. Expected 'user' or 'user:password'")
})?;
Ok(Self {
username: username.to_string(),
password: password.map(|p| p.to_string()),
})
}
}
impl From<BasicAuth> for BasicAuthCredentials {
fn from(value: BasicAuth) -> Self {
BasicAuthCredentials::new(value.username, value.password)
}
}
#[derive(Clone)]
pub struct SequencerClient {
pub client: reqwest::Client,
pub sequencer_addr: String,
pub basic_auth: Option<(String, Option<String>)>,
pub sequencer_addr: Url,
pub basic_auth: Option<BasicAuth>,
}
impl SequencerClient {
pub fn new(sequencer_addr: String) -> Result<Self> {
pub fn new(sequencer_addr: Url) -> Result<Self> {
Self::new_with_auth(sequencer_addr, None)
}
pub fn new_with_auth(
sequencer_addr: String,
basic_auth: Option<(String, Option<String>)>,
) -> Result<Self> {
pub fn new_with_auth(sequencer_addr: Url, basic_auth: Option<BasicAuth>) -> Result<Self> {
Ok(Self {
client: Client::builder()
// Add more fields if needed
@ -66,9 +115,9 @@ impl SequencerClient {
"Calling method {method} with payload {request:?} to sequencer at {}",
self.sequencer_addr
);
let mut call_builder = self.client.post(&self.sequencer_addr);
let mut call_builder = self.client.post(self.sequencer_addr.clone());
if let Some((username, password)) = &self.basic_auth {
if let Some(BasicAuth { username, password }) = &self.basic_auth {
call_builder = call_builder.basic_auth(username, password.as_deref());
}
@ -347,4 +396,23 @@ impl SequencerClient {
Ok(resp_deser)
}
/// Post an indexer message to the sequencer
pub async fn post_indexer_message(
&self,
message: crate::communication::indexer::Message,
) -> Result<PostIndexerMessageResponse, SequencerClientError> {
let last_req = PostIndexerMessageRequest { message };
let req = serde_json::to_value(last_req).unwrap();
let resp = self
.call_method_with_payload("post_indexer_message", req)
.await
.unwrap();
let resp_deser = serde_json::from_value(resp).unwrap();
Ok(resp_deser)
}
}

View File

@ -30,7 +30,7 @@ pub fn produce_dummy_block(
transactions,
};
block_data.into_pending_block(&sequencer_sign_key_for_testing())
block_data.into_pending_block(&sequencer_sign_key_for_testing(), [0; 32])
}
pub fn produce_dummy_empty_transaction() -> EncodedTransaction {

View File

@ -150,10 +150,11 @@ _wallet_account() {
subcommand)
subcommands=(
'get:Get account data'
'list:List all accounts'
'list:List all accounts owned by the wallet'
'ls:List all accounts (alias for list)'
'new:Produce new public or private account'
'sync-private:Sync private accounts'
'label:Set a label for an account'
'help:Print this message or the help of the given subcommand(s)'
)
_describe -t subcommands 'account subcommands' subcommands
@ -184,6 +185,11 @@ _wallet_account() {
;;
esac
;;
label)
_arguments \
'(-a --account-id)'{-a,--account-id}'[Account ID to label]:account_id:_wallet_account_ids' \
'(-l --label)'{-l,--label}'[The label to assign to the account]:label:'
;;
esac
;;
esac

View File

@ -2,6 +2,7 @@
name = "program_deployment"
version = "0.1.0"
edition = "2024"
license = { workspace = true }
[dependencies]
nssa.workspace = true

View File

@ -2,6 +2,7 @@
name = "example_program_deployment_methods"
version = "0.1.0"
edition = "2024"
license = { workspace = true }
[build-dependencies]
risc0-build.workspace = true

View File

@ -2,6 +2,7 @@
name = "example_program_deployment_programs"
version = "0.1.0"
edition = "2024"
license = { workspace = true }
[dependencies]
nssa_core.workspace = true

64
flake.lock generated Normal file
View File

@ -0,0 +1,64 @@
{
"nodes": {
"crane": {
"locked": {
"lastModified": 1769737823,
"narHash": "sha256-DrBaNpZ+sJ4stXm+0nBX7zqZT9t9P22zbk6m5YhQxS4=",
"owner": "ipetkov",
"repo": "crane",
"rev": "b2f45c3830aa96b7456a4c4bc327d04d7a43e1ba",
"type": "github"
},
"original": {
"owner": "ipetkov",
"repo": "crane",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1770019141,
"narHash": "sha256-VKS4ZLNx4PNrABoB0L8KUpc1fE7CLpQXQs985tGfaCU=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "cb369ef2efd432b3cdf8622b0ffc0a97a02f3137",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"root": {
"inputs": {
"crane": "crane",
"nixpkgs": "nixpkgs",
"rust-overlay": "rust-overlay"
}
},
"rust-overlay": {
"inputs": {
"nixpkgs": [
"nixpkgs"
]
},
"locked": {
"lastModified": 1770088046,
"narHash": "sha256-4hfYDnUTvL1qSSZEA4CEThxfz+KlwSFQ30Z9jgDguO0=",
"owner": "oxalica",
"repo": "rust-overlay",
"rev": "71f9daa4e05e49c434d08627e755495ae222bc34",
"type": "github"
},
"original": {
"owner": "oxalica",
"repo": "rust-overlay",
"type": "github"
}
}
},
"root": "root",
"version": 7
}

96
flake.nix Normal file
View File

@ -0,0 +1,96 @@
{
description = "Logos Execution Zone";
inputs = {
nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
rust-overlay = {
url = "github:oxalica/rust-overlay";
inputs.nixpkgs.follows = "nixpkgs";
};
crane.url = "github:ipetkov/crane";
};
outputs =
{
self,
nixpkgs,
rust-overlay,
crane,
...
}:
let
systems = [
"x86_64-linux"
"aarch64-linux"
"aarch64-darwin"
"x86_64-windows"
];
forAll = nixpkgs.lib.genAttrs systems;
mkPkgs =
system:
import nixpkgs {
inherit system;
overlays = [ rust-overlay.overlays.default ];
};
in
{
packages = forAll (
system:
let
pkgs = mkPkgs system;
rustToolchain = pkgs.rust-bin.stable.latest.default;
craneLib = (crane.mkLib pkgs).overrideToolchain rustToolchain;
src = ./.;
commonArgs = {
inherit src;
buildInputs = [ pkgs.openssl ];
nativeBuildInputs = [
pkgs.pkg-config
pkgs.clang
pkgs.llvmPackages.libclang.lib
];
LIBCLANG_PATH = "${pkgs.llvmPackages.libclang.lib}/lib";
};
walletFfiPackage = craneLib.buildPackage (
commonArgs
// {
pname = "logos-execution-zone-wallet-ffi";
version = "0.1.0";
cargoExtraArgs = "-p wallet-ffi";
postInstall = ''
mkdir -p $out/include
cp wallet-ffi/wallet_ffi.h $out/include/
''
+ pkgs.lib.optionalString pkgs.stdenv.isDarwin ''
install_name_tool -id @rpath/libwallet_ffi.dylib $out/lib/libwallet_ffi.dylib
'';
}
);
in
{
wallet = walletFfiPackage;
default = walletFfiPackage;
}
);
devShells = forAll (
system:
let
pkgs = mkPkgs system;
walletFfiPackage = self.packages.${system}.wallet;
walletFfiShell = pkgs.mkShell {
inputsFrom = [ walletFfiPackage ];
};
in
{
wallet = walletFfiShell;
default = walletFfiShell;
}
);
};
}

19
indexer_core/Cargo.toml Normal file
View File

@ -0,0 +1,19 @@
[package]
name = "indexer_core"
version = "0.1.0"
edition = "2024"
license = { workspace = true }
[dependencies]
common.workspace = true
bedrock_client.workspace = true
anyhow.workspace = true
log.workspace = true
serde.workspace = true
tokio.workspace = true
borsh.workspace = true
futures.workspace = true
url.workspace = true
logos-blockchain-core.workspace = true
serde_json.workspace = true

View File

@ -0,0 +1,36 @@
use std::{fs::File, io::BufReader, path::Path};
use anyhow::{Context, Result};
use bedrock_client::BackoffConfig;
use common::sequencer_client::BasicAuth;
use logos_blockchain_core::mantle::ops::channel::ChannelId;
use serde::{Deserialize, Serialize};
use url::Url;
#[derive(Debug, Clone, Serialize, Deserialize)]
/// ToDo: Expand if necessary
pub struct ClientConfig {
pub addr: Url,
pub auth: Option<BasicAuth>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
/// Note: For individual RPC requests we use Fibonacci backoff retry strategy
pub struct IndexerConfig {
pub resubscribe_interval_millis: u64,
pub backoff: BackoffConfig,
pub bedrock_client_config: ClientConfig,
pub sequencer_client_config: ClientConfig,
pub channel_id: ChannelId,
}
impl IndexerConfig {
pub fn from_path(config_home: &Path) -> Result<IndexerConfig> {
let file = File::open(config_home)
.with_context(|| format!("Failed to open indexer config at {config_home:?}"))?;
let reader = BufReader::new(file);
serde_json::from_reader(reader)
.with_context(|| format!("Failed to parse indexer config at {config_home:?}"))
}
}

124
indexer_core/src/lib.rs Normal file
View File

@ -0,0 +1,124 @@
use std::sync::Arc;
use anyhow::Result;
use bedrock_client::BedrockClient;
use common::{
block::HashableBlockData, communication::indexer::Message,
rpc_primitives::requests::PostIndexerMessageResponse, sequencer_client::SequencerClient,
};
use futures::StreamExt;
use log::info;
use logos_blockchain_core::mantle::{
Op, SignedMantleTx,
ops::channel::{ChannelId, inscribe::InscriptionOp},
};
use tokio::sync::RwLock;
use crate::{config::IndexerConfig, state::IndexerState};
pub mod config;
pub mod state;
pub struct IndexerCore {
pub bedrock_client: BedrockClient,
pub sequencer_client: SequencerClient,
pub config: IndexerConfig,
pub state: IndexerState,
}
impl IndexerCore {
pub fn new(config: IndexerConfig) -> Result<Self> {
Ok(Self {
bedrock_client: BedrockClient::new(
config.bedrock_client_config.auth.clone().map(Into::into),
config.bedrock_client_config.addr.clone(),
)?,
sequencer_client: SequencerClient::new_with_auth(
config.sequencer_client_config.addr.clone(),
config.sequencer_client_config.auth.clone(),
)?,
config,
// No state setup for now, future task.
state: IndexerState {
latest_seen_block: Arc::new(RwLock::new(0)),
},
})
}
pub async fn subscribe_parse_block_stream(&self) -> Result<()> {
loop {
let mut stream_pinned = Box::pin(self.bedrock_client.get_lib_stream().await?);
info!("Block stream joined");
while let Some(block_info) = stream_pinned.next().await {
let header_id = block_info.header_id;
info!("Observed L1 block at height {}", block_info.height);
if let Some(l1_block) = self
.bedrock_client
.get_block_by_id(header_id, &self.config.backoff)
.await?
{
info!("Extracted L1 block at height {}", block_info.height);
let l2_blocks_parsed = parse_blocks(
l1_block.into_transactions().into_iter(),
&self.config.channel_id,
);
for l2_block in l2_blocks_parsed {
// State modification, will be updated in future
{
let mut guard = self.state.latest_seen_block.write().await;
if l2_block.block_id > *guard {
*guard = l2_block.block_id;
}
}
// Sending data into sequencer, may need to be expanded.
let message = Message::L2BlockFinalized {
l2_block_height: l2_block.block_id,
};
let status = self.send_message_to_sequencer(message.clone()).await?;
info!("Sent message {message:#?} to sequencer; status {status:#?}");
}
}
}
// Refetch stream after delay
tokio::time::sleep(std::time::Duration::from_millis(
self.config.resubscribe_interval_millis,
))
.await;
}
}
pub async fn send_message_to_sequencer(
&self,
message: Message,
) -> Result<PostIndexerMessageResponse> {
Ok(self.sequencer_client.post_indexer_message(message).await?)
}
}
fn parse_blocks(
block_txs: impl Iterator<Item = SignedMantleTx>,
decoded_channel_id: &ChannelId,
) -> impl Iterator<Item = HashableBlockData> {
block_txs.flat_map(|tx| {
tx.mantle_tx.ops.into_iter().filter_map(|op| match op {
Op::ChannelInscribe(InscriptionOp {
channel_id,
inscription,
..
}) if channel_id == *decoded_channel_id => {
borsh::from_slice::<HashableBlockData>(&inscription).ok()
}
_ => None,
})
})
}

View File

@ -0,0 +1,9 @@
use std::sync::Arc;
use tokio::sync::RwLock;
#[derive(Debug, Clone)]
pub struct IndexerState {
// Only one field for now, for testing.
pub latest_seen_block: Arc<RwLock<u64>>,
}

View File

@ -0,0 +1,18 @@
[package]
name = "indexer_service"
version = "0.1.0"
edition = "2024"
license = { workspace = true }
[dependencies]
indexer_service_protocol.workspace = true
indexer_service_rpc = { workspace = true, features = ["server"] }
clap = { workspace = true, features = ["derive"] }
anyhow.workspace = true
tokio.workspace = true
tokio-util.workspace = true
env_logger.workspace = true
log.workspace = true
jsonrpsee.workspace = true
async-trait = "0.1.89"

View File

@ -0,0 +1,64 @@
# Chef stage - uses pre-built cargo-chef image
FROM lukemathwalker/cargo-chef:latest-rust-1.91.1-slim-trixie AS chef
# Install build dependencies
RUN apt-get update && apt-get install -y \
pkg-config \
libssl-dev \
libclang-dev \
clang \
curl \
&& rm -rf /var/lib/apt/lists/*
WORKDIR /indexer_service
# Planner stage - generates dependency recipe
FROM chef AS planner
COPY . .
RUN cargo chef prepare --bin indexer_service --recipe-path recipe.json
# Builder stage - builds dependencies and application
FROM chef AS builder
COPY --from=planner /indexer_service/recipe.json recipe.json
# Build dependencies only (this layer will be cached)
RUN cargo chef cook --bin indexer_service --release --recipe-path recipe.json
# Copy source code
COPY . .
# Build the actual application
RUN cargo build --release --bin indexer_service
# Strip debug symbols to reduce binary size
RUN strip /indexer_service/target/release/indexer_service
# Runtime stage - minimal image
FROM debian:trixie-slim
# Create non-root user for security
RUN useradd -m -u 1000 -s /bin/bash indexer_service_user
# Copy binary from builder
COPY --from=builder --chown=indexer_service_user:indexer_service_user /indexer_service/target/release/indexer_service /usr/local/bin/indexer_service
# Expose default port
EXPOSE 8779
# Health check
HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \
CMD curl http://localhost:8779 \
-H "Content-Type: application/json" \
-d "{ \
\"jsonrpc\": \"2.0\", \
\"method\": \"get_schema\", \
\"params\": {}, \
\"id\": 1 \
}" || exit 1
# Run the application
ENV RUST_LOG=info
USER indexer_service_user
WORKDIR /indexer_service
CMD ["indexer_service"]

View File

@ -0,0 +1,9 @@
services:
indexer_service:
image: lssa/indexer_service
build:
context: ..
dockerfile: indexer_service/Dockerfile
container_name: indexer_service
ports:
- "8779:8779"

View File

@ -0,0 +1,19 @@
[package]
name = "indexer_service_protocol"
version = "0.1.0"
edition = "2024"
license = { workspace = true }
[dependencies]
nssa_core = { workspace = true, optional = true, features = ["host"] }
nssa = { workspace = true, optional = true }
common = { workspace = true, optional = true }
serde = { workspace = true, features = ["derive"] }
schemars.workspace = true
base64.workspace = true
borsh = { workspace = true, optional = true }
[features]
# Enable conversion to/from NSSA core types
convert = ["dep:nssa_core", "dep:nssa", "dep:common", "dep:borsh"]

View File

@ -0,0 +1,652 @@
//! Conversions between indexer_service_protocol types and nssa/nssa_core types
use crate::*;
// ============================================================================
// Account-related conversions
// ============================================================================
impl From<nssa_core::account::AccountId> for AccountId {
fn from(value: nssa_core::account::AccountId) -> Self {
Self {
value: value.into_value(),
}
}
}
impl From<AccountId> for nssa_core::account::AccountId {
fn from(value: AccountId) -> Self {
let AccountId { value } = value;
nssa_core::account::AccountId::new(value)
}
}
impl From<nssa_core::account::Account> for Account {
fn from(value: nssa_core::account::Account) -> Self {
let nssa_core::account::Account {
program_owner,
balance,
data,
nonce,
} = value;
Self {
program_owner,
balance,
data: data.into(),
nonce,
}
}
}
impl TryFrom<Account> for nssa_core::account::Account {
type Error = nssa_core::account::data::DataTooBigError;
fn try_from(value: Account) -> Result<Self, Self::Error> {
let Account {
program_owner,
balance,
data,
nonce,
} = value;
Ok(nssa_core::account::Account {
program_owner,
balance,
data: data.try_into()?,
nonce,
})
}
}
impl From<nssa_core::account::Data> for Data {
fn from(value: nssa_core::account::Data) -> Self {
Self(value.into_inner())
}
}
impl TryFrom<Data> for nssa_core::account::Data {
type Error = nssa_core::account::data::DataTooBigError;
fn try_from(value: Data) -> Result<Self, Self::Error> {
nssa_core::account::Data::try_from(value.0)
}
}
// ============================================================================
// Commitment and Nullifier conversions
// ============================================================================
impl From<nssa_core::Commitment> for Commitment {
fn from(value: nssa_core::Commitment) -> Self {
Self(value.to_byte_array())
}
}
impl From<Commitment> for nssa_core::Commitment {
fn from(value: Commitment) -> Self {
nssa_core::Commitment::from_byte_array(value.0)
}
}
impl From<nssa_core::Nullifier> for Nullifier {
fn from(value: nssa_core::Nullifier) -> Self {
Self(value.to_byte_array())
}
}
impl From<Nullifier> for nssa_core::Nullifier {
fn from(value: Nullifier) -> Self {
nssa_core::Nullifier::from_byte_array(value.0)
}
}
impl From<nssa_core::CommitmentSetDigest> for CommitmentSetDigest {
fn from(value: nssa_core::CommitmentSetDigest) -> Self {
Self(value)
}
}
impl From<CommitmentSetDigest> for nssa_core::CommitmentSetDigest {
fn from(value: CommitmentSetDigest) -> Self {
value.0
}
}
// ============================================================================
// Encryption-related conversions
// ============================================================================
impl From<nssa_core::encryption::Ciphertext> for Ciphertext {
fn from(value: nssa_core::encryption::Ciphertext) -> Self {
Self(value.into_inner())
}
}
impl From<Ciphertext> for nssa_core::encryption::Ciphertext {
fn from(value: Ciphertext) -> Self {
nssa_core::encryption::Ciphertext::from_inner(value.0)
}
}
impl From<nssa_core::encryption::EphemeralPublicKey> for EphemeralPublicKey {
fn from(value: nssa_core::encryption::EphemeralPublicKey) -> Self {
Self(value.0)
}
}
impl From<EphemeralPublicKey> for nssa_core::encryption::EphemeralPublicKey {
fn from(value: EphemeralPublicKey) -> Self {
nssa_core::encryption::shared_key_derivation::Secp256k1Point(value.0)
}
}
// ============================================================================
// Signature and PublicKey conversions
// ============================================================================
impl From<nssa::Signature> for Signature {
fn from(value: nssa::Signature) -> Self {
let nssa::Signature { value } = value;
Self(value)
}
}
impl From<Signature> for nssa::Signature {
fn from(value: Signature) -> Self {
let Signature(sig_value) = value;
nssa::Signature { value: sig_value }
}
}
impl From<nssa::PublicKey> for PublicKey {
fn from(value: nssa::PublicKey) -> Self {
Self(*value.value())
}
}
impl TryFrom<PublicKey> for nssa::PublicKey {
type Error = nssa::error::NssaError;
fn try_from(value: PublicKey) -> Result<Self, Self::Error> {
nssa::PublicKey::try_new(value.0)
}
}
// ============================================================================
// Proof conversions
// ============================================================================
impl From<nssa::privacy_preserving_transaction::circuit::Proof> for Proof {
fn from(value: nssa::privacy_preserving_transaction::circuit::Proof) -> Self {
Self(value.into_inner())
}
}
impl From<Proof> for nssa::privacy_preserving_transaction::circuit::Proof {
fn from(value: Proof) -> Self {
nssa::privacy_preserving_transaction::circuit::Proof::from_inner(value.0)
}
}
// ============================================================================
// EncryptedAccountData conversions
// ============================================================================
impl From<nssa::privacy_preserving_transaction::message::EncryptedAccountData>
for EncryptedAccountData
{
fn from(value: nssa::privacy_preserving_transaction::message::EncryptedAccountData) -> Self {
Self {
ciphertext: value.ciphertext.into(),
epk: value.epk.into(),
view_tag: value.view_tag,
}
}
}
impl From<EncryptedAccountData>
for nssa::privacy_preserving_transaction::message::EncryptedAccountData
{
fn from(value: EncryptedAccountData) -> Self {
Self {
ciphertext: value.ciphertext.into(),
epk: value.epk.into(),
view_tag: value.view_tag,
}
}
}
// ============================================================================
// Transaction Message conversions
// ============================================================================
impl From<nssa::public_transaction::Message> for PublicMessage {
fn from(value: nssa::public_transaction::Message) -> Self {
let nssa::public_transaction::Message {
program_id,
account_ids,
nonces,
instruction_data,
} = value;
Self {
program_id,
account_ids: account_ids.into_iter().map(Into::into).collect(),
nonces,
instruction_data,
}
}
}
impl From<PublicMessage> for nssa::public_transaction::Message {
fn from(value: PublicMessage) -> Self {
let PublicMessage {
program_id,
account_ids,
nonces,
instruction_data,
} = value;
Self::new_preserialized(
program_id,
account_ids.into_iter().map(Into::into).collect(),
nonces,
instruction_data,
)
}
}
impl From<nssa::privacy_preserving_transaction::message::Message> for PrivacyPreservingMessage {
fn from(value: nssa::privacy_preserving_transaction::message::Message) -> Self {
let nssa::privacy_preserving_transaction::message::Message {
public_account_ids,
nonces,
public_post_states,
encrypted_private_post_states,
new_commitments,
new_nullifiers,
} = value;
Self {
public_account_ids: public_account_ids.into_iter().map(Into::into).collect(),
nonces,
public_post_states: public_post_states.into_iter().map(Into::into).collect(),
encrypted_private_post_states: encrypted_private_post_states
.into_iter()
.map(Into::into)
.collect(),
new_commitments: new_commitments.into_iter().map(Into::into).collect(),
new_nullifiers: new_nullifiers
.into_iter()
.map(|(n, d)| (n.into(), d.into()))
.collect(),
}
}
}
impl TryFrom<PrivacyPreservingMessage> for nssa::privacy_preserving_transaction::message::Message {
type Error = nssa_core::account::data::DataTooBigError;
fn try_from(value: PrivacyPreservingMessage) -> Result<Self, Self::Error> {
let PrivacyPreservingMessage {
public_account_ids,
nonces,
public_post_states,
encrypted_private_post_states,
new_commitments,
new_nullifiers,
} = value;
Ok(Self {
public_account_ids: public_account_ids.into_iter().map(Into::into).collect(),
nonces,
public_post_states: public_post_states
.into_iter()
.map(TryInto::try_into)
.collect::<Result<Vec<_>, _>>()?,
encrypted_private_post_states: encrypted_private_post_states
.into_iter()
.map(Into::into)
.collect(),
new_commitments: new_commitments.into_iter().map(Into::into).collect(),
new_nullifiers: new_nullifiers
.into_iter()
.map(|(n, d)| (n.into(), d.into()))
.collect(),
})
}
}
impl From<nssa::program_deployment_transaction::Message> for ProgramDeploymentMessage {
fn from(value: nssa::program_deployment_transaction::Message) -> Self {
Self {
bytecode: value.into_bytecode(),
}
}
}
impl From<ProgramDeploymentMessage> for nssa::program_deployment_transaction::Message {
fn from(value: ProgramDeploymentMessage) -> Self {
let ProgramDeploymentMessage { bytecode } = value;
Self::new(bytecode)
}
}
// ============================================================================
// WitnessSet conversions
// ============================================================================
impl TryFrom<nssa::public_transaction::WitnessSet> for WitnessSet {
type Error = ();
fn try_from(_value: nssa::public_transaction::WitnessSet) -> Result<Self, Self::Error> {
// Public transaction witness sets don't have proofs, so we can't convert them directly
Err(())
}
}
impl From<nssa::privacy_preserving_transaction::witness_set::WitnessSet> for WitnessSet {
fn from(value: nssa::privacy_preserving_transaction::witness_set::WitnessSet) -> Self {
let (sigs_and_pks, proof) = value.into_raw_parts();
Self {
signatures_and_public_keys: sigs_and_pks
.into_iter()
.map(|(sig, pk)| (sig.into(), pk.into()))
.collect(),
proof: proof.into(),
}
}
}
impl TryFrom<WitnessSet> for nssa::privacy_preserving_transaction::witness_set::WitnessSet {
type Error = nssa::error::NssaError;
fn try_from(value: WitnessSet) -> Result<Self, Self::Error> {
let WitnessSet {
signatures_and_public_keys,
proof,
} = value;
let signatures_and_public_keys = signatures_and_public_keys
.into_iter()
.map(|(sig, pk)| Ok((sig.into(), pk.try_into()?)))
.collect::<Result<Vec<_>, Self::Error>>()?;
Ok(Self::from_raw_parts(
signatures_and_public_keys,
proof.into(),
))
}
}
// ============================================================================
// Transaction conversions
// ============================================================================
impl From<nssa::PublicTransaction> for PublicTransaction {
fn from(value: nssa::PublicTransaction) -> Self {
Self {
message: value.message().clone().into(),
witness_set: WitnessSet {
signatures_and_public_keys: value
.witness_set()
.signatures_and_public_keys()
.iter()
.map(|(sig, pk)| (sig.clone().into(), pk.clone().into()))
.collect(),
proof: Proof(vec![]), // Public transactions don't have proofs
},
}
}
}
impl TryFrom<PublicTransaction> for nssa::PublicTransaction {
type Error = nssa::error::NssaError;
fn try_from(value: PublicTransaction) -> Result<Self, Self::Error> {
let PublicTransaction {
message,
witness_set,
} = value;
let WitnessSet {
signatures_and_public_keys,
proof: _,
} = witness_set;
Ok(Self::new(
message.into(),
nssa::public_transaction::WitnessSet::from_raw_parts(
signatures_and_public_keys
.into_iter()
.map(|(sig, pk)| Ok((sig.into(), pk.try_into()?)))
.collect::<Result<Vec<_>, Self::Error>>()?,
),
))
}
}
impl From<nssa::PrivacyPreservingTransaction> for PrivacyPreservingTransaction {
fn from(value: nssa::PrivacyPreservingTransaction) -> Self {
Self {
message: value.message().clone().into(),
witness_set: value.witness_set().clone().into(),
}
}
}
impl TryFrom<PrivacyPreservingTransaction> for nssa::PrivacyPreservingTransaction {
type Error = nssa::error::NssaError;
fn try_from(value: PrivacyPreservingTransaction) -> Result<Self, Self::Error> {
let PrivacyPreservingTransaction {
message,
witness_set,
} = value;
Ok(Self::new(
message.try_into().map_err(|_| {
nssa::error::NssaError::InvalidInput("Data too big error".to_string())
})?,
witness_set.try_into()?,
))
}
}
impl From<nssa::ProgramDeploymentTransaction> for ProgramDeploymentTransaction {
fn from(value: nssa::ProgramDeploymentTransaction) -> Self {
Self {
message: value.into_message().into(),
}
}
}
impl From<ProgramDeploymentTransaction> for nssa::ProgramDeploymentTransaction {
fn from(value: ProgramDeploymentTransaction) -> Self {
let ProgramDeploymentTransaction { message } = value;
Self::new(message.into())
}
}
impl From<common::transaction::NSSATransaction> for Transaction {
fn from(value: common::transaction::NSSATransaction) -> Self {
match value {
common::transaction::NSSATransaction::Public(tx) => Transaction::Public(tx.into()),
common::transaction::NSSATransaction::PrivacyPreserving(tx) => {
Transaction::PrivacyPreserving(tx.into())
}
common::transaction::NSSATransaction::ProgramDeployment(tx) => {
Transaction::ProgramDeployment(tx.into())
}
}
}
}
impl TryFrom<Transaction> for common::transaction::NSSATransaction {
type Error = nssa::error::NssaError;
fn try_from(value: Transaction) -> Result<Self, Self::Error> {
match value {
Transaction::Public(tx) => {
Ok(common::transaction::NSSATransaction::Public(tx.try_into()?))
}
Transaction::PrivacyPreserving(tx) => Ok(
common::transaction::NSSATransaction::PrivacyPreserving(tx.try_into()?),
),
Transaction::ProgramDeployment(tx) => Ok(
common::transaction::NSSATransaction::ProgramDeployment(tx.into()),
),
}
}
}
// ============================================================================
// Block conversions
// ============================================================================
impl From<common::block::BlockHeader> for BlockHeader {
fn from(value: common::block::BlockHeader) -> Self {
let common::block::BlockHeader {
block_id,
prev_block_hash,
hash,
timestamp,
signature,
} = value;
Self {
block_id,
prev_block_hash: Hash(prev_block_hash),
hash: Hash(hash),
timestamp,
signature: signature.into(),
}
}
}
impl TryFrom<BlockHeader> for common::block::BlockHeader {
type Error = nssa::error::NssaError;
fn try_from(value: BlockHeader) -> Result<Self, Self::Error> {
let BlockHeader {
block_id,
prev_block_hash,
hash,
timestamp,
signature,
} = value;
Ok(Self {
block_id,
prev_block_hash: prev_block_hash.0,
hash: hash.0,
timestamp,
signature: signature.into(),
})
}
}
impl TryFrom<common::block::BlockBody> for BlockBody {
type Error = std::io::Error;
fn try_from(value: common::block::BlockBody) -> Result<Self, Self::Error> {
// Note: EncodedTransaction doesn't have a direct conversion to NSSATransaction
// This conversion will decode and re-encode the transactions
use borsh::BorshDeserialize as _;
let common::block::BlockBody { transactions } = value;
let transactions = transactions
.into_iter()
.map(|encoded_tx| match encoded_tx.tx_kind {
common::transaction::TxKind::Public => {
nssa::PublicTransaction::try_from_slice(&encoded_tx.encoded_transaction_data)
.map(|tx| Transaction::Public(tx.into()))
}
common::transaction::TxKind::PrivacyPreserving => {
nssa::PrivacyPreservingTransaction::try_from_slice(
&encoded_tx.encoded_transaction_data,
)
.map(|tx| Transaction::PrivacyPreserving(tx.into()))
}
common::transaction::TxKind::ProgramDeployment => {
nssa::ProgramDeploymentTransaction::try_from_slice(
&encoded_tx.encoded_transaction_data,
)
.map(|tx| Transaction::ProgramDeployment(tx.into()))
}
})
.collect::<Result<Vec<_>, _>>()?;
Ok(Self { transactions })
}
}
impl TryFrom<BlockBody> for common::block::BlockBody {
type Error = nssa::error::NssaError;
fn try_from(value: BlockBody) -> Result<Self, Self::Error> {
let BlockBody { transactions } = value;
let transactions = transactions
.into_iter()
.map(|tx| {
let nssa_tx: common::transaction::NSSATransaction = tx.try_into()?;
Ok::<_, nssa::error::NssaError>(nssa_tx.into())
})
.collect::<Result<Vec<_>, _>>()?;
Ok(Self { transactions })
}
}
impl TryFrom<common::block::Block> for Block {
type Error = std::io::Error;
fn try_from(value: common::block::Block) -> Result<Self, Self::Error> {
let common::block::Block {
header,
body,
bedrock_status,
bedrock_parent_id,
} = value;
Ok(Self {
header: header.into(),
body: body.try_into()?,
bedrock_status: bedrock_status.into(),
bedrock_parent_id: MantleMsgId(bedrock_parent_id),
})
}
}
impl TryFrom<Block> for common::block::Block {
type Error = nssa::error::NssaError;
fn try_from(value: Block) -> Result<Self, Self::Error> {
let Block {
header,
body,
bedrock_status,
bedrock_parent_id,
} = value;
Ok(Self {
header: header.try_into()?,
body: body.try_into()?,
bedrock_status: bedrock_status.into(),
bedrock_parent_id: bedrock_parent_id.0,
})
}
}
impl From<common::block::BedrockStatus> for BedrockStatus {
fn from(value: common::block::BedrockStatus) -> Self {
match value {
common::block::BedrockStatus::Pending => Self::Pending,
common::block::BedrockStatus::Safe => Self::Safe,
common::block::BedrockStatus::Finalized => Self::Finalized,
}
}
}
impl From<BedrockStatus> for common::block::BedrockStatus {
fn from(value: BedrockStatus) -> Self {
match value {
BedrockStatus::Pending => Self::Pending,
BedrockStatus::Safe => Self::Safe,
BedrockStatus::Finalized => Self::Finalized,
}
}
}

View File

@ -0,0 +1,238 @@
//! This crate defines the protocol types used by the indexer service.
//!
//! Currently it mostly mimics types from `nssa_core`, but it's important to have a separate crate
//! to define a stable interface for the indexer service RPCs which evolves in its own way.
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
#[cfg(feature = "convert")]
mod convert;
pub type Nonce = u128;
pub type ProgramId = [u32; 8];
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct AccountId {
#[serde(with = "base64::arr")]
#[schemars(with = "String", description = "base64-encoded account ID")]
pub value: [u8; 32],
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct Account {
pub program_owner: ProgramId,
pub balance: u128,
pub data: Data,
pub nonce: Nonce,
}
pub type BlockId = u64;
pub type TimeStamp = u64;
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct Block {
pub header: BlockHeader,
pub body: BlockBody,
pub bedrock_status: BedrockStatus,
pub bedrock_parent_id: MantleMsgId,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct BlockHeader {
pub block_id: BlockId,
pub prev_block_hash: Hash,
pub hash: Hash,
pub timestamp: TimeStamp,
pub signature: Signature,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct Signature(
#[serde(with = "base64::arr")]
#[schemars(with = "String", description = "base64-encoded signature")]
pub [u8; 64],
);
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct BlockBody {
pub transactions: Vec<Transaction>,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub enum Transaction {
Public(PublicTransaction),
PrivacyPreserving(PrivacyPreservingTransaction),
ProgramDeployment(ProgramDeploymentTransaction),
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct PublicTransaction {
pub message: PublicMessage,
pub witness_set: WitnessSet,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct PrivacyPreservingTransaction {
pub message: PrivacyPreservingMessage,
pub witness_set: WitnessSet,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct PublicMessage {
pub program_id: ProgramId,
pub account_ids: Vec<AccountId>,
pub nonces: Vec<Nonce>,
pub instruction_data: InstructionData,
}
pub type InstructionData = Vec<u32>;
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct PrivacyPreservingMessage {
pub public_account_ids: Vec<AccountId>,
pub nonces: Vec<Nonce>,
pub public_post_states: Vec<Account>,
pub encrypted_private_post_states: Vec<EncryptedAccountData>,
pub new_commitments: Vec<Commitment>,
pub new_nullifiers: Vec<(Nullifier, CommitmentSetDigest)>,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct WitnessSet {
pub signatures_and_public_keys: Vec<(Signature, PublicKey)>,
pub proof: Proof,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct Proof(
#[serde(with = "base64")]
#[schemars(with = "String", description = "base64-encoded proof")]
pub Vec<u8>,
);
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct EncryptedAccountData {
pub ciphertext: Ciphertext,
pub epk: EphemeralPublicKey,
pub view_tag: ViewTag,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct ProgramDeploymentTransaction {
pub message: ProgramDeploymentMessage,
}
pub type ViewTag = u8;
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct Ciphertext(
#[serde(with = "base64")]
#[schemars(with = "String", description = "base64-encoded ciphertext")]
pub Vec<u8>,
);
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct PublicKey(
#[serde(with = "base64::arr")]
#[schemars(with = "String", description = "base64-encoded public key")]
pub [u8; 32],
);
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct EphemeralPublicKey(
#[serde(with = "base64")]
#[schemars(with = "String", description = "base64-encoded ephemeral public key")]
pub Vec<u8>,
);
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct Commitment(
#[serde(with = "base64::arr")]
#[schemars(with = "String", description = "base64-encoded commitment")]
pub [u8; 32],
);
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct Nullifier(
#[serde(with = "base64::arr")]
#[schemars(with = "String", description = "base64-encoded nullifier")]
pub [u8; 32],
);
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct CommitmentSetDigest(
#[serde(with = "base64::arr")]
#[schemars(with = "String", description = "base64-encoded commitment set digest")]
pub [u8; 32],
);
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct ProgramDeploymentMessage {
#[serde(with = "base64")]
#[schemars(with = "String", description = "base64-encoded program bytecode")]
pub bytecode: Vec<u8>,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct Data(
#[serde(with = "base64")]
#[schemars(with = "String", description = "base64-encoded account data")]
pub Vec<u8>,
);
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct Hash(
#[serde(with = "base64::arr")]
#[schemars(with = "String", description = "base64-encoded hash")]
pub [u8; 32],
);
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct MantleMsgId(
#[serde(with = "base64::arr")]
#[schemars(with = "String", description = "base64-encoded Bedrock message id")]
pub [u8; 32],
);
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub enum BedrockStatus {
Pending,
Safe,
Finalized,
}
mod base64 {
use base64::prelude::{BASE64_STANDARD, Engine as _};
use serde::{Deserialize, Deserializer, Serialize, Serializer};
pub mod arr {
use super::*;
pub fn serialize<S: Serializer>(v: &[u8], s: S) -> Result<S::Ok, S::Error> {
super::serialize(v, s)
}
pub fn deserialize<'de, const N: usize, D: Deserializer<'de>>(
d: D,
) -> Result<[u8; N], D::Error> {
let vec = super::deserialize(d)?;
vec.try_into().map_err(|_| {
serde::de::Error::custom(format!("Invalid length, expected {N} bytes"))
})
}
}
pub fn serialize<S: Serializer>(v: &[u8], s: S) -> Result<S::Ok, S::Error> {
let base64 = BASE64_STANDARD.encode(v);
String::serialize(&base64, s)
}
pub fn deserialize<'de, D: Deserializer<'de>>(d: D) -> Result<Vec<u8>, D::Error> {
let base64 = String::deserialize(d)?;
BASE64_STANDARD
.decode(base64.as_bytes())
.map_err(serde::de::Error::custom)
}
}

View File

@ -0,0 +1,16 @@
[package]
name = "indexer_service_rpc"
version = "0.1.0"
edition = "2024"
license = { workspace = true }
[dependencies]
indexer_service_protocol = { workspace = true }
jsonrpsee = { workspace = true, features = ["macros"] }
serde_json.workspace = true
schemars.workspace = true
[features]
client = ["jsonrpsee/client"]
server = ["jsonrpsee/server"]

View File

@ -0,0 +1,40 @@
use indexer_service_protocol::{Account, AccountId, Block, BlockId, Hash, Transaction};
use jsonrpsee::{core::SubscriptionResult, proc_macros::rpc, types::ErrorObjectOwned};
#[cfg(all(not(feature = "server"), not(feature = "client")))]
compile_error!("At least one of `server` or `client` features must be enabled.");
#[cfg_attr(feature = "server", rpc(server))]
#[cfg_attr(feature = "client", rpc(client))]
pub trait Rpc {
#[method(name = "get_schema")]
fn get_schema(&self) -> Result<serde_json::Value, ErrorObjectOwned> {
// TODO: Canonical solution would be to provide `describe` method returning OpenRPC spec,
// But for now it's painful to implement, although can be done if really needed.
// Currently we can wait until we can auto-generate it: https://github.com/paritytech/jsonrpsee/issues/737
// and just return JSON schema.
// Block schema contains all other types used in the protocol, so it's sufficient to return
// its schema.
let block_schema = schemars::schema_for!(Block);
Ok(serde_json::to_value(block_schema).expect("Schema serialization should not fail"))
}
#[subscription(name = "subscribeToBlocks", item = Vec<Block>)]
async fn subscribe_to_blocks(&self, from: BlockId) -> SubscriptionResult;
#[method(name = "getBlockById")]
async fn get_block_by_id(&self, block_id: BlockId) -> Result<Block, ErrorObjectOwned>;
#[method(name = "getBlockByHash")]
async fn get_block_by_hash(&self, block_hash: Hash) -> Result<Block, ErrorObjectOwned>;
#[method(name = "getLastBlockId")]
async fn get_last_block_id(&self) -> Result<BlockId, ErrorObjectOwned>;
#[method(name = "getAccount")]
async fn get_account(&self, account_id: AccountId) -> Result<Account, ErrorObjectOwned>;
#[method(name = "getTransaction")]
async fn get_transaction(&self, tx_hash: Hash) -> Result<Transaction, ErrorObjectOwned>;
}

View File

@ -0,0 +1 @@
pub mod service;

View File

@ -0,0 +1,72 @@
use std::net::SocketAddr;
use anyhow::{Context as _, Result};
use clap::Parser;
use indexer_service_rpc::RpcServer as _;
use jsonrpsee::server::Server;
use log::{error, info};
use tokio_util::sync::CancellationToken;
#[derive(Debug, Parser)]
#[clap(version)]
struct Args {
#[clap(short, long, default_value = "8779")]
port: u16,
}
#[tokio::main]
async fn main() -> Result<()> {
env_logger::init();
let args = Args::parse();
let cancellation_token = listen_for_shutdown_signal();
let handle = run_server(args.port).await?;
let handle_clone = handle.clone();
tokio::select! {
_ = cancellation_token.cancelled() => {
info!("Shutting down server...");
}
_ = handle_clone.stopped() => {
error!("Server stopped unexpectedly");
}
}
info!("Server shutdown complete");
Ok(())
}
async fn run_server(port: u16) -> Result<jsonrpsee::server::ServerHandle> {
let server = Server::builder()
.build(SocketAddr::from(([0, 0, 0, 0], port)))
.await
.context("Failed to build RPC server")?;
let addr = server
.local_addr()
.context("Failed to get local address of RPC server")?;
info!("Starting Indexer Service RPC server on {addr}");
let handle = server.start(indexer_service::service::IndexerService.into_rpc());
Ok(handle)
}
fn listen_for_shutdown_signal() -> CancellationToken {
let cancellation_token = CancellationToken::new();
let cancellation_token_clone = cancellation_token.clone();
tokio::spawn(async move {
if let Err(err) = tokio::signal::ctrl_c().await {
error!("Failed to listen for Ctrl-C signal: {err}");
return;
}
info!("Received Ctrl-C signal");
cancellation_token_clone.cancel();
});
cancellation_token
}

View File

@ -0,0 +1,36 @@
use indexer_service_protocol::{Account, AccountId, Block, BlockId, Hash, Transaction};
use jsonrpsee::{core::SubscriptionResult, types::ErrorObjectOwned};
pub struct IndexerService;
// `async_trait` is required by `jsonrpsee`
#[async_trait::async_trait]
impl indexer_service_rpc::RpcServer for IndexerService {
async fn subscribe_to_blocks(
&self,
_subscription_sink: jsonrpsee::PendingSubscriptionSink,
_from: BlockId,
) -> SubscriptionResult {
todo!()
}
async fn get_block_by_id(&self, _block_id: BlockId) -> Result<Block, ErrorObjectOwned> {
todo!()
}
async fn get_block_by_hash(&self, _block_hash: Hash) -> Result<Block, ErrorObjectOwned> {
todo!()
}
async fn get_last_block_id(&self) -> Result<BlockId, ErrorObjectOwned> {
todo!()
}
async fn get_account(&self, _account_id: AccountId) -> Result<Account, ErrorObjectOwned> {
todo!()
}
async fn get_transaction(&self, _tx_hash: Hash) -> Result<Transaction, ErrorObjectOwned> {
todo!()
}
}

View File

@ -2,6 +2,7 @@
name = "integration_tests"
version = "0.1.0"
edition = "2024"
license = { workspace = true }
[dependencies]
nssa_core = { workspace = true, features = ["host"] }
@ -11,7 +12,12 @@ sequencer_runner.workspace = true
wallet.workspace = true
common.workspace = true
key_protocol.workspace = true
indexer_core.workspace = true
wallet-ffi.workspace = true
serde_json.workspace = true
token_core.workspace = true
url.workspace = true
anyhow.workspace = true
env_logger.workspace = true
log.workspace = true

View File

@ -0,0 +1,17 @@
{
"bedrock_client_config": {
"addr": "http://127.0.0.1:8080",
"auth": {
"username": "user"
}
},
"channel_id": "0101010101010101010101010101010101010101010101010101010101010101",
"backoff": {
"max_retries": 10,
"start_delay_millis": 100
},
"resubscribe_interval_millis": 1000,
"sequencer_client_config": {
"addr": "will_be_replaced_in_runtime"
}
}
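
For orientation, a sketch of how this config might be consumed, mirroring what the test harness does later in this diff. Field names follow the JSON above, while the exact `IndexerConfig` and `IndexerCore` signatures are assumptions inferred from their use there:

use std::path::Path;

use anyhow::Result;
use indexer_core::{IndexerCore, config::IndexerConfig};
use url::Url;

async fn run_indexer(sequencer_addr: &str) -> Result<()> {
    let mut config = IndexerConfig::from_path(Path::new("configs/indexer/indexer_config.json"))?;
    // "will_be_replaced_in_runtime" above is exactly that: point the client at the real sequencer.
    config.sequencer_client_config.addr = Url::parse(sequencer_addr)?;
    let indexer = IndexerCore::new(config)?;
    // Runs until the block subscription ends or fails.
    indexer.subscribe_parse_block_stream().await
}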

View File

@ -0,0 +1,165 @@
{
"home": "",
"override_rust_log": null,
"genesis_id": 1,
"is_genesis_random": true,
"max_num_tx_in_block": 20,
"mempool_max_size": 10000,
"block_create_timeout_millis": 10000,
"port": 0,
"initial_accounts": [
{
"account_id": "BLgCRDXYdQPMMWVHYRFGQZbgeHx9frkipa8GtpG2Syqy",
"balance": 10000
},
{
"account_id": "Gj1mJy5W7J5pfmLRujmQaLfLMWidNxQ6uwnhb666ZwHw",
"balance": 20000
}
],
"initial_commitments": [
{
"npk": [
63,
202,
178,
231,
183,
82,
237,
212,
216,
221,
215,
255,
153,
101,
177,
161,
254,
210,
128,
122,
54,
190,
230,
151,
183,
64,
225,
229,
113,
1,
228,
97
],
"account": {
"program_owner": [
0,
0,
0,
0,
0,
0,
0,
0
],
"balance": 10000,
"data": [],
"nonce": 0
}
},
{
"npk": [
192,
251,
166,
243,
167,
236,
84,
249,
35,
136,
130,
172,
219,
225,
161,
139,
229,
89,
243,
125,
194,
213,
209,
30,
23,
174,
100,
244,
124,
74,
140,
47
],
"account": {
"program_owner": [
0,
0,
0,
0,
0,
0,
0,
0
],
"balance": 20000,
"data": [],
"nonce": 0
}
}
],
"signing_key": [
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37,
37
],
"bedrock_config": {
"channel_id": "0101010101010101010101010101010101010101010101010101010101010101",
"node_url": "http://127.0.0.1:8080",
"auth": {
"username": "user"
}
}
}

View File

@ -6,6 +6,7 @@
"max_num_tx_in_block": 20,
"mempool_max_size": 10000,
"block_create_timeout_millis": 10000,
"retry_pending_blocks_timeout_millis": 240000,
"port": 0,
"initial_accounts": [
{

View File

@ -3,19 +3,21 @@
use std::{net::SocketAddr, path::PathBuf, sync::LazyLock};
use actix_web::dev::ServerHandle;
use anyhow::{Context as _, Result};
use anyhow::{Context, Result};
use base64::{Engine, engine::general_purpose::STANDARD as BASE64};
use common::{
sequencer_client::SequencerClient,
transaction::{EncodedTransaction, NSSATransaction},
};
use futures::FutureExt as _;
use indexer_core::{IndexerCore, config::IndexerConfig};
use log::debug;
use nssa::PrivacyPreservingTransaction;
use nssa_core::Commitment;
use sequencer_core::config::SequencerConfig;
use tempfile::TempDir;
use tokio::task::JoinHandle;
use url::Url;
use wallet::{WalletCore, config::WalletConfigOverrides};
// TODO: Remove this and control time from tests
@ -38,40 +40,71 @@ static LOGGER: LazyLock<()> = LazyLock::new(env_logger::init);
pub struct TestContext {
sequencer_server_handle: ServerHandle,
sequencer_loop_handle: JoinHandle<Result<()>>,
sequencer_retry_pending_blocks_handle: JoinHandle<Result<()>>,
indexer_loop_handle: Option<JoinHandle<Result<()>>>,
sequencer_client: SequencerClient,
wallet: WalletCore,
wallet_password: String,
_temp_sequencer_dir: TempDir,
_temp_wallet_dir: TempDir,
}
impl TestContext {
/// Create new test context.
/// Create new test context in detached mode (the default).
pub async fn new() -> Result<Self> {
let manifest_dir = env!("CARGO_MANIFEST_DIR");
let sequencer_config_path =
PathBuf::from(manifest_dir).join("configs/sequencer/sequencer_config.json");
PathBuf::from(manifest_dir).join("configs/sequencer/detached/sequencer_config.json");
let sequencer_config = SequencerConfig::from_path(&sequencer_config_path)
.context("Failed to create sequencer config from file")?;
Self::new_with_sequencer_config(sequencer_config).await
Self::new_with_sequencer_and_maybe_indexer_configs(sequencer_config, None).await
}
/// Create new test context with custom sequencer config.
/// Create new test context attached to a local bedrock node.
pub async fn new_bedrock_local_attached() -> Result<Self> {
let manifest_dir = env!("CARGO_MANIFEST_DIR");
let sequencer_config_path = PathBuf::from(manifest_dir)
.join("configs/sequencer/bedrock_local_attached/sequencer_config.json");
let sequencer_config = SequencerConfig::from_path(&sequencer_config_path)
.context("Failed to create sequencer config from file")?;
let indexer_config_path =
PathBuf::from(manifest_dir).join("configs/indexer/indexer_config.json");
let indexer_config = IndexerConfig::from_path(&indexer_config_path)
.context("Failed to create indexer config from file")?;
Self::new_with_sequencer_and_maybe_indexer_configs(sequencer_config, Some(indexer_config))
.await
}
/// Create new test context with a custom sequencer config and, optionally, an indexer config.
///
/// `home` and `port` fields of the provided config will be overridden to meet test-parallelism
/// requirements.
pub async fn new_with_sequencer_config(sequencer_config: SequencerConfig) -> Result<Self> {
pub async fn new_with_sequencer_and_maybe_indexer_configs(
sequencer_config: SequencerConfig,
indexer_config: Option<IndexerConfig>,
) -> Result<Self> {
// Ensure logger is initialized only once
*LOGGER;
debug!("Test context setup");
let (sequencer_server_handle, sequencer_addr, sequencer_loop_handle, temp_sequencer_dir) =
Self::setup_sequencer(sequencer_config)
.await
.context("Failed to setup sequencer")?;
let (
sequencer_server_handle,
sequencer_addr,
sequencer_loop_handle,
sequencer_retry_pending_blocks_handle,
temp_sequencer_dir,
) = Self::setup_sequencer(sequencer_config)
.await
.context("Failed to setup sequencer")?;
// Convert 0.0.0.0 to 127.0.0.1 for client connections
// When binding to port 0, the server binds to 0.0.0.0:<random_port>
@ -82,26 +115,60 @@ impl TestContext {
format!("http://{sequencer_addr}")
};
let (wallet, temp_wallet_dir) = Self::setup_wallet(sequencer_addr.clone())
let (wallet, temp_wallet_dir, wallet_password) = Self::setup_wallet(sequencer_addr.clone())
.await
.context("Failed to setup wallet")?;
let sequencer_client =
SequencerClient::new(sequencer_addr).context("Failed to create sequencer client")?;
let sequencer_client = SequencerClient::new(
Url::parse(&sequencer_addr).context("Failed to parse sequencer addr")?,
)
.context("Failed to create sequencer client")?;
Ok(Self {
sequencer_server_handle,
sequencer_loop_handle,
sequencer_client,
wallet,
_temp_sequencer_dir: temp_sequencer_dir,
_temp_wallet_dir: temp_wallet_dir,
})
if let Some(mut indexer_config) = indexer_config {
indexer_config.sequencer_client_config.addr =
Url::parse(&sequencer_addr).context("Failed to parse sequencer addr")?;
let indexer_core = IndexerCore::new(indexer_config)?;
let indexer_loop_handle = Some(tokio::spawn(async move {
indexer_core.subscribe_parse_block_stream().await
}));
Ok(Self {
sequencer_server_handle,
sequencer_loop_handle,
sequencer_retry_pending_blocks_handle,
indexer_loop_handle,
sequencer_client,
wallet,
_temp_sequencer_dir: temp_sequencer_dir,
_temp_wallet_dir: temp_wallet_dir,
wallet_password,
})
} else {
Ok(Self {
sequencer_server_handle,
sequencer_loop_handle,
sequencer_retry_pending_blocks_handle,
indexer_loop_handle: None,
sequencer_client,
wallet,
_temp_sequencer_dir: temp_sequencer_dir,
_temp_wallet_dir: temp_wallet_dir,
wallet_password,
})
}
}
async fn setup_sequencer(
mut config: SequencerConfig,
) -> Result<(ServerHandle, SocketAddr, JoinHandle<Result<()>>, TempDir)> {
) -> Result<(
ServerHandle,
SocketAddr,
JoinHandle<Result<()>>,
JoinHandle<Result<()>>,
TempDir,
)> {
let temp_sequencer_dir =
tempfile::tempdir().context("Failed to create temp dir for sequencer home")?;
@ -113,18 +180,23 @@ impl TestContext {
// Setting port to 0 lets the OS choose a free port for us
config.port = 0;
let (sequencer_server_handle, sequencer_addr, sequencer_loop_handle) =
sequencer_runner::startup_sequencer(config).await?;
let (
sequencer_server_handle,
sequencer_addr,
sequencer_loop_handle,
sequencer_retry_pending_blocks_handle,
) = sequencer_runner::startup_sequencer(config).await?;
Ok((
sequencer_server_handle,
sequencer_addr,
sequencer_loop_handle,
sequencer_retry_pending_blocks_handle,
temp_sequencer_dir,
))
}
async fn setup_wallet(sequencer_addr: String) -> Result<(WalletCore, TempDir)> {
async fn setup_wallet(sequencer_addr: String) -> Result<(WalletCore, TempDir, String)> {
let manifest_dir = env!("CARGO_MANIFEST_DIR");
let wallet_config_source_path =
PathBuf::from(manifest_dir).join("configs/wallet/wallet_config.json");
@ -142,11 +214,12 @@ impl TestContext {
..Default::default()
};
let wallet_password = "test_pass".to_owned();
let wallet = WalletCore::new_init_storage(
config_path,
storage_path,
Some(config_overrides),
"test_pass".to_owned(),
wallet_password.clone(),
)
.context("Failed to init wallet")?;
wallet
@ -154,7 +227,7 @@ impl TestContext {
.await
.context("Failed to store wallet persistent data")?;
Ok((wallet, temp_wallet_dir))
Ok((wallet, temp_wallet_dir, wallet_password))
}
/// Get reference to the wallet.
@ -162,6 +235,10 @@ impl TestContext {
&self.wallet
}
pub fn wallet_password(&self) -> &str {
&self.wallet_password
}
/// Get mutable reference to the wallet.
pub fn wallet_mut(&mut self) -> &mut WalletCore {
&mut self.wallet
@ -180,19 +257,40 @@ impl Drop for TestContext {
let Self {
sequencer_server_handle,
sequencer_loop_handle,
sequencer_retry_pending_blocks_handle,
indexer_loop_handle,
sequencer_client: _,
wallet: _,
_temp_sequencer_dir,
_temp_wallet_dir,
wallet_password: _,
} = self;
sequencer_loop_handle.abort();
sequencer_retry_pending_blocks_handle.abort();
if let Some(indexer_loop_handle) = indexer_loop_handle {
indexer_loop_handle.abort();
}
// Can't await here since Drop can't be async, but the stop signal is still sent
sequencer_server_handle.stop(true).now_or_never();
}
}
/// A test context to be used in normal #[test] tests
pub struct BlockingTestContext {
pub ctx: TestContext,
pub runtime: tokio::runtime::Runtime,
}
impl BlockingTestContext {
pub fn new() -> Result<Self> {
let runtime = tokio::runtime::Runtime::new().unwrap();
let ctx = runtime.block_on(TestContext::new())?;
Ok(Self { ctx, runtime })
}
}
pub fn format_public_account_id(account_id: &str) -> String {
format!("Public/{account_id}")
}
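
A minimal sketch of `BlockingTestContext` in a plain synchronous test; every call below also appears in the FFI tests later in this diff, and any async work would be driven through the context's `runtime` via `block_on`:

use anyhow::Result;
use integration_tests::{ACC_SENDER, BlockingTestContext};
use nssa::{AccountId, PublicKey};

#[test]
fn wallet_exposes_sender_signing_key() -> Result<()> {
    let blocking = BlockingTestContext::new()?;
    // The harness initializes the wallet with this fixed test password.
    assert_eq!(blocking.ctx.wallet_password(), "test_pass");
    let account_id: AccountId = ACC_SENDER.parse().unwrap();
    // Wallet accessors are synchronous; async calls would go through `blocking.runtime.block_on`.
    let signing_key = blocking
        .ctx
        .wallet()
        .get_account_public_signing_key(&account_id)
        .unwrap();
    let _public_key = PublicKey::new_from_private_key(signing_key);
    Ok(())
}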

View File

@ -0,0 +1,23 @@
use anyhow::Result;
use integration_tests::TestContext;
use log::info;
use tokio::test;
#[ignore = "needs complicated setup"]
#[test]
// To run this test properly, you need a nomos node running in the background.
// For instructions on building the nomos node, refer to [this](https://github.com/logos-blockchain/logos-blockchain?tab=readme-ov-file#running-a-logos-blockchain-node).
//
// It is recommended to run the node locally from a built binary.
async fn indexer_run_local_node() -> Result<()> {
let _ctx = TestContext::new_bedrock_local_attached().await?;
info!("Let's observe behaviour");
tokio::time::sleep(std::time::Duration::from_secs(180)).await;
// There is no way to check the indexer state yet.
// Once the indexer runs as a service, this will become possible.
Ok(())
}

View File

@ -8,6 +8,7 @@ use integration_tests::{
use key_protocol::key_management::key_tree::chain_index::ChainIndex;
use log::info;
use nssa::program::Program;
use token_core::{TokenDefinition, TokenHolding};
use tokio::test;
use wallet::cli::{
Command, SubcommandReturnValue,
@ -59,11 +60,13 @@ async fn create_and_transfer_public_token() -> Result<()> {
};
// Create new token
let name = "A NAME".to_string();
let total_supply = 37;
let subcommand = TokenProgramAgnosticSubcommand::New {
definition_account_id: format_public_account_id(&definition_account_id.to_string()),
supply_account_id: format_public_account_id(&supply_account_id.to_string()),
name: "A NAME".to_string(),
total_supply: 37,
name: name.clone(),
total_supply,
};
wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?;
@ -76,16 +79,16 @@ async fn create_and_transfer_public_token() -> Result<()> {
.get_account(definition_account_id.to_string())
.await?
.account;
let token_definition = TokenDefinition::try_from(&definition_acc.data)?;
assert_eq!(definition_acc.program_owner, Program::token().id());
// The data of a token definition account has the following layout:
// [ 0x00 || name (6 bytes) || total supply (little endian 16 bytes) || metadata id (32 bytes)]
assert_eq!(
definition_acc.data.as_ref(),
&[
0, 65, 32, 78, 65, 77, 69, 37, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
token_definition,
TokenDefinition::Fungible {
name: name.clone(),
total_supply,
metadata_id: None
}
);
// Check the status of the token holding account with the total supply
@ -97,24 +100,23 @@ async fn create_and_transfer_public_token() -> Result<()> {
// The account must be owned by the token program
assert_eq!(supply_acc.program_owner, Program::token().id());
// The data of a token holding account has the following layout:
// [ 0x01 || corresponding_token_definition_id (32 bytes) || balance (little endian 16 bytes) ]
// First byte of the data equal to 1 means it's a token holding account
assert_eq!(supply_acc.data.as_ref()[0], 1);
// Bytes from 1 to 33 represent the id of the token this account is associated with
let token_holding = TokenHolding::try_from(&supply_acc.data)?;
assert_eq!(
&supply_acc.data.as_ref()[1..33],
definition_account_id.to_bytes()
token_holding,
TokenHolding::Fungible {
definition_id: definition_account_id,
balance: total_supply
}
);
assert_eq!(u128::from_le_bytes(supply_acc.data[33..].try_into()?), 37);
// Transfer 7 tokens from supply_acc to recipient_account_id
let transfer_amount = 7;
let subcommand = TokenProgramAgnosticSubcommand::Send {
from: format_public_account_id(&supply_account_id.to_string()),
to: Some(format_public_account_id(&recipient_account_id.to_string())),
to_npk: None,
to_ipk: None,
amount: 7,
amount: transfer_amount,
};
wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?;
@ -129,9 +131,14 @@ async fn create_and_transfer_public_token() -> Result<()> {
.await?
.account;
assert_eq!(supply_acc.program_owner, Program::token().id());
assert_eq!(supply_acc.data[0], 1);
assert_eq!(&supply_acc.data[1..33], definition_account_id.to_bytes());
assert_eq!(u128::from_le_bytes(supply_acc.data[33..].try_into()?), 30);
let token_holding = TokenHolding::try_from(&supply_acc.data)?;
assert_eq!(
token_holding,
TokenHolding::Fungible {
definition_id: definition_account_id,
balance: total_supply - transfer_amount
}
);
// Check the status of the recipient account after transfer
let recipient_acc = ctx
@ -140,15 +147,21 @@ async fn create_and_transfer_public_token() -> Result<()> {
.await?
.account;
assert_eq!(recipient_acc.program_owner, Program::token().id());
assert_eq!(recipient_acc.data[0], 1);
assert_eq!(&recipient_acc.data[1..33], definition_account_id.to_bytes());
assert_eq!(u128::from_le_bytes(recipient_acc.data[33..].try_into()?), 7);
let token_holding = TokenHolding::try_from(&recipient_acc.data)?;
assert_eq!(
token_holding,
TokenHolding::Fungible {
definition_id: definition_account_id,
balance: transfer_amount
}
);
// Burn 3 tokens from recipient_acc
let burn_amount = 3;
let subcommand = TokenProgramAgnosticSubcommand::Burn {
definition: format_public_account_id(&definition_account_id.to_string()),
holder: format_public_account_id(&recipient_account_id.to_string()),
amount: 3,
amount: burn_amount,
};
wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?;
@ -162,13 +175,15 @@ async fn create_and_transfer_public_token() -> Result<()> {
.get_account(definition_account_id.to_string())
.await?
.account;
let token_definition = TokenDefinition::try_from(&definition_acc.data)?;
assert_eq!(
definition_acc.data.as_ref(),
&[
0, 65, 32, 78, 65, 77, 69, 34, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
token_definition,
TokenDefinition::Fungible {
name: name.clone(),
total_supply: total_supply - burn_amount,
metadata_id: None
}
);
// Check the status of the recipient account after burn
@ -177,16 +192,24 @@ async fn create_and_transfer_public_token() -> Result<()> {
.get_account(recipient_account_id.to_string())
.await?
.account;
let token_holding = TokenHolding::try_from(&recipient_acc.data)?;
assert_eq!(u128::from_le_bytes(recipient_acc.data[33..].try_into()?), 4);
assert_eq!(
token_holding,
TokenHolding::Fungible {
definition_id: definition_account_id,
balance: transfer_amount - burn_amount
}
);
// Mint 10 tokens at recipient_acc
let mint_amount = 10;
let subcommand = TokenProgramAgnosticSubcommand::Mint {
definition: format_public_account_id(&definition_account_id.to_string()),
holder: Some(format_public_account_id(&recipient_account_id.to_string())),
holder_npk: None,
holder_ipk: None,
amount: 10,
amount: mint_amount,
};
wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?;
@ -200,13 +223,15 @@ async fn create_and_transfer_public_token() -> Result<()> {
.get_account(definition_account_id.to_string())
.await?
.account;
let token_definition = TokenDefinition::try_from(&definition_acc.data)?;
assert_eq!(
definition_acc.data.as_ref(),
&[
0, 65, 32, 78, 65, 77, 69, 44, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
token_definition,
TokenDefinition::Fungible {
name,
total_supply: total_supply - burn_amount + mint_amount,
metadata_id: None
}
);
// Check the status of the recipient account after mint
@ -215,10 +240,14 @@ async fn create_and_transfer_public_token() -> Result<()> {
.get_account(recipient_account_id.to_string())
.await?
.account;
let token_holding = TokenHolding::try_from(&recipient_acc.data)?;
assert_eq!(
u128::from_le_bytes(recipient_acc.data[33..].try_into()?),
14
token_holding,
TokenHolding::Fungible {
definition_id: definition_account_id,
balance: transfer_amount - burn_amount + mint_amount
}
);
info!("Successfully created and transferred public token");
@ -270,11 +299,13 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> {
};
// Create new token
let name = "A NAME".to_string();
let total_supply = 37;
let subcommand = TokenProgramAgnosticSubcommand::New {
definition_account_id: format_public_account_id(&definition_account_id.to_string()),
supply_account_id: format_private_account_id(&supply_account_id.to_string()),
name: "A NAME".to_string(),
total_supply: 37,
name: name.clone(),
total_supply,
};
wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?;
@ -288,14 +319,16 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> {
.get_account(definition_account_id.to_string())
.await?
.account;
let token_definition = TokenDefinition::try_from(&definition_acc.data)?;
assert_eq!(definition_acc.program_owner, Program::token().id());
assert_eq!(
definition_acc.data.as_ref(),
&[
0, 65, 32, 78, 65, 77, 69, 37, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
token_definition,
TokenDefinition::Fungible {
name: name.clone(),
total_supply,
metadata_id: None
}
);
let new_commitment1 = ctx
@ -305,12 +338,13 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> {
assert!(verify_commitment_is_in_state(new_commitment1, ctx.sequencer_client()).await);
// Transfer 7 tokens from supply_acc to recipient_account_id
let transfer_amount = 7;
let subcommand = TokenProgramAgnosticSubcommand::Send {
from: format_private_account_id(&supply_account_id.to_string()),
to: Some(format_private_account_id(&recipient_account_id.to_string())),
to_npk: None,
to_ipk: None,
amount: 7,
amount: transfer_amount,
};
wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?;
@ -331,10 +365,11 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> {
assert!(verify_commitment_is_in_state(new_commitment2, ctx.sequencer_client()).await);
// Burn 3 tokens from recipient_acc
let burn_amount = 3;
let subcommand = TokenProgramAgnosticSubcommand::Burn {
definition: format_public_account_id(&definition_account_id.to_string()),
holder: format_private_account_id(&recipient_account_id.to_string()),
amount: 3,
amount: burn_amount,
};
wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?;
@ -348,13 +383,15 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> {
.get_account(definition_account_id.to_string())
.await?
.account;
let token_definition = TokenDefinition::try_from(&definition_acc.data)?;
assert_eq!(
definition_acc.data.as_ref(),
&[
0, 65, 32, 78, 65, 77, 69, 34, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
]
token_definition,
TokenDefinition::Fungible {
name,
total_supply: total_supply - burn_amount,
metadata_id: None
}
);
let new_commitment2 = ctx
@ -368,10 +405,14 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> {
.wallet()
.get_account_private(&recipient_account_id)
.context("Failed to get recipient account")?;
let token_holding = TokenHolding::try_from(&recipient_acc.data)?;
assert_eq!(
u128::from_le_bytes(recipient_acc.data[33..].try_into()?),
4 // 7 - 3
token_holding,
TokenHolding::Fungible {
definition_id: definition_account_id,
balance: transfer_amount - burn_amount
}
);
info!("Successfully created and transferred token with private supply");
@ -414,11 +455,13 @@ async fn create_token_with_private_definition() -> Result<()> {
};
// Create token with private definition
let name = "A NAME".to_string();
let total_supply = 37;
let subcommand = TokenProgramAgnosticSubcommand::New {
definition_account_id: format_private_account_id(&definition_account_id.to_string()),
supply_account_id: format_public_account_id(&supply_account_id.to_string()),
name: "A NAME".to_string(),
total_supply: 37,
name: name.clone(),
total_supply,
};
wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?;
@ -441,8 +484,14 @@ async fn create_token_with_private_definition() -> Result<()> {
.account;
assert_eq!(supply_acc.program_owner, Program::token().id());
assert_eq!(supply_acc.data.as_ref()[0], 1);
assert_eq!(u128::from_le_bytes(supply_acc.data[33..].try_into()?), 37);
let token_holding = TokenHolding::try_from(&supply_acc.data)?;
assert_eq!(
token_holding,
TokenHolding::Fungible {
definition_id: definition_account_id,
balance: total_supply
}
);
// Create private recipient account
let result = wallet::cli::execute_subcommand(
@ -471,6 +520,7 @@ async fn create_token_with_private_definition() -> Result<()> {
};
// Mint to public account
let mint_amount_public = 10;
let subcommand = TokenProgramAgnosticSubcommand::Mint {
definition: format_private_account_id(&definition_account_id.to_string()),
holder: Some(format_public_account_id(
@ -478,7 +528,7 @@ async fn create_token_with_private_definition() -> Result<()> {
)),
holder_npk: None,
holder_ipk: None,
amount: 10,
amount: mint_amount_public,
};
wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?;
@ -491,10 +541,15 @@ async fn create_token_with_private_definition() -> Result<()> {
.wallet()
.get_account_private(&definition_account_id)
.context("Failed to get definition account")?;
let token_definition = TokenDefinition::try_from(&definition_acc.data)?;
assert_eq!(
u128::from_le_bytes(definition_acc.data[7..23].try_into()?),
47 // 37 + 10
token_definition,
TokenDefinition::Fungible {
name: name.clone(),
total_supply: total_supply + mint_amount_public,
metadata_id: None
}
);
// Verify public recipient received tokens
@ -503,13 +558,18 @@ async fn create_token_with_private_definition() -> Result<()> {
.get_account(recipient_account_id_public.to_string())
.await?
.account;
let token_holding = TokenHolding::try_from(&recipient_acc.data)?;
assert_eq!(
u128::from_le_bytes(recipient_acc.data[33..].try_into()?),
10
token_holding,
TokenHolding::Fungible {
definition_id: definition_account_id,
balance: mint_amount_public
}
);
// Mint to private account
let mint_amount_private = 5;
let subcommand = TokenProgramAgnosticSubcommand::Mint {
definition: format_private_account_id(&definition_account_id.to_string()),
holder: Some(format_private_account_id(
@ -517,7 +577,7 @@ async fn create_token_with_private_definition() -> Result<()> {
)),
holder_npk: None,
holder_ipk: None,
amount: 5,
amount: mint_amount_private,
};
wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?;
@ -537,10 +597,14 @@ async fn create_token_with_private_definition() -> Result<()> {
.wallet()
.get_account_private(&recipient_account_id_private)
.context("Failed to get private recipient account")?;
let token_holding = TokenHolding::try_from(&recipient_acc_private.data)?;
assert_eq!(
u128::from_le_bytes(recipient_acc_private.data[33..].try_into()?),
5
token_holding,
TokenHolding::Fungible {
definition_id: definition_account_id,
balance: mint_amount_private
}
);
info!("Successfully created token with private definition and minted to both account types");
@ -579,11 +643,13 @@ async fn create_token_with_private_definition_and_supply() -> Result<()> {
};
// Create token with both private definition and supply
let name = "A NAME".to_string();
let total_supply = 37;
let subcommand = TokenProgramAgnosticSubcommand::New {
definition_account_id: format_private_account_id(&definition_account_id.to_string()),
supply_account_id: format_private_account_id(&supply_account_id.to_string()),
name: "A NAME".to_string(),
total_supply: 37,
name,
total_supply,
};
wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?;
@ -610,8 +676,15 @@ async fn create_token_with_private_definition_and_supply() -> Result<()> {
.wallet()
.get_account_private(&supply_account_id)
.context("Failed to get supply account")?;
let token_holding = TokenHolding::try_from(&supply_acc.data)?;
assert_eq!(u128::from_le_bytes(supply_acc.data[33..].try_into()?), 37);
assert_eq!(
token_holding,
TokenHolding::Fungible {
definition_id: definition_account_id,
balance: total_supply
}
);
// Create recipient account
let result = wallet::cli::execute_subcommand(
@ -627,12 +700,13 @@ async fn create_token_with_private_definition_and_supply() -> Result<()> {
};
// Transfer tokens
let transfer_amount = 7;
let subcommand = TokenProgramAgnosticSubcommand::Send {
from: format_private_account_id(&supply_account_id.to_string()),
to: Some(format_private_account_id(&recipient_account_id.to_string())),
to_npk: None,
to_ipk: None,
amount: 7,
amount: transfer_amount,
};
wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?;
@ -658,13 +732,27 @@ async fn create_token_with_private_definition_and_supply() -> Result<()> {
.wallet()
.get_account_private(&supply_account_id)
.context("Failed to get supply account")?;
assert_eq!(u128::from_le_bytes(supply_acc.data[33..].try_into()?), 30);
let token_holding = TokenHolding::try_from(&supply_acc.data)?;
assert_eq!(
token_holding,
TokenHolding::Fungible {
definition_id: definition_account_id,
balance: total_supply - transfer_amount
}
);
let recipient_acc = ctx
.wallet()
.get_account_private(&recipient_account_id)
.context("Failed to get recipient account")?;
assert_eq!(u128::from_le_bytes(recipient_acc.data[33..].try_into()?), 7);
let token_holding = TokenHolding::try_from(&recipient_acc.data)?;
assert_eq!(
token_holding,
TokenHolding::Fungible {
definition_id: definition_account_id,
balance: transfer_amount
}
);
info!("Successfully created and transferred token with both private definition and supply");
@ -715,11 +803,13 @@ async fn shielded_token_transfer() -> Result<()> {
};
// Create token
let name = "A NAME".to_string();
let total_supply = 37;
let subcommand = TokenProgramAgnosticSubcommand::New {
definition_account_id: format_public_account_id(&definition_account_id.to_string()),
supply_account_id: format_public_account_id(&supply_account_id.to_string()),
name: "A NAME".to_string(),
total_supply: 37,
name,
total_supply,
};
wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?;
@ -728,12 +818,13 @@ async fn shielded_token_transfer() -> Result<()> {
tokio::time::sleep(Duration::from_secs(TIME_TO_WAIT_FOR_BLOCK_SECONDS)).await;
// Perform shielded transfer: public supply -> private recipient
let transfer_amount = 7;
let subcommand = TokenProgramAgnosticSubcommand::Send {
from: format_public_account_id(&supply_account_id.to_string()),
to: Some(format_private_account_id(&recipient_account_id.to_string())),
to_npk: None,
to_ipk: None,
amount: 7,
amount: transfer_amount,
};
wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?;
@ -747,7 +838,14 @@ async fn shielded_token_transfer() -> Result<()> {
.get_account(supply_account_id.to_string())
.await?
.account;
assert_eq!(u128::from_le_bytes(supply_acc.data[33..].try_into()?), 30);
let token_holding = TokenHolding::try_from(&supply_acc.data)?;
assert_eq!(
token_holding,
TokenHolding::Fungible {
definition_id: definition_account_id,
balance: total_supply - transfer_amount
}
);
// Verify recipient commitment exists
let new_commitment = ctx
@ -761,7 +859,14 @@ async fn shielded_token_transfer() -> Result<()> {
.wallet()
.get_account_private(&recipient_account_id)
.context("Failed to get recipient account")?;
assert_eq!(u128::from_le_bytes(recipient_acc.data[33..].try_into()?), 7);
let token_holding = TokenHolding::try_from(&recipient_acc.data)?;
assert_eq!(
token_holding,
TokenHolding::Fungible {
definition_id: definition_account_id,
balance: transfer_amount
}
);
info!("Successfully performed shielded token transfer");
@ -812,11 +917,13 @@ async fn deshielded_token_transfer() -> Result<()> {
};
// Create token with private supply
let name = "A NAME".to_string();
let total_supply = 37;
let subcommand = TokenProgramAgnosticSubcommand::New {
definition_account_id: format_public_account_id(&definition_account_id.to_string()),
supply_account_id: format_private_account_id(&supply_account_id.to_string()),
name: "A NAME".to_string(),
total_supply: 37,
name,
total_supply,
};
wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?;
@ -825,12 +932,13 @@ async fn deshielded_token_transfer() -> Result<()> {
tokio::time::sleep(Duration::from_secs(TIME_TO_WAIT_FOR_BLOCK_SECONDS)).await;
// Perform deshielded transfer: private supply -> public recipient
let transfer_amount = 7;
let subcommand = TokenProgramAgnosticSubcommand::Send {
from: format_private_account_id(&supply_account_id.to_string()),
to: Some(format_public_account_id(&recipient_account_id.to_string())),
to_npk: None,
to_ipk: None,
amount: 7,
amount: transfer_amount,
};
wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?;
@ -850,7 +958,14 @@ async fn deshielded_token_transfer() -> Result<()> {
.wallet()
.get_account_private(&supply_account_id)
.context("Failed to get supply account")?;
assert_eq!(u128::from_le_bytes(supply_acc.data[33..].try_into()?), 30);
let token_holding = TokenHolding::try_from(&supply_acc.data)?;
assert_eq!(
token_holding,
TokenHolding::Fungible {
definition_id: definition_account_id,
balance: total_supply - transfer_amount
}
);
// Verify recipient balance
let recipient_acc = ctx
@ -858,7 +973,14 @@ async fn deshielded_token_transfer() -> Result<()> {
.get_account(recipient_account_id.to_string())
.await?
.account;
assert_eq!(u128::from_le_bytes(recipient_acc.data[33..].try_into()?), 7);
let token_holding = TokenHolding::try_from(&recipient_acc.data)?;
assert_eq!(
token_holding,
TokenHolding::Fungible {
definition_id: definition_account_id,
balance: transfer_amount
}
);
info!("Successfully performed deshielded token transfer");
@ -896,11 +1018,13 @@ async fn token_claiming_path_with_private_accounts() -> Result<()> {
};
// Create token
let name = "A NAME".to_string();
let total_supply = 37;
let subcommand = TokenProgramAgnosticSubcommand::New {
definition_account_id: format_private_account_id(&definition_account_id.to_string()),
supply_account_id: format_private_account_id(&supply_account_id.to_string()),
name: "A NAME".to_string(),
total_supply: 37,
name,
total_supply,
};
wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?;
@ -931,12 +1055,13 @@ async fn token_claiming_path_with_private_accounts() -> Result<()> {
.context("Failed to get private account keys")?;
// Mint using claiming path (foreign account)
let mint_amount = 9;
let subcommand = TokenProgramAgnosticSubcommand::Mint {
definition: format_private_account_id(&definition_account_id.to_string()),
holder: None,
holder_npk: Some(hex::encode(holder_keys.nullifer_public_key.0)),
holder_ipk: Some(hex::encode(holder_keys.incoming_viewing_public_key.0)),
amount: 9,
amount: mint_amount,
};
wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?;
@ -960,7 +1085,14 @@ async fn token_claiming_path_with_private_accounts() -> Result<()> {
.wallet()
.get_account_private(&recipient_account_id)
.context("Failed to get recipient account")?;
assert_eq!(u128::from_le_bytes(recipient_acc.data[33..].try_into()?), 9);
let token_holding = TokenHolding::try_from(&recipient_acc.data)?;
assert_eq!(
token_holding,
TokenHolding::Fungible {
definition_id: definition_account_id,
balance: mint_amount
}
);
info!("Successfully minted tokens using claiming path");

View File

@ -25,7 +25,11 @@ pub async fn tps_test() -> Result<()> {
let target_tps = 12;
let tps_test = TpsTestManager::new(target_tps, num_transactions);
let ctx = TestContext::new_with_sequencer_config(tps_test.generate_sequencer_config()).await?;
let ctx = TestContext::new_with_sequencer_and_maybe_indexer_configs(
tps_test.generate_sequencer_config(),
None,
)
.await?;
let target_time = tps_test.target_time();
info!(
@ -186,6 +190,7 @@ impl TpsTestManager {
initial_commitments: vec![initial_commitment],
signing_key: [37; 32],
bedrock_config: None,
retry_pending_blocks_timeout_millis: 1000 * 60 * 4,
}
}
}

View File

@ -0,0 +1,618 @@
use std::{
collections::HashSet,
ffi::{CStr, CString, c_char},
io::Write,
time::Duration,
};
use anyhow::Result;
use integration_tests::{
ACC_RECEIVER, ACC_SENDER, ACC_SENDER_PRIVATE, BlockingTestContext,
TIME_TO_WAIT_FOR_BLOCK_SECONDS,
};
use log::info;
use nssa::{Account, AccountId, PublicKey, program::Program};
use nssa_core::program::DEFAULT_PROGRAM_ID;
use tempfile::tempdir;
use wallet::WalletCore;
use wallet_ffi::{
FfiAccount, FfiAccountList, FfiBytes32, FfiPrivateAccountKeys, FfiPublicAccountKey,
FfiTransferResult, WalletHandle, error,
};
unsafe extern "C" {
fn wallet_ffi_create_new(
config_path: *const c_char,
storage_path: *const c_char,
password: *const c_char,
) -> *mut WalletHandle;
fn wallet_ffi_destroy(handle: *mut WalletHandle);
fn wallet_ffi_create_account_public(
handle: *mut WalletHandle,
out_account_id: *mut FfiBytes32,
) -> error::WalletFfiError;
fn wallet_ffi_create_account_private(
handle: *mut WalletHandle,
out_account_id: *mut FfiBytes32,
) -> error::WalletFfiError;
fn wallet_ffi_list_accounts(
handle: *mut WalletHandle,
out_list: *mut FfiAccountList,
) -> error::WalletFfiError;
fn wallet_ffi_free_account_list(list: *mut FfiAccountList);
fn wallet_ffi_get_balance(
handle: *mut WalletHandle,
account_id: *const FfiBytes32,
is_public: bool,
out_balance: *mut [u8; 16],
) -> error::WalletFfiError;
fn wallet_ffi_get_account_public(
handle: *mut WalletHandle,
account_id: *const FfiBytes32,
out_account: *mut FfiAccount,
) -> error::WalletFfiError;
fn wallet_ffi_free_account_data(account: *mut FfiAccount);
fn wallet_ffi_get_public_account_key(
handle: *mut WalletHandle,
account_id: *const FfiBytes32,
out_public_key: *mut FfiPublicAccountKey,
) -> error::WalletFfiError;
fn wallet_ffi_get_private_account_keys(
handle: *mut WalletHandle,
account_id: *const FfiBytes32,
out_keys: *mut FfiPrivateAccountKeys,
) -> error::WalletFfiError;
fn wallet_ffi_free_private_account_keys(keys: *mut FfiPrivateAccountKeys);
fn wallet_ffi_account_id_to_base58(account_id: *const FfiBytes32) -> *mut std::ffi::c_char;
fn wallet_ffi_free_string(ptr: *mut c_char);
fn wallet_ffi_account_id_from_base58(
base58_str: *const std::ffi::c_char,
out_account_id: *mut FfiBytes32,
) -> error::WalletFfiError;
fn wallet_ffi_transfer_public(
handle: *mut WalletHandle,
from: *const FfiBytes32,
to: *const FfiBytes32,
amount: *const [u8; 16],
out_result: *mut FfiTransferResult,
) -> error::WalletFfiError;
fn wallet_ffi_free_transfer_result(result: *mut FfiTransferResult);
fn wallet_ffi_register_public_account(
handle: *mut WalletHandle,
account_id: *const FfiBytes32,
out_result: *mut FfiTransferResult,
) -> error::WalletFfiError;
}
fn new_wallet_ffi_with_test_context_config(ctx: &BlockingTestContext) -> *mut WalletHandle {
let tempdir = tempfile::tempdir().unwrap();
let config_path = tempdir.path().join("wallet_config.json");
let storage_path = tempdir.path().join("storage.json");
let mut config = ctx.ctx.wallet().config().to_owned();
if let Some(config_overrides) = ctx.ctx.wallet().config_overrides().clone() {
config.apply_overrides(config_overrides);
}
let mut file = std::fs::OpenOptions::new()
.write(true)
.create(true)
.truncate(true)
.open(&config_path)
.unwrap();
let config_with_overrides_serialized = serde_json::to_vec_pretty(&config).unwrap();
file.write_all(&config_with_overrides_serialized).unwrap();
let config_path = CString::new(config_path.to_str().unwrap()).unwrap();
let storage_path = CString::new(storage_path.to_str().unwrap()).unwrap();
let password = CString::new(ctx.ctx.wallet_password()).unwrap();
unsafe {
wallet_ffi_create_new(
config_path.as_ptr(),
storage_path.as_ptr(),
password.as_ptr(),
)
}
}
fn new_wallet_ffi_with_default_config(password: &str) -> *mut WalletHandle {
let tempdir = tempdir().unwrap();
let config_path = tempdir.path().join("wallet_config.json");
let storage_path = tempdir.path().join("storage.json");
let config_path_c = CString::new(config_path.to_str().unwrap()).unwrap();
let storage_path_c = CString::new(storage_path.to_str().unwrap()).unwrap();
let password = CString::new(password).unwrap();
unsafe {
wallet_ffi_create_new(
config_path_c.as_ptr(),
storage_path_c.as_ptr(),
password.as_ptr(),
)
}
}
fn new_wallet_rust_with_default_config(password: &str) -> WalletCore {
let tempdir = tempdir().unwrap();
let config_path = tempdir.path().join("wallet_config.json");
let storage_path = tempdir.path().join("storage.json");
WalletCore::new_init_storage(
config_path.to_path_buf(),
storage_path.to_path_buf(),
None,
password.to_string(),
)
.unwrap()
}
#[test]
fn test_wallet_ffi_create_public_accounts() {
let password = "password_for_tests";
let n_accounts = 10;
// First `n_accounts` public accounts created with Rust wallet
let new_public_account_ids_rust = {
let mut account_ids = Vec::new();
let mut wallet_rust = new_wallet_rust_with_default_config(password);
for _ in 0..n_accounts {
let account_id = wallet_rust.create_new_account_public(None).0;
account_ids.push(*account_id.value());
}
account_ids
};
// First `n_accounts` public accounts created with wallet FFI
let new_public_account_ids_ffi = unsafe {
let mut account_ids = Vec::new();
let wallet_ffi_handle = new_wallet_ffi_with_default_config(password);
for _ in 0..n_accounts {
let mut out_account_id = FfiBytes32::from_bytes([0; 32]);
wallet_ffi_create_account_public(
wallet_ffi_handle,
(&mut out_account_id) as *mut FfiBytes32,
);
account_ids.push(out_account_id.data);
}
wallet_ffi_destroy(wallet_ffi_handle);
account_ids
};
assert_eq!(new_public_account_ids_ffi, new_public_account_ids_rust);
}
#[test]
fn test_wallet_ffi_create_private_accounts() {
let password = "password_for_tests";
let n_accounts = 10;
// First `n_accounts` private accounts created with Rust wallet
let new_private_account_ids_rust = {
let mut account_ids = Vec::new();
let mut wallet_rust = new_wallet_rust_with_default_config(password);
for _ in 0..n_accounts {
let account_id = wallet_rust.create_new_account_private(None).0;
account_ids.push(*account_id.value());
}
account_ids
};
// First `n_accounts` private accounts created with wallet FFI
let new_private_account_ids_ffi = unsafe {
let mut account_ids = Vec::new();
let wallet_ffi_handle = new_wallet_ffi_with_default_config(password);
for _ in 0..n_accounts {
let mut out_account_id = FfiBytes32::from_bytes([0; 32]);
wallet_ffi_create_account_private(
wallet_ffi_handle,
(&mut out_account_id) as *mut FfiBytes32,
);
account_ids.push(out_account_id.data);
}
wallet_ffi_destroy(wallet_ffi_handle);
account_ids
};
assert_eq!(new_private_account_ids_ffi, new_private_account_ids_rust)
}
#[test]
fn test_wallet_ffi_list_accounts() {
let password = "password_for_tests";
// Create the wallet FFI
let wallet_ffi_handle = unsafe {
let handle = new_wallet_ffi_with_default_config(password);
// Create 5 public accounts and 5 private accounts
for _ in 0..5 {
let mut out_account_id = FfiBytes32::from_bytes([0; 32]);
wallet_ffi_create_account_public(handle, (&mut out_account_id) as *mut FfiBytes32);
wallet_ffi_create_account_private(handle, (&mut out_account_id) as *mut FfiBytes32);
}
handle
};
// Create the wallet Rust
let wallet_rust = {
let mut wallet = new_wallet_rust_with_default_config(password);
// Create 5 public accounts and 5 private accounts
for _ in 0..5 {
wallet.create_new_account_public(None);
wallet.create_new_account_private(None);
}
wallet
};
// Get the account list with FFI method
let mut wallet_ffi_account_list = unsafe {
let mut out_list = FfiAccountList::default();
wallet_ffi_list_accounts(wallet_ffi_handle, (&mut out_list) as *mut FfiAccountList);
out_list
};
let wallet_rust_account_ids = wallet_rust
.storage()
.user_data
.account_ids()
.collect::<Vec<_>>();
// Assert same number of elements between Rust and FFI result
assert_eq!(wallet_rust_account_ids.len(), wallet_ffi_account_list.count);
let wallet_ffi_account_list_slice = unsafe {
core::slice::from_raw_parts(
wallet_ffi_account_list.entries,
wallet_ffi_account_list.count,
)
};
// Assert same account ids between Rust and FFI result
assert_eq!(
wallet_rust_account_ids
.iter()
.map(|id| id.value())
.collect::<HashSet<_>>(),
wallet_ffi_account_list_slice
.iter()
.map(|entry| &entry.account_id.data)
.collect::<HashSet<_>>()
);
// Assert `is_pub` flag is correct in the FFI result
for entry in wallet_ffi_account_list_slice.iter() {
let account_id = AccountId::new(entry.account_id.data);
let is_pub_default_in_rust_wallet = wallet_rust
.storage()
.user_data
.default_pub_account_signing_keys
.contains_key(&account_id);
let is_pub_key_tree_wallet_rust = wallet_rust
.storage()
.user_data
.public_key_tree
.account_id_map
.contains_key(&account_id);
let is_public_in_rust_wallet = is_pub_default_in_rust_wallet || is_pub_key_tree_wallet_rust;
assert_eq!(entry.is_public, is_public_in_rust_wallet);
}
unsafe {
wallet_ffi_free_account_list((&mut wallet_ffi_account_list) as *mut FfiAccountList);
wallet_ffi_destroy(wallet_ffi_handle);
}
}
#[test]
fn test_wallet_ffi_get_balance_public() -> Result<()> {
let ctx = BlockingTestContext::new()?;
let account_id: AccountId = ACC_SENDER.parse().unwrap();
let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx);
let balance = unsafe {
let mut out_balance: [u8; 16] = [0; 16];
let ffi_account_id = FfiBytes32::from(&account_id);
let _result = wallet_ffi_get_balance(
wallet_ffi_handle,
(&ffi_account_id) as *const FfiBytes32,
true,
(&mut out_balance) as *mut [u8; 16],
);
u128::from_le_bytes(out_balance)
};
assert_eq!(balance, 10000);
info!("Successfully retrieved account balance");
unsafe {
wallet_ffi_destroy(wallet_ffi_handle);
}
Ok(())
}
#[test]
fn test_wallet_ffi_get_account_public() -> Result<()> {
let ctx = BlockingTestContext::new()?;
let account_id: AccountId = ACC_SENDER.parse().unwrap();
let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx);
let mut out_account = FfiAccount::default();
let account: Account = unsafe {
let ffi_account_id = FfiBytes32::from(&account_id);
let _result = wallet_ffi_get_account_public(
wallet_ffi_handle,
(&ffi_account_id) as *const FfiBytes32,
(&mut out_account) as *mut FfiAccount,
);
(&out_account).try_into().unwrap()
};
assert_eq!(
account.program_owner,
Program::authenticated_transfer_program().id()
);
assert_eq!(account.balance, 10000);
assert!(account.data.is_empty());
assert_eq!(account.nonce, 0);
unsafe {
wallet_ffi_free_account_data((&mut out_account) as *mut FfiAccount);
wallet_ffi_destroy(wallet_ffi_handle);
}
info!("Successfully retrieved account with correct details");
Ok(())
}
#[test]
fn test_wallet_ffi_get_public_account_keys() -> Result<()> {
let ctx = BlockingTestContext::new()?;
let account_id: AccountId = ACC_SENDER.parse().unwrap();
let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx);
let mut out_key = FfiPublicAccountKey::default();
let key: PublicKey = unsafe {
let ffi_account_id = FfiBytes32::from(&account_id);
let _result = wallet_ffi_get_public_account_key(
wallet_ffi_handle,
(&ffi_account_id) as *const FfiBytes32,
(&mut out_key) as *mut FfiPublicAccountKey,
);
(&out_key).try_into().unwrap()
};
let expected_key = {
let private_key = ctx
.ctx
.wallet()
.get_account_public_signing_key(&account_id)
.unwrap();
PublicKey::new_from_private_key(private_key)
};
assert_eq!(key, expected_key);
info!("Successfully retrieved account key");
unsafe {
wallet_ffi_destroy(wallet_ffi_handle);
}
Ok(())
}
#[test]
fn test_wallet_ffi_get_private_account_keys() -> Result<()> {
let ctx = BlockingTestContext::new()?;
let account_id: AccountId = ACC_SENDER_PRIVATE.parse().unwrap();
let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx);
let mut keys = FfiPrivateAccountKeys::default();
unsafe {
let ffi_account_id = FfiBytes32::from(&account_id);
let _result = wallet_ffi_get_private_account_keys(
wallet_ffi_handle,
(&ffi_account_id) as *const FfiBytes32,
(&mut keys) as *mut FfiPrivateAccountKeys,
);
};
let key_chain = &ctx
.ctx
.wallet()
.storage()
.user_data
.get_private_account(&account_id)
.unwrap()
.0;
let expected_npk = &key_chain.nullifer_public_key;
let expected_ivk = &key_chain.incoming_viewing_public_key;
assert_eq!(&keys.npk(), expected_npk);
assert_eq!(&keys.ivk().unwrap(), expected_ivk);
unsafe {
wallet_ffi_free_private_account_keys((&mut keys) as *mut FfiPrivateAccountKeys);
wallet_ffi_destroy(wallet_ffi_handle);
}
info!("Successfully retrieved account keys");
Ok(())
}
#[test]
fn test_wallet_ffi_account_id_to_base58() {
let account_id_str = ACC_SENDER;
let account_id: AccountId = account_id_str.parse().unwrap();
let ffi_bytes: FfiBytes32 = (&account_id).into();
let ptr = unsafe { wallet_ffi_account_id_to_base58((&ffi_bytes) as *const FfiBytes32) };
let ffi_result = unsafe { CStr::from_ptr(ptr).to_str().unwrap() };
assert_eq!(account_id_str, ffi_result);
unsafe {
wallet_ffi_free_string(ptr);
}
}
#[test]
fn test_wallet_ffi_base58_to_account_id() {
let account_id_str = ACC_SENDER;
let account_id_c_str = CString::new(account_id_str).unwrap();
let account_id: AccountId = unsafe {
let mut out_account_id_bytes = FfiBytes32::default();
wallet_ffi_account_id_from_base58(
account_id_c_str.as_ptr(),
(&mut out_account_id_bytes) as *mut FfiBytes32,
);
out_account_id_bytes.into()
};
let expected_account_id = account_id_str.parse().unwrap();
assert_eq!(account_id, expected_account_id);
}
#[test]
fn test_wallet_ffi_init_public_account_auth_transfer() -> Result<()> {
let ctx = BlockingTestContext::new().unwrap();
let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx);
// Create a new uninitialized public account
let mut out_account_id = FfiBytes32::from_bytes([0; 32]);
unsafe {
wallet_ffi_create_account_public(
wallet_ffi_handle,
(&mut out_account_id) as *mut FfiBytes32,
);
}
// Check its program owner is the default program id
let account: Account = unsafe {
let mut out_account = FfiAccount::default();
let _result = wallet_ffi_get_account_public(
wallet_ffi_handle,
(&out_account_id) as *const FfiBytes32,
(&mut out_account) as *mut FfiAccount,
);
(&out_account).try_into().unwrap()
};
assert_eq!(account.program_owner, DEFAULT_PROGRAM_ID);
// Call the init function
let mut transfer_result = FfiTransferResult::default();
unsafe {
wallet_ffi_register_public_account(
wallet_ffi_handle,
(&out_account_id) as *const FfiBytes32,
(&mut transfer_result) as *mut FfiTransferResult,
);
}
info!("Waiting for next block creation");
std::thread::sleep(Duration::from_secs(TIME_TO_WAIT_FOR_BLOCK_SECONDS));
// Check that the program owner is now the authenticated transfer program
let account: Account = unsafe {
let mut out_account = FfiAccount::default();
let _result = wallet_ffi_get_account_public(
wallet_ffi_handle,
(&out_account_id) as *const FfiBytes32,
(&mut out_account) as *mut FfiAccount,
);
(&out_account).try_into().unwrap()
};
assert_eq!(
account.program_owner,
Program::authenticated_transfer_program().id()
);
unsafe {
wallet_ffi_free_transfer_result((&mut transfer_result) as *mut FfiTransferResult);
wallet_ffi_destroy(wallet_ffi_handle);
}
Ok(())
}
#[test]
fn test_wallet_ffi_transfer_public() -> Result<()> {
let ctx = BlockingTestContext::new().unwrap();
let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx);
let from: FfiBytes32 = (&ACC_SENDER.parse::<AccountId>().unwrap()).into();
let to: FfiBytes32 = (&ACC_RECEIVER.parse::<AccountId>().unwrap()).into();
let amount: [u8; 16] = 100u128.to_le_bytes();
let mut transfer_result = FfiTransferResult::default();
unsafe {
wallet_ffi_transfer_public(
wallet_ffi_handle,
(&from) as *const FfiBytes32,
(&to) as *const FfiBytes32,
(&amount) as *const [u8; 16],
(&mut transfer_result) as *mut FfiTransferResult,
);
}
info!("Waiting for next block creation");
std::thread::sleep(Duration::from_secs(TIME_TO_WAIT_FOR_BLOCK_SECONDS));
let from_balance = unsafe {
let mut out_balance: [u8; 16] = [0; 16];
let _result = wallet_ffi_get_balance(
wallet_ffi_handle,
(&from) as *const FfiBytes32,
true,
(&mut out_balance) as *mut [u8; 16],
);
u128::from_le_bytes(out_balance)
};
let to_balance = unsafe {
let mut out_balance: [u8; 16] = [0; 16];
let _result = wallet_ffi_get_balance(
wallet_ffi_handle,
(&to) as *const FfiBytes32,
true,
(&mut out_balance) as *mut [u8; 16],
);
u128::from_le_bytes(out_balance)
};
assert_eq!(from_balance, 9900);
assert_eq!(to_balance, 20100);
unsafe {
wallet_ffi_free_transfer_result((&mut transfer_result) as *mut FfiTransferResult);
wallet_ffi_destroy(wallet_ffi_handle);
}
Ok(())
}

View File

@ -2,6 +2,7 @@
name = "key_protocol"
version = "0.1.0"
edition = "2024"
license = { workspace = true }
[dependencies]
secp256k1 = "0.31.1"

View File

@ -2,6 +2,7 @@
name = "mempool"
version = "0.1.0"
edition = "2024"
license = { workspace = true }
[dependencies]
tokio = { workspace = true, features = ["sync"] }

View File

@ -2,6 +2,7 @@
name = "nssa"
version = "0.1.0"
edition = "2024"
license = { workspace = true }
[dependencies]
nssa_core = { workspace = true, features = ["host"] }
@ -23,7 +24,9 @@ risc0-build = "3.0.3"
risc0-binfmt = "3.0.2"
[dev-dependencies]
token_core.workspace = true
test_program_methods.workspace = true
env_logger.workspace = true
hex-literal = "1.0.0"
test-case = "3.3.1"

View File

@ -2,6 +2,7 @@
name = "nssa_core"
version = "0.1.0"
edition = "2024"
license = { workspace = true }
[dependencies]
risc0-zkvm.workspace = true

View File

@ -68,6 +68,10 @@ impl AccountId {
pub fn value(&self) -> &[u8; 32] {
&self.value
}
pub fn into_value(self) -> [u8; 32] {
self.value
}
}
impl AsRef<[u8]> for AccountId {

View File

@ -5,7 +5,10 @@ use serde::{Deserialize, Serialize};
use crate::{NullifierPublicKey, account::Account};
#[derive(Serialize, Deserialize, BorshSerialize, BorshDeserialize)]
#[cfg_attr(any(feature = "host", test), derive(Debug, Clone, PartialEq, Eq, Hash))]
#[cfg_attr(
any(feature = "host", test),
derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)
)]
pub struct Commitment(pub(super) [u8; 32]);
/// A commitment to all zero data.

View File

@ -69,6 +69,11 @@ impl Commitment {
self.0
}
#[cfg(feature = "host")]
pub fn from_byte_array(bytes: [u8; 32]) -> Self {
Self(bytes)
}
#[cfg(feature = "host")]
pub fn from_cursor(cursor: &mut Cursor<&[u8]>) -> Result<Self, NssaCoreError> {
let mut bytes = [0u8; 32];
@ -89,6 +94,11 @@ impl Nullifier {
self.0
}
#[cfg(feature = "host")]
pub fn from_byte_array(bytes: [u8; 32]) -> Self {
Self(bytes)
}
pub fn from_cursor(cursor: &mut Cursor<&[u8]>) -> Result<Self, NssaCoreError> {
let mut bytes = [0u8; 32];
cursor.read_exact(&mut bytes)?;
@ -106,6 +116,16 @@ impl Ciphertext {
bytes
}
#[cfg(feature = "host")]
pub fn into_inner(self) -> Vec<u8> {
self.0
}
#[cfg(feature = "host")]
pub fn from_inner(inner: Vec<u8>) -> Self {
Self(inner)
}
#[cfg(feature = "host")]
pub fn from_cursor(cursor: &mut Cursor<&[u8]>) -> Result<Self, NssaCoreError> {
let mut u32_bytes = [0; 4];

View File

@ -42,7 +42,10 @@ impl From<&NullifierSecretKey> for NullifierPublicKey {
pub type NullifierSecretKey = [u8; 32];
#[derive(Serialize, Deserialize, BorshSerialize, BorshDeserialize)]
#[cfg_attr(any(feature = "host", test), derive(Debug, Clone, PartialEq, Eq, Hash))]
#[cfg_attr(
any(feature = "host", test),
derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)
)]
pub struct Nullifier(pub(super) [u8; 32]);
impl Nullifier {

View File

@ -20,8 +20,7 @@ pub struct ProgramInput<T> {
/// Each program can derive up to `2^256` unique account IDs by choosing different
/// seeds. PDAs allow programs to control namespaced account identifiers without
/// collisions between programs.
#[derive(Serialize, Deserialize, Clone, Eq, PartialEq)]
#[cfg_attr(any(feature = "host", test), derive(Debug))]
#[derive(Debug, Serialize, Deserialize, Clone, Eq, PartialEq)]
pub struct PdaSeed([u8; 32]);
impl PdaSeed {
@ -65,23 +64,44 @@ impl From<(&ProgramId, &PdaSeed)> for AccountId {
}
}
#[derive(Serialize, Deserialize, Clone, PartialEq, Eq)]
#[cfg_attr(any(feature = "host", test), derive(Debug,))]
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)]
pub struct ChainedCall {
/// The program ID of the program to execute
pub program_id: ProgramId,
pub pre_states: Vec<AccountWithMetadata>,
/// The instruction data to pass
pub instruction_data: InstructionData,
pub pre_states: Vec<AccountWithMetadata>,
pub pda_seeds: Vec<PdaSeed>,
}
impl ChainedCall {
/// Creates a new chained call serializing the given instruction.
pub fn new<I: Serialize>(
program_id: ProgramId,
pre_states: Vec<AccountWithMetadata>,
instruction: &I,
) -> Self {
Self {
program_id,
pre_states,
instruction_data: risc0_zkvm::serde::to_vec(instruction)
.expect("Serialization to Vec<u32> should not fail"),
pda_seeds: Vec::new(),
}
}
pub fn with_pda_seeds(mut self, pda_seeds: Vec<PdaSeed>) -> Self {
self.pda_seeds = pda_seeds;
self
}
}
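
To connect the PDA doc comment above with the new `ChainedCall` constructor, here is a sketch (not part of the diff) that derives a program-owned account id and passes its seed along with a chained call. `ChainedCall::new`, `with_pda_seeds`, and the `From<(&ProgramId, &PdaSeed)>` conversion come from this hunk; the import paths are assumptions.

use nssa_core::{
    account::{AccountId, AccountWithMetadata},
    program::{ChainedCall, PdaSeed, ProgramId},
};
use serde::Serialize;

/// Sketch: build a chained call that targets a program-derived account (PDA).
fn chained_call_for_pda<I: Serialize>(
    program_id: ProgramId,
    seed: PdaSeed,
    pre_states: Vec<AccountWithMetadata>,
    instruction: &I,
) -> (AccountId, ChainedCall) {
    // The PDA's id is derived deterministically from (program id, seed),
    // so verifiers can recompute it without extra communication.
    let pda_id: AccountId = (&program_id, &seed).into();
    let call = ChainedCall::new(program_id, pre_states, instruction).with_pda_seeds(vec![seed]);
    (pda_id, call)
}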
/// Represents the final state of an `Account` after a program execution.
/// A post state may optionally request that the executing program
/// becomes the owner of the account (a “claim”). This is used to signal
/// that the program intends to take ownership of the account.
#[derive(Serialize, Deserialize, Clone)]
#[cfg_attr(any(feature = "host", test), derive(Debug, PartialEq, Eq))]
#[derive(Debug, Serialize, Deserialize, Clone)]
#[cfg_attr(any(feature = "host", test), derive(PartialEq, Eq))]
pub struct AccountPostState {
account: Account,
claim: bool,

View File

@ -14,7 +14,7 @@ mod state;
pub use nssa_core::{
SharedSecretKey,
account::{Account, AccountId},
account::{Account, AccountId, Data},
encryption::EphemeralPublicKey,
program::ProgramId,
};

View File

@ -1,3 +1,4 @@
use borsh::{BorshDeserialize, BorshSerialize};
use sha2::{Digest, Sha256};
mod default_values;
@ -20,6 +21,7 @@ fn hash_value(value: &Value) -> Node {
}
#[cfg_attr(test, derive(Debug, PartialEq, Eq))]
#[derive(BorshSerialize, BorshDeserialize)]
pub struct MerkleTree {
nodes: Vec<Node>,
capacity: usize,

View File

@ -20,6 +20,16 @@ use crate::{
#[derive(Debug, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)]
pub struct Proof(pub(crate) Vec<u8>);
impl Proof {
pub fn into_inner(self) -> Vec<u8> {
self.0
}
pub fn from_inner(inner: Vec<u8>) -> Self {
Self(inner)
}
}
#[derive(Clone)]
pub struct ProgramWithDependencies {
pub program: Program,

View File

@ -45,12 +45,12 @@ impl EncryptedAccountData {
#[derive(Debug, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)]
pub struct Message {
pub(crate) public_account_ids: Vec<AccountId>,
pub(crate) nonces: Vec<Nonce>,
pub(crate) public_post_states: Vec<Account>,
pub public_account_ids: Vec<AccountId>,
pub nonces: Vec<Nonce>,
pub public_post_states: Vec<Account>,
pub encrypted_private_post_states: Vec<EncryptedAccountData>,
pub new_commitments: Vec<Commitment>,
pub(crate) new_nullifiers: Vec<(Nullifier, CommitmentSetDigest)>,
pub new_nullifiers: Vec<(Nullifier, CommitmentSetDigest)>,
}
impl Message {

View File

@ -16,7 +16,7 @@ use crate::{
#[derive(Debug, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)]
pub struct PrivacyPreservingTransaction {
pub message: Message,
witness_set: WitnessSet,
pub witness_set: WitnessSet,
}
impl PrivacyPreservingTransaction {

View File

@ -46,4 +46,18 @@ impl WitnessSet {
pub fn proof(&self) -> &Proof {
&self.proof
}
pub fn into_raw_parts(self) -> (Vec<(Signature, PublicKey)>, Proof) {
(self.signatures_and_public_keys, self.proof)
}
pub fn from_raw_parts(
signatures_and_public_keys: Vec<(Signature, PublicKey)>,
proof: Proof,
) -> Self {
Self {
signatures_and_public_keys,
proof,
}
}
}
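The new raw-part accessors make it possible to take a witness set apart, move the pieces across a serialization boundary, and put them back together. A minimal sketch, with illustrative names only:

// Hedged sketch: decompose a WitnessSet and rebuild an equivalent one.
fn roundtrip_witness_set(witness_set: WitnessSet) -> WitnessSet {
    let (signatures_and_public_keys, proof) = witness_set.into_raw_parts();

    // The proof bytes could be stored or transmitted independently here.
    let proof_bytes = proof.into_inner();

    let restored_proof = Proof::from_inner(proof_bytes);
    WitnessSet::from_raw_parts(signatures_and_public_keys, restored_proof)
}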

View File

@ -1,3 +1,4 @@
use borsh::{BorshDeserialize, BorshSerialize};
use nssa_core::{
account::AccountWithMetadata,
program::{InstructionData, ProgramId, ProgramOutput},
@ -14,7 +15,7 @@ use crate::{
/// TODO: Make this limit configurable once fees are implemented
const MAX_NUM_CYCLES_PUBLIC_EXECUTION: u64 = 1024 * 1024 * 32; // 32M cycles
#[derive(Clone, Debug, PartialEq, Eq)]
#[derive(Clone, Debug, PartialEq, Eq, BorshSerialize, BorshDeserialize)]
pub struct Program {
id: ProgramId,
elf: Vec<u8>,

View File

@ -9,4 +9,8 @@ impl Message {
pub fn new(bytecode: Vec<u8>) -> Self {
Self { bytecode }
}
pub fn into_bytecode(self) -> Vec<u8> {
self.bytecode
}
}

View File

@ -14,6 +14,10 @@ impl ProgramDeploymentTransaction {
Self { message }
}
pub fn into_message(self) -> Message {
self.message
}
pub(crate) fn validate_and_produce_public_state_diff(
&self,
state: &V02State,

View File

@ -9,10 +9,10 @@ use crate::{AccountId, error::NssaError, program::Program};
#[derive(Debug, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)]
pub struct Message {
pub(crate) program_id: ProgramId,
pub(crate) account_ids: Vec<AccountId>,
pub(crate) nonces: Vec<Nonce>,
pub(crate) instruction_data: InstructionData,
pub program_id: ProgramId,
pub account_ids: Vec<AccountId>,
pub nonces: Vec<Nonce>,
pub instruction_data: InstructionData,
}
impl Message {

View File

@ -37,6 +37,16 @@ impl WitnessSet {
pub fn signatures_and_public_keys(&self) -> &[(Signature, PublicKey)] {
&self.signatures_and_public_keys
}
pub fn into_raw_parts(self) -> Vec<(Signature, PublicKey)> {
self.signatures_and_public_keys
}
pub fn from_raw_parts(signatures_and_public_keys: Vec<(Signature, PublicKey)>) -> Self {
Self {
signatures_and_public_keys,
}
}
}
#[cfg(test)]

View File

@ -8,7 +8,7 @@ use rand::{RngCore, rngs::OsRng};
#[derive(Debug, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)]
pub struct Signature {
value: [u8; 64],
pub value: [u8; 64],
}
impl Signature {

View File

@ -1,5 +1,6 @@
use std::collections::{HashMap, HashSet};
use std::collections::{BTreeSet, HashMap, HashSet};
use borsh::{BorshDeserialize, BorshSerialize};
use nssa_core::{
Commitment, CommitmentSetDigest, DUMMY_COMMITMENT, MembershipProof, Nullifier,
account::{Account, AccountId},
@ -15,6 +16,8 @@ use crate::{
pub const MAX_NUMBER_CHAINED_CALLS: usize = 10;
#[derive(BorshSerialize, BorshDeserialize)]
#[cfg_attr(test, derive(Debug, PartialEq, Eq))]
pub(crate) struct CommitmentSet {
merkle_tree: MerkleTree,
commitments: HashMap<Commitment, usize>,
@ -60,8 +63,49 @@ impl CommitmentSet {
}
}
type NullifierSet = HashSet<Nullifier>;
#[cfg_attr(test, derive(Debug, PartialEq, Eq))]
struct NullifierSet(BTreeSet<Nullifier>);
impl NullifierSet {
fn new() -> Self {
Self(BTreeSet::new())
}
fn extend(&mut self, new_nullifiers: Vec<Nullifier>) {
self.0.extend(new_nullifiers);
}
fn contains(&self, nullifier: &Nullifier) -> bool {
self.0.contains(nullifier)
}
}
impl BorshSerialize for NullifierSet {
fn serialize<W: std::io::Write>(&self, writer: &mut W) -> std::io::Result<()> {
self.0.iter().collect::<Vec<_>>().serialize(writer)
}
}
impl BorshDeserialize for NullifierSet {
fn deserialize_reader<R: std::io::Read>(reader: &mut R) -> std::io::Result<Self> {
let vec = Vec::<Nullifier>::deserialize_reader(reader)?;
let mut set = BTreeSet::new();
for n in vec {
if !set.insert(n) {
return Err(std::io::Error::new(
std::io::ErrorKind::InvalidData,
"duplicate nullifier in NullifierSet",
));
}
}
Ok(Self(set))
}
}
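The hand-written Borsh impls above encode the set as a sorted Vec and refuse duplicate entries on deserialization. A minimal in-crate test sketch of that behaviour, assuming `Nullifier` values can be constructed in tests (for example via the host-gated `from_byte_array` shown earlier in this diff):

#[test]
fn nullifier_set_borsh_rejects_duplicates() {
    // Two distinct nullifiers round-trip cleanly.
    let a = Nullifier::from_byte_array([1u8; 32]); // assumes test/host availability
    let b = Nullifier::from_byte_array([2u8; 32]);
    let mut set = NullifierSet::new();
    set.extend(vec![a.clone(), b.clone()]);
    let bytes = borsh::to_vec(&set).unwrap();
    assert_eq!(borsh::from_slice::<NullifierSet>(&bytes).unwrap(), set);

    // A hand-crafted payload with a repeated nullifier must be rejected.
    let duplicated = borsh::to_vec(&vec![a.clone(), a]).unwrap();
    assert!(borsh::from_slice::<NullifierSet>(&duplicated).is_err());
}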
#[derive(BorshSerialize, BorshDeserialize)]
#[cfg_attr(test, derive(Debug, PartialEq, Eq))]
pub struct V02State {
public_state: HashMap<AccountId, Account>,
private_state: (CommitmentSet, NullifierSet),
@ -273,6 +317,7 @@ pub mod tests {
encryption::{EphemeralPublicKey, IncomingViewingPublicKey, Scalar},
program::{PdaSeed, ProgramId},
};
use token_core::{TokenDefinition, TokenHolding};
use crate::{
PublicKey, PublicTransaction, V02State,
@ -2284,53 +2329,6 @@ pub mod tests {
));
}
// TODO: repeated code needs to be cleaned up
// from token.rs (also repeated in amm.rs)
const TOKEN_DEFINITION_DATA_SIZE: usize = 55;
const TOKEN_HOLDING_DATA_SIZE: usize = 49;
struct TokenDefinition {
account_type: u8,
name: [u8; 6],
total_supply: u128,
metadata_id: AccountId,
}
struct TokenHolding {
account_type: u8,
definition_id: AccountId,
balance: u128,
}
impl TokenDefinition {
fn into_data(self) -> Data {
let mut bytes = Vec::<u8>::new();
bytes.extend_from_slice(&[self.account_type]);
bytes.extend_from_slice(&self.name);
bytes.extend_from_slice(&self.total_supply.to_le_bytes());
bytes.extend_from_slice(&self.metadata_id.to_bytes());
if bytes.len() != TOKEN_DEFINITION_DATA_SIZE {
panic!("Invalid Token Definition data");
}
Data::try_from(bytes).expect("Token definition data size must fit into data")
}
}
impl TokenHolding {
fn into_data(self) -> Data {
let mut bytes = [0; TOKEN_HOLDING_DATA_SIZE];
bytes[0] = self.account_type;
bytes[1..33].copy_from_slice(&self.definition_id.to_bytes());
bytes[33..].copy_from_slice(&self.balance.to_le_bytes());
bytes
.to_vec()
.try_into()
.expect("33 bytes should fit into Data")
}
}
// TODO: repeated code should ultimately be removed
fn compute_pool_pda(
amm_program_id: ProgramId,
@ -2703,8 +2701,7 @@ pub mod tests {
Account {
program_owner: Program::token().id(),
balance: 0u128,
data: TokenHolding::into_data(TokenHolding {
account_type: 1u8,
data: Data::from(&TokenHolding::Fungible {
definition_id: IdForTests::token_a_definition_id(),
balance: BalanceForTests::user_token_a_holding_init(),
}),
@ -2716,8 +2713,7 @@ pub mod tests {
Account {
program_owner: Program::token().id(),
balance: 0u128,
data: TokenHolding::into_data(TokenHolding {
account_type: 1u8,
data: Data::from(&TokenHolding::Fungible {
definition_id: IdForTests::token_b_definition_id(),
balance: BalanceForTests::user_token_b_holding_init(),
}),
@ -2749,11 +2745,10 @@ pub mod tests {
Account {
program_owner: Program::token().id(),
balance: 0u128,
data: TokenDefinition::into_data(TokenDefinition {
account_type: 0u8,
name: [1u8; 6],
data: Data::from(&TokenDefinition::Fungible {
name: String::from("test"),
total_supply: BalanceForTests::token_a_supply(),
metadata_id: AccountId::new([0; 32]),
metadata_id: None,
}),
nonce: 0,
}
@ -2763,11 +2758,10 @@ pub mod tests {
Account {
program_owner: Program::token().id(),
balance: 0u128,
data: TokenDefinition::into_data(TokenDefinition {
account_type: 0u8,
name: [1u8; 6],
data: Data::from(&TokenDefinition::Fungible {
name: String::from("test"),
total_supply: BalanceForTests::token_b_supply(),
metadata_id: AccountId::new([0; 32]),
metadata_id: None,
}),
nonce: 0,
}
@ -2777,11 +2771,10 @@ pub mod tests {
Account {
program_owner: Program::token().id(),
balance: 0u128,
data: TokenDefinition::into_data(TokenDefinition {
account_type: 0u8,
name: [1u8; 6],
data: Data::from(&TokenDefinition::Fungible {
name: String::from("LP Token"),
total_supply: BalanceForTests::token_lp_supply(),
metadata_id: AccountId::new([0; 32]),
metadata_id: None,
}),
nonce: 0,
}
@ -2791,8 +2784,7 @@ pub mod tests {
Account {
program_owner: Program::token().id(),
balance: 0u128,
data: TokenHolding::into_data(TokenHolding {
account_type: 1u8,
data: Data::from(&TokenHolding::Fungible {
definition_id: IdForTests::token_a_definition_id(),
balance: BalanceForTests::vault_a_balance_init(),
}),
@ -2804,8 +2796,7 @@ pub mod tests {
Account {
program_owner: Program::token().id(),
balance: 0u128,
data: TokenHolding::into_data(TokenHolding {
account_type: 1u8,
data: Data::from(&TokenHolding::Fungible {
definition_id: IdForTests::token_b_definition_id(),
balance: BalanceForTests::vault_b_balance_init(),
}),
@ -2817,8 +2808,7 @@ pub mod tests {
Account {
program_owner: Program::token().id(),
balance: 0u128,
data: TokenHolding::into_data(TokenHolding {
account_type: 1u8,
data: Data::from(&TokenHolding::Fungible {
definition_id: IdForTests::token_lp_definition_id(),
balance: BalanceForTests::user_token_lp_holding_init(),
}),
@ -2830,8 +2820,7 @@ pub mod tests {
Account {
program_owner: Program::token().id(),
balance: 0u128,
data: TokenHolding::into_data(TokenHolding {
account_type: 1u8,
data: Data::from(&TokenHolding::Fungible {
definition_id: IdForTests::token_a_definition_id(),
balance: BalanceForTests::vault_a_balance_swap_1(),
}),
@ -2843,8 +2832,7 @@ pub mod tests {
Account {
program_owner: Program::token().id(),
balance: 0u128,
data: TokenHolding::into_data(TokenHolding {
account_type: 1u8,
data: Data::from(&TokenHolding::Fungible {
definition_id: IdForTests::token_b_definition_id(),
balance: BalanceForTests::vault_b_balance_swap_1(),
}),
@ -2876,8 +2864,7 @@ pub mod tests {
Account {
program_owner: Program::token().id(),
balance: 0u128,
data: TokenHolding::into_data(TokenHolding {
account_type: 1u8,
data: Data::from(&TokenHolding::Fungible {
definition_id: IdForTests::token_a_definition_id(),
balance: BalanceForTests::user_token_a_holding_swap_1(),
}),
@ -2889,8 +2876,7 @@ pub mod tests {
Account {
program_owner: Program::token().id(),
balance: 0u128,
data: TokenHolding::into_data(TokenHolding {
account_type: 1u8,
data: Data::from(&TokenHolding::Fungible {
definition_id: IdForTests::token_b_definition_id(),
balance: BalanceForTests::user_token_b_holding_swap_1(),
}),
@ -2902,8 +2888,7 @@ pub mod tests {
Account {
program_owner: Program::token().id(),
balance: 0u128,
data: TokenHolding::into_data(TokenHolding {
account_type: 1u8,
data: Data::from(&TokenHolding::Fungible {
definition_id: IdForTests::token_a_definition_id(),
balance: BalanceForTests::vault_a_balance_swap_2(),
}),
@ -2915,8 +2900,7 @@ pub mod tests {
Account {
program_owner: Program::token().id(),
balance: 0u128,
data: TokenHolding::into_data(TokenHolding {
account_type: 1u8,
data: Data::from(&TokenHolding::Fungible {
definition_id: IdForTests::token_b_definition_id(),
balance: BalanceForTests::vault_b_balance_swap_2(),
}),
@ -2948,8 +2932,7 @@ pub mod tests {
Account {
program_owner: Program::token().id(),
balance: 0u128,
data: TokenHolding::into_data(TokenHolding {
account_type: 1u8,
data: Data::from(&TokenHolding::Fungible {
definition_id: IdForTests::token_a_definition_id(),
balance: BalanceForTests::user_token_a_holding_swap_2(),
}),
@ -2961,8 +2944,7 @@ pub mod tests {
Account {
program_owner: Program::token().id(),
balance: 0u128,
data: TokenHolding::into_data(TokenHolding {
account_type: 1u8,
data: Data::from(&TokenHolding::Fungible {
definition_id: IdForTests::token_b_definition_id(),
balance: BalanceForTests::user_token_b_holding_swap_2(),
}),
@ -2974,8 +2956,7 @@ pub mod tests {
Account {
program_owner: Program::token().id(),
balance: 0u128,
data: TokenHolding::into_data(TokenHolding {
account_type: 1u8,
data: Data::from(&TokenHolding::Fungible {
definition_id: IdForTests::token_a_definition_id(),
balance: BalanceForTests::vault_a_balance_add(),
}),
@ -2987,8 +2968,7 @@ pub mod tests {
Account {
program_owner: Program::token().id(),
balance: 0u128,
data: TokenHolding::into_data(TokenHolding {
account_type: 1u8,
data: Data::from(&TokenHolding::Fungible {
definition_id: IdForTests::token_b_definition_id(),
balance: BalanceForTests::vault_b_balance_add(),
}),
@ -3020,8 +3000,7 @@ pub mod tests {
Account {
program_owner: Program::token().id(),
balance: 0u128,
data: TokenHolding::into_data(TokenHolding {
account_type: 1u8,
data: Data::from(&TokenHolding::Fungible {
definition_id: IdForTests::token_a_definition_id(),
balance: BalanceForTests::user_token_a_holding_add(),
}),
@ -3033,8 +3012,7 @@ pub mod tests {
Account {
program_owner: Program::token().id(),
balance: 0u128,
data: TokenHolding::into_data(TokenHolding {
account_type: 1u8,
data: Data::from(&TokenHolding::Fungible {
definition_id: IdForTests::token_b_definition_id(),
balance: BalanceForTests::user_token_b_holding_add(),
}),
@ -3046,8 +3024,7 @@ pub mod tests {
Account {
program_owner: Program::token().id(),
balance: 0u128,
data: TokenHolding::into_data(TokenHolding {
account_type: 1u8,
data: Data::from(&TokenHolding::Fungible {
definition_id: IdForTests::token_lp_definition_id(),
balance: BalanceForTests::user_token_lp_holding_add(),
}),
@ -3059,11 +3036,10 @@ pub mod tests {
Account {
program_owner: Program::token().id(),
balance: 0u128,
data: TokenDefinition::into_data(TokenDefinition {
account_type: 0u8,
name: [1u8; 6],
data: Data::from(&TokenDefinition::Fungible {
name: String::from("LP Token"),
total_supply: BalanceForTests::token_lp_supply_add(),
metadata_id: AccountId::new([0; 32]),
metadata_id: None,
}),
nonce: 0,
}
@ -3073,8 +3049,7 @@ pub mod tests {
Account {
program_owner: Program::token().id(),
balance: 0u128,
data: TokenHolding::into_data(TokenHolding {
account_type: 1u8,
data: Data::from(&TokenHolding::Fungible {
definition_id: IdForTests::token_a_definition_id(),
balance: BalanceForTests::vault_a_balance_remove(),
}),
@ -3086,8 +3061,7 @@ pub mod tests {
Account {
program_owner: Program::token().id(),
balance: 0u128,
data: TokenHolding::into_data(TokenHolding {
account_type: 1u8,
data: Data::from(&TokenHolding::Fungible {
definition_id: IdForTests::token_b_definition_id(),
balance: BalanceForTests::vault_b_balance_remove(),
}),
@ -3119,8 +3093,7 @@ pub mod tests {
Account {
program_owner: Program::token().id(),
balance: 0u128,
data: TokenHolding::into_data(TokenHolding {
account_type: 1u8,
data: Data::from(&TokenHolding::Fungible {
definition_id: IdForTests::token_a_definition_id(),
balance: BalanceForTests::user_token_a_holding_remove(),
}),
@ -3132,8 +3105,7 @@ pub mod tests {
Account {
program_owner: Program::token().id(),
balance: 0u128,
data: TokenHolding::into_data(TokenHolding {
account_type: 1u8,
data: Data::from(&TokenHolding::Fungible {
definition_id: IdForTests::token_b_definition_id(),
balance: BalanceForTests::user_token_b_holding_remove(),
}),
@ -3145,8 +3117,7 @@ pub mod tests {
Account {
program_owner: Program::token().id(),
balance: 0u128,
data: TokenHolding::into_data(TokenHolding {
account_type: 1u8,
data: Data::from(&TokenHolding::Fungible {
definition_id: IdForTests::token_lp_definition_id(),
balance: BalanceForTests::user_token_lp_holding_remove(),
}),
@ -3158,11 +3129,10 @@ pub mod tests {
Account {
program_owner: Program::token().id(),
balance: 0u128,
data: TokenDefinition::into_data(TokenDefinition {
account_type: 0u8,
name: [1u8; 6],
data: Data::from(&TokenDefinition::Fungible {
name: String::from("LP Token"),
total_supply: BalanceForTests::token_lp_supply_remove(),
metadata_id: AccountId::new([0; 32]),
metadata_id: None,
}),
nonce: 0,
}
@ -3172,11 +3142,10 @@ pub mod tests {
Account {
program_owner: Program::token().id(),
balance: 0u128,
data: TokenDefinition::into_data(TokenDefinition {
account_type: 0u8,
name: [1u8; 6],
data: Data::from(&TokenDefinition::Fungible {
name: String::from("LP Token"),
total_supply: 0,
metadata_id: AccountId::new([0; 32]),
metadata_id: None,
}),
nonce: 0,
}
@ -3186,8 +3155,7 @@ pub mod tests {
Account {
program_owner: Program::token().id(),
balance: 0u128,
data: TokenHolding::into_data(TokenHolding {
account_type: 1u8,
data: Data::from(&TokenHolding::Fungible {
definition_id: IdForTests::token_a_definition_id(),
balance: 0,
}),
@ -3199,8 +3167,7 @@ pub mod tests {
Account {
program_owner: Program::token().id(),
balance: 0u128,
data: TokenHolding::into_data(TokenHolding {
account_type: 1u8,
data: Data::from(&TokenHolding::Fungible {
definition_id: IdForTests::token_b_definition_id(),
balance: 0,
}),
@ -3232,8 +3199,7 @@ pub mod tests {
Account {
program_owner: Program::token().id(),
balance: 0u128,
data: TokenHolding::into_data(TokenHolding {
account_type: 1u8,
data: Data::from(&TokenHolding::Fungible {
definition_id: IdForTests::token_a_definition_id(),
balance: BalanceForTests::user_token_a_holding_new_definition(),
}),
@ -3245,8 +3211,7 @@ pub mod tests {
Account {
program_owner: Program::token().id(),
balance: 0u128,
data: TokenHolding::into_data(TokenHolding {
account_type: 1u8,
data: Data::from(&TokenHolding::Fungible {
definition_id: IdForTests::token_b_definition_id(),
balance: BalanceForTests::user_token_b_holding_new_definition(),
}),
@ -3258,8 +3223,7 @@ pub mod tests {
Account {
program_owner: Program::token().id(),
balance: 0u128,
data: TokenHolding::into_data(TokenHolding {
account_type: 1u8,
data: Data::from(&TokenHolding::Fungible {
definition_id: IdForTests::token_lp_definition_id(),
balance: BalanceForTests::user_token_a_holding_new_definition(),
}),
@ -3271,11 +3235,10 @@ pub mod tests {
Account {
program_owner: Program::token().id(),
balance: 0u128,
data: TokenDefinition::into_data(TokenDefinition {
account_type: 0u8,
name: [1u8; 6],
data: Data::from(&TokenDefinition::Fungible {
name: String::from("LP Token"),
total_supply: BalanceForTests::vault_a_balance_init(),
metadata_id: AccountId::new([0; 32]),
metadata_id: None,
}),
nonce: 0,
}
@ -3305,8 +3268,7 @@ pub mod tests {
Account {
program_owner: Program::token().id(),
balance: 0u128,
data: TokenHolding::into_data(TokenHolding {
account_type: 1u8,
data: Data::from(&TokenHolding::Fungible {
definition_id: IdForTests::token_lp_definition_id(),
balance: 0,
}),
@ -4071,13 +4033,13 @@ pub mod tests {
let pinata_token_holding_id = AccountId::from((&pinata_token.id(), &PdaSeed::new([0; 32])));
let winner_token_holding_id = AccountId::new([3; 32]);
let mut expected_winner_account_data = [0; 49];
expected_winner_account_data[0] = 1;
expected_winner_account_data[1..33].copy_from_slice(pinata_token_definition_id.value());
expected_winner_account_data[33..].copy_from_slice(&150u128.to_le_bytes());
let expected_winner_account_holding = token_core::TokenHolding::Fungible {
definition_id: pinata_token_definition_id,
balance: 150,
};
let expected_winner_token_holding_post = Account {
program_owner: token.id(),
data: expected_winner_account_data.to_vec().try_into().unwrap(),
data: Data::from(&expected_winner_account_holding),
..Account::default()
};
@ -4087,10 +4049,10 @@ pub mod tests {
// Execution of the token program to create a new token for the pinata token
// definition and supply accounts
let total_supply: u128 = 10_000_000;
// instruction: [0x00 || total_supply (little-endian 16 bytes) || name (6 bytes)]
let mut instruction = vec![0; 23];
instruction[1..17].copy_from_slice(&total_supply.to_le_bytes());
instruction[17..].copy_from_slice(b"PINATA");
let instruction = token_core::Instruction::NewFungibleDefinition {
name: String::from("PINATA"),
total_supply,
};
let message = public_transaction::Message::try_new(
token.id(),
vec![pinata_token_definition_id, pinata_token_holding_id],
@ -4102,9 +4064,8 @@ pub mod tests {
let tx = PublicTransaction::new(message, witness_set);
state.transition_from_public_transaction(&tx).unwrap();
// Execution of the token program transfer just to initialize the winner token account
let mut instruction = vec![0; 23];
instruction[0] = 2;
// Initialize the winner's token holding account
let instruction = token_core::Instruction::InitializeAccount;
let message = public_transaction::Message::try_new(
token.id(),
vec![pinata_token_definition_id, winner_token_holding_id],
@ -4528,4 +4489,15 @@ pub mod tests {
// Assert - should fail because the malicious program tries to manipulate is_authorized
assert!(matches!(result, Err(NssaError::CircuitProvingError(_))));
}
#[test]
fn test_state_serialization_roundtrip() {
let account_id_1 = AccountId::new([1; 32]);
let account_id_2 = AccountId::new([2; 32]);
let initial_data = [(account_id_1, 100u128), (account_id_2, 151u128)];
let state = V02State::new_with_genesis_accounts(&initial_data, &[]).with_test_programs();
let bytes = borsh::to_vec(&state).unwrap();
let state_from_bytes: V02State = borsh::from_slice(&bytes).unwrap();
assert_eq!(state, state_from_bytes);
}
}

View File

@ -2,6 +2,7 @@
name = "program_methods"
version = "0.1.0"
edition = "2024"
license = { workspace = true }
[build-dependencies]
risc0-build.workspace = true

View File

@ -2,9 +2,11 @@
name = "programs"
version = "0.1.0"
edition = "2024"
license = { workspace = true }
[dependencies]
nssa_core.workspace = true
token_core.workspace = true
token_program.workspace = true
risc0-zkvm.workspace = true
serde = { workspace = true, default-features = false }

File diff suppressed because it is too large Load Diff

View File

@ -5,10 +5,7 @@ use nssa_core::{
write_nssa_outputs_with_chained_call,
},
};
use risc0_zkvm::{
serde::to_vec,
sha::{Impl, Sha256},
};
use risc0_zkvm::sha::{Impl, Sha256};
const PRIZE: u128 = 150;
@ -82,23 +79,21 @@ fn main() {
let winner_token_holding_post = winner_token_holding.account.clone();
pinata_definition_post.data = data.next_data();
let mut instruction_data = vec![0; 23];
instruction_data[0] = 1;
instruction_data[1..17].copy_from_slice(&PRIZE.to_le_bytes());
// Flip authorization to true for chained call
let mut pinata_token_holding_for_chain_call = pinata_token_holding.clone();
pinata_token_holding_for_chain_call.is_authorized = true;
let chained_calls = vec![ChainedCall {
program_id: pinata_token_holding_post.program_owner,
instruction_data: to_vec(&instruction_data).unwrap(),
pre_states: vec![
let chained_call = ChainedCall::new(
pinata_token_holding_post.program_owner,
vec![
pinata_token_holding_for_chain_call,
winner_token_holding.clone(),
],
pda_seeds: vec![PdaSeed::new([0; 32])],
}];
&token_core::Instruction::Transfer {
amount_to_transfer: PRIZE,
},
)
.with_pda_seeds(vec![PdaSeed::new([0; 32])]);
write_nssa_outputs_with_chained_call(
instruction_words,
@ -112,6 +107,6 @@ fn main() {
AccountPostState::new(pinata_token_holding_post),
AccountPostState::new(winner_token_holding_post),
],
chained_calls,
vec![chained_call],
);
}

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,9 @@
[package]
name = "token_program"
version = "0.1.0"
edition = "2024"
license = { workspace = true }
[dependencies]
nssa_core.workspace = true
token_core.workspace = true

View File

@ -0,0 +1,10 @@
[package]
name = "token_core"
version = "0.1.0"
edition = "2024"
license = { workspace = true }
[dependencies]
nssa_core.workspace = true
serde.workspace = true
borsh.workspace = true

Some files were not shown because too many files have changed in this diff Show More