Merge branch 'main' into marvin/nonce

This commit is contained in:
jonesmarvin8 2026-03-16 11:22:24 -04:00
commit dd64f0b1f8
61 changed files with 1391 additions and 303 deletions

2
Cargo.lock generated
View File

@ -8989,9 +8989,9 @@ dependencies = [
"amm_core",
"anyhow",
"async-stream",
"base58",
"base64 0.22.1",
"borsh",
"bytemuck",
"clap",
"common",
"env_logger",

View File

@ -37,9 +37,15 @@ run-sequencer:
# Run Indexer
[working-directory: 'indexer/service']
run-indexer:
run-indexer mock="":
@echo "🔍 Running indexer"
RUST_LOG=info RISC0_DEV_MODE=1 cargo run --release -p indexer_service configs/indexer_config.json
@if [ "{{mock}}" = "mock" ]; then \
echo "🧪 Using mock data"; \
RUST_LOG=info RISC0_DEV_MODE=1 cargo run --release --features mock-responses -p indexer_service configs/indexer_config.json; \
else \
echo "🚀 Using real data"; \
RUST_LOG=info RISC0_DEV_MODE=1 cargo run --release -p indexer_service configs/indexer_config.json; \
fi
# Run Explorer
[working-directory: 'explorer_service']
@ -58,4 +64,6 @@ clean:
@echo "🧹 Cleaning run artifacts"
rm -rf sequencer_runner/bedrock_signing_key
rm -rf sequencer_runner/rocksdb
rm -rf indexer/service/rocksdb
rm -rf wallet/configs/debug/storage.json
cd bedrock && docker compose down -v

View File

@ -69,6 +69,7 @@ Both public and private executions use the same Risc0 VM bytecode. Public transa
This design keeps public transactions as fast as any RISC-Vbased VM and makes private transactions efficient for validators. It also supports parallel execution similar to Solana, improving throughput. The main computational cost for privacy-preserving transactions is on the user side, where ZK proofs are generated.
---
---
---
@ -130,29 +131,31 @@ RUST_LOG=info RISC0_DEV_MODE=1 cargo run $(pwd)/configs/debug all
```
# Run the sequencer and node
## Running Manually
### Normal mode
The sequencer and logos blockchain node can be run locally:
1. On one terminal go to the `logos-blockchain/logos-blockchain` repo and run a local logos blockchain node:
- `git checkout master; git pull`
- `cargo clean`
- `rm -r ~/.logos-blockchain-circuits`
- `./scripts/setup-logos-blockchain-circuits.sh`
- `cargo build --all-features`
- `./target/debug/logos-blockchain-node --deployment nodes/node/standalone-deployment-config.yaml nodes/node/standalone-node-config.yaml`
2. Alternatively (WARNING: This node is outdated) go to ``logos-blockchain/lssa/` repo and run the node from docker:
- `cd bedrock`
- Change line 14 of `docker-compose.yml` from `"0:18080/tcp"` into `"8080:18080/tcp"`
- `docker compose up`
3. On another terminal go to the `logos-blockchain/lssa` repo and run indexer service:
- `git checkout master; git pull`
- `cargo clean`
- `rm -r ~/.logos-blockchain-circuits`
- `./scripts/setup-logos-blockchain-circuits.sh`
- `cargo build --all-features`
- `./target/debug/logos-blockchain-node --deployment nodes/node/standalone-deployment-config.yaml nodes/node/standalone-node-config.yaml`
- Alternatively (WARNING: This node is outdated) go to `logos-blockchain/lssa/` repo and run the node from docker:
- `cd bedrock`
- Change line 14 of `docker-compose.yml` from `"0:18080/tcp"` into `"8080:18080/tcp"`
- `docker compose up`
2. On another terminal go to the `logos-blockchain/lssa` repo and run indexer service:
- `RUST_LOG=info cargo run -p indexer_service indexer/service/configs/indexer_config.json`
4. On another terminal go to the `logos-blockchain/lssa` repo and run the sequencer:
3. On another terminal go to the `logos-blockchain/lssa` repo and run the sequencer:
- `RUST_LOG=info cargo run -p sequencer_runner sequencer_runner/configs/debug`
4. (To run the explorer): on another terminal go to `logos-blockchain/lssa/explorer_service` and run the following:
- `cargo install cargo-leptos`
- `cargo leptos build --release`
- `cargo leptos serve --release`
### Notes on cleanup

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@ -17,7 +17,6 @@ pub mod test_utils;
pub const PINATA_BASE58: &str = "EfQhKQAkX2FJiwNii2WFQsGndjvF1Mzd7RuVe7QdPLw7";
#[derive(
Debug,
Default,
Copy,
Clone,
@ -37,6 +36,12 @@ impl Display for HashType {
}
}
impl std::fmt::Debug for HashType {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", hex::encode(self.0))
}
}
impl FromStr for HashType {
type Err = hex::FromHexError;

View File

@ -1,6 +1,6 @@
# Wallet CLI Completion
Completion scripts for the LSSA `wallet` command.
Completion scripts for the LSSA `wallet` command.
## ZSH
@ -19,9 +19,9 @@ Preconfigured accounts and accounts only with `/` (no number) are not completed.
e.g.:
```
▶ wallet account list
Preconfigured Public/Gj1mJy5W7J5pfmLRujmQaLfLMWidNxQ6uwnhb666ZwHw,
Preconfigured Public/BLgCRDXYdQPMMWVHYRFGQZbgeHx9frkipa8GtpG2Syqy,
▶ wallet account list
Preconfigured Public/7wHg9sbJwc6h3NP1S9bekfAzB8CHifEcxKswCKUt3YQo,
Preconfigured Public/6iArKUXxhUJqS7kCaPNhwMWt3ro71PDyBj7jwAyE2VQV,
Preconfigured Private/3oCG8gqdKLMegw4rRfyaMQvuPHpcASt7xwttsmnZLSkw,
Preconfigured Private/AKTcXgJ1xoynta1Ec7y6Jso1z1JQtHqd7aPQ1h9er6xX,
/ Public/8DstRgMQrB2N9a7ymv98RDDbt8nctrP9ZzaNRSpKDZSu,
@ -118,9 +118,80 @@ wallet account get --account-id <TAB>
# Shows: Public/... Private/...
```
## Bash
Works with bash 4+. The `bash-completion` package is required for auto-sourcing from
`/etc/bash_completion.d/`; without it, source the file directly from `~/.bashrc` instead.
### Features
- Full completion for all wallet subcommands
- Contextual option completion for each command
- Dynamic account ID completion via `wallet account list`
- Falls back to `Public/` / `Private/` prefixes when no accounts are available
Note that only accounts created by the user auto-complete (same filtering as zsh — see above).
### Installation
#### Option A — source directly from `~/.bashrc` (works everywhere)
```sh
echo "source $(pwd)/completions/bash/wallet" >> ~/.bashrc
exec bash
```
#### Option B — system-wide via `bash-completion`
1. Copy the file:
```sh
cp ./bash/wallet /etc/bash_completion.d/wallet
```
2. Ensure `bash-completion` is initialised in every interactive shell. On many Linux
distributions (e.g. Fedora) it is only sourced for **login** shells via
`/etc/profile.d/bash_completion.sh`. For non-login shells (e.g. a bash session started
inside zsh), add this to `~/.bashrc`:
```sh
[[ -f /usr/share/bash-completion/bash_completion ]] && source /usr/share/bash-completion/bash_completion
```
3. Reload your shell:
```sh
exec bash
```
### Requirements
The completion script calls `wallet account list` to dynamically fetch account IDs. Ensure the `wallet` command is in your `$PATH`.
### Usage
```sh
# Main commands
wallet <TAB>
# Account subcommands
wallet account <TAB>
# Options for auth-transfer send
wallet auth-transfer send --<TAB>
# Account types when creating
wallet account new <TAB>
# Shows: public private
# Account IDs (fetched dynamically)
wallet account get --account-id <TAB>
# Shows: Public/... Private/...
```
## Troubleshooting
### Completions not appearing
### Zsh completions not appearing
1. Check that `compinit` is called in your `.zshrc`
2. Rebuild the completion cache:

382
completions/bash/wallet Normal file
View File

@ -0,0 +1,382 @@
#!/usr/bin/env bash
# Bash completion script for the wallet CLI
# See instructions in ../README.md
# Helper function to complete account IDs
# Uses `wallet account list` to get available accounts
# Only includes accounts with /N prefix (where N is a number)
_wallet_complete_account_id() {
local cur="$1"
local accounts
if command -v wallet &>/dev/null; then
accounts=$(wallet account list 2>/dev/null | grep '^/[0-9]' | awk '{print $2}' | tr -d ',')
fi
if [[ -n "$accounts" ]]; then
COMPREPLY=($(compgen -W "$accounts" -- "$cur"))
else
COMPREPLY=($(compgen -W "Public/ Private/" -- "$cur"))
compopt -o nospace 2>/dev/null
fi
}
_wallet() {
local cur prev words cword
_init_completion 2>/dev/null || {
COMPREPLY=()
cur="${COMP_WORDS[COMP_CWORD]}"
prev="${COMP_WORDS[COMP_CWORD-1]}"
words=("${COMP_WORDS[@]}")
cword=$COMP_CWORD
}
local commands="auth-transfer chain-info account pinata token amm check-health config restore-keys deploy-program help"
# Find the main command and subcommand by scanning words before the cursor.
# Global options that take a value are skipped along with their argument.
local cmd="" subcmd=""
local cmd_idx=0 subcmd_idx=0
local i
for ((i = 1; i < cword; i++)); do
local w="${words[$i]}"
case "$w" in
--auth)
((i++)) # skip the auth value
;;
-c | --continuous-run)
# boolean flag, no value
;;
-*)
# unrecognised option, skip
;;
*)
if [[ -z "$cmd" ]]; then
cmd="$w"
cmd_idx=$i
elif [[ -z "$subcmd" ]]; then
subcmd="$w"
subcmd_idx=$i
fi
;;
esac
done
local config_keys="override_rust_log sequencer_addr seq_poll_timeout seq_tx_poll_max_blocks seq_poll_max_retries seq_block_poll_max_amount initial_accounts basic_auth"
case "$cmd" in
"")
# Completing the main command or a global option
if [[ "$prev" == "--auth" ]]; then
return # completing the --auth value; no suggestions
fi
case "$cur" in
-*)
COMPREPLY=($(compgen -W "-c --continuous-run --auth" -- "$cur"))
;;
*)
COMPREPLY=($(compgen -W "$commands" -- "$cur"))
;;
esac
;;
auth-transfer)
case "$subcmd" in
"")
COMPREPLY=($(compgen -W "init send help" -- "$cur"))
;;
init)
case "$prev" in
--account-id)
_wallet_complete_account_id "$cur"
;;
*)
COMPREPLY=($(compgen -W "--account-id" -- "$cur"))
;;
esac
;;
send)
case "$prev" in
--from | --to)
_wallet_complete_account_id "$cur"
;;
--to-npk | --to-vpk | --amount)
;; # no specific completion
*)
COMPREPLY=($(compgen -W "--from --to --to-npk --to-vpk --amount" -- "$cur"))
;;
esac
;;
esac
;;
chain-info)
case "$subcmd" in
"")
COMPREPLY=($(compgen -W "current-block-id block transaction help" -- "$cur"))
;;
block)
case "$prev" in
-i | --id)
;; # no specific completion for block ID
*)
COMPREPLY=($(compgen -W "-i --id" -- "$cur"))
;;
esac
;;
transaction)
case "$prev" in
-t | --hash)
;; # no specific completion for tx hash
*)
COMPREPLY=($(compgen -W "-t --hash" -- "$cur"))
;;
esac
;;
esac
;;
account)
case "$subcmd" in
"")
COMPREPLY=($(compgen -W "get new sync-private list ls label help" -- "$cur"))
;;
get)
case "$prev" in
-a | --account-id)
_wallet_complete_account_id "$cur"
;;
*)
COMPREPLY=($(compgen -W "-r --raw -k --keys -a --account-id" -- "$cur"))
;;
esac
;;
list | ls)
COMPREPLY=($(compgen -W "-l --long" -- "$cur"))
;;
sync-private)
;; # no options
new)
# `account new` is itself a subcommand: public | private
local new_subcmd=""
for ((i = subcmd_idx + 1; i < cword; i++)); do
case "${words[$i]}" in
public | private)
new_subcmd="${words[$i]}"
break
;;
esac
done
if [[ -z "$new_subcmd" ]]; then
COMPREPLY=($(compgen -W "public private" -- "$cur"))
else
case "$prev" in
--cci | -l | --label)
;; # no specific completion
*)
COMPREPLY=($(compgen -W "--cci -l --label" -- "$cur"))
;;
esac
fi
;;
label)
case "$prev" in
-a | --account-id)
_wallet_complete_account_id "$cur"
;;
-l | --label)
;; # no specific completion for label value
*)
COMPREPLY=($(compgen -W "-a --account-id -l --label" -- "$cur"))
;;
esac
;;
esac
;;
pinata)
case "$subcmd" in
"")
COMPREPLY=($(compgen -W "claim help" -- "$cur"))
;;
claim)
case "$prev" in
--to)
_wallet_complete_account_id "$cur"
;;
*)
COMPREPLY=($(compgen -W "--to" -- "$cur"))
;;
esac
;;
esac
;;
token)
case "$subcmd" in
"")
COMPREPLY=($(compgen -W "new send burn mint help" -- "$cur"))
;;
new)
case "$prev" in
--definition-account-id | --supply-account-id)
_wallet_complete_account_id "$cur"
;;
-n | --name | -t | --total-supply)
;; # no specific completion
*)
COMPREPLY=($(compgen -W "--definition-account-id --supply-account-id -n --name -t --total-supply" -- "$cur"))
;;
esac
;;
send)
case "$prev" in
--from | --to)
_wallet_complete_account_id "$cur"
;;
--to-npk | --to-vpk | --amount)
;; # no specific completion
*)
COMPREPLY=($(compgen -W "--from --to --to-npk --to-vpk --amount" -- "$cur"))
;;
esac
;;
burn)
case "$prev" in
--definition | --holder)
_wallet_complete_account_id "$cur"
;;
--amount)
;; # no specific completion
*)
COMPREPLY=($(compgen -W "--definition --holder --amount" -- "$cur"))
;;
esac
;;
mint)
case "$prev" in
--definition | --holder)
_wallet_complete_account_id "$cur"
;;
--holder-npk | --holder-vpk | --amount)
;; # no specific completion
*)
COMPREPLY=($(compgen -W "--definition --holder --holder-npk --holder-vpk --amount" -- "$cur"))
;;
esac
;;
esac
;;
amm)
case "$subcmd" in
"")
COMPREPLY=($(compgen -W "new swap add-liquidity remove-liquidity help" -- "$cur"))
;;
new)
case "$prev" in
--user-holding-a | --user-holding-b | --user-holding-lp)
_wallet_complete_account_id "$cur"
;;
--balance-a | --balance-b)
;; # no specific completion
*)
COMPREPLY=($(compgen -W "--user-holding-a --user-holding-b --user-holding-lp --balance-a --balance-b" -- "$cur"))
;;
esac
;;
swap)
case "$prev" in
--user-holding-a | --user-holding-b)
_wallet_complete_account_id "$cur"
;;
--amount-in | --min-amount-out | --token-definition)
;; # no specific completion
*)
COMPREPLY=($(compgen -W "--user-holding-a --user-holding-b --amount-in --min-amount-out --token-definition" -- "$cur"))
;;
esac
;;
add-liquidity)
case "$prev" in
--user-holding-a | --user-holding-b | --user-holding-lp)
_wallet_complete_account_id "$cur"
;;
--max-amount-a | --max-amount-b | --min-amount-lp)
;; # no specific completion
*)
COMPREPLY=($(compgen -W "--user-holding-a --user-holding-b --user-holding-lp --max-amount-a --max-amount-b --min-amount-lp" -- "$cur"))
;;
esac
;;
remove-liquidity)
case "$prev" in
--user-holding-a | --user-holding-b | --user-holding-lp)
_wallet_complete_account_id "$cur"
;;
--balance-lp | --min-amount-a | --min-amount-b)
;; # no specific completion
*)
COMPREPLY=($(compgen -W "--user-holding-a --user-holding-b --user-holding-lp --balance-lp --min-amount-a --min-amount-b" -- "$cur"))
;;
esac
;;
esac
;;
config)
case "$subcmd" in
"")
COMPREPLY=($(compgen -W "get set description help" -- "$cur"))
;;
get)
# Accepts optional -a/--all flag and an optional positional key
COMPREPLY=($(compgen -W "--all -a $config_keys" -- "$cur"))
;;
set)
# set <key> <value> — only complete the key; no completion for the value
local set_args=0
for ((i = subcmd_idx + 1; i < cword; i++)); do
[[ "${words[$i]}" != -* ]] && ((set_args++))
done
if [[ $set_args -eq 0 ]]; then
COMPREPLY=($(compgen -W "$config_keys" -- "$cur"))
fi
;;
description)
# description <key> — only complete if no key provided yet
local has_key=false
for ((i = subcmd_idx + 1; i < cword; i++)); do
[[ "${words[$i]}" != -* ]] && has_key=true && break
done
if ! $has_key; then
COMPREPLY=($(compgen -W "$config_keys" -- "$cur"))
fi
;;
esac
;;
restore-keys)
case "$prev" in
-d | --depth)
;; # no specific completion for depth value
*)
COMPREPLY=($(compgen -W "-d --depth" -- "$cur"))
;;
esac
;;
deploy-program)
COMPREPLY=($(compgen -f -- "$cur"))
compopt -o filenames 2>/dev/null
;;
help)
COMPREPLY=($(compgen -W "$commands" -- "$cur"))
;;
esac
}
complete -F _wallet wallet

View File

@ -181,7 +181,8 @@ _wallet_account() {
;;
new_args)
_arguments \
'--cci[Chain index of a parent node]:chain_index:'
'--cci[Chain index of a parent node]:chain_index:' \
'(-l --label)'{-l,--label}'[Label to assign to the new account]:label:'
;;
esac
;;
@ -343,7 +344,6 @@ _wallet_config() {
local -a config_keys
config_keys=(
'all'
'override_rust_log'
'sequencer_addr'
'seq_poll_timeout'
@ -370,7 +370,12 @@ _wallet_config() {
;;
args)
case $line[1] in
get|description)
get)
_arguments \
'(-a --all)'{-a,--all}'[Print all config fields]' \
'::key:compadd -a config_keys'
;;
description)
compadd -a config_keys
;;
set)

View File

@ -1,6 +1,6 @@
{
"home": "./indexer/service",
"consensus_info_polling_interval": "60s",
"consensus_info_polling_interval": "1s",
"bedrock_client_config": {
"addr": "http://logos-blockchain-node-0:18080",
"backoff": {
@ -11,50 +11,50 @@
"channel_id": "0101010101010101010101010101010101010101010101010101010101010101",
"initial_accounts": [
{
"account_id": "BLgCRDXYdQPMMWVHYRFGQZbgeHx9frkipa8GtpG2Syqy",
"account_id": "6iArKUXxhUJqS7kCaPNhwMWt3ro71PDyBj7jwAyE2VQV",
"balance": 10000
},
{
"account_id": "Gj1mJy5W7J5pfmLRujmQaLfLMWidNxQ6uwnhb666ZwHw",
"account_id": "7wHg9sbJwc6h3NP1S9bekfAzB8CHifEcxKswCKUt3YQo",
"balance": 20000
}
],
"initial_commitments": [
{
"npk": [
63,
202,
178,
231,
183,
82,
237,
212,
216,
221,
215,
255,
153,
101,
"npk":[
177,
161,
254,
210,
128,
122,
54,
190,
230,
151,
183,
64,
225,
229,
113,
1,
228,
97
],
11,
87,
38,
254,
159,
231,
165,
1,
94,
64,
137,
243,
76,
249,
101,
251,
129,
33,
101,
189,
30,
42,
11,
191,
34,
103,
186,
227,
230
] ,
"account": {
"program_owner": [
0,
@ -73,38 +73,38 @@
},
{
"npk": [
192,
251,
166,
243,
167,
236,
84,
249,
35,
32,
67,
72,
164,
106,
53,
66,
239,
141,
15,
52,
230,
136,
130,
172,
219,
225,
161,
139,
229,
89,
177,
2,
236,
207,
243,
125,
134,
135,
210,
143,
87,
232,
215,
128,
194,
213,
209,
30,
23,
174,
100,
244,
124,
74,
140,
47
120,
113,
224,
4,
165
],
"account": {
"program_owner": [

View File

@ -20,17 +20,50 @@
"indexer_rpc_url": "ws://indexer_service:8779",
"initial_accounts": [
{
"account_id": "BLgCRDXYdQPMMWVHYRFGQZbgeHx9frkipa8GtpG2Syqy",
"account_id": "6iArKUXxhUJqS7kCaPNhwMWt3ro71PDyBj7jwAyE2VQV",
"balance": 10000
},
{
"account_id": "Gj1mJy5W7J5pfmLRujmQaLfLMWidNxQ6uwnhb666ZwHw",
"account_id": "7wHg9sbJwc6h3NP1S9bekfAzB8CHifEcxKswCKUt3YQo",
"balance": 20000
}
],
"initial_commitments": [
{
"npk": [13, 25, 40, 5, 198, 248, 210, 248, 237, 121, 124, 145, 186, 142, 253, 216, 236, 69, 193, 32, 166, 167, 49, 133, 172, 111, 159, 46, 84, 17, 157, 23],
"npk":[
177,
64,
1,
11,
87,
38,
254,
159,
231,
165,
1,
94,
64,
137,
243,
76,
249,
101,
251,
129,
33,
101,
189,
30,
42,
11,
191,
34,
103,
186,
227,
230
] ,
"account": {
"program_owner": [
0,
@ -48,7 +81,40 @@
}
},
{
"npk": [32, 67, 72, 164, 106, 53, 66, 239, 141, 15, 52, 230, 136, 177, 2, 236, 207, 243, 134, 135, 210, 143, 87, 232, 215, 128, 194, 120, 113, 224, 4, 165],
"npk": [
32,
67,
72,
164,
106,
53,
66,
239,
141,
15,
52,
230,
136,
177,
2,
236,
207,
243,
134,
135,
210,
143,
87,
232,
215,
128,
194,
120,
113,
224,
4,
165
],
"account": {
"program_owner": [
0,

View File

@ -118,11 +118,11 @@ pub async fn get_transaction(tx_hash: HashType) -> Result<Transaction, ServerFnE
/// Get blocks with pagination
#[server]
pub async fn get_blocks(offset: u32, limit: u32) -> Result<Vec<Block>, ServerFnError> {
pub async fn get_blocks(before: Option<u64>, limit: u32) -> Result<Vec<Block>, ServerFnError> {
use indexer_service_rpc::RpcClient as _;
let client = expect_context::<IndexerRpcClient>();
client
.get_blocks(offset, limit)
.get_blocks(before, limit)
.await
.map_err(|e| ServerFnError::ServerError(format!("RPC error: {}", e)))
}

View File

@ -7,7 +7,7 @@ use crate::{
components::{AccountPreview, BlockPreview, TransactionPreview},
};
const RECENT_BLOCKS_LIMIT: u64 = 10;
const RECENT_BLOCKS_LIMIT: u32 = 10;
/// Main page component
#[component]
@ -39,23 +39,57 @@ pub fn MainPage() -> impl IntoView {
}
});
// Pagination state for blocks
let (all_blocks, set_all_blocks) = signal(Vec::new());
let (is_loading_blocks, set_is_loading_blocks) = signal(false);
let (has_more_blocks, set_has_more_blocks) = signal(true);
let (oldest_loaded_block_id, set_oldest_loaded_block_id) = signal(None::<u64>);
// Load recent blocks on mount
let recent_blocks_resource = Resource::new(
|| (),
|_| async {
match api::get_latest_block_id().await {
Ok(last_id) => {
api::get_blocks(
std::cmp::max(last_id.saturating_sub(RECENT_BLOCKS_LIMIT) as u32, 1),
(RECENT_BLOCKS_LIMIT + 1) as u32,
)
.await
}
Err(err) => Err(err),
}
},
|_| async { api::get_blocks(None, RECENT_BLOCKS_LIMIT).await },
);
// Update all_blocks when initial load completes
Effect::new(move || {
if let Some(Ok(blocks)) = recent_blocks_resource.get() {
let oldest_id = blocks.last().map(|b| b.header.block_id);
set_all_blocks.set(blocks.clone());
set_oldest_loaded_block_id.set(oldest_id);
set_has_more_blocks
.set(blocks.len() as u32 == RECENT_BLOCKS_LIMIT && oldest_id.unwrap_or(0) > 1);
}
});
// Load more blocks handler
let load_more_blocks = move |_| {
let before_id = oldest_loaded_block_id.get();
if before_id.is_none() {
return;
}
set_is_loading_blocks.set(true);
leptos::task::spawn_local(async move {
match api::get_blocks(before_id, RECENT_BLOCKS_LIMIT).await {
Ok(new_blocks) => {
let blocks_count = new_blocks.len() as u32;
let new_oldest_id = new_blocks.last().map(|b| b.header.block_id);
set_all_blocks.update(|blocks| blocks.extend(new_blocks));
set_oldest_loaded_block_id.set(new_oldest_id);
set_has_more_blocks
.set(blocks_count == RECENT_BLOCKS_LIMIT && new_oldest_id.unwrap_or(0) > 1);
}
Err(e) => {
log::error!("Failed to load more blocks: {}", e);
}
}
set_is_loading_blocks.set(false);
});
};
// Handle search - update URL parameter
let on_search = move |ev: SubmitEvent| {
ev.prevent_default();
@ -196,19 +230,48 @@ pub fn MainPage() -> impl IntoView {
recent_blocks_resource
.get()
.map(|result| match result {
Ok(blocks) if !blocks.is_empty() => {
view! {
<div class="blocks-list">
{blocks
.into_iter()
.map(|block| view! { <BlockPreview block=block /> })
.collect::<Vec<_>>()}
</div>
}
.into_any()
}
Ok(_) => {
view! { <div class="no-blocks">"No blocks found"</div> }.into_any()
let blocks = all_blocks.get();
if blocks.is_empty() {
view! { <div class="no-blocks">"No blocks found"</div> }
.into_any()
} else {
view! {
<div>
<div class="blocks-list">
{blocks
.into_iter()
.map(|block| view! { <BlockPreview block=block /> })
.collect::<Vec<_>>()}
</div>
{move || {
if has_more_blocks.get() {
view! {
<button
class="load-more-button"
on:click=load_more_blocks
disabled=move || is_loading_blocks.get()
>
{move || {
if is_loading_blocks.get() {
"Loading..."
} else {
"Load More"
}
}}
</button>
}
.into_any()
} else {
().into_any()
}
}}
</div>
}
.into_any()
}
}
Err(e) => {
view! { <div class="error">{format!("Error: {}", e)}</div> }

View File

@ -50,8 +50,8 @@ impl IndexerStore {
Ok(self.dbio.get_block(id)?)
}
pub fn get_block_batch(&self, offset: u64, limit: u64) -> Result<Vec<Block>> {
Ok(self.dbio.get_block_batch(offset, limit)?)
pub fn get_block_batch(&self, before: Option<u64>, limit: u64) -> Result<Vec<Block>> {
Ok(self.dbio.get_block_batch(before, limit)?)
}
pub fn get_transaction_by_hash(&self, tx_hash: [u8; 32]) -> Result<NSSATransaction> {

View File

@ -1,6 +1,6 @@
{
"home": "./indexer/service",
"consensus_info_polling_interval": "60s",
"home": ".",
"consensus_info_polling_interval": "1s",
"bedrock_client_config": {
"addr": "http://localhost:8080",
"backoff": {
@ -11,50 +11,50 @@
"channel_id": "0101010101010101010101010101010101010101010101010101010101010101",
"initial_accounts": [
{
"account_id": "BLgCRDXYdQPMMWVHYRFGQZbgeHx9frkipa8GtpG2Syqy",
"account_id": "6iArKUXxhUJqS7kCaPNhwMWt3ro71PDyBj7jwAyE2VQV",
"balance": 10000
},
{
"account_id": "Gj1mJy5W7J5pfmLRujmQaLfLMWidNxQ6uwnhb666ZwHw",
"account_id": "7wHg9sbJwc6h3NP1S9bekfAzB8CHifEcxKswCKUt3YQo",
"balance": 20000
}
],
"initial_commitments": [
{
"npk": [
63,
202,
178,
231,
183,
82,
237,
212,
216,
221,
215,
255,
153,
101,
"npk":[
177,
161,
254,
210,
128,
122,
54,
190,
230,
151,
183,
64,
225,
229,
113,
1,
228,
97
],
11,
87,
38,
254,
159,
231,
165,
1,
94,
64,
137,
243,
76,
249,
101,
251,
129,
33,
101,
189,
30,
42,
11,
191,
34,
103,
186,
227,
230
] ,
"account": {
"program_owner": [
0,
@ -73,38 +73,38 @@
},
{
"npk": [
192,
251,
166,
243,
167,
236,
84,
249,
35,
32,
67,
72,
164,
106,
53,
66,
239,
141,
15,
52,
230,
136,
130,
172,
219,
225,
161,
139,
229,
89,
177,
2,
236,
207,
243,
125,
134,
135,
210,
143,
87,
232,
215,
128,
194,
213,
209,
30,
23,
174,
100,
244,
124,
74,
140,
47
120,
113,
224,
4,
165
],
"account": {
"program_owner": [

View File

@ -42,7 +42,11 @@ pub trait Rpc {
async fn get_transaction(&self, tx_hash: HashType) -> Result<Transaction, ErrorObjectOwned>;
#[method(name = "getBlocks")]
async fn get_blocks(&self, offset: u32, limit: u32) -> Result<Vec<Block>, ErrorObjectOwned>;
async fn get_blocks(
&self,
before: Option<u64>,
limit: u32,
) -> Result<Vec<Block>, ErrorObjectOwned>;
#[method(name = "getTransactionsByAccount")]
async fn get_transactions_by_account(

View File

@ -43,10 +43,10 @@ impl MockIndexerService {
);
}
// Create 10 blocks with transactions
// Create 100 blocks with transactions
let mut prev_hash = HashType([0u8; 32]);
for block_id in 0..10 {
for block_id in 1..=100 {
let block_hash = {
let mut hash = [0u8; 32];
hash[0] = block_id as u8;
@ -225,23 +225,20 @@ impl indexer_service_rpc::RpcServer for MockIndexerService {
.ok_or_else(|| ErrorObjectOwned::owned(-32001, "Transaction not found", None::<()>))
}
async fn get_blocks(&self, offset: u32, limit: u32) -> Result<Vec<Block>, ErrorObjectOwned> {
let offset = offset as usize;
let limit = limit as usize;
let total = self.blocks.len();
async fn get_blocks(
&self,
before: Option<u64>,
limit: u32,
) -> Result<Vec<Block>, ErrorObjectOwned> {
let start_id = before.map_or_else(|| self.blocks.len() as u64, |id| id.saturating_sub(1));
// Return blocks in reverse order (newest first), with pagination
let start = offset.min(total);
let end = (offset + limit).min(total);
Ok(self
.blocks
.iter()
let result = (1..=start_id)
.rev()
.skip(start)
.take(end - start)
.cloned()
.collect())
.take(limit as usize)
.map_while(|block_id| self.blocks.get(block_id as usize - 1).cloned())
.collect();
Ok(result)
}
async fn get_transactions_by_account(

View File

@ -88,11 +88,15 @@ impl indexer_service_rpc::RpcServer for IndexerService {
.into())
}
async fn get_blocks(&self, offset: u32, limit: u32) -> Result<Vec<Block>, ErrorObjectOwned> {
async fn get_blocks(
&self,
before: Option<u64>,
limit: u32,
) -> Result<Vec<Block>, ErrorObjectOwned> {
let blocks = self
.indexer
.store
.get_block_batch(offset as u64, limit as u64)
.get_block_batch(before, limit as u64)
.map_err(db_error)?;
let mut block_res = vec![];

View File

@ -61,8 +61,11 @@ async fn indexer_block_batching() -> Result<()> {
assert!(last_block_indexer > 1);
// Getting wide batch to fit all blocks
let block_batch = ctx.indexer_client().get_blocks(1, 100).await.unwrap();
// Getting wide batch to fit all blocks (from latest backwards)
let mut block_batch = ctx.indexer_client().get_blocks(None, 100).await.unwrap();
// Reverse to check chain consistency from oldest to newest
block_batch.reverse();
// Checking chain consistency
let mut prev_block_hash = block_batch.first().unwrap().header.hash;

View File

@ -1,9 +1,9 @@
use std::{str::FromStr, time::Duration};
use anyhow::Result;
use anyhow::{Context, Result};
use integration_tests::{
TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, format_private_account_id,
format_public_account_id, verify_commitment_is_in_state,
TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, fetch_privacy_preserving_tx,
format_private_account_id, format_public_account_id, verify_commitment_is_in_state,
};
use key_protocol::key_management::key_tree::chain_index::ChainIndex;
use log::info;
@ -15,6 +15,93 @@ use wallet::cli::{
programs::native_token_transfer::AuthTransferSubcommand,
};
#[test]
async fn sync_private_account_with_non_zero_chain_index() -> Result<()> {
let mut ctx = TestContext::new().await?;
let from: AccountId = ctx.existing_private_accounts()[0];
// Create a new private account
let command = Command::Account(AccountSubcommand::New(NewSubcommand::Private {
cci: None,
label: None,
}));
for _ in 0..3 {
// Key Tree shift
// This way we have account with child index > 0.
let result = wallet::cli::execute_subcommand(
ctx.wallet_mut(),
Command::Account(AccountSubcommand::New(NewSubcommand::Private {
cci: None,
label: None,
})),
)
.await?;
let SubcommandReturnValue::RegisterAccount { account_id: _ } = result else {
anyhow::bail!("Expected RegisterAccount return value");
};
}
let sub_ret = wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?;
let SubcommandReturnValue::RegisterAccount {
account_id: to_account_id,
} = sub_ret
else {
anyhow::bail!("Expected RegisterAccount return value");
};
// Get the keys for the newly created account
let (to_keys, _) = ctx
.wallet()
.storage()
.user_data
.get_private_account(to_account_id)
.cloned()
.context("Failed to get private account")?;
// Send to this account using claiming path (using npk and vpk instead of account ID)
let command = Command::AuthTransfer(AuthTransferSubcommand::Send {
from: format_private_account_id(from),
to: None,
to_npk: Some(hex::encode(to_keys.nullifer_public_key.0)),
to_vpk: Some(hex::encode(to_keys.viewing_public_key.0)),
amount: 100,
});
let sub_ret = wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?;
let SubcommandReturnValue::PrivacyPreservingTransfer { tx_hash } = sub_ret else {
anyhow::bail!("Expected PrivacyPreservingTransfer return value");
};
let tx = fetch_privacy_preserving_tx(ctx.sequencer_client(), tx_hash).await;
// Sync the wallet to claim the new account
let command = Command::Account(AccountSubcommand::SyncPrivate {});
wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?;
let new_commitment1 = ctx
.wallet()
.get_private_account_commitment(from)
.context("Failed to get private account commitment for sender")?;
assert_eq!(tx.message.new_commitments[0], new_commitment1);
assert_eq!(tx.message.new_commitments.len(), 2);
for commitment in tx.message.new_commitments.into_iter() {
assert!(verify_commitment_is_in_state(commitment, ctx.sequencer_client()).await);
}
let to_res_acc = ctx
.wallet()
.get_account_private(to_account_id)
.context("Failed to get recipient's private account")?;
assert_eq!(to_res_acc.balance, 100);
info!("Successfully transferred using claiming path");
Ok(())
}
#[test]
async fn restore_keys_from_seed() -> Result<()> {
let mut ctx = TestContext::new().await?;

View File

@ -16,6 +16,118 @@ use wallet::cli::{
},
};
#[test]
async fn claim_pinata_to_uninitialized_public_account_fails_fast() -> Result<()> {
let mut ctx = TestContext::new().await?;
let result = wallet::cli::execute_subcommand(
ctx.wallet_mut(),
Command::Account(AccountSubcommand::New(NewSubcommand::Public {
cci: None,
label: None,
})),
)
.await?;
let SubcommandReturnValue::RegisterAccount {
account_id: winner_account_id,
} = result
else {
anyhow::bail!("Expected RegisterAccount return value");
};
let winner_account_id_formatted = format_public_account_id(winner_account_id);
let pinata_balance_pre = ctx
.sequencer_client()
.get_account_balance(PINATA_BASE58.parse().unwrap())
.await?
.balance;
let claim_result = wallet::cli::execute_subcommand(
ctx.wallet_mut(),
Command::Pinata(PinataProgramAgnosticSubcommand::Claim {
to: winner_account_id_formatted,
}),
)
.await;
assert!(
claim_result.is_err(),
"Expected uninitialized account error"
);
let err = claim_result.unwrap_err().to_string();
assert!(
err.contains("wallet auth-transfer init --account-id Public/"),
"Expected init guidance, got: {err}",
);
let pinata_balance_post = ctx
.sequencer_client()
.get_account_balance(PINATA_BASE58.parse().unwrap())
.await?
.balance;
assert_eq!(pinata_balance_post, pinata_balance_pre);
Ok(())
}
#[test]
/// Claiming the pinata to a freshly created (never initialized) *private*
/// account must fail before any funds move, and the error must point the
/// user at the `auth-transfer init` command for the Private/ prefix.
async fn claim_pinata_to_uninitialized_private_account_fails_fast() -> Result<()> {
    let mut ctx = TestContext::new().await?;
    // Create a brand-new private account; it exists in the wallet but has
    // no on-chain state yet.
    let result = wallet::cli::execute_subcommand(
        ctx.wallet_mut(),
        Command::Account(AccountSubcommand::New(NewSubcommand::Private {
            cci: None,
            label: None,
        })),
    )
    .await?;
    let SubcommandReturnValue::RegisterAccount {
        account_id: winner_account_id,
    } = result
    else {
        anyhow::bail!("Expected RegisterAccount return value");
    };
    let winner_account_id_formatted = format_private_account_id(winner_account_id);
    // Snapshot the pinata balance so we can assert nothing was spent.
    let pinata_balance_pre = ctx
        .sequencer_client()
        .get_account_balance(PINATA_BASE58.parse().unwrap())
        .await?
        .balance;
    // Attempt the claim; this is expected to fail fast (before submitting
    // a transaction) because the recipient is uninitialized.
    let claim_result = wallet::cli::execute_subcommand(
        ctx.wallet_mut(),
        Command::Pinata(PinataProgramAgnosticSubcommand::Claim {
            to: winner_account_id_formatted,
        }),
    )
    .await;
    assert!(
        claim_result.is_err(),
        "Expected uninitialized account error"
    );
    // The error message must contain actionable guidance for the user.
    let err = claim_result.unwrap_err().to_string();
    assert!(
        err.contains("wallet auth-transfer init --account-id Private/"),
        "Expected init guidance, got: {err}",
    );
    // The pinata balance must be unchanged: the failed claim spent nothing.
    let pinata_balance_post = ctx
        .sequencer_client()
        .get_account_balance(PINATA_BASE58.parse().unwrap())
        .await?
        .balance;
    assert_eq!(pinata_balance_post, pinata_balance_pre);
    Ok(())
}
#[test]
async fn claim_pinata_to_existing_public_account() -> Result<()> {
let mut ctx = TestContext::new().await?;

View File

@ -68,6 +68,10 @@ impl ChainIndex {
&self.0
}
pub fn index(&self) -> Option<u32> {
self.chain().last().copied()
}
pub fn next_in_line(&self) -> ChainIndex {
let mut chain = self.0.clone();
// ToDo: Add overflow check

View File

@ -62,9 +62,10 @@ impl KeyChain {
pub fn calculate_shared_secret_receiver(
&self,
ephemeral_public_key_sender: EphemeralPublicKey,
index: Option<u32>,
) -> SharedSecretKey {
SharedSecretKey::new(
&self.secret_spending_key.generate_viewing_secret_key(None),
&self.secret_spending_key.generate_viewing_secret_key(index),
&ephemeral_public_key_sender,
)
}
@ -78,6 +79,9 @@ mod tests {
use rand::RngCore;
use super::*;
use crate::key_management::{
ephemeral_key_holder::EphemeralKeyHolder, key_tree::KeyTreePrivate,
};
#[test]
fn test_new_os_random() {
@ -101,8 +105,8 @@ mod tests {
let ephemeral_public_key_sender = EphemeralPublicKey::from_scalar(scalar);
// Calculate shared secret
let _shared_secret =
account_id_key_holder.calculate_shared_secret_receiver(ephemeral_public_key_sender);
let _shared_secret = account_id_key_holder
.calculate_shared_secret_receiver(ephemeral_public_key_sender, None);
}
#[test]
@ -150,4 +154,40 @@ mod tests {
hex::encode(viewing_public_key.to_bytes())
);
}
/// Builds a `KeyChain` sitting at a non-trivial (non-zero) derivation index,
/// for tests that exercise index-dependent shared-secret derivation.
///
/// Generates several nodes so the returned node is the one created by the
/// fourth `generate_new_node_layered` call.
/// NOTE(review): the "/0/0" then "/2" path comments below look inconsistent
/// for sibling ordering — confirm against `KeyTreePrivate`'s layering rules.
fn account_with_chain_index_2_for_tests() -> KeyChain {
    // Fresh OS-random seed: the test cares about tree structure, not values.
    let seed = SeedHolder::new_os_random();
    let mut key_tree_private = KeyTreePrivate::new(&seed);
    // /0
    key_tree_private.generate_new_node_layered().unwrap();
    // /1
    key_tree_private.generate_new_node_layered().unwrap();
    // /0/0
    key_tree_private.generate_new_node_layered().unwrap();
    // /2
    let (second_child_id, _) = key_tree_private.generate_new_node_layered().unwrap();
    // Extract the KeyChain stored in the node created last.
    key_tree_private
        .get_node(second_child_id)
        .unwrap()
        .value
        .0
        .clone()
}
#[test]
fn test_non_trivial_chain_index() {
    // Sender and receiver must derive the same shared secret even when the
    // receiving key chain lives at a non-zero derivation index.
    let key_chain = account_with_chain_index_2_for_tests();
    let ephemeral_holder = EphemeralKeyHolder::new(&key_chain.nullifer_public_key);
    let sender_secret =
        ephemeral_holder.calculate_shared_secret_sender(&key_chain.viewing_public_key);
    let receiver_secret = key_chain.calculate_shared_secret_receiver(
        ephemeral_holder.generate_ephemeral_public_key(),
        Some(2),
    );
    assert_eq!(sender_secret.0, receiver_secret.0);
}
}

View File

@ -83,7 +83,7 @@ impl BorshDeserialize for Nonce {
/// Account to be used both in public and private contexts
#[derive(
Debug, Default, Clone, Eq, PartialEq, Serialize, Deserialize, BorshSerialize, BorshDeserialize,
Default, Clone, Eq, PartialEq, Serialize, Deserialize, BorshSerialize, BorshDeserialize,
)]
pub struct Account {
pub program_owner: ProgramId,
@ -92,6 +92,23 @@ pub struct Account {
pub nonce: Nonce,
}
impl std::fmt::Debug for Account {
    /// Debug-formats the account with `program_owner` rendered as a hex
    /// string of its little-endian byte representation rather than a raw
    /// integer array.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Flatten the owner words to little-endian bytes, then hex-encode.
        let owner_bytes: Vec<u8> = self
            .program_owner
            .iter()
            .flat_map(|word| word.to_le_bytes())
            .collect();
        let mut program_owner_hex = String::with_capacity(owner_bytes.len() * 2);
        for byte in owner_bytes {
            program_owner_hex.push_str(&format!("{byte:02x}"));
        }
        f.debug_struct("Account")
            .field("program_owner", &program_owner_hex)
            .field("balance", &self.balance)
            .field("data", &self.data)
            .field("nonce", &self.nonce)
            .finish()
    }
}
#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]
pub struct AccountWithMetadata {
pub account: Account,
@ -111,7 +128,6 @@ impl AccountWithMetadata {
}
#[derive(
Debug,
Default,
Copy,
Clone,
@ -128,6 +144,12 @@ pub struct AccountId {
value: [u8; 32],
}
impl std::fmt::Debug for AccountId {
    /// Debug output is the base58 encoding, matching the user-facing form
    /// of account IDs.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let encoded = self.value.to_base58();
        f.write_str(&encoded)
    }
}
impl AccountId {
pub fn new(value: [u8; 32]) -> Self {
Self { value }

View File

@ -7,10 +7,18 @@ use crate::{NullifierPublicKey, account::Account};
#[derive(Serialize, Deserialize, BorshSerialize, BorshDeserialize)]
#[cfg_attr(
any(feature = "host", test),
derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)
derive(Clone, PartialEq, Eq, Hash, PartialOrd, Ord)
)]
pub struct Commitment(pub(super) [u8; 32]);
#[cfg(any(feature = "host", test))]
impl std::fmt::Debug for Commitment {
    /// Renders the commitment as `Commitment(<hex>)` with lowercase hex.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "Commitment(")?;
        for byte in &self.0 {
            write!(f, "{byte:02x}")?;
        }
        write!(f, ")")
    }
}
/// A commitment to all zero data.
/// ```python
/// from hashlib import sha256

View File

@ -22,9 +22,17 @@ pub struct SharedSecretKey(pub [u8; 32]);
pub struct EncryptionScheme;
#[derive(Serialize, Deserialize, BorshSerialize, BorshDeserialize)]
#[cfg_attr(any(feature = "host", test), derive(Debug, Clone, PartialEq, Eq))]
#[cfg_attr(any(feature = "host", test), derive(Clone, PartialEq, Eq))]
pub struct Ciphertext(pub(crate) Vec<u8>);
#[cfg(any(feature = "host", test))]
impl std::fmt::Debug for Ciphertext {
    /// Renders the ciphertext bytes as `Ciphertext(<hex>)`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "Ciphertext(")?;
        for byte in &self.0 {
            write!(f, "{byte:02x}")?;
        }
        write!(f, ")")
    }
}
impl EncryptionScheme {
pub fn encrypt(
account: &Account,
@ -75,6 +83,17 @@ impl EncryptionScheme {
Self::symmetric_transform(&mut buffer, shared_secret, commitment, output_index);
let mut cursor = Cursor::new(buffer.as_slice());
Account::from_cursor(&mut cursor).ok()
Account::from_cursor(&mut cursor)
.inspect_err(|err| {
println!(
"Failed to decode {ciphertext:?} \n
with secret {:?} ,\n
commitment {commitment:?} ,\n
and output_index {output_index} ,\n
with error {err:?}",
shared_secret.0
)
})
.ok()
}
}

View File

@ -10,9 +10,16 @@ use serde::{Deserialize, Serialize};
use crate::{SharedSecretKey, encryption::Scalar};
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)]
#[derive(Serialize, Deserialize, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)]
pub struct Secp256k1Point(pub Vec<u8>);
impl std::fmt::Debug for Secp256k1Point {
    /// Renders the point's byte encoding as `Secp256k1Point(<hex>)`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "Secp256k1Point(")?;
        for byte in &self.0 {
            write!(f, "{byte:02x}")?;
        }
        write!(f, ")")
    }
}
impl Secp256k1Point {
pub fn from_scalar(value: Scalar) -> Secp256k1Point {
let x_bytes: FieldBytes = value.into();

View File

@ -45,10 +45,18 @@ pub type NullifierSecretKey = [u8; 32];
#[derive(Serialize, Deserialize, BorshSerialize, BorshDeserialize)]
#[cfg_attr(
any(feature = "host", test),
derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)
derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash)
)]
pub struct Nullifier(pub(super) [u8; 32]);
#[cfg(any(feature = "host", test))]
impl std::fmt::Debug for Nullifier {
    /// Renders the nullifier as `Nullifier(<hex>)`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "Nullifier(")?;
        for byte in &self.0 {
            write!(f, "{byte:02x}")?;
        }
        write!(f, ")")
    }
}
impl Nullifier {
pub fn for_account_update(commitment: &Commitment, nsk: &NullifierSecretKey) -> Self {
const UPDATE_PREFIX: &[u8; 32] = b"/NSSA/v0.2/Nullifier/Update/\x00\x00\x00\x00";

View File

@ -2,6 +2,15 @@ use std::io;
use thiserror::Error;
/// Returns early with `Err($err)` when `$cond` evaluates to false.
///
/// Usable in any function returning `Result<_, E>` where `$err: E`.
/// Mirrors `anyhow::ensure!`, but takes a caller-supplied error value
/// instead of constructing one from a message.
///
/// The optional trailing comma (`$(,)?`) lets call sites formatted by
/// rustfmt across multiple lines end with a comma.
#[macro_export]
macro_rules! ensure {
    ($cond:expr, $err:expr $(,)?) => {
        if !$cond {
            return Err($err);
        }
    };
}
#[derive(Error, Debug)]
pub enum NssaError {
#[error("Invalid input: {0}")]
@ -58,3 +67,24 @@ pub enum NssaError {
#[error("Chain of calls is too long")]
MaxChainedCallsDepthExceeded,
}
#[cfg(test)]
mod tests {
    /// Minimal error type for exercising the `ensure!` macro.
    #[derive(Debug)]
    enum TestError {
        TestErr,
    }

    /// Propagates `TestError::TestErr` whenever `cond` is false.
    fn test_function_ensure(cond: bool) -> Result<(), TestError> {
        ensure!(cond, TestError::TestErr);
        Ok(())
    }

    #[test]
    fn test_ensure() {
        let passing = test_function_ensure(true);
        assert!(passing.is_ok());
        let failing = test_function_ensure(false);
        assert!(failing.is_err());
    }
}

View File

@ -43,7 +43,7 @@ impl EncryptedAccountData {
}
}
#[derive(Debug, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)]
#[derive(Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)]
pub struct Message {
pub public_account_ids: Vec<AccountId>,
pub nonces: Vec<Nonce>,
@ -53,6 +53,33 @@ pub struct Message {
pub new_nullifiers: Vec<(Nullifier, CommitmentSetDigest)>,
}
impl std::fmt::Debug for Message {
    /// Debug-formats the message; the commitment-set digests paired with
    /// nullifiers are shown as hex strings to keep logs readable.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Local adapter: debug-prints a 32-byte digest as lowercase hex.
        struct DigestAsHex<'a>(&'a [u8; 32]);
        impl std::fmt::Debug for DigestAsHex<'_> {
            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                f.write_str(&hex::encode(self.0))
            }
        }

        let nullifiers_pretty: Vec<_> = self
            .new_nullifiers
            .iter()
            .map(|(nullifier, digest)| (nullifier, DigestAsHex(digest)))
            .collect();
        f.debug_struct("Message")
            .field("public_account_ids", &self.public_account_ids)
            .field("nonces", &self.nonces)
            .field("public_post_states", &self.public_post_states)
            .field(
                "encrypted_private_post_states",
                &self.encrypted_private_post_states,
            )
            .field("new_commitments", &self.new_commitments)
            .field("new_nullifiers", &nullifiers_pretty)
            .finish()
    }
}
impl Message {
pub fn try_from_circuit_output(
public_account_ids: Vec<AccountId>,

View File

@ -7,7 +7,7 @@ use serde::Serialize;
use crate::{AccountId, error::NssaError, program::Program};
#[derive(Debug, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)]
#[derive(Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)]
pub struct Message {
pub program_id: ProgramId,
pub account_ids: Vec<AccountId>,
@ -15,6 +15,23 @@ pub struct Message {
pub instruction_data: InstructionData,
}
impl std::fmt::Debug for Message {
    /// Debug-formats the message with `program_id` hex-encoded from its
    /// little-endian byte representation instead of a raw integer array.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let id_bytes: Vec<u8> = self
            .program_id
            .iter()
            .flat_map(|word| word.to_le_bytes())
            .collect();
        let program_id_hex = hex::encode(id_bytes);
        f.debug_struct("Message")
            .field("program_id", &program_id_hex)
            .field("account_ids", &self.account_ids)
            .field("nonces", &self.nonces)
            .field("instruction_data", &self.instruction_data)
            .finish()
    }
}
impl Message {
pub fn try_new<T: Serialize>(
program_id: ProgramId,

View File

@ -9,7 +9,7 @@ use nssa_core::{
use sha2::{Digest, digest::FixedOutput};
use crate::{
V02State,
V02State, ensure,
error::NssaError,
public_transaction::{Message, WitnessSet},
state::MAX_NUMBER_CHAINED_CALLS,
@ -70,33 +70,33 @@ impl PublicTransaction {
let witness_set = self.witness_set();
// All account_ids must be different
if message.account_ids.iter().collect::<HashSet<_>>().len() != message.account_ids.len() {
return Err(NssaError::InvalidInput(
"Duplicate account_ids found in message".into(),
));
}
ensure!(
message.account_ids.iter().collect::<HashSet<_>>().len() == message.account_ids.len(),
NssaError::InvalidInput("Duplicate account_ids found in message".into(),)
);
// Check exactly one nonce is provided for each signature
if message.nonces.len() != witness_set.signatures_and_public_keys.len() {
return Err(NssaError::InvalidInput(
ensure!(
message.nonces.len() == witness_set.signatures_and_public_keys.len(),
NssaError::InvalidInput(
"Mismatch between number of nonces and signatures/public keys".into(),
));
}
)
);
// Check the signatures are valid
if !witness_set.is_valid_for(message) {
return Err(NssaError::InvalidInput(
"Invalid signature for given message and public key".into(),
));
}
ensure!(
witness_set.is_valid_for(message),
NssaError::InvalidInput("Invalid signature for given message and public key".into())
);
let signer_account_ids = self.signer_account_ids();
// Check nonces corresponds to the current nonces on the public state.
for (account_id, nonce) in signer_account_ids.iter().zip(&message.nonces) {
let current_nonce = state.get_account_by_id(*account_id).nonce;
if current_nonce != *nonce {
return Err(NssaError::InvalidInput("Nonce mismatch".into()));
}
ensure!(
current_nonce == *nonce,
NssaError::InvalidInput("Nonce mismatch".into())
);
}
// Build pre_states for execution
@ -125,9 +125,10 @@ impl PublicTransaction {
let mut chain_calls_counter = 0;
while let Some((chained_call, caller_program_id)) = chained_calls.pop_front() {
if chain_calls_counter > MAX_NUMBER_CHAINED_CALLS {
return Err(NssaError::MaxChainedCallsDepthExceeded);
}
ensure!(
chain_calls_counter <= MAX_NUMBER_CHAINED_CALLS,
NssaError::MaxChainedCallsDepthExceeded
);
// Check that the `program_id` corresponds to a deployed program
let Some(program) = state.programs().get(&chained_call.program_id) else {
@ -158,28 +159,31 @@ impl PublicTransaction {
.get(&account_id)
.cloned()
.unwrap_or_else(|| state.get_account_by_id(account_id));
if pre.account != expected_pre {
return Err(NssaError::InvalidProgramBehavior);
}
ensure!(
pre.account == expected_pre,
NssaError::InvalidProgramBehavior
);
// Check that authorization flags are consistent with the provided ones or
// authorized by program through the PDA mechanism
let is_authorized = signer_account_ids.contains(&account_id)
|| authorized_pdas.contains(&account_id);
if pre.is_authorized != is_authorized {
return Err(NssaError::InvalidProgramBehavior);
}
ensure!(
pre.is_authorized == is_authorized,
NssaError::InvalidProgramBehavior
);
}
// Verify execution corresponds to a well-behaved program.
// See the # Programs section for the definition of the `validate_execution` method.
if !validate_execution(
&program_output.pre_states,
&program_output.post_states,
chained_call.program_id,
) {
return Err(NssaError::InvalidProgramBehavior);
}
ensure!(
validate_execution(
&program_output.pre_states,
&program_output.post_states,
chained_call.program_id,
),
NssaError::InvalidProgramBehavior
);
for post in program_output
.post_states
@ -221,9 +225,10 @@ impl PublicTransaction {
}
Some(post)
}) {
if post.program_owner == DEFAULT_PROGRAM_ID {
return Err(NssaError::InvalidProgramBehavior);
}
ensure!(
post.program_owner != DEFAULT_PROGRAM_ID,
NssaError::InvalidProgramBehavior
);
}
Ok(state_diff)

View File

@ -6,11 +6,17 @@ pub use private_key::PrivateKey;
pub use public_key::PublicKey;
use rand::{RngCore, rngs::OsRng};
#[derive(Debug, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)]
#[derive(Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)]
pub struct Signature {
pub value: [u8; 64],
}
impl std::fmt::Debug for Signature {
    /// Debug output is the 64-byte signature hex-encoded.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let encoded = hex::encode(self.value);
        f.write_str(&encoded)
    }
}
impl Signature {
pub fn new(key: &PrivateKey, message: &[u8]) -> Self {
let mut aux_random = [0u8; 32];

View File

@ -5,9 +5,15 @@ use sha2::{Digest, Sha256};
use crate::{PrivateKey, error::NssaError};
#[derive(Debug, Clone, PartialEq, Eq, BorshSerialize, Serialize, Deserialize)]
#[derive(Clone, PartialEq, Eq, BorshSerialize, Serialize, Deserialize)]
pub struct PublicKey([u8; 32]);
impl std::fmt::Debug for PublicKey {
    /// Debug output is the 32-byte key hex-encoded.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let encoded = hex::encode(self.0);
        f.write_str(&encoded)
    }
}
impl BorshDeserialize for PublicKey {
fn deserialize_reader<R: std::io::Read>(reader: &mut R) -> std::io::Result<Self> {
let mut buf = [0u8; 32];

View File

@ -284,7 +284,7 @@ impl V02State {
account_id,
Account {
program_owner: Program::pinata().id(),
balance: 1500,
balance: 1500000,
// Difficulty: 3
data: vec![3; 33].try_into().expect("should fit"),
nonce: 0u128.into(),

View File

@ -1,5 +1,5 @@
{
"home": "./sequencer_runner",
"home": ".",
"override_rust_log": null,
"genesis_id": 1,
"is_genesis_random": true,

View File

@ -173,7 +173,7 @@ async fn retry_pending_blocks(seq_core: &Arc<Mutex<SequencerCore>>) -> Result<()
use log::debug;
let (pending_blocks, block_settlement_client) = {
let (mut pending_blocks, block_settlement_client) = {
let sequencer_core = seq_core.lock().await;
let client = sequencer_core.block_settlement_client();
let pending_blocks = sequencer_core
@ -182,6 +182,8 @@ async fn retry_pending_blocks(seq_core: &Arc<Mutex<SequencerCore>>) -> Result<()
(pending_blocks, client)
};
pending_blocks.sort_by(|block1, block2| block1.header.block_id.cmp(&block2.header.block_id));
if !pending_blocks.is_empty() {
info!(
"Resubmitting blocks from {} to {}",

View File

@ -539,13 +539,26 @@ impl RocksDBIO {
}
}
pub fn get_block_batch(&self, offset: u64, limit: u64) -> DbResult<Vec<Block>> {
pub fn get_block_batch(&self, before: Option<u64>, limit: u64) -> DbResult<Vec<Block>> {
let cf_block = self.block_column();
let mut block_batch = vec![];
// Determine the starting block ID
let start_block_id = if let Some(before_id) = before {
before_id.saturating_sub(1)
} else {
// Get the latest block ID
self.get_meta_last_block_in_db()?
};
// ToDo: Multi get this
for block_id in offset..(offset + limit) {
for i in 0..limit {
let block_id = start_block_id.saturating_sub(i);
if block_id == 0 {
break;
}
let res = self
.db
.get_cf(
@ -1215,7 +1228,10 @@ mod tests {
let block_hashes_mem: Vec<[u8; 32]> =
block_res.into_iter().map(|bl| bl.header.hash.0).collect();
let batch_res = dbio.get_block_batch(2, 4).unwrap();
// Get blocks before ID 6 (i.e., starting from 5 going backwards), limit 4
// This should return blocks 5, 4, 3, 2 in descending order
let mut batch_res = dbio.get_block_batch(Some(6), 4).unwrap();
batch_res.reverse(); // Reverse to match ascending order for comparison
let block_hashes_db: Vec<[u8; 32]> =
batch_res.into_iter().map(|bl| bl.header.hash.0).collect();
@ -1224,7 +1240,10 @@ mod tests {
let block_hashes_mem_limited = &block_hashes_mem[1..];
let batch_res_limited = dbio.get_block_batch(3, 4).unwrap();
// Get blocks before ID 6, limit 3
// This should return blocks 5, 4, 3 in descending order
let mut batch_res_limited = dbio.get_block_batch(Some(6), 3).unwrap();
batch_res_limited.reverse(); // Reverse to match ascending order for comparison
let block_hashes_db_limited: Vec<[u8; 32]> = batch_res_limited
.into_iter()

View File

@ -21,8 +21,8 @@ humantime-serde.workspace = true
humantime.workspace = true
tokio = { workspace = true, features = ["macros"] }
clap.workspace = true
base58.workspace = true
base64.workspace = true
bytemuck.workspace = true
borsh.workspace = true
hex.workspace = true
itertools.workspace = true

View File

@ -165,10 +165,13 @@ fn format_account_details(account: &Account) -> (String, String) {
let token_prog_id = Program::token().id();
match &account.program_owner {
o if *o == auth_tr_prog_id => (
"Account owned by authenticated transfer program".to_string(),
serde_json::to_string(&account).unwrap(),
),
o if *o == auth_tr_prog_id => {
let account_hr: HumanReadableAccount = account.clone().into();
(
"Account owned by authenticated transfer program".to_string(),
serde_json::to_string(&account_hr).unwrap(),
)
}
o if *o == token_prog_id => {
if let Ok(token_def) = TokenDefinition::try_from(&account.data) {
(

View File

@ -1,6 +1,7 @@
use anyhow::{Context, Result};
use clap::Subcommand;
use common::{PINATA_BASE58, transaction::NSSATransaction};
use nssa::{Account, AccountId};
use crate::{
AccDecodeData::Decode,
@ -102,17 +103,17 @@ impl WalletSubcommand for PinataProgramSubcommandPublic {
pinata_account_id,
winner_account_id,
} => {
let pinata_account_id = pinata_account_id.parse().unwrap();
let pinata_account_id = pinata_account_id.parse()?;
let winner_account_id: AccountId = winner_account_id.parse()?;
ensure_public_recipient_initialized(wallet_core, winner_account_id).await?;
let solution = find_solution(wallet_core, pinata_account_id)
.await
.context("failed to compute solution")?;
let res = Pinata(wallet_core)
.claim(
pinata_account_id,
winner_account_id.parse().unwrap(),
solution,
)
.claim(pinata_account_id, winner_account_id, solution)
.await?;
println!("Results of tx send are {res:#?}");
@ -138,8 +139,11 @@ impl WalletSubcommand for PinataProgramSubcommandPrivate {
pinata_account_id,
winner_account_id,
} => {
let pinata_account_id = pinata_account_id.parse().unwrap();
let winner_account_id = winner_account_id.parse().unwrap();
let pinata_account_id = pinata_account_id.parse()?;
let winner_account_id: AccountId = winner_account_id.parse()?;
ensure_private_owned_recipient_initialized(wallet_core, winner_account_id)?;
let solution = find_solution(wallet_core, pinata_account_id)
.await
.context("failed to compute solution")?;
@ -188,7 +192,51 @@ impl WalletSubcommand for PinataProgramSubcommand {
}
}
async fn find_solution(wallet: &WalletCore, pinata_account_id: nssa::AccountId) -> Result<u128> {
/// Fails with actionable guidance when the public recipient account has
/// never been initialized on-chain (i.e. it still equals `Account::default()`).
///
/// # Errors
/// - fetch failure for the recipient account (with context), or
/// - an "uninitialized" error telling the user to run `auth-transfer init`.
async fn ensure_public_recipient_initialized(
    wallet_core: &WalletCore,
    winner_account_id: AccountId,
) -> Result<()> {
    let fetched = wallet_core
        .get_account_public(winner_account_id)
        .await
        .with_context(|| format!("failed to fetch recipient account Public/{winner_account_id}"))?;
    // A default account means no state has ever been written for this ID.
    if fetched != Account::default() {
        return Ok(());
    }
    anyhow::bail!(
        "Recipient account Public/{winner_account_id} is uninitialized.\n\
        Initialize it first:\n \
        wallet auth-transfer init --account-id Public/{winner_account_id}"
    );
}
/// Fails with actionable guidance unless the private recipient is an
/// initialized account owned by this wallet.
///
/// # Errors
/// - the account is not present in this wallet's private store, or
/// - the account exists but equals `Account::default()` (uninitialized).
fn ensure_private_owned_recipient_initialized(
    wallet_core: &WalletCore,
    winner_account_id: AccountId,
) -> Result<()> {
    match wallet_core.get_account_private(winner_account_id) {
        // Claims to private recipients only work for wallet-owned accounts.
        None => anyhow::bail!(
            "Recipient account Private/{winner_account_id} is not found in this wallet.\n\
            `wallet pinata claim --to Private/...` supports owned private accounts only."
        ),
        // A default account means it was created but never initialized.
        Some(account) if account == Account::default() => anyhow::bail!(
            "Recipient account Private/{winner_account_id} is uninitialized.\n\
            Initialize it first:\n \
            wallet auth-transfer init --account-id Private/{winner_account_id}\n\
            Then sync private state:\n \
            wallet account sync-private"
        ),
        Some(_) => Ok(()),
    }
}
async fn find_solution(wallet: &WalletCore, pinata_account_id: AccountId) -> Result<u128> {
let account = wallet.get_account_public(pinata_account_id).await?;
let data: [u8; 33] = account
.data

View File

@ -1,7 +1,7 @@
use std::{collections::HashMap, path::PathBuf, str::FromStr};
use anyhow::Result;
use base64::{Engine, engine::general_purpose::STANDARD as BASE64};
use base58::ToBase58;
use key_protocol::key_protocol_core::NSSAUserData;
use nssa::Account;
use serde::Serialize;
@ -141,15 +141,20 @@ pub(crate) fn parse_addr_with_privacy_prefix(
#[derive(Serialize)]
pub(crate) struct HumanReadableAccount {
balance: u128,
program_owner_b64: String,
data_b64: String,
program_owner: String,
data: String,
nonce: u128,
}
impl From<Account> for HumanReadableAccount {
fn from(account: Account) -> Self {
let program_owner_b64 = BASE64.encode(bytemuck::cast_slice(&account.program_owner));
let data_b64 = BASE64.encode(account.data);
let program_owner = account
.program_owner
.iter()
.flat_map(|n| n.to_le_bytes())
.collect::<Vec<u8>>()
.to_base58();
let data = hex::encode(account.data);
Self {
balance: account.balance,
program_owner_b64,

View File

@ -362,7 +362,7 @@ impl WalletCore {
);
let tx = PrivacyPreservingTransaction::new(message, witness_set);
let shared_secrets = private_account_keys
let shared_secrets: Vec<_> = private_account_keys
.into_iter()
.map(|keys| keys.ssk)
.collect();
@ -418,18 +418,19 @@ impl WalletCore {
.user_data
.default_user_private_accounts
.iter()
.map(|(acc_account_id, (key_chain, _))| (*acc_account_id, key_chain))
.chain(
self.storage
.user_data
.private_key_tree
.key_map
.values()
.map(|keys_node| (keys_node.account_id(), &keys_node.value.0)),
);
.map(|(acc_account_id, (key_chain, _))| (*acc_account_id, key_chain, None))
.chain(self.storage.user_data.private_key_tree.key_map.iter().map(
|(chain_index, keys_node)| {
(
keys_node.account_id(),
&keys_node.value.0,
chain_index.index(),
)
},
));
let affected_accounts = private_account_key_chains
.flat_map(|(acc_account_id, key_chain)| {
.flat_map(|(acc_account_id, key_chain, index)| {
let view_tag = EncryptedAccountData::compute_view_tag(
key_chain.nullifer_public_key.clone(),
key_chain.viewing_public_key.clone(),
@ -443,8 +444,8 @@ impl WalletCore {
.filter_map(|(ciph_id, encrypted_data)| {
let ciphertext = &encrypted_data.ciphertext;
let commitment = &tx.message.new_commitments[ciph_id];
let shared_secret =
key_chain.calculate_shared_secret_receiver(encrypted_data.epk.clone());
let shared_secret = key_chain
.calculate_shared_secret_receiver(encrypted_data.epk.clone(), index);
nssa_core::EncryptionScheme::decrypt(
ciphertext,
@ -454,6 +455,7 @@ impl WalletCore {
)
})
.map(move |res_acc| (acc_account_id, res_acc))
.collect::<Vec<_>>()
})
.collect::<Vec<_>>();