Merge branch 'main' into marvin/nonce

This commit is contained in:
jonesmarvin8 2026-03-17 16:45:08 -04:00
commit 8dd5037e28
215 changed files with 6869 additions and 5994 deletions

956
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -58,7 +58,7 @@ amm_program = { path = "programs/amm" }
test_program_methods = { path = "test_program_methods" }
bedrock_client = { path = "bedrock_client" }
tokio = { version = "1.28.2", features = [
tokio = { version = "1.50", features = [
"net",
"rt-multi-thread",
"sync",
@ -75,15 +75,15 @@ serde = { version = "1.0.60", default-features = false, features = ["derive"] }
serde_json = "1.0.81"
serde_with = "3.16.1"
actix = "0.13.0"
actix-cors = "0.6.1"
actix-cors = "0.7.1"
jsonrpsee = "0.26.0"
futures = "0.3"
actix-rt = "*"
lazy_static = "1.5.0"
env_logger = "0.10"
env_logger = "0.11"
log = "0.4.28"
lru = "0.7.8"
thiserror = "2.0.12"
lru = "0.16.3"
thiserror = "2.0"
sha2 = "0.10.8"
hex = "0.4.3"
bytemuck = "1.24.0"
@ -91,7 +91,7 @@ bytesize = { version = "2.3.1", features = ["serde"] }
humantime-serde = "1.1"
humantime = "2.1"
aes-gcm = "0.10.3"
toml = "0.7.4"
toml = "0.9.8"
bincode = "1.3.3"
tempfile = "3.14.0"
light-poseidon = "0.3.0"
@ -107,7 +107,7 @@ base58 = "0.2.0"
itertools = "0.14.0"
url = { version = "2.5.4", features = ["serde"] }
tokio-retry = "0.3.0"
schemars = "1.2.0"
schemars = "1.2"
async-stream = "0.3.6"
logos-blockchain-common-http-client = { git = "https://github.com/logos-blockchain/logos-blockchain.git" }
@ -129,7 +129,7 @@ k256 = { version = "0.13.3", features = [
"pem",
] }
elliptic-curve = { version = "0.13.8", features = ["arithmetic"] }
actix-web = { version = "=4.1.0", default-features = false, features = [
actix-web = { version = "4.13.0", default-features = false, features = [
"macros",
] }
clap = { version = "4.5.42", features = ["derive", "env"] }
@ -141,3 +141,142 @@ inherits = "release"
opt-level = 'z'
lto = true
codegen-units = 1
[workspace.lints.rust]
warnings = "deny"
[workspace.lints]
clippy.all = { level = "deny", priority = -1 }
# Pedantic
clippy.pedantic = { level = "deny", priority = -1 }
# Reason: documenting every function returning Result is too verbose and doesn't add much value when you have good error types.
clippy.missing-errors-doc = "allow"
# Reason: most of the panics are internal and not part of the public API, so documenting them is not necessary.
clippy.missing-panics-doc = "allow"
# Reason: this isn't always bad and actually works well for our financial and cryptography code.
clippy.similar-names = "allow"
# Reason: this lint is too strict and hard to fix.
clippy.too-many-lines = "allow"
# Reason: std hasher is fine for us in public functions.
clippy.implicit-hasher = "allow"
# Restriction
clippy.restriction = { level = "deny", priority = -1 }
# Reason: we deny the whole `restriction` group but we allow things that don't make sense for us.
# That way we can still benefit from new lints added to the `restriction` group without having to
# explicitly allow them.
# As a downside our contributors don't know if some lint was enabled intentionally or just no one
# else faced it before to allow it but we can handle this during code reviews.
clippy.blanket-clippy-restriction-lints = "allow"
# Reason: we can't avoid using unwrap for now.
clippy.unwrap-used = "allow"
# Reason: we can't avoid using expect for now.
clippy.expect-used = "allow"
# Reason: unreachable is good in many cases.
clippy.unreachable = "allow"
# Reason: this is ridiculously strict in our codebase and doesn't add any value.
clippy.single-call-fn = "allow"
# Reason: we use panic in some places and it's okay.
clippy.panic = "allow"
# Reason: shadowing is good most of the time.
clippy.shadow-reuse = "allow"
# Reason: implicit return is good.
clippy.implicit-return = "allow"
# Reason: std is fine for us, we don't need to use core.
clippy.std-instead-of-core = "allow"
# Reason: std is fine for us, we don't need to use alloc.
clippy.std-instead-of-alloc = "allow"
# Reason: default methods are good most of the time.
clippy.missing-trait-methods = "allow"
# Reason: this is too verbose and doesn't help much if you have rust analyzer.
clippy.pattern-type-mismatch = "allow"
# Reason: decreases readability.
clippy.assertions-on-result-states = "allow"
# Reason: documenting every assert is too verbose.
clippy.missing-assert-message = "allow"
# Reason: documenting private items is too verbose and doesn't add much value.
clippy.missing-docs-in-private-items = "allow"
# Reason: we use separated suffix style.
clippy.separated_literal_suffix = "allow"
# Reason: sometimes absolute paths are more readable.
clippy.absolute-paths = "allow"
# Reason: sometimes it's as readable as full variable naming.
clippy.min-ident-chars = "allow"
# Reason: it's very common and handy.
clippy.indexing-slicing = "allow"
# Reason: we use little endian style.
clippy.little-endian-bytes = "allow"
# Reason: we use this style of pub visibility.
clippy.pub-with-shorthand = "allow"
# Reason: question mark operator is very cool.
clippy.question-mark-used = "allow"
# Reason: it's fine to panic in tests and some functions where it makes sense.
clippy.panic-in-result-fn = "allow"
# Reason: we don't care that much about inlining and LTO should take care of it.
clippy.missing_inline_in_public_items = "allow"
# Reason: it's okay for us.
clippy.default-numeric-fallback = "allow"
# Reason: this is fine for us.
clippy.exhaustive-enums = "allow"
# Reason: this is fine for us.
clippy.exhaustive-structs = "allow"
# Reason: this helps readability when item is imported in other modules.
clippy.module-name-repetitions = "allow"
# Reason: mostly historical reasons, maybe we'll address this in future.
clippy.mod-module-files = "allow"
# Reason: named module files is our preferred way.
clippy.self-named-module-files = "allow"
# Reason: this is actually quite handy.
clippy.impl-trait-in-params = "allow"
# Reason: this is often useful.
clippy.use-debug = "allow"
# Reason: this is sometimes useful.
clippy.field-scoped-visibility-modifiers = "allow"
# Reason: `pub use` is good for re-exports and hiding unnecessary details.
clippy.pub-use = "allow"
# Reason: we prefer semicolons inside blocks.
clippy.semicolon-outside-block = "allow"
# Reason: we don't do it blindly, this is mostly internal constraints checks.
clippy.unwrap-in-result = "allow"
# Reason: we don't see any problems with that.
clippy.shadow-same = "allow"
# Reason: this lint is too verbose.
clippy.let-underscore-untyped = "allow"
# Reason: this lint is actually bad as it forces to use wildcard `..` instead of
# field-by-field `_` which may lead to subtle bugs when new fields are added to the struct.
clippy.unneeded-field-pattern = "allow"
# Nursery
clippy.nursery = { level = "deny", priority = -1 }
# Reason: this is okay if it compiles.
clippy.future-not-send = "allow"
# Reason: this is actually a good lint, but currently it gives a lot of false-positives.
clippy.significant-drop-tightening = "allow"
# Correctness
clippy.correctness = { level = "deny", priority = -1 }
# Complexity
clippy.complexity = { level = "deny", priority = -1 }
# Perf
clippy.perf = { level = "deny", priority = -1 }
# Suspicious
clippy.suspicious = { level = "deny", priority = -1 }
# Style
clippy.style = { level = "deny", priority = -1 }
# Cargo
clippy.cargo = { level = "deny", priority = -1 }
# Reason: we're not at this stage yet and it will be a pain to create a new crate.
clippy.cargo-common-metadata = "allow"
# Reason: hard to address right now and mostly comes from dependencies
# so the fix would be just a long list of exceptions.
clippy.multiple-crate-versions = "allow"

View File

@ -4,6 +4,9 @@ version = "0.1.0"
edition = "2024"
license = { workspace = true }
[lints]
workspace = true
[dependencies]
common.workspace = true

View File

@ -2,7 +2,7 @@ use std::time::Duration;
use anyhow::{Context as _, Result};
use common::config::BasicAuth;
use futures::{Stream, TryFutureExt};
use futures::{Stream, TryFutureExt as _};
#[expect(clippy::single_component_path_imports, reason = "Satisfy machete")]
use humantime_serde;
use log::{info, warn};
@ -14,7 +14,7 @@ use reqwest::{Client, Url};
use serde::{Deserialize, Serialize};
use tokio_retry::Retry;
/// Fibonacci backoff retry strategy configuration
/// Fibonacci backoff retry strategy configuration.
#[derive(Debug, Copy, Clone, Serialize, Deserialize)]
pub struct BackoffConfig {
#[serde(with = "humantime_serde")]
@ -96,9 +96,14 @@ impl BedrockClient {
}
fn backoff_strategy(&self) -> impl Iterator<Item = Duration> {
tokio_retry::strategy::FibonacciBackoff::from_millis(
self.backoff.start_delay.as_millis() as u64
)
.take(self.backoff.max_retries)
let start_delay_millis = self
.backoff
.start_delay
.as_millis()
.try_into()
.expect("Start delay must be less than u64::MAX milliseconds");
tokio_retry::strategy::FibonacciBackoff::from_millis(start_delay_millis)
.take(self.backoff.max_retries)
}
}

54
clippy.toml Normal file
View File

@ -0,0 +1,54 @@
module-item-order-groupings = [
[
"use",
[
"use",
],
],
[
"modules",
[
"extern_crate",
"mod",
"foreign_mod",
],
],
[
"macros",
[
"macro",
],
],
[
"global_asm",
[
"global_asm",
],
],
[
"UPPER_SNAKE_CASE",
[
"static",
"const",
],
],
[
"PascalCase",
[
"ty_alias",
"enum",
"struct",
"union",
"trait",
"trait_alias",
"impl",
],
],
[
"lower_snake_case",
[
"fn",
],
],
]
source-item-ordering = ["module"]

View File

@ -4,6 +4,9 @@ version = "0.1.0"
edition = "2024"
license = { workspace = true }
[lints]
workspace = true
[dependencies]
nssa.workspace = true
nssa_core.workspace = true

View File

@ -1,7 +1,7 @@
use borsh::{BorshDeserialize, BorshSerialize};
use nssa::AccountId;
use serde::{Deserialize, Serialize};
use sha2::{Digest, Sha256, digest::FixedOutput};
use sha2::{Digest as _, Sha256, digest::FixedOutput as _};
use crate::{HashType, transaction::NSSATransaction};
@ -20,7 +20,7 @@ pub struct BlockMeta {
#[derive(Debug, Clone)]
/// Our own hasher.
/// Currently it is SHA256 hasher wrapper. May change in a future.
pub struct OwnHasher {}
pub struct OwnHasher;
impl OwnHasher {
fn hash(data: &[u8]) -> HashType {
@ -69,6 +69,7 @@ pub struct HashableBlockData {
}
impl HashableBlockData {
#[must_use]
pub fn into_pending_block(
self,
signing_key: &nssa::PrivateKey,
@ -93,6 +94,7 @@ impl HashableBlockData {
}
}
#[must_use]
pub fn block_hash(&self) -> BlockHash {
OwnHasher::hash(&borsh::to_vec(&self).unwrap())
}
@ -109,14 +111,14 @@ impl From<Block> for HashableBlockData {
}
}
/// Helper struct for account (de-)serialization
/// Helper struct for account (de-)serialization.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AccountInitialData {
pub account_id: AccountId,
pub balance: u128,
}
/// Helper struct to (de-)serialize initial commitments
/// Helper struct to (de-)serialize initial commitments.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CommitmentsInitialData {
pub npk: nssa_core::NullifierPublicKey,
@ -128,7 +130,7 @@ mod tests {
use crate::{HashType, block::HashableBlockData, test_utils};
#[test]
fn test_encoding_roundtrip() {
fn encoding_roundtrip() {
let transactions = vec![test_utils::produce_dummy_empty_transaction()];
let block = test_utils::produce_dummy_block(1, Some(HashType([1; 32])), transactions);
let hashable = HashableBlockData::from(block);

View File

@ -42,14 +42,14 @@ impl FromStr for BasicAuth {
})?;
Ok(Self {
username: username.to_string(),
password: password.map(|p| p.to_string()),
username: username.to_owned(),
password: password.map(std::string::ToString::to_string),
})
}
}
impl From<BasicAuth> for BasicAuthCredentials {
fn from(value: BasicAuth) -> Self {
BasicAuthCredentials::new(value.username, value.password)
Self::new(value.username, value.password)
}
}

View File

@ -22,14 +22,14 @@ pub enum SequencerClientError {
impl From<SequencerRpcError> for SequencerClientError {
fn from(value: SequencerRpcError) -> Self {
SequencerClientError::InternalError(value)
Self::InternalError(value)
}
}
#[derive(Debug, thiserror::Error)]
pub enum ExecutionFailureKind {
#[error("Failed to get account data from sequencer")]
SequencerError,
#[error("Failed to get data from sequencer")]
SequencerError(#[source] anyhow::Error),
#[error("Inputs amounts does not match outputs")]
AmountMismatchError,
#[error("Accounts key not found")]

View File

@ -46,9 +46,9 @@ impl FromStr for HashType {
type Err = hex::FromHexError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let mut bytes = [0u8; 32];
let mut bytes = [0_u8; 32];
hex::decode_to_slice(s, &mut bytes)?;
Ok(HashType(bytes))
Ok(Self(bytes))
}
}
@ -66,7 +66,7 @@ impl From<HashType> for [u8; 32] {
impl From<[u8; 32]> for HashType {
fn from(bytes: [u8; 32]) -> Self {
HashType(bytes)
Self(bytes)
}
}
@ -74,7 +74,7 @@ impl TryFrom<Vec<u8>> for HashType {
type Error = <[u8; 32] as TryFrom<Vec<u8>>>::Error;
fn try_from(value: Vec<u8>) -> Result<Self, Self::Error> {
Ok(HashType(value.try_into()?))
Ok(Self(value.try_into()?))
}
}
@ -90,7 +90,7 @@ mod tests {
#[test]
fn serialization_roundtrip() {
let original = HashType([1u8; 32]);
let original = HashType([1_u8; 32]);
let serialized = original.to_string();
let deserialized = HashType::from_str(&serialized).unwrap();
assert_eq!(original, deserialized);

View File

@ -5,25 +5,25 @@ use serde_json::{Value, to_value};
#[derive(serde::Serialize)]
pub struct RpcParseError(pub String);
#[allow(clippy::too_long_first_doc_paragraph)]
/// This struct may be returned from JSON RPC server in case of error
/// This struct may be returned from JSON RPC server in case of error.
///
/// It is expected that this struct has `From<_>` impls for all other RPC errors
/// like [`RpcBlockError`](crate::types::blocks::RpcBlockError)
#[derive(Debug, serde::Serialize, serde::Deserialize, Clone, PartialEq)]
/// like [`RpcBlockError`](crate::types::blocks::RpcBlockError).
#[derive(Debug, serde::Serialize, serde::Deserialize, Clone, PartialEq, Eq)]
#[serde(deny_unknown_fields)]
pub struct RpcError {
#[serde(flatten)]
pub error_struct: Option<RpcErrorKind>,
/// Deprecated please use the `error_struct` instead
/// Deprecated please use the `error_struct` instead.
pub code: i64,
/// Deprecated please use the `error_struct` instead
/// Deprecated please use the `error_struct` instead.
pub message: String,
/// Deprecated please use the `error_struct` instead
/// Deprecated please use the `error_struct` instead.
#[serde(skip_serializing_if = "Option::is_none")]
pub data: Option<Value>,
}
#[derive(Debug, serde::Serialize, serde::Deserialize, Clone, PartialEq)]
#[derive(Debug, serde::Serialize, serde::Deserialize, Clone, PartialEq, Eq)]
#[serde(tag = "name", content = "cause", rename_all = "SCREAMING_SNAKE_CASE")]
pub enum RpcErrorKind {
RequestValidationError(RpcRequestValidationErrorKind),
@ -31,14 +31,14 @@ pub enum RpcErrorKind {
InternalError(Value),
}
#[derive(Debug, serde::Serialize, serde::Deserialize, Clone, PartialEq)]
#[derive(Debug, serde::Serialize, serde::Deserialize, Clone, PartialEq, Eq)]
#[serde(tag = "name", content = "info", rename_all = "SCREAMING_SNAKE_CASE")]
pub enum RpcRequestValidationErrorKind {
MethodNotFound { method_name: String },
ParseError { error_message: String },
}
/// A general Server Error
/// A general Server Error.
#[derive(serde::Serialize, serde::Deserialize, Debug, PartialEq, Eq, Clone)]
pub enum ServerError {
Timeout,
@ -49,8 +49,9 @@ impl RpcError {
/// A generic constructor.
///
/// Mostly for completeness, doesn't do anything but filling in the corresponding fields.
pub fn new(code: i64, message: String, data: Option<Value>) -> Self {
RpcError {
#[must_use]
pub const fn new(code: i64, message: String, data: Option<Value>) -> Self {
Self {
code,
message,
data,
@ -69,12 +70,12 @@ impl RpcError {
)));
}
};
RpcError::new(-32_602, "Invalid params".to_owned(), Some(value))
Self::new(-32_602, "Invalid params".to_owned(), Some(value))
}
/// Create a server error.
pub fn server_error<E: serde::Serialize>(e: Option<E>) -> Self {
RpcError::new(
Self::new(
-32_000,
"Server error".to_owned(),
e.map(|v| to_value(v).expect("Must be representable in JSON")),
@ -82,8 +83,9 @@ impl RpcError {
}
/// Create a parse error.
#[must_use]
pub fn parse_error(e: String) -> Self {
RpcError {
Self {
code: -32_700,
message: "Parse error".to_owned(),
data: Some(Value::String(e.clone())),
@ -93,12 +95,14 @@ impl RpcError {
}
}
#[must_use]
pub fn serialization_error(e: &str) -> Self {
RpcError::new_internal_error(Some(Value::String(e.to_owned())), e)
Self::new_internal_error(Some(Value::String(e.to_owned())), e)
}
/// Helper method to extract `INTERNAL_ERROR` into a separate `RpcErrorKind`.
/// Returns `HANDLER_ERROR` if the error is not internal one
/// Returns `HANDLER_ERROR` if the error is not an internal one.
#[must_use]
pub fn new_internal_or_handler_error(error_data: Option<Value>, error_struct: Value) -> Self {
if error_struct["name"] == "INTERNAL_ERROR" {
let error_message = match error_struct["info"].get("error_message") {
@ -111,8 +115,9 @@ impl RpcError {
}
}
#[must_use]
pub fn new_internal_error(error_data: Option<Value>, info: &str) -> Self {
RpcError {
Self {
code: -32_000,
message: "Server error".to_owned(),
data: error_data,
@ -124,7 +129,7 @@ impl RpcError {
}
fn new_handler_error(error_data: Option<Value>, error_struct: Value) -> Self {
RpcError {
Self {
code: -32_000,
message: "Server error".to_owned(),
data: error_data,
@ -133,8 +138,9 @@ impl RpcError {
}
/// Create a method not found error.
#[must_use]
pub fn method_not_found(method: String) -> Self {
RpcError {
Self {
code: -32_601,
message: "Method not found".to_owned(),
data: Some(Value::String(method.clone())),
@ -161,6 +167,7 @@ impl From<RpcParseError> for RpcError {
impl From<std::convert::Infallible> for RpcError {
fn from(_: std::convert::Infallible) -> Self {
// SAFETY: Infallible error can never be constructed, so this code can never be reached.
unsafe { core::hint::unreachable_unchecked() }
}
}
@ -168,20 +175,20 @@ impl From<std::convert::Infallible> for RpcError {
impl fmt::Display for ServerError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
ServerError::Timeout => write!(f, "ServerError: Timeout"),
ServerError::Closed => write!(f, "ServerError: Closed"),
Self::Timeout => write!(f, "ServerError: Timeout"),
Self::Closed => write!(f, "ServerError: Closed"),
}
}
}
impl From<ServerError> for RpcError {
fn from(e: ServerError) -> RpcError {
fn from(e: ServerError) -> Self {
let error_data = match to_value(&e) {
Ok(value) => value,
Err(_err) => {
return RpcError::new_internal_error(None, "Failed to serialize ServerError");
return Self::new_internal_error(None, "Failed to serialize ServerError");
}
};
RpcError::new_internal_error(Some(error_data), e.to_string().as_str())
Self::new_internal_error(Some(error_data), e.to_string().as_str())
}
}

View File

@ -13,12 +13,14 @@ use std::fmt::{Formatter, Result as FmtResult};
use serde::{
de::{Deserializer, Error, Unexpected, Visitor},
ser::{SerializeStruct, Serializer},
ser::{SerializeStruct as _, Serializer},
};
use serde_json::{Result as JsonResult, Value};
use super::errors::RpcError;
pub type Parsed = Result<Message, Broken>;
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
struct Version;
@ -29,10 +31,13 @@ impl serde::Serialize for Version {
}
impl<'de> serde::Deserialize<'de> for Version {
#[expect(
clippy::renamed_function_params,
reason = "More readable than original serde parameter names"
)]
fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
struct VersionVisitor;
#[allow(clippy::needless_lifetimes)]
impl<'de> Visitor<'de> for VersionVisitor {
impl Visitor<'_> for VersionVisitor {
type Value = Version;
fn expecting(&self, formatter: &mut Formatter<'_>) -> FmtResult {
@ -51,8 +56,12 @@ impl<'de> serde::Deserialize<'de> for Version {
}
/// An RPC request.
#[derive(Debug, serde::Serialize, serde::Deserialize, Clone, PartialEq)]
#[derive(Debug, serde::Serialize, serde::Deserialize, Clone, PartialEq, Eq)]
#[serde(deny_unknown_fields)]
#[expect(
clippy::partial_pub_fields,
reason = "We don't want to allow access to the version, but the others are public for ease of use"
)]
pub struct Request {
jsonrpc: Version,
pub method: String,
@ -62,6 +71,7 @@ pub struct Request {
}
impl Request {
#[must_use]
pub fn from_payload_version_2_0(method: String, payload: serde_json::Value) -> Self {
Self {
jsonrpc: Version,
@ -75,6 +85,7 @@ impl Request {
/// Answer the request with a (positive) reply.
///
/// The ID is taken from the request.
#[must_use]
pub fn reply(&self, reply: Value) -> Message {
Message::Response(Response {
jsonrpc: Version,
@ -84,6 +95,7 @@ impl Request {
}
/// Answer the request with an error.
#[must_use]
pub fn error(&self, error: RpcError) -> Message {
Message::Response(Response {
jsonrpc: Version,
@ -96,7 +108,11 @@ impl Request {
/// A response to an RPC.
///
/// It is created by the methods on [Request](struct.Request.html).
#[derive(Debug, Clone, PartialEq)]
#[expect(
clippy::partial_pub_fields,
reason = "We don't want to allow access to the version, but the others are public for ease of use"
)]
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Response {
jsonrpc: Version,
pub result: Result<Value, RpcError>,
@ -107,30 +123,22 @@ impl serde::Serialize for Response {
fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
let mut sub = serializer.serialize_struct("Response", 3)?;
sub.serialize_field("jsonrpc", &self.jsonrpc)?;
match self.result {
Ok(ref value) => sub.serialize_field("result", value),
Err(ref err) => sub.serialize_field("error", err),
match &self.result {
Ok(value) => sub.serialize_field("result", value),
Err(err) => sub.serialize_field("error", err),
}?;
sub.serialize_field("id", &self.id)?;
sub.end()
}
}
/// Deserializer for `Option<Value>` that produces `Some(Value::Null)`.
///
/// The usual one produces None in that case. But we need to know the difference between
/// `{x: null}` and `{}`.
fn some_value<'de, D: Deserializer<'de>>(deserializer: D) -> Result<Option<Value>, D::Error> {
serde::Deserialize::deserialize(deserializer).map(Some)
}
/// A helper trick for deserialization.
#[derive(serde::Deserialize)]
#[serde(deny_unknown_fields)]
struct WireResponse {
// It is actually used to eat and sanity check the deserialized text
#[allow(dead_code)]
jsonrpc: Version,
#[serde(rename = "jsonrpc")]
_jsonrpc: Version,
// Make sure we accept null as Some(Value::Null), instead of going to None
#[serde(default, deserialize_with = "some_value")]
result: Option<Value>,
@ -152,7 +160,7 @@ impl<'de> serde::Deserialize<'de> for Response {
return Err(err);
}
};
Ok(Response {
Ok(Self {
jsonrpc: Version,
result,
id: wr.id,
@ -161,7 +169,11 @@ impl<'de> serde::Deserialize<'de> for Response {
}
/// A notification (doesn't expect an answer).
#[derive(Debug, serde::Serialize, serde::Deserialize, Clone, PartialEq)]
#[expect(
clippy::partial_pub_fields,
reason = "We don't want to allow access to the version, but the others are public for ease of use"
)]
#[derive(Debug, serde::Serialize, serde::Deserialize, Clone, PartialEq, Eq)]
#[serde(deny_unknown_fields)]
pub struct Notification {
jsonrpc: Version,
@ -198,7 +210,7 @@ pub enum Message {
/// message.
///
/// This variant has no direct constructor and is expected to be constructed manually.
Batch(Vec<Message>),
Batch(Vec<Self>),
/// An unmatched sub entry in a `Batch`.
///
/// When there's a `Batch` and an element doesn't comform to the JSONRPC 2.0 format, that one
@ -212,9 +224,10 @@ impl Message {
/// A constructor for a request.
///
/// The ID is auto-set to dontcare.
#[must_use]
pub fn request(method: String, params: Value) -> Self {
let id = Value::from("dontcare");
Message::Request(Request {
Self::Request(Request {
jsonrpc: Version,
method,
params,
@ -223,8 +236,9 @@ impl Message {
}
/// Create a top-level error (without an ID).
pub fn error(error: RpcError) -> Self {
Message::Response(Response {
#[must_use]
pub const fn error(error: RpcError) -> Self {
Self::Response(Response {
jsonrpc: Version,
result: Err(error),
id: Value::Null,
@ -232,8 +246,9 @@ impl Message {
}
/// A constructor for a notification.
pub fn notification(method: String, params: Value) -> Self {
Message::Notification(Notification {
#[must_use]
pub const fn notification(method: String, params: Value) -> Self {
Self::Notification(Notification {
jsonrpc: Version,
method,
params,
@ -241,8 +256,9 @@ impl Message {
}
/// A constructor for a response.
pub fn response(id: Value, result: Result<Value, RpcError>) -> Self {
Message::Response(Response {
#[must_use]
pub const fn response(id: Value, result: Result<Value, RpcError>) -> Self {
Self::Response(Response {
jsonrpc: Version,
result,
id,
@ -250,18 +266,33 @@ impl Message {
}
/// Returns id or Null if there is no id.
#[must_use]
pub fn id(&self) -> Value {
match self {
Message::Request(req) => req.id.clone(),
_ => Value::Null,
Self::Request(req) => req.id.clone(),
Self::Response(response) => response.id.clone(),
Self::Notification(_) | Self::Batch(_) | Self::UnmatchedSub(_) => Value::Null,
}
}
}
impl From<Message> for String {
fn from(val: Message) -> Self {
::serde_json::ser::to_string(&val).expect("message serialization to json should not fail")
}
}
impl From<Message> for Vec<u8> {
fn from(val: Message) -> Self {
::serde_json::ser::to_vec(&val)
.expect("message serialization to json bytes should not fail")
}
}
/// A broken message.
///
/// Protocol-level errors.
#[derive(Debug, Clone, PartialEq, serde::Deserialize)]
#[derive(Debug, Clone, PartialEq, Eq, serde::Deserialize)]
#[serde(untagged)]
pub enum Broken {
/// It was valid JSON, but doesn't match the form of a JSONRPC 2.0 message.
@ -276,12 +307,13 @@ impl Broken {
///
/// The error message for these things are specified in the RFC, so this just creates an error
/// with the right values.
#[must_use]
pub fn reply(&self) -> Message {
match *self {
Broken::Unmatched(_) => Message::error(RpcError::parse_error(
match self {
Self::Unmatched(_) => Message::error(RpcError::parse_error(
"JSON RPC Request format was expected".to_owned(),
)),
Broken::SyntaxError(ref e) => Message::error(RpcError::parse_error(e.clone())),
Self::SyntaxError(e) => Message::error(RpcError::parse_error(e.clone())),
}
}
}
@ -303,8 +335,6 @@ pub fn decoded_to_parsed(res: JsonResult<WireMessage>) -> Parsed {
}
}
pub type Parsed = Result<Message, Broken>;
/// Read a [Message](enum.Message.html) from a slice.
///
/// Invalid JSON or JSONRPC messages are reported as [Broken](enum.Broken.html).
@ -319,16 +349,12 @@ pub fn from_str(s: &str) -> Parsed {
from_slice(s.as_bytes())
}
impl From<Message> for String {
fn from(val: Message) -> Self {
::serde_json::ser::to_string(&val).unwrap()
}
}
impl From<Message> for Vec<u8> {
fn from(val: Message) -> Self {
::serde_json::ser::to_vec(&val).unwrap()
}
/// Deserializer for `Option<Value>` that produces `Some(Value::Null)`.
///
/// The usual one produces None in that case. But we need to know the difference between
/// `{x: null}` and `{}`.
fn some_value<'de, D: Deserializer<'de>>(deserializer: D) -> Result<Option<Value>, D::Error> {
serde::Deserialize::deserialize(deserializer).map(Some)
}
#[cfg(test)]
@ -337,13 +363,12 @@ mod tests {
use super::*;
/// Test serialization and deserialization of the Message
/// Test serialization and deserialization of the Message.
///
/// We first deserialize it from a string. That way we check deserialization works.
/// But since serialization doesn't have to produce the exact same result (order, spaces, …),
/// we then serialize and deserialize the thing again and check it matches.
#[test]
#[allow(clippy::too_many_lines)]
fn message_serde() {
// A helper for running one message test
fn one(input: &str, expected: &Message) {
@ -463,11 +488,10 @@ mod tests {
///
/// Check that the given JSON string parses, but is not recognized as a valid RPC message.
///
/// Test things that are almost but not entirely JSONRPC are rejected
/// Test that things which are almost but not entirely JSONRPC are rejected.
///
/// The reject is done by returning it as Unmatched.
#[test]
#[allow(clippy::panic)]
fn broken() {
// A helper with one test
fn one(input: &str) {
@ -491,19 +515,18 @@ mod tests {
// Something completely different
one(r#"{"x": [1, 2, 3]}"#);
match from_str(r#"{]"#) {
match from_str("{]") {
Err(Broken::SyntaxError(_)) => (),
other => panic!("Something unexpected: {other:?}"),
};
}
}
/// Test some non-trivial aspects of the constructors
/// Test some non-trivial aspects of the constructors.
///
/// This doesn't have a full coverage, because there's not much to actually test there.
/// Most of it is related to the ids.
#[test]
#[allow(clippy::panic)]
#[ignore]
#[ignore = "Not a full coverage test"]
fn constructors() {
let msg1 = Message::request("call".to_owned(), json!([1, 2, 3]));
let msg2 = Message::request("call".to_owned(), json!([1, 2, 3]));
@ -520,9 +543,9 @@ mod tests {
};
let id1 = req1.id.clone();
// When we answer a message, we get the same ID
if let Message::Response(ref resp) = req1.reply(json!([1, 2, 3])) {
if let Message::Response(resp) = req1.reply(json!([1, 2, 3])) {
assert_eq!(
*resp,
resp,
Response {
jsonrpc: Version,
result: Ok(json!([1, 2, 3])),
@ -534,11 +557,9 @@ mod tests {
}
let id2 = req2.id.clone();
// The same with an error
if let Message::Response(ref resp) =
req2.error(RpcError::new(42, "Wrong!".to_owned(), None))
{
if let Message::Response(resp) = req2.error(RpcError::new(42, "Wrong!".to_owned(), None)) {
assert_eq!(
*resp,
resp,
Response {
jsonrpc: Version,
result: Err(RpcError::new(42, "Wrong!".to_owned(), None)),
@ -549,11 +570,11 @@ mod tests {
panic!("Not a response");
}
// When we have unmatched, we generate a top-level error with Null id.
if let Message::Response(ref resp) =
if let Message::Response(resp) =
Message::error(RpcError::new(43, "Also wrong!".to_owned(), None))
{
assert_eq!(
*resp,
resp,
Response {
jsonrpc: Version,
result: Err(RpcError::new(43, "Also wrong!".to_owned(), None)),

View File

@ -30,7 +30,7 @@ pub struct RpcConfig {
impl Default for RpcConfig {
fn default() -> Self {
RpcConfig {
Self {
addr: "0.0.0.0:3040".to_owned(),
cors_allowed_origins: vec!["*".to_owned()],
limits_config: RpcLimitsConfig::default(),
@ -39,15 +39,17 @@ impl Default for RpcConfig {
}
impl RpcConfig {
#[must_use]
pub fn new(addr: &str) -> Self {
RpcConfig {
Self {
addr: addr.to_owned(),
..Default::default()
}
}
#[must_use]
pub fn with_port(port: u16) -> Self {
RpcConfig {
Self {
addr: format!("0.0.0.0:{port}"),
..Default::default()
}

View File

@ -3,18 +3,6 @@ use serde_json::Value;
use super::errors::RpcParseError;
pub trait RpcRequest: Sized {
fn parse(value: Option<Value>) -> Result<Self, RpcParseError>;
}
pub fn parse_params<T: DeserializeOwned>(value: Option<Value>) -> Result<T, RpcParseError> {
if let Some(value) = value {
serde_json::from_value(value)
.map_err(|err| RpcParseError(format!("Failed parsing args: {err}")))
} else {
Err(RpcParseError("Require at least one parameter".to_owned()))
}
}
#[macro_export]
macro_rules! parse_request {
($request_name:ty) => {
@ -25,3 +13,17 @@ macro_rules! parse_request {
}
};
}
pub trait RpcRequest: Sized {
fn parse(value: Option<Value>) -> Result<Self, RpcParseError>;
}
pub fn parse_params<T: DeserializeOwned>(value: Option<Value>) -> Result<T, RpcParseError> {
value.map_or_else(
|| Err(RpcParseError("Require at least one parameter".to_owned())),
|value| {
serde_json::from_value(value)
.map_err(|err| RpcParseError(format!("Failed parsing args: {err}")))
},
)
}

View File

@ -11,8 +11,62 @@ use super::{
};
use crate::{HashType, parse_request};
mod base64_deser {
use base64::{Engine as _, engine::general_purpose};
use serde::{self, Deserialize, Deserializer, Serializer, ser::SerializeSeq as _};
pub mod vec {
use super::*;
pub fn serialize<S>(bytes_vec: &[Vec<u8>], serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let mut seq = serializer.serialize_seq(Some(bytes_vec.len()))?;
for bytes in bytes_vec {
let s = general_purpose::STANDARD.encode(bytes);
seq.serialize_element(&s)?;
}
seq.end()
}
pub fn deserialize<'de, D>(deserializer: D) -> Result<Vec<Vec<u8>>, D::Error>
where
D: Deserializer<'de>,
{
let base64_strings: Vec<String> = Deserialize::deserialize(deserializer)?;
base64_strings
.into_iter()
.map(|s| {
general_purpose::STANDARD
.decode(&s)
.map_err(serde::de::Error::custom)
})
.collect()
}
}
pub fn serialize<S>(bytes: &[u8], serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let base64_string = general_purpose::STANDARD.encode(bytes);
serializer.serialize_str(&base64_string)
}
pub fn deserialize<'de, D>(deserializer: D) -> Result<Vec<u8>, D::Error>
where
D: Deserializer<'de>,
{
let base64_string: String = Deserialize::deserialize(deserializer)?;
general_purpose::STANDARD
.decode(&base64_string)
.map_err(serde::de::Error::custom)
}
}
#[derive(Serialize, Deserialize, Debug)]
pub struct HelloRequest {}
pub struct HelloRequest;
#[derive(Serialize, Deserialize, Debug)]
pub struct RegisterAccountRequest {
@ -30,7 +84,7 @@ pub struct GetBlockDataRequest {
pub block_id: u64,
}
/// Get a range of blocks from `start_block_id` to `end_block_id` (inclusive)
/// Get a range of blocks from `start_block_id` to `end_block_id` (inclusive).
#[derive(Serialize, Deserialize, Debug)]
pub struct GetBlockRangeDataRequest {
pub start_block_id: u64,
@ -38,13 +92,13 @@ pub struct GetBlockRangeDataRequest {
}
#[derive(Serialize, Deserialize, Debug)]
pub struct GetGenesisIdRequest {}
pub struct GetGenesisIdRequest;
#[derive(Serialize, Deserialize, Debug)]
pub struct GetLastBlockRequest {}
pub struct GetLastBlockRequest;
#[derive(Serialize, Deserialize, Debug)]
pub struct GetInitialTestnetAccountsRequest {}
pub struct GetInitialTestnetAccountsRequest;
#[derive(Serialize, Deserialize, Debug)]
pub struct GetAccountBalanceRequest {
@ -72,7 +126,7 @@ pub struct GetProofForCommitmentRequest {
}
#[derive(Serialize, Deserialize, Debug)]
pub struct GetProgramIdsRequest {}
pub struct GetProgramIdsRequest;
parse_request!(HelloRequest);
parse_request!(RegisterAccountRequest);
@ -117,60 +171,6 @@ pub struct GetBlockRangeDataResponse {
pub blocks: Vec<Vec<u8>>,
}
mod base64_deser {
use base64::{Engine as _, engine::general_purpose};
use serde::{self, Deserialize, Deserializer, Serializer, ser::SerializeSeq as _};
pub fn serialize<S>(bytes: &[u8], serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let base64_string = general_purpose::STANDARD.encode(bytes);
serializer.serialize_str(&base64_string)
}
pub fn deserialize<'de, D>(deserializer: D) -> Result<Vec<u8>, D::Error>
where
D: Deserializer<'de>,
{
let base64_string: String = Deserialize::deserialize(deserializer)?;
general_purpose::STANDARD
.decode(&base64_string)
.map_err(serde::de::Error::custom)
}
pub mod vec {
use super::*;
pub fn serialize<S>(bytes_vec: &[Vec<u8>], serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let mut seq = serializer.serialize_seq(Some(bytes_vec.len()))?;
for bytes in bytes_vec {
let s = general_purpose::STANDARD.encode(bytes);
seq.serialize_element(&s)?;
}
seq.end()
}
pub fn deserialize<'de, D>(deserializer: D) -> Result<Vec<Vec<u8>>, D::Error>
where
D: Deserializer<'de>,
{
let base64_strings: Vec<String> = Deserialize::deserialize(deserializer)?;
base64_strings
.into_iter()
.map(|s| {
general_purpose::STANDARD
.decode(&s)
.map_err(serde::de::Error::custom)
})
.collect()
}
}
}
#[derive(Serialize, Deserialize, Debug)]
pub struct GetGenesisIdResponse {
pub genesis_id: u64,
@ -213,7 +213,7 @@ pub struct GetProgramIdsResponse {
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct GetInitialTestnetAccountsResponse {
/// Hex encoded account id
/// Hex encoded account id.
pub account_id: String,
pub balance: u64,
}

View File

@ -30,6 +30,15 @@ use crate::{
transaction::NSSATransaction,
};
#[derive(Debug, Clone, Deserialize)]
struct SequencerRpcResponse {
#[serde(rename = "jsonrpc")]
_jsonrpc: String,
result: serde_json::Value,
#[serde(rename = "id")]
_id: u64,
}
#[derive(Clone)]
pub struct SequencerClient {
pub client: reqwest::Client,
@ -61,7 +70,7 @@ impl SequencerClient {
payload: Value,
) -> Result<Value, SequencerClientError> {
let request =
rpc_primitives::message::Request::from_payload_version_2_0(method.to_string(), payload);
rpc_primitives::message::Request::from_payload_version_2_0(method.to_owned(), payload);
log::debug!(
"Calling method {method} with payload {request:?} to sequencer at {}",
@ -86,14 +95,6 @@ impl SequencerClient {
})
.await?;
#[derive(Debug, Clone, Deserialize)]
#[allow(dead_code)]
pub struct SequencerRpcResponse {
pub jsonrpc: String,
pub result: serde_json::Value,
pub id: u64,
}
if let Ok(response) = serde_json::from_value::<SequencerRpcResponse>(response_vall.clone())
{
Ok(response.result)
@ -104,7 +105,7 @@ impl SequencerClient {
}
}
/// Get block data at `block_id` from sequencer
/// Get block data at `block_id` from sequencer.
pub async fn get_block(
&self,
block_id: u64,
@ -140,7 +141,7 @@ impl SequencerClient {
Ok(resp_deser)
}
/// Get last known `blokc_id` from sequencer
/// Get last known `blokc_id` from sequencer.
pub async fn get_last_block(&self) -> Result<GetLastBlockResponse, SequencerClientError> {
let block_req = GetLastBlockRequest {};
@ -224,7 +225,7 @@ impl SequencerClient {
Ok(resp_deser)
}
/// Send transaction to sequencer
/// Send transaction to sequencer.
pub async fn send_tx_public(
&self,
transaction: nssa::PublicTransaction,
@ -244,7 +245,7 @@ impl SequencerClient {
Ok(resp_deser)
}
/// Send transaction to sequencer
/// Send transaction to sequencer.
pub async fn send_tx_private(
&self,
transaction: nssa::PrivacyPreservingTransaction,
@ -264,7 +265,7 @@ impl SequencerClient {
Ok(resp_deser)
}
/// Get genesis id from sequencer
/// Get genesis id from sequencer.
pub async fn get_genesis_id(&self) -> Result<GetGenesisIdResponse, SequencerClientError> {
let genesis_req = GetGenesisIdRequest {};
@ -280,7 +281,7 @@ impl SequencerClient {
Ok(resp_deser)
}
/// Get initial testnet accounts from sequencer
/// Get initial testnet accounts from sequencer.
pub async fn get_initial_testnet_accounts(
&self,
) -> Result<Vec<GetInitialTestnetAccountsResponse>, SequencerClientError> {
@ -298,7 +299,7 @@ impl SequencerClient {
Ok(resp_deser)
}
/// Get proof for commitment
/// Get proof for commitment.
pub async fn get_proof_for_commitment(
&self,
commitment: nssa_core::Commitment,
@ -338,7 +339,7 @@ impl SequencerClient {
Ok(resp_deser)
}
/// Get Ids of the programs used by the node
/// Get Ids of the programs used by the node.
pub async fn get_program_ids(
&self,
) -> Result<HashMap<String, ProgramId>, SequencerClientError> {

View File

@ -8,19 +8,21 @@ use crate::{
// Helpers
#[must_use]
pub fn sequencer_sign_key_for_testing() -> nssa::PrivateKey {
nssa::PrivateKey::try_new([37; 32]).unwrap()
}
// Dummy producers
/// Produce dummy block with
/// Produce dummy block with.
///
/// `id` - block id, provide zero for genesis
/// `id` - block id, provide zero for genesis.
///
/// `prev_hash` - hash of previous block, provide None for genesis
/// `prev_hash` - hash of previous block, provide None for genesis.
///
/// `transactions` - vector of `EncodedTransaction` objects
/// `transactions` - vector of `EncodedTransaction` objects.
#[must_use]
pub fn produce_dummy_block(
id: u64,
prev_hash: Option<HashType>,
@ -29,13 +31,14 @@ pub fn produce_dummy_block(
let block_data = HashableBlockData {
block_id: id,
prev_block_hash: prev_hash.unwrap_or_default(),
timestamp: id * 100,
timestamp: id.saturating_mul(100),
transactions,
};
block_data.into_pending_block(&sequencer_sign_key_for_testing(), [0; 32])
}
#[must_use]
pub fn produce_dummy_empty_transaction() -> NSSATransaction {
let program_id = nssa::program::Program::authenticated_transfer_program().id();
let account_ids = vec![];
@ -56,12 +59,13 @@ pub fn produce_dummy_empty_transaction() -> NSSATransaction {
NSSATransaction::Public(nssa_tx)
}
#[must_use]
pub fn create_transaction_native_token_transfer(
from: AccountId,
nonce: u128,
to: AccountId,
balance_to_move: u128,
signing_key: nssa::PrivateKey,
signing_key: &nssa::PrivateKey,
) -> NSSATransaction {
let account_ids = vec![from, to];
let nonces = vec![nonce.into()];
@ -73,7 +77,7 @@ pub fn create_transaction_native_token_transfer(
balance_to_move,
)
.unwrap();
let witness_set = nssa::public_transaction::WitnessSet::for_message(&message, &[&signing_key]);
let witness_set = nssa::public_transaction::WitnessSet::for_message(&message, &[signing_key]);
let nssa_tx = nssa::PublicTransaction::new(message, witness_set);

View File

@ -13,19 +13,21 @@ pub enum NSSATransaction {
}
impl NSSATransaction {
#[must_use]
pub fn hash(&self) -> HashType {
HashType(match self {
NSSATransaction::Public(tx) => tx.hash(),
NSSATransaction::PrivacyPreserving(tx) => tx.hash(),
NSSATransaction::ProgramDeployment(tx) => tx.hash(),
Self::Public(tx) => tx.hash(),
Self::PrivacyPreserving(tx) => tx.hash(),
Self::ProgramDeployment(tx) => tx.hash(),
})
}
#[must_use]
pub fn affected_public_account_ids(&self) -> Vec<AccountId> {
match self {
NSSATransaction::ProgramDeployment(tx) => tx.affected_public_account_ids(),
NSSATransaction::Public(tx) => tx.affected_public_account_ids(),
NSSATransaction::PrivacyPreserving(tx) => tx.affected_public_account_ids(),
Self::ProgramDeployment(tx) => tx.affected_public_account_ids(),
Self::Public(tx) => tx.affected_public_account_ids(),
Self::PrivacyPreserving(tx) => tx.affected_public_account_ids(),
}
}
@ -33,21 +35,21 @@ impl NSSATransaction {
pub fn transaction_stateless_check(self) -> Result<Self, TransactionMalformationError> {
// Stateless checks here
match self {
NSSATransaction::Public(tx) => {
Self::Public(tx) => {
if tx.witness_set().is_valid_for(tx.message()) {
Ok(NSSATransaction::Public(tx))
Ok(Self::Public(tx))
} else {
Err(TransactionMalformationError::InvalidSignature)
}
}
NSSATransaction::PrivacyPreserving(tx) => {
Self::PrivacyPreserving(tx) => {
if tx.witness_set().signatures_are_valid_for(tx.message()) {
Ok(NSSATransaction::PrivacyPreserving(tx))
Ok(Self::PrivacyPreserving(tx))
} else {
Err(TransactionMalformationError::InvalidSignature)
}
}
NSSATransaction::ProgramDeployment(tx) => Ok(NSSATransaction::ProgramDeployment(tx)),
Self::ProgramDeployment(tx) => Ok(Self::ProgramDeployment(tx)),
}
}
@ -56,13 +58,9 @@ impl NSSATransaction {
state: &mut V02State,
) -> Result<Self, nssa::error::NssaError> {
match &self {
NSSATransaction::Public(tx) => state.transition_from_public_transaction(tx),
NSSATransaction::PrivacyPreserving(tx) => {
state.transition_from_privacy_preserving_transaction(tx)
}
NSSATransaction::ProgramDeployment(tx) => {
state.transition_from_program_deployment_transaction(tx)
}
Self::Public(tx) => state.transition_from_public_transaction(tx),
Self::PrivacyPreserving(tx) => state.transition_from_privacy_preserving_transaction(tx),
Self::ProgramDeployment(tx) => state.transition_from_program_deployment_transaction(tx),
}
.inspect_err(|err| warn!("Error at transition {err:#?}"))?;
@ -97,7 +95,7 @@ pub enum TxKind {
ProgramDeployment,
}
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize, thiserror::Error)]
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, thiserror::Error)]
pub enum TransactionMalformationError {
#[error("Invalid signature(-s)")]
InvalidSignature,

View File

@ -4,6 +4,9 @@ version = "0.1.0"
edition = "2024"
license = { workspace = true }
[lints]
workspace = true
[dependencies]
nssa.workspace = true
nssa_core.workspace = true

View File

@ -4,6 +4,9 @@ version = "0.1.0"
edition = "2024"
license = { workspace = true }
[lints]
workspace = true
[build-dependencies]
risc0-build.workspace = true

View File

@ -4,6 +4,9 @@ version = "0.1.0"
edition = "2024"
license = { workspace = true }
[lints]
workspace = true
[dependencies]
nssa_core.workspace = true

View File

@ -36,9 +36,7 @@ fn main() {
// Fail if the input account is not authorized
// The `is_authorized` field will be correctly populated or verified by the system if
// authorization is provided.
if !pre_state.is_authorized {
panic!("Missing required authorization");
}
assert!(pre_state.is_authorized, "Missing required authorization");
// ####
// Construct the post state account values

View File

@ -1,5 +1,5 @@
use nssa_core::{
account::{Account, AccountWithMetadata},
account::{Account, AccountWithMetadata, Data},
program::{
AccountPostState, DEFAULT_PROGRAM_ID, ProgramInput, read_nssa_inputs, write_nssa_outputs,
},
@ -21,10 +21,11 @@ use nssa_core::{
// In case an input account is uninitialized, the program will claim it when
// producing the post-state.
type Instruction = (u8, Vec<u8>);
const WRITE_FUNCTION_ID: u8 = 0;
const MOVE_DATA_FUNCTION_ID: u8 = 1;
type Instruction = (u8, Vec<u8>);
fn build_post_state(post_account: Account) -> AccountPostState {
if post_account.program_owner == DEFAULT_PROGRAM_ID {
// This produces a claim request
@ -35,12 +36,12 @@ fn build_post_state(post_account: Account) -> AccountPostState {
}
}
fn write(pre_state: AccountWithMetadata, greeting: Vec<u8>) -> AccountPostState {
fn write(pre_state: AccountWithMetadata, greeting: &[u8]) -> AccountPostState {
// Construct the post state account values
let post_account = {
let mut this = pre_state.account.clone();
let mut this = pre_state.account;
let mut bytes = this.data.into_inner();
bytes.extend_from_slice(&greeting);
bytes.extend_from_slice(greeting);
this.data = bytes
.try_into()
.expect("Data should fit within the allowed limits");
@ -50,21 +51,18 @@ fn write(pre_state: AccountWithMetadata, greeting: Vec<u8>) -> AccountPostState
build_post_state(post_account)
}
fn move_data(
from_pre: &AccountWithMetadata,
to_pre: &AccountWithMetadata,
) -> Vec<AccountPostState> {
fn move_data(from_pre: AccountWithMetadata, to_pre: AccountWithMetadata) -> Vec<AccountPostState> {
// Construct the post state account values
let from_data: Vec<u8> = from_pre.account.data.clone().into();
let from_post = {
let mut this = from_pre.account.clone();
this.data = Default::default();
let mut this = from_pre.account;
this.data = Data::default();
build_post_state(this)
};
let to_post = {
let mut this = to_pre.account.clone();
let mut this = to_pre.account;
let mut bytes = this.data.into_inner();
bytes.extend_from_slice(&from_data);
this.data = bytes
@ -88,11 +86,11 @@ fn main() {
let post_states = match (pre_states.as_slice(), function_id, data.len()) {
([account_pre], WRITE_FUNCTION_ID, _) => {
let post = write(account_pre.clone(), data);
let post = write(account_pre.clone(), &data);
vec![post]
}
([account_from_pre, account_to_pre], MOVE_DATA_FUNCTION_ID, 0) => {
move_data(account_from_pre, account_to_pre)
move_data(account_from_pre.clone(), account_to_pre.clone())
}
_ => panic!("invalid params"),
};

View File

@ -29,7 +29,7 @@ fn main() {
let (
ProgramInput {
pre_states,
instruction: _,
instruction: (),
},
instruction_data,
) = read_nssa_inputs::<()>();

View File

@ -34,14 +34,13 @@ fn main() {
let (
ProgramInput {
pre_states,
instruction: _,
instruction: (),
},
instruction_data,
) = read_nssa_inputs::<()>();
// Unpack the input account pre state
let [pre_state] = pre_states
.clone()
.try_into()
.unwrap_or_else(|_| panic!("Input pre states should consist of a single account"));

View File

@ -48,7 +48,7 @@ async fn main() {
let hello_world_bytecode: Vec<u8> = std::fs::read(hello_world_path).unwrap();
let hello_world = Program::new(hello_world_bytecode).unwrap();
let dependencies: HashMap<ProgramId, Program> =
[(hello_world.id(), hello_world)].into_iter().collect();
std::iter::once((hello_world.id(), hello_world)).collect();
let program_with_dependencies = ProgramWithDependencies::new(simple_tail_call, dependencies);
let accounts = vec![PrivacyPreservingAccount::PrivateOwned(account_id)];

View File

@ -1,3 +1,8 @@
#![expect(
clippy::print_stdout,
reason = "This is an example program, it's fine to print to stdout"
)]
use nssa::{
AccountId, PublicTransaction,
program::Program,

View File

@ -19,13 +19,14 @@ use wallet::{PrivacyPreservingAccount, WalletCore};
// methods/guest/target/riscv32im-risc0-zkvm-elf/docker/hello_world_with_move_function.bin \
// write-public Ds8q5PjLcKwwV97Zi7duhRVF9uwA2PuYMoLL7FwCzsXE Hola
type Instruction = (u8, Vec<u8>);
const WRITE_FUNCTION_ID: u8 = 0;
const MOVE_DATA_FUNCTION_ID: u8 = 1;
type Instruction = (u8, Vec<u8>);
#[derive(Parser, Debug)]
struct Cli {
/// Path to program binary
/// Path to program binary.
program_path: String,
#[command(subcommand)]
@ -34,7 +35,7 @@ struct Cli {
#[derive(Subcommand, Debug)]
enum Command {
/// Write instruction into one account
/// Write instruction into one account.
WritePublic {
account_id: String,
greeting: String,
@ -43,7 +44,7 @@ enum Command {
account_id: String,
greeting: String,
},
/// Move data between two accounts
/// Move data between two accounts.
MoveDataPublicToPublic {
from: String,
to: String,
@ -148,5 +149,5 @@ async fn main() {
.await
.unwrap();
}
};
}
}

View File

@ -4,6 +4,9 @@ version = "0.1.0"
edition = "2024"
license.workspace = true
[lints]
workspace = true
[lib]
crate-type = ["cdylib", "rlib"]

View File

@ -1,4 +1,4 @@
FROM rust:1.91.1-trixie AS builder
FROM rust:1.94.0-trixie AS builder
# Install cargo-binstall, which makes it easier to install other
# cargo extensions like cargo-leptos

View File

@ -2,7 +2,7 @@ use indexer_service_protocol::{Account, AccountId, Block, BlockId, HashType, Tra
use leptos::prelude::*;
use serde::{Deserialize, Serialize};
/// Search results structure
/// Search results structure.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct SearchResults {
pub blocks: Vec<Block>,
@ -10,7 +10,7 @@ pub struct SearchResults {
pub accounts: Vec<(AccountId, Account)>,
}
/// RPC client type
/// RPC client type.
#[cfg(feature = "ssr")]
pub type IndexerRpcClient = jsonrpsee::http_client::HttpClient;
@ -22,7 +22,7 @@ pub async fn get_account(account_id: AccountId) -> Result<Account, ServerFnError
client
.get_account(account_id)
.await
.map_err(|e| ServerFnError::ServerError(format!("RPC error: {}", e)))
.map_err(|e| ServerFnError::ServerError(format!("RPC error: {e}")))
}
/// Search for a block, transaction, or account by query string
@ -80,7 +80,7 @@ pub async fn get_block_by_id(block_id: BlockId) -> Result<Block, ServerFnError>
client
.get_block_by_id(block_id)
.await
.map_err(|e| ServerFnError::ServerError(format!("RPC error: {}", e)))
.map_err(|e| ServerFnError::ServerError(format!("RPC error: {e}")))
}
/// Get latest block ID
@ -91,7 +91,7 @@ pub async fn get_latest_block_id() -> Result<BlockId, ServerFnError> {
client
.get_last_finalized_block_id()
.await
.map_err(|e| ServerFnError::ServerError(format!("RPC error: {}", e)))
.map_err(|e| ServerFnError::ServerError(format!("RPC error: {e}")))
}
/// Get block by hash
@ -102,7 +102,7 @@ pub async fn get_block_by_hash(block_hash: HashType) -> Result<Block, ServerFnEr
client
.get_block_by_hash(block_hash)
.await
.map_err(|e| ServerFnError::ServerError(format!("RPC error: {}", e)))
.map_err(|e| ServerFnError::ServerError(format!("RPC error: {e}")))
}
/// Get transaction by hash
@ -113,36 +113,36 @@ pub async fn get_transaction(tx_hash: HashType) -> Result<Transaction, ServerFnE
client
.get_transaction(tx_hash)
.await
.map_err(|e| ServerFnError::ServerError(format!("RPC error: {}", e)))
.map_err(|e| ServerFnError::ServerError(format!("RPC error: {e}")))
}
/// Get blocks with pagination
#[server]
pub async fn get_blocks(before: Option<u64>, limit: u32) -> Result<Vec<Block>, ServerFnError> {
pub async fn get_blocks(before: Option<BlockId>, limit: u64) -> Result<Vec<Block>, ServerFnError> {
use indexer_service_rpc::RpcClient as _;
let client = expect_context::<IndexerRpcClient>();
client
.get_blocks(before, limit)
.await
.map_err(|e| ServerFnError::ServerError(format!("RPC error: {}", e)))
.map_err(|e| ServerFnError::ServerError(format!("RPC error: {e}")))
}
/// Get transactions by account
#[server]
pub async fn get_transactions_by_account(
account_id: AccountId,
limit: u32,
offset: u32,
offset: u64,
limit: u64,
) -> Result<Vec<Transaction>, ServerFnError> {
use indexer_service_rpc::RpcClient as _;
let client = expect_context::<IndexerRpcClient>();
client
.get_transactions_by_account(account_id, limit, offset)
.get_transactions_by_account(account_id, offset, limit)
.await
.map_err(|e| ServerFnError::ServerError(format!("RPC error: {}", e)))
.map_err(|e| ServerFnError::ServerError(format!("RPC error: {e}")))
}
/// Create the RPC client for the indexer service (server-side only)
/// Create the RPC client for the indexer service (server-side only).
#[cfg(feature = "ssr")]
pub fn create_indexer_rpc_client(url: &url::Url) -> Result<IndexerRpcClient, String> {
use jsonrpsee::http_client::HttpClientBuilder;

View File

@ -4,8 +4,8 @@ use leptos_router::components::A;
use crate::format_utils;
/// Get CSS class for bedrock status
fn status_class(status: &BedrockStatus) -> &'static str {
/// Get CSS class for bedrock status.
const fn status_class(status: &BedrockStatus) -> &'static str {
match status {
BedrockStatus::Pending => "status-pending",
BedrockStatus::Safe => "status-safe",

View File

@ -1,7 +1,7 @@
pub mod account_preview;
pub mod block_preview;
pub mod transaction_preview;
pub use account_preview::AccountPreview;
pub use block_preview::BlockPreview;
pub use transaction_preview::TransactionPreview;
pub mod account_preview;
pub mod block_preview;
pub mod transaction_preview;

View File

@ -2,8 +2,8 @@ use indexer_service_protocol::Transaction;
use leptos::prelude::*;
use leptos_router::components::A;
/// Get transaction type name and CSS class
fn transaction_type_info(tx: &Transaction) -> (&'static str, &'static str) {
/// Get transaction type name and CSS class.
const fn transaction_type_info(tx: &Transaction) -> (&'static str, &'static str) {
match tx {
Transaction::Public(_) => ("Public", "tx-type-public"),
Transaction::PrivacyPreserving(_) => ("Privacy-Preserving", "tx-type-private"),
@ -13,6 +13,10 @@ fn transaction_type_info(tx: &Transaction) -> (&'static str, &'static str) {
/// Transaction preview component
#[component]
#[expect(
clippy::needless_pass_by_value,
reason = "Leptos component props are passed by value by framework convention"
)]
pub fn TransactionPreview(transaction: Transaction) -> impl IntoView {
let hash = transaction.hash();
let hash_str = hash.to_string();

View File

@ -1,9 +1,17 @@
//! Formatting utilities for the explorer
//! Formatting utilities for the explorer.
/// Format timestamp to human-readable string
/// Format timestamp to human-readable string.
#[expect(
clippy::integer_division,
clippy::integer_division_remainder_used,
reason = "We need to convert milliseconds to seconds, and this is the most straightforward way to do it"
)]
pub fn format_timestamp(timestamp: u64) -> String {
let seconds = timestamp / 1000;
let datetime = chrono::DateTime::from_timestamp(seconds as i64, 0)
.unwrap_or_else(|| chrono::DateTime::from_timestamp(0, 0).unwrap());
let datetime = chrono::DateTime::from_timestamp(
i64::try_from(seconds).expect("Timestamp out of range"),
0,
)
.unwrap_or_else(|| chrono::DateTime::from_timestamp(0, 0).unwrap());
datetime.format("%Y-%m-%d %H:%M:%S UTC").to_string()
}

View File

@ -1,3 +1,9 @@
#![expect(
clippy::must_use_candidate,
clippy::same_name_method,
reason = "Warns on code generated by leptos macros"
)]
use leptos::prelude::*;
use leptos_meta::{Meta, Stylesheet, Title, provide_meta_context};
use leptos_router::{

View File

@ -1,3 +1,7 @@
#[expect(
clippy::print_stdout,
reason = "This is just simple and handy for such a small server"
)]
#[cfg(feature = "ssr")]
#[tokio::main]
async fn main() {
@ -5,20 +9,20 @@ async fn main() {
use clap::Parser;
use explorer_service::App;
use leptos::prelude::*;
use leptos_axum::{LeptosRoutes, generate_route_list};
use leptos_axum::{LeptosRoutes as _, generate_route_list};
use leptos_meta::MetaTags;
env_logger::init();
/// LEZ Block Explorer Server CLI arguments.
#[derive(Parser, Debug)]
#[command(version, about, long_about = None)]
struct Args {
/// Indexer RPC URL
/// Indexer RPC URL.
#[arg(long, env = "INDEXER_RPC_URL", default_value = "http://localhost:8779")]
indexer_rpc_url: url::Url,
}
env_logger::init();
let args = Args::parse();
let conf = get_configuration(None).unwrap();

View File

@ -10,11 +10,11 @@ use crate::{api, components::TransactionPreview};
#[component]
pub fn AccountPage() -> impl IntoView {
let params = use_params_map();
let (tx_offset, set_tx_offset) = signal(0u32);
let (tx_offset, set_tx_offset) = signal(0_u64);
let (all_transactions, set_all_transactions) = signal(Vec::new());
let (is_loading, set_is_loading) = signal(false);
let (has_more, set_has_more) = signal(true);
let tx_limit = 10u32;
let tx_limit = 10_u64;
// Parse account ID from URL params
let account_id = move || {
@ -27,7 +27,7 @@ pub fn AccountPage() -> impl IntoView {
match acc_id_opt {
Some(acc_id) => api::get_account(acc_id).await,
None => Err(leptos::prelude::ServerFnError::ServerError(
"Invalid account ID".to_string(),
"Invalid account ID".to_owned(),
)),
}
});
@ -35,9 +35,9 @@ pub fn AccountPage() -> impl IntoView {
// Load initial transactions
let transactions_resource = Resource::new(account_id, move |acc_id_opt| async move {
match acc_id_opt {
Some(acc_id) => api::get_transactions_by_account(acc_id, tx_limit, 0).await,
Some(acc_id) => api::get_transactions_by_account(acc_id, 0, tx_limit).await,
None => Err(leptos::prelude::ServerFnError::ServerError(
"Invalid account ID".to_string(),
"Invalid account ID".to_owned(),
)),
}
});
@ -46,7 +46,9 @@ pub fn AccountPage() -> impl IntoView {
Effect::new(move || {
if let Some(Ok(txs)) = transactions_resource.get() {
set_all_transactions.set(txs.clone());
set_has_more.set(txs.len() as u32 == tx_limit);
set_has_more.set(
u64::try_from(txs.len()).expect("Transaction count should fit in u64") == tx_limit,
);
}
});
@ -57,18 +59,19 @@ pub fn AccountPage() -> impl IntoView {
};
set_is_loading.set(true);
let current_offset = tx_offset.get() + tx_limit;
let current_offset = tx_offset.get().saturating_add(tx_limit);
set_tx_offset.set(current_offset);
leptos::task::spawn_local(async move {
match api::get_transactions_by_account(acc_id, tx_limit, current_offset).await {
match api::get_transactions_by_account(acc_id, current_offset, tx_limit).await {
Ok(new_txs) => {
let txs_count = new_txs.len() as u32;
let txs_count =
u64::try_from(new_txs.len()).expect("Transaction count should fit in u64");
set_all_transactions.update(|txs| txs.extend(new_txs));
set_has_more.set(txs_count == tx_limit);
}
Err(e) => {
log::error!("Failed to load more transactions: {}", e);
log::error!("Failed to load more transactions: {e}");
}
}
set_is_loading.set(false);
@ -108,114 +111,111 @@ pub fn AccountPage() -> impl IntoView {
<div class="info-row">
<span class="info-label">"Account ID:"</span>
<span class="info-value hash">{account_id_str}</span>
</div>
<div class="info-row">
<span class="info-label">"Balance:"</span>
<span class="info-value">{balance_str}</span>
</div>
<div class="info-row">
<span class="info-label">"Program Owner:"</span>
<span class="info-value hash">{program_id}</span>
</div>
<div class="info-row">
<span class="info-label">"Nonce:"</span>
<span class="info-value">{nonce_str}</span>
</div>
<div class="info-row">
<span class="info-label">"Data:"</span>
<span class="info-value">{format!("{} bytes", data_len)}</span>
</div>
</div>
</div>
</div>
<div class="info-row">
<span class="info-label">"Balance:"</span>
<span class="info-value">{balance_str}</span>
</div>
<div class="info-row">
<span class="info-label">"Program Owner:"</span>
<span class="info-value hash">{program_id}</span>
</div>
<div class="info-row">
<span class="info-label">"Nonce:"</span>
<span class="info-value">{nonce_str}</span>
</div>
<div class="info-row">
<span class="info-label">"Data:"</span>
<span class="info-value">{format!("{data_len} bytes")}</span>
</div>
</div>
</div>
<div class="account-transactions">
<h2>"Transactions"</h2>
<Suspense fallback=move || {
view! { <div class="loading">"Loading transactions..."</div> }
}>
{move || {
transactions_resource
.get()
.map(|result| match result {
Ok(_) => {
let txs = all_transactions.get();
if txs.is_empty() {
view! {
<div class="no-transactions">
"No transactions found"
</div>
}
.into_any()
} else {
view! {
<div>
<div class="transactions-list">
{txs
.into_iter()
.map(|tx| {
view! { <TransactionPreview transaction=tx /> }
})
.collect::<Vec<_>>()}
</div>
{move || {
if has_more.get() {
view! {
<button
class="load-more-button"
on:click=load_more
disabled=move || is_loading.get()
>
{move || {
if is_loading.get() {
"Loading..."
} else {
"Load More"
}
}}
</button>
}
.into_any()
} else {
().into_any()
<div class="account-transactions">
<h2>"Transactions"</h2>
<Suspense fallback=move || {
view! { <div class="loading">"Loading transactions..."</div> }
}>
{move || {
transactions_resource
.get()
.map(|load_tx_result| match load_tx_result {
Ok(_) => {
let txs = all_transactions.get();
if txs.is_empty() {
view! {
<div class="no-transactions">
"No transactions found"
</div>
}
}}
.into_any()
} else {
view! {
<div>
<div class="transactions-list">
{txs
.into_iter()
.map(|tx| {
view! { <TransactionPreview transaction=tx /> }
})
.collect::<Vec<_>>()}
</div>
{move || {
if has_more.get() {
view! {
<button
class="load-more-button"
on:click=load_more
disabled=move || is_loading.get()
>
{move || {
if is_loading.get() {
"Loading..."
} else {
"Load More"
}
}}
</div>
}
.into_any()
}
}
Err(e) => {
view! {
<div class="error">
{format!("Failed to load transactions: {}", e)}
</div>
}
.into_any()
}
})
}}
</button>
}
.into_any()
} else {
().into_any()
}
}}
</Suspense>
</div>
</div>
}
.into_any()
</div>
}
.into_any()
}
}
Err(e) => {
view! {
<div class="error">
{format!("Failed to load transactions: {e}")}
</div>
}
.into_any()
}
})
}}
</Suspense>
</div>
</div>
}
.into_any()
}
Err(e) => {
view! {
<div class="error-page">
<h1>"Error"</h1>
<p>{format!("Failed to load account: {}", e)}</p>
<p>{format!("Failed to load account: {e}")}</p>
</div>
}
.into_any()
}
})
}}
</Suspense>
</div>
}

View File

@ -38,7 +38,7 @@ pub fn BlockPage() -> impl IntoView {
Some(BlockIdOrHash::BlockId(id)) => api::get_block_by_id(id).await,
Some(BlockIdOrHash::Hash(hash)) => api::get_block_by_hash(hash).await,
None => Err(leptos::prelude::ServerFnError::ServerError(
"Invalid block ID or hash".to_string(),
"Invalid block ID or hash".to_owned(),
)),
}
},
@ -144,7 +144,7 @@ pub fn BlockPage() -> impl IntoView {
view! {
<div class="error-page">
<h1>"Error"</h1>
<p>{format!("Failed to load block: {}", e)}</p>
<p>{format!("Failed to load block: {e}")}</p>
</div>
}
.into_any()

View File

@ -1,5 +1,8 @@
use leptos::prelude::*;
use leptos_router::hooks::{use_navigate, use_query_map};
use leptos_router::{
NavigateOptions,
hooks::{use_navigate, use_query_map},
};
use web_sys::SubmitEvent;
use crate::{
@ -7,7 +10,7 @@ use crate::{
components::{AccountPreview, BlockPreview, TransactionPreview},
};
const RECENT_BLOCKS_LIMIT: u32 = 10;
const RECENT_BLOCKS_LIMIT: u64 = 10;
/// Main page component
#[component]
@ -33,7 +36,7 @@ pub fn MainPage() -> impl IntoView {
match api::search(query).await {
Ok(result) => Some(result),
Err(e) => {
log::error!("Search error: {}", e);
log::error!("Search error: {e}");
None
}
}
@ -48,7 +51,7 @@ pub fn MainPage() -> impl IntoView {
// Load recent blocks on mount
let recent_blocks_resource = Resource::new(
|| (),
|_| async { api::get_blocks(None, RECENT_BLOCKS_LIMIT).await },
|()| async { api::get_blocks(None, RECENT_BLOCKS_LIMIT).await },
);
// Update all_blocks when initial load completes
@ -57,8 +60,11 @@ pub fn MainPage() -> impl IntoView {
let oldest_id = blocks.last().map(|b| b.header.block_id);
set_all_blocks.set(blocks.clone());
set_oldest_loaded_block_id.set(oldest_id);
set_has_more_blocks
.set(blocks.len() as u32 == RECENT_BLOCKS_LIMIT && oldest_id.unwrap_or(0) > 1);
set_has_more_blocks.set(
u64::try_from(blocks.len()).expect("usize should fit in u64")
== RECENT_BLOCKS_LIMIT
&& oldest_id.unwrap_or(0) > 1,
);
}
});
@ -75,7 +81,8 @@ pub fn MainPage() -> impl IntoView {
leptos::task::spawn_local(async move {
match api::get_blocks(before_id, RECENT_BLOCKS_LIMIT).await {
Ok(new_blocks) => {
let blocks_count = new_blocks.len() as u32;
let blocks_count =
u64::try_from(new_blocks.len()).expect("usize should fit in u64");
let new_oldest_id = new_blocks.last().map(|b| b.header.block_id);
set_all_blocks.update(|blocks| blocks.extend(new_blocks));
set_oldest_loaded_block_id.set(new_oldest_id);
@ -83,7 +90,7 @@ pub fn MainPage() -> impl IntoView {
.set(blocks_count == RECENT_BLOCKS_LIMIT && new_oldest_id.unwrap_or(0) > 1);
}
Err(e) => {
log::error!("Failed to load more blocks: {}", e);
log::error!("Failed to load more blocks: {e}");
}
}
set_is_loading_blocks.set(false);
@ -95,13 +102,13 @@ pub fn MainPage() -> impl IntoView {
ev.prevent_default();
let query = search_query.get();
if query.is_empty() {
navigate("?", Default::default());
navigate("?", NavigateOptions::default());
return;
}
navigate(
&format!("?q={}", urlencoding::encode(&query)),
Default::default(),
NavigateOptions::default(),
);
};
@ -142,78 +149,78 @@ pub fn MainPage() -> impl IntoView {
view! {
<div class="search-results">
<h2>"Search Results"</h2>
{if !has_results {
view! { <div class="not-found">"No results found"</div> }
.into_any()
} else {
view! {
<div class="results-container">
{if !blocks.is_empty() {
view! {
<div class="results-section">
<h3>"Blocks"</h3>
<div class="results-list">
{blocks
.into_iter()
.map(|block| {
view! { <BlockPreview block=block /> }
})
.collect::<Vec<_>>()}
{if has_results {
view! {
<div class="results-container">
{if blocks.is_empty() {
().into_any()
} else {
view! {
<div class="results-section">
<h3>"Blocks"</h3>
<div class="results-list">
{blocks
.into_iter()
.map(|block| {
view! { <BlockPreview block=block /> }
})
.collect::<Vec<_>>()}
</div>
</div>
</div>
}
.into_any()
} else {
().into_any()
}}
}
.into_any()
}}
{if !transactions.is_empty() {
view! {
<div class="results-section">
<h3>"Transactions"</h3>
<div class="results-list">
{transactions
.into_iter()
.map(|tx| {
view! { <TransactionPreview transaction=tx /> }
})
.collect::<Vec<_>>()}
{if transactions.is_empty() {
().into_any()
} else {
view! {
<div class="results-section">
<h3>"Transactions"</h3>
<div class="results-list">
{transactions
.into_iter()
.map(|tx| {
view! { <TransactionPreview transaction=tx /> }
})
.collect::<Vec<_>>()}
</div>
</div>
</div>
}
.into_any()
} else {
().into_any()
}}
}
.into_any()
}}
{if !accounts.is_empty() {
view! {
<div class="results-section">
<h3>"Accounts"</h3>
<div class="results-list">
{accounts
.into_iter()
.map(|(id, account)| {
view! {
<AccountPreview
account_id=id
account=account
/>
}
})
.collect::<Vec<_>>()}
{if accounts.is_empty() {
().into_any()
} else {
view! {
<div class="results-section">
<h3>"Accounts"</h3>
<div class="results-list">
{accounts
.into_iter()
.map(|(id, account)| {
view! {
<AccountPreview
account_id=id
account=account
/>
}
})
.collect::<Vec<_>>()}
</div>
</div>
</div>
}
.into_any()
} else {
().into_any()
}}
}
.into_any()
}}
</div>
}
.into_any()
}}
</div>
}
.into_any()
} else {
view! { <div class="not-found">"No results found"</div> }
.into_any()
}}
</div>
}
.into_any()
@ -274,7 +281,7 @@ pub fn MainPage() -> impl IntoView {
}
}
Err(e) => {
view! { <div class="error">{format!("Error: {}", e)}</div> }
view! { <div class="error">{format!("Error: {e}")}</div> }
.into_any()
}
})

View File

@ -1,9 +1,9 @@
pub mod account_page;
pub mod block_page;
pub mod main_page;
pub mod transaction_page;
pub use account_page::AccountPage;
pub use block_page::BlockPage;
pub use main_page::MainPage;
pub use transaction_page::TransactionPage;
pub mod account_page;
pub mod block_page;
pub mod main_page;
pub mod transaction_page;

View File

@ -4,7 +4,7 @@ use indexer_service_protocol::{
HashType, PrivacyPreservingMessage, PrivacyPreservingTransaction, ProgramDeploymentMessage,
ProgramDeploymentTransaction, PublicMessage, PublicTransaction, Transaction, WitnessSet,
};
use itertools::{EitherOrBoth, Itertools};
use itertools::{EitherOrBoth, Itertools as _};
use leptos::prelude::*;
use leptos_router::{components::A, hooks::use_params_map};
@ -17,16 +17,14 @@ pub fn TransactionPage() -> impl IntoView {
let transaction_resource = Resource::new(
move || {
params
.read()
.get("hash")
.and_then(|s| HashType::from_str(&s).ok())
let s = params.read().get("hash")?;
HashType::from_str(&s).ok()
},
|hash_opt| async move {
match hash_opt {
Some(hash) => api::get_transaction(hash).await,
None => Err(leptos::prelude::ServerFnError::ServerError(
"Invalid transaction hash".to_string(),
"Invalid transaction hash".to_owned(),
)),
}
},
@ -105,7 +103,7 @@ pub fn TransactionPage() -> impl IntoView {
</div>
<div class="info-row">
<span class="info-label">"Proof Size:"</span>
<span class="info-value">{format!("{} bytes", proof_len)}</span>
<span class="info-value">{format!("{proof_len} bytes")}</span>
</div>
<div class="info-row">
<span class="info-label">"Signatures:"</span>
@ -141,7 +139,7 @@ pub fn TransactionPage() -> impl IntoView {
<span class="hash">{account_id_str}</span>
</A>
<span class="nonce">
" (nonce: "{"Not affected by this transaction".to_string()}" )"
" (nonce: "{"Not affected by this transaction".to_owned()}" )"
</span>
</div>
}
@ -153,7 +151,7 @@ pub fn TransactionPage() -> impl IntoView {
<span class="hash">{"Account not found"}</span>
</A>
<span class="nonce">
" (nonce: "{"Account not found".to_string()}" )"
" (nonce: "{"Account not found".to_owned()}" )"
</span>
</div>
}
@ -212,7 +210,7 @@ pub fn TransactionPage() -> impl IntoView {
</div>
<div class="info-row">
<span class="info-label">"Proof Size:"</span>
<span class="info-value">{format!("{} bytes", proof_len)}</span>
<span class="info-value">{format!("{proof_len} bytes")}</span>
</div>
</div>
@ -244,7 +242,7 @@ pub fn TransactionPage() -> impl IntoView {
<span class="hash">{account_id_str}</span>
</A>
<span class="nonce">
" (nonce: "{"Not affected by this transaction".to_string()}" )"
" (nonce: "{"Not affected by this transaction".to_owned()}" )"
</span>
</div>
}
@ -256,7 +254,7 @@ pub fn TransactionPage() -> impl IntoView {
<span class="hash">{"Account not found"}</span>
</A>
<span class="nonce">
" (nonce: "{"Account not found".to_string()}" )"
" (nonce: "{"Account not found".to_owned()}" )"
</span>
</div>
}
@ -284,7 +282,7 @@ pub fn TransactionPage() -> impl IntoView {
<div class="info-row">
<span class="info-label">"Bytecode Size:"</span>
<span class="info-value">
{format!("{} bytes", bytecode_len)}
{format!("{bytecode_len} bytes")}
</span>
</div>
</div>
@ -302,7 +300,7 @@ pub fn TransactionPage() -> impl IntoView {
view! {
<div class="error-page">
<h1>"Error"</h1>
<p>{format!("Failed to load transaction: {}", e)}</p>
<p>{format!("Failed to load transaction: {e}")}</p>
</div>
}
.into_any()

View File

@ -4,6 +4,9 @@ version = "0.1.0"
edition = "2024"
license = { workspace = true }
[lints]
workspace = true
[dependencies]
common.workspace = true
bedrock_client.workspace = true

View File

@ -3,7 +3,7 @@ use std::{path::Path, sync::Arc};
use anyhow::Result;
use bedrock_client::HeaderId;
use common::{
block::{BedrockStatus, Block},
block::{BedrockStatus, Block, BlockId},
transaction::NSSATransaction,
};
use nssa::{Account, AccountId, V02State};
@ -21,20 +21,16 @@ impl IndexerStore {
/// ATTENTION: Will overwrite genesis block.
pub fn open_db_with_genesis(
location: &Path,
start_data: Option<(Block, V02State)>,
genesis_block: &Block,
initial_state: &V02State,
) -> Result<Self> {
let dbio = RocksDBIO::open_or_create(location, start_data)?;
let dbio = RocksDBIO::open_or_create(location, genesis_block, initial_state)?;
Ok(Self {
dbio: Arc::new(dbio),
})
}
/// Reopening existing database
pub fn open_db_restart(location: &Path) -> Result<Self> {
Self::open_db_with_genesis(location, None)
}
pub fn last_observed_l1_lib_header(&self) -> Result<Option<HeaderId>> {
Ok(self
.dbio
@ -50,7 +46,7 @@ impl IndexerStore {
Ok(self.dbio.get_block(id)?)
}
pub fn get_block_batch(&self, before: Option<u64>, limit: u64) -> Result<Vec<Block>> {
pub fn get_block_batch(&self, before: Option<BlockId>, limit: u64) -> Result<Vec<Block>> {
Ok(self.dbio.get_block_batch(before, limit)?)
}
@ -79,12 +75,14 @@ impl IndexerStore {
Ok(self.dbio.get_acc_transactions(acc_id, offset, limit)?)
}
#[must_use]
pub fn genesis_id(&self) -> u64 {
self.dbio
.get_meta_first_block_in_db()
.expect("Must be set at the DB startup")
}
#[must_use]
pub fn last_block(&self) -> u64 {
self.dbio
.get_meta_last_block_in_db()
@ -118,6 +116,6 @@ impl IndexerStore {
// to represent correct block finality
block.bedrock_status = BedrockStatus::Finalized;
Ok(self.dbio.put_block(block, l1_header.into())?)
Ok(self.dbio.put_block(&block, l1_header.into())?)
}
}

View File

@ -27,13 +27,13 @@ pub struct ClientConfig {
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct IndexerConfig {
/// Home dir of sequencer storage
/// Home dir of sequencer storage.
pub home: PathBuf,
/// List of initial accounts data
/// List of initial accounts data.
pub initial_accounts: Vec<AccountInitialData>,
/// List of initial commitments
/// List of initial commitments.
pub initial_commitments: Vec<CommitmentsInitialData>,
/// Sequencers signing key
/// Sequencers signing key.
pub signing_key: [u8; 32],
#[serde(with = "humantime_serde")]
pub consensus_info_polling_interval: Duration,
@ -42,12 +42,17 @@ pub struct IndexerConfig {
}
impl IndexerConfig {
pub fn from_path(config_path: &Path) -> Result<IndexerConfig> {
let file = File::open(config_path)
.with_context(|| format!("Failed to open indexer config at {config_path:?}"))?;
pub fn from_path(config_path: &Path) -> Result<Self> {
let file = File::open(config_path).with_context(|| {
format!("Failed to open indexer config at {}", config_path.display())
})?;
let reader = BufReader::new(file);
serde_json::from_reader(reader)
.with_context(|| format!("Failed to parse indexer config at {config_path:?}"))
serde_json::from_reader(reader).with_context(|| {
format!(
"Failed to parse indexer config at {}",
config_path.display()
)
})
}
}

View File

@ -24,14 +24,14 @@ pub struct IndexerCore {
}
#[derive(Clone)]
/// This struct represents one L1 block data fetched from backfilling
/// This struct represents one L1 block data fetched from backfilling.
pub struct BackfillBlockData {
    // L2 blocks extracted from one L1 block during backfilling.
    l2_blocks: Vec<Block>,
    // Header id of the L1 block these L2 blocks were extracted from.
    l1_header: HeaderId,
}
#[derive(Clone)]
/// This struct represents data fetched from backfilling in one iteration
/// This struct represents data fetched from backfilling in one iteration.
pub struct BackfillData {
block_data: VecDeque<BackfillBlockData>,
curr_fin_l1_lib_header: HeaderId,
@ -52,7 +52,7 @@ impl IndexerCore {
// ToDo: remove key from indexer config, use some default.
let signing_key = nssa::PrivateKey::try_new(config.signing_key).unwrap();
let channel_genesis_msg_id = [0; 32];
let start_block = hashable_data.into_pending_block(&signing_key, channel_genesis_msg_id);
let genesis_block = hashable_data.into_pending_block(&signing_key, channel_genesis_msg_id);
// This is a troubling moment, because changes in key protocol can
// affect this. And indexer can not reliably ask this data from sequencer
@ -94,47 +94,44 @@ impl IndexerCore {
config.bedrock_client_config.auth.clone(),
)?,
config,
store: IndexerStore::open_db_with_genesis(&home, Some((start_block, state)))?,
store: IndexerStore::open_db_with_genesis(&home, &genesis_block, &state)?,
})
}
pub async fn subscribe_parse_block_stream(&self) -> impl futures::Stream<Item = Result<Block>> {
pub fn subscribe_parse_block_stream(&self) -> impl futures::Stream<Item = Result<Block>> {
async_stream::stream! {
info!("Searching for initial header");
let last_l1_lib_header = self.store.last_observed_l1_lib_header()?;
let last_stored_l1_lib_header = self.store.last_observed_l1_lib_header()?;
let mut prev_last_l1_lib_header = match last_l1_lib_header {
Some(last_l1_lib_header) => {
info!("Last l1 lib header found: {last_l1_lib_header}");
last_l1_lib_header
},
None => {
info!("Last l1 lib header not found in DB");
info!("Searching for the start of a channel");
let mut prev_last_l1_lib_header = if let Some(last_l1_lib_header) = last_stored_l1_lib_header {
info!("Last l1 lib header found: {last_l1_lib_header}");
last_l1_lib_header
} else {
info!("Last l1 lib header not found in DB");
info!("Searching for the start of a channel");
let BackfillData {
block_data: start_buff,
curr_fin_l1_lib_header: last_l1_lib_header,
} = self.search_for_channel_start().await?;
let BackfillData {
block_data: start_buff,
curr_fin_l1_lib_header: last_l1_lib_header,
} = self.search_for_channel_start().await?;
for BackfillBlockData {
l2_blocks: l2_block_vec,
l1_header,
} in start_buff {
let mut l2_blocks_parsed_ids: Vec<_> = l2_block_vec.iter().map(|block| block.header.block_id).collect();
l2_blocks_parsed_ids.sort();
info!("Parsed {} L2 blocks with ids {:?}", l2_block_vec.len(), l2_blocks_parsed_ids);
for BackfillBlockData {
l2_blocks: l2_block_vec,
l1_header,
} in start_buff {
let mut l2_blocks_parsed_ids: Vec<_> = l2_block_vec.iter().map(|block| block.header.block_id).collect();
l2_blocks_parsed_ids.sort_unstable();
info!("Parsed {} L2 blocks with ids {:?}", l2_block_vec.len(), l2_blocks_parsed_ids);
for l2_block in l2_block_vec {
self.store.put_block(l2_block.clone(), l1_header)?;
for l2_block in l2_block_vec {
self.store.put_block(l2_block.clone(), l1_header)?;
yield Ok(l2_block);
}
yield Ok(l2_block);
}
}
last_l1_lib_header
},
last_l1_lib_header
};
info!("Searching for initial header finished");
@ -157,7 +154,7 @@ impl IndexerCore {
l1_header: header,
} in buff {
let mut l2_blocks_parsed_ids: Vec<_> = l2_block_vec.iter().map(|block| block.header.block_id).collect();
l2_blocks_parsed_ids.sort();
l2_blocks_parsed_ids.sort_unstable();
info!("Parsed {} L2 blocks with ids {:?}", l2_block_vec.len(), l2_blocks_parsed_ids);
for l2_block in l2_block_vec {
@ -177,20 +174,20 @@ impl IndexerCore {
async fn get_next_lib(&self, prev_lib: HeaderId) -> Result<HeaderId> {
loop {
let next_lib = self.get_lib().await?;
if next_lib != prev_lib {
break Ok(next_lib);
} else {
if next_lib == prev_lib {
info!(
"Wait {:?} to not spam the node",
self.config.consensus_info_polling_interval
);
tokio::time::sleep(self.config.consensus_info_polling_interval).await;
} else {
break Ok(next_lib);
}
}
}
/// WARNING: depending on channel state,
/// may take an indefinite amount of time
/// may take an indefinite amount of time.
pub async fn search_for_channel_start(&self) -> Result<BackfillData> {
let mut curr_last_l1_lib_header = self.get_lib().await?;
let mut backfill_start = curr_last_l1_lib_header;
@ -204,15 +201,13 @@ impl IndexerCore {
let mut cycle_header = curr_last_l1_lib_header;
loop {
let cycle_block =
if let Some(block) = self.bedrock_client.get_block_by_id(cycle_header).await? {
block
} else {
// First run can reach root easily
// so here we are optimistic about L1
// failing to get parent.
break;
};
let Some(cycle_block) = self.bedrock_client.get_block_by_id(cycle_header).await?
else {
// First run can reach root easily
// so here we are optimistic about L1
// failing to get parent.
break;
};
// It would be better to have id, but block does not have it, so slot will do.
info!(
@ -289,10 +284,9 @@ impl IndexerCore {
if cycle_block.header().id() == last_fin_l1_lib_header {
break;
} else {
// Step back to parent
cycle_header = cycle_block.header().parent();
}
// Step back to parent
cycle_header = cycle_block.header().parent();
// It would be better to have id, but block does not have it, so slot will do.
info!(
@ -324,6 +318,10 @@ fn parse_block_owned(
decoded_channel_id: &ChannelId,
) -> (Vec<Block>, HeaderId) {
(
#[expect(
clippy::wildcard_enum_match_arm,
reason = "We are only interested in channel inscription ops, so it's fine to ignore the rest"
)]
l1_block
.transactions()
.flat_map(|tx| {
@ -335,7 +333,7 @@ fn parse_block_owned(
}) if channel_id == decoded_channel_id => {
borsh::from_slice::<Block>(inscription)
.inspect_err(|err| {
error!("Failed to deserialize our inscription with err: {err:#?}")
error!("Failed to deserialize our inscription with err: {err:#?}");
})
.ok()
}

View File

@ -4,6 +4,9 @@ version = "0.1.0"
edition = "2024"
license = { workspace = true }
[lints]
workspace = true
[dependencies]
indexer_service_protocol = { workspace = true, features = ["convert"] }
indexer_service_rpc = { workspace = true, features = ["server"] }

View File

@ -1,5 +1,5 @@
# Chef stage - uses pre-built cargo-chef image
FROM lukemathwalker/cargo-chef:latest-rust-1.91.1-slim-trixie AS chef
FROM lukemathwalker/cargo-chef:latest-rust-1.94.0-slim-trixie AS chef
# Install build dependencies
RUN apt-get update && apt-get install -y \

View File

@ -4,6 +4,9 @@ version = "0.1.0"
edition = "2024"
license = { workspace = true }
[lints]
workspace = true
[dependencies]
nssa_core = { workspace = true, optional = true, features = ["host"] }
nssa = { workspace = true, optional = true }

View File

@ -1,6 +1,12 @@
//! Conversions between indexer_service_protocol types and nssa/nssa_core types
//! Conversions between `indexer_service_protocol` types and `nssa/nssa_core` types.
use crate::*;
use crate::{
Account, AccountId, BedrockStatus, Block, BlockBody, BlockHeader, Ciphertext, Commitment,
CommitmentSetDigest, Data, EncryptedAccountData, EphemeralPublicKey, HashType, MantleMsgId,
Nullifier, PrivacyPreservingMessage, PrivacyPreservingTransaction, ProgramDeploymentMessage,
ProgramDeploymentTransaction, ProgramId, Proof, PublicKey, PublicMessage, PublicTransaction,
Signature, Transaction, WitnessSet,
};
// ============================================================================
// Account-related conversions
@ -29,7 +35,7 @@ impl From<nssa_core::account::AccountId> for AccountId {
impl From<AccountId> for nssa_core::account::AccountId {
fn from(value: AccountId) -> Self {
let AccountId { value } = value;
nssa_core::account::AccountId::new(value)
Self::new(value)
}
}
@ -62,7 +68,7 @@ impl TryFrom<Account> for nssa_core::account::Account {
nonce,
} = value;
Ok(nssa_core::account::Account {
Ok(Self {
program_owner: program_owner.into(),
balance,
data: data.try_into()?,
@ -81,7 +87,7 @@ impl TryFrom<Data> for nssa_core::account::Data {
type Error = nssa_core::account::data::DataTooBigError;
fn try_from(value: Data) -> Result<Self, Self::Error> {
nssa_core::account::Data::try_from(value.0)
Self::try_from(value.0)
}
}
@ -97,7 +103,7 @@ impl From<nssa_core::Commitment> for Commitment {
impl From<Commitment> for nssa_core::Commitment {
fn from(value: Commitment) -> Self {
nssa_core::Commitment::from_byte_array(value.0)
Self::from_byte_array(value.0)
}
}
@ -109,7 +115,7 @@ impl From<nssa_core::Nullifier> for Nullifier {
impl From<Nullifier> for nssa_core::Nullifier {
fn from(value: Nullifier) -> Self {
nssa_core::Nullifier::from_byte_array(value.0)
Self::from_byte_array(value.0)
}
}
@ -137,7 +143,7 @@ impl From<nssa_core::encryption::Ciphertext> for Ciphertext {
impl From<Ciphertext> for nssa_core::encryption::Ciphertext {
fn from(value: Ciphertext) -> Self {
nssa_core::encryption::Ciphertext::from_inner(value.0)
Self::from_inner(value.0)
}
}
@ -149,7 +155,7 @@ impl From<nssa_core::encryption::EphemeralPublicKey> for EphemeralPublicKey {
impl From<EphemeralPublicKey> for nssa_core::encryption::EphemeralPublicKey {
fn from(value: EphemeralPublicKey) -> Self {
nssa_core::encryption::shared_key_derivation::Secp256k1Point(value.0)
Self(value.0)
}
}
@ -167,7 +173,7 @@ impl From<nssa::Signature> for Signature {
impl From<Signature> for nssa::Signature {
fn from(value: Signature) -> Self {
let Signature(sig_value) = value;
nssa::Signature { value: sig_value }
Self { value: sig_value }
}
}
@ -181,7 +187,7 @@ impl TryFrom<PublicKey> for nssa::PublicKey {
type Error = nssa::error::NssaError;
fn try_from(value: PublicKey) -> Result<Self, Self::Error> {
nssa::PublicKey::try_new(value.0)
Self::try_new(value.0)
}
}
@ -197,7 +203,7 @@ impl From<nssa::privacy_preserving_transaction::circuit::Proof> for Proof {
impl From<Proof> for nssa::privacy_preserving_transaction::circuit::Proof {
fn from(value: Proof) -> Self {
nssa::privacy_preserving_transaction::circuit::Proof::from_inner(value.0)
Self::from_inner(value.0)
}
}
@ -505,12 +511,12 @@ impl From<ProgramDeploymentTransaction> for nssa::ProgramDeploymentTransaction {
impl From<common::transaction::NSSATransaction> for Transaction {
fn from(value: common::transaction::NSSATransaction) -> Self {
match value {
common::transaction::NSSATransaction::Public(tx) => Transaction::Public(tx.into()),
common::transaction::NSSATransaction::Public(tx) => Self::Public(tx.into()),
common::transaction::NSSATransaction::PrivacyPreserving(tx) => {
Transaction::PrivacyPreserving(tx.into())
Self::PrivacyPreserving(tx.into())
}
common::transaction::NSSATransaction::ProgramDeployment(tx) => {
Transaction::ProgramDeployment(tx.into())
Self::ProgramDeployment(tx.into())
}
}
}
@ -521,15 +527,9 @@ impl TryFrom<Transaction> for common::transaction::NSSATransaction {
fn try_from(value: Transaction) -> Result<Self, Self::Error> {
match value {
Transaction::Public(tx) => {
Ok(common::transaction::NSSATransaction::Public(tx.try_into()?))
}
Transaction::PrivacyPreserving(tx) => Ok(
common::transaction::NSSATransaction::PrivacyPreserving(tx.try_into()?),
),
Transaction::ProgramDeployment(tx) => Ok(
common::transaction::NSSATransaction::ProgramDeployment(tx.into()),
),
Transaction::Public(tx) => Ok(Self::Public(tx.try_into()?)),
Transaction::PrivacyPreserving(tx) => Ok(Self::PrivacyPreserving(tx.try_into()?)),
Transaction::ProgramDeployment(tx) => Ok(Self::ProgramDeployment(tx.into())),
}
}
}
@ -683,6 +683,6 @@ impl From<common::HashType> for HashType {
impl From<HashType> for common::HashType {
fn from(value: HashType) -> Self {
common::HashType(value.0)
Self(value.0)
}
}

View File

@ -16,6 +16,41 @@ mod convert;
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct Nonce(pub u128);
/// Serde helpers for (de)serializing byte buffers as standard base64 strings.
mod base64 {
    use base64::prelude::{BASE64_STANDARD, Engine as _};
    use serde::{Deserialize as _, Deserializer, Serialize as _, Serializer};

    /// Variant of the parent helpers for fixed-size `[u8; N]` byte arrays.
    pub mod arr {
        use super::{Deserializer, Serializer};

        /// Serialize the byte slice as a base64 string (delegates to the parent helper).
        pub fn serialize<S: Serializer>(v: &[u8], s: S) -> Result<S::Ok, S::Error> {
            super::serialize(v, s)
        }

        /// Deserialize a base64 string into a `[u8; N]` array.
        ///
        /// Returns a custom serde error if the decoded payload is not exactly `N` bytes.
        pub fn deserialize<'de, const N: usize, D: Deserializer<'de>>(
            d: D,
        ) -> Result<[u8; N], D::Error> {
            let vec = super::deserialize(d)?;
            // `try_into` only fails on a length mismatch; discard the bytes and report it.
            vec.try_into().map_err(|_bytes| {
                serde::de::Error::custom(format!("Invalid length, expected {N} bytes"))
            })
        }
    }

    /// Serialize arbitrary bytes as a standard-alphabet base64 string.
    pub fn serialize<S: Serializer>(v: &[u8], s: S) -> Result<S::Ok, S::Error> {
        let base64 = BASE64_STANDARD.encode(v);
        String::serialize(&base64, s)
    }

    /// Deserialize a standard-alphabet base64 string back into raw bytes.
    pub fn deserialize<'de, D: Deserializer<'de>>(d: D) -> Result<Vec<u8>, D::Error> {
        let base64 = String::deserialize(d)?;
        BASE64_STANDARD
            .decode(base64.as_bytes())
            .map_err(serde::de::Error::custom)
    }
}
pub type Nonce = u128;
#[derive(
Debug, Copy, Clone, PartialEq, Eq, Hash, SerializeDisplay, DeserializeFromStr, JsonSchema,
@ -24,26 +59,43 @@ pub struct ProgramId(pub [u32; 8]);
impl Display for ProgramId {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let bytes: Vec<u8> = self.0.iter().flat_map(|n| n.to_be_bytes()).collect();
let bytes: Vec<u8> = self.0.iter().flat_map(|n| n.to_le_bytes()).collect();
write!(f, "{}", bytes.to_base58())
}
}
#[derive(Debug)]
/// Error returned when parsing a `ProgramId` from its base58 string form fails.
pub enum ProgramIdParseError {
    /// The input was not valid base58.
    InvalidBase58(base58::FromBase58Error),
    /// The decoded payload was not exactly 32 bytes; carries the actual decoded length.
    InvalidLength(usize),
}
impl Display for ProgramIdParseError {
    /// Renders a short human-readable description of the parse failure.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let description = match self {
            Self::InvalidBase58(err) => format!("invalid base58: {err:?}"),
            Self::InvalidLength(len) => {
                format!("invalid length: expected 32 bytes, got {len}")
            }
        };
        f.write_str(&description)
    }
}
impl FromStr for ProgramId {
type Err = hex::FromHexError;
type Err = ProgramIdParseError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let bytes = s
.from_base58()
.map_err(|_| hex::FromHexError::InvalidStringLength)?;
.map_err(ProgramIdParseError::InvalidBase58)?;
if bytes.len() != 32 {
return Err(hex::FromHexError::InvalidStringLength);
return Err(ProgramIdParseError::InvalidLength(bytes.len()));
}
let mut arr = [0u32; 8];
let mut arr = [0_u32; 8];
for (i, chunk) in bytes.chunks_exact(4).enumerate() {
arr[i] = u32::from_be_bytes(chunk.try_into().unwrap());
arr[i] = u32::from_le_bytes(chunk.try_into().unwrap());
}
Ok(ProgramId(arr))
Ok(Self(arr))
}
}
@ -73,9 +125,9 @@ impl FromStr for AccountId {
bytes.len()
));
}
let mut value = [0u8; 32];
let mut value = [0_u8; 32];
value.copy_from_slice(&bytes);
Ok(AccountId { value })
Ok(Self { value })
}
}
@ -122,9 +174,9 @@ impl FromStr for Signature {
type Err = hex::FromHexError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let mut bytes = [0u8; 64];
let mut bytes = [0_u8; 64];
hex::decode_to_slice(s, &mut bytes)?;
Ok(Signature(bytes))
Ok(Self(bytes))
}
}
@ -141,12 +193,14 @@ pub enum Transaction {
}
impl Transaction {
/// Get the hash of the transaction
pub fn hash(&self) -> &self::HashType {
/// Get the hash of the transaction.
#[expect(clippy::same_name_method, reason = "This is handy")]
#[must_use]
pub const fn hash(&self) -> &self::HashType {
match self {
Transaction::Public(tx) => &tx.hash,
Transaction::PrivacyPreserving(tx) => &tx.hash,
Transaction::ProgramDeployment(tx) => &tx.hash,
Self::Public(tx) => &tx.hash,
Self::PrivacyPreserving(tx) => &tx.hash,
Self::ProgramDeployment(tx) => &tx.hash,
}
}
}
@ -284,9 +338,9 @@ impl FromStr for HashType {
type Err = hex::FromHexError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let mut bytes = [0u8; 32];
let mut bytes = [0_u8; 32];
hex::decode_to_slice(s, &mut bytes)?;
Ok(HashType(bytes))
Ok(Self(bytes))
}
}
@ -303,37 +357,3 @@ pub enum BedrockStatus {
Safe,
Finalized,
}
mod base64 {
use base64::prelude::{BASE64_STANDARD, Engine as _};
use serde::{Deserialize, Deserializer, Serialize, Serializer};
pub mod arr {
use super::*;
pub fn serialize<S: Serializer>(v: &[u8], s: S) -> Result<S::Ok, S::Error> {
super::serialize(v, s)
}
pub fn deserialize<'de, const N: usize, D: Deserializer<'de>>(
d: D,
) -> Result<[u8; N], D::Error> {
let vec = super::deserialize(d)?;
vec.try_into().map_err(|_| {
serde::de::Error::custom(format!("Invalid length, expected {N} bytes"))
})
}
}
pub fn serialize<S: Serializer>(v: &[u8], s: S) -> Result<S::Ok, S::Error> {
let base64 = BASE64_STANDARD.encode(v);
String::serialize(&base64, s)
}
pub fn deserialize<'de, D: Deserializer<'de>>(d: D) -> Result<Vec<u8>, D::Error> {
let base64 = String::deserialize(d)?;
BASE64_STANDARD
.decode(base64.as_bytes())
.map_err(serde::de::Error::custom)
}
}

View File

@ -4,6 +4,9 @@ version = "0.1.0"
edition = "2024"
license = { workspace = true }
[lints]
workspace = true
[dependencies]
indexer_service_protocol.workspace = true

View File

@ -44,16 +44,16 @@ pub trait Rpc {
#[method(name = "getBlocks")]
async fn get_blocks(
&self,
before: Option<u64>,
limit: u32,
before: Option<BlockId>,
limit: u64,
) -> Result<Vec<Block>, ErrorObjectOwned>;
#[method(name = "getTransactionsByAccount")]
async fn get_transactions_by_account(
&self,
account_id: AccountId,
limit: u32,
offset: u32,
offset: u64,
limit: u64,
) -> Result<Vec<Transaction>, ErrorObjectOwned>;
// ToDo: expand healthcheck response into some kind of report

View File

@ -16,14 +16,15 @@ pub struct IndexerHandle {
server_handle: Option<jsonrpsee::server::ServerHandle>,
}
impl IndexerHandle {
fn new(addr: SocketAddr, server_handle: jsonrpsee::server::ServerHandle) -> Self {
const fn new(addr: SocketAddr, server_handle: jsonrpsee::server::ServerHandle) -> Self {
Self {
addr,
server_handle: Some(server_handle),
}
}
pub fn addr(&self) -> SocketAddr {
#[must_use]
pub const fn addr(&self) -> SocketAddr {
self.addr
}
@ -33,9 +34,14 @@ impl IndexerHandle {
.take()
.expect("Indexer server handle is set");
handle.stopped().await
handle.stopped().await;
}
#[expect(
clippy::redundant_closure_for_method_calls,
reason = "Clippy suggested path jsonrpsee::jsonrpsee_server::ServerHandle is not accessible"
)]
#[must_use]
pub fn is_stopped(&self) -> bool {
self.server_handle
.as_ref()

View File

@ -15,6 +15,10 @@ struct Args {
}
#[tokio::main]
#[expect(
clippy::integer_division_remainder_used,
reason = "Generated by select! macro, can't be easily rewritten to avoid this lint"
)]
async fn main() -> Result<()> {
env_logger::init();
@ -26,10 +30,10 @@ async fn main() -> Result<()> {
let indexer_handle = indexer_service::run_server(config, port).await?;
tokio::select! {
_ = cancellation_token.cancelled() => {
() = cancellation_token.cancelled() => {
info!("Shutting down server...");
}
_ = indexer_handle.stopped() => {
() = indexer_handle.stopped() => {
error!("Server stopped unexpectedly");
}
}

View File

@ -1,3 +1,11 @@
#![expect(
clippy::as_conversions,
clippy::arithmetic_side_effects,
clippy::cast_possible_truncation,
clippy::cast_lossless,
clippy::integer_division_remainder_used,
reason = "Mock service uses intentional casts and format patterns for test data generation"
)]
use std::collections::HashMap;
use indexer_service_protocol::{
@ -9,7 +17,7 @@ use indexer_service_protocol::{
};
use jsonrpsee::{core::SubscriptionResult, types::ErrorObjectOwned};
/// A mock implementation of the IndexerService RPC for testing purposes.
/// A mock implementation of the `IndexerService` RPC for testing purposes.
pub struct MockIndexerService {
blocks: Vec<Block>,
accounts: HashMap<AccountId, Account>,
@ -17,6 +25,7 @@ pub struct MockIndexerService {
}
impl MockIndexerService {
#[must_use]
pub fn new_with_mock_blocks() -> Self {
let mut blocks = Vec::new();
let mut accounts = HashMap::new();
@ -25,7 +34,7 @@ impl MockIndexerService {
// Create some mock accounts
let account_ids: Vec<AccountId> = (0..5)
.map(|i| {
let mut value = [0u8; 32];
let mut value = [0_u8; 32];
value[0] = i;
AccountId { value }
})
@ -44,11 +53,11 @@ impl MockIndexerService {
}
// Create 100 blocks with transactions
let mut prev_hash = HashType([0u8; 32]);
let mut prev_hash = HashType([0_u8; 32]);
for block_id in 1..=100 {
let block_hash = {
let mut hash = [0u8; 32];
let mut hash = [0_u8; 32];
hash[0] = block_id as u8;
hash[1] = 0xff;
HashType(hash)
@ -61,7 +70,7 @@ impl MockIndexerService {
for tx_idx in 0..num_txs {
let tx_hash = {
let mut hash = [0u8; 32];
let mut hash = [0_u8; 32];
hash[0] = block_id as u8;
hash[1] = tx_idx as u8;
HashType(hash)
@ -73,7 +82,7 @@ impl MockIndexerService {
0 | 1 => Transaction::Public(PublicTransaction {
hash: tx_hash,
message: PublicMessage {
program_id: ProgramId([1u32; 8]),
program_id: ProgramId([1_u32; 8]),
account_ids: vec![
account_ids[tx_idx as usize % account_ids.len()],
account_ids[(tx_idx as usize + 1) % account_ids.len()],
@ -95,7 +104,7 @@ impl MockIndexerService {
],
nonces: vec![block_id as u128],
public_post_states: vec![Account {
program_owner: ProgramId([1u32; 8]),
program_owner: ProgramId([1_u32; 8]),
balance: 500,
data: Data(vec![0xdd, 0xee]),
nonce: block_id as u128,
@ -136,8 +145,8 @@ impl MockIndexerService {
block_id,
prev_block_hash: prev_hash,
hash: block_hash,
timestamp: 1704067200000 + (block_id * 12000), // ~12 seconds per block
signature: Signature([0u8; 64]),
timestamp: 1_704_067_200_000 + (block_id * 12_000), // ~12 seconds per block
signature: Signature([0_u8; 64]),
},
body: BlockBody {
transactions: block_transactions,
@ -185,7 +194,7 @@ impl indexer_service_rpc::RpcServer for MockIndexerService {
.last()
.map(|bl| bl.header.block_id)
.ok_or_else(|| {
ErrorObjectOwned::owned(-32001, "Last block not found".to_string(), None::<()>)
ErrorObjectOwned::owned(-32001, "Last block not found".to_owned(), None::<()>)
})
}
@ -197,7 +206,7 @@ impl indexer_service_rpc::RpcServer for MockIndexerService {
.ok_or_else(|| {
ErrorObjectOwned::owned(
-32001,
format!("Block with ID {} not found", block_id),
format!("Block with ID {block_id} not found"),
None::<()>,
)
})
@ -227,15 +236,18 @@ impl indexer_service_rpc::RpcServer for MockIndexerService {
async fn get_blocks(
&self,
before: Option<u64>,
limit: u32,
before: Option<BlockId>,
limit: u64,
) -> Result<Vec<Block>, ErrorObjectOwned> {
let start_id = before.map_or_else(|| self.blocks.len() as u64, |id| id.saturating_sub(1));
let start_id = before.map_or_else(
|| self.blocks.len(),
|id| usize::try_from(id.saturating_sub(1)).expect("u64 should fit in usize"),
);
let result = (1..=start_id)
.rev()
.take(limit as usize)
.map_while(|block_id| self.blocks.get(block_id as usize - 1).cloned())
.map_while(|block_id| self.blocks.get(block_id - 1).cloned())
.collect();
Ok(result)
@ -244,8 +256,8 @@ impl indexer_service_rpc::RpcServer for MockIndexerService {
async fn get_transactions_by_account(
&self,
account_id: AccountId,
limit: u32,
offset: u32,
offset: u64,
limit: u64,
) -> Result<Vec<Transaction>, ErrorObjectOwned> {
let mut account_txs: Vec<_> = self
.transactions

View File

@ -90,19 +90,19 @@ impl indexer_service_rpc::RpcServer for IndexerService {
async fn get_blocks(
&self,
before: Option<u64>,
limit: u32,
before: Option<BlockId>,
limit: u64,
) -> Result<Vec<Block>, ErrorObjectOwned> {
let blocks = self
.indexer
.store
.get_block_batch(before, limit as u64)
.get_block_batch(before, limit)
.map_err(db_error)?;
let mut block_res = vec![];
for block in blocks {
block_res.push(block.into())
block_res.push(block.into());
}
Ok(block_res)
@ -111,19 +111,19 @@ impl indexer_service_rpc::RpcServer for IndexerService {
async fn get_transactions_by_account(
&self,
account_id: AccountId,
limit: u32,
offset: u32,
offset: u64,
limit: u64,
) -> Result<Vec<Transaction>, ErrorObjectOwned> {
let transactions = self
.indexer
.store
.get_transactions_by_account(account_id.value, offset as u64, limit as u64)
.get_transactions_by_account(account_id.value, offset, limit)
.map_err(db_error)?;
let mut tx_res = vec![];
for tx in transactions {
tx_res.push(tx.into())
tx_res.push(tx.into());
}
Ok(tx_res)
@ -154,8 +154,10 @@ impl SubscriptionService {
pub async fn add_subscription(&self, subscription: Subscription<BlockId>) -> Result<()> {
let guard = self.parts.load();
if let Err(err) = guard.new_subscription_sender.send(subscription) {
error!("Failed to send new subscription to subscription service with error: {err:#?}");
if let Err(send_err) = guard.new_subscription_sender.send(subscription) {
error!(
"Failed to send new subscription to subscription service with error: {send_err:#?}"
);
// Respawn the subscription service loop if it has finished (either with error or panic)
if guard.handle.is_finished() {
@ -177,8 +179,8 @@ impl SubscriptionService {
}
}
bail!(err);
};
bail!(send_err)
}
Ok(())
}
@ -190,8 +192,12 @@ impl SubscriptionService {
let handle = tokio::spawn(async move {
let mut subscribers = Vec::new();
let mut block_stream = pin!(indexer.subscribe_parse_block_stream().await);
let mut block_stream = pin!(indexer.subscribe_parse_block_stream());
#[expect(
clippy::integer_division_remainder_used,
reason = "Generated by select! macro, can't be easily rewritten to avoid this lint"
)]
loop {
tokio::select! {
sub = sub_receiver.recv() => {
@ -246,7 +252,7 @@ struct Subscription<T> {
}
impl<T> Subscription<T> {
fn new(sink: SubscriptionSink) -> Self {
const fn new(sink: SubscriptionSink) -> Self {
Self {
sink,
_marker: std::marker::PhantomData,
@ -273,6 +279,7 @@ impl<T> Drop for Subscription<T> {
}
}
#[must_use]
pub fn not_yet_implemented_error() -> ErrorObjectOwned {
ErrorObject::owned(
ErrorCode::InternalError.code(),
@ -281,10 +288,14 @@ pub fn not_yet_implemented_error() -> ErrorObjectOwned {
)
}
#[expect(
clippy::needless_pass_by_value,
reason = "Error is consumed to extract details for error response"
)]
fn db_error(err: anyhow::Error) -> ErrorObjectOwned {
ErrorObjectOwned::owned(
ErrorCode::InternalError.code(),
"DBError".to_string(),
"DBError".to_owned(),
Some(format!("{err:#?}")),
)
}

View File

@ -4,6 +4,9 @@ version = "0.1.0"
edition = "2024"
license = { workspace = true }
[lints]
workspace = true
[dependencies]
nssa_core = { workspace = true, features = ["host"] }
nssa.workspace = true

View File

@ -1,6 +1,6 @@
use std::{net::SocketAddr, path::PathBuf, time::Duration};
use anyhow::{Context, Result};
use anyhow::{Context as _, Result};
use bytesize::ByteSize;
use common::block::{AccountInitialData, CommitmentsInitialData};
use indexer_service::{BackoffConfig, ChannelId, ClientConfig, IndexerConfig};
@ -13,31 +13,8 @@ use wallet::config::{
InitialAccountData, InitialAccountDataPrivate, InitialAccountDataPublic, WalletConfig,
};
pub fn indexer_config(
bedrock_addr: SocketAddr,
home: PathBuf,
initial_data: &InitialData,
) -> Result<IndexerConfig> {
Ok(IndexerConfig {
home,
consensus_info_polling_interval: Duration::from_secs(1),
bedrock_client_config: ClientConfig {
addr: addr_to_url(UrlProtocol::Http, bedrock_addr)
.context("Failed to convert bedrock addr to URL")?,
auth: None,
backoff: BackoffConfig {
start_delay: Duration::from_millis(100),
max_retries: 10,
},
},
initial_accounts: initial_data.sequencer_initial_accounts(),
initial_commitments: initial_data.sequencer_initial_commitments(),
signing_key: [37; 32],
channel_id: bedrock_channel_id(),
})
}
/// Sequencer config options available for custom changes in integration tests.
#[derive(Debug, Clone, Copy)]
pub struct SequencerPartialConfig {
pub max_num_tx_in_block: usize,
pub max_block_size: ByteSize,
@ -56,72 +33,13 @@ impl Default for SequencerPartialConfig {
}
}
pub fn sequencer_config(
partial: SequencerPartialConfig,
home: PathBuf,
bedrock_addr: SocketAddr,
indexer_addr: SocketAddr,
initial_data: &InitialData,
) -> Result<SequencerConfig> {
let SequencerPartialConfig {
max_num_tx_in_block,
max_block_size,
mempool_max_size,
block_create_timeout,
} = partial;
Ok(SequencerConfig {
home,
override_rust_log: None,
genesis_id: 1,
is_genesis_random: true,
max_num_tx_in_block,
max_block_size,
mempool_max_size,
block_create_timeout,
retry_pending_blocks_timeout: Duration::from_secs(120),
port: 0,
initial_accounts: initial_data.sequencer_initial_accounts(),
initial_commitments: initial_data.sequencer_initial_commitments(),
signing_key: [37; 32],
bedrock_config: BedrockConfig {
backoff: BackoffConfig {
start_delay: Duration::from_millis(100),
max_retries: 5,
},
channel_id: bedrock_channel_id(),
node_url: addr_to_url(UrlProtocol::Http, bedrock_addr)
.context("Failed to convert bedrock addr to URL")?,
auth: None,
},
indexer_rpc_url: addr_to_url(UrlProtocol::Ws, indexer_addr)
.context("Failed to convert indexer addr to URL")?,
})
}
pub fn wallet_config(
sequencer_addr: SocketAddr,
initial_data: &InitialData,
) -> Result<WalletConfig> {
Ok(WalletConfig {
override_rust_log: None,
sequencer_addr: addr_to_url(UrlProtocol::Http, sequencer_addr)
.context("Failed to convert sequencer addr to URL")?,
seq_poll_timeout: Duration::from_secs(30),
seq_tx_poll_max_blocks: 15,
seq_poll_max_retries: 10,
seq_block_poll_max_amount: 100,
initial_accounts: initial_data.wallet_initial_accounts(),
basic_auth: None,
})
}
pub struct InitialData {
pub public_accounts: Vec<(PrivateKey, u128)>,
pub private_accounts: Vec<(KeyChain, Account)>,
}
impl InitialData {
#[must_use]
pub fn with_two_public_and_two_private_initialized_accounts() -> Self {
let mut public_alice_private_key = PrivateKey::new_os_random();
let mut public_alice_public_key =
@ -221,16 +139,17 @@ impl InitialData {
})
.chain(self.private_accounts.iter().map(|(key_chain, account)| {
let account_id = AccountId::from(&key_chain.nullifer_public_key);
InitialAccountData::Private(InitialAccountDataPrivate {
InitialAccountData::Private(Box::new(InitialAccountDataPrivate {
account_id,
account: account.clone(),
key_chain: key_chain.clone(),
})
}))
}))
.collect()
}
}
#[derive(Debug, Clone, Copy)]
pub enum UrlProtocol {
Http,
Ws,
@ -239,12 +158,96 @@ pub enum UrlProtocol {
impl std::fmt::Display for UrlProtocol {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
UrlProtocol::Http => write!(f, "http"),
UrlProtocol::Ws => write!(f, "ws"),
Self::Http => write!(f, "http"),
Self::Ws => write!(f, "ws"),
}
}
}
pub fn indexer_config(
bedrock_addr: SocketAddr,
home: PathBuf,
initial_data: &InitialData,
) -> Result<IndexerConfig> {
Ok(IndexerConfig {
home,
consensus_info_polling_interval: Duration::from_secs(1),
bedrock_client_config: ClientConfig {
addr: addr_to_url(UrlProtocol::Http, bedrock_addr)
.context("Failed to convert bedrock addr to URL")?,
auth: None,
backoff: BackoffConfig {
start_delay: Duration::from_millis(100),
max_retries: 10,
},
},
initial_accounts: initial_data.sequencer_initial_accounts(),
initial_commitments: initial_data.sequencer_initial_commitments(),
signing_key: [37; 32],
channel_id: bedrock_channel_id(),
})
}
pub fn sequencer_config(
partial: SequencerPartialConfig,
home: PathBuf,
bedrock_addr: SocketAddr,
indexer_addr: SocketAddr,
initial_data: &InitialData,
) -> Result<SequencerConfig> {
let SequencerPartialConfig {
max_num_tx_in_block,
max_block_size,
mempool_max_size,
block_create_timeout,
} = partial;
Ok(SequencerConfig {
home,
override_rust_log: None,
genesis_id: 1,
is_genesis_random: true,
max_num_tx_in_block,
max_block_size,
mempool_max_size,
block_create_timeout,
retry_pending_blocks_timeout: Duration::from_secs(120),
port: 0,
initial_accounts: initial_data.sequencer_initial_accounts(),
initial_commitments: initial_data.sequencer_initial_commitments(),
signing_key: [37; 32],
bedrock_config: BedrockConfig {
backoff: BackoffConfig {
start_delay: Duration::from_millis(100),
max_retries: 5,
},
channel_id: bedrock_channel_id(),
node_url: addr_to_url(UrlProtocol::Http, bedrock_addr)
.context("Failed to convert bedrock addr to URL")?,
auth: None,
},
indexer_rpc_url: addr_to_url(UrlProtocol::Ws, indexer_addr)
.context("Failed to convert indexer addr to URL")?,
})
}
pub fn wallet_config(
sequencer_addr: SocketAddr,
initial_data: &InitialData,
) -> Result<WalletConfig> {
Ok(WalletConfig {
override_rust_log: None,
sequencer_addr: addr_to_url(UrlProtocol::Http, sequencer_addr)
.context("Failed to convert sequencer addr to URL")?,
seq_poll_timeout: Duration::from_secs(30),
seq_tx_poll_max_blocks: 15,
seq_poll_max_retries: 10,
seq_block_poll_max_amount: 100,
initial_accounts: initial_data.wallet_initial_accounts(),
basic_auth: None,
})
}
pub fn addr_to_url(protocol: UrlProtocol, addr: SocketAddr) -> Result<Url> {
// Convert 0.0.0.0 to 127.0.0.1 for client connections
// When binding to port 0, the server binds to 0.0.0.0:<random_port>
@ -259,7 +262,7 @@ pub fn addr_to_url(protocol: UrlProtocol, addr: SocketAddr) -> Result<Url> {
}
fn bedrock_channel_id() -> ChannelId {
let channel_id: [u8; 32] = [0u8, 1]
let channel_id: [u8; 32] = [0_u8, 1]
.repeat(16)
.try_into()
.unwrap_or_else(|_| unreachable!());

View File

@ -2,15 +2,15 @@
use std::{net::SocketAddr, path::PathBuf, sync::LazyLock};
use anyhow::{Context, Result, bail};
use base64::{Engine, engine::general_purpose::STANDARD as BASE64};
use anyhow::{Context as _, Result, bail};
use base64::{Engine as _, engine::general_purpose::STANDARD as BASE64};
use common::{HashType, sequencer_client::SequencerClient, transaction::NSSATransaction};
use futures::FutureExt as _;
use indexer_service::IndexerHandle;
use log::{debug, error, warn};
use nssa::{AccountId, PrivacyPreservingTransaction};
use nssa_core::Commitment;
use sequencer_core::indexer_client::{IndexerClient, IndexerClientTrait};
use sequencer_core::indexer_client::{IndexerClient, IndexerClientTrait as _};
use sequencer_runner::SequencerHandle;
use tempfile::TempDir;
use testcontainers::compose::DockerCompose;
@ -52,7 +52,8 @@ impl TestContext {
Self::builder().build().await
}
pub fn builder() -> TestContextBuilder {
#[must_use]
pub const fn builder() -> TestContextBuilder {
TestContextBuilder::new()
}
@ -120,6 +121,10 @@ impl TestContext {
// Setting port to 0 to avoid conflicts between parallel tests, actual port will be retrieved after container is up
.with_env("PORT", "0");
#[expect(
clippy::items_after_statements,
reason = "This is more readable is this function used just after its definition"
)]
async fn up_and_retrieve_port(compose: &mut DockerCompose) -> Result<u16> {
compose
.up()
@ -151,10 +156,12 @@ impl TestContext {
}
let mut port = None;
let mut attempt = 0;
let max_attempts = 5;
let mut attempt = 0_u32;
let max_attempts = 5_u32;
while port.is_none() && attempt < max_attempts {
attempt += 1;
attempt = attempt
.checked_add(1)
.expect("We check that attempt < max_attempts, so this won't overflow");
match up_and_retrieve_port(&mut compose).await {
Ok(p) => {
port = Some(p);
@ -181,7 +188,10 @@ impl TestContext {
let temp_indexer_dir =
tempfile::tempdir().context("Failed to create temp dir for indexer home")?;
debug!("Using temp indexer home at {:?}", temp_indexer_dir.path());
debug!(
"Using temp indexer home at {}",
temp_indexer_dir.path().display()
);
let indexer_config = config::indexer_config(
bedrock_addr,
@ -206,8 +216,8 @@ impl TestContext {
tempfile::tempdir().context("Failed to create temp dir for sequencer home")?;
debug!(
"Using temp sequencer home at {:?}",
temp_sequencer_dir.path()
"Using temp sequencer home at {}",
temp_sequencer_dir.path().display()
);
let config = config::sequencer_config(
@ -260,30 +270,35 @@ impl TestContext {
}
/// Get reference to the wallet.
pub fn wallet(&self) -> &WalletCore {
#[must_use]
pub const fn wallet(&self) -> &WalletCore {
&self.wallet
}
#[must_use]
pub fn wallet_password(&self) -> &str {
&self.wallet_password
}
/// Get mutable reference to the wallet.
pub fn wallet_mut(&mut self) -> &mut WalletCore {
pub const fn wallet_mut(&mut self) -> &mut WalletCore {
&mut self.wallet
}
/// Get reference to the sequencer client.
pub fn sequencer_client(&self) -> &SequencerClient {
#[must_use]
pub const fn sequencer_client(&self) -> &SequencerClient {
&self.sequencer_client
}
/// Get reference to the indexer client.
pub fn indexer_client(&self) -> &IndexerClient {
#[must_use]
pub const fn indexer_client(&self) -> &IndexerClient {
&self.indexer_client
}
/// Get existing public account IDs in the wallet.
#[must_use]
pub fn existing_public_accounts(&self) -> Vec<AccountId> {
self.wallet
.storage()
@ -293,6 +308,7 @@ impl TestContext {
}
/// Get existing private account IDs in the wallet.
#[must_use]
pub fn existing_private_accounts(&self) -> Vec<AccountId> {
self.wallet
.storage()
@ -352,7 +368,7 @@ impl Drop for TestContext {
}
}
/// A test context to be used in normal #[test] tests
/// A test context to be used in normal #[test] tests.
pub struct BlockingTestContext {
ctx: Option<TestContext>,
runtime: tokio::runtime::Runtime,
@ -368,7 +384,7 @@ impl BlockingTestContext {
})
}
pub fn ctx(&self) -> &TestContext {
pub const fn ctx(&self) -> &TestContext {
self.ctx.as_ref().expect("TestContext is set")
}
}
@ -379,19 +395,21 @@ pub struct TestContextBuilder {
}
impl TestContextBuilder {
fn new() -> Self {
const fn new() -> Self {
Self {
initial_data: None,
sequencer_partial_config: None,
}
}
#[must_use]
pub fn with_initial_data(mut self, initial_data: config::InitialData) -> Self {
self.initial_data = Some(initial_data);
self
}
pub fn with_sequencer_partial_config(
#[must_use]
pub const fn with_sequencer_partial_config(
mut self,
sequencer_partial_config: config::SequencerPartialConfig,
) -> Self {
@ -419,18 +437,24 @@ impl Drop for BlockingTestContext {
if let Some(ctx) = ctx.take() {
drop(ctx);
}
})
});
}
}
#[must_use]
pub fn format_public_account_id(account_id: AccountId) -> String {
format!("Public/{account_id}")
}
#[must_use]
pub fn format_private_account_id(account_id: AccountId) -> String {
format!("Private/{account_id}")
}
#[expect(
clippy::wildcard_enum_match_arm,
reason = "We want the code to panic if the transaction type is not PrivacyPreserving"
)]
pub async fn fetch_privacy_preserving_tx(
seq_client: &SequencerClient,
tx_hash: HashType,

View File

@ -1,3 +1,8 @@
#![expect(
clippy::tests_outside_test_module,
reason = "We don't care about these in tests"
)]
use anyhow::Result;
use integration_tests::TestContext;
use log::info;
@ -36,7 +41,7 @@ async fn get_existing_account() -> Result<()> {
async fn new_public_account_with_label() -> Result<()> {
let mut ctx = TestContext::new().await?;
let label = "my-test-public-account".to_string();
let label = "my-test-public-account".to_owned();
let command = Command::Account(AccountSubcommand::New(NewSubcommand::Public {
cci: None,
label: Some(label.clone()),
@ -45,9 +50,8 @@ async fn new_public_account_with_label() -> Result<()> {
let result = execute_subcommand(ctx.wallet_mut(), command).await?;
// Extract the account_id from the result
let account_id = match result {
wallet::cli::SubcommandReturnValue::RegisterAccount { account_id } => account_id,
_ => panic!("Expected RegisterAccount return value"),
let wallet::cli::SubcommandReturnValue::RegisterAccount { account_id } = result else {
panic!("Expected RegisterAccount return value")
};
// Verify the label was stored
@ -69,7 +73,7 @@ async fn new_public_account_with_label() -> Result<()> {
async fn new_private_account_with_label() -> Result<()> {
let mut ctx = TestContext::new().await?;
let label = "my-test-private-account".to_string();
let label = "my-test-private-account".to_owned();
let command = Command::Account(AccountSubcommand::New(NewSubcommand::Private {
cci: None,
label: Some(label.clone()),
@ -78,9 +82,9 @@ async fn new_private_account_with_label() -> Result<()> {
let result = execute_subcommand(ctx.wallet_mut(), command).await?;
// Extract the account_id from the result
let account_id = match result {
wallet::cli::SubcommandReturnValue::RegisterAccount { account_id } => account_id,
_ => panic!("Expected RegisterAccount return value"),
let wallet::cli::SubcommandReturnValue::RegisterAccount { account_id } = result else {
panic!("Expected RegisterAccount return value")
};
// Verify the label was stored
@ -110,9 +114,9 @@ async fn new_public_account_without_label() -> Result<()> {
let result = execute_subcommand(ctx.wallet_mut(), command).await?;
// Extract the account_id from the result
let account_id = match result {
wallet::cli::SubcommandReturnValue::RegisterAccount { account_id } => account_id,
_ => panic!("Expected RegisterAccount return value"),
let wallet::cli::SubcommandReturnValue::RegisterAccount { account_id } = result else {
panic!("Expected RegisterAccount return value")
};
// Verify no label was stored

View File

@ -1,3 +1,9 @@
#![expect(
clippy::shadow_unrelated,
clippy::tests_outside_test_module,
reason = "We don't care about these in tests"
)]
use std::time::Duration;
use anyhow::Result;
@ -108,7 +114,7 @@ async fn amm_public() -> Result<()> {
let subcommand = TokenProgramAgnosticSubcommand::New {
definition_account_id: format_public_account_id(definition_account_id_1),
supply_account_id: format_public_account_id(supply_account_id_1),
name: "A NAM1".to_string(),
name: "A NAM1".to_owned(),
total_supply: 37,
};
wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?;
@ -132,7 +138,7 @@ async fn amm_public() -> Result<()> {
let subcommand = TokenProgramAgnosticSubcommand::New {
definition_account_id: format_public_account_id(definition_account_id_2),
supply_account_id: format_public_account_id(supply_account_id_2),
name: "A NAM2".to_string(),
name: "A NAM2".to_owned(),
total_supply: 37,
};
wallet::cli::execute_subcommand(ctx.wallet_mut(), Command::Token(subcommand)).await?;

View File

@ -1,2 +1,8 @@
#![expect(
clippy::shadow_unrelated,
clippy::tests_outside_test_module,
reason = "We don't care about these in tests"
)]
mod private;
mod public;

View File

@ -86,7 +86,7 @@ async fn private_transfer_to_foreign_account() -> Result<()> {
assert_eq!(tx.message.new_commitments[0], new_commitment1);
assert_eq!(tx.message.new_commitments.len(), 2);
for commitment in tx.message.new_commitments.into_iter() {
for commitment in tx.message.new_commitments {
assert!(verify_commitment_is_in_state(commitment, ctx.sequencer_client()).await);
}
@ -198,7 +198,7 @@ async fn private_transfer_to_owned_account_using_claiming_path() -> Result<()> {
assert_eq!(tx.message.new_commitments[0], new_commitment1);
assert_eq!(tx.message.new_commitments.len(), 2);
for commitment in tx.message.new_commitments.into_iter() {
for commitment in tx.message.new_commitments {
assert!(verify_commitment_is_in_state(commitment, ctx.sequencer_client()).await);
}
@ -353,7 +353,7 @@ async fn private_transfer_to_owned_account_continuous_run_path() -> Result<()> {
// Verify commitments are in state
assert_eq!(tx.message.new_commitments.len(), 2);
for commitment in tx.message.new_commitments.into_iter() {
for commitment in tx.message.new_commitments {
assert!(verify_commitment_is_in_state(commitment, ctx.sequencer_client()).await);
}

View File

@ -112,7 +112,7 @@ async fn failed_transfer_with_insufficient_balance() -> Result<()> {
to: Some(format_public_account_id(ctx.existing_public_accounts()[1])),
to_npk: None,
to_vpk: None,
amount: 1000000,
amount: 1_000_000,
});
let failed_send = wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await;

View File

@ -1,3 +1,9 @@
#![expect(
clippy::as_conversions,
clippy::tests_outside_test_module,
reason = "We don't care about these in tests"
)]
use std::time::Duration;
use anyhow::Result;
@ -24,7 +30,7 @@ async fn reject_oversized_transaction() -> Result<()> {
// Create a transaction that's definitely too large
// Block size is 1 MiB (1,048,576 bytes), minus ~200 bytes for header = ~1,048,376 bytes max tx
// Create a 1.1 MiB binary to ensure it exceeds the limit
let oversized_binary = vec![0u8; 1100 * 1024]; // 1.1 MiB binary
let oversized_binary = vec![0_u8; 1100 * 1024]; // 1.1 MiB binary
let message = nssa::program_deployment_transaction::Message::new(oversized_binary);
let tx = nssa::ProgramDeploymentTransaction::new(message);
@ -38,13 +44,12 @@ async fn reject_oversized_transaction() -> Result<()> {
);
let err = result.unwrap_err();
let err_str = format!("{:?}", err);
let err_str = format!("{err:?}");
// Check if the error contains information about transaction being too large
assert!(
err_str.contains("TransactionTooLarge") || err_str.contains("too large"),
"Expected TransactionTooLarge error, got: {}",
err_str
"Expected TransactionTooLarge error, got: {err_str}"
);
Ok(())
@ -63,7 +68,7 @@ async fn accept_transaction_within_limit() -> Result<()> {
.await?;
// Create a small program deployment that should fit
let small_binary = vec![0u8; 1024]; // 1 KiB binary
let small_binary = vec![0_u8; 1024]; // 1 KiB binary
let message = nssa::program_deployment_transaction::Message::new(small_binary);
let tx = nssa::ProgramDeploymentTransaction::new(message);

View File

@ -1,3 +1,9 @@
#![expect(
clippy::shadow_unrelated,
clippy::tests_outside_test_module,
reason = "We don't care about these in tests"
)]
use anyhow::Result;
use integration_tests::TestContext;
use log::info;
@ -12,8 +18,8 @@ async fn modify_config_field() -> Result<()> {
// Change config field
let command = Command::Config(ConfigSubcommand::Set {
key: "seq_poll_timeout".to_string(),
value: "1s".to_string(),
key: "seq_poll_timeout".to_owned(),
value: "1s".to_owned(),
});
wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?;
@ -22,8 +28,8 @@ async fn modify_config_field() -> Result<()> {
// Return how it was at the beginning
let command = Command::Config(ConfigSubcommand::Set {
key: "seq_poll_timeout".to_string(),
value: format!("{:?}", old_seq_poll_timeout),
key: "seq_poll_timeout".to_owned(),
value: format!("{old_seq_poll_timeout:?}"),
});
wallet::cli::execute_subcommand(ctx.wallet_mut(), command).await?;

View File

@ -1,7 +1,13 @@
#![expect(
clippy::shadow_unrelated,
clippy::tests_outside_test_module,
reason = "We don't care about these in tests"
)]
use std::time::Duration;
use anyhow::{Context, Result};
use indexer_service_rpc::RpcClient;
use anyhow::{Context as _, Result};
use indexer_service_rpc::RpcClient as _;
use integration_tests::{
TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, format_private_account_id,
format_public_account_id, verify_commitment_is_in_state,
@ -11,8 +17,8 @@ use nssa::AccountId;
use tokio::test;
use wallet::cli::{Command, programs::native_token_transfer::AuthTransferSubcommand};
/// Timeout in milliseconds to reliably await for block finalization
const L2_TO_L1_TIMEOUT_MILLIS: u64 = 600000;
/// Timeout in milliseconds to reliably await for block finalization.
const L2_TO_L1_TIMEOUT_MILLIS: u64 = 600_000;
#[test]
async fn indexer_test_run() -> Result<()> {

View File

@ -1,6 +1,12 @@
use std::{str::FromStr, time::Duration};
#![expect(
clippy::shadow_unrelated,
clippy::tests_outside_test_module,
reason = "We don't care about these in tests"
)]
use anyhow::{Context, Result};
use std::{str::FromStr as _, time::Duration};
use anyhow::{Context as _, Result};
use integration_tests::{
TIME_TO_WAIT_FOR_BLOCK_SECONDS, TestContext, fetch_privacy_preserving_tx,
format_private_account_id, format_public_account_id, verify_commitment_is_in_state,
@ -87,7 +93,7 @@ async fn sync_private_account_with_non_zero_chain_index() -> Result<()> {
assert_eq!(tx.message.new_commitments[0], new_commitment1);
assert_eq!(tx.message.new_commitments.len(), 2);
for commitment in tx.message.new_commitments.into_iter() {
for commitment in tx.message.new_commitments {
assert!(verify_commitment_is_in_state(commitment, ctx.sequencer_client()).await);
}

View File

@ -1,3 +1,9 @@
#![expect(
clippy::shadow_unrelated,
clippy::tests_outside_test_module,
reason = "We don't care about these in tests"
)]
use std::time::Duration;
use anyhow::{Context as _, Result};

View File

@ -1,3 +1,8 @@
#![expect(
clippy::tests_outside_test_module,
reason = "We don't care about these in tests"
)]
use std::{path::PathBuf, time::Duration};
use anyhow::Result;

View File

@ -1,3 +1,9 @@
#![expect(
clippy::shadow_unrelated,
clippy::tests_outside_test_module,
reason = "We don't care about these in tests"
)]
use std::time::Duration;
use anyhow::{Context as _, Result};
@ -69,7 +75,7 @@ async fn create_and_transfer_public_token() -> Result<()> {
};
// Create new token
let name = "A NAME".to_string();
let name = "A NAME".to_owned();
let total_supply = 37;
let subcommand = TokenProgramAgnosticSubcommand::New {
definition_account_id: format_public_account_id(definition_account_id),
@ -317,7 +323,7 @@ async fn create_and_transfer_token_with_private_supply() -> Result<()> {
};
// Create new token
let name = "A NAME".to_string();
let name = "A NAME".to_owned();
let total_supply = 37;
let subcommand = TokenProgramAgnosticSubcommand::New {
definition_account_id: format_public_account_id(definition_account_id),
@ -475,7 +481,7 @@ async fn create_token_with_private_definition() -> Result<()> {
};
// Create token with private definition
let name = "A NAME".to_string();
let name = "A NAME".to_owned();
let total_supply = 37;
let subcommand = TokenProgramAgnosticSubcommand::New {
definition_account_id: format_private_account_id(definition_account_id),
@ -671,7 +677,7 @@ async fn create_token_with_private_definition_and_supply() -> Result<()> {
};
// Create token with both private definition and supply
let name = "A NAME".to_string();
let name = "A NAME".to_owned();
let total_supply = 37;
let subcommand = TokenProgramAgnosticSubcommand::New {
definition_account_id: format_private_account_id(definition_account_id),
@ -843,7 +849,7 @@ async fn shielded_token_transfer() -> Result<()> {
};
// Create token
let name = "A NAME".to_string();
let name = "A NAME".to_owned();
let total_supply = 37;
let subcommand = TokenProgramAgnosticSubcommand::New {
definition_account_id: format_public_account_id(definition_account_id),
@ -966,7 +972,7 @@ async fn deshielded_token_transfer() -> Result<()> {
};
// Create token with private supply
let name = "A NAME".to_string();
let name = "A NAME".to_owned();
let total_supply = 37;
let subcommand = TokenProgramAgnosticSubcommand::New {
definition_account_id: format_public_account_id(definition_account_id),
@ -1073,7 +1079,7 @@ async fn token_claiming_path_with_private_accounts() -> Result<()> {
};
// Create token
let name = "A NAME".to_string();
let name = "A NAME".to_owned();
let total_supply = 37;
let subcommand = TokenProgramAgnosticSubcommand::New {
definition_account_id: format_private_account_id(definition_account_id),

View File

@ -1,3 +1,14 @@
#![expect(
clippy::arithmetic_side_effects,
clippy::float_arithmetic,
clippy::missing_asserts_for_indexing,
clippy::as_conversions,
clippy::tests_outside_test_module,
clippy::integer_division,
clippy::integer_division_remainder_used,
reason = "We don't care about these in tests"
)]
use std::time::{Duration, Instant};
use anyhow::Result;
@ -21,6 +32,102 @@ use nssa_core::{
};
use tokio::test;
pub(crate) struct TpsTestManager {
public_keypairs: Vec<(PrivateKey, AccountId)>,
target_tps: u64,
}
impl TpsTestManager {
/// Generates public account keypairs. These are used to populate the config and to generate
/// valid public transactions for the tps test.
pub(crate) fn new(target_tps: u64, number_transactions: usize) -> Self {
let public_keypairs = (1..(number_transactions + 2))
.map(|i| {
let mut private_key_bytes = [0_u8; 32];
private_key_bytes[..8].copy_from_slice(&i.to_le_bytes());
let private_key = PrivateKey::try_new(private_key_bytes).unwrap();
let public_key = PublicKey::new_from_private_key(&private_key);
let account_id = AccountId::from(&public_key);
(private_key, account_id)
})
.collect();
Self {
public_keypairs,
target_tps,
}
}
#[expect(
clippy::cast_precision_loss,
reason = "This is just for testing purposes, we don't care about precision loss here"
)]
pub(crate) fn target_time(&self) -> Duration {
let number_transactions = (self.public_keypairs.len() - 1) as u64;
Duration::from_secs_f64(number_transactions as f64 / self.target_tps as f64)
}
/// Build a batch of public transactions to submit to the node.
pub fn build_public_txs(&self) -> Vec<PublicTransaction> {
// Create valid public transactions
let program = Program::authenticated_transfer_program();
let public_txs: Vec<PublicTransaction> = self
.public_keypairs
.windows(2)
.map(|pair| {
let amount: u128 = 1;
let message = putx::Message::try_new(
program.id(),
[pair[0].1, pair[1].1].to_vec(),
[0_u128].to_vec(),
amount,
)
.unwrap();
let witness_set =
nssa::public_transaction::WitnessSet::for_message(&message, &[&pair[0].0]);
PublicTransaction::new(message, witness_set)
})
.collect();
public_txs
}
/// Generates a sequencer configuration with initial balance in a number of public accounts.
/// The transactions generated with the function `build_public_txs` will be valid in a node
/// started with the config from this method.
fn generate_initial_data(&self) -> InitialData {
// Create public public keypairs
let public_accounts = self
.public_keypairs
.iter()
.map(|(key, _)| (key.clone(), 10))
.collect();
// Generate an initial commitment to be used with the privacy preserving transaction
// created with the `build_privacy_transaction` function.
let key_chain = KeyChain::new_os_random();
let account = Account {
balance: 100,
nonce: 0xdead_beef,
program_owner: Program::authenticated_transfer_program().id(),
data: Data::default(),
};
InitialData {
public_accounts,
private_accounts: vec![(key_chain, account)],
}
}
const fn generate_sequencer_partial_config() -> SequencerPartialConfig {
SequencerPartialConfig {
max_num_tx_in_block: 300,
max_block_size: ByteSize::mb(500),
mempool_max_size: 10_000,
block_create_timeout: Duration::from_secs(12),
}
}
}
// TODO: Make a proper benchmark instead of an ad-hoc test
#[test]
pub async fn tps_test() -> Result<()> {
@ -56,16 +163,17 @@ pub async fn tps_test() -> Result<()> {
for (i, tx_hash) in tx_hashes.iter().enumerate() {
loop {
if now.elapsed().as_millis() > target_time.as_millis() {
panic!("TPS test failed by timeout");
}
assert!(
now.elapsed().as_millis() <= target_time.as_millis(),
"TPS test failed by timeout"
);
let tx_obj = ctx
.sequencer_client()
.get_transaction_by_hash(*tx_hash)
.await
.inspect_err(|err| {
log::warn!("Failed to get transaction by hash {tx_hash} with error: {err:#?}")
log::warn!("Failed to get transaction by hash {tx_hash} with error: {err:#?}");
});
if let Ok(tx_obj) = tx_obj
@ -94,98 +202,6 @@ pub async fn tps_test() -> Result<()> {
Ok(())
}
pub(crate) struct TpsTestManager {
public_keypairs: Vec<(PrivateKey, AccountId)>,
target_tps: u64,
}
impl TpsTestManager {
/// Generates public account keypairs. These are used to populate the config and to generate
/// valid public transactions for the tps test.
pub(crate) fn new(target_tps: u64, number_transactions: usize) -> Self {
let public_keypairs = (1..(number_transactions + 2))
.map(|i| {
let mut private_key_bytes = [0u8; 32];
private_key_bytes[..8].copy_from_slice(&i.to_le_bytes());
let private_key = PrivateKey::try_new(private_key_bytes).unwrap();
let public_key = PublicKey::new_from_private_key(&private_key);
let account_id = AccountId::from(&public_key);
(private_key, account_id)
})
.collect();
Self {
public_keypairs,
target_tps,
}
}
pub(crate) fn target_time(&self) -> Duration {
let number_transactions = (self.public_keypairs.len() - 1) as u64;
Duration::from_secs_f64(number_transactions as f64 / self.target_tps as f64)
}
/// Build a batch of public transactions to submit to the node.
pub fn build_public_txs(&self) -> Vec<PublicTransaction> {
// Create valid public transactions
let program = Program::authenticated_transfer_program();
let public_txs: Vec<PublicTransaction> = self
.public_keypairs
.windows(2)
.map(|pair| {
let amount: u128 = 1;
let message = putx::Message::try_new(
program.id(),
[pair[0].1, pair[1].1].to_vec(),
[0u128.into()].to_vec(),
amount,
)
.unwrap();
let witness_set =
nssa::public_transaction::WitnessSet::for_message(&message, &[&pair[0].0]);
PublicTransaction::new(message, witness_set)
})
.collect();
public_txs
}
/// Generates a sequencer configuration with initial balance in a number of public accounts.
/// The transactions generated with the function `build_public_txs` will be valid in a node
/// started with the config from this method.
fn generate_initial_data(&self) -> InitialData {
// Create public public keypairs
let public_accounts = self
.public_keypairs
.iter()
.map(|(key, _)| (key.clone(), 10))
.collect();
// Generate an initial commitment to be used with the privacy preserving transaction
// created with the `build_privacy_transaction` function.
let key_chain = KeyChain::new_os_random();
let account = Account {
balance: 100,
nonce: Nonce(0xdeadbeef),
program_owner: Program::authenticated_transfer_program().id(),
data: Data::default(),
};
InitialData {
public_accounts,
private_accounts: vec![(key_chain, account)],
}
}
fn generate_sequencer_partial_config() -> SequencerPartialConfig {
SequencerPartialConfig {
max_num_tx_in_block: 300,
max_block_size: ByteSize::mb(500),
mempool_max_size: 10_000,
block_create_timeout: Duration::from_secs(12),
}
}
}
/// Builds a single privacy transaction to use in stress tests. This involves generating a proof so
/// it may take a while to run. In normal execution of the node this transaction will be accepted
/// only once. Disabling the node's nullifier uniqueness check allows to submit this transaction

View File

@ -1,7 +1,19 @@
#![expect(
clippy::redundant_test_prefix,
reason = "Otherwise names interfere with ffi bindings"
)]
#![expect(
clippy::tests_outside_test_module,
clippy::undocumented_unsafe_blocks,
clippy::multiple_unsafe_ops_per_block,
clippy::shadow_unrelated,
reason = "We don't care about these in tests"
)]
use std::{
collections::HashSet,
ffi::{CStr, CString, c_char},
io::Write,
io::Write as _,
path::Path,
time::Duration,
};
@ -152,7 +164,7 @@ unsafe extern "C" {
fn new_wallet_ffi_with_test_context_config(
ctx: &BlockingTestContext,
home: &Path,
) -> *mut WalletHandle {
) -> Result<*mut WalletHandle> {
let config_path = home.join("wallet_config.json");
let storage_path = home.join("storage.json");
let mut config = ctx.ctx().wallet().config().to_owned();
@ -163,75 +175,68 @@ fn new_wallet_ffi_with_test_context_config(
.write(true)
.create(true)
.truncate(true)
.open(&config_path)
.unwrap();
.open(&config_path)?;
let config_with_overrides_serialized = serde_json::to_vec_pretty(&config).unwrap();
let config_with_overrides_serialized = serde_json::to_vec_pretty(&config)?;
file.write_all(&config_with_overrides_serialized).unwrap();
file.write_all(&config_with_overrides_serialized)?;
let config_path = CString::new(config_path.to_str().unwrap()).unwrap();
let storage_path = CString::new(storage_path.to_str().unwrap()).unwrap();
let password = CString::new(ctx.ctx().wallet_password()).unwrap();
let config_path = CString::new(config_path.to_str().unwrap())?;
let storage_path = CString::new(storage_path.to_str().unwrap())?;
let password = CString::new(ctx.ctx().wallet_password())?;
unsafe {
Ok(unsafe {
wallet_ffi_create_new(
config_path.as_ptr(),
storage_path.as_ptr(),
password.as_ptr(),
)
}
})
}
fn new_wallet_ffi_with_default_config(password: &str) -> *mut WalletHandle {
let tempdir = tempdir().unwrap();
fn new_wallet_ffi_with_default_config(password: &str) -> Result<*mut WalletHandle> {
let tempdir = tempdir()?;
let config_path = tempdir.path().join("wallet_config.json");
let storage_path = tempdir.path().join("storage.json");
let config_path_c = CString::new(config_path.to_str().unwrap()).unwrap();
let storage_path_c = CString::new(storage_path.to_str().unwrap()).unwrap();
let password = CString::new(password).unwrap();
let config_path_c = CString::new(config_path.to_str().unwrap())?;
let storage_path_c = CString::new(storage_path.to_str().unwrap())?;
let password = CString::new(password)?;
unsafe {
Ok(unsafe {
wallet_ffi_create_new(
config_path_c.as_ptr(),
storage_path_c.as_ptr(),
password.as_ptr(),
)
}
})
}
fn new_wallet_rust_with_default_config(password: &str) -> WalletCore {
let tempdir = tempdir().unwrap();
fn new_wallet_rust_with_default_config(password: &str) -> Result<WalletCore> {
let tempdir = tempdir()?;
let config_path = tempdir.path().join("wallet_config.json");
let storage_path = tempdir.path().join("storage.json");
WalletCore::new_init_storage(
config_path.to_path_buf(),
storage_path.to_path_buf(),
None,
password.to_string(),
)
.unwrap()
WalletCore::new_init_storage(config_path, storage_path, None, password.to_owned())
}
fn load_existing_ffi_wallet(home: &Path) -> *mut WalletHandle {
fn load_existing_ffi_wallet(home: &Path) -> Result<*mut WalletHandle> {
let config_path = home.join("wallet_config.json");
let storage_path = home.join("storage.json");
let config_path = CString::new(config_path.to_str().unwrap()).unwrap();
let storage_path = CString::new(storage_path.to_str().unwrap()).unwrap();
let config_path = CString::new(config_path.to_str().unwrap())?;
let storage_path = CString::new(storage_path.to_str().unwrap())?;
unsafe { wallet_ffi_open(config_path.as_ptr(), storage_path.as_ptr()) }
Ok(unsafe { wallet_ffi_open(config_path.as_ptr(), storage_path.as_ptr()) })
}
#[test]
fn test_wallet_ffi_create_public_accounts() {
fn wallet_ffi_create_public_accounts() -> Result<()> {
let password = "password_for_tests";
let n_accounts = 10;
// First `n_accounts` public accounts created with Rust wallet
let new_public_account_ids_rust = {
let mut account_ids = Vec::new();
let mut wallet_rust = new_wallet_rust_with_default_config(password);
let mut wallet_rust = new_wallet_rust_with_default_config(password)?;
for _ in 0..n_accounts {
let account_id = wallet_rust.create_new_account_public(None).0;
account_ids.push(*account_id.value());
@ -243,13 +248,10 @@ fn test_wallet_ffi_create_public_accounts() {
let new_public_account_ids_ffi = unsafe {
let mut account_ids = Vec::new();
let wallet_ffi_handle = new_wallet_ffi_with_default_config(password);
let wallet_ffi_handle = new_wallet_ffi_with_default_config(password)?;
for _ in 0..n_accounts {
let mut out_account_id = FfiBytes32::from_bytes([0; 32]);
wallet_ffi_create_account_public(
wallet_ffi_handle,
(&mut out_account_id) as *mut FfiBytes32,
);
wallet_ffi_create_account_public(wallet_ffi_handle, &raw mut out_account_id);
account_ids.push(out_account_id.data);
}
wallet_ffi_destroy(wallet_ffi_handle);
@ -257,17 +259,19 @@ fn test_wallet_ffi_create_public_accounts() {
};
assert_eq!(new_public_account_ids_ffi, new_public_account_ids_rust);
Ok(())
}
#[test]
fn test_wallet_ffi_create_private_accounts() {
fn wallet_ffi_create_private_accounts() -> Result<()> {
let password = "password_for_tests";
let n_accounts = 10;
// First `n_accounts` private accounts created with Rust wallet
let new_private_account_ids_rust = {
let mut account_ids = Vec::new();
let mut wallet_rust = new_wallet_rust_with_default_config(password);
let mut wallet_rust = new_wallet_rust_with_default_config(password)?;
for _ in 0..n_accounts {
let account_id = wallet_rust.create_new_account_private(None).0;
account_ids.push(*account_id.value());
@ -279,56 +283,52 @@ fn test_wallet_ffi_create_private_accounts() {
let new_private_account_ids_ffi = unsafe {
let mut account_ids = Vec::new();
let wallet_ffi_handle = new_wallet_ffi_with_default_config(password);
let wallet_ffi_handle = new_wallet_ffi_with_default_config(password)?;
for _ in 0..n_accounts {
let mut out_account_id = FfiBytes32::from_bytes([0; 32]);
wallet_ffi_create_account_private(
wallet_ffi_handle,
(&mut out_account_id) as *mut FfiBytes32,
);
wallet_ffi_create_account_private(wallet_ffi_handle, &raw mut out_account_id);
account_ids.push(out_account_id.data);
}
wallet_ffi_destroy(wallet_ffi_handle);
account_ids
};
assert_eq!(new_private_account_ids_ffi, new_private_account_ids_rust)
assert_eq!(new_private_account_ids_ffi, new_private_account_ids_rust);
Ok(())
}
#[test]
fn test_wallet_ffi_save_and_load_persistent_storage() -> Result<()> {
fn wallet_ffi_save_and_load_persistent_storage() -> Result<()> {
let ctx = BlockingTestContext::new()?;
let mut out_private_account_id = FfiBytes32::from_bytes([0; 32]);
let home = tempfile::tempdir().unwrap();
let home = tempfile::tempdir()?;
// Create a private account with the wallet FFI and save it
unsafe {
let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path());
wallet_ffi_create_account_private(
wallet_ffi_handle,
(&mut out_private_account_id) as *mut FfiBytes32,
);
let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path())?;
wallet_ffi_create_account_private(wallet_ffi_handle, &raw mut out_private_account_id);
wallet_ffi_save(wallet_ffi_handle);
wallet_ffi_destroy(wallet_ffi_handle);
}
let private_account_keys = unsafe {
let wallet_ffi_handle = load_existing_ffi_wallet(home.path());
let wallet_ffi_handle = load_existing_ffi_wallet(home.path())?;
let mut private_account = FfiAccount::default();
let result = wallet_ffi_get_account_private(
wallet_ffi_handle,
(&out_private_account_id) as *const FfiBytes32,
(&mut private_account) as *mut FfiAccount,
&raw const out_private_account_id,
&raw mut private_account,
);
assert_eq!(result, error::WalletFfiError::Success);
let mut out_keys = FfiPrivateAccountKeys::default();
let result = wallet_ffi_get_private_account_keys(
wallet_ffi_handle,
(&out_private_account_id) as *const FfiBytes32,
(&mut out_keys) as *mut FfiPrivateAccountKeys,
&raw const out_private_account_id,
&raw mut out_keys,
);
assert_eq!(result, error::WalletFfiError::Success);
@ -346,17 +346,17 @@ fn test_wallet_ffi_save_and_load_persistent_storage() -> Result<()> {
}
#[test]
fn test_wallet_ffi_list_accounts() {
fn test_wallet_ffi_list_accounts() -> Result<()> {
let password = "password_for_tests";
// Create the wallet FFI
let wallet_ffi_handle = unsafe {
let handle = new_wallet_ffi_with_default_config(password);
let handle = new_wallet_ffi_with_default_config(password)?;
// Create 5 public accounts and 5 private accounts
for _ in 0..5 {
let mut out_account_id = FfiBytes32::from_bytes([0; 32]);
wallet_ffi_create_account_public(handle, (&mut out_account_id) as *mut FfiBytes32);
wallet_ffi_create_account_private(handle, (&mut out_account_id) as *mut FfiBytes32);
wallet_ffi_create_account_public(handle, &raw mut out_account_id);
wallet_ffi_create_account_private(handle, &raw mut out_account_id);
}
handle
@ -364,7 +364,7 @@ fn test_wallet_ffi_list_accounts() {
// Create the wallet Rust
let wallet_rust = {
let mut wallet = new_wallet_rust_with_default_config(password);
let mut wallet = new_wallet_rust_with_default_config(password)?;
// Create 5 public accounts and 5 private accounts
for _ in 0..5 {
wallet.create_new_account_public(None);
@ -376,7 +376,7 @@ fn test_wallet_ffi_list_accounts() {
// Get the account list with FFI method
let mut wallet_ffi_account_list = unsafe {
let mut out_list = FfiAccountList::default();
wallet_ffi_list_accounts(wallet_ffi_handle, (&mut out_list) as *mut FfiAccountList);
wallet_ffi_list_accounts(wallet_ffi_handle, &raw mut out_list);
out_list
};
@ -400,7 +400,7 @@ fn test_wallet_ffi_list_accounts() {
assert_eq!(
wallet_rust_account_ids
.iter()
.map(|id| id.value())
.map(nssa::AccountId::value)
.collect::<HashSet<_>>(),
wallet_ffi_account_list_slice
.iter()
@ -409,7 +409,7 @@ fn test_wallet_ffi_list_accounts() {
);
// Assert `is_pub` flag is correct in the FFI result
for entry in wallet_ffi_account_list_slice.iter() {
for entry in wallet_ffi_account_list_slice {
let account_id = AccountId::new(entry.account_id.data);
let is_pub_default_in_rust_wallet = wallet_rust
.storage()
@ -429,27 +429,30 @@ fn test_wallet_ffi_list_accounts() {
}
unsafe {
wallet_ffi_free_account_list((&mut wallet_ffi_account_list) as *mut FfiAccountList);
wallet_ffi_free_account_list(&raw mut wallet_ffi_account_list);
wallet_ffi_destroy(wallet_ffi_handle);
}
Ok(())
}
#[test]
fn test_wallet_ffi_get_balance_public() -> Result<()> {
let ctx = BlockingTestContext::new()?;
let account_id: AccountId = ctx.ctx().existing_public_accounts()[0];
let home = tempfile::tempdir().unwrap();
let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path());
let home = tempfile::tempdir()?;
let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path())?;
let balance = unsafe {
let mut out_balance: [u8; 16] = [0; 16];
let ffi_account_id = FfiBytes32::from(&account_id);
let _result = wallet_ffi_get_balance(
wallet_ffi_get_balance(
wallet_ffi_handle,
(&ffi_account_id) as *const FfiBytes32,
&raw const ffi_account_id,
true,
(&mut out_balance) as *mut [u8; 16],
);
&raw mut out_balance,
)
.unwrap();
u128::from_le_bytes(out_balance)
};
assert_eq!(balance, 10000);
@ -467,17 +470,18 @@ fn test_wallet_ffi_get_balance_public() -> Result<()> {
fn test_wallet_ffi_get_account_public() -> Result<()> {
let ctx = BlockingTestContext::new()?;
let account_id: AccountId = ctx.ctx().existing_public_accounts()[0];
let home = tempfile::tempdir().unwrap();
let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path());
let home = tempfile::tempdir()?;
let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path())?;
let mut out_account = FfiAccount::default();
let account: Account = unsafe {
let ffi_account_id = FfiBytes32::from(&account_id);
let _result = wallet_ffi_get_account_public(
wallet_ffi_get_account_public(
wallet_ffi_handle,
(&ffi_account_id) as *const FfiBytes32,
(&mut out_account) as *mut FfiAccount,
);
&raw const ffi_account_id,
&raw mut out_account,
)
.unwrap();
(&out_account).try_into().unwrap()
};
@ -490,7 +494,7 @@ fn test_wallet_ffi_get_account_public() -> Result<()> {
assert_eq!(account.nonce.0, 0);
unsafe {
wallet_ffi_free_account_data((&mut out_account) as *mut FfiAccount);
wallet_ffi_free_account_data(&raw mut out_account);
wallet_ffi_destroy(wallet_ffi_handle);
}
@ -503,17 +507,18 @@ fn test_wallet_ffi_get_account_public() -> Result<()> {
fn test_wallet_ffi_get_account_private() -> Result<()> {
let ctx = BlockingTestContext::new()?;
let account_id: AccountId = ctx.ctx().existing_private_accounts()[0];
let home = tempfile::tempdir().unwrap();
let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path());
let home = tempfile::tempdir()?;
let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path())?;
let mut out_account = FfiAccount::default();
let account: Account = unsafe {
let ffi_account_id = FfiBytes32::from(&account_id);
let _result = wallet_ffi_get_account_private(
wallet_ffi_get_account_private(
wallet_ffi_handle,
(&ffi_account_id) as *const FfiBytes32,
(&mut out_account) as *mut FfiAccount,
);
&raw const ffi_account_id,
&raw mut out_account,
)
.unwrap();
(&out_account).try_into().unwrap()
};
@ -526,7 +531,7 @@ fn test_wallet_ffi_get_account_private() -> Result<()> {
assert_eq!(account.nonce, 0u128.into());
unsafe {
wallet_ffi_free_account_data((&mut out_account) as *mut FfiAccount);
wallet_ffi_free_account_data(&raw mut out_account);
wallet_ffi_destroy(wallet_ffi_handle);
}
@ -539,17 +544,18 @@ fn test_wallet_ffi_get_account_private() -> Result<()> {
fn test_wallet_ffi_get_public_account_keys() -> Result<()> {
let ctx = BlockingTestContext::new()?;
let account_id: AccountId = ctx.ctx().existing_public_accounts()[0];
let home = tempfile::tempdir().unwrap();
let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path());
let home = tempfile::tempdir()?;
let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path())?;
let mut out_key = FfiPublicAccountKey::default();
let key: PublicKey = unsafe {
let ffi_account_id = FfiBytes32::from(&account_id);
let _result = wallet_ffi_get_public_account_key(
wallet_ffi_get_public_account_key(
wallet_ffi_handle,
(&ffi_account_id) as *const FfiBytes32,
(&mut out_key) as *mut FfiPublicAccountKey,
);
&raw const ffi_account_id,
&raw mut out_key,
)
.unwrap();
(&out_key).try_into().unwrap()
};
@ -577,17 +583,18 @@ fn test_wallet_ffi_get_public_account_keys() -> Result<()> {
fn test_wallet_ffi_get_private_account_keys() -> Result<()> {
let ctx = BlockingTestContext::new()?;
let account_id: AccountId = ctx.ctx().existing_private_accounts()[0];
let home = tempfile::tempdir().unwrap();
let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path());
let home = tempfile::tempdir()?;
let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path())?;
let mut keys = FfiPrivateAccountKeys::default();
unsafe {
let ffi_account_id = FfiBytes32::from(&account_id);
let _result = wallet_ffi_get_private_account_keys(
wallet_ffi_get_private_account_keys(
wallet_ffi_handle,
(&ffi_account_id) as *const FfiBytes32,
(&mut keys) as *mut FfiPrivateAccountKeys,
);
&raw const ffi_account_id,
&raw mut keys,
)
.unwrap();
};
let key_chain = &ctx
@ -606,7 +613,7 @@ fn test_wallet_ffi_get_private_account_keys() -> Result<()> {
assert_eq!(&keys.vpk().unwrap(), expected_vpk);
unsafe {
wallet_ffi_free_private_account_keys((&mut keys) as *mut FfiPrivateAccountKeys);
wallet_ffi_free_private_account_keys(&raw mut keys);
wallet_ffi_destroy(wallet_ffi_handle);
}
@ -616,66 +623,65 @@ fn test_wallet_ffi_get_private_account_keys() -> Result<()> {
}
#[test]
fn test_wallet_ffi_account_id_to_base58() {
fn test_wallet_ffi_account_id_to_base58() -> Result<()> {
let private_key = PrivateKey::new_os_random();
let public_key = PublicKey::new_from_private_key(&private_key);
let account_id = AccountId::from(&public_key);
let ffi_bytes: FfiBytes32 = (&account_id).into();
let ptr = unsafe { wallet_ffi_account_id_to_base58((&ffi_bytes) as *const FfiBytes32) };
let ptr = unsafe { wallet_ffi_account_id_to_base58(&raw const ffi_bytes) };
let ffi_result = unsafe { CStr::from_ptr(ptr).to_str().unwrap() };
let ffi_result = unsafe { CStr::from_ptr(ptr).to_str()? };
assert_eq!(account_id.to_string(), ffi_result);
unsafe {
wallet_ffi_free_string(ptr);
}
Ok(())
}
#[test]
fn test_wallet_ffi_base58_to_account_id() {
fn wallet_ffi_base58_to_account_id() -> Result<()> {
let private_key = PrivateKey::new_os_random();
let public_key = PublicKey::new_from_private_key(&private_key);
let account_id = AccountId::from(&public_key);
let account_id_str = account_id.to_string();
let account_id_c_str = CString::new(account_id_str.clone()).unwrap();
let account_id_c_str = CString::new(account_id_str.clone())?;
let account_id: AccountId = unsafe {
let mut out_account_id_bytes = FfiBytes32::default();
wallet_ffi_account_id_from_base58(
account_id_c_str.as_ptr(),
(&mut out_account_id_bytes) as *mut FfiBytes32,
);
wallet_ffi_account_id_from_base58(account_id_c_str.as_ptr(), &raw mut out_account_id_bytes);
out_account_id_bytes.into()
};
let expected_account_id = account_id_str.parse().unwrap();
let expected_account_id = account_id_str.parse()?;
assert_eq!(account_id, expected_account_id);
Ok(())
}
#[test]
fn test_wallet_ffi_init_public_account_auth_transfer() -> Result<()> {
let ctx = BlockingTestContext::new().unwrap();
let home = tempfile::tempdir().unwrap();
let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path());
fn wallet_ffi_init_public_account_auth_transfer() -> Result<()> {
let ctx = BlockingTestContext::new()?;
let home = tempfile::tempdir()?;
let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path())?;
// Create a new uninitialized public account
let mut out_account_id = FfiBytes32::from_bytes([0; 32]);
unsafe {
wallet_ffi_create_account_public(
wallet_ffi_handle,
(&mut out_account_id) as *mut FfiBytes32,
);
wallet_ffi_create_account_public(wallet_ffi_handle, &raw mut out_account_id);
}
// Check its program owner is the default program id
let account: Account = unsafe {
let mut out_account = FfiAccount::default();
let _result = wallet_ffi_get_account_public(
wallet_ffi_get_account_public(
wallet_ffi_handle,
(&out_account_id) as *const FfiBytes32,
(&mut out_account) as *mut FfiAccount,
);
&raw const out_account_id,
&raw mut out_account,
)
.unwrap();
(&out_account).try_into().unwrap()
};
assert_eq!(account.program_owner, DEFAULT_PROGRAM_ID);
@ -685,8 +691,8 @@ fn test_wallet_ffi_init_public_account_auth_transfer() -> Result<()> {
unsafe {
wallet_ffi_register_public_account(
wallet_ffi_handle,
(&out_account_id) as *const FfiBytes32,
(&mut transfer_result) as *mut FfiTransferResult,
&raw const out_account_id,
&raw mut transfer_result,
);
}
@ -696,11 +702,12 @@ fn test_wallet_ffi_init_public_account_auth_transfer() -> Result<()> {
// Check that the program owner is now the authenticated transfer program
let account: Account = unsafe {
let mut out_account = FfiAccount::default();
let _result = wallet_ffi_get_account_public(
wallet_ffi_get_account_public(
wallet_ffi_handle,
(&out_account_id) as *const FfiBytes32,
(&mut out_account) as *mut FfiAccount,
);
&raw const out_account_id,
&raw mut out_account,
)
.unwrap();
(&out_account).try_into().unwrap()
};
assert_eq!(
@ -709,7 +716,7 @@ fn test_wallet_ffi_init_public_account_auth_transfer() -> Result<()> {
);
unsafe {
wallet_ffi_free_transfer_result((&mut transfer_result) as *mut FfiTransferResult);
wallet_ffi_free_transfer_result(&raw mut transfer_result);
wallet_ffi_destroy(wallet_ffi_handle);
}
@ -717,18 +724,15 @@ fn test_wallet_ffi_init_public_account_auth_transfer() -> Result<()> {
}
#[test]
fn test_wallet_ffi_init_private_account_auth_transfer() -> Result<()> {
let ctx = BlockingTestContext::new().unwrap();
let home = tempfile::tempdir().unwrap();
let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path());
fn wallet_ffi_init_private_account_auth_transfer() -> Result<()> {
let ctx = BlockingTestContext::new()?;
let home = tempfile::tempdir()?;
let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path())?;
// Create a new uninitialized public account
let mut out_account_id = FfiBytes32::from_bytes([0; 32]);
unsafe {
wallet_ffi_create_account_private(
wallet_ffi_handle,
(&mut out_account_id) as *mut FfiBytes32,
);
wallet_ffi_create_account_private(wallet_ffi_handle, &raw mut out_account_id);
}
// Check its program owner is the default program id
@ -736,8 +740,8 @@ fn test_wallet_ffi_init_private_account_auth_transfer() -> Result<()> {
let mut out_account = FfiAccount::default();
wallet_ffi_get_account_private(
wallet_ffi_handle,
(&out_account_id) as *const FfiBytes32,
(&mut out_account) as *mut FfiAccount,
&raw const out_account_id,
&raw mut out_account,
);
(&out_account).try_into().unwrap()
};
@ -748,8 +752,8 @@ fn test_wallet_ffi_init_private_account_auth_transfer() -> Result<()> {
unsafe {
wallet_ffi_register_private_account(
wallet_ffi_handle,
(&out_account_id) as *const FfiBytes32,
(&mut transfer_result) as *mut FfiTransferResult,
&raw const out_account_id,
&raw mut transfer_result,
);
}
@ -759,18 +763,19 @@ fn test_wallet_ffi_init_private_account_auth_transfer() -> Result<()> {
// Sync private account local storage with onchain encrypted state
unsafe {
let mut current_height = 0;
wallet_ffi_get_current_block_height(wallet_ffi_handle, (&mut current_height) as *mut u64);
wallet_ffi_get_current_block_height(wallet_ffi_handle, &raw mut current_height);
wallet_ffi_sync_to_block(wallet_ffi_handle, current_height);
};
// Check that the program owner is now the authenticated transfer program
let account: Account = unsafe {
let mut out_account = FfiAccount::default();
let _result = wallet_ffi_get_account_private(
wallet_ffi_get_account_private(
wallet_ffi_handle,
(&out_account_id) as *const FfiBytes32,
(&mut out_account) as *mut FfiAccount,
);
&raw const out_account_id,
&raw mut out_account,
)
.unwrap();
(&out_account).try_into().unwrap()
};
assert_eq!(
@ -779,7 +784,7 @@ fn test_wallet_ffi_init_private_account_auth_transfer() -> Result<()> {
);
unsafe {
wallet_ffi_free_transfer_result((&mut transfer_result) as *mut FfiTransferResult);
wallet_ffi_free_transfer_result(&raw mut transfer_result);
wallet_ffi_destroy(wallet_ffi_handle);
}
@ -788,21 +793,21 @@ fn test_wallet_ffi_init_private_account_auth_transfer() -> Result<()> {
#[test]
fn test_wallet_ffi_transfer_public() -> Result<()> {
let ctx = BlockingTestContext::new().unwrap();
let home = tempfile::tempdir().unwrap();
let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path());
let ctx = BlockingTestContext::new()?;
let home = tempfile::tempdir()?;
let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path())?;
let from: FfiBytes32 = (&ctx.ctx().existing_public_accounts()[0]).into();
let to: FfiBytes32 = (&ctx.ctx().existing_public_accounts()[1]).into();
let amount: [u8; 16] = 100u128.to_le_bytes();
let amount: [u8; 16] = 100_u128.to_le_bytes();
let mut transfer_result = FfiTransferResult::default();
unsafe {
wallet_ffi_transfer_public(
wallet_ffi_handle,
(&from) as *const FfiBytes32,
(&to) as *const FfiBytes32,
(&amount) as *const [u8; 16],
(&mut transfer_result) as *mut FfiTransferResult,
&raw const from,
&raw const to,
&raw const amount,
&raw mut transfer_result,
);
}
@ -811,23 +816,20 @@ fn test_wallet_ffi_transfer_public() -> Result<()> {
let from_balance = unsafe {
let mut out_balance: [u8; 16] = [0; 16];
let _result = wallet_ffi_get_balance(
wallet_ffi_get_balance(
wallet_ffi_handle,
(&from) as *const FfiBytes32,
&raw const from,
true,
(&mut out_balance) as *mut [u8; 16],
);
&raw mut out_balance,
)
.unwrap();
u128::from_le_bytes(out_balance)
};
let to_balance = unsafe {
let mut out_balance: [u8; 16] = [0; 16];
let _result = wallet_ffi_get_balance(
wallet_ffi_handle,
(&to) as *const FfiBytes32,
true,
(&mut out_balance) as *mut [u8; 16],
);
wallet_ffi_get_balance(wallet_ffi_handle, &raw const to, true, &raw mut out_balance)
.unwrap();
u128::from_le_bytes(out_balance)
};
@ -835,7 +837,7 @@ fn test_wallet_ffi_transfer_public() -> Result<()> {
assert_eq!(to_balance, 20100);
unsafe {
wallet_ffi_free_transfer_result((&mut transfer_result) as *mut FfiTransferResult);
wallet_ffi_free_transfer_result(&raw mut transfer_result);
wallet_ffi_destroy(wallet_ffi_handle);
}
@ -844,34 +846,31 @@ fn test_wallet_ffi_transfer_public() -> Result<()> {
#[test]
fn test_wallet_ffi_transfer_shielded() -> Result<()> {
let ctx = BlockingTestContext::new().unwrap();
let home = tempfile::tempdir().unwrap();
let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path());
let ctx = BlockingTestContext::new()?;
let home = tempfile::tempdir()?;
let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path())?;
let from: FfiBytes32 = (&ctx.ctx().existing_public_accounts()[0]).into();
let (to, to_keys) = unsafe {
let mut out_account_id = FfiBytes32::default();
let mut out_keys = FfiPrivateAccountKeys::default();
wallet_ffi_create_account_private(
wallet_ffi_handle,
(&mut out_account_id) as *mut FfiBytes32,
);
wallet_ffi_create_account_private(wallet_ffi_handle, &raw mut out_account_id);
wallet_ffi_get_private_account_keys(
wallet_ffi_handle,
(&out_account_id) as *const FfiBytes32,
(&mut out_keys) as *mut FfiPrivateAccountKeys,
&raw const out_account_id,
&raw mut out_keys,
);
(out_account_id, out_keys)
};
let amount: [u8; 16] = 100u128.to_le_bytes();
let amount: [u8; 16] = 100_u128.to_le_bytes();
let mut transfer_result = FfiTransferResult::default();
unsafe {
wallet_ffi_transfer_shielded(
wallet_ffi_handle,
(&from) as *const FfiBytes32,
(&to_keys) as *const FfiPrivateAccountKeys,
(&amount) as *const [u8; 16],
(&mut transfer_result) as *mut FfiTransferResult,
&raw const from,
&raw const to_keys,
&raw const amount,
&raw mut transfer_result,
);
}
@ -881,18 +880,19 @@ fn test_wallet_ffi_transfer_shielded() -> Result<()> {
// Sync private account local storage with onchain encrypted state
unsafe {
let mut current_height = 0;
wallet_ffi_get_current_block_height(wallet_ffi_handle, (&mut current_height) as *mut u64);
wallet_ffi_get_current_block_height(wallet_ffi_handle, &raw mut current_height);
wallet_ffi_sync_to_block(wallet_ffi_handle, current_height);
};
let from_balance = unsafe {
let mut out_balance: [u8; 16] = [0; 16];
let _result = wallet_ffi_get_balance(
wallet_ffi_get_balance(
wallet_ffi_handle,
(&from) as *const FfiBytes32,
&raw const from,
true,
(&mut out_balance) as *mut [u8; 16],
);
&raw mut out_balance,
)
.unwrap();
u128::from_le_bytes(out_balance)
};
@ -900,9 +900,9 @@ fn test_wallet_ffi_transfer_shielded() -> Result<()> {
let mut out_balance: [u8; 16] = [0; 16];
let _result = wallet_ffi_get_balance(
wallet_ffi_handle,
(&to) as *const FfiBytes32,
&raw const to,
false,
(&mut out_balance) as *mut [u8; 16],
&raw mut out_balance,
);
u128::from_le_bytes(out_balance)
};
@ -911,7 +911,7 @@ fn test_wallet_ffi_transfer_shielded() -> Result<()> {
assert_eq!(to_balance, 100);
unsafe {
wallet_ffi_free_transfer_result((&mut transfer_result) as *mut FfiTransferResult);
wallet_ffi_free_transfer_result(&raw mut transfer_result);
wallet_ffi_destroy(wallet_ffi_handle);
}
@ -920,21 +920,21 @@ fn test_wallet_ffi_transfer_shielded() -> Result<()> {
#[test]
fn test_wallet_ffi_transfer_deshielded() -> Result<()> {
let ctx = BlockingTestContext::new().unwrap();
let home = tempfile::tempdir().unwrap();
let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path());
let ctx = BlockingTestContext::new()?;
let home = tempfile::tempdir()?;
let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path())?;
let from: FfiBytes32 = (&ctx.ctx().existing_private_accounts()[0]).into();
let to = FfiBytes32::from_bytes([37; 32]);
let amount: [u8; 16] = 100u128.to_le_bytes();
let amount: [u8; 16] = 100_u128.to_le_bytes();
let mut transfer_result = FfiTransferResult::default();
unsafe {
wallet_ffi_transfer_deshielded(
wallet_ffi_handle,
(&from) as *const FfiBytes32,
(&to) as *const FfiBytes32,
(&amount) as *const [u8; 16],
(&mut transfer_result) as *mut FfiTransferResult,
&raw const from,
&raw const to,
&raw const amount,
&raw mut transfer_result,
);
}
@ -944,7 +944,7 @@ fn test_wallet_ffi_transfer_deshielded() -> Result<()> {
// Sync private account local storage with onchain encrypted state
unsafe {
let mut current_height = 0;
wallet_ffi_get_current_block_height(wallet_ffi_handle, (&mut current_height) as *mut u64);
wallet_ffi_get_current_block_height(wallet_ffi_handle, &raw mut current_height);
wallet_ffi_sync_to_block(wallet_ffi_handle, current_height);
};
@ -952,21 +952,17 @@ fn test_wallet_ffi_transfer_deshielded() -> Result<()> {
let mut out_balance: [u8; 16] = [0; 16];
let _result = wallet_ffi_get_balance(
wallet_ffi_handle,
(&from) as *const FfiBytes32,
&raw const from,
false,
(&mut out_balance) as *mut [u8; 16],
&raw mut out_balance,
);
u128::from_le_bytes(out_balance)
};
let to_balance = unsafe {
let mut out_balance: [u8; 16] = [0; 16];
let _result = wallet_ffi_get_balance(
wallet_ffi_handle,
(&to) as *const FfiBytes32,
true,
(&mut out_balance) as *mut [u8; 16],
);
let _result =
wallet_ffi_get_balance(wallet_ffi_handle, &raw const to, true, &raw mut out_balance);
u128::from_le_bytes(out_balance)
};
@ -974,7 +970,7 @@ fn test_wallet_ffi_transfer_deshielded() -> Result<()> {
assert_eq!(to_balance, 100);
unsafe {
wallet_ffi_free_transfer_result((&mut transfer_result) as *mut FfiTransferResult);
wallet_ffi_free_transfer_result(&raw mut transfer_result);
wallet_ffi_destroy(wallet_ffi_handle);
}
@ -983,36 +979,33 @@ fn test_wallet_ffi_transfer_deshielded() -> Result<()> {
#[test]
fn test_wallet_ffi_transfer_private() -> Result<()> {
let ctx = BlockingTestContext::new().unwrap();
let home = tempfile::tempdir().unwrap();
let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path());
let ctx = BlockingTestContext::new()?;
let home = tempfile::tempdir()?;
let wallet_ffi_handle = new_wallet_ffi_with_test_context_config(&ctx, home.path())?;
let from: FfiBytes32 = (&ctx.ctx().existing_private_accounts()[0]).into();
let (to, to_keys) = unsafe {
let mut out_account_id = FfiBytes32::default();
let mut out_keys = FfiPrivateAccountKeys::default();
wallet_ffi_create_account_private(
wallet_ffi_handle,
(&mut out_account_id) as *mut FfiBytes32,
);
wallet_ffi_create_account_private(wallet_ffi_handle, &raw mut out_account_id);
wallet_ffi_get_private_account_keys(
wallet_ffi_handle,
(&out_account_id) as *const FfiBytes32,
(&mut out_keys) as *mut FfiPrivateAccountKeys,
&raw const out_account_id,
&raw mut out_keys,
);
(out_account_id, out_keys)
};
let amount: [u8; 16] = 100u128.to_le_bytes();
let amount: [u8; 16] = 100_u128.to_le_bytes();
let mut transfer_result = FfiTransferResult::default();
unsafe {
wallet_ffi_transfer_private(
wallet_ffi_handle,
(&from) as *const FfiBytes32,
(&to_keys) as *const FfiPrivateAccountKeys,
(&amount) as *const [u8; 16],
(&mut transfer_result) as *mut FfiTransferResult,
&raw const from,
&raw const to_keys,
&raw const amount,
&raw mut transfer_result,
);
}
@ -1022,7 +1015,7 @@ fn test_wallet_ffi_transfer_private() -> Result<()> {
// Sync private account local storage with onchain encrypted state
unsafe {
let mut current_height = 0;
wallet_ffi_get_current_block_height(wallet_ffi_handle, (&mut current_height) as *mut u64);
wallet_ffi_get_current_block_height(wallet_ffi_handle, &raw mut current_height);
wallet_ffi_sync_to_block(wallet_ffi_handle, current_height);
};
@ -1030,9 +1023,9 @@ fn test_wallet_ffi_transfer_private() -> Result<()> {
let mut out_balance: [u8; 16] = [0; 16];
let _result = wallet_ffi_get_balance(
wallet_ffi_handle,
(&from) as *const FfiBytes32,
&raw const from,
false,
(&mut out_balance) as *mut [u8; 16],
&raw mut out_balance,
);
u128::from_le_bytes(out_balance)
};
@ -1041,9 +1034,9 @@ fn test_wallet_ffi_transfer_private() -> Result<()> {
let mut out_balance: [u8; 16] = [0; 16];
let _result = wallet_ffi_get_balance(
wallet_ffi_handle,
(&to) as *const FfiBytes32,
&raw const to,
false,
(&mut out_balance) as *mut [u8; 16],
&raw mut out_balance,
);
u128::from_le_bytes(out_balance)
};
@ -1052,7 +1045,7 @@ fn test_wallet_ffi_transfer_private() -> Result<()> {
assert_eq!(to_balance, 100);
unsafe {
wallet_ffi_free_transfer_result((&mut transfer_result) as *mut FfiTransferResult);
wallet_ffi_free_transfer_result(&raw mut transfer_result);
wallet_ffi_destroy(wallet_ffi_handle);
}

View File

@ -4,6 +4,9 @@ version = "0.1.0"
edition = "2024"
license = { workspace = true }
[lints]
workspace = true
[dependencies]
secp256k1 = "0.31.1"

View File

@ -2,8 +2,8 @@ use nssa_core::{
NullifierPublicKey, SharedSecretKey,
encryption::{EphemeralPublicKey, EphemeralSecretKey, ViewingPublicKey},
};
use rand::{RngCore, rngs::OsRng};
use sha2::Digest;
use rand::{RngCore as _, rngs::OsRng};
use sha2::Digest as _;
#[derive(Debug)]
/// Ephemeral secret key holder. Non-clonable as intended for one-time use. Produces ephemeral
@ -12,18 +12,8 @@ pub struct EphemeralKeyHolder {
ephemeral_secret_key: EphemeralSecretKey,
}
pub fn produce_one_sided_shared_secret_receiver(
vpk: &ViewingPublicKey,
) -> (SharedSecretKey, EphemeralPublicKey) {
let mut esk = [0; 32];
OsRng.fill_bytes(&mut esk);
(
SharedSecretKey::new(&esk, vpk),
EphemeralPublicKey::from_scalar(esk),
)
}
impl EphemeralKeyHolder {
#[must_use]
pub fn new(receiver_nullifier_public_key: &NullifierPublicKey) -> Self {
let mut nonce_bytes = [0; 16];
OsRng.fill_bytes(&mut nonce_bytes);
@ -36,10 +26,12 @@ impl EphemeralKeyHolder {
}
}
#[must_use]
pub fn generate_ephemeral_public_key(&self) -> EphemeralPublicKey {
EphemeralPublicKey::from_scalar(self.ephemeral_secret_key)
}
#[must_use]
pub fn calculate_shared_secret_sender(
&self,
receiver_viewing_public_key: &ViewingPublicKey,
@ -47,3 +39,15 @@ impl EphemeralKeyHolder {
SharedSecretKey::new(&self.ephemeral_secret_key, receiver_viewing_public_key)
}
}
#[must_use]
pub fn produce_one_sided_shared_secret_receiver(
vpk: &ViewingPublicKey,
) -> (SharedSecretKey, EphemeralPublicKey) {
let mut esk = [0; 32];
OsRng.fill_bytes(&mut esk);
(
SharedSecretKey::new(&esk, vpk),
EphemeralPublicKey::from_scalar(esk),
)
}

View File

@ -1,6 +1,6 @@
use std::{fmt::Display, str::FromStr};
use itertools::Itertools;
use itertools::Itertools as _;
use serde::{Deserialize, Serialize};
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Serialize, Deserialize, Hash)]
@ -23,12 +23,12 @@ impl FromStr for ChainIndex {
}
if s == "/" {
return Ok(ChainIndex(vec![]));
return Ok(Self(vec![]));
}
let uprooted_substring = s.strip_prefix("/").unwrap();
let splitted_chain: Vec<&str> = uprooted_substring.split("/").collect();
let splitted_chain: Vec<&str> = uprooted_substring.split('/').collect();
let mut res = vec![];
for split_ch in splitted_chain {
@ -47,7 +47,7 @@ impl Display for ChainIndex {
write!(f, "{cci}/")?;
}
if let Some(last) = self.0.last() {
write!(f, "{}", last)?;
write!(f, "{last}")?;
}
Ok(())
}
@ -55,84 +55,96 @@ impl Display for ChainIndex {
impl Default for ChainIndex {
fn default() -> Self {
ChainIndex::from_str("/").expect("Root parsing failure")
Self::from_str("/").expect("Root parsing failure")
}
}
impl ChainIndex {
#[must_use]
pub fn root() -> Self {
ChainIndex::default()
Self::default()
}
#[must_use]
pub fn chain(&self) -> &[u32] {
&self.0
}
#[must_use]
pub fn index(&self) -> Option<u32> {
self.chain().last().copied()
}
pub fn next_in_line(&self) -> ChainIndex {
#[must_use]
pub fn next_in_line(&self) -> Option<Self> {
let mut chain = self.0.clone();
// ToDo: Add overflow check
if let Some(last_p) = chain.last_mut() {
*last_p += 1
*last_p = last_p.checked_add(1)?;
}
ChainIndex(chain)
Some(Self(chain))
}
pub fn previous_in_line(&self) -> Option<ChainIndex> {
#[must_use]
pub fn previous_in_line(&self) -> Option<Self> {
let mut chain = self.0.clone();
if let Some(last_p) = chain.last_mut() {
*last_p = last_p.checked_sub(1)?;
}
Some(ChainIndex(chain))
Some(Self(chain))
}
pub fn parent(&self) -> Option<ChainIndex> {
#[must_use]
pub fn parent(&self) -> Option<Self> {
if self.0.is_empty() {
None
} else {
Some(ChainIndex(self.0[..(self.0.len() - 1)].to_vec()))
let last = self.0.len().checked_sub(1)?;
Some(Self(self.0[..last].to_vec()))
}
}
pub fn nth_child(&self, child_id: u32) -> ChainIndex {
#[must_use]
pub fn nth_child(&self, child_id: u32) -> Self {
let mut chain = self.0.clone();
chain.push(child_id);
ChainIndex(chain)
Self(chain)
}
#[must_use]
pub fn depth(&self) -> u32 {
self.0.iter().map(|cci| cci + 1).sum()
self.0
.iter()
.map(|cci| cci.checked_add(1).expect("Max cci reached"))
.sum()
}
fn collapse_back(&self) -> Option<Self> {
let mut res = self.parent()?;
let last_mut = res.0.last_mut()?;
*last_mut += *(self.0.last()?) + 1;
*last_mut = last_mut.checked_add(self.0.last()?.checked_add(1)?)?;
Some(res)
}
fn shuffle_iter(&self) -> impl Iterator<Item = ChainIndex> {
fn shuffle_iter(&self) -> impl Iterator<Item = Self> {
self.0
.iter()
.permutations(self.0.len())
.unique()
.map(|item| ChainIndex(item.into_iter().cloned().collect()))
.map(|item| Self(item.into_iter().copied().collect()))
}
pub fn chain_ids_at_depth(depth: usize) -> impl Iterator<Item = ChainIndex> {
let mut stack = vec![ChainIndex(vec![0; depth])];
let mut cumulative_stack = vec![ChainIndex(vec![0; depth])];
pub fn chain_ids_at_depth(depth: usize) -> impl Iterator<Item = Self> {
let mut stack = vec![Self(vec![0; depth])];
let mut cumulative_stack = vec![Self(vec![0; depth])];
while let Some(id) = stack.pop() {
if let Some(collapsed_id) = id.collapse_back() {
while let Some(top_id) = stack.pop() {
if let Some(collapsed_id) = top_id.collapse_back() {
for id in collapsed_id.shuffle_iter() {
stack.push(id.clone());
cumulative_stack.push(id);
@ -143,12 +155,12 @@ impl ChainIndex {
cumulative_stack.into_iter().unique()
}
pub fn chain_ids_at_depth_rev(depth: usize) -> impl Iterator<Item = ChainIndex> {
let mut stack = vec![ChainIndex(vec![0; depth])];
let mut cumulative_stack = vec![ChainIndex(vec![0; depth])];
pub fn chain_ids_at_depth_rev(depth: usize) -> impl Iterator<Item = Self> {
let mut stack = vec![Self(vec![0; depth])];
let mut cumulative_stack = vec![Self(vec![0; depth])];
while let Some(id) = stack.pop() {
if let Some(collapsed_id) = id.collapse_back() {
while let Some(top_id) = stack.pop() {
if let Some(collapsed_id) = top_id.collapse_back() {
for id in collapsed_id.shuffle_iter() {
stack.push(id.clone());
cumulative_stack.push(id);
@ -165,7 +177,7 @@ mod tests {
use super::*;
#[test]
fn test_chain_id_root_correct() {
fn chain_id_root_correct() {
let chain_id = ChainIndex::root();
let chain_id_2 = ChainIndex::from_str("/").unwrap();
@ -173,21 +185,21 @@ mod tests {
}
#[test]
fn test_chain_id_deser_correct() {
fn chain_id_deser_correct() {
let chain_id = ChainIndex::from_str("/257").unwrap();
assert_eq!(chain_id.chain(), &[257]);
}
#[test]
fn test_chain_id_deser_failure_no_root() {
fn chain_id_deser_failure_no_root() {
let chain_index_error = ChainIndex::from_str("257").err().unwrap();
assert!(matches!(chain_index_error, ChainIndexError::NoRootFound));
}
#[test]
fn test_chain_id_deser_failure_int_parsing_failure() {
fn chain_id_deser_failure_int_parsing_failure() {
let chain_index_error = ChainIndex::from_str("/hello").err().unwrap();
assert!(matches!(
@ -197,15 +209,15 @@ mod tests {
}
#[test]
fn test_chain_id_next_in_line_correct() {
fn chain_id_next_in_line_correct() {
let chain_id = ChainIndex::from_str("/257").unwrap();
let next_in_line = chain_id.next_in_line();
let next_in_line = chain_id.next_in_line().unwrap();
assert_eq!(next_in_line, ChainIndex::from_str("/258").unwrap());
}
#[test]
fn test_chain_id_child_correct() {
fn chain_id_child_correct() {
let chain_id = ChainIndex::from_str("/257").unwrap();
let child = chain_id.nth_child(3);
@ -213,88 +225,88 @@ mod tests {
}
#[test]
fn test_correct_display() {
fn correct_display() {
let chainid = ChainIndex(vec![5, 7, 8]);
let string_index = format!("{chainid}");
assert_eq!(string_index, "/5/7/8".to_string());
assert_eq!(string_index, "/5/7/8".to_owned());
}
#[test]
fn test_prev_in_line() {
fn prev_in_line() {
let chain_id = ChainIndex(vec![1, 7, 3]);
let prev_chain_id = chain_id.previous_in_line().unwrap();
assert_eq!(prev_chain_id, ChainIndex(vec![1, 7, 2]))
assert_eq!(prev_chain_id, ChainIndex(vec![1, 7, 2]));
}
#[test]
fn test_prev_in_line_no_prev() {
fn prev_in_line_no_prev() {
let chain_id = ChainIndex(vec![1, 7, 0]);
let prev_chain_id = chain_id.previous_in_line();
assert_eq!(prev_chain_id, None)
assert_eq!(prev_chain_id, None);
}
#[test]
fn test_parent() {
fn parent() {
let chain_id = ChainIndex(vec![1, 7, 3]);
let parent_chain_id = chain_id.parent().unwrap();
assert_eq!(parent_chain_id, ChainIndex(vec![1, 7]))
assert_eq!(parent_chain_id, ChainIndex(vec![1, 7]));
}
#[test]
fn test_parent_no_parent() {
fn parent_no_parent() {
let chain_id = ChainIndex(vec![]);
let parent_chain_id = chain_id.parent();
assert_eq!(parent_chain_id, None)
assert_eq!(parent_chain_id, None);
}
#[test]
fn test_parent_root() {
fn parent_root() {
let chain_id = ChainIndex(vec![1]);
let parent_chain_id = chain_id.parent().unwrap();
assert_eq!(parent_chain_id, ChainIndex::root())
assert_eq!(parent_chain_id, ChainIndex::root());
}
#[test]
fn test_collapse_back() {
fn collapse_back() {
let chain_id = ChainIndex(vec![1, 1]);
let collapsed = chain_id.collapse_back().unwrap();
assert_eq!(collapsed, ChainIndex(vec![3]))
assert_eq!(collapsed, ChainIndex(vec![3]));
}
#[test]
fn test_collapse_back_one() {
fn collapse_back_one() {
let chain_id = ChainIndex(vec![1]);
let collapsed = chain_id.collapse_back();
assert_eq!(collapsed, None)
assert_eq!(collapsed, None);
}
#[test]
fn test_collapse_back_root() {
fn collapse_back_root() {
let chain_id = ChainIndex(vec![]);
let collapsed = chain_id.collapse_back();
assert_eq!(collapsed, None)
assert_eq!(collapsed, None);
}
#[test]
fn test_shuffle() {
fn shuffle() {
for id in ChainIndex::chain_ids_at_depth(5) {
println!("{id}");
}

View File

@ -1,4 +1,4 @@
use k256::{Scalar, elliptic_curve::PrimeField};
use k256::{Scalar, elliptic_curve::PrimeField as _};
use nssa_core::{NullifierPublicKey, encryption::ViewingPublicKey};
use serde::{Deserialize, Serialize};
@ -12,7 +12,7 @@ use crate::key_management::{
pub struct ChildKeysPrivate {
pub value: (KeyChain, nssa::Account),
pub ccc: [u8; 32],
/// Can be [`None`] if root
/// Can be [`None`] if root.
pub cci: Option<u32>,
}
@ -54,6 +54,10 @@ impl KeyNode for ChildKeysPrivate {
}
fn nth_child(&self, cci: u32) -> Self {
#[expect(
clippy::arithmetic_side_effects,
reason = "Multiplying finite field scalars gives no unexpected side effects"
)]
let parent_pt =
Scalar::from_repr(self.value.0.private_key_holder.nullifier_secret_key.into())
.expect("Key generated as scalar, must be valid representation")
@ -113,27 +117,27 @@ impl KeyNode for ChildKeysPrivate {
}
}
impl<'a> From<&'a ChildKeysPrivate> for &'a (KeyChain, nssa::Account) {
fn from(value: &'a ChildKeysPrivate) -> Self {
impl<'keys> From<&'keys ChildKeysPrivate> for &'keys (KeyChain, nssa::Account) {
fn from(value: &'keys ChildKeysPrivate) -> Self {
&value.value
}
}
impl<'a> From<&'a mut ChildKeysPrivate> for &'a mut (KeyChain, nssa::Account) {
fn from(value: &'a mut ChildKeysPrivate) -> Self {
impl<'keys> From<&'keys mut ChildKeysPrivate> for &'keys mut (KeyChain, nssa::Account) {
fn from(value: &'keys mut ChildKeysPrivate) -> Self {
&mut value.value
}
}
#[cfg(test)]
mod tests {
use nssa_core::{NullifierPublicKey, NullifierSecretKey};
use nssa_core::NullifierSecretKey;
use super::*;
use crate::key_management::{self, secret_holders::ViewingSecretKey};
#[test]
fn test_master_key_generation() {
fn master_key_generation() {
let seed: [u8; 64] = [
252, 56, 204, 83, 232, 123, 209, 188, 187, 167, 39, 213, 71, 39, 58, 65, 125, 134, 255,
49, 43, 108, 92, 53, 173, 164, 94, 142, 150, 74, 21, 163, 43, 144, 226, 87, 199, 18,
@ -143,7 +147,7 @@ mod tests {
let keys = ChildKeysPrivate::root(seed);
let expected_ssk: SecretSpendingKey = key_management::secret_holders::SecretSpendingKey([
let expected_ssk = key_management::secret_holders::SecretSpendingKey([
246, 79, 26, 124, 135, 95, 52, 51, 201, 27, 48, 194, 2, 144, 51, 219, 245, 128, 139,
222, 42, 195, 105, 33, 115, 97, 186, 0, 97, 14, 218, 191,
]);
@ -158,7 +162,7 @@ mod tests {
34, 234, 19, 222, 2, 22, 12, 163, 252, 88, 11, 0, 163,
];
let expected_npk: NullifierPublicKey = nssa_core::NullifierPublicKey([
let expected_npk = nssa_core::NullifierPublicKey([
7, 123, 125, 191, 233, 183, 201, 4, 20, 214, 155, 210, 45, 234, 27, 240, 194, 111, 97,
247, 155, 113, 122, 246, 192, 0, 70, 61, 76, 71, 70, 2,
]);
@ -181,7 +185,7 @@ mod tests {
}
#[test]
fn test_child_keys_generation() {
fn child_keys_generation() {
let seed: [u8; 64] = [
252, 56, 204, 83, 232, 123, 209, 188, 187, 167, 39, 213, 71, 39, 58, 65, 125, 134, 255,
49, 43, 108, 92, 53, 173, 164, 94, 142, 150, 74, 21, 163, 43, 144, 226, 87, 199, 18,
@ -190,7 +194,7 @@ mod tests {
];
let root_node = ChildKeysPrivate::root(seed);
let child_node = ChildKeysPrivate::nth_child(&root_node, 42u32);
let child_node = ChildKeysPrivate::nth_child(&root_node, 42_u32);
let expected_ccc: [u8; 32] = [
145, 59, 225, 32, 54, 168, 14, 45, 60, 253, 57, 202, 31, 86, 142, 234, 51, 57, 154, 88,
@ -201,7 +205,7 @@ mod tests {
19, 100, 119, 73, 191, 225, 234, 219, 129, 88, 40, 229, 63, 225, 189, 136, 69, 172,
221, 186, 147, 83, 150, 207, 70, 17, 228, 70, 113, 87, 227, 31,
];
let expected_npk: NullifierPublicKey = nssa_core::NullifierPublicKey([
let expected_npk = nssa_core::NullifierPublicKey([
133, 235, 223, 151, 12, 69, 26, 222, 60, 125, 235, 125, 167, 212, 201, 168, 101, 242,
111, 239, 1, 228, 12, 252, 146, 53, 75, 17, 187, 255, 122, 181,
]);

View File

@ -8,7 +8,7 @@ pub struct ChildKeysPublic {
pub csk: nssa::PrivateKey,
pub cpk: nssa::PublicKey,
pub ccc: [u8; 32],
/// Can be [`None`] if root
/// Can be [`None`] if root.
pub cci: Option<u32>,
}
@ -16,22 +16,16 @@ impl ChildKeysPublic {
fn compute_hash_value(&self, cci: u32) -> [u8; 64] {
let mut hash_input = vec![];
match ((2u32).pow(31)).cmp(&cci) {
if 2_u32.pow(31) > cci {
// Non-harden
std::cmp::Ordering::Greater => {
hash_input.extend_from_slice(self.cpk.value());
hash_input.extend_from_slice(&cci.to_le_bytes());
hmac_sha512::HMAC::mac(hash_input, self.ccc)
}
hash_input.extend_from_slice(self.cpk.value());
} else {
// Harden
_ => {
hash_input.extend_from_slice(self.csk.value());
hash_input.extend_from_slice(&(cci).to_le_bytes());
hmac_sha512::HMAC::mac(hash_input, self.ccc)
}
hash_input.extend_from_slice(self.csk.value());
}
hash_input.extend_from_slice(&cci.to_le_bytes());
hmac_sha512::HMAC::mac(hash_input, self.ccc)
}
}
@ -68,9 +62,10 @@ impl KeyNode for ChildKeysPublic {
)
.unwrap();
if secp256k1::constants::CURVE_ORDER < *csk.value() {
panic!("Secret key cannot exceed curve order");
}
assert!(
secp256k1::constants::CURVE_ORDER >= *csk.value(),
"Secret key cannot exceed curve order"
);
let ccc = *hash_value
.last_chunk::<32>()
@ -99,8 +94,8 @@ impl KeyNode for ChildKeysPublic {
}
}
impl<'a> From<&'a ChildKeysPublic> for &'a nssa::PrivateKey {
fn from(value: &'a ChildKeysPublic) -> Self {
impl<'keys> From<&'keys ChildKeysPublic> for &'keys nssa::PrivateKey {
fn from(value: &'keys ChildKeysPublic) -> Self {
&value.csk
}
}
@ -112,7 +107,7 @@ mod tests {
use super::*;
#[test]
fn test_master_keys_generation() {
fn master_keys_generation() {
let seed = [
88, 189, 37, 237, 199, 125, 151, 226, 69, 153, 165, 113, 191, 69, 188, 221, 9, 34, 173,
134, 61, 109, 34, 103, 121, 39, 237, 14, 107, 194, 24, 194, 191, 14, 237, 185, 12, 87,
@ -143,7 +138,7 @@ mod tests {
}
#[test]
fn test_harden_child_keys_generation() {
fn harden_child_keys_generation() {
let seed = [
88, 189, 37, 237, 199, 125, 151, 226, 69, 153, 165, 113, 191, 69, 188, 221, 9, 34, 173,
134, 61, 109, 34, 103, 121, 39, 237, 14, 107, 194, 24, 194, 191, 14, 237, 185, 12, 87,
@ -151,7 +146,7 @@ mod tests {
187, 148, 92, 44, 253, 210, 37,
];
let root_keys = ChildKeysPublic::root(seed);
let cci = (2u32).pow(31) + 13;
let cci = (2_u32).pow(31) + 13;
let child_keys = ChildKeysPublic::nth_child(&root_keys, cci);
print!(
@ -183,7 +178,7 @@ mod tests {
}
#[test]
fn test_nonharden_child_keys_generation() {
fn nonharden_child_keys_generation() {
let seed = [
88, 189, 37, 237, 199, 125, 151, 226, 69, 153, 165, 113, 191, 69, 188, 221, 9, 34, 173,
134, 61, 109, 34, 103, 121, 39, 237, 14, 107, 194, 24, 194, 191, 14, 237, 185, 12, 87,
@ -223,7 +218,7 @@ mod tests {
}
#[test]
fn test_edge_case_child_keys_generation_2_power_31() {
fn edge_case_child_keys_generation_2_power_31() {
let seed = [
88, 189, 37, 237, 199, 125, 151, 226, 69, 153, 165, 113, 191, 69, 188, 221, 9, 34, 173,
134, 61, 109, 34, 103, 121, 39, 237, 14, 107, 194, 24, 194, 191, 14, 237, 185, 12, 87,
@ -231,7 +226,7 @@ mod tests {
187, 148, 92, 44, 253, 210, 37,
];
let root_keys = ChildKeysPublic::root(seed);
let cci = (2u32).pow(31); //equivant to 0, thus non-harden.
let cci = (2_u32).pow(31); //equivant to 0, thus non-harden.
let child_keys = ChildKeysPublic::nth_child(&root_keys, cci);
let expected_ccc = [

View File

@ -1,7 +1,4 @@
use std::{
collections::{BTreeMap, HashMap},
sync::Arc,
};
use std::{collections::BTreeMap, sync::Arc};
use anyhow::Result;
use common::sequencer_client::SequencerClient;
@ -25,13 +22,14 @@ pub const DEPTH_SOFT_CAP: u32 = 20;
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct KeyTree<N: KeyNode> {
pub key_map: BTreeMap<ChainIndex, N>,
pub account_id_map: HashMap<nssa::AccountId, ChainIndex>,
pub account_id_map: BTreeMap<nssa::AccountId, ChainIndex>,
}
pub type KeyTreePublic = KeyTree<ChildKeysPublic>;
pub type KeyTreePrivate = KeyTree<ChildKeysPrivate>;
impl<N: KeyNode> KeyTree<N> {
#[must_use]
pub fn new(seed: &SeedHolder) -> Self {
let seed_fit: [u8; 64] = seed
.seed
@ -43,7 +41,7 @@ impl<N: KeyNode> KeyTree<N> {
let account_id = root_keys.account_id();
let key_map = BTreeMap::from_iter([(ChainIndex::root(), root_keys)]);
let account_id_map = HashMap::from_iter([(account_id, ChainIndex::root())]);
let account_id_map = BTreeMap::from_iter([(account_id, ChainIndex::root())]);
Self {
key_map,
@ -52,7 +50,7 @@ impl<N: KeyNode> KeyTree<N> {
}
pub fn new_from_root(root: N) -> Self {
let account_id_map = HashMap::from_iter([(root.account_id(), ChainIndex::root())]);
let account_id_map = BTreeMap::from_iter([(root.account_id(), ChainIndex::root())]);
let key_map = BTreeMap::from_iter([(ChainIndex::root(), root)]);
Self {
@ -63,6 +61,7 @@ impl<N: KeyNode> KeyTree<N> {
// ToDo: Add function to create a tree from list of nodes with consistency check.
#[must_use]
pub fn find_next_last_child_of_id(&self, parent_id: &ChainIndex) -> Option<u32> {
if !self.key_map.contains_key(parent_id) {
return None;
@ -82,19 +81,19 @@ impl<N: KeyNode> KeyTree<N> {
let rightmost_child = parent_id.nth_child(right);
let rightmost_ref = self.key_map.get(&rightmost_child);
let rightmost_ref_next = self.key_map.get(&rightmost_child.next_in_line());
let rightmost_ref_next = self.key_map.get(&rightmost_child.next_in_line()?);
match (&rightmost_ref, &rightmost_ref_next) {
(Some(_), Some(_)) => {
left_border = right;
right = (right + right_border) / 2;
right = u32::midpoint(right, right_border);
}
(Some(_), None) => {
break Some(right + 1);
break Some(right.checked_add(1)?);
}
(None, None) => {
right_border = right;
right = (left_border + right) / 2;
right = u32::midpoint(left_border, right);
}
(None, Some(_)) => {
unreachable!();
@ -131,7 +130,7 @@ impl<N: KeyNode> KeyTree<N> {
break 'outer chain_id;
}
}
depth += 1;
depth = depth.checked_add(1).expect("Max depth reached");
}
}
@ -152,16 +151,15 @@ impl<N: KeyNode> KeyTree<N> {
self.fill_node(&self.find_next_slot_layered())
}
#[must_use]
pub fn get_node(&self, account_id: nssa::AccountId) -> Option<&N> {
self.account_id_map
.get(&account_id)
.and_then(|chain_id| self.key_map.get(chain_id))
let chain_id = self.account_id_map.get(&account_id)?;
self.key_map.get(chain_id)
}
pub fn get_node_mut(&mut self, account_id: nssa::AccountId) -> Option<&mut N> {
self.account_id_map
.get(&account_id)
.and_then(|chain_id| self.key_map.get_mut(chain_id))
let chain_id = self.account_id_map.get(&account_id)?;
self.key_map.get_mut(chain_id)
}
pub fn insert(&mut self, account_id: nssa::AccountId, chain_index: ChainIndex, node: N) {
@ -170,7 +168,7 @@ impl<N: KeyNode> KeyTree<N> {
}
pub fn remove(&mut self, addr: nssa::AccountId) -> Option<N> {
let chain_index = self.account_id_map.remove(&addr).unwrap();
let chain_index = self.account_id_map.remove(&addr)?;
self.key_map.remove(&chain_index)
}
@ -179,7 +177,7 @@ impl<N: KeyNode> KeyTree<N> {
/// For given `depth` adds children to a tree such that their `ChainIndex::depth(&self) <
/// depth`.
///
/// Tree must be empty before start
/// Tree must be empty before start.
pub fn generate_tree_for_depth(&mut self, depth: u32) {
let mut id_stack = vec![ChainIndex::root()];
@ -189,23 +187,26 @@ impl<N: KeyNode> KeyTree<N> {
while (next_id.depth()) < depth {
self.generate_new_node(&curr_id);
id_stack.push(next_id.clone());
next_id = next_id.next_in_line();
next_id = match next_id.next_in_line() {
Some(id) => id,
None => break,
};
}
}
}
}
impl KeyTree<ChildKeysPrivate> {
/// Cleanup of all non-initialized accounts in a private tree
/// Cleanup of all non-initialized accounts in a private tree.
///
/// For given `depth` checks children to a tree such that their `ChainIndex::depth(&self) <
/// depth`.
///
/// If account is default, removes them.
///
/// Chain must be parsed for accounts beforehand
/// Chain must be parsed for accounts beforehand.
///
/// Fast, leaves gaps between accounts
/// Fast, leaves gaps between accounts.
pub fn cleanup_tree_remove_uninit_for_depth(&mut self, depth: u32) {
let mut id_stack = vec![ChainIndex::root()];
@ -222,22 +223,26 @@ impl KeyTree<ChildKeysPrivate> {
while (next_id.depth()) < depth {
id_stack.push(next_id.clone());
next_id = next_id.next_in_line();
next_id = match next_id.next_in_line() {
Some(id) => id,
None => break,
};
}
}
}
/// Cleanup of non-initialized accounts in a private tree
/// Cleanup of non-initialized accounts in a private tree.
///
/// If account is default, removes them, stops at first non-default account.
///
/// Walks through tree in lairs of same depth using `ChainIndex::chain_ids_at_depth()`
/// Walks through tree in lairs of same depth using `ChainIndex::chain_ids_at_depth()`.
///
/// Chain must be parsed for accounts beforehand
/// Chain must be parsed for accounts beforehand.
///
/// Slow, maintains tree consistency.
pub fn cleanup_tree_remove_uninit_layered(&mut self, depth: u32) {
'outer: for i in (1..(depth as usize)).rev() {
let depth = usize::try_from(depth).expect("Depth is expected to fit in usize");
'outer: for i in (1..depth).rev() {
println!("Cleanup of tree at depth {i}");
for id in ChainIndex::chain_ids_at_depth(i) {
if let Some(node) = self.key_map.get(&id) {
@ -254,14 +259,14 @@ impl KeyTree<ChildKeysPrivate> {
}
impl KeyTree<ChildKeysPublic> {
/// Cleanup of all non-initialized accounts in a public tree
/// Cleanup of all non-initialized accounts in a public tree.
///
/// For given `depth` checks children to a tree such that their `ChainIndex::depth(&self) <
/// depth`.
///
/// If account is default, removes them.
///
/// Fast, leaves gaps between accounts
/// Fast, leaves gaps between accounts.
pub async fn cleanup_tree_remove_ininit_for_depth(
&mut self,
depth: u32,
@ -283,18 +288,21 @@ impl KeyTree<ChildKeysPublic> {
while (next_id.depth()) < depth {
id_stack.push(next_id.clone());
next_id = next_id.next_in_line();
next_id = match next_id.next_in_line() {
Some(id) => id,
None => break,
};
}
}
Ok(())
}
/// Cleanup of non-initialized accounts in a public tree
/// Cleanup of non-initialized accounts in a public tree.
///
/// If account is default, removes them, stops at first non-default account.
///
/// Walks through tree in lairs of same depth using `ChainIndex::chain_ids_at_depth()`
/// Walks through tree in lairs of same depth using `ChainIndex::chain_ids_at_depth()`.
///
/// Slow, maintains tree consistency.
pub async fn cleanup_tree_remove_uninit_layered(
@ -302,7 +310,8 @@ impl KeyTree<ChildKeysPublic> {
depth: u32,
client: Arc<SequencerClient>,
) -> Result<()> {
'outer: for i in (1..(depth as usize)).rev() {
let depth = usize::try_from(depth).expect("Depth is expected to fit in usize");
'outer: for i in (1..depth).rev() {
println!("Cleanup of tree at depth {i}");
for id in ChainIndex::chain_ids_at_depth(i) {
if let Some(node) = self.key_map.get(&id) {
@ -325,7 +334,9 @@ impl KeyTree<ChildKeysPublic> {
#[cfg(test)]
mod tests {
use std::{collections::HashSet, str::FromStr};
#![expect(clippy::shadow_unrelated, reason = "We don't care about this in tests")]
use std::{collections::HashSet, str::FromStr as _};
use nssa::AccountId;
@ -338,7 +349,7 @@ mod tests {
}
#[test]
fn test_simple_key_tree() {
fn simple_key_tree() {
let seed_holder = seed_holder_for_tests();
let tree = KeyTreePublic::new(&seed_holder);
@ -351,7 +362,7 @@ mod tests {
}
#[test]
fn test_small_key_tree() {
fn small_key_tree() {
let seed_holder = seed_holder_for_tests();
let mut tree = KeyTreePrivate::new(&seed_holder);
@ -390,7 +401,7 @@ mod tests {
}
#[test]
fn test_key_tree_can_not_make_child_keys() {
fn key_tree_can_not_make_child_keys() {
let seed_holder = seed_holder_for_tests();
let mut tree = KeyTreePrivate::new(&seed_holder);
@ -420,7 +431,7 @@ mod tests {
}
#[test]
fn test_key_tree_complex_structure() {
fn key_tree_complex_structure() {
let seed_holder = seed_holder_for_tests();
let mut tree = KeyTreePublic::new(&seed_holder);
@ -515,7 +526,7 @@ mod tests {
}
#[test]
fn test_tree_balancing_automatic() {
fn tree_balancing_automatic() {
let seed_holder = seed_holder_for_tests();
let mut tree = KeyTreePublic::new(&seed_holder);
@ -530,7 +541,7 @@ mod tests {
}
#[test]
fn test_cleanup() {
fn cleanup() {
let seed_holder = seed_holder_for_tests();
let mut tree = KeyTreePrivate::new(&seed_holder);
@ -563,13 +574,13 @@ mod tests {
tree.cleanup_tree_remove_uninit_layered(10);
let mut key_set_res = HashSet::new();
key_set_res.insert("/0".to_string());
key_set_res.insert("/1".to_string());
key_set_res.insert("/2".to_string());
key_set_res.insert("/".to_string());
key_set_res.insert("/0/0".to_string());
key_set_res.insert("/0/1".to_string());
key_set_res.insert("/1/0".to_string());
key_set_res.insert("/0".to_owned());
key_set_res.insert("/1".to_owned());
key_set_res.insert("/2".to_owned());
key_set_res.insert("/".to_owned());
key_set_res.insert("/0/0".to_owned());
key_set_res.insert("/0/1".to_owned());
key_set_res.insert("/1/0".to_owned());
let mut key_set = HashSet::new();
@ -579,28 +590,16 @@ mod tests {
assert_eq!(key_set, key_set_res);
let acc = tree
.key_map
.get(&ChainIndex::from_str("/1").unwrap())
.unwrap();
let acc = &tree.key_map[&ChainIndex::from_str("/1").unwrap()];
assert_eq!(acc.value.1.balance, 2);
let acc = tree
.key_map
.get(&ChainIndex::from_str("/2").unwrap())
.unwrap();
let acc = &tree.key_map[&ChainIndex::from_str("/2").unwrap()];
assert_eq!(acc.value.1.balance, 3);
let acc = tree
.key_map
.get(&ChainIndex::from_str("/0/1").unwrap())
.unwrap();
let acc = &tree.key_map[&ChainIndex::from_str("/0/1").unwrap()];
assert_eq!(acc.value.1.balance, 5);
let acc = tree
.key_map
.get(&ChainIndex::from_str("/1/0").unwrap())
.unwrap();
let acc = &tree.key_map[&ChainIndex::from_str("/1/0").unwrap()];
assert_eq!(acc.value.1.balance, 6);
}
}

View File

@ -1,9 +1,10 @@
/// Trait, that reperesents a Node in hierarchical key tree
/// Trait, that reperesents a Node in hierarchical key tree.
pub trait KeyNode {
/// Tree root node
/// Tree root node.
fn root(seed: [u8; 64]) -> Self;
/// `cci`'s child of node
/// `cci`'s child of node.
#[must_use]
fn nth_child(&self, cci: u32) -> Self;
fn chain_code(&self) -> &[u8; 32];

View File

@ -5,14 +5,14 @@ use nssa_core::{
use secret_holders::{PrivateKeyHolder, SecretSpendingKey, SeedHolder};
use serde::{Deserialize, Serialize};
pub type PublicAccountSigningKey = [u8; 32];
pub mod ephemeral_key_holder;
pub mod key_tree;
pub mod secret_holders;
pub type PublicAccountSigningKey = [u8; 32];
#[derive(Serialize, Deserialize, Clone, Debug)]
/// Entrypoint to key management
/// Entrypoint to key management.
pub struct KeyChain {
pub secret_spending_key: SecretSpendingKey,
pub private_key_holder: PrivateKeyHolder,
@ -21,6 +21,7 @@ pub struct KeyChain {
}
impl KeyChain {
#[must_use]
pub fn new_os_random() -> Self {
// Currently dropping SeedHolder at the end of initialization.
// Now entirely sure if we need it in the future.
@ -40,6 +41,7 @@ impl KeyChain {
}
}
#[must_use]
pub fn new_mnemonic(passphrase: String) -> Self {
// Currently dropping SeedHolder at the end of initialization.
// Not entirely sure if we need it in the future.
@ -59,14 +61,15 @@ impl KeyChain {
}
}
#[must_use]
pub fn calculate_shared_secret_receiver(
&self,
ephemeral_public_key_sender: EphemeralPublicKey,
ephemeral_public_key_sender: &EphemeralPublicKey,
index: Option<u32>,
) -> SharedSecretKey {
SharedSecretKey::new(
&self.secret_spending_key.generate_viewing_secret_key(index),
&ephemeral_public_key_sender,
ephemeral_public_key_sender,
)
}
}
@ -74,9 +77,9 @@ impl KeyChain {
#[cfg(test)]
mod tests {
use aes_gcm::aead::OsRng;
use base58::ToBase58;
use k256::{AffinePoint, elliptic_curve::group::GroupEncoding};
use rand::RngCore;
use base58::ToBase58 as _;
use k256::{AffinePoint, elliptic_curve::group::GroupEncoding as _};
use rand::RngCore as _;
use super::*;
use crate::key_management::{
@ -84,19 +87,19 @@ mod tests {
};
#[test]
fn test_new_os_random() {
fn new_os_random() {
// Ensure that a new KeyChain instance can be created without errors.
let account_id_key_holder = KeyChain::new_os_random();
// Check that key holder fields are initialized with expected types
assert_ne!(
account_id_key_holder.nullifer_public_key.as_ref(),
&[0u8; 32]
&[0_u8; 32]
);
}
#[test]
fn test_calculate_shared_secret_receiver() {
fn calculate_shared_secret_receiver() {
let account_id_key_holder = KeyChain::new_os_random();
// Generate a random ephemeral public key sender
@ -106,7 +109,7 @@ mod tests {
// Calculate shared secret
let _shared_secret = account_id_key_holder
.calculate_shared_secret_receiver(ephemeral_public_key_sender, None);
.calculate_shared_secret_receiver(&ephemeral_public_key_sender, None);
}
#[test]
@ -177,14 +180,14 @@ mod tests {
}
#[test]
fn test_non_trivial_chain_index() {
fn non_trivial_chain_index() {
let keys = account_with_chain_index_2_for_tests();
let eph_key_holder = EphemeralKeyHolder::new(&keys.nullifer_public_key);
let key_sender = eph_key_holder.calculate_shared_secret_sender(&keys.viewing_public_key);
let key_receiver = keys.calculate_shared_secret_receiver(
eph_key_holder.generate_ephemeral_public_key(),
&eph_key_holder.generate_ephemeral_public_key(),
Some(2),
);

View File

@ -4,9 +4,9 @@ use nssa_core::{
NullifierPublicKey, NullifierSecretKey,
encryption::{Scalar, ViewingPublicKey},
};
use rand::{RngCore, rngs::OsRng};
use rand::{RngCore as _, rngs::OsRng};
use serde::{Deserialize, Serialize};
use sha2::{Digest, digest::FixedOutput};
use sha2::{Digest as _, digest::FixedOutput as _};
const NSSA_ENTROPY_BYTES: [u8; 32] = [0; 32];
@ -25,14 +25,16 @@ pub struct SecretSpendingKey(pub(crate) [u8; 32]);
pub type ViewingSecretKey = Scalar;
#[derive(Serialize, Deserialize, Debug, Clone)]
/// Private key holder. Produces public keys. Can produce account_id. Can produce shared secret for
/// recepient.
/// Private key holder. Produces public keys. Can produce `account_id`. Can produce shared secret
/// for recepient.
#[expect(clippy::partial_pub_fields, reason = "TODO: fix later")]
pub struct PrivateKeyHolder {
pub nullifier_secret_key: NullifierSecretKey,
pub(crate) viewing_secret_key: ViewingSecretKey,
}
impl SeedHolder {
#[must_use]
pub fn new_os_random() -> Self {
let mut enthopy_bytes: [u8; 32] = [0; 32];
OsRng.fill_bytes(&mut enthopy_bytes);
@ -46,6 +48,7 @@ impl SeedHolder {
}
}
#[must_use]
pub fn new_mnemonic(passphrase: String) -> Self {
let mnemonic = Mnemonic::from_entropy(&NSSA_ENTROPY_BYTES)
.expect("Enthropy must be a multiple of 32 bytes");
@ -56,6 +59,7 @@ impl SeedHolder {
}
}
#[must_use]
pub fn generate_secret_spending_key_hash(&self) -> HashType {
let mut hash = hmac_sha512::HMAC::mac(&self.seed, "NSSA_seed");
@ -67,22 +71,24 @@ impl SeedHolder {
HashType(*hash.first_chunk::<32>().unwrap())
}
#[must_use]
pub fn produce_top_secret_key_holder(&self) -> SecretSpendingKey {
SecretSpendingKey(self.generate_secret_spending_key_hash().into())
}
}
impl SecretSpendingKey {
#[must_use]
pub fn generate_nullifier_secret_key(&self, index: Option<u32>) -> NullifierSecretKey {
let index = match index {
None => 0u32,
_ => index.expect("Expect a valid u32"),
};
const PREFIX: &[u8; 8] = b"LEE/keys";
const SUFFIX_1: &[u8; 1] = &[1];
const SUFFIX_2: &[u8; 19] = &[0; 19];
let index = match index {
None => 0_u32,
_ => index.expect("Expect a valid u32"),
};
let mut hasher = sha2::Sha256::new();
hasher.update(PREFIX);
hasher.update(self.0);
@ -93,15 +99,17 @@ impl SecretSpendingKey {
<NullifierSecretKey>::from(hasher.finalize_fixed())
}
#[must_use]
pub fn generate_viewing_secret_key(&self, index: Option<u32>) -> ViewingSecretKey {
let index = match index {
None => 0u32,
_ => index.expect("Expect a valid u32"),
};
const PREFIX: &[u8; 8] = b"LEE/keys";
const SUFFIX_1: &[u8; 1] = &[2];
const SUFFIX_2: &[u8; 19] = &[0; 19];
let index = match index {
None => 0_u32,
_ => index.expect("Expect a valid u32"),
};
let mut hasher = sha2::Sha256::new();
hasher.update(PREFIX);
hasher.update(self.0);
@ -112,6 +120,7 @@ impl SecretSpendingKey {
hasher.finalize_fixed().into()
}
#[must_use]
pub fn produce_private_key_holder(&self, index: Option<u32>) -> PrivateKeyHolder {
PrivateKeyHolder {
nullifier_secret_key: self.generate_nullifier_secret_key(index),
@ -121,10 +130,12 @@ impl SecretSpendingKey {
}
impl PrivateKeyHolder {
#[must_use]
pub fn generate_nullifier_public_key(&self) -> NullifierPublicKey {
(&self.nullifier_secret_key).into()
}
#[must_use]
pub fn generate_viewing_public_key(&self) -> ViewingPublicKey {
ViewingPublicKey::from_scalar(self.viewing_secret_key)
}
@ -148,7 +159,7 @@ mod tests {
assert_eq!(seed_holder.seed.len(), 64);
let _ = seed_holder.generate_secret_spending_key_hash();
let _hash = seed_holder.generate_secret_spending_key_hash();
}
#[test]
@ -159,15 +170,15 @@ mod tests {
let top_secret_key_holder = seed_holder.produce_top_secret_key_holder();
let _ = top_secret_key_holder.generate_viewing_secret_key(None);
let _vsk = top_secret_key_holder.generate_viewing_secret_key(None);
}
#[test]
fn two_seeds_generated_same_from_same_mnemonic() {
let mnemonic = "test_pass";
let seed_holder1 = SeedHolder::new_mnemonic(mnemonic.to_string());
let seed_holder2 = SeedHolder::new_mnemonic(mnemonic.to_string());
let seed_holder1 = SeedHolder::new_mnemonic(mnemonic.to_owned());
let seed_holder2 = SeedHolder::new_mnemonic(mnemonic.to_owned());
assert_eq!(seed_holder1.seed, seed_holder2.seed);
}

View File

@ -14,14 +14,14 @@ pub type PublicKey = AffinePoint;
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct NSSAUserData {
/// Default public accounts
/// Default public accounts.
pub default_pub_account_signing_keys: BTreeMap<nssa::AccountId, nssa::PrivateKey>,
/// Default private accounts
/// Default private accounts.
pub default_user_private_accounts:
BTreeMap<nssa::AccountId, (KeyChain, nssa_core::account::Account)>,
/// Tree of public keys
/// Tree of public keys.
pub public_key_tree: KeyTreePublic,
/// Tree of private keys
/// Tree of private keys.
pub private_key_tree: KeyTreePrivate,
}
@ -34,7 +34,7 @@ impl NSSAUserData {
let expected_account_id =
nssa::AccountId::from(&nssa::PublicKey::new_from_private_key(key));
if &expected_account_id != account_id {
println!("{}, {}", expected_account_id, account_id);
println!("{expected_account_id}, {account_id}");
check_res = false;
}
}
@ -48,7 +48,7 @@ impl NSSAUserData {
for (account_id, (key, _)) in accounts_keys_map {
let expected_account_id = nssa::AccountId::from(&key.nullifer_public_key);
if expected_account_id != *account_id {
println!("{}, {}", expected_account_id, account_id);
println!("{expected_account_id}, {account_id}");
check_res = false;
}
}
@ -84,9 +84,9 @@ impl NSSAUserData {
})
}
/// Generated new private key for public transaction signatures
/// Generated new private key for public transaction signatures.
///
/// Returns the account_id of new account
/// Returns the `account_id` of new account.
pub fn generate_new_public_transaction_private_key(
&mut self,
parent_cci: Option<ChainIndex>,
@ -103,23 +103,20 @@ impl NSSAUserData {
}
}
/// Returns the signing key for public transaction signatures
/// Returns the signing key for public transaction signatures.
#[must_use]
pub fn get_pub_account_signing_key(
&self,
account_id: nssa::AccountId,
) -> Option<&nssa::PrivateKey> {
// First seek in defaults
if let Some(key) = self.default_pub_account_signing_keys.get(&account_id) {
Some(key)
// Then seek in tree
} else {
self.public_key_tree.get_node(account_id).map(Into::into)
}
self.default_pub_account_signing_keys
.get(&account_id)
.or_else(|| self.public_key_tree.get_node(account_id).map(Into::into))
}
/// Generated new private key for privacy preserving transactions
/// Generated new private key for privacy preserving transactions.
///
/// Returns the account_id of new account
/// Returns the `account_id` of new account.
pub fn generate_new_privacy_preserving_transaction_key_chain(
&mut self,
parent_cci: Option<ChainIndex>,
@ -136,21 +133,18 @@ impl NSSAUserData {
}
}
/// Returns the signing key for public transaction signatures
/// Returns the signing key for public transaction signatures.
#[must_use]
pub fn get_private_account(
&self,
account_id: nssa::AccountId,
) -> Option<&(KeyChain, nssa_core::account::Account)> {
// First seek in defaults
if let Some(key) = self.default_user_private_accounts.get(&account_id) {
Some(key)
// Then seek in tree
} else {
self.private_key_tree.get_node(account_id).map(Into::into)
}
self.default_user_private_accounts
.get(&account_id)
.or_else(|| self.private_key_tree.get_node(account_id).map(Into::into))
}
/// Returns the signing key for public transaction signatures
/// Returns the signing key for public transaction signatures.
pub fn get_private_account_mut(
&mut self,
account_id: &nssa::AccountId,
@ -190,8 +184,8 @@ impl Default for NSSAUserData {
Self::new_with_accounts(
BTreeMap::new(),
BTreeMap::new(),
KeyTreePublic::new(&SeedHolder::new_mnemonic("default".to_string())),
KeyTreePrivate::new(&SeedHolder::new_mnemonic("default".to_string())),
KeyTreePublic::new(&SeedHolder::new_mnemonic("default".to_owned())),
KeyTreePrivate::new(&SeedHolder::new_mnemonic("default".to_owned())),
)
.unwrap()
}
@ -202,7 +196,7 @@ mod tests {
use super::*;
#[test]
fn test_new_account() {
fn new_account() {
let mut user_data = NSSAUserData::default();
let (account_id_private, _) = user_data

View File

@ -1,2 +1,4 @@
#![expect(clippy::print_stdout, reason = "TODO: fix later")]
pub mod key_management;
pub mod key_protocol_core;

View File

@ -4,6 +4,9 @@ version = "0.1.0"
edition = "2024"
license = { workspace = true }
[lints]
workspace = true
[dependencies]
tokio = { workspace = true, features = ["sync"] }

View File

@ -6,6 +6,7 @@ pub struct MemPool<T> {
}
impl<T> MemPool<T> {
#[must_use]
pub fn new(max_size: usize) -> (Self, MemPoolHandle<T>) {
let (sender, receiver) = tokio::sync::mpsc::channel(max_size);
@ -17,6 +18,7 @@ impl<T> MemPool<T> {
(mem_pool, sender)
}
/// Pop an item from the mempool first checking the front buffer (LIFO) then the channel (FIFO).
pub fn pop(&mut self) -> Option<T> {
use tokio::sync::mpsc::error::TryRecvError;
@ -36,7 +38,7 @@ impl<T> MemPool<T> {
}
}
/// Push an item to the front of the mempool (will be popped first)
/// Push an item to the front of the mempool (will be popped first).
pub fn push_front(&mut self, item: T) {
self.front_buffer.push(item);
}
@ -47,11 +49,11 @@ pub struct MemPoolHandle<T> {
}
impl<T> MemPoolHandle<T> {
fn new(sender: Sender<T>) -> Self {
const fn new(sender: Sender<T>) -> Self {
Self { sender }
}
/// Send an item to the mempool blocking if max size is reached
/// Send an item to the mempool blocking if max size is reached.
pub async fn push(&self, item: T) -> Result<(), tokio::sync::mpsc::error::SendError<T>> {
self.sender.send(item).await
}
@ -64,13 +66,13 @@ mod tests {
use super::*;
#[test]
async fn test_mempool_new() {
async fn mempool_new() {
let (mut pool, _handle): (MemPool<u64>, _) = MemPool::new(10);
assert_eq!(pool.pop(), None);
}
#[test]
async fn test_push_and_pop() {
async fn push_and_pop() {
let (mut pool, handle) = MemPool::new(10);
handle.push(1).await.unwrap();
@ -81,7 +83,7 @@ mod tests {
}
#[test]
async fn test_multiple_push_pop() {
async fn multiple_push_pop() {
let (mut pool, handle) = MemPool::new(10);
handle.push(1).await.unwrap();
@ -95,13 +97,13 @@ mod tests {
}
#[test]
async fn test_pop_empty() {
async fn pop_empty() {
let (mut pool, _handle): (MemPool<u64>, _) = MemPool::new(10);
assert_eq!(pool.pop(), None);
}
#[test]
async fn test_max_size() {
async fn max_size() {
let (mut pool, handle) = MemPool::new(2);
handle.push(1).await.unwrap();
@ -114,7 +116,7 @@ mod tests {
}
#[test]
async fn test_push_front() {
async fn push_front() {
let (mut pool, handle) = MemPool::new(10);
handle.push(1).await.unwrap();

View File

@ -4,9 +4,13 @@ version = "0.1.0"
edition = "2024"
license = { workspace = true }
[lints]
workspace = true
[dependencies]
nssa_core = { workspace = true, features = ["host"] }
anyhow.workspace = true
thiserror.workspace = true
risc0-zkvm.workspace = true
serde.workspace = true

View File

@ -1,4 +1,4 @@
use std::{env, fs, path::PathBuf};
use std::{env, fmt::Write as _, fs, path::PathBuf};
fn main() -> Result<(), Box<dyn std::error::Error>> {
let manifest_dir = PathBuf::from(env::var("CARGO_MANIFEST_DIR")?);
@ -15,7 +15,7 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
.collect::<Vec<_>>();
if bins.is_empty() {
return Err(format!("No .bin files found in {:?}", program_methods_dir).into());
return Err(format!("No .bin files found in {}", program_methods_dir.display()).into());
}
fs::create_dir_all(&mod_dir)?;
@ -25,14 +25,16 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
let name = path.file_stem().unwrap().to_string_lossy();
let bytecode = fs::read(&path)?;
let image_id: [u32; 8] = risc0_binfmt::compute_image_id(&bytecode)?.into();
src.push_str(&format!(
write!(
src,
"pub const {}_ELF: &[u8] = include_bytes!(r#\"{}\"#);\n\
#[expect(clippy::unreadable_literal, reason = \"Generated image IDs from risc0 are cryptographic hashes represented as u32 arrays\")]\n\
pub const {}_ID: [u32; 8] = {:?};\n",
name.to_uppercase(),
path.display(),
name.to_uppercase(),
image_id
));
)?;
}
fs::write(&mod_file, src)?;
println!("cargo:warning=Generated module at {}", mod_file.display());

View File

@ -4,6 +4,9 @@ version = "0.1.0"
edition = "2024"
license = { workspace = true }
[lints]
workspace = true
[dependencies]
risc0-zkvm.workspace = true
borsh.workspace = true
@ -14,7 +17,7 @@ bytemuck.workspace = true
bytesize.workspace = true
base58.workspace = true
k256 = { workspace = true, optional = true }
chacha20 = { version = "0.9", default-features = false }
chacha20 = { version = "0.10" }
[dev-dependencies]
serde_json.workspace = true

View File

@ -1,6 +1,9 @@
use std::{fmt::Display, str::FromStr};
use std::{
fmt::{Display, Write as _},
str::FromStr,
};
use base58::{FromBase58, ToBase58};
use base58::{FromBase58 as _, ToBase58 as _};
use borsh::{BorshDeserialize, BorshSerialize};
pub use data::Data;
use risc0_zkvm::sha::{Impl, Sha256};
@ -81,7 +84,7 @@ impl BorshDeserialize for Nonce {
}
}
/// Account to be used both in public and private contexts
/// Account to be used both in public and private contexts.
#[derive(
Default, Clone, Eq, PartialEq, Serialize, Deserialize, BorshSerialize, BorshDeserialize,
)]
@ -94,12 +97,14 @@ pub struct Account {
impl std::fmt::Debug for Account {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let program_owner_hex: String = self
let program_owner_hex = self
.program_owner
.iter()
.flat_map(|n| n.to_le_bytes())
.map(|b| format!("{b:02x}"))
.collect();
.fold(String::new(), |mut acc, bytes| {
write!(acc, "{bytes:02x}").expect("writing to string should not fail");
acc
});
f.debug_struct("Account")
.field("program_owner", &program_owner_hex)
.field("balance", &self.balance)
@ -151,15 +156,18 @@ impl std::fmt::Debug for AccountId {
}
impl AccountId {
pub fn new(value: [u8; 32]) -> Self {
#[must_use]
pub const fn new(value: [u8; 32]) -> Self {
Self { value }
}
pub fn value(&self) -> &[u8; 32] {
#[must_use]
pub const fn value(&self) -> &[u8; 32] {
&self.value
}
pub fn into_value(self) -> [u8; 32] {
#[must_use]
pub const fn into_value(self) -> [u8; 32] {
self.value
}
}
@ -186,9 +194,9 @@ impl FromStr for AccountId {
if bytes.len() != 32 {
return Err(AccountIdError::InvalidLength(bytes.len()));
}
let mut value = [0u8; 32];
let mut value = [0_u8; 32];
value.copy_from_slice(&bytes);
Ok(AccountId { value })
Ok(Self { value })
}
}
@ -204,28 +212,28 @@ mod tests {
use crate::program::DEFAULT_PROGRAM_ID;
#[test]
fn test_zero_balance_account_data_creation() {
fn zero_balance_account_data_creation() {
let new_acc = Account::default();
assert_eq!(new_acc.balance, 0);
}
#[test]
fn test_zero_nonce_account_data_creation() {
fn zero_nonce_account_data_creation() {
let new_acc = Account::default();
assert_eq!(new_acc.nonce.0, 0);
}
#[test]
fn test_empty_data_account_data_creation() {
fn empty_data_account_data_creation() {
let new_acc = Account::default();
assert!(new_acc.data.is_empty());
}
#[test]
fn test_default_program_owner_account_data_creation() {
fn default_program_owner_account_data_creation() {
let new_acc = Account::default();
assert_eq!(new_acc.program_owner, DEFAULT_PROGRAM_ID);
@ -233,7 +241,7 @@ mod tests {
#[cfg(feature = "host")]
#[test]
fn test_account_with_metadata_constructor() {
fn account_with_metadata_constructor() {
let account = Account {
program_owner: [1, 2, 3, 4, 5, 6, 7, 8],
balance: 1337,
@ -255,7 +263,7 @@ mod tests {
fn parse_valid_account_id() {
let base58_str = "11111111111111111111111111111111";
let account_id: AccountId = base58_str.parse().unwrap();
assert_eq!(account_id.value, [0u8; 32]);
assert_eq!(account_id.value, [0_u8; 32]);
}
#[cfg(feature = "host")]

View File

@ -10,26 +10,29 @@ pub const DATA_MAX_LENGTH: ByteSize = ByteSize::kib(100);
pub struct Data(Vec<u8>);
impl Data {
#[must_use]
pub fn into_inner(self) -> Vec<u8> {
self.0
}
/// Reads data from a cursor.
#[cfg(feature = "host")]
pub fn from_cursor(
cursor: &mut std::io::Cursor<&[u8]>,
) -> Result<Self, crate::error::NssaCoreError> {
use std::io::Read as _;
let mut u32_bytes = [0u8; 4];
let mut u32_bytes = [0_u8; 4];
cursor.read_exact(&mut u32_bytes)?;
let data_length = u32::from_le_bytes(u32_bytes);
if data_length as usize > DATA_MAX_LENGTH.as_u64() as usize {
if u64::from(data_length) > DATA_MAX_LENGTH.as_u64() {
return Err(
std::io::Error::new(std::io::ErrorKind::InvalidData, DataTooBigError).into(),
);
}
let mut data = vec![0; data_length as usize];
let mut data =
vec![0; usize::try_from(data_length).expect("data length is expected to fit in usize")];
cursor.read_exact(&mut data)?;
Ok(Self(data))
}
@ -49,7 +52,9 @@ impl TryFrom<Vec<u8>> for Data {
type Error = DataTooBigError;
fn try_from(value: Vec<u8>) -> Result<Self, Self::Error> {
if value.len() > DATA_MAX_LENGTH.as_u64() as usize {
if value.len()
> usize::try_from(DATA_MAX_LENGTH.as_u64()).expect("DATA_MAX_LENGTH fits in usize")
{
Err(DataTooBigError)
} else {
Ok(Self(value))
@ -98,13 +103,17 @@ impl<'de> Deserialize<'de> for Data {
A: serde::de::SeqAccess<'de>,
{
let mut vec = Vec::with_capacity(
seq.size_hint()
.unwrap_or(0)
.min(DATA_MAX_LENGTH.as_u64() as usize),
seq.size_hint().unwrap_or(0).min(
usize::try_from(DATA_MAX_LENGTH.as_u64())
.expect("DATA_MAX_LENGTH fits in usize"),
),
);
while let Some(value) = seq.next_element()? {
if vec.len() >= DATA_MAX_LENGTH.as_u64() as usize {
if vec.len()
>= usize::try_from(DATA_MAX_LENGTH.as_u64())
.expect("DATA_MAX_LENGTH fits in usize")
{
return Err(serde::de::Error::custom(DataTooBigError));
}
vec.push(value);
@ -125,7 +134,7 @@ impl BorshDeserialize for Data {
let len = u32::deserialize_reader(reader)?;
match len {
0 => Ok(Self::default()),
len if len as usize > DATA_MAX_LENGTH.as_u64() as usize => Err(std::io::Error::new(
len if u64::from(len) > DATA_MAX_LENGTH.as_u64() => Err(std::io::Error::new(
std::io::ErrorKind::InvalidData,
DataTooBigError,
)),
@ -143,22 +152,36 @@ mod tests {
use super::*;
#[test]
fn test_data_max_length_allowed() {
let max_vec = vec![0u8; DATA_MAX_LENGTH.as_u64() as usize];
fn data_max_length_allowed() {
let max_vec = vec![
0_u8;
usize::try_from(DATA_MAX_LENGTH.as_u64())
.expect("DATA_MAX_LENGTH fits in usize")
];
let result = Data::try_from(max_vec);
assert!(result.is_ok());
}
#[test]
fn test_data_too_big_error() {
let big_vec = vec![0u8; DATA_MAX_LENGTH.as_u64() as usize + 1];
fn data_too_big_error() {
let big_vec = vec![
0_u8;
usize::try_from(DATA_MAX_LENGTH.as_u64())
.expect("DATA_MAX_LENGTH fits in usize")
+ 1
];
let result = Data::try_from(big_vec);
assert!(matches!(result, Err(DataTooBigError)));
}
#[test]
fn test_borsh_deserialize_exceeding_limit_error() {
let too_big_data = vec![0u8; DATA_MAX_LENGTH.as_u64() as usize + 1];
fn borsh_deserialize_exceeding_limit_error() {
let too_big_data = vec![
0_u8;
usize::try_from(DATA_MAX_LENGTH.as_u64())
.expect("DATA_MAX_LENGTH fits in usize")
+ 1
];
let mut serialized = Vec::new();
<_ as BorshSerialize>::serialize(&too_big_data, &mut serialized).unwrap();
@ -167,8 +190,13 @@ mod tests {
}
#[test]
fn test_json_deserialize_exceeding_limit_error() {
let data = vec![0u8; DATA_MAX_LENGTH.as_u64() as usize + 1];
fn json_deserialize_exceeding_limit_error() {
let data = vec![
0_u8;
usize::try_from(DATA_MAX_LENGTH.as_u64())
.expect("DATA_MAX_LENGTH fits in usize")
+ 1
];
let json = serde_json::to_string(&data).unwrap();
let result: Result<Data, _> = serde_json::from_str(&json);

View File

@ -40,6 +40,8 @@ pub struct PrivacyPreservingCircuitOutput {
#[cfg(feature = "host")]
impl PrivacyPreservingCircuitOutput {
/// Serializes the circuit output to a byte vector.
#[must_use]
pub fn to_bytes(&self) -> Vec<u8> {
bytemuck::cast_slice(&risc0_zkvm::serde::to_vec(&self).unwrap()).to_vec()
}
@ -57,13 +59,13 @@ mod tests {
};
#[test]
fn test_privacy_preserving_circuit_output_to_bytes_is_compatible_with_from_slice() {
fn privacy_preserving_circuit_output_to_bytes_is_compatible_with_from_slice() {
let output = PrivacyPreservingCircuitOutput {
public_pre_states: vec![
AccountWithMetadata::new(
Account {
program_owner: [1, 2, 3, 4, 5, 6, 7, 8],
balance: 12345678901234567890,
balance: 12_345_678_901_234_567_890,
data: b"test data".to_vec().try_into().unwrap(),
nonce: Nonce(18446744073709551614),
},
@ -73,7 +75,7 @@ mod tests {
AccountWithMetadata::new(
Account {
program_owner: [9, 9, 9, 8, 8, 8, 7, 7],
balance: 123123123456456567112,
balance: 123_123_123_456_456_567_112,
data: b"test data".to_vec().try_into().unwrap(),
nonce: Nonce(9999999999999999999999),
},

View File

@ -1,24 +1,9 @@
use borsh::{BorshDeserialize, BorshSerialize};
use risc0_zkvm::sha::{Impl, Sha256};
use risc0_zkvm::sha::{Impl, Sha256 as _};
use serde::{Deserialize, Serialize};
use crate::{NullifierPublicKey, account::Account};
#[derive(Serialize, Deserialize, BorshSerialize, BorshDeserialize)]
#[cfg_attr(
any(feature = "host", test),
derive(Clone, PartialEq, Eq, Hash, PartialOrd, Ord)
)]
pub struct Commitment(pub(super) [u8; 32]);
#[cfg(any(feature = "host", test))]
impl std::fmt::Debug for Commitment {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let hex: String = self.0.iter().map(|b| format!("{b:02x}")).collect();
write!(f, "Commitment({hex})")
}
}
/// A commitment to all zero data.
/// ```python
/// from hashlib import sha256
@ -31,7 +16,7 @@ pub const DUMMY_COMMITMENT: Commitment = Commitment([
165, 33, 34, 172, 227, 30, 215, 20, 85, 47, 230, 29,
]);
/// The hash of the dummy commitment
/// The hash of the dummy commitment.
/// ```python
/// from hashlib import sha256
/// hasher = sha256()
@ -43,9 +28,30 @@ pub const DUMMY_COMMITMENT_HASH: [u8; 32] = [
194, 216, 67, 56, 251, 208, 226, 0, 117, 149, 39,
];
#[derive(Serialize, Deserialize, BorshSerialize, BorshDeserialize)]
#[cfg_attr(
any(feature = "host", test),
derive(Clone, PartialEq, Eq, Hash, PartialOrd, Ord)
)]
pub struct Commitment(pub(super) [u8; 32]);
#[cfg(any(feature = "host", test))]
impl std::fmt::Debug for Commitment {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
use std::fmt::Write as _;
let hex: String = self.0.iter().fold(String::new(), |mut acc, b| {
write!(acc, "{b:02x}").expect("writing to string should not fail");
acc
});
write!(f, "Commitment({hex})")
}
}
impl Commitment {
/// Generates the commitment to a private account owned by user for npk:
/// SHA256(npk || program_owner || balance || nonce || SHA256(data))
/// SHA256(npk || `program_owner` || balance || nonce || SHA256(data)).
#[must_use]
pub fn new(npk: &NullifierPublicKey, account: &Account) -> Self {
let mut bytes = Vec::new();
bytes.extend_from_slice(&npk.to_byte_array());
@ -72,7 +78,8 @@ pub type CommitmentSetDigest = [u8; 32];
pub type MembershipProof = (usize, Vec<[u8; 32]>);
/// Computes the resulting digest for the given membership proof and corresponding commitment
/// Computes the resulting digest for the given membership proof and corresponding commitment.
#[must_use]
pub fn compute_digest_for_path(
commitment: &Commitment,
proof: &MembershipProof,
@ -84,18 +91,16 @@ pub fn compute_digest_for_path(
.unwrap();
let mut level_index = proof.0;
for node in &proof.1 {
let mut bytes = [0_u8; 64];
let is_left_child = level_index & 1 == 0;
if is_left_child {
let mut bytes = [0u8; 64];
bytes[..32].copy_from_slice(&result);
bytes[32..].copy_from_slice(node);
result = Impl::hash_bytes(&bytes).as_bytes().try_into().unwrap();
} else {
let mut bytes = [0u8; 64];
bytes[..32].copy_from_slice(node);
bytes[32..].copy_from_slice(&result);
result = Impl::hash_bytes(&bytes).as_bytes().try_into().unwrap();
}
result = Impl::hash_bytes(&bytes).as_bytes().try_into().unwrap();
level_index >>= 1;
}
result
@ -103,14 +108,14 @@ pub fn compute_digest_for_path(
#[cfg(test)]
mod tests {
use risc0_zkvm::sha::{Impl, Sha256};
use risc0_zkvm::sha::{Impl, Sha256 as _};
use crate::{
Commitment, DUMMY_COMMITMENT, DUMMY_COMMITMENT_HASH, NullifierPublicKey, account::Account,
};
#[test]
fn test_nothing_up_my_sleeve_dummy_commitment() {
fn nothing_up_my_sleeve_dummy_commitment() {
let default_account = Account::default();
let npk_null = NullifierPublicKey([0; 32]);
let expected_dummy_commitment = Commitment::new(&npk_null, &default_account);
@ -118,7 +123,7 @@ mod tests {
}
#[test]
fn test_nothing_up_my_sleeve_dummy_commitment_hash() {
fn nothing_up_my_sleeve_dummy_commitment_hash() {
let expected_dummy_commitment_hash: [u8; 32] =
Impl::hash_bytes(&DUMMY_COMMITMENT.to_byte_array())
.as_bytes()

View File

@ -2,7 +2,7 @@
#[cfg(feature = "host")]
use std::io::Cursor;
#[cfg(feature = "host")]
use std::io::Read;
use std::io::Read as _;
#[cfg(feature = "host")]
use crate::Nullifier;
@ -17,6 +17,8 @@ use crate::{
};
impl Account {
/// Serializes the account to bytes.
#[must_use]
pub fn to_bytes(&self) -> Vec<u8> {
let mut bytes = Vec::new();
for word in &self.program_owner {
@ -30,15 +32,16 @@ impl Account {
bytes
}
/// Deserializes an account from a cursor.
#[cfg(feature = "host")]
pub fn from_cursor(cursor: &mut Cursor<&[u8]>) -> Result<Self, NssaCoreError> {
use crate::account::{Nonce, data::Data};
let mut u32_bytes = [0u8; 4];
let mut u128_bytes = [0u8; 16];
let mut u32_bytes = [0_u8; 4];
let mut u128_bytes = [0_u8; 16];
// program owner
let mut program_owner = [0u32; 8];
let mut program_owner = [0_u32; 8];
for word in &mut program_owner {
cursor.read_exact(&mut u32_bytes)?;
*word = u32::from_le_bytes(u32_bytes);
@ -65,51 +68,61 @@ impl Account {
}
impl Commitment {
pub fn to_byte_array(&self) -> [u8; 32] {
#[must_use]
pub const fn to_byte_array(&self) -> [u8; 32] {
self.0
}
#[cfg(feature = "host")]
pub fn from_byte_array(bytes: [u8; 32]) -> Self {
#[must_use]
pub const fn from_byte_array(bytes: [u8; 32]) -> Self {
Self(bytes)
}
/// Deserializes a commitment from a cursor.
#[cfg(feature = "host")]
pub fn from_cursor(cursor: &mut Cursor<&[u8]>) -> Result<Self, NssaCoreError> {
let mut bytes = [0u8; 32];
let mut bytes = [0_u8; 32];
cursor.read_exact(&mut bytes)?;
Ok(Self(bytes))
}
}
impl NullifierPublicKey {
pub fn to_byte_array(&self) -> [u8; 32] {
#[must_use]
pub const fn to_byte_array(&self) -> [u8; 32] {
self.0
}
}
#[cfg(feature = "host")]
impl Nullifier {
pub fn to_byte_array(&self) -> [u8; 32] {
#[must_use]
pub const fn to_byte_array(&self) -> [u8; 32] {
self.0
}
#[cfg(feature = "host")]
pub fn from_byte_array(bytes: [u8; 32]) -> Self {
#[must_use]
pub const fn from_byte_array(bytes: [u8; 32]) -> Self {
Self(bytes)
}
/// Deserializes a nullifier from a cursor.
pub fn from_cursor(cursor: &mut Cursor<&[u8]>) -> Result<Self, NssaCoreError> {
let mut bytes = [0u8; 32];
let mut bytes = [0_u8; 32];
cursor.read_exact(&mut bytes)?;
Ok(Self(bytes))
}
}
impl Ciphertext {
/// Serializes the ciphertext to bytes.
#[must_use]
pub fn to_bytes(&self) -> Vec<u8> {
let mut bytes = Vec::new();
let ciphertext_length: u32 = self.0.len() as u32;
let ciphertext_length: u32 =
u32::try_from(self.0.len()).expect("ciphertext length fits in u32");
bytes.extend_from_slice(&ciphertext_length.to_le_bytes());
bytes.extend_from_slice(&self.0);
@ -117,22 +130,27 @@ impl Ciphertext {
}
#[cfg(feature = "host")]
#[must_use]
pub fn into_inner(self) -> Vec<u8> {
self.0
}
#[cfg(feature = "host")]
pub fn from_inner(inner: Vec<u8>) -> Self {
#[must_use]
pub const fn from_inner(inner: Vec<u8>) -> Self {
Self(inner)
}
#[cfg(feature = "host")]
/// Deserializes ciphertext from a cursor.
pub fn from_cursor(cursor: &mut Cursor<&[u8]>) -> Result<Self, NssaCoreError> {
let mut u32_bytes = [0; 4];
cursor.read_exact(&mut u32_bytes)?;
let ciphertext_lenght = u32::from_le_bytes(u32_bytes);
let mut ciphertext = vec![0; ciphertext_lenght as usize];
let ciphertext_length =
usize::try_from(ciphertext_lenght).expect("ciphertext length fits in usize");
let mut ciphertext = vec![0; ciphertext_length];
cursor.read_exact(&mut ciphertext)?;
Ok(Self(ciphertext))
@ -141,10 +159,13 @@ impl Ciphertext {
#[cfg(feature = "host")]
impl Secp256k1Point {
/// Converts the point to bytes.
#[must_use]
pub fn to_bytes(&self) -> [u8; 33] {
self.0.clone().try_into().unwrap()
}
/// Deserializes a secp256k1 point from a cursor.
pub fn from_cursor(cursor: &mut Cursor<&[u8]>) -> Result<Self, NssaCoreError> {
let mut value = vec![0; 33];
cursor.read_exact(&mut value)?;
@ -153,7 +174,8 @@ impl Secp256k1Point {
}
impl AccountId {
pub fn to_bytes(&self) -> [u8; 32] {
#[must_use]
pub const fn to_bytes(&self) -> [u8; 32] {
*self.value()
}
}
@ -163,7 +185,7 @@ mod tests {
use super::*;
#[test]
fn test_enconding() {
fn enconding() {
let account = Account {
program_owner: [1, 2, 3, 4, 5, 6, 7, 8],
balance: 123456789012345678901234567890123456,
@ -184,7 +206,7 @@ mod tests {
}
#[test]
fn test_commitment_to_bytes() {
fn commitment_to_bytes() {
let commitment = Commitment((0..32).collect::<Vec<u8>>().try_into().unwrap());
let expected_bytes: [u8; 32] = (0..32).collect::<Vec<u8>>().try_into().unwrap();
@ -194,7 +216,7 @@ mod tests {
#[cfg(feature = "host")]
#[test]
fn test_nullifier_to_bytes() {
fn nullifier_to_bytes() {
let nullifier = Nullifier((0..32).collect::<Vec<u8>>().try_into().unwrap());
let expected_bytes: [u8; 32] = (0..32).collect::<Vec<u8>>().try_into().unwrap();
@ -204,7 +226,7 @@ mod tests {
#[cfg(feature = "host")]
#[test]
fn test_commitment_to_bytes_roundtrip() {
fn commitment_to_bytes_roundtrip() {
let commitment = Commitment((0..32).collect::<Vec<u8>>().try_into().unwrap());
let bytes = commitment.to_byte_array();
let mut cursor = Cursor::new(bytes.as_ref());
@ -214,7 +236,7 @@ mod tests {
#[cfg(feature = "host")]
#[test]
fn test_nullifier_to_bytes_roundtrip() {
fn nullifier_to_bytes_roundtrip() {
let nullifier = Nullifier((0..32).collect::<Vec<u8>>().try_into().unwrap());
let bytes = nullifier.to_byte_array();
let mut cursor = Cursor::new(bytes.as_ref());
@ -224,7 +246,7 @@ mod tests {
#[cfg(feature = "host")]
#[test]
fn test_account_to_bytes_roundtrip() {
fn account_to_bytes_roundtrip() {
let account = Account {
program_owner: [1, 2, 3, 4, 5, 6, 7, 8],
balance: 123456789012345678901234567890123456,

View File

@ -1,18 +1,16 @@
use borsh::{BorshDeserialize, BorshSerialize};
use chacha20::{
ChaCha20,
cipher::{KeyIvInit, StreamCipher},
cipher::{KeyIvInit as _, StreamCipher as _},
};
use risc0_zkvm::sha::{Impl, Sha256};
use risc0_zkvm::sha::{Impl, Sha256 as _};
use serde::{Deserialize, Serialize};
#[cfg(feature = "host")]
pub mod shared_key_derivation;
#[cfg(feature = "host")]
pub use shared_key_derivation::{EphemeralPublicKey, EphemeralSecretKey, ViewingPublicKey};
use crate::{Commitment, account::Account};
#[cfg(feature = "host")]
pub mod shared_key_derivation;
pub type Scalar = [u8; 32];
@ -28,19 +26,25 @@ pub struct Ciphertext(pub(crate) Vec<u8>);
#[cfg(any(feature = "host", test))]
impl std::fmt::Debug for Ciphertext {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let hex: String = self.0.iter().map(|b| format!("{b:02x}")).collect();
use std::fmt::Write as _;
let hex: String = self.0.iter().fold(String::new(), |mut acc, b| {
write!(acc, "{b:02x}").expect("writing to string should not fail");
acc
});
write!(f, "Ciphertext({hex})")
}
}
impl EncryptionScheme {
#[must_use]
pub fn encrypt(
account: &Account,
shared_secret: &SharedSecretKey,
commitment: &Commitment,
output_index: u32,
) -> Ciphertext {
let mut buffer = account.to_bytes().to_vec();
let mut buffer = account.to_bytes();
Self::symmetric_transform(&mut buffer, shared_secret, commitment, output_index);
Ciphertext(buffer)
}
@ -72,6 +76,11 @@ impl EncryptionScheme {
}
#[cfg(feature = "host")]
#[expect(
clippy::print_stdout,
reason = "This is the current way to debug things. TODO: fix later"
)]
#[must_use]
pub fn decrypt(
ciphertext: &Ciphertext,
shared_secret: &SharedSecretKey,
@ -79,7 +88,7 @@ impl EncryptionScheme {
output_index: u32,
) -> Option<Account> {
use std::io::Cursor;
let mut buffer = ciphertext.0.to_owned();
let mut buffer = ciphertext.0.clone();
Self::symmetric_transform(&mut buffer, shared_secret, commitment, output_index);
let mut cursor = Cursor::new(buffer.as_slice());
@ -87,12 +96,12 @@ impl EncryptionScheme {
.inspect_err(|err| {
println!(
"Failed to decode {ciphertext:?} \n
with secret {:?} ,\n
with secret {:?} ,\n
commitment {commitment:?} ,\n
and output_index {output_index} ,\n
with error {err:?}",
shared_secret.0
)
);
})
.ok()
}

View File

@ -1,9 +1,16 @@
#![expect(
clippy::arithmetic_side_effects,
reason = "Multiplication of finite field elements can't overflow"
)]
use std::fmt::Write as _;
use borsh::{BorshDeserialize, BorshSerialize};
use k256::{
AffinePoint, EncodedPoint, FieldBytes, ProjectivePoint,
elliptic_curve::{
PrimeField,
sec1::{FromEncodedPoint, ToEncodedPoint},
PrimeField as _,
sec1::{FromEncodedPoint as _, ToEncodedPoint as _},
},
};
use serde::{Deserialize, Serialize};
@ -15,13 +22,17 @@ pub struct Secp256k1Point(pub Vec<u8>);
impl std::fmt::Debug for Secp256k1Point {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let hex: String = self.0.iter().map(|b| format!("{b:02x}")).collect();
let hex: String = self.0.iter().fold(String::new(), |mut acc, b| {
write!(acc, "{b:02x}").expect("writing to string should not fail");
acc
});
write!(f, "Secp256k1Point({hex})")
}
}
impl Secp256k1Point {
pub fn from_scalar(value: Scalar) -> Secp256k1Point {
#[must_use]
pub fn from_scalar(value: Scalar) -> Self {
let x_bytes: FieldBytes = value.into();
let x = k256::Scalar::from_repr(x_bytes).unwrap();
@ -38,11 +49,13 @@ pub type EphemeralPublicKey = Secp256k1Point;
pub type ViewingPublicKey = Secp256k1Point;
impl From<&EphemeralSecretKey> for EphemeralPublicKey {
fn from(value: &EphemeralSecretKey) -> Self {
Secp256k1Point::from_scalar(*value)
Self::from_scalar(*value)
}
}
impl SharedSecretKey {
/// Creates a new shared secret key from a scalar and a point.
#[must_use]
pub fn new(scalar: &Scalar, point: &Secp256k1Point) -> Self {
let scalar = k256::Scalar::from_repr((*scalar).into()).unwrap();
let point: [u8; 33] = point.0.clone().try_into().unwrap();
@ -53,9 +66,9 @@ impl SharedSecretKey {
let shared = ProjectivePoint::from(pubkey_affine) * scalar;
let shared_affine = shared.to_affine();
let encoded = shared_affine.to_encoded_point(false);
let x_bytes_slice = encoded.x().unwrap();
let mut x_bytes = [0u8; 32];
let shared_affine_encoded = shared_affine.to_encoded_point(false);
let x_bytes_slice = shared_affine_encoded.x().unwrap();
let mut x_bytes = [0_u8; 32];
x_bytes.copy_from_slice(x_bytes_slice);
Self(x_bytes)

View File

@ -1,10 +1,7 @@
pub mod account;
mod circuit_io;
mod commitment;
mod encoding;
pub mod encryption;
mod nullifier;
pub mod program;
#![expect(
clippy::multiple_inherent_impl,
reason = "We prefer to group methods by functionality rather than by type for encoding"
)]
pub use circuit_io::{PrivacyPreservingCircuitInput, PrivacyPreservingCircuitOutput};
pub use commitment::{
@ -14,5 +11,13 @@ pub use commitment::{
pub use encryption::{EncryptionScheme, SharedSecretKey};
pub use nullifier::{Nullifier, NullifierPublicKey, NullifierSecretKey};
pub mod account;
mod circuit_io;
mod commitment;
mod encoding;
pub mod encryption;
mod nullifier;
pub mod program;
#[cfg(feature = "host")]
pub mod error;

View File

@ -1,5 +1,5 @@
use borsh::{BorshDeserialize, BorshSerialize};
use risc0_zkvm::sha::{Impl, Sha256};
use risc0_zkvm::sha::{Impl, Sha256 as _};
use serde::{Deserialize, Serialize};
use crate::{Commitment, account::AccountId};
@ -16,7 +16,12 @@ impl From<&NullifierPublicKey> for AccountId {
let mut bytes = [0; 64];
bytes[0..32].copy_from_slice(PRIVATE_ACCOUNT_ID_PREFIX);
bytes[32..].copy_from_slice(&value.0);
AccountId::new(Impl::hash_bytes(&bytes).as_bytes().try_into().unwrap())
Self::new(
Impl::hash_bytes(&bytes)
.as_bytes()
.try_into()
.expect("Conversion should not fail"),
)
}
}
@ -28,15 +33,20 @@ impl AsRef<[u8]> for NullifierPublicKey {
impl From<&NullifierSecretKey> for NullifierPublicKey {
fn from(value: &NullifierSecretKey) -> Self {
let mut bytes = Vec::new();
const PREFIX: &[u8; 8] = b"LEE/keys";
const SUFFIX_1: &[u8; 1] = &[7];
const SUFFIX_2: &[u8; 23] = &[0; 23];
let mut bytes = Vec::new();
bytes.extend_from_slice(PREFIX);
bytes.extend_from_slice(value);
bytes.extend_from_slice(SUFFIX_1);
bytes.extend_from_slice(SUFFIX_2);
Self(Impl::hash_bytes(&bytes).as_bytes().try_into().unwrap())
Self(
Impl::hash_bytes(&bytes)
.as_bytes()
.try_into()
.expect("hash should be exactly 32 bytes long"),
)
}
}
@ -52,12 +62,19 @@ pub struct Nullifier(pub(super) [u8; 32]);
#[cfg(any(feature = "host", test))]
impl std::fmt::Debug for Nullifier {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let hex: String = self.0.iter().map(|b| format!("{b:02x}")).collect();
use std::fmt::Write as _;
let hex: String = self.0.iter().fold(String::new(), |mut acc, b| {
write!(acc, "{b:02x}").expect("writing to string should not fail");
acc
});
write!(f, "Nullifier({hex})")
}
}
impl Nullifier {
/// Computes a nullifier for an account update.
#[must_use]
pub fn for_account_update(commitment: &Commitment, nsk: &NullifierSecretKey) -> Self {
const UPDATE_PREFIX: &[u8; 32] = b"/NSSA/v0.2/Nullifier/Update/\x00\x00\x00\x00";
let mut bytes = UPDATE_PREFIX.to_vec();
@ -66,6 +83,8 @@ impl Nullifier {
Self(Impl::hash_bytes(&bytes).as_bytes().try_into().unwrap())
}
/// Computes a nullifier for an account initialization.
#[must_use]
pub fn for_account_initialization(npk: &NullifierPublicKey) -> Self {
const INIT_PREFIX: &[u8; 32] = b"/NSSA/v0.2/Nullifier/Initialize/";
let mut bytes = INIT_PREFIX.to_vec();
@ -79,8 +98,8 @@ mod tests {
use super::*;
#[test]
fn test_constructor_for_account_update() {
let commitment = Commitment((0..32u8).collect::<Vec<_>>().try_into().unwrap());
fn constructor_for_account_update() {
let commitment = Commitment((0..32_u8).collect::<Vec<_>>().try_into().unwrap());
let nsk = [0x42; 32];
let expected_nullifier = Nullifier([
148, 243, 116, 209, 140, 231, 211, 61, 35, 62, 114, 110, 143, 224, 82, 201, 221, 34,
@ -91,7 +110,7 @@ mod tests {
}
#[test]
fn test_constructor_for_account_initialization() {
fn constructor_for_account_initialization() {
let npk = NullifierPublicKey([
112, 188, 193, 129, 150, 55, 228, 67, 88, 168, 29, 151, 5, 92, 23, 190, 17, 162, 164,
255, 29, 105, 42, 186, 43, 11, 157, 168, 132, 225, 17, 163,
@ -105,7 +124,7 @@ mod tests {
}
#[test]
fn test_from_secret_key() {
fn from_secret_key() {
let nsk = [
57, 5, 64, 115, 153, 56, 184, 51, 207, 238, 99, 165, 147, 214, 213, 151, 30, 251, 30,
196, 134, 22, 224, 211, 237, 120, 136, 225, 188, 220, 249, 28,
@ -119,7 +138,7 @@ mod tests {
}
#[test]
fn test_account_id_from_nullifier_public_key() {
fn account_id_from_nullifier_public_key() {
let nsk = [
57, 5, 64, 115, 153, 56, 184, 51, 207, 238, 99, 165, 147, 214, 213, 151, 30, 251, 30,
196, 134, 22, 224, 211, 237, 120, 136, 225, 188, 220, 249, 28,

View File

@ -5,17 +5,17 @@ use serde::{Deserialize, Serialize};
use crate::account::{Account, AccountId, AccountWithMetadata};
pub type ProgramId = [u32; 8];
pub type InstructionData = Vec<u32>;
pub const DEFAULT_PROGRAM_ID: ProgramId = [0; 8];
pub const MAX_NUMBER_CHAINED_CALLS: usize = 10;
pub type ProgramId = [u32; 8];
pub type InstructionData = Vec<u32>;
pub struct ProgramInput<T> {
pub pre_states: Vec<AccountWithMetadata>,
pub instruction: T,
}
/// A 32-byte seed used to compute a *Program-Derived AccountId* (PDA).
/// A 32-byte seed used to compute a *Program-Derived `AccountId`* (PDA).
///
/// Each program can derive up to `2^256` unique account IDs by choosing different
/// seeds. PDAs allow programs to control namespaced account identifiers without
@ -24,28 +24,15 @@ pub struct ProgramInput<T> {
pub struct PdaSeed([u8; 32]);
impl PdaSeed {
#[must_use]
pub const fn new(value: [u8; 32]) -> Self {
Self(value)
}
}
pub fn compute_authorized_pdas(
caller_program_id: Option<ProgramId>,
pda_seeds: &[PdaSeed],
) -> HashSet<AccountId> {
caller_program_id
.map(|caller_program_id| {
pda_seeds
.iter()
.map(|pda_seed| AccountId::from((&caller_program_id, pda_seed)))
.collect()
})
.unwrap_or_default()
}
impl From<(&ProgramId, &PdaSeed)> for AccountId {
fn from(value: (&ProgramId, &PdaSeed)) -> Self {
use risc0_zkvm::sha::{Impl, Sha256};
use risc0_zkvm::sha::{Impl, Sha256 as _};
const PROGRAM_DERIVED_ACCOUNT_ID_PREFIX: &[u8; 32] =
b"/NSSA/v0.2/AccountId/PDA/\x00\x00\x00\x00\x00\x00\x00";
@ -55,7 +42,7 @@ impl From<(&ProgramId, &PdaSeed)> for AccountId {
bytemuck::try_cast_slice(value.0).expect("ProgramId should be castable to &[u8]");
bytes[32..64].copy_from_slice(program_id_bytes);
bytes[64..].copy_from_slice(&value.1.0);
AccountId::new(
Self::new(
Impl::hash_bytes(&bytes)
.as_bytes()
.try_into()
@ -66,10 +53,10 @@ impl From<(&ProgramId, &PdaSeed)> for AccountId {
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)]
pub struct ChainedCall {
/// The program ID of the program to execute
/// The program ID of the program to execute.
pub program_id: ProgramId,
pub pre_states: Vec<AccountWithMetadata>,
/// The instruction data to pass
/// The instruction data to pass.
pub instruction_data: InstructionData,
pub pda_seeds: Vec<PdaSeed>,
}
@ -90,6 +77,7 @@ impl ChainedCall {
}
}
#[must_use]
pub fn with_pda_seeds(mut self, pda_seeds: Vec<PdaSeed>) -> Self {
self.pda_seeds = pda_seeds;
self
@ -97,6 +85,7 @@ impl ChainedCall {
}
/// Represents the final state of an `Account` after a program execution.
///
/// A post state may optionally request that the executing program
/// becomes the owner of the account (a “claim”). This is used to signal
/// that the program intends to take ownership of the account.
@ -110,7 +99,8 @@ pub struct AccountPostState {
impl AccountPostState {
/// Creates a post state without a claim request.
/// The executing program is not requesting ownership of the account.
pub fn new(account: Account) -> Self {
#[must_use]
pub const fn new(account: Account) -> Self {
Self {
account,
claim: false,
@ -120,7 +110,8 @@ impl AccountPostState {
/// Creates a post state that requests ownership of the account.
/// This indicates that the executing program intends to claim the
/// account as its own and is allowed to mutate it.
pub fn new_claimed(account: Account) -> Self {
#[must_use]
pub const fn new_claimed(account: Account) -> Self {
Self {
account,
claim: true,
@ -129,6 +120,7 @@ impl AccountPostState {
/// Creates a post state that requests ownership of the account
/// if the account's program owner is the default program ID.
#[must_use]
pub fn new_claimed_if_default(account: Account) -> Self {
let claim = account.program_owner == DEFAULT_PROGRAM_ID;
Self { account, claim }
@ -136,21 +128,24 @@ impl AccountPostState {
/// Returns `true` if this post state requests that the account
/// be claimed (owned) by the executing program.
pub fn requires_claim(&self) -> bool {
#[must_use]
pub const fn requires_claim(&self) -> bool {
self.claim
}
/// Returns the underlying account
pub fn account(&self) -> &Account {
/// Returns the underlying account.
#[must_use]
pub const fn account(&self) -> &Account {
&self.account
}
/// Returns the underlying account
pub fn account_mut(&mut self) -> &mut Account {
/// Returns the underlying account.
pub const fn account_mut(&mut self) -> &mut Account {
&mut self.account
}
/// Consumes the post state and returns the underlying account
/// Consumes the post state and returns the underlying account.
#[must_use]
pub fn into_account(self) -> Account {
self.account
}
@ -159,14 +154,58 @@ impl AccountPostState {
#[derive(Serialize, Deserialize, Clone)]
#[cfg_attr(any(feature = "host", test), derive(Debug, PartialEq, Eq))]
pub struct ProgramOutput {
/// The instruction data the program received to produce this output
/// The instruction data the program received to produce this output.
pub instruction_data: InstructionData,
/// The account pre states the program received to produce this output
/// The account pre states the program received to produce this output.
pub pre_states: Vec<AccountWithMetadata>,
pub post_states: Vec<AccountPostState>,
pub chained_calls: Vec<ChainedCall>,
}
/// Representation of a number as `lo + hi * 2^128`.
#[derive(PartialEq, Eq)]
struct WrappedBalanceSum {
lo: u128,
hi: u128,
}
impl WrappedBalanceSum {
/// Constructs a [`WrappedBalanceSum`] from an iterator of balances.
///
/// Returns [`None`] if balance sum overflows `lo + hi * 2^128` representation, which is not
/// expected in practical scenarios.
fn from_balances(balances: impl Iterator<Item = u128>) -> Option<Self> {
let mut wrapped = Self { lo: 0, hi: 0 };
for balance in balances {
let (new_sum, did_overflow) = wrapped.lo.overflowing_add(balance);
if did_overflow {
wrapped.hi = wrapped.hi.checked_add(1)?;
}
wrapped.lo = new_sum;
}
Some(wrapped)
}
}
#[must_use]
pub fn compute_authorized_pdas(
caller_program_id: Option<ProgramId>,
pda_seeds: &[PdaSeed],
) -> HashSet<AccountId> {
caller_program_id
.map(|caller_program_id| {
pda_seeds
.iter()
.map(|pda_seed| AccountId::from((&caller_program_id, pda_seed)))
.collect()
})
.unwrap_or_default()
}
/// Reads the NSSA inputs from the guest environment.
#[must_use]
pub fn read_nssa_inputs<T: DeserializeOwned>() -> (ProgramInput<T>, InstructionData) {
let pre_states: Vec<AccountWithMetadata> = env::read();
let instruction_words: InstructionData = env::read();
@ -209,12 +248,13 @@ pub fn write_nssa_outputs_with_chained_call(
env::commit(&output);
}
/// Validates well-behaved program execution
/// Validates well-behaved program execution.
///
/// # Parameters
/// - `pre_states`: The list of input accounts, each annotated with authorization metadata.
/// - `post_states`: The list of resulting accounts after executing the program logic.
/// - `executing_program_id`: The identifier of the program that was executed.
#[must_use]
pub fn validate_execution(
pre_states: &[AccountWithMetadata],
post_states: &[AccountPostState],
@ -298,39 +338,12 @@ fn validate_uniqueness_of_account_ids(pre_states: &[AccountWithMetadata]) -> boo
number_of_accounts == number_of_account_ids
}
/// Representation of a number as `lo + hi * 2^128`.
#[derive(PartialEq, Eq)]
struct WrappedBalanceSum {
lo: u128,
hi: u128,
}
impl WrappedBalanceSum {
/// Constructs a [`WrappedBalanceSum`] from an iterator of balances.
///
/// Returns [`None`] if balance sum overflows `lo + hi * 2^128` representation, which is not
/// expected in practical scenarios.
fn from_balances(balances: impl Iterator<Item = u128>) -> Option<Self> {
let mut wrapped = WrappedBalanceSum { lo: 0, hi: 0 };
for balance in balances {
let (new_sum, did_overflow) = wrapped.lo.overflowing_add(balance);
if did_overflow {
wrapped.hi = wrapped.hi.checked_add(1)?;
}
wrapped.lo = new_sum;
}
Some(wrapped)
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_post_state_new_with_claim_constructor() {
fn post_state_new_with_claim_constructor() {
let account = Account {
program_owner: [1, 2, 3, 4, 5, 6, 7, 8],
balance: 1337,
@ -345,7 +358,7 @@ mod tests {
}
#[test]
fn test_post_state_new_without_claim_constructor() {
fn post_state_new_without_claim_constructor() {
let account = Account {
program_owner: [1, 2, 3, 4, 5, 6, 7, 8],
balance: 1337,
@ -360,7 +373,7 @@ mod tests {
}
#[test]
fn test_post_state_account_getter() {
fn post_state_account_getter() {
let mut account = Account {
program_owner: [1, 2, 3, 4, 5, 6, 7, 8],
balance: 1337,

Some files were not shown because too many files have changed in this diff Show More