Merge branch 'main' into marvin/private_keys

jonesmarvin8 2026-01-27 16:30:11 -05:00
commit 819bb1b7f3
41 changed files with 3581 additions and 286 deletions

View File

@ -99,7 +99,7 @@ jobs:
run: rustup install
- name: Install nextest
run: cargo install cargo-nextest
run: cargo install --locked cargo-nextest
- name: Run tests
env:

2285
Cargo.lock generated

File diff suppressed because it is too large.

View File

@ -2,16 +2,19 @@
resolver = "3"
members = [
"integration_tests",
"sequencer_runner",
"storage",
"key_protocol",
"sequencer_rpc",
"mempool",
"wallet",
"sequencer_core",
"common",
"nssa",
"nssa/core",
"sequencer_core",
"sequencer_rpc",
"sequencer_runner",
"indexer_service",
"indexer_service/protocol",
"indexer_service/rpc",
"program_methods",
"program_methods/guest",
"test_program_methods",
@ -19,6 +22,7 @@ members = [
"examples/program_deployment",
"examples/program_deployment/methods",
"examples/program_deployment/methods/guest",
"bedrock_client",
]
[workspace.dependencies]
@ -31,8 +35,12 @@ key_protocol = { path = "key_protocol" }
sequencer_core = { path = "sequencer_core" }
sequencer_rpc = { path = "sequencer_rpc" }
sequencer_runner = { path = "sequencer_runner" }
indexer_service = { path = "indexer_service" }
indexer_service_protocol = { path = "indexer_service/protocol" }
indexer_service_rpc = { path = "indexer_service/rpc" }
wallet = { path = "wallet" }
test_program_methods = { path = "test_program_methods" }
bedrock_client = { path = "bedrock_client" }
tokio = { version = "1.28.2", features = [
"net",
@ -40,6 +48,7 @@ tokio = { version = "1.28.2", features = [
"sync",
"fs",
] }
tokio-util = "0.7.18"
risc0-zkvm = { version = "3.0.3", features = ['std'] }
risc0-build = "3.0.3"
anyhow = "1.0.98"
@ -50,6 +59,7 @@ serde = { version = "1.0.60", default-features = false, features = ["derive"] }
serde_json = "1.0.81"
actix = "0.13.0"
actix-cors = "0.6.1"
jsonrpsee = "0.26.0"
futures = "0.3"
actix-rt = "*"
lazy_static = "1.5.0"
@ -75,6 +85,12 @@ chrono = "0.4.41"
borsh = "1.5.7"
base58 = "0.2.0"
itertools = "0.14.0"
url = "2.5.4"
schemars = "1.2.0"
logos-blockchain-common-http-client = { git = "https://github.com/logos-blockchain/logos-blockchain.git" }
logos-blockchain-key-management-system-service = { git = "https://github.com/logos-blockchain/logos-blockchain.git" }
logos-blockchain-core = { git = "https://github.com/logos-blockchain/logos-blockchain.git" }
rocksdb = { version = "0.24.0", default-features = false, features = [
"snappy",
@ -93,4 +109,4 @@ actix-web = { version = "=4.1.0", default-features = false, features = [
"macros",
] }
clap = { version = "4.5.42", features = ["derive", "env"] }
reqwest = { version = "0.11.16", features = ["json"] }
reqwest = { version = "0.12", features = ["json", "rustls-tls", "stream"] }

10
bedrock_client/Cargo.toml Normal file
View File

@ -0,0 +1,10 @@
[package]
name = "bedrock_client"
version = "0.1.0"
edition = "2024"
[dependencies]
reqwest.workspace = true
anyhow.workspace = true
logos-blockchain-common-http-client.workspace = true
logos-blockchain-core.workspace = true

32
bedrock_client/src/lib.rs Normal file
View File

@ -0,0 +1,32 @@
use anyhow::Result;
pub use logos_blockchain_common_http_client::{BasicAuthCredentials, CommonHttpClient, Error};
use logos_blockchain_core::mantle::SignedMantleTx;
use reqwest::{Client, Url};
// Simple wrapper around `CommonHttpClient`;
// may be extended for our purposes in the future.
pub struct BedrockClient {
http_client: CommonHttpClient,
node_url: Url,
}
impl BedrockClient {
pub fn new(auth: Option<BasicAuthCredentials>, node_url: Url) -> Result<Self> {
let client = Client::builder()
// Add more builder options here if needed
.timeout(std::time::Duration::from_secs(60))
.build()?;
let http_client = CommonHttpClient::new_with_client(client, auth);
Ok(Self {
http_client,
node_url,
})
}
pub async fn post_transaction(&self, tx: SignedMantleTx) -> Result<(), Error> {
self.http_client
.post_transaction(self.node_url.clone(), tx)
.await
}
}
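
A minimal usage sketch of the wrapper above, assuming a locally reachable Logos node (the URL and the absence of auth credentials are illustrative, not part of this diff):

use bedrock_client::BedrockClient;
use reqwest::Url;

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // Hypothetical local node endpoint; no basic-auth credentials.
    let node_url = Url::parse("http://localhost:8080")?;
    let _client = BedrockClient::new(None, node_url)?;
    // A SignedMantleTx would then be submitted with:
    // _client.post_transaction(tx).await?;
    Ok(())
}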

View File

@ -52,7 +52,7 @@ if [ -d ".git" ]; then
git reset --hard origin/main
else
echo "Cloning repository..."
git clone https://github.com/vacp2p/nescience-testnet.git .
git clone https://github.com/logos-blockchain/lssa.git .
git checkout main
fi

View File

@ -23,7 +23,7 @@ pub type BlockHash = [u8; 32];
pub type BlockId = u64;
pub type TimeStamp = u64;
#[derive(Debug, Clone)]
#[derive(Debug, Clone, BorshSerialize, BorshDeserialize)]
pub struct BlockHeader {
pub block_id: BlockId,
pub prev_block_hash: BlockHash,
@ -32,18 +32,26 @@ pub struct BlockHeader {
pub signature: nssa::Signature,
}
#[derive(Debug, Clone)]
#[derive(Debug, Clone, BorshSerialize, BorshDeserialize)]
pub struct BlockBody {
pub transactions: Vec<EncodedTransaction>,
}
#[derive(Debug, Clone)]
#[derive(Debug, Clone, BorshSerialize, BorshDeserialize)]
pub enum BedrockStatus {
Pending,
Safe,
Finalized,
}
#[derive(Debug, BorshSerialize, BorshDeserialize)]
pub struct Block {
pub header: BlockHeader,
pub body: BlockBody,
pub bedrock_status: BedrockStatus,
}
#[derive(Debug, PartialEq, Eq, BorshSerialize, BorshDeserialize)]
#[derive(Debug, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)]
pub struct HashableBlockData {
pub block_id: BlockId,
pub prev_block_hash: BlockHash,
@ -52,7 +60,7 @@ pub struct HashableBlockData {
}
impl HashableBlockData {
pub fn into_block(self, signing_key: &nssa::PrivateKey) -> Block {
pub fn into_pending_block(self, signing_key: &nssa::PrivateKey) -> Block {
let data_bytes = borsh::to_vec(&self).unwrap();
let signature = nssa::Signature::new(signing_key, &data_bytes);
let hash = OwnHasher::hash(&data_bytes);
@ -67,6 +75,7 @@ impl HashableBlockData {
body: BlockBody {
transactions: self.transactions,
},
bedrock_status: BedrockStatus::Pending,
}
}
}
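
The new Borsh derives exist so that full Block values, including bedrock_status, can be persisted (see the storage changes below). A hedged round-trip sketch:

// Sketch: Borsh round-trip for a Block; `block` is assumed to come from
// HashableBlockData::into_pending_block.
fn roundtrip(block: &common::block::Block) -> anyhow::Result<()> {
    let bytes = borsh::to_vec(block)?;
    let _decoded: common::block::Block = borsh::from_slice(&bytes)?;
    Ok(())
}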

View File

@ -30,7 +30,7 @@ pub fn produce_dummy_block(
transactions,
};
block_data.into_block(&sequencer_sign_key_for_testing())
block_data.into_pending_block(&sequencer_sign_key_for_testing())
}
pub fn produce_dummy_empty_transaction() -> EncodedTransaction {

View File

@ -0,0 +1,17 @@
[package]
name = "indexer_service"
version = "0.1.0"
edition = "2024"
[dependencies]
indexer_service_protocol.workspace = true
indexer_service_rpc = { workspace = true, features = ["server"] }
clap = { workspace = true, features = ["derive"] }
anyhow.workspace = true
tokio.workspace = true
tokio-util.workspace = true
env_logger.workspace = true
log.workspace = true
jsonrpsee.workspace = true
async-trait = "0.1.89"

View File

@ -0,0 +1,64 @@
# Chef stage - uses pre-built cargo-chef image
FROM lukemathwalker/cargo-chef:latest-rust-1.91.1-slim-trixie AS chef
# Install build dependencies
RUN apt-get update && apt-get install -y \
pkg-config \
libssl-dev \
libclang-dev \
clang \
curl \
&& rm -rf /var/lib/apt/lists/*
WORKDIR /indexer_service
# Planner stage - generates dependency recipe
FROM chef AS planner
COPY . .
RUN cargo chef prepare --bin indexer_service --recipe-path recipe.json
# Builder stage - builds dependencies and application
FROM chef AS builder
COPY --from=planner /indexer_service/recipe.json recipe.json
# Build dependencies only (this layer will be cached)
RUN cargo chef cook --bin indexer_service --release --recipe-path recipe.json
# Copy source code
COPY . .
# Build the actual application
RUN cargo build --release --bin indexer_service
# Strip debug symbols to reduce binary size
RUN strip /indexer_service/target/release/indexer_service
# Runtime stage - minimal image
FROM debian:trixie-slim
# Install curl so the HEALTHCHECK below can run in the runtime image
RUN apt-get update && apt-get install -y curl && rm -rf /var/lib/apt/lists/*
# Create non-root user for security
RUN useradd -m -u 1000 -s /bin/bash indexer_service_user
# Copy binary from builder
COPY --from=builder --chown=indexer_service_user:indexer_service_user /indexer_service/target/release/indexer_service /usr/local/bin/indexer_service
# Expose default port
EXPOSE 8779
# Health check
HEALTHCHECK --interval=30s --timeout=3s --start-period=5s --retries=3 \
CMD curl -sf http://localhost:8779 \
-H "Content-Type: application/json" \
-d "{ \
\"jsonrpc\": \"2.0\", \
\"method\": \"get_schema\", \
\"params\": {}, \
\"id\": 1 \
}" || exit 1
# Run the application
ENV RUST_LOG=info
USER indexer_service_user
WORKDIR /indexer_service
CMD ["indexer_service"]

View File

@ -0,0 +1,9 @@
services:
indexer_service:
image: lssa/indexer_service
build:
context: ..
dockerfile: indexer_service/Dockerfile
container_name: indexer_service
ports:
- "8779:8779"

View File

@ -0,0 +1,18 @@
[package]
name = "indexer_service_protocol"
version = "0.1.0"
edition = "2024"
[dependencies]
nssa_core = { workspace = true, optional = true, features = ["host"] }
nssa = { workspace = true, optional = true }
common = { workspace = true, optional = true }
serde = { workspace = true, features = ["derive"] }
schemars.workspace = true
base64.workspace = true
borsh = { workspace = true, optional = true }
[features]
# Enable conversion to/from NSSA core types
convert = ["dep:nssa_core", "dep:nssa", "dep:common", "dep:borsh"]

View File

@ -0,0 +1,648 @@
//! Conversions between indexer_service_protocol types and nssa/nssa_core types
use crate::*;
// ============================================================================
// Account-related conversions
// ============================================================================
impl From<nssa_core::account::AccountId> for AccountId {
fn from(value: nssa_core::account::AccountId) -> Self {
Self {
value: value.into_value(),
}
}
}
impl From<AccountId> for nssa_core::account::AccountId {
fn from(value: AccountId) -> Self {
let AccountId { value } = value;
nssa_core::account::AccountId::new(value)
}
}
impl From<nssa_core::account::Account> for Account {
fn from(value: nssa_core::account::Account) -> Self {
let nssa_core::account::Account {
program_owner,
balance,
data,
nonce,
} = value;
Self {
program_owner,
balance,
data: data.into(),
nonce,
}
}
}
impl TryFrom<Account> for nssa_core::account::Account {
type Error = nssa_core::account::data::DataTooBigError;
fn try_from(value: Account) -> Result<Self, Self::Error> {
let Account {
program_owner,
balance,
data,
nonce,
} = value;
Ok(nssa_core::account::Account {
program_owner,
balance,
data: data.try_into()?,
nonce,
})
}
}
impl From<nssa_core::account::Data> for Data {
fn from(value: nssa_core::account::Data) -> Self {
Self(value.into_inner())
}
}
impl TryFrom<Data> for nssa_core::account::Data {
type Error = nssa_core::account::data::DataTooBigError;
fn try_from(value: Data) -> Result<Self, Self::Error> {
nssa_core::account::Data::try_from(value.0)
}
}
// ============================================================================
// Commitment and Nullifier conversions
// ============================================================================
impl From<nssa_core::Commitment> for Commitment {
fn from(value: nssa_core::Commitment) -> Self {
Self(value.to_byte_array())
}
}
impl From<Commitment> for nssa_core::Commitment {
fn from(value: Commitment) -> Self {
nssa_core::Commitment::from_byte_array(value.0)
}
}
impl From<nssa_core::Nullifier> for Nullifier {
fn from(value: nssa_core::Nullifier) -> Self {
Self(value.to_byte_array())
}
}
impl From<Nullifier> for nssa_core::Nullifier {
fn from(value: Nullifier) -> Self {
nssa_core::Nullifier::from_byte_array(value.0)
}
}
impl From<nssa_core::CommitmentSetDigest> for CommitmentSetDigest {
fn from(value: nssa_core::CommitmentSetDigest) -> Self {
Self(value)
}
}
impl From<CommitmentSetDigest> for nssa_core::CommitmentSetDigest {
fn from(value: CommitmentSetDigest) -> Self {
value.0
}
}
// ============================================================================
// Encryption-related conversions
// ============================================================================
impl From<nssa_core::encryption::Ciphertext> for Ciphertext {
fn from(value: nssa_core::encryption::Ciphertext) -> Self {
Self(value.into_inner())
}
}
impl From<Ciphertext> for nssa_core::encryption::Ciphertext {
fn from(value: Ciphertext) -> Self {
nssa_core::encryption::Ciphertext::from_inner(value.0)
}
}
impl From<nssa_core::encryption::EphemeralPublicKey> for EphemeralPublicKey {
fn from(value: nssa_core::encryption::EphemeralPublicKey) -> Self {
Self(value.0)
}
}
impl From<EphemeralPublicKey> for nssa_core::encryption::EphemeralPublicKey {
fn from(value: EphemeralPublicKey) -> Self {
nssa_core::encryption::shared_key_derivation::Secp256k1Point(value.0)
}
}
// ============================================================================
// Signature and PublicKey conversions
// ============================================================================
impl From<nssa::Signature> for Signature {
fn from(value: nssa::Signature) -> Self {
let nssa::Signature { value } = value;
Self(value)
}
}
impl From<Signature> for nssa::Signature {
fn from(value: Signature) -> Self {
let Signature(sig_value) = value;
nssa::Signature { value: sig_value }
}
}
impl From<nssa::PublicKey> for PublicKey {
fn from(value: nssa::PublicKey) -> Self {
Self(*value.value())
}
}
impl TryFrom<PublicKey> for nssa::PublicKey {
type Error = nssa::error::NssaError;
fn try_from(value: PublicKey) -> Result<Self, Self::Error> {
nssa::PublicKey::try_new(value.0)
}
}
// ============================================================================
// Proof conversions
// ============================================================================
impl From<nssa::privacy_preserving_transaction::circuit::Proof> for Proof {
fn from(value: nssa::privacy_preserving_transaction::circuit::Proof) -> Self {
Self(value.into_inner())
}
}
impl From<Proof> for nssa::privacy_preserving_transaction::circuit::Proof {
fn from(value: Proof) -> Self {
nssa::privacy_preserving_transaction::circuit::Proof::from_inner(value.0)
}
}
// ============================================================================
// EncryptedAccountData conversions
// ============================================================================
impl From<nssa::privacy_preserving_transaction::message::EncryptedAccountData>
for EncryptedAccountData
{
fn from(value: nssa::privacy_preserving_transaction::message::EncryptedAccountData) -> Self {
Self {
ciphertext: value.ciphertext.into(),
epk: value.epk.into(),
view_tag: value.view_tag,
}
}
}
impl From<EncryptedAccountData>
for nssa::privacy_preserving_transaction::message::EncryptedAccountData
{
fn from(value: EncryptedAccountData) -> Self {
Self {
ciphertext: value.ciphertext.into(),
epk: value.epk.into(),
view_tag: value.view_tag,
}
}
}
// ============================================================================
// Transaction Message conversions
// ============================================================================
impl From<nssa::public_transaction::Message> for PublicMessage {
fn from(value: nssa::public_transaction::Message) -> Self {
let nssa::public_transaction::Message {
program_id,
account_ids,
nonces,
instruction_data,
} = value;
Self {
program_id,
account_ids: account_ids.into_iter().map(Into::into).collect(),
nonces,
instruction_data,
}
}
}
impl From<PublicMessage> for nssa::public_transaction::Message {
fn from(value: PublicMessage) -> Self {
let PublicMessage {
program_id,
account_ids,
nonces,
instruction_data,
} = value;
Self::new_preserialized(
program_id,
account_ids.into_iter().map(Into::into).collect(),
nonces,
instruction_data,
)
}
}
impl From<nssa::privacy_preserving_transaction::message::Message> for PrivacyPreservingMessage {
fn from(value: nssa::privacy_preserving_transaction::message::Message) -> Self {
let nssa::privacy_preserving_transaction::message::Message {
public_account_ids,
nonces,
public_post_states,
encrypted_private_post_states,
new_commitments,
new_nullifiers,
} = value;
Self {
public_account_ids: public_account_ids.into_iter().map(Into::into).collect(),
nonces,
public_post_states: public_post_states.into_iter().map(Into::into).collect(),
encrypted_private_post_states: encrypted_private_post_states
.into_iter()
.map(Into::into)
.collect(),
new_commitments: new_commitments.into_iter().map(Into::into).collect(),
new_nullifiers: new_nullifiers
.into_iter()
.map(|(n, d)| (n.into(), d.into()))
.collect(),
}
}
}
impl TryFrom<PrivacyPreservingMessage> for nssa::privacy_preserving_transaction::message::Message {
type Error = nssa_core::account::data::DataTooBigError;
fn try_from(value: PrivacyPreservingMessage) -> Result<Self, Self::Error> {
let PrivacyPreservingMessage {
public_account_ids,
nonces,
public_post_states,
encrypted_private_post_states,
new_commitments,
new_nullifiers,
} = value;
Ok(Self {
public_account_ids: public_account_ids.into_iter().map(Into::into).collect(),
nonces,
public_post_states: public_post_states
.into_iter()
.map(TryInto::try_into)
.collect::<Result<Vec<_>, _>>()?,
encrypted_private_post_states: encrypted_private_post_states
.into_iter()
.map(Into::into)
.collect(),
new_commitments: new_commitments.into_iter().map(Into::into).collect(),
new_nullifiers: new_nullifiers
.into_iter()
.map(|(n, d)| (n.into(), d.into()))
.collect(),
})
}
}
impl From<nssa::program_deployment_transaction::Message> for ProgramDeploymentMessage {
fn from(value: nssa::program_deployment_transaction::Message) -> Self {
Self {
bytecode: value.into_bytecode(),
}
}
}
impl From<ProgramDeploymentMessage> for nssa::program_deployment_transaction::Message {
fn from(value: ProgramDeploymentMessage) -> Self {
let ProgramDeploymentMessage { bytecode } = value;
Self::new(bytecode)
}
}
// ============================================================================
// WitnessSet conversions
// ============================================================================
impl TryFrom<nssa::public_transaction::WitnessSet> for WitnessSet {
type Error = ();
fn try_from(_value: nssa::public_transaction::WitnessSet) -> Result<Self, Self::Error> {
// Public transaction witness sets don't have proofs, so we can't convert them directly
Err(())
}
}
impl From<nssa::privacy_preserving_transaction::witness_set::WitnessSet> for WitnessSet {
fn from(value: nssa::privacy_preserving_transaction::witness_set::WitnessSet) -> Self {
let (sigs_and_pks, proof) = value.into_raw_parts();
Self {
signatures_and_public_keys: sigs_and_pks
.into_iter()
.map(|(sig, pk)| (sig.into(), pk.into()))
.collect(),
proof: proof.into(),
}
}
}
impl TryFrom<WitnessSet> for nssa::privacy_preserving_transaction::witness_set::WitnessSet {
type Error = nssa::error::NssaError;
fn try_from(value: WitnessSet) -> Result<Self, Self::Error> {
let WitnessSet {
signatures_and_public_keys,
proof,
} = value;
let signatures_and_public_keys = signatures_and_public_keys
.into_iter()
.map(|(sig, pk)| Ok((sig.into(), pk.try_into()?)))
.collect::<Result<Vec<_>, Self::Error>>()?;
Ok(Self::from_raw_parts(
signatures_and_public_keys,
proof.into(),
))
}
}
// ============================================================================
// Transaction conversions
// ============================================================================
impl From<nssa::PublicTransaction> for PublicTransaction {
fn from(value: nssa::PublicTransaction) -> Self {
Self {
message: value.message().clone().into(),
witness_set: WitnessSet {
signatures_and_public_keys: value
.witness_set()
.signatures_and_public_keys()
.iter()
.map(|(sig, pk)| (sig.clone().into(), pk.clone().into()))
.collect(),
proof: Proof(vec![]), // Public transactions don't have proofs
},
}
}
}
impl TryFrom<PublicTransaction> for nssa::PublicTransaction {
type Error = nssa::error::NssaError;
fn try_from(value: PublicTransaction) -> Result<Self, Self::Error> {
let PublicTransaction {
message,
witness_set,
} = value;
let WitnessSet {
signatures_and_public_keys,
proof: _,
} = witness_set;
Ok(Self::new(
message.into(),
nssa::public_transaction::WitnessSet::from_raw_parts(
signatures_and_public_keys
.into_iter()
.map(|(sig, pk)| Ok((sig.into(), pk.try_into()?)))
.collect::<Result<Vec<_>, Self::Error>>()?,
),
))
}
}
impl From<nssa::PrivacyPreservingTransaction> for PrivacyPreservingTransaction {
fn from(value: nssa::PrivacyPreservingTransaction) -> Self {
Self {
message: value.message().clone().into(),
witness_set: value.witness_set().clone().into(),
}
}
}
impl TryFrom<PrivacyPreservingTransaction> for nssa::PrivacyPreservingTransaction {
type Error = nssa::error::NssaError;
fn try_from(value: PrivacyPreservingTransaction) -> Result<Self, Self::Error> {
let PrivacyPreservingTransaction {
message,
witness_set,
} = value;
Ok(Self::new(
message.try_into().map_err(|_| {
nssa::error::NssaError::InvalidInput("Data too big error".to_string())
})?,
witness_set.try_into()?,
))
}
}
impl From<nssa::ProgramDeploymentTransaction> for ProgramDeploymentTransaction {
fn from(value: nssa::ProgramDeploymentTransaction) -> Self {
Self {
message: value.into_message().into(),
}
}
}
impl From<ProgramDeploymentTransaction> for nssa::ProgramDeploymentTransaction {
fn from(value: ProgramDeploymentTransaction) -> Self {
let ProgramDeploymentTransaction { message } = value;
Self::new(message.into())
}
}
impl From<common::transaction::NSSATransaction> for Transaction {
fn from(value: common::transaction::NSSATransaction) -> Self {
match value {
common::transaction::NSSATransaction::Public(tx) => Transaction::Public(tx.into()),
common::transaction::NSSATransaction::PrivacyPreserving(tx) => {
Transaction::PrivacyPreserving(tx.into())
}
common::transaction::NSSATransaction::ProgramDeployment(tx) => {
Transaction::ProgramDeployment(tx.into())
}
}
}
}
impl TryFrom<Transaction> for common::transaction::NSSATransaction {
type Error = nssa::error::NssaError;
fn try_from(value: Transaction) -> Result<Self, Self::Error> {
match value {
Transaction::Public(tx) => {
Ok(common::transaction::NSSATransaction::Public(tx.try_into()?))
}
Transaction::PrivacyPreserving(tx) => Ok(
common::transaction::NSSATransaction::PrivacyPreserving(tx.try_into()?),
),
Transaction::ProgramDeployment(tx) => Ok(
common::transaction::NSSATransaction::ProgramDeployment(tx.into()),
),
}
}
}
// ============================================================================
// Block conversions
// ============================================================================
impl From<common::block::BlockHeader> for BlockHeader {
fn from(value: common::block::BlockHeader) -> Self {
let common::block::BlockHeader {
block_id,
prev_block_hash,
hash,
timestamp,
signature,
} = value;
Self {
block_id,
prev_block_hash: Hash(prev_block_hash),
hash: Hash(hash),
timestamp,
signature: signature.into(),
}
}
}
impl TryFrom<BlockHeader> for common::block::BlockHeader {
type Error = nssa::error::NssaError;
fn try_from(value: BlockHeader) -> Result<Self, Self::Error> {
let BlockHeader {
block_id,
prev_block_hash,
hash,
timestamp,
signature,
} = value;
Ok(Self {
block_id,
prev_block_hash: prev_block_hash.0,
hash: hash.0,
timestamp,
signature: signature.into(),
})
}
}
impl TryFrom<common::block::BlockBody> for BlockBody {
type Error = std::io::Error;
fn try_from(value: common::block::BlockBody) -> Result<Self, Self::Error> {
// Note: EncodedTransaction doesn't have a direct conversion to NSSATransaction
// This conversion will decode and re-encode the transactions
use borsh::BorshDeserialize as _;
let common::block::BlockBody { transactions } = value;
let transactions = transactions
.into_iter()
.map(|encoded_tx| match encoded_tx.tx_kind {
common::transaction::TxKind::Public => {
nssa::PublicTransaction::try_from_slice(&encoded_tx.encoded_transaction_data)
.map(|tx| Transaction::Public(tx.into()))
}
common::transaction::TxKind::PrivacyPreserving => {
nssa::PrivacyPreservingTransaction::try_from_slice(
&encoded_tx.encoded_transaction_data,
)
.map(|tx| Transaction::PrivacyPreserving(tx.into()))
}
common::transaction::TxKind::ProgramDeployment => {
nssa::ProgramDeploymentTransaction::try_from_slice(
&encoded_tx.encoded_transaction_data,
)
.map(|tx| Transaction::ProgramDeployment(tx.into()))
}
})
.collect::<Result<Vec<_>, _>>()?;
Ok(Self { transactions })
}
}
impl TryFrom<BlockBody> for common::block::BlockBody {
type Error = nssa::error::NssaError;
fn try_from(value: BlockBody) -> Result<Self, Self::Error> {
let BlockBody { transactions } = value;
let transactions = transactions
.into_iter()
.map(|tx| {
let nssa_tx: common::transaction::NSSATransaction = tx.try_into()?;
Ok::<_, nssa::error::NssaError>(nssa_tx.into())
})
.collect::<Result<Vec<_>, _>>()?;
Ok(Self { transactions })
}
}
impl TryFrom<common::block::Block> for Block {
type Error = std::io::Error;
fn try_from(value: common::block::Block) -> Result<Self, Self::Error> {
let common::block::Block {
header,
body,
bedrock_status,
} = value;
Ok(Self {
header: header.into(),
body: body.try_into()?,
bedrock_status: bedrock_status.into(),
})
}
}
impl TryFrom<Block> for common::block::Block {
type Error = nssa::error::NssaError;
fn try_from(value: Block) -> Result<Self, Self::Error> {
let Block {
header,
body,
bedrock_status,
} = value;
Ok(Self {
header: header.try_into()?,
body: body.try_into()?,
bedrock_status: bedrock_status.into(),
})
}
}
impl From<common::block::BedrockStatus> for BedrockStatus {
fn from(value: common::block::BedrockStatus) -> Self {
match value {
common::block::BedrockStatus::Pending => Self::Pending,
common::block::BedrockStatus::Safe => Self::Safe,
common::block::BedrockStatus::Finalized => Self::Finalized,
}
}
}
impl From<BedrockStatus> for common::block::BedrockStatus {
fn from(value: BedrockStatus) -> Self {
match value {
BedrockStatus::Pending => Self::Pending,
BedrockStatus::Safe => Self::Safe,
BedrockStatus::Finalized => Self::Finalized,
}
}
}
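
A hedged sketch of how these conversions compose, assuming the crate's convert feature is enabled (the sample id bytes are illustrative):

// Round-trip an AccountId through the protocol type and back.
fn account_id_roundtrip() {
    let core = nssa_core::account::AccountId::new([7u8; 32]);
    let proto: indexer_service_protocol::AccountId = core.into();
    let back: nssa_core::account::AccountId = proto.into();
    assert_eq!(back.into_value(), [7u8; 32]);
}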

View File

@ -0,0 +1,230 @@
//! This crate defines the protocol types used by the indexer service.
//!
//! Currently it mostly mimics types from `nssa_core`, but it's important to have a separate crate
//! that defines a stable interface for the indexer service RPCs, one that can evolve in its own way.
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
#[cfg(feature = "convert")]
mod convert;
pub type Nonce = u128;
pub type ProgramId = [u32; 8];
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct AccountId {
#[serde(with = "base64::arr")]
#[schemars(with = "String", description = "base64-encoded account ID")]
pub value: [u8; 32],
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct Account {
pub program_owner: ProgramId,
pub balance: u128,
pub data: Data,
pub nonce: Nonce,
}
pub type BlockId = u64;
pub type TimeStamp = u64;
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct Block {
pub header: BlockHeader,
pub body: BlockBody,
pub bedrock_status: BedrockStatus,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct BlockHeader {
pub block_id: BlockId,
pub prev_block_hash: Hash,
pub hash: Hash,
pub timestamp: TimeStamp,
pub signature: Signature,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct Signature(
#[serde(with = "base64::arr")]
#[schemars(with = "String", description = "base64-encoded signature")]
pub [u8; 64],
);
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct BlockBody {
pub transactions: Vec<Transaction>,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub enum Transaction {
Public(PublicTransaction),
PrivacyPreserving(PrivacyPreservingTransaction),
ProgramDeployment(ProgramDeploymentTransaction),
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct PublicTransaction {
pub message: PublicMessage,
pub witness_set: WitnessSet,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct PrivacyPreservingTransaction {
pub message: PrivacyPreservingMessage,
pub witness_set: WitnessSet,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct PublicMessage {
pub program_id: ProgramId,
pub account_ids: Vec<AccountId>,
pub nonces: Vec<Nonce>,
pub instruction_data: InstructionData,
}
pub type InstructionData = Vec<u32>;
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct PrivacyPreservingMessage {
pub public_account_ids: Vec<AccountId>,
pub nonces: Vec<Nonce>,
pub public_post_states: Vec<Account>,
pub encrypted_private_post_states: Vec<EncryptedAccountData>,
pub new_commitments: Vec<Commitment>,
pub new_nullifiers: Vec<(Nullifier, CommitmentSetDigest)>,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct WitnessSet {
pub signatures_and_public_keys: Vec<(Signature, PublicKey)>,
pub proof: Proof,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct Proof(
#[serde(with = "base64")]
#[schemars(with = "String", description = "base64-encoded proof")]
pub Vec<u8>,
);
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct EncryptedAccountData {
pub ciphertext: Ciphertext,
pub epk: EphemeralPublicKey,
pub view_tag: ViewTag,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct ProgramDeploymentTransaction {
pub message: ProgramDeploymentMessage,
}
pub type ViewTag = u8;
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct Ciphertext(
#[serde(with = "base64")]
#[schemars(with = "String", description = "base64-encoded ciphertext")]
pub Vec<u8>,
);
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct PublicKey(
#[serde(with = "base64::arr")]
#[schemars(with = "String", description = "base64-encoded public key")]
pub [u8; 32],
);
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct EphemeralPublicKey(
#[serde(with = "base64")]
#[schemars(with = "String", description = "base64-encoded ephemeral public key")]
pub Vec<u8>,
);
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct Commitment(
#[serde(with = "base64::arr")]
#[schemars(with = "String", description = "base64-encoded commitment")]
pub [u8; 32],
);
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct Nullifier(
#[serde(with = "base64::arr")]
#[schemars(with = "String", description = "base64-encoded nullifier")]
pub [u8; 32],
);
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct CommitmentSetDigest(
#[serde(with = "base64::arr")]
#[schemars(with = "String", description = "base64-encoded commitment set digest")]
pub [u8; 32],
);
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct ProgramDeploymentMessage {
#[serde(with = "base64")]
#[schemars(with = "String", description = "base64-encoded program bytecode")]
pub bytecode: Vec<u8>,
}
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct Data(
#[serde(with = "base64")]
#[schemars(with = "String", description = "base64-encoded account data")]
pub Vec<u8>,
);
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub struct Hash(
#[serde(with = "base64::arr")]
#[schemars(with = "String", description = "base64-encoded hash")]
pub [u8; 32],
);
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)]
pub enum BedrockStatus {
Pending,
Safe,
Finalized,
}
mod base64 {
use base64::prelude::{BASE64_STANDARD, Engine as _};
use serde::{Deserialize, Deserializer, Serialize, Serializer};
pub mod arr {
use super::*;
pub fn serialize<S: Serializer>(v: &[u8], s: S) -> Result<S::Ok, S::Error> {
super::serialize(v, s)
}
pub fn deserialize<'de, const N: usize, D: Deserializer<'de>>(
d: D,
) -> Result<[u8; N], D::Error> {
let vec = super::deserialize(d)?;
vec.try_into().map_err(|_| {
serde::de::Error::custom(format!("Invalid length, expected {N} bytes"))
})
}
}
pub fn serialize<S: Serializer>(v: &[u8], s: S) -> Result<S::Ok, S::Error> {
let base64 = BASE64_STANDARD.encode(v);
String::serialize(&base64, s)
}
pub fn deserialize<'de, D: Deserializer<'de>>(d: D) -> Result<Vec<u8>, D::Error> {
let base64 = String::deserialize(d)?;
BASE64_STANDARD
.decode(base64.as_bytes())
.map_err(serde::de::Error::custom)
}
}
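
Because the byte-array fields serialize through the base64 helper above, a JSON round-trip looks roughly like this (a sketch, assuming serde_json is available):

use indexer_service_protocol::AccountId;

fn main() {
    let id = AccountId { value: [0u8; 32] };
    let json = serde_json::to_string(&id).unwrap();
    // 32 zero bytes encode to 43 'A' characters plus one '=' of padding.
    assert_eq!(json, r#"{"value":"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA="}"#);
    let back: AccountId = serde_json::from_str(&json).unwrap();
    assert_eq!(back, id);
}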

View File

@ -0,0 +1,15 @@
[package]
name = "indexer_service_rpc"
version = "0.1.0"
edition = "2024"
[dependencies]
indexer_service_protocol = { workspace = true }
jsonrpsee = { workspace = true, features = ["macros"] }
serde_json.workspace = true
schemars.workspace = true
[features]
client = ["jsonrpsee/client"]
server = ["jsonrpsee/server"]

View File

@ -0,0 +1,40 @@
use indexer_service_protocol::{Account, AccountId, Block, BlockId, Hash, Transaction};
use jsonrpsee::{core::SubscriptionResult, proc_macros::rpc, types::ErrorObjectOwned};
#[cfg(all(not(feature = "server"), not(feature = "client")))]
compile_error!("At least one of `server` or `client` features must be enabled.");
#[cfg_attr(feature = "server", rpc(server))]
#[cfg_attr(feature = "client", rpc(client))]
pub trait Rpc {
#[method(name = "get_schema")]
fn get_schema(&self) -> Result<serde_json::Value, ErrorObjectOwned> {
// TODO: The canonical solution would be a `describe` method returning an OpenRPC spec,
// but for now that is painful to implement (though it can be done if really needed).
// Until it can be auto-generated (https://github.com/paritytech/jsonrpsee/issues/737),
// we just return a JSON schema.
// Block schema contains all other types used in the protocol, so it's sufficient to return
// its schema.
let block_schema = schemars::schema_for!(Block);
Ok(serde_json::to_value(block_schema).expect("Schema serialization should not fail"))
}
#[subscription(name = "subscribeToBlocks", item = Vec<Block>)]
async fn subscribe_to_blocks(&self, from: BlockId) -> SubscriptionResult;
#[method(name = "getBlockById")]
async fn get_block_by_id(&self, block_id: BlockId) -> Result<Block, ErrorObjectOwned>;
#[method(name = "getBlockByHash")]
async fn get_block_by_hash(&self, block_hash: Hash) -> Result<Block, ErrorObjectOwned>;
#[method(name = "getLastBlockId")]
async fn get_last_block_id(&self) -> Result<BlockId, ErrorObjectOwned>;
#[method(name = "getAccount")]
async fn get_account(&self, account_id: AccountId) -> Result<Account, ErrorObjectOwned>;
#[method(name = "getTransaction")]
async fn get_transaction(&self, tx_hash: Hash) -> Result<Transaction, ErrorObjectOwned>;
}
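
With the client feature enabled, the macro also generates an RpcClient trait; a hedged usage sketch against a hypothetical local endpoint:

use indexer_service_rpc::RpcClient as _;
use jsonrpsee::http_client::HttpClientBuilder;

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // The port matches the service default (8779); the host is illustrative.
    let client = HttpClientBuilder::default().build("http://localhost:8779")?;
    let last_id = client.get_last_block_id().await?;
    println!("last block id: {last_id}");
    Ok(())
}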

View File

@ -0,0 +1 @@
pub mod service;

View File

@ -0,0 +1,72 @@
use std::net::SocketAddr;
use anyhow::{Context as _, Result};
use clap::Parser;
use indexer_service_rpc::RpcServer as _;
use jsonrpsee::server::Server;
use log::{error, info};
use tokio_util::sync::CancellationToken;
#[derive(Debug, Parser)]
#[clap(version)]
struct Args {
#[clap(short, long, default_value = "8779")]
port: u16,
}
#[tokio::main]
async fn main() -> Result<()> {
env_logger::init();
let args = Args::parse();
let cancellation_token = listen_for_shutdown_signal();
let handle = run_server(args.port).await?;
let handle_clone = handle.clone();
tokio::select! {
_ = cancellation_token.cancelled() => {
info!("Shutting down server...");
}
_ = handle_clone.stopped() => {
error!("Server stopped unexpectedly");
}
}
info!("Server shutdown complete");
Ok(())
}
async fn run_server(port: u16) -> Result<jsonrpsee::server::ServerHandle> {
let server = Server::builder()
.build(SocketAddr::from(([0, 0, 0, 0], port)))
.await
.context("Failed to build RPC server")?;
let addr = server
.local_addr()
.context("Failed to get local address of RPC server")?;
info!("Starting Indexer Service RPC server on {addr}");
let handle = server.start(indexer_service::service::IndexerService.into_rpc());
Ok(handle)
}
fn listen_for_shutdown_signal() -> CancellationToken {
let cancellation_token = CancellationToken::new();
let cancellation_token_clone = cancellation_token.clone();
tokio::spawn(async move {
if let Err(err) = tokio::signal::ctrl_c().await {
error!("Failed to listen for Ctrl-C signal: {err}");
return;
}
info!("Received Ctrl-C signal");
cancellation_token_clone.cancel();
});
cancellation_token
}

View File

@ -0,0 +1,36 @@
use indexer_service_protocol::{Account, AccountId, Block, BlockId, Hash, Transaction};
use jsonrpsee::{core::SubscriptionResult, types::ErrorObjectOwned};
pub struct IndexerService;
// `async_trait` is required by `jsonrpsee`
#[async_trait::async_trait]
impl indexer_service_rpc::RpcServer for IndexerService {
async fn subscribe_to_blocks(
&self,
_subscription_sink: jsonrpsee::PendingSubscriptionSink,
_from: BlockId,
) -> SubscriptionResult {
todo!()
}
async fn get_block_by_id(&self, _block_id: BlockId) -> Result<Block, ErrorObjectOwned> {
todo!()
}
async fn get_block_by_hash(&self, _block_hash: Hash) -> Result<Block, ErrorObjectOwned> {
todo!()
}
async fn get_last_block_id(&self) -> Result<BlockId, ErrorObjectOwned> {
todo!()
}
async fn get_account(&self, _account_id: AccountId) -> Result<Account, ErrorObjectOwned> {
todo!()
}
async fn get_transaction(&self, _tx_hash: Hash) -> Result<Transaction, ErrorObjectOwned> {
todo!()
}
}

View File

@ -185,6 +185,7 @@ impl TpsTestManager {
initial_accounts: initial_public_accounts,
initial_commitments: vec![initial_commitment],
signing_key: [37; 32],
bedrock_config: None,
}
}
}

View File

@ -68,6 +68,10 @@ impl AccountId {
pub fn value(&self) -> &[u8; 32] {
&self.value
}
pub fn into_value(self) -> [u8; 32] {
self.value
}
}
impl AsRef<[u8]> for AccountId {

View File

@ -69,6 +69,11 @@ impl Commitment {
self.0
}
#[cfg(feature = "host")]
pub fn from_byte_array(bytes: [u8; 32]) -> Self {
Self(bytes)
}
#[cfg(feature = "host")]
pub fn from_cursor(cursor: &mut Cursor<&[u8]>) -> Result<Self, NssaCoreError> {
let mut bytes = [0u8; 32];
@ -89,6 +94,11 @@ impl Nullifier {
self.0
}
#[cfg(feature = "host")]
pub fn from_byte_array(bytes: [u8; 32]) -> Self {
Self(bytes)
}
pub fn from_cursor(cursor: &mut Cursor<&[u8]>) -> Result<Self, NssaCoreError> {
let mut bytes = [0u8; 32];
cursor.read_exact(&mut bytes)?;
@ -106,6 +116,16 @@ impl Ciphertext {
bytes
}
#[cfg(feature = "host")]
pub fn into_inner(self) -> Vec<u8> {
self.0
}
#[cfg(feature = "host")]
pub fn from_inner(inner: Vec<u8>) -> Self {
Self(inner)
}
#[cfg(feature = "host")]
pub fn from_cursor(cursor: &mut Cursor<&[u8]>) -> Result<Self, NssaCoreError> {
let mut u32_bytes = [0; 4];

View File

@ -20,6 +20,16 @@ use crate::{
#[derive(Debug, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)]
pub struct Proof(pub(crate) Vec<u8>);
impl Proof {
pub fn into_inner(self) -> Vec<u8> {
self.0
}
pub fn from_inner(inner: Vec<u8>) -> Self {
Self(inner)
}
}
#[derive(Clone)]
pub struct ProgramWithDependencies {
pub program: Program,

View File

@ -45,12 +45,12 @@ impl EncryptedAccountData {
#[derive(Debug, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)]
pub struct Message {
pub(crate) public_account_ids: Vec<AccountId>,
pub(crate) nonces: Vec<Nonce>,
pub(crate) public_post_states: Vec<Account>,
pub public_account_ids: Vec<AccountId>,
pub nonces: Vec<Nonce>,
pub public_post_states: Vec<Account>,
pub encrypted_private_post_states: Vec<EncryptedAccountData>,
pub new_commitments: Vec<Commitment>,
pub(crate) new_nullifiers: Vec<(Nullifier, CommitmentSetDigest)>,
pub new_nullifiers: Vec<(Nullifier, CommitmentSetDigest)>,
}
impl Message {

View File

@ -16,7 +16,7 @@ use crate::{
#[derive(Debug, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)]
pub struct PrivacyPreservingTransaction {
pub message: Message,
witness_set: WitnessSet,
pub witness_set: WitnessSet,
}
impl PrivacyPreservingTransaction {

View File

@ -46,4 +46,18 @@ impl WitnessSet {
pub fn proof(&self) -> &Proof {
&self.proof
}
pub fn into_raw_parts(self) -> (Vec<(Signature, PublicKey)>, Proof) {
(self.signatures_and_public_keys, self.proof)
}
pub fn from_raw_parts(
signatures_and_public_keys: Vec<(Signature, PublicKey)>,
proof: Proof,
) -> Self {
Self {
signatures_and_public_keys,
proof,
}
}
}

View File

@ -9,4 +9,8 @@ impl Message {
pub fn new(bytecode: Vec<u8>) -> Self {
Self { bytecode }
}
pub fn into_bytecode(self) -> Vec<u8> {
self.bytecode
}
}

View File

@ -14,6 +14,10 @@ impl ProgramDeploymentTransaction {
Self { message }
}
pub fn into_message(self) -> Message {
self.message
}
pub(crate) fn validate_and_produce_public_state_diff(
&self,
state: &V02State,

View File

@ -9,10 +9,10 @@ use crate::{AccountId, error::NssaError, program::Program};
#[derive(Debug, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)]
pub struct Message {
pub(crate) program_id: ProgramId,
pub(crate) account_ids: Vec<AccountId>,
pub(crate) nonces: Vec<Nonce>,
pub(crate) instruction_data: InstructionData,
pub program_id: ProgramId,
pub account_ids: Vec<AccountId>,
pub nonces: Vec<Nonce>,
pub instruction_data: InstructionData,
}
impl Message {

View File

@ -37,6 +37,16 @@ impl WitnessSet {
pub fn signatures_and_public_keys(&self) -> &[(Signature, PublicKey)] {
&self.signatures_and_public_keys
}
pub fn into_raw_parts(self) -> Vec<(Signature, PublicKey)> {
self.signatures_and_public_keys
}
pub fn from_raw_parts(signatures_and_public_keys: Vec<(Signature, PublicKey)>) -> Self {
Self {
signatures_and_public_keys,
}
}
}
#[cfg(test)]

View File

@ -8,7 +8,7 @@ use rand::{RngCore, rngs::OsRng};
#[derive(Debug, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)]
pub struct Signature {
value: [u8; 64],
pub value: [u8; 64],
}
impl Signature {

View File

@ -17,6 +17,12 @@ serde_json.workspace = true
tempfile.workspace = true
chrono.workspace = true
log.workspace = true
bedrock_client.workspace = true
logos-blockchain-key-management-system-service.workspace = true
logos-blockchain-core.workspace = true
rand.workspace = true
reqwest.workspace = true
borsh.workspace = true
[features]
default = []

View File

@ -0,0 +1,117 @@
use std::{fs, path::Path};
use anyhow::{Context, Result, anyhow};
use bedrock_client::BedrockClient;
use common::block::HashableBlockData;
use logos_blockchain_core::mantle::{
MantleTx, Op, OpProof, SignedMantleTx, Transaction, TxHash, ledger,
ops::channel::{ChannelId, MsgId, inscribe::InscriptionOp},
};
use logos_blockchain_key_management_system_service::keys::{
ED25519_SECRET_KEY_SIZE, Ed25519Key, Ed25519PublicKey,
};
use crate::config::BedrockConfig;
/// A component that posts block data to the Logos blockchain
pub struct BlockSettlementClient {
bedrock_client: BedrockClient,
bedrock_signing_key: Ed25519Key,
bedrock_channel_id: ChannelId,
last_message_id: MsgId,
}
impl BlockSettlementClient {
pub fn try_new(home: &Path, config: &BedrockConfig) -> Result<Self> {
let bedrock_signing_key = load_or_create_signing_key(&home.join("bedrock_signing_key"))
.context("Failed to load or create signing key")?;
let bedrock_channel_id = ChannelId::from(config.channel_id);
let bedrock_client = BedrockClient::new(None, config.node_url.clone())
.context("Failed to initialize bedrock client")?;
let channel_genesis_msg = MsgId::from([0; 32]);
Ok(Self {
bedrock_client,
bedrock_signing_key,
bedrock_channel_id,
last_message_id: channel_genesis_msg,
})
}
/// Create and sign a transaction for inscribing data
pub fn create_inscribe_tx(&self, data: Vec<u8>) -> (SignedMantleTx, MsgId) {
let verifying_key_bytes = self.bedrock_signing_key.public_key().to_bytes();
let verifying_key =
Ed25519PublicKey::from_bytes(&verifying_key_bytes).expect("valid ed25519 public key");
let inscribe_op = InscriptionOp {
channel_id: self.bedrock_channel_id,
inscription: data,
parent: self.last_message_id,
signer: verifying_key,
};
let inscribe_op_id = inscribe_op.id();
let ledger_tx = ledger::Tx::new(vec![], vec![]);
let inscribe_tx = MantleTx {
ops: vec![Op::ChannelInscribe(inscribe_op)],
ledger_tx,
// Altruistic test config: zero gas prices
storage_gas_price: 0,
execution_gas_price: 0,
};
let tx_hash = inscribe_tx.hash();
let signature_bytes = self
.bedrock_signing_key
.sign_payload(tx_hash.as_signing_bytes().as_ref())
.to_bytes();
let signature =
logos_blockchain_key_management_system_service::keys::Ed25519Signature::from_bytes(
&signature_bytes,
);
let signed_mantle_tx = SignedMantleTx {
ops_proofs: vec![OpProof::Ed25519Sig(signature)],
ledger_tx_proof: empty_ledger_signature(&tx_hash),
mantle_tx: inscribe_tx,
};
(signed_mantle_tx, inscribe_op_id)
}
/// Post a transaction to the node and wait for inclusion
pub async fn post_and_wait(&mut self, block_data: &HashableBlockData) -> Result<u64> {
let inscription_data = borsh::to_vec(&block_data)?;
let (tx, new_msg_id) = self.create_inscribe_tx(inscription_data);
// Post the transaction
self.bedrock_client.post_transaction(tx).await?;
self.last_message_id = new_msg_id;
Ok(block_data.block_id)
}
}
/// Load signing key from file or generate a new one if it doesn't exist
fn load_or_create_signing_key(path: &Path) -> Result<Ed25519Key> {
if path.exists() {
let key_bytes = fs::read(path)?;
let key_array: [u8; ED25519_SECRET_KEY_SIZE] = key_bytes
.try_into()
.map_err(|_| anyhow!("Found key with incorrect length"))?;
Ok(Ed25519Key::from_bytes(&key_array))
} else {
let mut key_bytes = [0u8; ED25519_SECRET_KEY_SIZE];
rand::RngCore::fill_bytes(&mut rand::thread_rng(), &mut key_bytes);
fs::write(path, key_bytes)?;
Ok(Ed25519Key::from_bytes(&key_bytes))
}
}
fn empty_ledger_signature(
tx_hash: &TxHash,
) -> logos_blockchain_key_management_system_service::keys::ZkSignature {
logos_blockchain_key_management_system_service::keys::ZkKey::multi_sign(&[], tx_hash.as_ref())
.expect("multi-sign with empty key set works")
}
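
A within-crate sketch of how this client is driven from block production (it mirrors produce_new_block_and_post_to_settlement_layer later in this diff; the wiring itself is illustrative):

// Inscribe one Borsh-encoded block and advance the channel's parent MsgId.
async fn settle_one_block(
    settlement: &mut BlockSettlementClient,
    block_data: &common::block::HashableBlockData,
) -> anyhow::Result<u64> {
    let posted_id = settlement.post_and_wait(block_data).await?;
    log::info!("Posted block {posted_id} to Bedrock");
    Ok(posted_id)
}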

View File

@ -46,7 +46,7 @@ impl SequencerBlockStore {
}
pub fn get_block_at_id(&self, id: u64) -> Result<Block> {
Ok(self.dbio.get_block(id)?.into_block(&self.signing_key))
Ok(self.dbio.get_block(id)?)
}
pub fn put_block_at_id(&mut self, block: Block) -> Result<()> {
@ -113,7 +113,7 @@ mod tests {
transactions: vec![],
};
let genesis_block = genesis_block_hashable_data.into_block(&signing_key);
let genesis_block = genesis_block_hashable_data.into_pending_block(&signing_key);
// Start an empty node store
let mut node_store =
SequencerBlockStore::open_db_with_genesis(path, Some(genesis_block), signing_key)

View File

@ -5,6 +5,7 @@ use std::{
};
use anyhow::Result;
use reqwest::Url;
use serde::{Deserialize, Serialize};
#[derive(Debug, Serialize, Deserialize, Clone)]
@ -47,6 +48,16 @@ pub struct SequencerConfig {
pub initial_commitments: Vec<CommitmentsInitialData>,
/// Sequencer own signing key
pub signing_key: [u8; 32],
/// Bedrock configuration options
pub bedrock_config: Option<BedrockConfig>,
}
#[derive(Clone, Serialize, Deserialize)]
pub struct BedrockConfig {
/// Bedrock channel ID
pub channel_id: [u8; 32],
/// Bedrock Url
pub node_url: Url,
}
impl SequencerConfig {
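
A hedged sketch of deserializing the new section (the values mirror the node_config.json change later in this diff; the import path is assumed):

use sequencer_core::config::BedrockConfig;

fn main() -> anyhow::Result<()> {
    let json = r#"{
        "channel_id": [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1],
        "node_url": "http://localhost:8080"
    }"#;
    let cfg: BedrockConfig = serde_json::from_str(json)?;
    assert_eq!(cfg.channel_id, [1u8; 32]);
    Ok(())
}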

View File

@ -13,8 +13,9 @@ use log::warn;
use mempool::{MemPool, MemPoolHandle};
use serde::{Deserialize, Serialize};
use crate::block_store::SequencerBlockStore;
use crate::{block_settlement_client::BlockSettlementClient, block_store::SequencerBlockStore};
mod block_settlement_client;
pub mod block_store;
pub mod config;
@ -24,6 +25,7 @@ pub struct SequencerCore {
mempool: MemPool<EncodedTransaction>,
sequencer_config: SequencerConfig,
chain_height: u64,
block_settlement_client: Option<BlockSettlementClient>,
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
@ -51,7 +53,7 @@ impl SequencerCore {
};
let signing_key = nssa::PrivateKey::try_new(config.signing_key).unwrap();
let genesis_block = hashable_data.into_block(&signing_key);
let genesis_block = hashable_data.into_pending_block(&signing_key);
// The sequencer should panic if it is unable to open the db,
// as fixing that may require actions outside the program's scope
@ -87,12 +89,18 @@ impl SequencerCore {
state.add_pinata_program(PINATA_BASE58.parse().unwrap());
let (mempool, mempool_handle) = MemPool::new(config.mempool_max_size);
let block_settlement_client = config.bedrock_config.as_ref().map(|bedrock_config| {
BlockSettlementClient::try_new(&config.home, bedrock_config)
.expect("Block settlement client should be constructible")
});
let mut this = Self {
state,
block_store,
mempool,
chain_height: config.genesis_id,
sequencer_config: config,
block_settlement_client,
};
this.sync_state_with_stored_blocks();
@ -137,9 +145,21 @@ impl SequencerCore {
Ok(tx)
}
pub async fn produce_new_block_and_post_to_settlement_layer(&mut self) -> Result<u64> {
let block_data = self.produce_new_block_with_mempool_transactions()?;
if let Some(block_settlement) = self.block_settlement_client.as_mut() {
block_settlement.post_and_wait(&block_data).await?;
log::info!("Posted block data to Bedrock");
}
Ok(self.chain_height)
}
/// Produces new block from transactions in mempool
pub fn produce_new_block_with_mempool_transactions(&mut self) -> Result<u64> {
pub fn produce_new_block_with_mempool_transactions(&mut self) -> Result<HashableBlockData> {
let now = Instant::now();
let new_block_height = self.chain_height + 1;
let mut valid_transactions = vec![];
@ -167,8 +187,6 @@ impl SequencerCore {
let curr_time = chrono::Utc::now().timestamp_millis() as u64;
let num_txs_in_block = valid_transactions.len();
let hashable_data = HashableBlockData {
block_id: new_block_height,
transactions: valid_transactions,
@ -176,7 +194,9 @@ impl SequencerCore {
timestamp: curr_time,
};
let block = hashable_data.into_block(self.block_store.signing_key());
let block = hashable_data
.clone()
.into_pending_block(self.block_store.signing_key());
self.block_store.put_block_at_id(block)?;
@ -194,11 +214,10 @@ impl SequencerCore {
// ```
log::info!(
"Created block with {} transactions in {} seconds",
num_txs_in_block,
hashable_data.transactions.len(),
now.elapsed().as_secs()
);
Ok(self.chain_height)
Ok(hashable_data)
}
pub fn state(&self) -> &nssa::V02State {
@ -277,6 +296,7 @@ mod tests {
initial_accounts,
initial_commitments: vec![],
signing_key: *sequencer_sign_key_for_testing().value(),
bedrock_config: None,
}
}
@ -619,9 +639,9 @@ mod tests {
let tx = common::test_utils::produce_dummy_empty_transaction();
mempool_handle.push(tx).await.unwrap();
let block_id = sequencer.produce_new_block_with_mempool_transactions();
assert!(block_id.is_ok());
assert_eq!(block_id.unwrap(), genesis_height + 1);
let block = sequencer.produce_new_block_with_mempool_transactions();
assert!(block.is_ok());
assert_eq!(block.unwrap().block_id, genesis_height + 1);
}
#[tokio::test]
@ -658,7 +678,8 @@ mod tests {
// Create block
let current_height = sequencer
.produce_new_block_with_mempool_transactions()
.unwrap();
.unwrap()
.block_id;
let block = sequencer
.block_store
.get_block_at_id(current_height)
@ -697,7 +718,8 @@ mod tests {
mempool_handle.push(tx.clone()).await.unwrap();
let current_height = sequencer
.produce_new_block_with_mempool_transactions()
.unwrap();
.unwrap()
.block_id;
let block = sequencer
.block_store
.get_block_at_id(current_height)
@ -708,7 +730,8 @@ mod tests {
mempool_handle.push(tx.clone()).await.unwrap();
let current_height = sequencer
.produce_new_block_with_mempool_transactions()
.unwrap();
.unwrap()
.block_id;
let block = sequencer
.block_store
.get_block_at_id(current_height)
@ -743,7 +766,8 @@ mod tests {
mempool_handle.push(tx.clone()).await.unwrap();
let current_height = sequencer
.produce_new_block_with_mempool_transactions()
.unwrap();
.unwrap()
.block_id;
let block = sequencer
.block_store
.get_block_at_id(current_height)

View File

@ -388,6 +388,7 @@ mod tests {
initial_accounts,
initial_commitments: vec![],
signing_key: *sequencer_sign_key_for_testing().value(),
bedrock_config: None,
}
}

View File

@ -154,5 +154,9 @@
37,
37,
37
]
}
],
"bedrock_config": {
"channel_id": [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1],
"node_url": "http://localhost:8080"
}
}

View File

@ -50,7 +50,9 @@ pub async fn startup_sequencer(
let id = {
let mut state = seq_core_wrapped.lock().await;
state.produce_new_block_with_mempool_transactions()?
state
.produce_new_block_and_post_to_settlement_layer()
.await?
};
info!("Block with id {id} created");

View File

@ -1,6 +1,6 @@
use std::{path::Path, sync::Arc};
use common::block::{Block, HashableBlockData};
use common::block::Block;
use error::DbError;
use rocksdb::{
BoundColumnFamily, ColumnFamilyDescriptor, DBWithThreadMode, MultiThreaded, Options,
@ -26,6 +26,8 @@ pub const DB_META_FIRST_BLOCK_IN_DB_KEY: &str = "first_block_in_db";
pub const DB_META_LAST_BLOCK_IN_DB_KEY: &str = "last_block_in_db";
/// Key base for storing metainformation that describes whether the first block has been set
pub const DB_META_FIRST_BLOCK_SET_KEY: &str = "first_block_set";
/// Key base for storing metainformation about the last finalized block on Bedrock
pub const DB_META_LAST_FINALIZED_BLOCK_ID: &str = "last_finalized_block_id";
/// Key base for storing the snapshot entry that describes the block id
pub const DB_SNAPSHOT_BLOCK_ID_KEY: &str = "block_id";
@ -75,6 +77,7 @@ impl RocksDBIO {
dbio.put_meta_first_block_in_db(block)?;
dbio.put_meta_is_first_block_set()?;
dbio.put_meta_last_block_in_db(block_id)?;
dbio.put_meta_last_finalized_block_id(None)?;
Ok(dbio)
} else {
@ -232,6 +235,28 @@ impl RocksDBIO {
Ok(())
}
pub fn put_meta_last_finalized_block_id(&self, block_id: Option<u64>) -> DbResult<()> {
let cf_meta = self.meta_column();
self.db
.put_cf(
&cf_meta,
borsh::to_vec(&DB_META_LAST_FINALIZED_BLOCK_ID).map_err(|err| {
DbError::borsh_cast_message(
err,
Some("Failed to serialize DB_META_LAST_FINALIZED_BLOCK_ID".to_string()),
)
})?,
borsh::to_vec(&block_id).map_err(|err| {
DbError::borsh_cast_message(
err,
Some("Failed to serialize last block id".to_string()),
)
})?,
)
.map_err(|rerr| DbError::rocksdb_cast_message(rerr, None))?;
Ok(())
}
pub fn put_meta_is_first_block_set(&self) -> DbResult<()> {
let cf_meta = self.meta_column();
self.db
@ -269,7 +294,7 @@ impl RocksDBIO {
Some("Failed to serialize block id".to_string()),
)
})?,
borsh::to_vec(&HashableBlockData::from(block)).map_err(|err| {
borsh::to_vec(&block).map_err(|err| {
DbError::borsh_cast_message(
err,
Some("Failed to serialize block data".to_string()),
@ -280,7 +305,7 @@ impl RocksDBIO {
Ok(())
}
pub fn get_block(&self, block_id: u64) -> DbResult<HashableBlockData> {
pub fn get_block(&self, block_id: u64) -> DbResult<Block> {
let cf_block = self.block_column();
let res = self
.db
@ -296,14 +321,12 @@ impl RocksDBIO {
.map_err(|rerr| DbError::rocksdb_cast_message(rerr, None))?;
if let Some(data) = res {
Ok(
borsh::from_slice::<HashableBlockData>(&data).map_err(|serr| {
DbError::borsh_cast_message(
serr,
Some("Failed to deserialize block data".to_string()),
)
})?,
)
Ok(borsh::from_slice::<Block>(&data).map_err(|serr| {
DbError::borsh_cast_message(
serr,
Some("Failed to deserialize block data".to_string()),
)
})?)
} else {
Err(DbError::db_interaction_error(
"Block on this id not found".to_string(),

View File

@ -19,7 +19,7 @@ pub enum ChainSubcommand {
/// Get transaction at hash from sequencer
Transaction {
/// hash - valid 32 byte hex string
#[arg(short, long)]
#[arg(short = 't', long)]
hash: String,
},
}