From 2218ad1c64ec4ed690b626c688894bf5a301aac3 Mon Sep 17 00:00:00 2001 From: Sergio Chouhy Date: Thu, 22 Jan 2026 02:01:02 -0300 Subject: [PATCH 01/20] add functionality to resubmit pending transactions and delete finalized blocks --- common/src/rpc_primitives/requests.rs | 9 ++++ common/src/sequencer_client.rs | 33 ++++++++++--- sequencer_core/src/block_settlement_client.rs | 12 +++-- sequencer_core/src/block_store.rs | 14 +++++- sequencer_core/src/lib.rs | 27 ++++++++++- sequencer_rpc/src/process.rs | 33 +++++++++---- storage/src/lib.rs | 47 ++++++++++++++++++- 7 files changed, 151 insertions(+), 24 deletions(-) diff --git a/common/src/rpc_primitives/requests.rs b/common/src/rpc_primitives/requests.rs index 71641936..feb534ea 100644 --- a/common/src/rpc_primitives/requests.rs +++ b/common/src/rpc_primitives/requests.rs @@ -73,6 +73,11 @@ pub struct GetProofForCommitmentRequest { #[derive(Serialize, Deserialize, Debug)] pub struct GetProgramIdsRequest {} +#[derive(Serialize, Deserialize, Debug)] +pub struct DeleteFinalizedBlockRequest { + pub block_id: u64, +} + parse_request!(HelloRequest); parse_request!(RegisterAccountRequest); parse_request!(SendTxRequest); @@ -87,6 +92,7 @@ parse_request!(GetAccountsNoncesRequest); parse_request!(GetProofForCommitmentRequest); parse_request!(GetAccountRequest); parse_request!(GetProgramIdsRequest); +parse_request!(DeleteFinalizedBlockRequest); #[derive(Serialize, Deserialize, Debug)] pub struct HelloResponse { @@ -216,3 +222,6 @@ pub struct GetInitialTestnetAccountsResponse { pub account_id: String, pub balance: u64, } + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct DeleteFinalizedBlockResponse; diff --git a/common/src/sequencer_client.rs b/common/src/sequencer_client.rs index 0cb03f6f..ea110e37 100644 --- a/common/src/sequencer_client.rs +++ b/common/src/sequencer_client.rs @@ -15,12 +15,13 @@ use crate::{ rpc_primitives::{ self, requests::{ - GetAccountRequest, GetAccountResponse, GetAccountsNoncesRequest, - GetAccountsNoncesResponse, GetBlockRangeDataRequest, GetBlockRangeDataResponse, - GetInitialTestnetAccountsResponse, GetLastBlockRequest, GetLastBlockResponse, - GetProgramIdsRequest, GetProgramIdsResponse, GetProofForCommitmentRequest, - GetProofForCommitmentResponse, GetTransactionByHashRequest, - GetTransactionByHashResponse, SendTxRequest, SendTxResponse, + DeleteFinalizedBlockRequest, GetAccountRequest, GetAccountResponse, + GetAccountsNoncesRequest, GetAccountsNoncesResponse, GetBlockRangeDataRequest, + GetBlockRangeDataResponse, GetInitialTestnetAccountsResponse, GetLastBlockRequest, + GetLastBlockResponse, GetProgramIdsRequest, GetProgramIdsResponse, + GetProofForCommitmentRequest, GetProofForCommitmentResponse, + GetTransactionByHashRequest, GetTransactionByHashResponse, SendTxRequest, + SendTxResponse, }, }, transaction::{EncodedTransaction, NSSATransaction}, @@ -347,4 +348,24 @@ impl SequencerClient { Ok(resp_deser) } + + pub async fn delete_finalized_block( + &self, + block_id: u64, + ) -> Result, SequencerClientError> { + let acc_req = DeleteFinalizedBlockRequest { block_id }; + + let req = serde_json::to_value(acc_req).unwrap(); + + let resp = self + .call_method_with_payload("delete_finalized_block", req) + .await + .unwrap(); + + let resp_deser = serde_json::from_value::(resp) + .unwrap() + .program_ids; + + Ok(resp_deser) + } } diff --git a/sequencer_core/src/block_settlement_client.rs b/sequencer_core/src/block_settlement_client.rs index 58f4d7f4..03e9f164 100644 --- 
a/sequencer_core/src/block_settlement_client.rs
+++ b/sequencer_core/src/block_settlement_client.rs
@@ -33,6 +33,10 @@ impl BlockSettlementClient {
         })
     }
 
+    pub fn set_last_message_id(&mut self, msg_id: MsgId) {
+        self.last_message_id = msg_id;
+    }
+
     /// Create and sign a transaction for inscribing data
     pub fn create_inscribe_tx(&self, data: Vec<u8>) -> (SignedMantleTx, MsgId) {
         let verifying_key_bytes = self.bedrock_signing_key.public_key().to_bytes();
@@ -73,17 +77,15 @@ impl BlockSettlementClient {
         (signed_mantle_tx, inscribe_op_id)
     }
 
-    /// Post a transaction to the node and wait for inclusion
-    pub async fn post_and_wait(&mut self, block_data: &HashableBlockData) -> Result<u64> {
+    /// Post a transaction to the node
+    pub async fn post_transaction(&self, block_data: &HashableBlockData) -> Result<MsgId> {
         let inscription_data = borsh::to_vec(&block_data)?;
 
         let (tx, new_msg_id) = self.create_inscribe_tx(inscription_data);
 
         // Post the transaction
         self.bedrock_client.post_transaction(tx).await?;
 
-        self.last_message_id = new_msg_id;
-
-        Ok(block_data.block_id)
+        Ok(new_msg_id)
     }
 }
diff --git a/sequencer_core/src/block_store.rs b/sequencer_core/src/block_store.rs
index cd9aa194..e050f181 100644
--- a/sequencer_core/src/block_store.rs
+++ b/sequencer_core/src/block_store.rs
@@ -1,7 +1,11 @@
 use std::{collections::HashMap, path::Path};
 
 use anyhow::Result;
-use common::{HashType, block::Block, transaction::EncodedTransaction};
+use common::{
+    HashType,
+    block::{Block, BlockHash},
+    transaction::EncodedTransaction,
+};
 use storage::RocksDBIO;
 
 pub struct SequencerBlockStore {
@@ -56,6 +60,10 @@ impl SequencerBlockStore {
         Ok(())
     }
 
+    pub fn delete_block_at_id(&mut self, block_id: u64) -> Result<()> {
+        Ok(self.dbio.delete_block(block_id)?)
+    }
+
     /// Returns the transaction corresponding to the given hash, if it exists in the blockchain.
pub fn get_transaction_by_hash(&self, hash: HashType) -> Option { let block_id = self.tx_hash_to_block_map.get(&hash); @@ -81,6 +89,10 @@ impl SequencerBlockStore { pub fn signing_key(&self) -> &nssa::PrivateKey { &self.signing_key } + + pub(crate) fn get_pending_blocks(&self) -> impl Iterator> { + self.dbio.get_all_blocks().map(|res| Ok(res?)) + } } pub(crate) fn block_to_transactions_map(block: &Block) -> HashMap { diff --git a/sequencer_core/src/lib.rs b/sequencer_core/src/lib.rs index 89cafc4c..115baf80 100644 --- a/sequencer_core/src/lib.rs +++ b/sequencer_core/src/lib.rs @@ -5,12 +5,13 @@ use anyhow::Result; use common::PINATA_BASE58; use common::{ HashType, - block::HashableBlockData, + block::{BedrockStatus, Block, BlockHash, HashableBlockData}, transaction::{EncodedTransaction, NSSATransaction}, }; use config::SequencerConfig; use log::warn; use mempool::{MemPool, MemPoolHandle}; +use nomos_core::mantle::ops::channel::MsgId; use serde::{Deserialize, Serialize}; use crate::{block_settlement_client::BlockSettlementClient, block_store::SequencerBlockStore}; @@ -149,7 +150,8 @@ impl SequencerCore { let block_data = self.produce_new_block_with_mempool_transactions()?; if let Some(block_settlement) = self.block_settlement_client.as_mut() { - block_settlement.post_and_wait(&block_data).await?; + let msg_id = block_settlement.post_transaction(&block_data).await?; + block_settlement.set_last_message_id(msg_id); log::info!("Posted block data to Bedrock"); } @@ -235,6 +237,27 @@ impl SequencerCore { pub fn sequencer_config(&self) -> &SequencerConfig { &self.sequencer_config } + + pub fn delete_finalized_block_from_db(&mut self, block_id: u64) -> Result<()> { + self.block_store.delete_block_at_id(block_id) + } + + pub async fn resubmit_pending_blocks(&self) -> Result<()> { + for res in self.block_store.get_pending_blocks() { + let block = res?; + match block.bedrock_status { + BedrockStatus::Pending => { + if let Some(block_settlement) = self.block_settlement_client.as_ref() { + let block_data: HashableBlockData = block.into(); + block_settlement.post_transaction(&block_data).await?; + log::info!("Posted block data to Bedrock"); + } + } + _ => continue, + } + } + Ok(()) + } } // TODO: Introduce type-safe wrapper around checked transaction, e.g. 
AuthenticatedTransaction diff --git a/sequencer_rpc/src/process.rs b/sequencer_rpc/src/process.rs index b89993f9..dff4cb60 100644 --- a/sequencer_rpc/src/process.rs +++ b/sequencer_rpc/src/process.rs @@ -11,15 +11,15 @@ use common::{ message::{Message, Request}, parser::RpcRequest, requests::{ - GetAccountBalanceRequest, GetAccountBalanceResponse, GetAccountRequest, - GetAccountResponse, GetAccountsNoncesRequest, GetAccountsNoncesResponse, - GetBlockDataRequest, GetBlockDataResponse, GetBlockRangeDataRequest, - GetBlockRangeDataResponse, GetGenesisIdRequest, GetGenesisIdResponse, - GetInitialTestnetAccountsRequest, GetLastBlockRequest, GetLastBlockResponse, - GetProgramIdsRequest, GetProgramIdsResponse, GetProofForCommitmentRequest, - GetProofForCommitmentResponse, GetTransactionByHashRequest, - GetTransactionByHashResponse, HelloRequest, HelloResponse, SendTxRequest, - SendTxResponse, + DeleteFinalizedBlockRequest, DeleteFinalizedBlockResponse, GetAccountBalanceRequest, + GetAccountBalanceResponse, GetAccountRequest, GetAccountResponse, + GetAccountsNoncesRequest, GetAccountsNoncesResponse, GetBlockDataRequest, + GetBlockDataResponse, GetBlockRangeDataRequest, GetBlockRangeDataResponse, + GetGenesisIdRequest, GetGenesisIdResponse, GetInitialTestnetAccountsRequest, + GetLastBlockRequest, GetLastBlockResponse, GetProgramIdsRequest, GetProgramIdsResponse, + GetProofForCommitmentRequest, GetProofForCommitmentResponse, + GetTransactionByHashRequest, GetTransactionByHashResponse, HelloRequest, HelloResponse, + SendTxRequest, SendTxResponse, }, }, transaction::{EncodedTransaction, NSSATransaction}, @@ -44,6 +44,7 @@ pub const GET_ACCOUNTS_NONCES: &str = "get_accounts_nonces"; pub const GET_ACCOUNT: &str = "get_account"; pub const GET_PROOF_FOR_COMMITMENT: &str = "get_proof_for_commitment"; pub const GET_PROGRAM_IDS: &str = "get_program_ids"; +pub const DELETE_FINALIZED_BLOCK: &str = "delete_finalized_block"; pub const HELLO_FROM_SEQUENCER: &str = "HELLO_FROM_SEQUENCER"; @@ -314,6 +315,19 @@ impl JsonHandler { respond(response) } + async fn delete_finalized_block(&self, request: Request) -> Result { + let delete_finalized_block_req = DeleteFinalizedBlockRequest::parse(Some(request.params))?; + let block_id = delete_finalized_block_req.block_id; + + self.sequencer_state + .lock() + .await + .delete_finalized_block_from_db(block_id)?; + + let response = DeleteFinalizedBlockResponse; + respond(response) + } + pub async fn process_request_internal(&self, request: Request) -> Result { match request.method.as_ref() { HELLO => self.process_temp_hello(request).await, @@ -329,6 +343,7 @@ impl JsonHandler { GET_TRANSACTION_BY_HASH => self.process_get_transaction_by_hash(request).await, GET_PROOF_FOR_COMMITMENT => self.process_get_proof_by_commitment(request).await, GET_PROGRAM_IDS => self.process_get_program_ids(request).await, + DELETE_FINALIZED_BLOCK => self.delete_finalized_block(request).await, _ => Err(RpcErr(RpcError::method_not_found(request.method))), } } diff --git a/storage/src/lib.rs b/storage/src/lib.rs index 883684c2..498197b6 100644 --- a/storage/src/lib.rs +++ b/storage/src/lib.rs @@ -1,6 +1,6 @@ use std::{path::Path, sync::Arc}; -use common::block::Block; +use common::block::{Block, BlockHash}; use error::DbError; use rocksdb::{ BoundColumnFamily, ColumnFamilyDescriptor, DBWithThreadMode, MultiThreaded, Options, @@ -362,4 +362,49 @@ impl RocksDBIO { )) } } + + pub fn delete_block(&self, block_id: u64) -> DbResult<()> { + let cf_block = self.block_column(); + let key = 
borsh::to_vec(&block_id).map_err(|err| {
+            DbError::borsh_cast_message(err, Some("Failed to serialize block id".to_string()))
+        })?;
+
+        if self
+            .db
+            .get_cf(&cf_block, &key)
+            .map_err(|rerr| DbError::rocksdb_cast_message(rerr, None))?
+            .is_none()
+        {
+            return Err(DbError::db_interaction_error(
+                "Block on this id not found".to_string(),
+            ));
+        }
+
+        self.db
+            .delete_cf(&cf_block, key)
+            .map_err(|rerr| DbError::rocksdb_cast_message(rerr, None))?;
+
+        Ok(())
+    }
+
+    pub fn get_all_blocks(&self) -> impl Iterator<Item = DbResult<Block>> {
+        let cf_block = self.block_column();
+        self.db
+            .iterator_cf(&cf_block, rocksdb::IteratorMode::Start)
+            .map(|res| {
+                let (_key, value) = res.map_err(|rerr| {
+                    DbError::rocksdb_cast_message(
+                        rerr,
+                        Some("Failed to get key value pair".to_string()),
+                    )
+                })?;
+
+                borsh::from_slice::<Block>(&value).map_err(|err| {
+                    DbError::borsh_cast_message(
+                        err,
+                        Some("Failed to deserialize block data".to_string()),
+                    )
+                })
+            })
+    }
 }

From e897fd607651d0c9582e2b2124d600646c463a2a Mon Sep 17 00:00:00 2001
From: Sergio Chouhy
Date: Mon, 26 Jan 2026 14:51:39 -0300
Subject: [PATCH 02/20] fix parent when resubmitting pending blocks

---
 Cargo.lock                                    |  1 +
 common/Cargo.toml                             |  1 +
 common/src/block.rs                           | 29 ++++++++++++++++++-
 common/src/test_utils.rs                      |  4 ++-
 sequencer_core/src/block_settlement_client.rs | 21 +++++++++-----
 sequencer_core/src/block_store.rs             |  4 ++-
 sequencer_core/src/lib.rs                     | 21 ++++++++++----
 7 files changed, 64 insertions(+), 17 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index 16baf5a4..c8f0b26a 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1319,6 +1319,7 @@ dependencies = [
  "borsh",
  "hex",
  "log",
+ "logos-blockchain-core",
  "nssa",
  "nssa_core",
  "reqwest",
diff --git a/common/Cargo.toml b/common/Cargo.toml
index a6e26fad..96f267df 100644
--- a/common/Cargo.toml
+++ b/common/Cargo.toml
@@ -17,3 +17,4 @@ log.workspace = true
 hex.workspace = true
 borsh.workspace = true
 base64.workspace = true
+logos-blockchain-core.workspace = true
diff --git a/common/src/block.rs b/common/src/block.rs
index 84b7a419..e7659fbf 100644
--- a/common/src/block.rs
+++ b/common/src/block.rs
@@ -1,4 +1,5 @@
 use borsh::{BorshDeserialize, BorshSerialize};
+use logos_blockchain_core::mantle::ops::channel::MsgId;
 use sha2::{Digest, Sha256, digest::FixedOutput};
 
 use crate::transaction::EncodedTransaction;
@@ -49,6 +50,11 @@ pub struct Block {
     pub header: BlockHeader,
     pub body: BlockBody,
     pub bedrock_status: BedrockStatus,
+    #[borsh(
+        serialize_with = "borsh_msg_id::serialize",
+        deserialize_with = "borsh_msg_id::deserialize"
+    )]
+    pub bedrock_parent_id: MsgId,
 }
 
 #[derive(Debug, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)]
@@ -60,7 +66,11 @@ pub struct HashableBlockData {
 }
 
 impl HashableBlockData {
-    pub fn into_pending_block(self, signing_key: &nssa::PrivateKey) -> Block {
+    pub fn into_pending_block(
+        self,
+        signing_key: &nssa::PrivateKey,
+        bedrock_parent_id: MsgId,
+    ) -> Block {
         let data_bytes = borsh::to_vec(&self).unwrap();
         let signature = nssa::Signature::new(signing_key, &data_bytes);
         let hash = OwnHasher::hash(&data_bytes);
@@ -76,6 +86,7 @@ impl HashableBlockData {
                 transactions: self.transactions,
             },
             bedrock_status: BedrockStatus::Pending,
+            bedrock_parent_id,
         }
     }
 }
@@ -91,6 +102,22 @@ impl From<Block> for HashableBlockData {
     }
 }
 
+mod borsh_msg_id {
+    use std::io::{Read, Write};
+
+    use logos_blockchain_core::mantle::ops::channel::MsgId;
+
+    pub fn serialize<W: Write>(v: &MsgId, w: &mut W) -> std::io::Result<()> {
+        w.write_all(v.as_ref())
+    }
+
+    pub fn deserialize<R: Read>(r: &mut R) ->
std::io::Result { + let mut buf = [0u8; 32]; + r.read_exact(&mut buf)?; + Ok(MsgId::from(buf)) + } +} + #[cfg(test)] mod tests { use crate::{block::HashableBlockData, test_utils}; diff --git a/common/src/test_utils.rs b/common/src/test_utils.rs index 1125b86e..8ae97599 100644 --- a/common/src/test_utils.rs +++ b/common/src/test_utils.rs @@ -1,3 +1,5 @@ +use logos_blockchain_core::mantle::ops::channel::MsgId; + use crate::{ block::{Block, HashableBlockData}, transaction::{EncodedTransaction, NSSATransaction}, @@ -30,7 +32,7 @@ pub fn produce_dummy_block( transactions, }; - block_data.into_pending_block(&sequencer_sign_key_for_testing()) + block_data.into_pending_block(&sequencer_sign_key_for_testing(), MsgId::from([0; 32])) } pub fn produce_dummy_empty_transaction() -> EncodedTransaction { diff --git a/sequencer_core/src/block_settlement_client.rs b/sequencer_core/src/block_settlement_client.rs index 8a01b934..62070ee2 100644 --- a/sequencer_core/src/block_settlement_client.rs +++ b/sequencer_core/src/block_settlement_client.rs @@ -2,7 +2,7 @@ use std::{fs, path::Path}; use anyhow::{Result, anyhow}; use bedrock_client::BedrockClient; -use common::block::HashableBlockData; +use common::block::{Block, HashableBlockData}; use logos_blockchain_core::mantle::{ MantleTx, Op, OpProof, SignedMantleTx, Transaction, TxHash, ledger, ops::channel::{ChannelId, MsgId, inscribe::InscriptionOp}, @@ -39,16 +39,22 @@ impl BlockSettlementClient { self.last_message_id = msg_id; } + + pub fn last_message_id(&self) -> MsgId { + self.last_message_id + } + /// Create and sign a transaction for inscribing data - pub fn create_inscribe_tx(&self, data: Vec) -> (SignedMantleTx, MsgId) { + pub fn create_inscribe_tx(&self, block: &Block) -> Result<(SignedMantleTx, MsgId)> { + let inscription_data = borsh::to_vec(block)?; let verifying_key_bytes = self.bedrock_signing_key.public_key().to_bytes(); let verifying_key = Ed25519PublicKey::from_bytes(&verifying_key_bytes).expect("valid ed25519 public key"); let inscribe_op = InscriptionOp { channel_id: self.bedrock_channel_id, - inscription: data, - parent: self.last_message_id, + inscription: inscription_data, + parent: block.bedrock_parent_id, signer: verifying_key, }; let inscribe_op_id = inscribe_op.id(); @@ -78,13 +84,12 @@ impl BlockSettlementClient { ledger_tx_proof: empty_ledger_signature(&tx_hash), mantle_tx: inscribe_tx, }; - (signed_mantle_tx, inscribe_op_id) + Ok((signed_mantle_tx, inscribe_op_id)) } /// Post a transaction to the node - pub async fn post_transaction(&self, block_data: &HashableBlockData) -> Result { - let inscription_data = borsh::to_vec(&block_data)?; - let (tx, new_msg_id) = self.create_inscribe_tx(inscription_data); + pub async fn submit_block_to_bedrock(&self, block: &Block) -> Result { + let (tx, new_msg_id) = self.create_inscribe_tx(block)?; // Post the transaction self.bedrock_client.post_transaction(tx).await?; diff --git a/sequencer_core/src/block_store.rs b/sequencer_core/src/block_store.rs index e050f181..92543be7 100644 --- a/sequencer_core/src/block_store.rs +++ b/sequencer_core/src/block_store.rs @@ -107,6 +107,7 @@ pub(crate) fn block_to_transactions_map(block: &Block) -> HashMap #[cfg(test)] mod tests { use common::{block::HashableBlockData, test_utils::sequencer_sign_key_for_testing}; + use logos_blockchain_core::mantle::ops::channel::MsgId; use tempfile::tempdir; use super::*; @@ -125,7 +126,8 @@ mod tests { transactions: vec![], }; - let genesis_block = genesis_block_hashable_data.into_pending_block(&signing_key); + let 
genesis_block = + genesis_block_hashable_data.into_pending_block(&signing_key, MsgId::from([0; 32])); // Start an empty node store let mut node_store = SequencerBlockStore::open_db_with_genesis(path, Some(genesis_block), signing_key) diff --git a/sequencer_core/src/lib.rs b/sequencer_core/src/lib.rs index 115baf80..5183ac43 100644 --- a/sequencer_core/src/lib.rs +++ b/sequencer_core/src/lib.rs @@ -10,8 +10,8 @@ use common::{ }; use config::SequencerConfig; use log::warn; +use logos_blockchain_core::mantle::ops::channel::MsgId; use mempool::{MemPool, MemPoolHandle}; -use nomos_core::mantle::ops::channel::MsgId; use serde::{Deserialize, Serialize}; use crate::{block_settlement_client::BlockSettlementClient, block_store::SequencerBlockStore}; @@ -54,7 +54,8 @@ impl SequencerCore { }; let signing_key = nssa::PrivateKey::try_new(config.signing_key).unwrap(); - let genesis_block = hashable_data.into_pending_block(&signing_key); + let channel_genesis_msg = MsgId::from([0; 32]); + let genesis_block = hashable_data.into_pending_block(&signing_key, channel_genesis_msg); // Sequencer should panic if unable to open db, // as fixing this issue may require actions non-native to program scope @@ -150,7 +151,10 @@ impl SequencerCore { let block_data = self.produce_new_block_with_mempool_transactions()?; if let Some(block_settlement) = self.block_settlement_client.as_mut() { - let msg_id = block_settlement.post_transaction(&block_data).await?; + let last_message_id = block_settlement.last_message_id(); + let block = + block_data.into_pending_block(self.block_store.signing_key(), last_message_id); + let msg_id = block_settlement.submit_block_to_bedrock(&block).await?; block_settlement.set_last_message_id(msg_id); log::info!("Posted block data to Bedrock"); } @@ -196,9 +200,15 @@ impl SequencerCore { timestamp: curr_time, }; + let bedrock_parent_id = self + .block_settlement_client + .as_ref() + .map(|client| client.last_message_id()) + .unwrap_or(MsgId::from([0; 32])); + let block = hashable_data .clone() - .into_pending_block(self.block_store.signing_key()); + .into_pending_block(self.block_store.signing_key(), bedrock_parent_id); self.block_store.put_block_at_id(block)?; @@ -248,8 +258,7 @@ impl SequencerCore { match block.bedrock_status { BedrockStatus::Pending => { if let Some(block_settlement) = self.block_settlement_client.as_ref() { - let block_data: HashableBlockData = block.into(); - block_settlement.post_transaction(&block_data).await?; + block_settlement.submit_block_to_bedrock(&block).await?; log::info!("Posted block data to Bedrock"); } } From fe4a89191cf13da41ebe38c15bb1c255f7420904 Mon Sep 17 00:00:00 2001 From: Sergio Chouhy Date: Tue, 27 Jan 2026 01:20:17 -0300 Subject: [PATCH 03/20] add state to rocksdb --- Cargo.lock | 1 + nssa/core/src/commitment.rs | 2 +- nssa/core/src/nullifier.rs | 2 +- nssa/src/merkle_tree/mod.rs | 2 + nssa/src/program.rs | 3 +- nssa/src/state.rs | 46 ++++++++++- sequencer_core/src/block_store.rs | 15 ++-- sequencer_core/src/lib.rs | 2 +- storage/Cargo.toml | 1 + storage/src/lib.rs | 124 ++++++++++++++++-------------- 10 files changed, 125 insertions(+), 73 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c8f0b26a..f17e34f1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5455,6 +5455,7 @@ version = "0.1.0" dependencies = [ "borsh", "common", + "nssa", "rocksdb", "thiserror 2.0.17", ] diff --git a/nssa/core/src/commitment.rs b/nssa/core/src/commitment.rs index 52344177..90f3132c 100644 --- a/nssa/core/src/commitment.rs +++ b/nssa/core/src/commitment.rs @@ -5,7 
+5,7 @@ use serde::{Deserialize, Serialize};
 
 use crate::{NullifierPublicKey, account::Account};
 
 #[derive(Serialize, Deserialize, BorshSerialize, BorshDeserialize)]
-#[cfg_attr(any(feature = "host", test), derive(Debug, Clone, PartialEq, Eq, Hash))]
+#[cfg_attr(any(feature = "host", test), derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord))]
 pub struct Commitment(pub(super) [u8; 32]);
 
 /// A commitment to all zero data.
diff --git a/nssa/core/src/nullifier.rs b/nssa/core/src/nullifier.rs
index 8d9d59fa..09446779 100644
--- a/nssa/core/src/nullifier.rs
+++ b/nssa/core/src/nullifier.rs
@@ -42,7 +42,7 @@ impl From<&NullifierSecretKey> for NullifierPublicKey {
 pub type NullifierSecretKey = [u8; 32];
 
 #[derive(Serialize, Deserialize, BorshSerialize, BorshDeserialize)]
-#[cfg_attr(any(feature = "host", test), derive(Debug, Clone, PartialEq, Eq, Hash))]
+#[cfg_attr(any(feature = "host", test), derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash))]
 pub struct Nullifier(pub(super) [u8; 32]);
 
 impl Nullifier {
diff --git a/nssa/src/merkle_tree/mod.rs b/nssa/src/merkle_tree/mod.rs
index c4501cf8..b3637b13 100644
--- a/nssa/src/merkle_tree/mod.rs
+++ b/nssa/src/merkle_tree/mod.rs
@@ -1,3 +1,4 @@
+use borsh::{BorshDeserialize, BorshSerialize};
 use sha2::{Digest, Sha256};
 
 mod default_values;
@@ -20,6 +21,7 @@ fn hash_value(value: &Value) -> Node {
 }
 
 #[cfg_attr(test, derive(Debug, PartialEq, Eq))]
+#[derive(BorshSerialize, BorshDeserialize)]
 pub struct MerkleTree {
     nodes: Vec<Node>,
     capacity: usize,
diff --git a/nssa/src/program.rs b/nssa/src/program.rs
index 943b16ed..06c7ad29 100644
--- a/nssa/src/program.rs
+++ b/nssa/src/program.rs
@@ -1,3 +1,4 @@
+use borsh::{BorshDeserialize, BorshSerialize};
 use nssa_core::{
     account::AccountWithMetadata,
     program::{InstructionData, ProgramId, ProgramOutput},
@@ -14,7 +15,7 @@ use crate::{
 /// TODO: Make this variable when fees are implemented
 const MAX_NUM_CYCLES_PUBLIC_EXECUTION: u64 = 1024 * 1024 * 32; // 32M cycles
 
-#[derive(Clone, Debug, PartialEq, Eq)]
+#[derive(Clone, Debug, PartialEq, Eq, BorshSerialize, BorshDeserialize)]
 pub struct Program {
     id: ProgramId,
     elf: Vec<u8>,
diff --git a/nssa/src/state.rs b/nssa/src/state.rs
index 1a384b2f..9888f583 100644
--- a/nssa/src/state.rs
+++ b/nssa/src/state.rs
@@ -1,4 +1,4 @@
-use std::collections::{HashMap, HashSet};
+use std::collections::{BTreeSet, HashMap, HashSet};
 
 use nssa_core::{
     Commitment, CommitmentSetDigest, DUMMY_COMMITMENT, MembershipProof, Nullifier,
@@ -15,6 +15,7 @@ use crate::{
 
 pub const MAX_NUMBER_CHAINED_CALLS: usize = 10;
 
+#[derive(BorshSerialize, BorshDeserialize)]
 pub(crate) struct CommitmentSet {
     merkle_tree: MerkleTree,
     commitments: HashMap,
@@ -60,8 +61,49 @@ impl CommitmentSet {
     }
 }
 
-type NullifierSet = HashSet<Nullifier>;
+struct NullifierSet(BTreeSet<Nullifier>);
+
+impl NullifierSet {
+    fn new() -> Self {
+        Self(BTreeSet::new())
+    }
+
+    fn extend(&mut self, new_nullifiers: Vec<Nullifier>) {
+        self.0.extend(new_nullifiers.into_iter());
+    }
+
+    fn contains(&self, nullifier: &Nullifier) -> bool {
+        self.0.contains(nullifier)
+    }
+}
+
+impl BorshSerialize for NullifierSet {
+    fn serialize<W: std::io::Write>(&self, writer: &mut W) -> std::io::Result<()> {
+        self.0.iter().collect::<Vec<_>>().serialize(writer)
+    }
+}
+
+impl BorshDeserialize for NullifierSet {
+    fn deserialize_reader<R: std::io::Read>(reader: &mut R) -> std::io::Result<Self> {
+        let vec = Vec::<Nullifier>::deserialize_reader(reader)?;
+
+        let mut set = BTreeSet::new();
+        for n in vec {
+            if !set.insert(n) {
+                return Err(std::io::Error::new(
+                    std::io::ErrorKind::InvalidData,
+                    "duplicate nullifier in NullifierSet",
+                ));
} + } + + Ok(Self(set)) + } +} + +use borsh::{BorshDeserialize, BorshSerialize}; + +#[derive(BorshSerialize, BorshDeserialize)] pub struct V02State { public_state: HashMap, private_state: (CommitmentSet, NullifierSet), diff --git a/sequencer_core/src/block_store.rs b/sequencer_core/src/block_store.rs index 92543be7..108538c2 100644 --- a/sequencer_core/src/block_store.rs +++ b/sequencer_core/src/block_store.rs @@ -6,6 +6,7 @@ use common::{ block::{Block, BlockHash}, transaction::EncodedTransaction, }; +use nssa::V02State; use storage::RocksDBIO; pub struct SequencerBlockStore { @@ -53,13 +54,6 @@ impl SequencerBlockStore { Ok(self.dbio.get_block(id)?) } - pub fn put_block_at_id(&mut self, block: Block) -> Result<()> { - let new_transactions_map = block_to_transactions_map(&block); - self.dbio.put_block(block, false)?; - self.tx_hash_to_block_map.extend(new_transactions_map); - Ok(()) - } - pub fn delete_block_at_id(&mut self, block_id: u64) -> Result<()> { Ok(self.dbio.delete_block(block_id)?) } @@ -93,6 +87,10 @@ impl SequencerBlockStore { pub(crate) fn get_pending_blocks(&self) -> impl Iterator> { self.dbio.get_all_blocks().map(|res| Ok(res?)) } + + pub(crate) fn update(&self, block: Block, state: &V02State) -> Result<()> { + Ok(self.dbio.atomic_update(block, state)?) + } } pub(crate) fn block_to_transactions_map(block: &Block) -> HashMap { @@ -140,7 +138,8 @@ mod tests { let retrieved_tx = node_store.get_transaction_by_hash(tx.hash()); assert_eq!(None, retrieved_tx); // Add the block with the transaction - node_store.put_block_at_id(block).unwrap(); + let dummy_state = V02State::new_with_genesis_accounts(&[], &[]); + node_store.update(block, &dummy_state).unwrap(); // Try again let retrieved_tx = node_store.get_transaction_by_hash(tx.hash()); assert_eq!(Some(tx), retrieved_tx); diff --git a/sequencer_core/src/lib.rs b/sequencer_core/src/lib.rs index 5183ac43..f161521c 100644 --- a/sequencer_core/src/lib.rs +++ b/sequencer_core/src/lib.rs @@ -210,7 +210,7 @@ impl SequencerCore { .clone() .into_pending_block(self.block_store.signing_key(), bedrock_parent_id); - self.block_store.put_block_at_id(block)?; + self.block_store.update(block, &self.state)?; self.chain_height = new_block_height; diff --git a/storage/Cargo.toml b/storage/Cargo.toml index 4678560e..98257526 100644 --- a/storage/Cargo.toml +++ b/storage/Cargo.toml @@ -9,3 +9,4 @@ common.workspace = true thiserror.workspace = true borsh.workspace = true rocksdb.workspace = true +nssa.workspace = true diff --git a/storage/src/lib.rs b/storage/src/lib.rs index 498197b6..27d1f38a 100644 --- a/storage/src/lib.rs +++ b/storage/src/lib.rs @@ -1,9 +1,10 @@ use std::{path::Path, sync::Arc}; -use common::block::{Block, BlockHash}; +use common::block::Block; use error::DbError; +use nssa::V02State; use rocksdb::{ - BoundColumnFamily, ColumnFamilyDescriptor, DBWithThreadMode, MultiThreaded, Options, + BoundColumnFamily, ColumnFamilyDescriptor, DBWithThreadMode, MultiThreaded, Options, WriteBatch, }; pub mod error; @@ -29,15 +30,15 @@ pub const DB_META_FIRST_BLOCK_SET_KEY: &str = "first_block_set"; /// Key base for storing metainformation about the last finalized block on Bedrock pub const DB_META_LAST_FINALIZED_BLOCK_ID: &str = "last_finalized_block_id"; -/// Key base for storing snapshot which describe block id -pub const DB_SNAPSHOT_BLOCK_ID_KEY: &str = "block_id"; +/// Key base for storing the NSSA state +pub const DB_NSSA_STATE_KEY: &str = "nssa_state"; /// Name of block column family pub const CF_BLOCK_NAME: &str = "cf_block"; /// Name of 
meta column family pub const CF_META_NAME: &str = "cf_meta"; -/// Name of snapshot column family -pub const CF_SNAPSHOT_NAME: &str = "cf_snapshot"; +/// Name of state column family +pub const CF_NSSA_STATE_NAME: &str = "cf_state"; pub type DbResult = Result; @@ -52,7 +53,7 @@ impl RocksDBIO { // ToDo: Add more column families for different data let cfb = ColumnFamilyDescriptor::new(CF_BLOCK_NAME, cf_opts.clone()); let cfmeta = ColumnFamilyDescriptor::new(CF_META_NAME, cf_opts.clone()); - let cfsnapshot = ColumnFamilyDescriptor::new(CF_SNAPSHOT_NAME, cf_opts.clone()); + let cfstate = ColumnFamilyDescriptor::new(CF_NSSA_STATE_NAME, cf_opts.clone()); let mut db_opts = Options::default(); db_opts.create_missing_column_families(true); @@ -60,7 +61,7 @@ impl RocksDBIO { let db = DBWithThreadMode::::open_cf_descriptors( &db_opts, path, - vec![cfb, cfmeta, cfsnapshot], + vec![cfb, cfmeta, cfstate], ); let dbio = Self { @@ -92,7 +93,7 @@ impl RocksDBIO { // ToDo: Add more column families for different data let _cfb = ColumnFamilyDescriptor::new(CF_BLOCK_NAME, cf_opts.clone()); let _cfmeta = ColumnFamilyDescriptor::new(CF_META_NAME, cf_opts.clone()); - let _cfsnapshot = ColumnFamilyDescriptor::new(CF_SNAPSHOT_NAME, cf_opts.clone()); + let _cfstate = ColumnFamilyDescriptor::new(CF_NSSA_STATE_NAME, cf_opts.clone()); let mut db_opts = Options::default(); db_opts.create_missing_column_families(true); @@ -109,8 +110,8 @@ impl RocksDBIO { self.db.cf_handle(CF_BLOCK_NAME).unwrap() } - pub fn snapshot_column(&self) -> Arc> { - self.db.cf_handle(CF_SNAPSHOT_NAME).unwrap() + pub fn nssa_state_column(&self) -> Arc> { + self.db.cf_handle(CF_NSSA_STATE_NAME).unwrap() } pub fn get_meta_first_block_in_db(&self) -> DbResult { @@ -189,6 +190,27 @@ impl RocksDBIO { Ok(res.is_some()) } + pub fn put_nssa_state_in_db(&self, state: &V02State, batch: &mut WriteBatch) -> DbResult<()> { + let cf_state = self.nssa_state_column(); + batch.put_cf( + &cf_state, + borsh::to_vec(&DB_NSSA_STATE_KEY).map_err(|err| { + DbError::borsh_cast_message( + err, + Some("Failed to serialize DB_NSSA_STATE_KEY".to_string()), + ) + })?, + borsh::to_vec(state).map_err(|err| { + DbError::borsh_cast_message( + err, + Some("Failed to serialize first block id".to_string()), + ) + })?, + ); + + Ok(()) + } + pub fn put_meta_first_block_in_db(&self, block: Block) -> DbResult<()> { let cf_meta = self.meta_column(); self.db @@ -209,7 +231,15 @@ impl RocksDBIO { ) .map_err(|rerr| DbError::rocksdb_cast_message(rerr, None))?; - self.put_block(block, true)?; + let mut batch = WriteBatch::default(); + self.put_block(block, true, &mut batch)?; + self.db.write(batch).map_err(|rerr| { + DbError::rocksdb_cast_message( + rerr, + Some("Failed to write first block in db".to_string()), + ) + }); + Ok(()) } @@ -274,7 +304,7 @@ impl RocksDBIO { Ok(()) } - pub fn put_block(&self, block: Block, first: bool) -> DbResult<()> { + pub fn put_block(&self, block: Block, first: bool, batch: &mut WriteBatch) -> DbResult<()> { let cf_block = self.block_column(); if !first { @@ -285,23 +315,15 @@ impl RocksDBIO { } } - self.db - .put_cf( - &cf_block, - borsh::to_vec(&block.header.block_id).map_err(|err| { - DbError::borsh_cast_message( - err, - Some("Failed to serialize block id".to_string()), - ) - })?, - borsh::to_vec(&block).map_err(|err| { - DbError::borsh_cast_message( - err, - Some("Failed to serialize block data".to_string()), - ) - })?, - ) - .map_err(|rerr| DbError::rocksdb_cast_message(rerr, None))?; + batch.put_cf( + &cf_block, + 
borsh::to_vec(&block.header.block_id).map_err(|err| { + DbError::borsh_cast_message(err, Some("Failed to serialize block id".to_string())) + })?, + borsh::to_vec(&block).map_err(|err| { + DbError::borsh_cast_message(err, Some("Failed to serialize block data".to_string())) + })?, + ); Ok(()) } @@ -334,35 +356,6 @@ impl RocksDBIO { } } - pub fn get_snapshot_block_id(&self) -> DbResult { - let cf_snapshot = self.snapshot_column(); - let res = self - .db - .get_cf( - &cf_snapshot, - borsh::to_vec(&DB_SNAPSHOT_BLOCK_ID_KEY).map_err(|err| { - DbError::borsh_cast_message( - err, - Some("Failed to serialize DB_SNAPSHOT_BLOCK_ID_KEY".to_string()), - ) - })?, - ) - .map_err(|rerr| DbError::rocksdb_cast_message(rerr, None))?; - - if let Some(data) = res { - Ok(borsh::from_slice::(&data).map_err(|err| { - DbError::borsh_cast_message( - err, - Some("Failed to deserialize last block".to_string()), - ) - })?) - } else { - Err(DbError::db_interaction_error( - "Snapshot block ID not found".to_string(), - )) - } - } - pub fn delete_block(&self, block_id: u64) -> DbResult<()> { let cf_block = self.block_column(); let key = borsh::to_vec(&block_id).map_err(|err| { @@ -407,4 +400,17 @@ impl RocksDBIO { }) }) } + + pub fn atomic_update(&self, block: Block, state: &V02State) -> DbResult<()> { + let block_id = block.header.block_id; + let mut batch = WriteBatch::default(); + self.put_block(block, false, &mut batch)?; + self.put_nssa_state_in_db(state, &mut batch)?; + self.db.write(batch).map_err(|rerr| { + DbError::rocksdb_cast_message( + rerr, + Some(format!("Failed to udpate db with block {block_id}")), + ) + }) + } } From d7cac557af56c7ac48d0062f19a9904a0da9f439 Mon Sep 17 00:00:00 2001 From: Sergio Chouhy Date: Tue, 27 Jan 2026 10:09:34 -0300 Subject: [PATCH 04/20] fix test --- common/src/rpc_primitives/requests.rs | 9 --- common/src/sequencer_client.rs | 33 ++------- nssa/core/src/commitment.rs | 5 +- nssa/core/src/nullifier.rs | 5 +- sequencer_core/src/block_settlement_client.rs | 1 - sequencer_core/src/block_store.rs | 22 +++--- sequencer_core/src/lib.rs | 69 ++++++++----------- sequencer_rpc/src/process.rs | 33 +++------ storage/src/lib.rs | 2 +- 9 files changed, 61 insertions(+), 118 deletions(-) diff --git a/common/src/rpc_primitives/requests.rs b/common/src/rpc_primitives/requests.rs index feb534ea..71641936 100644 --- a/common/src/rpc_primitives/requests.rs +++ b/common/src/rpc_primitives/requests.rs @@ -73,11 +73,6 @@ pub struct GetProofForCommitmentRequest { #[derive(Serialize, Deserialize, Debug)] pub struct GetProgramIdsRequest {} -#[derive(Serialize, Deserialize, Debug)] -pub struct DeleteFinalizedBlockRequest { - pub block_id: u64, -} - parse_request!(HelloRequest); parse_request!(RegisterAccountRequest); parse_request!(SendTxRequest); @@ -92,7 +87,6 @@ parse_request!(GetAccountsNoncesRequest); parse_request!(GetProofForCommitmentRequest); parse_request!(GetAccountRequest); parse_request!(GetProgramIdsRequest); -parse_request!(DeleteFinalizedBlockRequest); #[derive(Serialize, Deserialize, Debug)] pub struct HelloResponse { @@ -222,6 +216,3 @@ pub struct GetInitialTestnetAccountsResponse { pub account_id: String, pub balance: u64, } - -#[derive(Debug, Serialize, Deserialize, Clone)] -pub struct DeleteFinalizedBlockResponse; diff --git a/common/src/sequencer_client.rs b/common/src/sequencer_client.rs index ea110e37..0cb03f6f 100644 --- a/common/src/sequencer_client.rs +++ b/common/src/sequencer_client.rs @@ -15,13 +15,12 @@ use crate::{ rpc_primitives::{ self, requests::{ - 
DeleteFinalizedBlockRequest, GetAccountRequest, GetAccountResponse, - GetAccountsNoncesRequest, GetAccountsNoncesResponse, GetBlockRangeDataRequest, - GetBlockRangeDataResponse, GetInitialTestnetAccountsResponse, GetLastBlockRequest, - GetLastBlockResponse, GetProgramIdsRequest, GetProgramIdsResponse, - GetProofForCommitmentRequest, GetProofForCommitmentResponse, - GetTransactionByHashRequest, GetTransactionByHashResponse, SendTxRequest, - SendTxResponse, + GetAccountRequest, GetAccountResponse, GetAccountsNoncesRequest, + GetAccountsNoncesResponse, GetBlockRangeDataRequest, GetBlockRangeDataResponse, + GetInitialTestnetAccountsResponse, GetLastBlockRequest, GetLastBlockResponse, + GetProgramIdsRequest, GetProgramIdsResponse, GetProofForCommitmentRequest, + GetProofForCommitmentResponse, GetTransactionByHashRequest, + GetTransactionByHashResponse, SendTxRequest, SendTxResponse, }, }, transaction::{EncodedTransaction, NSSATransaction}, @@ -348,24 +347,4 @@ impl SequencerClient { Ok(resp_deser) } - - pub async fn delete_finalized_block( - &self, - block_id: u64, - ) -> Result, SequencerClientError> { - let acc_req = DeleteFinalizedBlockRequest { block_id }; - - let req = serde_json::to_value(acc_req).unwrap(); - - let resp = self - .call_method_with_payload("delete_finalized_block", req) - .await - .unwrap(); - - let resp_deser = serde_json::from_value::(resp) - .unwrap() - .program_ids; - - Ok(resp_deser) - } } diff --git a/nssa/core/src/commitment.rs b/nssa/core/src/commitment.rs index 90f3132c..b08e3005 100644 --- a/nssa/core/src/commitment.rs +++ b/nssa/core/src/commitment.rs @@ -5,7 +5,10 @@ use serde::{Deserialize, Serialize}; use crate::{NullifierPublicKey, account::Account}; #[derive(Serialize, Deserialize, BorshSerialize, BorshDeserialize)] -#[cfg_attr(any(feature = "host", test), derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord))] +#[cfg_attr( + any(feature = "host", test), + derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord) +)] pub struct Commitment(pub(super) [u8; 32]); /// A commitment to all zero data. 
diff --git a/nssa/core/src/nullifier.rs b/nssa/core/src/nullifier.rs index 09446779..5c420cb1 100644 --- a/nssa/core/src/nullifier.rs +++ b/nssa/core/src/nullifier.rs @@ -42,7 +42,10 @@ impl From<&NullifierSecretKey> for NullifierPublicKey { pub type NullifierSecretKey = [u8; 32]; #[derive(Serialize, Deserialize, BorshSerialize, BorshDeserialize)] -#[cfg_attr(any(feature = "host", test), derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash))] +#[cfg_attr( + any(feature = "host", test), + derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash) +)] pub struct Nullifier(pub(super) [u8; 32]); impl Nullifier { diff --git a/sequencer_core/src/block_settlement_client.rs b/sequencer_core/src/block_settlement_client.rs index 62070ee2..13c45b9a 100644 --- a/sequencer_core/src/block_settlement_client.rs +++ b/sequencer_core/src/block_settlement_client.rs @@ -39,7 +39,6 @@ impl BlockSettlementClient { self.last_message_id = msg_id; } - pub fn last_message_id(&self) -> MsgId { self.last_message_id } diff --git a/sequencer_core/src/block_store.rs b/sequencer_core/src/block_store.rs index 108538c2..f81fb121 100644 --- a/sequencer_core/src/block_store.rs +++ b/sequencer_core/src/block_store.rs @@ -1,15 +1,11 @@ use std::{collections::HashMap, path::Path}; use anyhow::Result; -use common::{ - HashType, - block::{Block, BlockHash}, - transaction::EncodedTransaction, -}; +use common::{HashType, block::Block, transaction::EncodedTransaction}; use nssa::V02State; use storage::RocksDBIO; -pub struct SequencerBlockStore { +pub struct SequencerStore { dbio: RocksDBIO, // TODO: Consider adding the hashmap to the database for faster recovery. tx_hash_to_block_map: HashMap, @@ -17,7 +13,7 @@ pub struct SequencerBlockStore { signing_key: nssa::PrivateKey, } -impl SequencerBlockStore { +impl SequencerStore { /// Starting database at the start of new chain. /// Creates files if necessary. /// @@ -47,7 +43,7 @@ impl SequencerBlockStore { /// Reopening existing database pub fn open_db_restart(location: &Path, signing_key: nssa::PrivateKey) -> Result { - SequencerBlockStore::open_db_with_genesis(location, None, signing_key) + SequencerStore::open_db_with_genesis(location, None, signing_key) } pub fn get_block_at_id(&self, id: u64) -> Result { @@ -88,8 +84,11 @@ impl SequencerBlockStore { self.dbio.get_all_blocks().map(|res| Ok(res?)) } - pub(crate) fn update(&self, block: Block, state: &V02State) -> Result<()> { - Ok(self.dbio.atomic_update(block, state)?) 
+ pub(crate) fn update(&mut self, block: Block, state: &V02State) -> Result<()> { + let new_transactions_map = block_to_transactions_map(&block); + self.dbio.atomic_update(block, state)?; + self.tx_hash_to_block_map.extend(new_transactions_map); + Ok(()) } } @@ -128,8 +127,7 @@ mod tests { genesis_block_hashable_data.into_pending_block(&signing_key, MsgId::from([0; 32])); // Start an empty node store let mut node_store = - SequencerBlockStore::open_db_with_genesis(path, Some(genesis_block), signing_key) - .unwrap(); + SequencerStore::open_db_with_genesis(path, Some(genesis_block), signing_key).unwrap(); let tx = common::test_utils::produce_dummy_empty_transaction(); let block = common::test_utils::produce_dummy_block(1, None, vec![tx.clone()]); diff --git a/sequencer_core/src/lib.rs b/sequencer_core/src/lib.rs index f161521c..b542c474 100644 --- a/sequencer_core/src/lib.rs +++ b/sequencer_core/src/lib.rs @@ -14,7 +14,7 @@ use logos_blockchain_core::mantle::ops::channel::MsgId; use mempool::{MemPool, MemPoolHandle}; use serde::{Deserialize, Serialize}; -use crate::{block_settlement_client::BlockSettlementClient, block_store::SequencerBlockStore}; +use crate::{block_settlement_client::BlockSettlementClient, block_store::SequencerStore}; mod block_settlement_client; pub mod block_store; @@ -22,7 +22,7 @@ pub mod config; pub struct SequencerCore { state: nssa::V02State, - block_store: SequencerBlockStore, + store: SequencerStore, mempool: MemPool, sequencer_config: SequencerConfig, chain_height: u64, @@ -59,7 +59,7 @@ impl SequencerCore { // Sequencer should panic if unable to open db, // as fixing this issue may require actions non-native to program scope - let block_store = SequencerBlockStore::open_db_with_genesis( + let block_store = SequencerStore::open_db_with_genesis( &config.home.join("rocksdb"), Some(genesis_block), signing_key, @@ -98,7 +98,7 @@ impl SequencerCore { let mut this = Self { state, - block_store, + store: block_store, mempool, chain_height: config.genesis_id, sequencer_config: config, @@ -115,14 +115,14 @@ impl SequencerCore { /// accordingly. fn sync_state_with_stored_blocks(&mut self) { let mut next_block_id = self.sequencer_config.genesis_id + 1; - while let Ok(block) = self.block_store.get_block_at_id(next_block_id) { + while let Ok(block) = self.store.get_block_at_id(next_block_id) { for encoded_transaction in block.body.transactions { let transaction = NSSATransaction::try_from(&encoded_transaction).unwrap(); // Process transaction and update state self.execute_check_transaction_on_state(transaction) .unwrap(); // Update the tx hash to block id map. 
- self.block_store.insert(&encoded_transaction, next_block_id); + self.store.insert(&encoded_transaction, next_block_id); } self.chain_height = next_block_id; next_block_id += 1; @@ -150,12 +150,11 @@ impl SequencerCore { pub async fn produce_new_block_and_post_to_settlement_layer(&mut self) -> Result { let block_data = self.produce_new_block_with_mempool_transactions()?; - if let Some(block_settlement) = self.block_settlement_client.as_mut() { - let last_message_id = block_settlement.last_message_id(); - let block = - block_data.into_pending_block(self.block_store.signing_key(), last_message_id); - let msg_id = block_settlement.submit_block_to_bedrock(&block).await?; - block_settlement.set_last_message_id(msg_id); + if let Some(client) = self.block_settlement_client.as_mut() { + let last_message_id = client.last_message_id(); + let block = block_data.into_pending_block(self.store.signing_key(), last_message_id); + let msg_id = client.submit_block_to_bedrock(&block).await?; + client.set_last_message_id(msg_id); log::info!("Posted block data to Bedrock"); } @@ -185,11 +184,7 @@ impl SequencerCore { } } - let prev_block_hash = self - .block_store - .get_block_at_id(self.chain_height)? - .header - .hash; + let prev_block_hash = self.store.get_block_at_id(self.chain_height)?.header.hash; let curr_time = chrono::Utc::now().timestamp_millis() as u64; @@ -208,9 +203,9 @@ impl SequencerCore { let block = hashable_data .clone() - .into_pending_block(self.block_store.signing_key(), bedrock_parent_id); + .into_pending_block(self.store.signing_key(), bedrock_parent_id); - self.block_store.update(block, &self.state)?; + self.store.update(block, &self.state)?; self.chain_height = new_block_height; @@ -236,8 +231,8 @@ impl SequencerCore { &self.state } - pub fn block_store(&self) -> &SequencerBlockStore { - &self.block_store + pub fn block_store(&self) -> &SequencerStore { + &self.store } pub fn chain_height(&self) -> u64 { @@ -248,17 +243,19 @@ impl SequencerCore { &self.sequencer_config } - pub fn delete_finalized_block_from_db(&mut self, block_id: u64) -> Result<()> { - self.block_store.delete_block_at_id(block_id) + pub fn delete_finalized_blocks_from_db(&mut self, block_ids: &[u64]) -> Result<()> { + block_ids + .iter() + .try_for_each(|&id| self.store.delete_block_at_id(id)) } pub async fn resubmit_pending_blocks(&self) -> Result<()> { - for res in self.block_store.get_pending_blocks() { + for res in self.store.get_pending_blocks() { let block = res?; match block.bedrock_status { BedrockStatus::Pending => { - if let Some(block_settlement) = self.block_settlement_client.as_ref() { - block_settlement.submit_block_to_bedrock(&block).await?; + if let Some(client) = self.block_settlement_client.as_ref() { + client.submit_block_to_bedrock(&block).await?; log::info!("Posted block data to Bedrock"); } } @@ -712,10 +709,7 @@ mod tests { .produce_new_block_with_mempool_transactions() .unwrap() .block_id; - let block = sequencer - .block_store - .get_block_at_id(current_height) - .unwrap(); + let block = sequencer.store.get_block_at_id(current_height).unwrap(); // Only one should be included in the block assert_eq!(block.body.transactions, vec![tx.clone()]); @@ -752,10 +746,7 @@ mod tests { .produce_new_block_with_mempool_transactions() .unwrap() .block_id; - let block = sequencer - .block_store - .get_block_at_id(current_height) - .unwrap(); + let block = sequencer.store.get_block_at_id(current_height).unwrap(); assert_eq!(block.body.transactions, vec![tx.clone()]); // Add same transaction should fail @@ 
-764,10 +755,7 @@ mod tests { .produce_new_block_with_mempool_transactions() .unwrap() .block_id; - let block = sequencer - .block_store - .get_block_at_id(current_height) - .unwrap(); + let block = sequencer.store.get_block_at_id(current_height).unwrap(); assert!(block.body.transactions.is_empty()); } @@ -800,10 +788,7 @@ mod tests { .produce_new_block_with_mempool_transactions() .unwrap() .block_id; - let block = sequencer - .block_store - .get_block_at_id(current_height) - .unwrap(); + let block = sequencer.store.get_block_at_id(current_height).unwrap(); assert_eq!(block.body.transactions, vec![tx.clone()]); } diff --git a/sequencer_rpc/src/process.rs b/sequencer_rpc/src/process.rs index dff4cb60..b89993f9 100644 --- a/sequencer_rpc/src/process.rs +++ b/sequencer_rpc/src/process.rs @@ -11,15 +11,15 @@ use common::{ message::{Message, Request}, parser::RpcRequest, requests::{ - DeleteFinalizedBlockRequest, DeleteFinalizedBlockResponse, GetAccountBalanceRequest, - GetAccountBalanceResponse, GetAccountRequest, GetAccountResponse, - GetAccountsNoncesRequest, GetAccountsNoncesResponse, GetBlockDataRequest, - GetBlockDataResponse, GetBlockRangeDataRequest, GetBlockRangeDataResponse, - GetGenesisIdRequest, GetGenesisIdResponse, GetInitialTestnetAccountsRequest, - GetLastBlockRequest, GetLastBlockResponse, GetProgramIdsRequest, GetProgramIdsResponse, - GetProofForCommitmentRequest, GetProofForCommitmentResponse, - GetTransactionByHashRequest, GetTransactionByHashResponse, HelloRequest, HelloResponse, - SendTxRequest, SendTxResponse, + GetAccountBalanceRequest, GetAccountBalanceResponse, GetAccountRequest, + GetAccountResponse, GetAccountsNoncesRequest, GetAccountsNoncesResponse, + GetBlockDataRequest, GetBlockDataResponse, GetBlockRangeDataRequest, + GetBlockRangeDataResponse, GetGenesisIdRequest, GetGenesisIdResponse, + GetInitialTestnetAccountsRequest, GetLastBlockRequest, GetLastBlockResponse, + GetProgramIdsRequest, GetProgramIdsResponse, GetProofForCommitmentRequest, + GetProofForCommitmentResponse, GetTransactionByHashRequest, + GetTransactionByHashResponse, HelloRequest, HelloResponse, SendTxRequest, + SendTxResponse, }, }, transaction::{EncodedTransaction, NSSATransaction}, @@ -44,7 +44,6 @@ pub const GET_ACCOUNTS_NONCES: &str = "get_accounts_nonces"; pub const GET_ACCOUNT: &str = "get_account"; pub const GET_PROOF_FOR_COMMITMENT: &str = "get_proof_for_commitment"; pub const GET_PROGRAM_IDS: &str = "get_program_ids"; -pub const DELETE_FINALIZED_BLOCK: &str = "delete_finalized_block"; pub const HELLO_FROM_SEQUENCER: &str = "HELLO_FROM_SEQUENCER"; @@ -315,19 +314,6 @@ impl JsonHandler { respond(response) } - async fn delete_finalized_block(&self, request: Request) -> Result { - let delete_finalized_block_req = DeleteFinalizedBlockRequest::parse(Some(request.params))?; - let block_id = delete_finalized_block_req.block_id; - - self.sequencer_state - .lock() - .await - .delete_finalized_block_from_db(block_id)?; - - let response = DeleteFinalizedBlockResponse; - respond(response) - } - pub async fn process_request_internal(&self, request: Request) -> Result { match request.method.as_ref() { HELLO => self.process_temp_hello(request).await, @@ -343,7 +329,6 @@ impl JsonHandler { GET_TRANSACTION_BY_HASH => self.process_get_transaction_by_hash(request).await, GET_PROOF_FOR_COMMITMENT => self.process_get_proof_by_commitment(request).await, GET_PROGRAM_IDS => self.process_get_program_ids(request).await, - DELETE_FINALIZED_BLOCK => self.delete_finalized_block(request).await, _ => 
Err(RpcErr(RpcError::method_not_found(request.method))), } } diff --git a/storage/src/lib.rs b/storage/src/lib.rs index 27d1f38a..8ac1da0d 100644 --- a/storage/src/lib.rs +++ b/storage/src/lib.rs @@ -238,7 +238,7 @@ impl RocksDBIO { rerr, Some("Failed to write first block in db".to_string()), ) - }); + })?; Ok(()) } From e78d6a59a0cf003bfb36accc8ca17bf1c2316bdc Mon Sep 17 00:00:00 2001 From: Sergio Chouhy Date: Tue, 27 Jan 2026 13:27:52 -0300 Subject: [PATCH 05/20] add tokio task to retry pending blocks in bedrock --- bedrock_client/src/lib.rs | 1 + .../configs/sequencer/sequencer_config.json | 1 + integration_tests/src/lib.rs | 2 +- integration_tests/tests/tps.rs | 1 + sequencer_core/src/block_settlement_client.rs | 14 +--- sequencer_core/src/block_store.rs | 2 +- sequencer_core/src/config.rs | 2 + sequencer_core/src/lib.rs | 42 +++++------ sequencer_rpc/src/process.rs | 1 + sequencer_runner/src/lib.rs | 70 +++++++++++++++++-- 10 files changed, 92 insertions(+), 44 deletions(-) diff --git a/bedrock_client/src/lib.rs b/bedrock_client/src/lib.rs index 530fdfc2..b16204c9 100644 --- a/bedrock_client/src/lib.rs +++ b/bedrock_client/src/lib.rs @@ -5,6 +5,7 @@ use reqwest::{Client, Url}; // Simple wrapper // maybe extend in the future for our purposes +#[derive(Clone)] pub struct BedrockClient { http_client: CommonHttpClient, node_url: Url, diff --git a/integration_tests/configs/sequencer/sequencer_config.json b/integration_tests/configs/sequencer/sequencer_config.json index 575d3de3..5c642d37 100644 --- a/integration_tests/configs/sequencer/sequencer_config.json +++ b/integration_tests/configs/sequencer/sequencer_config.json @@ -6,6 +6,7 @@ "max_num_tx_in_block": 20, "mempool_max_size": 10000, "block_create_timeout_millis": 10000, + "retry_pending_blocks_timeout_millis": 240000, "port": 0, "initial_accounts": [ { diff --git a/integration_tests/src/lib.rs b/integration_tests/src/lib.rs index 12d718ec..5cb7233e 100644 --- a/integration_tests/src/lib.rs +++ b/integration_tests/src/lib.rs @@ -113,7 +113,7 @@ impl TestContext { // Setting port to 0 lets the OS choose a free port for us config.port = 0; - let (sequencer_server_handle, sequencer_addr, sequencer_loop_handle) = + let (sequencer_server_handle, sequencer_addr, sequencer_loop_handle, _) = sequencer_runner::startup_sequencer(config).await?; Ok(( diff --git a/integration_tests/tests/tps.rs b/integration_tests/tests/tps.rs index 3fdc8ac8..73b823cf 100644 --- a/integration_tests/tests/tps.rs +++ b/integration_tests/tests/tps.rs @@ -186,6 +186,7 @@ impl TpsTestManager { initial_commitments: vec![initial_commitment], signing_key: [37; 32], bedrock_config: None, + retry_pending_blocks_timeout_millis: 1000 * 60 * 4, } } } diff --git a/sequencer_core/src/block_settlement_client.rs b/sequencer_core/src/block_settlement_client.rs index 13c45b9a..e53c02e6 100644 --- a/sequencer_core/src/block_settlement_client.rs +++ b/sequencer_core/src/block_settlement_client.rs @@ -2,7 +2,7 @@ use std::{fs, path::Path}; use anyhow::{Result, anyhow}; use bedrock_client::BedrockClient; -use common::block::{Block, HashableBlockData}; +use common::block::{BedrockStatus, Block, HashableBlockData}; use logos_blockchain_core::mantle::{ MantleTx, Op, OpProof, SignedMantleTx, Transaction, TxHash, ledger, ops::channel::{ChannelId, MsgId, inscribe::InscriptionOp}, @@ -14,11 +14,11 @@ use logos_blockchain_key_management_system_service::keys::{ use crate::config::BedrockConfig; /// A component that posts block data to logos blockchain +#[derive(Clone)] pub struct 
BlockSettlementClient { bedrock_client: BedrockClient, bedrock_signing_key: Ed25519Key, bedrock_channel_id: ChannelId, - last_message_id: MsgId, } impl BlockSettlementClient { @@ -26,23 +26,13 @@ impl BlockSettlementClient { let bedrock_signing_key = load_or_create_signing_key(&home.join("bedrock_signing_key"))?; let bedrock_channel_id = ChannelId::from(config.channel_id); let bedrock_client = BedrockClient::new(None, config.node_url.clone())?; - let channel_genesis_msg = MsgId::from([0; 32]); Ok(Self { bedrock_client, bedrock_signing_key, bedrock_channel_id, - last_message_id: channel_genesis_msg, }) } - pub fn set_last_message_id(&mut self, msg_id: MsgId) { - self.last_message_id = msg_id; - } - - pub fn last_message_id(&self) -> MsgId { - self.last_message_id - } - /// Create and sign a transaction for inscribing data pub fn create_inscribe_tx(&self, block: &Block) -> Result<(SignedMantleTx, MsgId)> { let inscription_data = borsh::to_vec(block)?; diff --git a/sequencer_core/src/block_store.rs b/sequencer_core/src/block_store.rs index f81fb121..7f3c1140 100644 --- a/sequencer_core/src/block_store.rs +++ b/sequencer_core/src/block_store.rs @@ -80,7 +80,7 @@ impl SequencerStore { &self.signing_key } - pub(crate) fn get_pending_blocks(&self) -> impl Iterator> { + pub fn get_pending_blocks(&self) -> impl Iterator> { self.dbio.get_all_blocks().map(|res| Ok(res?)) } diff --git a/sequencer_core/src/config.rs b/sequencer_core/src/config.rs index 5911cc52..74460931 100644 --- a/sequencer_core/src/config.rs +++ b/sequencer_core/src/config.rs @@ -40,6 +40,8 @@ pub struct SequencerConfig { pub mempool_max_size: usize, /// Interval in which blocks produced pub block_create_timeout_millis: u64, + /// Interval in which pending blocks are retried + pub retry_pending_blocks_timeout_millis: u64, /// Port to listen pub port: u16, /// List of initial accounts data diff --git a/sequencer_core/src/lib.rs b/sequencer_core/src/lib.rs index b542c474..1df91f0f 100644 --- a/sequencer_core/src/lib.rs +++ b/sequencer_core/src/lib.rs @@ -27,6 +27,7 @@ pub struct SequencerCore { sequencer_config: SequencerConfig, chain_height: u64, block_settlement_client: Option, + last_bedrock_msg_id: MsgId, } #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] @@ -96,6 +97,7 @@ impl SequencerCore { .expect("Block settlement client should be constructible") }); + let channel_genesis_msg_id = MsgId::from([0; 32]); let mut this = Self { state, store: block_store, @@ -103,6 +105,7 @@ impl SequencerCore { chain_height: config.genesis_id, sequencer_config: config, block_settlement_client, + last_bedrock_msg_id: channel_genesis_msg_id, }; this.sync_state_with_stored_blocks(); @@ -151,10 +154,10 @@ impl SequencerCore { let block_data = self.produce_new_block_with_mempool_transactions()?; if let Some(client) = self.block_settlement_client.as_mut() { - let last_message_id = client.last_message_id(); - let block = block_data.into_pending_block(self.store.signing_key(), last_message_id); + let block = + block_data.into_pending_block(self.store.signing_key(), self.last_bedrock_msg_id); let msg_id = client.submit_block_to_bedrock(&block).await?; - client.set_last_message_id(msg_id); + self.last_bedrock_msg_id = msg_id; log::info!("Posted block data to Bedrock"); } @@ -195,15 +198,9 @@ impl SequencerCore { timestamp: curr_time, }; - let bedrock_parent_id = self - .block_settlement_client - .as_ref() - .map(|client| client.last_message_id()) - .unwrap_or(MsgId::from([0; 32])); - let block = hashable_data .clone() - 
.into_pending_block(self.store.signing_key(), bedrock_parent_id); + .into_pending_block(self.store.signing_key(), self.last_bedrock_msg_id); self.store.update(block, &self.state)?; @@ -249,20 +246,16 @@ impl SequencerCore { .try_for_each(|&id| self.store.delete_block_at_id(id)) } - pub async fn resubmit_pending_blocks(&self) -> Result<()> { - for res in self.store.get_pending_blocks() { - let block = res?; - match block.bedrock_status { - BedrockStatus::Pending => { - if let Some(client) = self.block_settlement_client.as_ref() { - client.submit_block_to_bedrock(&block).await?; - log::info!("Posted block data to Bedrock"); - } - } - _ => continue, - } - } - Ok(()) + pub fn get_pending_blocks(&self) -> Vec { + self.store + .get_pending_blocks() + .flatten() + .filter(|block| matches!(block.bedrock_status, BedrockStatus::Pending)) + .collect() + } + + pub fn block_settlement_client(&self) -> Option { + self.block_settlement_client.clone() } } @@ -326,6 +319,7 @@ mod tests { initial_commitments: vec![], signing_key: *sequencer_sign_key_for_testing().value(), bedrock_config: None, + retry_pending_blocks_timeout_millis: 1000 * 60 * 4, } } diff --git a/sequencer_rpc/src/process.rs b/sequencer_rpc/src/process.rs index b89993f9..eb19b620 100644 --- a/sequencer_rpc/src/process.rs +++ b/sequencer_rpc/src/process.rs @@ -389,6 +389,7 @@ mod tests { initial_commitments: vec![], signing_key: *sequencer_sign_key_for_testing().value(), bedrock_config: None, + retry_pending_blocks_timeout_millis: 1000 * 60 * 4, } } diff --git a/sequencer_runner/src/lib.rs b/sequencer_runner/src/lib.rs index fd4a6c08..3387c524 100644 --- a/sequencer_runner/src/lib.rs +++ b/sequencer_runner/src/lib.rs @@ -4,7 +4,7 @@ use actix_web::dev::ServerHandle; use anyhow::Result; use clap::Parser; use common::rpc_primitives::RpcConfig; -use log::info; +use log::{info, warn}; use sequencer_core::{SequencerCore, config::SequencerConfig}; use sequencer_rpc::new_http_server; use tokio::{sync::Mutex, task::JoinHandle}; @@ -20,8 +20,14 @@ struct Args { pub async fn startup_sequencer( app_config: SequencerConfig, -) -> Result<(ServerHandle, SocketAddr, JoinHandle>)> { +) -> Result<( + ServerHandle, + SocketAddr, + JoinHandle>, + JoinHandle>, +)> { let block_timeout = app_config.block_create_timeout_millis; + let retry_pending_blocks_timeout = app_config.retry_pending_blocks_timeout_millis; let port = app_config.port; let (sequencer_core, mempool_handle) = SequencerCore::start_from_config(app_config); @@ -39,8 +45,38 @@ pub async fn startup_sequencer( let http_server_handle = http_server.handle(); tokio::spawn(http_server); - info!("Starting main sequencer loop"); + info!("Starting pending block retry loop"); + let seq_core_wrapped_for_block_retry = seq_core_wrapped.clone(); + let retry_pending_blocks_handle = tokio::spawn(async move { + loop { + tokio::time::sleep(std::time::Duration::from_millis( + retry_pending_blocks_timeout, + )) + .await; + let (pending_blocks, block_settlement_client) = { + let sequencer_core = seq_core_wrapped_for_block_retry.lock().await; + let client = sequencer_core.block_settlement_client(); + let pending_blocks = sequencer_core.get_pending_blocks(); + (pending_blocks, client) + }; + + let Some(client) = block_settlement_client else { + continue; + }; + + for block in pending_blocks.iter() { + if let Err(e) = client.submit_block_to_bedrock(block).await { + warn!( + "Failed to resubmit block with id {} with error {}", + block.header.block_id, e + ); + } + } + } + }); + + info!("Starting main sequencer loop"); let 
main_loop_handle = tokio::spawn(async move { loop { tokio::time::sleep(std::time::Duration::from_millis(block_timeout)).await; @@ -61,7 +97,12 @@ pub async fn startup_sequencer( } }); - Ok((http_server_handle, addr, main_loop_handle)) + Ok(( + http_server_handle, + addr, + main_loop_handle, + retry_pending_blocks_handle, + )) } pub async fn main_runner() -> Result<()> { @@ -81,9 +122,26 @@ pub async fn main_runner() -> Result<()> { } // ToDo: Add restart on failures - let (_, _, main_loop_handle) = startup_sequencer(app_config).await?; + let (_, _, main_loop_handle, retry_loop_handle) = startup_sequencer(app_config).await?; - main_loop_handle.await??; + info!("Sequencer running. Monitoring concurrent tasks..."); + + tokio::select! { + res = main_loop_handle => { + match res { + Ok(inner_res) => warn!("Main loop exited unexpectedly: {:?}", inner_res), + Err(e) => warn!("Main loop task panicked: {:?}", e), + } + } + res = retry_loop_handle => { + match res { + Ok(inner_res) => warn!("Retry loop exited unexpectedly: {:?}", inner_res), + Err(e) => warn!("Retry loop task panicked: {:?}", e), + } + } + } + + info!("Shutting down sequencer..."); Ok(()) } From bc2350b3490aaea952d91f270186e7d016278926 Mon Sep 17 00:00:00 2001 From: Sergio Chouhy Date: Tue, 27 Jan 2026 16:03:21 -0300 Subject: [PATCH 06/20] fix start_from_config function --- sequencer_core/src/block_store.rs | 4 ++++ sequencer_core/src/lib.rs | 33 +++++++---------------------- storage/src/lib.rs | 35 ++++++++++++++++++++++++++++--- 3 files changed, 43 insertions(+), 29 deletions(-) diff --git a/sequencer_core/src/block_store.rs b/sequencer_core/src/block_store.rs index 7f3c1140..3f1eb3b8 100644 --- a/sequencer_core/src/block_store.rs +++ b/sequencer_core/src/block_store.rs @@ -90,6 +90,10 @@ impl SequencerStore { self.tx_hash_to_block_map.extend(new_transactions_map); Ok(()) } + + pub fn get_nssa_state(&self) -> Option { + self.dbio.get_nssa_state().ok() + } } pub(crate) fn block_to_transactions_map(block: &Block) -> HashMap { diff --git a/sequencer_core/src/lib.rs b/sequencer_core/src/lib.rs index 1df91f0f..5288f80a 100644 --- a/sequencer_core/src/lib.rs +++ b/sequencer_core/src/lib.rs @@ -60,7 +60,7 @@ impl SequencerCore { // Sequencer should panic if unable to open db, // as fixing this issue may require actions non-native to program scope - let block_store = SequencerStore::open_db_with_genesis( + let store = SequencerStore::open_db_with_genesis( &config.home.join("rocksdb"), Some(genesis_block), signing_key, @@ -86,7 +86,9 @@ impl SequencerCore { .map(|acc_data| (acc_data.account_id.parse().unwrap(), acc_data.balance)) .collect(); - let mut state = nssa::V02State::new_with_genesis_accounts(&init_accs, &initial_commitments); + let mut state = store.get_nssa_state().unwrap_or_else(|| { + nssa::V02State::new_with_genesis_accounts(&init_accs, &initial_commitments) + }); #[cfg(feature = "testnet")] state.add_pinata_program(PINATA_BASE58.parse().unwrap()); @@ -98,9 +100,9 @@ impl SequencerCore { }); let channel_genesis_msg_id = MsgId::from([0; 32]); - let mut this = Self { + let sequencer_core = Self { state, - store: block_store, + store, mempool, chain_height: config.genesis_id, sequencer_config: config, @@ -108,28 +110,7 @@ impl SequencerCore { last_bedrock_msg_id: channel_genesis_msg_id, }; - this.sync_state_with_stored_blocks(); - - (this, mempool_handle) - } - - /// If there are stored blocks ahead of the current height, this method will load and process - /// all transaction in them in the order they are stored. 
The NSSA state will be updated - /// accordingly. - fn sync_state_with_stored_blocks(&mut self) { - let mut next_block_id = self.sequencer_config.genesis_id + 1; - while let Ok(block) = self.store.get_block_at_id(next_block_id) { - for encoded_transaction in block.body.transactions { - let transaction = NSSATransaction::try_from(&encoded_transaction).unwrap(); - // Process transaction and update state - self.execute_check_transaction_on_state(transaction) - .unwrap(); - // Update the tx hash to block id map. - self.store.insert(&encoded_transaction, next_block_id); - } - self.chain_height = next_block_id; - next_block_id += 1; - } + (sequencer_core, mempool_handle) } fn execute_check_transaction_on_state( diff --git a/storage/src/lib.rs b/storage/src/lib.rs index 8ac1da0d..a806d1ec 100644 --- a/storage/src/lib.rs +++ b/storage/src/lib.rs @@ -38,7 +38,7 @@ pub const CF_BLOCK_NAME: &str = "cf_block"; /// Name of meta column family pub const CF_META_NAME: &str = "cf_meta"; /// Name of state column family -pub const CF_NSSA_STATE_NAME: &str = "cf_state"; +pub const CF_NSSA_STATE_NAME: &str = "cf_nssa_state"; pub type DbResult<T> = Result<T, DbError>; @@ -191,9 +191,9 @@ impl RocksDBIO { } pub fn put_nssa_state_in_db(&self, state: &V02State, batch: &mut WriteBatch) -> DbResult<()> { - let cf_state = self.nssa_state_column(); + let cf_nssa_state = self.nssa_state_column(); batch.put_cf( - &cf_state, + &cf_nssa_state, borsh::to_vec(&DB_NSSA_STATE_KEY).map_err(|err| { DbError::borsh_cast_message( err, Some("Failed to serialize block id".to_string()), ) })?, @@ -356,6 +356,35 @@ impl RocksDBIO { } } + pub fn get_nssa_state(&self) -> DbResult<V02State> { + let cf_nssa_state = self.nssa_state_column(); + let res = self + .db + .get_cf( + &cf_nssa_state, + borsh::to_vec(&DB_NSSA_STATE_KEY).map_err(|err| { + DbError::borsh_cast_message( + err, + Some("Failed to serialize NSSA state key".to_string()), + ) + })?, + ) + .map_err(|rerr| DbError::rocksdb_cast_message(rerr, None))?; + + if let Some(data) = res { + Ok(borsh::from_slice::<V02State>(&data).map_err(|serr| { + DbError::borsh_cast_message( + serr, + Some("Failed to deserialize NSSA state".to_string()), + ) + })?) 
+ } else { + Err(DbError::db_interaction_error( + "NSSA state not found in db".to_string(), + )) + } + } + pub fn delete_block(&self, block_id: u64) -> DbResult<()> { let cf_block = self.block_column(); let key = borsh::to_vec(&block_id).map_err(|err| { From 37c85de38dd1c3c18dccf1d9951482630bbefc31 Mon Sep 17 00:00:00 2001 From: Sergio Chouhy Date: Tue, 27 Jan 2026 16:06:51 -0300 Subject: [PATCH 07/20] clippy --- nssa/src/state.rs | 2 +- sequencer_core/src/block_settlement_client.rs | 2 +- sequencer_core/src/lib.rs | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/nssa/src/state.rs b/nssa/src/state.rs index 9888f583..4e5c3f2f 100644 --- a/nssa/src/state.rs +++ b/nssa/src/state.rs @@ -69,7 +69,7 @@ impl NullifierSet { } fn extend(&mut self, new_nullifiers: Vec<Nullifier>) { - self.0.extend(new_nullifiers.into_iter()); + self.0.extend(new_nullifiers); } fn contains(&self, nullifier: &Nullifier) -> bool { diff --git a/sequencer_core/src/block_settlement_client.rs b/sequencer_core/src/block_settlement_client.rs index 150acf39..5b323ca5 100644 --- a/sequencer_core/src/block_settlement_client.rs +++ b/sequencer_core/src/block_settlement_client.rs @@ -2,7 +2,7 @@ use std::{fs, path::Path}; use anyhow::{Context, Result, anyhow}; use bedrock_client::BedrockClient; -use common::block::{BedrockStatus, Block, HashableBlockData}; +use common::block::Block; use logos_blockchain_core::mantle::{ MantleTx, Op, OpProof, SignedMantleTx, Transaction, TxHash, ledger, ops::channel::{ChannelId, MsgId, inscribe::InscriptionOp}, diff --git a/sequencer_core/src/lib.rs b/sequencer_core/src/lib.rs index 5288f80a..02a05231 100644 --- a/sequencer_core/src/lib.rs +++ b/sequencer_core/src/lib.rs @@ -5,7 +5,7 @@ use anyhow::Result; use common::PINATA_BASE58; use common::{ HashType, - block::{BedrockStatus, Block, BlockHash, HashableBlockData}, + block::{BedrockStatus, Block, HashableBlockData}, transaction::{EncodedTransaction, NSSATransaction}, }; use config::SequencerConfig; From ab2b16956ead1120e81c825a7fa1d6ce99bf734a Mon Sep 17 00:00:00 2001 From: Sergio Chouhy Date: Wed, 28 Jan 2026 15:53:29 -0300 Subject: [PATCH 08/20] minor refactor --- sequencer_core/src/block_store.rs | 2 +- sequencer_core/src/lib.rs | 57 +++++++++++++++++++------------ sequencer_runner/src/lib.rs | 4 ++- 3 files changed, 39 insertions(+), 24 deletions(-) diff --git a/sequencer_core/src/block_store.rs b/sequencer_core/src/block_store.rs index 3f1eb3b8..3aed2143 100644 --- a/sequencer_core/src/block_store.rs +++ b/sequencer_core/src/block_store.rs @@ -80,7 +80,7 @@ impl SequencerStore { &self.signing_key } - pub fn get_pending_blocks(&self) -> impl Iterator<Item = Result<Block>> { + pub fn get_all_blocks(&self) -> impl Iterator<Item = Result<Block>> { self.dbio.get_all_blocks().map(|res| Ok(res?)) } diff --git a/sequencer_core/src/lib.rs b/sequencer_core/src/lib.rs index 02a05231..2caa99ed 100644 --- a/sequencer_core/src/lib.rs +++ b/sequencer_core/src/lib.rs @@ -9,7 +9,7 @@ use common::{ transaction::{EncodedTransaction, NSSATransaction}, }; use config::SequencerConfig; -use log::warn; +use log::{info, warn}; use logos_blockchain_core::mantle::ops::channel::MsgId; use mempool::{MemPool, MemPoolHandle}; use serde::{Deserialize, Serialize}; @@ -66,29 +66,40 @@ impl SequencerCore { signing_key, ) .unwrap(); - let mut initial_commitments = vec![]; + let mut state = match store.get_nssa_state() { + Some(state) => { + info!("Found local database. 
Loading state and pending blocks from it."); + state + }, + None => { + info!( + "No database found when starting the sequencer. Creating a fresh new with the initial data in config" + ); + let mut initial_commitments = vec![]; - let mut acc = init_comm_data.account; + for init_comm_data in config.initial_commitments.clone() { + let npk = init_comm_data.npk; - acc.program_owner = nssa::program::Program::authenticated_transfer_program().id(); + let mut acc = init_comm_data.account; - let comm = nssa_core::Commitment::new(&npk, &acc); + acc.program_owner = + nssa::program::Program::authenticated_transfer_program().id(); - initial_commitments.push(comm); - } + let comm = nssa_core::Commitment::new(&npk, &acc); - let init_accs: Vec<(nssa::AccountId, u128)> = config - .initial_accounts - .iter() - .map(|acc_data| (acc_data.account_id.parse().unwrap(), acc_data.balance)) - .collect(); + initial_commitments.push(comm); + } - let mut state = store.get_nssa_state().unwrap_or_else(|| { - nssa::V02State::new_with_genesis_accounts(&init_accs, &initial_commitments) - }); + let init_accs: Vec<(nssa::AccountId, u128)> = config + .initial_accounts + .iter() + .map(|acc_data| (acc_data.account_id.parse().unwrap(), acc_data.balance)) + .collect(); + + nssa::V02State::new_with_genesis_accounts(&init_accs, &initial_commitments) + } + }; #[cfg(feature = "testnet")] state.add_pinata_program(PINATA_BASE58.parse().unwrap()); @@ -227,12 +238,14 @@ impl SequencerCore { .try_for_each(|&id| self.store.delete_block_at_id(id)) } - pub fn get_pending_blocks(&self) -> Vec { - self.store - .get_pending_blocks() - .flatten() + pub fn get_pending_blocks(&self) -> Result> { + Ok(self + .store + .get_all_blocks() + .collect::>>()? + .into_iter() .filter(|block| matches!(block.bedrock_status, BedrockStatus::Pending)) - .collect() + .collect()) } pub fn block_settlement_client(&self) -> Option { diff --git a/sequencer_runner/src/lib.rs b/sequencer_runner/src/lib.rs index 3387c524..488b755f 100644 --- a/sequencer_runner/src/lib.rs +++ b/sequencer_runner/src/lib.rs @@ -57,7 +57,9 @@ pub async fn startup_sequencer( let (pending_blocks, block_settlement_client) = { let sequencer_core = seq_core_wrapped_for_block_retry.lock().await; let client = sequencer_core.block_settlement_client(); - let pending_blocks = sequencer_core.get_pending_blocks(); + let pending_blocks = sequencer_core + .get_pending_blocks() + .expect("Sequencer should be able to retrieve pending blocks"); (pending_blocks, client) }; From deca3ffe69bb0d8eb18a281ab16f5515ee7d9198 Mon Sep 17 00:00:00 2001 From: Sergio Chouhy Date: Thu, 29 Jan 2026 00:41:38 -0300 Subject: [PATCH 09/20] fmt --- sequencer_core/src/lib.rs | 2 +- sequencer_runner/configs/debug/sequencer_config.json | 5 +++-- sequencer_runner/src/lib.rs | 1 + 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/sequencer_core/src/lib.rs b/sequencer_core/src/lib.rs index 2caa99ed..dfc488e8 100644 --- a/sequencer_core/src/lib.rs +++ b/sequencer_core/src/lib.rs @@ -71,7 +71,7 @@ impl SequencerCore { Some(state) => { info!("Found local database. Loading state and pending blocks from it."); state - }, + } None => { info!( "No database found when starting the sequencer. 
Creating a fresh new with the initial data in config" diff --git a/sequencer_runner/configs/debug/sequencer_config.json b/sequencer_runner/configs/debug/sequencer_config.json index ad43ba65..5aeacb48 100644 --- a/sequencer_runner/configs/debug/sequencer_config.json +++ b/sequencer_runner/configs/debug/sequencer_config.json @@ -5,7 +5,8 @@ "is_genesis_random": true, "max_num_tx_in_block": 20, "mempool_max_size": 1000, - "block_create_timeout_millis": 10000, + "block_create_timeout_millis": 5000, + "retry_pending_blocks_timeout_millis": 7000, "port": 3040, "initial_accounts": [ { @@ -157,6 +158,6 @@ ], "bedrock_config": { "channel_id": [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1], - "node_url": "http://localhost:8080" + "node_url": "http://localhost:58072" } } diff --git a/sequencer_runner/src/lib.rs b/sequencer_runner/src/lib.rs index 488b755f..8540c2c2 100644 --- a/sequencer_runner/src/lib.rs +++ b/sequencer_runner/src/lib.rs @@ -67,6 +67,7 @@ pub async fn startup_sequencer( continue; }; + info!("Resubmitting {} pending blocks", pending_blocks.len()); for block in pending_blocks.iter() { if let Err(e) = client.submit_block_to_bedrock(block).await { warn!( From d6df9a77968a529a7b9076103f80bf6fa8190683 Mon Sep 17 00:00:00 2001 From: Sergio Chouhy Date: Thu, 29 Jan 2026 00:57:27 -0300 Subject: [PATCH 10/20] minor refactor --- sequencer_core/src/lib.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sequencer_core/src/lib.rs b/sequencer_core/src/lib.rs index dfc488e8..38671beb 100644 --- a/sequencer_core/src/lib.rs +++ b/sequencer_core/src/lib.rs @@ -232,7 +232,7 @@ impl SequencerCore { &self.sequencer_config } - pub fn delete_finalized_blocks_from_db(&mut self, block_ids: &[u64]) -> Result<()> { + pub fn delete_blocks_from_db(&mut self, block_ids: &[u64]) -> Result<()> { block_ids .iter() .try_for_each(|&id| self.store.delete_block_at_id(id)) From fb1221d985b2f813a95369fea4e5c2bf29e0e85f Mon Sep 17 00:00:00 2001 From: Sergio Chouhy Date: Thu, 29 Jan 2026 09:51:11 -0300 Subject: [PATCH 11/20] fix indexer service types --- indexer_service/protocol/src/convert.rs | 4 ++++ indexer_service/protocol/src/lib.rs | 2 ++ 2 files changed, 6 insertions(+) diff --git a/indexer_service/protocol/src/convert.rs b/indexer_service/protocol/src/convert.rs index 46c8811d..2c5d4b09 100644 --- a/indexer_service/protocol/src/convert.rs +++ b/indexer_service/protocol/src/convert.rs @@ -599,12 +599,14 @@ impl TryFrom for Block { header, body, bedrock_status, + bedrock_parent_id, } = value; Ok(Self { header: header.into(), body: body.try_into()?, bedrock_status: bedrock_status.into(), + bedrock_parent_id: bedrock_parent_id.into(), }) } } @@ -617,12 +619,14 @@ impl TryFrom for common::block::Block { header, body, bedrock_status, + bedrock_parent_id, } = value; Ok(Self { header: header.try_into()?, body: body.try_into()?, bedrock_status: bedrock_status.into(), + bedrock_parent_id: bedrock_parent_id.into(), }) } } diff --git a/indexer_service/protocol/src/lib.rs b/indexer_service/protocol/src/lib.rs index 8189f7d8..096fbc2f 100644 --- a/indexer_service/protocol/src/lib.rs +++ b/indexer_service/protocol/src/lib.rs @@ -30,12 +30,14 @@ pub struct Account { pub type BlockId = u64; pub type TimeStamp = u64; +pub type MsgId = [u8; 32]; #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] pub struct Block { pub header: BlockHeader, pub body: BlockBody, pub bedrock_status: BedrockStatus, + pub bedrock_parent_id: MsgId, } #[derive(Debug, Clone, PartialEq, Eq, 
Hash, Serialize, Deserialize, JsonSchema)] From 81921c340686bac8de4f8a9eb91622db0ba409d0 Mon Sep 17 00:00:00 2001 From: Sergio Chouhy Date: Thu, 29 Jan 2026 09:54:01 -0300 Subject: [PATCH 12/20] build artifacts --- artifacts/program_methods/amm.bin | Bin 460392 -> 460392 bytes .../authenticated_transfer.bin | Bin 380112 -> 380112 bytes artifacts/program_methods/pinata.bin | Bin 382736 -> 382736 bytes artifacts/program_methods/pinata_token.bin | Bin 390380 -> 390380 bytes .../privacy_preserving_circuit.bin | Bin 501580 -> 501540 bytes artifacts/program_methods/token.bin | Bin 418724 -> 418724 bytes artifacts/test_program_methods/burner.bin | Bin 376928 -> 376928 bytes .../test_program_methods/chain_caller.bin | Bin 390632 -> 390632 bytes .../test_program_methods/changer_claimer.bin | Bin 380056 -> 380056 bytes artifacts/test_program_methods/claimer.bin | Bin 375844 -> 375844 bytes .../test_program_methods/data_changer.bin | Bin 378544 -> 378544 bytes .../test_program_methods/extra_output.bin | Bin 375924 -> 375924 bytes .../malicious_authorization_changer.bin | Bin 382160 -> 382160 bytes artifacts/test_program_methods/minter.bin | Bin 375856 -> 375856 bytes .../test_program_methods/missing_output.bin | Bin 375992 -> 375992 bytes .../modified_transfer.bin | Bin 379380 -> 379380 bytes .../test_program_methods/nonce_changer.bin | Bin 375960 -> 375960 bytes artifacts/test_program_methods/noop.bin | Bin 375656 -> 375656 bytes .../program_owner_changer.bin | Bin 375868 -> 375868 bytes .../simple_balance_transfer.bin | Bin 377740 -> 377740 bytes 20 files changed, 0 insertions(+), 0 deletions(-) diff --git a/artifacts/program_methods/amm.bin b/artifacts/program_methods/amm.bin index 30cb7662bcb42bfeb9469c5a34e1e36a97043760..01efd3245ccabd51a46aa41808a1e96c50ec7104 100644 GIT binary patch delta 976 zcmaFSBlDt1rlEzgg{g(Pg=Gut*Q>&&1}3Ja7M4b)=7y$5CT2!J-t>#LY$DTTuCXo= z!j#>9=^85=Hx`NMuO6~)#3p|64XdW4UQSL~uAU*LdDGuNVm*W16&F~Ar_X=NsxY1L z3F~@nCU1Pnsv&J?jN5FG)zkH!vT|V44U&{L!KoP-3{VGH5OALCGuA{b7HpsJj8%;Z ztKJ2-Swg4Fy<}y_D*NgSYnm{&paS~W5Q|0Af#RCeWnW?UCRj27x1@%&i2=?)m<|dg zDKp&iK+`p(4Drh2i+qsw>1$r&bh*a#claa?-mr4w3J@vWQ40<*DNEe02KpLnK!ekh z?OW{51xq&Ik(4&YX`|`%hv!&jw*Pv|dW=!Z#Mscp)WFaPh)vDRjSVa$?hUS(A7AD5#hGqsvh6aX4=BCpx*0PCAm$}Be zL)l6pBgWx0Al^U-aa{{9i`8SJjOz$!d_{!>5NZU*JCqz<4aZzX+vY&W`nGruJ@Fc1DkG;q_hc6&A?!QI>3T}^JJf~CStK*`-Eq# zYD`%5F1XDSI$iE1D?3)%S6^7ugs}w`(7%RQESe4!*PJf<3cEMKk_osaHKa`pa0bG3 zP#{T};g$!Qt|4WJR~}#FgS1ay^BSkiHKxDACu#78l@nKhNa2oJaDYi!;&wIA*H{A@ zoStmoVs|cBvH_2zv?)#-O{YIR$11b^*IU+Oj8bN%#+C*KW)>!fCg!HbCWZ!FEi5iug-whNO-v08jeywH%-q<((!ywZ{wbD}_T5`qw(s7` HDk=^D%o-6i delta 52 zcmcaGQ~bhA@rD-07N!>FEi5iuh0RQjEe#CJEKCed%uS6=3=J$Tr{|wyNon7`m1XFEiBg#NSPQLnwT0G8UeAXnYponrG*hge3YM|p}};=(=3_o Okq23}M;>I|E(!oQ>JsMw delta 59 zcmbQRPJF^T@rD-07N!>FEiBg#NST=$TN)UcS(q4_n421#7#dhwGQ>yu85$Z)cRbCK P*&cb2Wqag7*6pGII&u>E diff --git a/artifacts/program_methods/pinata_token.bin b/artifacts/program_methods/pinata_token.bin index 57c73fbc0c3fbba749cff71aa664929e96bc9aef..8ba4f1dd0fc05160fc7b46e4392ade7f98dfbde1 100644 GIT binary patch delta 59 zcmaF!QvA(J@rD-07N!>FEi7B^NtqZMnwT0G8UeAXnYponrG*hge3YM|p}} O=J#2)o8M>MDhdF3trPFEi7B^Ntu}%TN)UcS(q4_n421#7#dhwGQ>yu85$Z)mw(BU P*=~NHWxM%()~%udd^Hpm diff --git a/artifacts/program_methods/privacy_preserving_circuit.bin b/artifacts/program_methods/privacy_preserving_circuit.bin index 91f4ac9aed0ee0326c838fbfe11695721d624802..b937d66f0fb42b78331a8deaf518f1355212dfc8 100644 GIT binary patch delta 11629 
zcmai)33L@j)_|*D`n^OZLf9b$cuSCgEJ+9$lt@^V$bf`Jh{_TW9Y(_4b(PdT_ZB`f8wk{D9pXzdl5F(AA4u21bM)GfdYB_iS|Gvw? z_x~673jXJ*h;B7I(k#_{hniUaPB(i`jX1f=sNGl-@kT(z(5AUu;=v}G81xv6&`v z=6FR+nEm1y4O8>IVt^hVF!2^mloxo#U^Sqgc`#0&uO+?MNE4GBhIUBML`soY6evS- zT4-V~T&Vb0b6yku-!6aZtr#%fUf^yx(ZN@f%#}WUrOUV!tBLHxUg5PAGNEwrX*k`% zQ(9^wW2UXR1kT;>6*oyXyuQk1jP`3H^`uvv!d|&$(${PL77#aw6^m~00obd!dTUK&ISlU$ zUqo(&z%lqC_)n^Tqc)nzbBu3#nly@H*U^BH0u4Xr-(ic+=G&Z(N{&VZUqf!dRyJdjAJYE z0^(MsXaSr6Tdq9m0l`O%vm6 zHbhouO=Q~*VhJ4c9ec$DGB&!UlQj-D;Vd{o$(wb-AXrxhtb_|Hykap+mE&W(5*sXF zgfjR+xD)gFLmc4}t<1DG`c;o{S1c~hw=JIyFPdxfepa4w(JP|W#I;#@tN+;9_q!iY zUG<7_%8c$^-S@=g=vI~8+l_3}43Rrp45M$eGVt``;6JEpwN7aN^;a+ft;vOAH zPx@yno)4%0<`u^je+p+Kk5fF8Am7A5$@;mlM%&D|U1n(pk;M-6jEA6Ln z1K8Tz`fSx22VaI0FhH(Ix_&vXzJdXXiciB+9XzHR9?ey0ZZN?w-6`dlY?ltYi#jmd zp0M5BxXi&5;QDNtTNzjk7d#md6y}h@zra^8FckIZ@6baN5%jmRVj>&^OSvIeIH4zd zN`sYF#c+Ga1P}Jo#A#R_XR^Q6TNA|?EYn!7_;I)lwp{)Rd>XcbwcEX#h`wygH^B+; zC`}k41-JCkMBx>pS1DW$C;V1fOuokHW+7KzC(u-M*S= z$DR*YJP_UwM~eF46$NQHLWAX^V`lGE-O!BV&GF2ie4>Gh?%9;+l1QI8tqk1`Ly1x|kuh`+q@Q)?jBbYU1c$e4;=ZoHCdc zdkA+ZcUDM7pR567i22%G`c_T*t2v~HJZ=8SCTgDaRTlSY!#Q^+`ot+UI%))WyGblx zMcEy2g9Y~KGz~6>E$jBcr4H5};-GhMH+YTB{lqL9PETf=)ag(HpRr01?#ceECgNtW zTxHPYM>&B|kPXX(-}o5O&@5i0gjg54Ii#09-)#g~*qY}8B1-jJ{V>J;8PY>7WOxq$ znCTO_$U{*CXXg1r3z2*Q&USFxXpTwPI&9{`Qyu&oY{tF-$niCpM@F^5IfC z^iq5TjzPXo@f~9+6bmc&Nlb%F9lQq~Iol`3ss3?~a~S2bb86o|gZD17cA~#a7mDZX$nX6m zk-yYVtk>ZLSk5CoS$`t8W!S5DI()Ry7uqo?-wNl#mi!c)=@{R0(lz=Gv4sZb1a&4~ zJHa%#$T7ht*g3)HuycYQQ^>f5cHx=~pI=N!s!%cEQP2C-v2JMM91T$~*h$jtDfWJm z&4b{wpigk~3VGmpct@oI>N|YKG5+4CWs=ynV;L-7u(r}A9%asL?BqRoPz%l-q(4+M zW%{)*hbM!-57PT+9|U`6>BF>#&5c=lXP2>MC@Wv-t9(z~#IaVgo|sX!BM+Yb3PG!i z+t-9dI&5u4W-f*}cqd%&vb}X@;Y`@t>V{8{GTjAj$#|Lea)Lj@naHhr_bj{wefKMQIc(5BTe7^?*A9*zte-T} z9%jooGLN+ocvNm63;p|Cv)yT!r+^u%3CsB{HVC!^l zOg&D36VWFfI&>VDZuG0F;}@%GD6=gSQ|4%*H7pCK^u#W6+Aw{rrj0hwW$Rbn+G_LM zBYKUfq@I7rV{hUSER~A)z+2w1lP0==va+;t#p2mu>#_PO?OyZXSiPysnDz_~+2s@6 zRk;)lKUU%kZGco4^DIBhY=`!P1#E5fBDl!GpTO%FZ>_V*d>$*X<%gwk)*he84sUoZ zaft@!4t6r9jo1Gjopd-z9bavi*_w+9;TP?R-&?|k1i4j~qs$cs>oSfkWMhun2Ugr7 z4k*~V`Sym3&|^jJe7O9WUAE6k`6oU>F%9{%+4Iap|CVYW{sPCnWZUs7+{)$=qJoA( zhoNm=;A+4G)_w0OINu&1cEk73zq^`1u<%bD+!gLb|4@}95M~7%&m<7Gn)7Dz@uu%( z{Bq``PZX*R9D!f5kF$Pd|MT#;6ZZPMEG0wWkt*Z1!WSLp6q%#*^rn9@UVW9Cc!v9v z3fSb0oU<2vqC)X_IDM_{hhn${Rsk)3g{L|=brTihXD*Ox{3JNhG5!rW8V-crP)l5q z18A@ue`m09q5ki_->t>Hx5S46Omn$K+jY3-S1O2F|6ur(L+?ts6mAsOD^9_C8IOC& zE4S!i4q2dY){S8+<(m&bDLotD!b?7JM%nuxc+zE`$WVU0dlh=F*nz$P_O7$z^&|Ma zL*Ar_!^|PihP}U6jyJ?>G@Qp!>t1mV&ZNTzWx$K8$xIYj4*nWG1zVYz@FEvl`qxng zJ^;sDwYdO}7ye55Za7u*i!~wr95B6K;-*D|yfVtvKLN+M{32T^d~6Msh6x)YmlbIf zJOj2e@JHBb@SK;gdJ8TQ0K zC~5Fi#p~fr`u9@&S@83v`sSK1Y|uoV^|pgXnJbp*!y=4>ukk>m-Ew3M{U)ujOMQR1 z&@t}jVj?rrU-|i>KRg(=O5A?9%n?!dZsXy`c9cUJT7HqIQf0y$$Q@_nHh3N5 z>#Ftq3YR+^bf=m2ihejcsm*$>P4)fO+dzzvet;8{{c+55v4PFQgK4kpCu@yj|1w(o zMY;-)Z{9N76zjVq4R3!orh{Lks__>nVOLuFD@#}-26~&@seg=(K9Go9mBPOGHvxG# zFx;XO{`I!*0GUf5x&5hsaZ6mZL z=I7t(hxIN$?jzJU*^$s|Ke@fxj)Z-1%tn9cjFG9`_y8eaVjDWlJokg%RPVQ%_KYol zQLa7?oQJp5uS^-7e2@ig_lsO*=x1;-a?A=v?vz8kE**NW95RPq(8ok-JIsTZ^gcHk zC3ML6(JzE@`IsYwPE??>=3Is|e)fwBwYNWg%r(goTJ1j3#Gl`>KcwZtSKNWhCq<)A z@nmE`c-8n)xI6k~YO!#0neOiEHU@sAiLcROnY{!)4KpaTj1%y;4sLpsGuF|61iTlv zO7$A}D~J4B_+5k?=3 z>E1pd&Z|8ufJHJF7RAR;XyUB{_T)`IC#_+2Ib>`$T=$^u-Iw5cu(gulO#0I1u_&DEs$>p{ z^72afgp|Az&W01i{SEPS0(YC=!zZ}x@Ecqf6A+7)f+Sp;O?=9)PE!6WyyI|p*Er=}2Mctmcc zls^F{4hV=|%D}Uo*ddOvOyo8BjNdek+it1tkrsDiP+MQyL^1lgxDU{r^HwK*H7*`(owjV(s zZEw-toaSD2YmhEC26DRJ6n-bk@tAga77KMKaQCAjcb9DpN6Bq zCyFV}(txp#vuy=F;Z@vdJRVx%6UA!$c{ux5f>v>h33#YF=c3xuN8p5*fFX)hhnmyq 
z;Be7EIIlHFkdp6&Q;6c_iW@#n;W}i;%op&Q!#+{M{X-hq&Y*B1-=g|&gq>TsV-_)W z+^`j7&!$kErShp2ErIvG%QsD>AbJja4qGl2`MB8hAr)+sCL-a`rxwKx;XHyAdqQ<# zFr3I0QvQh{Xoti3hNHtd8U~OS@OFn=T;RC9F%rl&Q z1ZBLE_k^dWg|8w~UwOf`X`DQd4{y#qUPQwrg32l^`(W=q_7*%{NTI>N!D_c5!7>j$4$qhjE~QcWy77T~0mTi_DimSG@4X?ggoWdy|{3XfO0tjBU1(8L$p6 z{xOW~c9_wL8{U*4)j-5q2H&YoMTV7$%(14(@f~3h7 z4jTv0-%7FI@R_bAh1C(7{$vePfgfuH`jc>yZj`{4<;qV)no9TNHxkM73JcJh-TZZNN6T>^4p-rJ(NHR6<-F z4a>vlZ8!@93zU2zuDmkBFSf9!(qk>&rBL;T86f#t$>{4O`x`WTLPLD2eFJLp9)$tU zQVJh|Pr=sVvIU;*2(nu5^VaNWAFtVP5q!!r2tGxBD{ZTlG5+@O6)bfA-$BC`8uHbE z>K|~Nrm-UB;+gQ7JC(&4d<0JH#zz=cDI@k#xVlq_)QZ#Kc-X2VQ{Yw*&{!cl0?S-KcPeJE};m*SZ(H<(Hn&3+~o5ED23>Z>Q|5CpC@Lff^_&fN_5j@8K z0FXTXL$c~$_NBHA&JVFV|KpAj6pjw>9plQyd2W^7Haw~Z-yEns-75Y`kiEmLoX4;% zEXl{og4*1o)r1B--OGJ|U(refs-568b=;y?!Q&DGqA)?Zc+yEK6SX^4xv2gr3KK5d zpjKF8&giagb97hF(%4!Z6O&WgCnY8)C8oB&twZ~yl$79yi#*L^+jVS{lk?>0HdV<} zlB<%ND~EVu>sFySp+QySf`tP-DQ5ZvPtT|-#x`%5(yoe0gFlY*B$%G5o(lbXgWWSc z%dgiiSK2VO3Y}p`1mk9UqH6!I7euh{2=^`K(3zgbdd>e42ep?yjm&kkJ=3(R<_!+Z z_e5S#I62?5?|SXh*x1su9`Fd`dc^VxyX9KUWJ~K*PC+l-R}DuR5@YN0?+-|8&$N* zon7^)^q1X3tCnVG+-PiQ>sl|o{(gsqV{Rned?Vrcg;hsHY)aLeCZ~-}xnWQwG`wL< zw7pK;lf-p`oA!HZnC+kU9M@{6v`b29pO}=)Px}tHwM*=nnjG{__N1B*20ghE*A)-m l=x%IYUFvC95KR9dLJxWqB6YJ<^T=FHt8Q*hh+Ovc{{fV00ww?e delta 11675 zcmai)3tUxI*1-3~~1T;m(ybn@*1ENAnf?`=|Vp>u8z(;12CMlIBcF|DLF!9k` z^NIP$F<%vSEJ>}wkxGS%${9_VvKcE%`|@k7aQy!J-hDX78TIq`^C$nk_FjAKwbovH z?}M9;-E`#GO~oa!J^34D<6-?9kG_f_`EsiMp$8c@|tcChV$7&*NzFTxv-1lZp zOkUs?5h3=96Ex%%y2S`RG~n3|ny4yri;-$TYvX8)zC=sd+*T7aY=)-9X(Fk_EsB&O zQ#)zmAY82YS7Y%_`dNqf)-D*3Z!PdWINrv3qOs1SuXFe>w$Vg(rCYd7g-j@H{7*Q| z#mSrJzZQ zChmi8P<#*UfEz0=hM%zUr|>)sY^LNl-lB;WHhuu!2)7K%{SNUm4R6!XLJc?pAGLA2 zTQ%`_*rViQ;0trC6~6+{z<}$N{2Tan*z`bLUm^pW9(^3HgTrNion5ojxW1F#L-!Al zAru8Z(M2g*0ms3nYd?c4Y@EgJ%b({H(W<}3ZX5J9yGw;~FG;={WOtnHHb}c#J9wX31n{Elg6&G@%wNV9H->*=@bc1ZD6FCa6{u>b*5F7#^f} zFua}qnTl7yY5#JImqNja=!W=Z%bq5AuU@+^^KL`$^zqua-vVZgtO_XA=+{SXn3*idbbop8MG;Aj8sCzWg^mj}C0UQTU z(u6;#p#M-!6xWlzO5xjZ+(j$96Eiij8xNR;W;%QTHurrge9|WWUw8~2$WR9N8K#Mz z#C)vcneaz&xM&qx(LNeJp}};~MPo>^?$`XMI^vlo9ucjwdm$ydEZiebD?^XM2OS6MPeX%CR8qb&EgeTXVJ(pv4K@M)X+)+3Gm1N0{}|Gg;O zI$iBCGPVY8v83S)7ar{1{i0>pcsLEYxyPG4Ou3xk5i8Y6xeTs;)FXqjoX-kYzcgWKwb}U)*w5OrCZ-g_R@rYVAy8Afpb~9MM%CfO= z^iu0|+6b4zrgh)Ij(o`c@TkHMQPzE`|P!|9oXNu3T~!Dq}8gnQ;criqw2ELRz{ z{tucMfP!pTZun0hCmU{#6-hzXMQ3BwV10?xe`humE%S*+s^8&9DE5ok9%>;ACiBBw zkH|$H%py3mz!O}Ed=SirQ%dE_w{xq4t z%Gy|eh2vm3kMv~fGu)P8x8f~ub+IQHF)9B7&V^06YX*_9jn9E?`u*Z_8tfC?GV}5Y zHo_&g394cH1Yyr!KEV`t_j0Rnt%uL8BqddSzb2Z`BB}pqZIZj;!W9020X~7pcTui3s@^Z z1=nuyh)K%e=s8SilRpBtfICUgkh8DCdV`!B)VDOGH8eOxm${laZ*%Pw_#g(Pp+F|% z_i*+~>)p})c}>(XUT%9i!IN+%a=!l z$6Uxnaw^vs^t9CbAiUMac?B$-{-$UD2HVSG+j%^wu)};MjD&M-cCCe5yxd@?Lku*I zj?$ZH{)Z<}&=I(l!E52pFo6y(um;Yz-Pk-Bo;|~=8mllg_jN1RAD|v*!RF~afO;GU z$D>a=v>=B|H~Ll8@ruJVR9KdYk_DRR2Ft=JJ@K_MdyGC+)0P`QWb2ol+F|2|iF&g} z2{|v|v3+<1OQqs(;N9<7nt5zUg4`_2-Ho+=*5$vjoWN9D2UhYZ-#mB!kY3?pT?ZMDd)QY_G13^d zRNtlh=dF`(KK!Kgd;%B$<`HLS)EMSjSDc>W z+W_&hv12_uer)vX_~8bxn55Qu9v*zX*Zf*2hT?}pCMs9*ljy0lEu`IEQtma&A*10E znQ;fsE z?yla3?^vDp<4<_9(m(Ni;X5PjL;Y0S$gbslTg>)~3gz)^W5+-A9%0%6RN2K~rD-)){$-AsfI0r}U@CMHqxwQuzA?3>~L+2Sk{HV9r?>$WWpxs_kr9KX{ zJgSMe=vSc(&VsYw@`_w#=q0!mIc5bjx8xYFOPgMAr7`+v{fThxlyUSoedu-mujnx7 zC$A97bYy&=nf3P};#N(Ry(Z>7#O6`a7TF^l8H?H3{?hk(nn+qy{ z>ulo>!_`qfRkDol8at!HJzwFw`0l=jlXD@yD(>PF=aug!8ttN-m0|v;FeamyPn=UR zdjl4UTv!xqUuxpLBi7^@a1l(DgT~H>Z#-&w_XD^kY%ZqhDU#PF9}2g&ZltbWU~p&z>M#ZSBWvHIbHXB}<3DYohTdzF?u3 zb$Al|5bR~WlDENmpYp0wTnE=5^NB1qZ}+eGE?(&iekzpxbKux2pZG_pzeBuXtc`IV 
zaHPLlNy`wQ$mJio9n-7W-k(_pAB8hu`P;j`>V_MZ)xoC&=$B==9?lH0)CZJtZy7XP**+?8wU081dnDp{+$6Vkk4xuJUl93Qh1z z1_piQ3I1YF4%j@9uSb9K1}|HZPr})3M&&|r5~(Ny9=%HZj6)w>i< z#no`x{t)}6!d`dc(jUFScY@>zuziBVuwLd3eo&P1j(2ehvvHHV+0~tW!TY7CKk3ejZPt!@p{ z<(u#4biXe2PLksOiBgU|zl+gB;d>0(in6pOppAFYLLT8thAA-|ry}?sK zvMZbGgk{6Q~!QzA|Z~g0K0ekW*%Pq|>8ZL~Tj84bCE| zN2vbp$EZ*D`5MMY`~d?mc!j?}t@!UWlro@R4M@!)C;r8?K*<|F$pqh%#gt}gz)SF& zB9Cw@PM?N{)_O##8sBm{F8+n2RXh}qxPfy~g|rBci}3kHiR#d2HXUp(%7Y8KaRe!O z4V*+4uTk9ZISSV?D`)rxkJwb{5oO#zq=65@=aKJL{Xd26Av`sYoI2sR6wIGbq47lq;8Qf$PJ#Q>u`hh2!BPs((dFAXYcU_*?ECl~8fK7GW??x8yYI9@u&J0rgMlN}gg?OLgvi__nM*GZ(QepY zcp{fsg(tYcW&AgWIQF^e;*aQ{b7i}rCb$IK&w}LTB&o|P#VcWZVVb#;!|O+n*ralx z^=eJ5KJMeBlp%14$KWKA()4hnH6+Cf3Xfa4Y|0vPfI^a^3^)dt{xkH@mKM)gD+`mS zVH?G+V;eF)TJ>MI4UgSnWqmbV)xz3EHM^*cwvb2pcT=lL3NuME;20b4d7YO$hfltm zp!=Jbm)&mI~*zh4W2M8biI)5zxyp#)ZZt{l?Q6~ za-HNMnV}{u-N!eSLEhm12#^CJ%kU6|WU@-iqp&>$J>I?Cz@_lk_UtO9{}3G4!7E}y z>=(b&Fo~B_ftv8K{ao|C)`~a7r+KNFF8&_AfIMC)eChxSV|o9m@xQ_6;9|wk93*LP z=3S!rB77cB3NrD3<~<5QN3vS2=uNn!lV!jOxS|iIl~Qo)`&2?)+!U6F&lxxi1B;aW zZCqJD&MS5kQ|YmxhbUA-UWIP^bnZeZX;=%8HbWx58&` zQx;?J1vq{HA7NCb?DQdpD}zF$Ry-b#h0Qur0(YelWh(u(@R?gUt{v+9FFAVog(01% zp#8!yp^_vzKqXWYgjbQY6s8ho!1M6ra=!ZTT}8UM*~d)q2_EBr07za5$9!#FYJY?a zgRIW~{-2N(whnd28N_*RmfjIOvPNF-YdGDaPLO1O=2p&QSQeHnSZ~fPT1~ha&UbSk z;8(QLfcTS~rnX!3Avh=ACyL{gi`UgqnW){#%0;QQ6ee7@U9GU!f+_u-#+3f9Rc%@% zB=k(`nUI{Al$g*fB_*kMVq%I2q(9>95J(&4YIJpR+oZ(qlP5ntrTY~W7^_FQ+T3{c zv4J0_xf(_N_rQSmS67Qb#0Hn!xOKYgu4~L5*gwnFsQG_uNC}kWxjGt`X1VJ0D~d1X zyVhLYu(rupT~p909or^cqr@!bpzV;ysG}dk zj%!)D*cE=oxD~e!ztK3g*p;tcQJk~HwdRU~n~Gh}UQw{?cISP8+UH%aD@xt#ox@vS zb;W^__0H^Th|@MZM_)snv&=Q|%07^x2t+J*xvne@#yRt<;^3}bvixdk5L8}uEpdlH z#ENU}`Iw|@rHPzArq{LdMD}RY>#B(&x4+M5_LA#_*1UJGo+F7M3ln0s&Gc#)c-Q28KpJY-(n1Y+z|J3lXJ}|J{b4j~ PX8Ys-*6ou6*aU?EJB$&v delta 61 zcmZ2-Tyn{A$%Yoj7N!>F7M3ln0s&HHrpA^A24)r}h9>5w#wLaamX-|hQGSMo2Gbu# Rvu3tW4q)9rIe<-27yv#L5yt=k diff --git a/artifacts/test_program_methods/burner.bin b/artifacts/test_program_methods/burner.bin index f21a5499b9f36938b8e9d12ac924273121052802..8b739241e2ee03c79669f19b7eca86739b445d6a 100644 GIT binary patch delta 52 zcmaFRApW31yrG4$g{g&k3(JfZ!Y0OsCZ-04MnG(8W^Qa?X<;<|;$D`NcK($t+xb_r H28sg!&O#81 delta 52 zcmaFRApW31yrG4$g{g&k3(JfZ!e*w%mIel97AA%!=BCCbh6a|F(=YC2NonU_$+De) IC2OEK0MJYjoB#j- diff --git a/artifacts/test_program_methods/chain_caller.bin b/artifacts/test_program_methods/chain_caller.bin index 56d2c07798cec930ffd00b5428c08dbe7b978c0b..e72262b285b7f96b8a193cd309d4f7b2797628d2 100644 GIT binary patch delta 58 zcmaFyTKvUp@rD-07N!>FEi4N6Bu$JBO-v08jeywH%-q<((!z)#KFZJ1WV*t47O{5! 
Ndo0`i@3GDl2LMhn6BPge delta 58 zcmaFyTKvUp@rD-07N!>FEi4N6B+X2XEe#CJEKCed%uS6=3=J$T8RDb-EKQ~>d}k4B O_rJ%o-TxlzOmP5ErxPat diff --git a/artifacts/test_program_methods/changer_claimer.bin b/artifacts/test_program_methods/changer_claimer.bin index 024480a1790e7af67c5365fc04dee3da466cb86b..86c3e695e134cc129704ccd129fbc59ac2fb909e 100644 GIT binary patch delta 59 zcmbO+Q+&ou@rD-07N!>FEi6LYq)dzrO-v08jeywH%-q<((!z)#KFZI~&|rG)0hY}6 OsoPk#Pu<4KECv7)^%36y delta 59 zcmbO+Q+&ou@rD-07N!>FEi6LYq|8i>Ee#CJEKCed%uS6=3=J$T8RDb-3=IvY*B)TW PY@fP~W&6}^tjuBn6@3x$ diff --git a/artifacts/test_program_methods/claimer.bin b/artifacts/test_program_methods/claimer.bin index 38fc98be870ba23bdd8b74797edcae6e1b06c178..5c23dfe4f4ba3761fce30ceb37da421a61107d7a 100644 GIT binary patch delta 47 zcmZ4TLu|FEiBPXgiVYMO-v08jeywH%-q<((!!`cZwbrxyd|vL!~usv B4@&?5 delta 47 zcmZ4TLu|FEiBPXgw0HiEe#CJEKCed%uS6=3=J$T+w+#NY|mT5x=kDa Di180y diff --git a/artifacts/test_program_methods/data_changer.bin b/artifacts/test_program_methods/data_changer.bin index 2443b1d4df4b847cbc0b603b790d9f2db98a5fdb..ee2d8f2b381743fc40fb6f7426561f8ae9298ad9 100644 GIT binary patch delta 47 zcmdncE54yuyrG4$g{g&k3ybasVH0CR6H^02BOo?4GdDJ{v@mLS*ub*gVFPP{7yw_~ B4dMU* delta 47 zcmdncE54yuyrG4$g{g&k3ybasVKY->O9KNl3ll>Vb5mmzLjz08c83it+Z{Hr7Ki}= DVT=v* diff --git a/artifacts/test_program_methods/extra_output.bin b/artifacts/test_program_methods/extra_output.bin index 428796ea3df1b3f882c11baa15c1d122f750af3c..39c0998954d7eaf98e680bf36164d46846f9dfd6 100644 GIT binary patch delta 52 zcmezJL+r~Bv4$4L7N!>FEi4LU&S1I`hK delta 52 zcmezJL+r~Bv4$4L7N!>FEi4U00r0)m;e9( diff --git a/artifacts/test_program_methods/malicious_authorization_changer.bin b/artifacts/test_program_methods/malicious_authorization_changer.bin index e6882add9af5201377337d8eca9cf8874577df72..cee9a3b96ee8c0e25653f766db309ead514710c1 100644 GIT binary patch delta 52 zcmcbxQvAY7@rD-07N!>FEi7z%g-whNO-v08jeywH%-q<((!ywZ{Z*Ef_S(HH+iUl- H28#m#$y*T% delta 52 zcmcbxQvAY7@rD-07N!>FEi7z%h0RQjEe#CJEKCed%uS6=3=J$Tr`KO)NolX$%d)+8 IFKe(k0Lx_&9RL6T diff --git a/artifacts/test_program_methods/minter.bin b/artifacts/test_program_methods/minter.bin index 2232725f31adc3aa97958bc0d5d4bafc5d7fca1a..285cae2fdeaf5e31bc9c1ae5a39f7632007c8f9e 100644 GIT binary patch delta 52 zcmdn+Lu|tjv4$4L7N!>FEi4&JgiVYMO-v08jeywH%-q<((!yx^!ksKB?e~|kY`?#R HHBcM?>M0UN delta 52 zcmdn+Lu|tjv4$4L7N!>FEi4&Jgw0HiEe#CJEKCed%uS6=3=J$Tr!U;elG1*E3Cs5T IOIQQN0qippSO5S3 diff --git a/artifacts/test_program_methods/missing_output.bin b/artifacts/test_program_methods/missing_output.bin index 873f61a87eeb4698c16f40703498f31db11a14d2..5b8fb3110d7d175e7a08451d233586df6ed3ba4b 100644 GIT binary patch delta 58 zcmdn-Q*6giv4$4L7N!>FEi50FNSYWMnwT0G8UeAXnYponrG*hge3YN1$@C4oSj5`p Nm$Gb^U&=aD8~|6166OE^ delta 58 zcmdn-Q*6giv4$4L7N!>FEi50FNSc`%TN)UcS(q4_n421#7#dhwGQ>yuS(;4Wu!}{k OU4ALccKM~OGsOW~0}}WE diff --git a/artifacts/test_program_methods/modified_transfer.bin b/artifacts/test_program_methods/modified_transfer.bin index 3f237713df8c4840d358ca818ee2a185a5b03a8d..5bb5fbbd6140752ce9bb119a9258ccd1912454ce 100644 GIT binary patch delta 47 zcmew|S^Udn@rD-07N!>FEi9RvgiVYMO-v08jeywH%-q<((!!{{W)sWynoX<~;sAwB B4>FEi9Rvgw0HiEe#CJEKCed%uS6=3=J$T+iNzlY_HkGS|JVq DhW!sq diff --git a/artifacts/test_program_methods/nonce_changer.bin b/artifacts/test_program_methods/nonce_changer.bin index 6a11964735dad4073cc4c8229c6706743eb0084e..ff63175fb10237a113535cba5adb7e19186596a2 100644 GIT binary patch delta 47 zcmbR7Q*6dhv4$4L7N!>FEiC7j2%8uinwT0G8UeAXnYponrG-)Z!zC=+A1-0tE)D>g C0uYY? 
delta 47 zcmbR7Q*6dhv4$4L7N!>FEiC7j2%DK2TN)UcS(q4_n421#7#dhwwm)3Lvi;!_*6rc| DnX(X} diff --git a/artifacts/test_program_methods/noop.bin b/artifacts/test_program_methods/noop.bin index ecb1662df5f7343f9938161a6afe2d89b1530b4e..0fc49a5522706eddfcd635d25be445081863569b 100644 GIT binary patch delta 52 zcmaFyP3*-tv4$4L7N!>FEi7q^g-whNO-v08jeywH%-q<((!yxE?@E@`_Irz2w%=RK H+9eJE`Unz% delta 52 zcmaFyP3*-tv4$4L7N!>FEi7q^h0RQjEe#CJEKCed%uS6=3=J$Tr~9sCNo~Kkm}UFD I#jIW80Q?dXlK=n! diff --git a/artifacts/test_program_methods/program_owner_changer.bin b/artifacts/test_program_methods/program_owner_changer.bin index 9d3364258f56e93655bbbbf267c7b32cb30393f0..7268da57a74355da6a9e68253938679269a0dc7f 100644 GIT binary patch delta 52 zcmdnFEi5HVgiVYMO-v08jeywH%-q<((!yx^+MO&Z?XQ=xY=6Ck HHBcM?@$nLl delta 52 zcmdnFEi5HVgw0HiEe#CJEKCed%uS6=3=J$Tr?1_~lG6Tq3Cs4^ IOIQQN0rUG3pa1{> diff --git a/artifacts/test_program_methods/simple_balance_transfer.bin b/artifacts/test_program_methods/simple_balance_transfer.bin index b8b7b705c0de5c38c96e3a9c7fd92d97195a1fea..f597d8bac671a6a873bc6c3e01849a72571f3d13 100644 GIT binary patch delta 52 zcmeBa7w>5oZ)jm`VQOLC!g6G_u!*suiK&615fGc2nHw8eS{P0Ldw?aS-FgkncI!2) Gf#LwWwh*iU delta 52 zcmeBa7w>5oZ)jm`VQOLC!g6G_u$if`rGbH&g^8hwxv8; Date: Thu, 29 Jan 2026 11:17:30 -0300 Subject: [PATCH 13/20] add state serialization unit test --- nssa/src/state.rs | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/nssa/src/state.rs b/nssa/src/state.rs index 4e5c3f2f..da86103b 100644 --- a/nssa/src/state.rs +++ b/nssa/src/state.rs @@ -16,6 +16,7 @@ use crate::{ pub const MAX_NUMBER_CHAINED_CALLS: usize = 10; #[derive(BorshSerialize, BorshDeserialize)] +#[cfg_attr(test, derive(Debug, PartialEq, Eq))] pub(crate) struct CommitmentSet { merkle_tree: MerkleTree, commitments: HashMap, @@ -61,6 +62,7 @@ impl CommitmentSet { } } +#[cfg_attr(test, derive(Debug, PartialEq, Eq))] struct NullifierSet(BTreeSet); impl NullifierSet { @@ -104,6 +106,7 @@ impl BorshDeserialize for NullifierSet { use borsh::{BorshDeserialize, BorshSerialize}; #[derive(BorshSerialize, BorshDeserialize)] +#[cfg_attr(test, derive(Debug, PartialEq, Eq))] pub struct V02State { public_state: HashMap, private_state: (CommitmentSet, NullifierSet), @@ -4570,4 +4573,15 @@ pub mod tests { // Assert - should fail because the malicious program tries to manipulate is_authorized assert!(matches!(result, Err(NssaError::CircuitProvingError(_)))); } + + #[test] + fn test_state_serialization_roundtrip() { + let account_id_1 = AccountId::new([1; 32]); + let account_id_2 = AccountId::new([2; 32]); + let initial_data = [(account_id_1, 100u128), (account_id_2, 151u128)]; + let state = V02State::new_with_genesis_accounts(&initial_data, &[]).with_test_programs(); + let bytes = borsh::to_vec(&state).unwrap(); + let state_from_bytes: V02State = borsh::from_slice(&bytes).unwrap(); + assert_eq!(state, state_from_bytes); + } } From 95a58faf947a80fe5a544df231d45cdc523106dc Mon Sep 17 00:00:00 2001 From: Sergio Chouhy Date: Thu, 29 Jan 2026 12:07:52 -0300 Subject: [PATCH 14/20] add delete blocks test --- sequencer_core/src/lib.rs | 46 +++++++++++++++++++++++++++++++++++---- 1 file changed, 42 insertions(+), 4 deletions(-) diff --git a/sequencer_core/src/lib.rs b/sequencer_core/src/lib.rs index 9417439b..3946b5dd 100644 --- a/sequencer_core/src/lib.rs +++ b/sequencer_core/src/lib.rs @@ -45,7 +45,11 @@ impl Display for TransactionMalformationError { impl std::error::Error for TransactionMalformationError {} impl SequencerCore { - /// Start Sequencer from 
configuration and construct transaction sender + /// Starts the sequencer using the provided configuration. + /// If an existing database is found, the sequencer state is loaded from it and + /// assumed to represent the correct latest state consistent with Bedrock-finalized data. + /// If no database is found, the sequencer performs a fresh start from genesis, + /// initializing its state with the accounts defined in the configuration file. pub fn start_from_config(config: SequencerConfig) -> (Self, MemPoolHandle) { let hashable_data = HashableBlockData { block_id: config.genesis_id, @@ -231,10 +235,22 @@ impl SequencerCore { &self.sequencer_config } - pub fn delete_blocks_from_db(&mut self, block_ids: &[u64]) -> Result<()> { - block_ids + /// Deletes finalized blocks from the sequencer's pending block list. + /// This method must be called when new blocks are finalized on Bedrock. + /// All pending blocks with an ID less than or equal to `last_finalized_block_id` + /// are removed from the database. + pub fn delete_finalized_blocks_from_db(&mut self, last_finalized_block_id: u64) -> Result<()> { + if let Some(first_pending_block_id) = self + .get_pending_blocks()? .iter() - .try_for_each(|&id| self.store.delete_block_at_id(id)) + .map(|block| block.header.block_id) + .min() + { + (first_pending_block_id..=last_finalized_block_id) + .try_for_each(|id| self.store.delete_block_at_id(id)) + } else { + Ok(()) + } } pub fn get_pending_blocks(&self) -> Result> { @@ -795,4 +811,26 @@ mod tests { config.initial_accounts[1].balance + balance_to_move ); } + + #[test] + fn test_delete_blocks() { + let config = setup_sequencer_config(); + let (mut sequencer, _mempool_handle) = SequencerCore::start_from_config(config); + sequencer + .produce_new_block_with_mempool_transactions() + .unwrap(); + sequencer + .produce_new_block_with_mempool_transactions() + .unwrap(); + sequencer + .produce_new_block_with_mempool_transactions() + .unwrap(); + assert_eq!(sequencer.get_pending_blocks().unwrap().len(), 4); + + let last_finalized_block = 3; + sequencer + .delete_finalized_blocks_from_db(last_finalized_block) + .unwrap(); + assert_eq!(sequencer.get_pending_blocks().unwrap().len(), 1); + } } From 22290c05fb950431702a7fb00d8a707fd506eb69 Mon Sep 17 00:00:00 2001 From: Sergio Chouhy Date: Thu, 29 Jan 2026 12:09:14 -0300 Subject: [PATCH 15/20] add docs and unit tests --- sequencer_core/src/lib.rs | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/sequencer_core/src/lib.rs b/sequencer_core/src/lib.rs index 3946b5dd..5e14c26f 100644 --- a/sequencer_core/src/lib.rs +++ b/sequencer_core/src/lib.rs @@ -253,6 +253,7 @@ impl SequencerCore { } } + /// Returns the list of stored pending blocks. 
pub fn get_pending_blocks(&self) -> Result> { Ok(self .store @@ -813,7 +814,7 @@ mod tests { } #[test] - fn test_delete_blocks() { + fn test_get_pending_blocks() { let config = setup_sequencer_config(); let (mut sequencer, _mempool_handle) = SequencerCore::start_from_config(config); sequencer @@ -826,11 +827,27 @@ mod tests { .produce_new_block_with_mempool_transactions() .unwrap(); assert_eq!(sequencer.get_pending_blocks().unwrap().len(), 4); + } + + #[test] + fn test_delete_blocks() { + let config = setup_sequencer_config(); + let (mut sequencer, _mempool_handle) = SequencerCore::start_from_config(config); + sequencer + .produce_new_block_with_mempool_transactions() + .unwrap(); + sequencer + .produce_new_block_with_mempool_transactions() + .unwrap(); + sequencer + .produce_new_block_with_mempool_transactions() + .unwrap(); let last_finalized_block = 3; sequencer .delete_finalized_blocks_from_db(last_finalized_block) .unwrap(); + assert_eq!(sequencer.get_pending_blocks().unwrap().len(), 1); } } From e44eade97b6e700fa968f521433a34a40d92ca41 Mon Sep 17 00:00:00 2001 From: Sergio Chouhy Date: Thu, 29 Jan 2026 12:25:48 -0300 Subject: [PATCH 16/20] fix error message --- storage/src/lib.rs | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/storage/src/lib.rs b/storage/src/lib.rs index a806d1ec..b96e0d61 100644 --- a/storage/src/lib.rs +++ b/storage/src/lib.rs @@ -201,10 +201,7 @@ impl RocksDBIO { ) })?, borsh::to_vec(state).map_err(|err| { - DbError::borsh_cast_message( - err, - Some("Failed to serialize first block id".to_string()), - ) + DbError::borsh_cast_message(err, Some("Failed to serialize NSSA state".to_string())) })?, ); From 476dc50482392a39b9bb4e1678b524a65f5bd102 Mon Sep 17 00:00:00 2001 From: Sergio Chouhy Date: Thu, 29 Jan 2026 13:47:39 -0300 Subject: [PATCH 17/20] handle comments --- bedrock_client/src/lib.rs | 1 + indexer_service/protocol/src/lib.rs | 8 +++++++- integration_tests/src/lib.rs | 8 ++++++-- nssa/src/state.rs | 3 +-- sequencer_core/src/lib.rs | 26 +++++++++++++------------- sequencer_runner/src/lib.rs | 2 +- 6 files changed, 29 insertions(+), 19 deletions(-) diff --git a/bedrock_client/src/lib.rs b/bedrock_client/src/lib.rs index 631216bd..b34687c3 100644 --- a/bedrock_client/src/lib.rs +++ b/bedrock_client/src/lib.rs @@ -17,6 +17,7 @@ pub struct BackoffConfig { // Simple wrapper // maybe extend in the future for our purposes +// `Clone` is cheap because `CommonHttpClient` is internally reference counted (`Arc`). 
#[derive(Clone)] pub struct BedrockClient { http_client: CommonHttpClient, diff --git a/indexer_service/protocol/src/lib.rs b/indexer_service/protocol/src/lib.rs index 096fbc2f..627d8513 100644 --- a/indexer_service/protocol/src/lib.rs +++ b/indexer_service/protocol/src/lib.rs @@ -30,7 +30,6 @@ pub struct Account { pub type BlockId = u64; pub type TimeStamp = u64; -pub type MsgId = [u8; 32]; #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] pub struct Block { @@ -190,6 +189,13 @@ pub struct Hash( pub [u8; 32], ); +#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] +pub struct MsgId( + #[serde(with = "base64::arr")] + #[schemars(with = "String", description = "base64-encoded Bedrock message id")] + pub [u8; 32], +); + #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] pub enum BedrockStatus { Pending, diff --git a/integration_tests/src/lib.rs b/integration_tests/src/lib.rs index 524621ad..5d810166 100644 --- a/integration_tests/src/lib.rs +++ b/integration_tests/src/lib.rs @@ -163,8 +163,12 @@ impl TestContext { // Setting port to 0 lets the OS choose a free port for us config.port = 0; - let (sequencer_server_handle, sequencer_addr, sequencer_loop_handle, _) = - sequencer_runner::startup_sequencer(config).await?; + let ( + sequencer_server_handle, + sequencer_addr, + sequencer_loop_handle, + _retry_pending_blocks_handle, + ) = sequencer_runner::startup_sequencer(config).await?; Ok(( sequencer_server_handle, diff --git a/nssa/src/state.rs b/nssa/src/state.rs index da86103b..d6bf8d60 100644 --- a/nssa/src/state.rs +++ b/nssa/src/state.rs @@ -1,5 +1,6 @@ use std::collections::{BTreeSet, HashMap, HashSet}; +use borsh::{BorshDeserialize, BorshSerialize}; use nssa_core::{ Commitment, CommitmentSetDigest, DUMMY_COMMITMENT, MembershipProof, Nullifier, account::{Account, AccountId}, @@ -103,8 +104,6 @@ impl BorshDeserialize for NullifierSet { } } -use borsh::{BorshDeserialize, BorshSerialize}; - #[derive(BorshSerialize, BorshDeserialize)] #[cfg_attr(test, derive(Debug, PartialEq, Eq))] pub struct V02State { diff --git a/sequencer_core/src/lib.rs b/sequencer_core/src/lib.rs index 5e14c26f..119d5d73 100644 --- a/sequencer_core/src/lib.rs +++ b/sequencer_core/src/lib.rs @@ -80,20 +80,20 @@ impl SequencerCore { info!( "No database found when starting the sequencer. Creating a fresh new with the initial data in config" ); - let mut initial_commitments = vec![]; + let initial_commitments: Vec = config + .initial_commitments + .iter() + .map(|init_comm_data| { + let npk = &init_comm_data.npk; - for init_comm_data in config.initial_commitments.clone() { - let npk = init_comm_data.npk; + let mut acc = init_comm_data.account.clone(); - let mut acc = init_comm_data.account; + acc.program_owner = + nssa::program::Program::authenticated_transfer_program().id(); - acc.program_owner = - nssa::program::Program::authenticated_transfer_program().id(); - - let comm = nssa_core::Commitment::new(&npk, &acc); - - initial_commitments.push(comm); - } + nssa_core::Commitment::new(&npk, &acc) + }) + .collect(); let init_accs: Vec<(nssa::AccountId, u128)> = config .initial_accounts @@ -239,7 +239,7 @@ impl SequencerCore { /// This method must be called when new blocks are finalized on Bedrock. /// All pending blocks with an ID less than or equal to `last_finalized_block_id` /// are removed from the database. 
- pub fn delete_finalized_blocks_from_db(&mut self, last_finalized_block_id: u64) -> Result<()> { + pub fn clean_finalized_blocks_from_db(&mut self, last_finalized_block_id: u64) -> Result<()> { if let Some(first_pending_block_id) = self .get_pending_blocks()? .iter() @@ -845,7 +845,7 @@ mod tests { let last_finalized_block = 3; sequencer - .delete_finalized_blocks_from_db(last_finalized_block) + .clean_finalized_blocks_from_db(last_finalized_block) .unwrap(); assert_eq!(sequencer.get_pending_blocks().unwrap().len(), 1); diff --git a/sequencer_runner/src/lib.rs b/sequencer_runner/src/lib.rs index 8540c2c2..8dbea525 100644 --- a/sequencer_runner/src/lib.rs +++ b/sequencer_runner/src/lib.rs @@ -68,7 +68,7 @@ pub async fn startup_sequencer( }; info!("Resubmitting {} pending blocks", pending_blocks.len()); - for block in pending_blocks.iter() { + for block in &pending_blocks { if let Err(e) = client.submit_block_to_bedrock(block).await { warn!( "Failed to resubmit block with id {} with error {}", From de54744893a7762fcb8d2024a7e4eacf58635f81 Mon Sep 17 00:00:00 2001 From: Sergio Chouhy Date: Thu, 29 Jan 2026 15:21:15 -0300 Subject: [PATCH 18/20] use own type MantleMsgId --- Cargo.lock | 1 - common/Cargo.toml | 1 - common/src/block.rs | 26 +++---------------- common/src/test_utils.rs | 4 +-- indexer_service/protocol/src/convert.rs | 4 +-- sequencer_core/src/block_settlement_client.rs | 2 +- sequencer_core/src/block_store.rs | 4 +-- sequencer_core/src/lib.rs | 11 ++++---- 8 files changed, 13 insertions(+), 40 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 7ad1d1e2..b637399e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1341,7 +1341,6 @@ dependencies = [ "hex", "log", "logos-blockchain-common-http-client", - "logos-blockchain-core", "nssa", "nssa_core", "reqwest", diff --git a/common/Cargo.toml b/common/Cargo.toml index def7f2fa..14d00f09 100644 --- a/common/Cargo.toml +++ b/common/Cargo.toml @@ -17,6 +17,5 @@ log.workspace = true hex.workspace = true borsh.workspace = true base64.workspace = true -logos-blockchain-core.workspace = true url.workspace = true logos-blockchain-common-http-client.workspace = true diff --git a/common/src/block.rs b/common/src/block.rs index 1eab90d5..391bc57d 100644 --- a/common/src/block.rs +++ b/common/src/block.rs @@ -1,10 +1,10 @@ use borsh::{BorshDeserialize, BorshSerialize}; -use logos_blockchain_core::mantle::ops::channel::MsgId; use sha2::{Digest, Sha256, digest::FixedOutput}; use crate::transaction::EncodedTransaction; pub type HashType = [u8; 32]; +pub type MantleMsgId = [u8; 32]; #[derive(Debug, Clone)] /// Our own hasher. 
@@ -50,11 +50,7 @@ pub struct Block { pub header: BlockHeader, pub body: BlockBody, pub bedrock_status: BedrockStatus, - #[borsh( - serialize_with = "borsh_msg_id::serialize", - deserialize_with = "borsh_msg_id::deserialize" - )] - pub bedrock_parent_id: MsgId, + pub bedrock_parent_id: MantleMsgId, } #[derive(Debug, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)] @@ -69,7 +65,7 @@ impl HashableBlockData { pub fn into_pending_block( self, signing_key: &nssa::PrivateKey, - bedrock_parent_id: MsgId, + bedrock_parent_id: MantleMsgId, ) -> Block { let data_bytes = borsh::to_vec(&self).unwrap(); let signature = nssa::Signature::new(signing_key, &data_bytes); @@ -106,22 +102,6 @@ impl From for HashableBlockData { } } -mod borsh_msg_id { - use std::io::{Read, Write}; - - use logos_blockchain_core::mantle::ops::channel::MsgId; - - pub fn serialize(v: &MsgId, w: &mut W) -> std::io::Result<()> { - w.write_all(v.as_ref()) - } - - pub fn deserialize(r: &mut R) -> std::io::Result { - let mut buf = [0u8; 32]; - r.read_exact(&mut buf)?; - Ok(MsgId::from(buf)) - } -} - #[cfg(test)] mod tests { use crate::{block::HashableBlockData, test_utils}; diff --git a/common/src/test_utils.rs b/common/src/test_utils.rs index 8ae97599..80703342 100644 --- a/common/src/test_utils.rs +++ b/common/src/test_utils.rs @@ -1,5 +1,3 @@ -use logos_blockchain_core::mantle::ops::channel::MsgId; - use crate::{ block::{Block, HashableBlockData}, transaction::{EncodedTransaction, NSSATransaction}, @@ -32,7 +30,7 @@ pub fn produce_dummy_block( transactions, }; - block_data.into_pending_block(&sequencer_sign_key_for_testing(), MsgId::from([0; 32])) + block_data.into_pending_block(&sequencer_sign_key_for_testing(), [0; 32]) } pub fn produce_dummy_empty_transaction() -> EncodedTransaction { diff --git a/indexer_service/protocol/src/convert.rs b/indexer_service/protocol/src/convert.rs index 2c5d4b09..b6d9d348 100644 --- a/indexer_service/protocol/src/convert.rs +++ b/indexer_service/protocol/src/convert.rs @@ -606,7 +606,7 @@ impl TryFrom for Block { header: header.into(), body: body.try_into()?, bedrock_status: bedrock_status.into(), - bedrock_parent_id: bedrock_parent_id.into(), + bedrock_parent_id: MsgId(bedrock_parent_id), }) } } @@ -626,7 +626,7 @@ impl TryFrom for common::block::Block { header: header.try_into()?, body: body.try_into()?, bedrock_status: bedrock_status.into(), - bedrock_parent_id: bedrock_parent_id.into(), + bedrock_parent_id: bedrock_parent_id.0, }) } } diff --git a/sequencer_core/src/block_settlement_client.rs b/sequencer_core/src/block_settlement_client.rs index 53839159..f99a116e 100644 --- a/sequencer_core/src/block_settlement_client.rs +++ b/sequencer_core/src/block_settlement_client.rs @@ -47,7 +47,7 @@ impl BlockSettlementClient { let inscribe_op = InscriptionOp { channel_id: self.bedrock_channel_id, inscription: inscription_data, - parent: block.bedrock_parent_id, + parent: block.bedrock_parent_id.into(), signer: verifying_key, }; let inscribe_op_id = inscribe_op.id(); diff --git a/sequencer_core/src/block_store.rs b/sequencer_core/src/block_store.rs index 3aed2143..a0b07445 100644 --- a/sequencer_core/src/block_store.rs +++ b/sequencer_core/src/block_store.rs @@ -108,7 +108,6 @@ pub(crate) fn block_to_transactions_map(block: &Block) -> HashMap #[cfg(test)] mod tests { use common::{block::HashableBlockData, test_utils::sequencer_sign_key_for_testing}; - use logos_blockchain_core::mantle::ops::channel::MsgId; use tempfile::tempdir; use super::*; @@ -127,8 +126,7 @@ mod tests { transactions: vec![], 
}; - let genesis_block = - genesis_block_hashable_data.into_pending_block(&signing_key, MsgId::from([0; 32])); + let genesis_block = genesis_block_hashable_data.into_pending_block(&signing_key, [0; 32]); // Start an empty node store let mut node_store = SequencerStore::open_db_with_genesis(path, Some(genesis_block), signing_key).unwrap(); diff --git a/sequencer_core/src/lib.rs b/sequencer_core/src/lib.rs index 119d5d73..efddcd7e 100644 --- a/sequencer_core/src/lib.rs +++ b/sequencer_core/src/lib.rs @@ -5,12 +5,11 @@ use anyhow::Result; use common::PINATA_BASE58; use common::{ HashType, - block::{BedrockStatus, Block, HashableBlockData}, + block::{BedrockStatus, Block, HashableBlockData, MantleMsgId}, transaction::{EncodedTransaction, NSSATransaction}, }; use config::SequencerConfig; use log::{info, warn}; -use logos_blockchain_core::mantle::ops::channel::MsgId; use mempool::{MemPool, MemPoolHandle}; use serde::{Deserialize, Serialize}; @@ -27,7 +26,7 @@ pub struct SequencerCore { sequencer_config: SequencerConfig, chain_height: u64, block_settlement_client: Option, - last_bedrock_msg_id: MsgId, + last_bedrock_msg_id: MantleMsgId, } #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)] @@ -59,7 +58,7 @@ impl SequencerCore { }; let signing_key = nssa::PrivateKey::try_new(config.signing_key).unwrap(); - let channel_genesis_msg_id = MsgId::from([0; 32]); + let channel_genesis_msg_id = [0; 32]; let genesis_block = hashable_data.into_pending_block(&signing_key, channel_genesis_msg_id); // Sequencer should panic if unable to open db, @@ -91,7 +90,7 @@ impl SequencerCore { acc.program_owner = nssa::program::Program::authenticated_transfer_program().id(); - nssa_core::Commitment::new(&npk, &acc) + nssa_core::Commitment::new(npk, &acc) }) .collect(); @@ -152,7 +151,7 @@ impl SequencerCore { let block = block_data.into_pending_block(self.store.signing_key(), self.last_bedrock_msg_id); let msg_id = client.submit_block_to_bedrock(&block).await?; - self.last_bedrock_msg_id = msg_id; + self.last_bedrock_msg_id = msg_id.into(); log::info!("Posted block data to Bedrock"); } From df02c593107a8358b05c3972f0a9885be2820b95 Mon Sep 17 00:00:00 2001 From: Sergio Chouhy Date: Thu, 29 Jan 2026 15:26:29 -0300 Subject: [PATCH 19/20] minor rename --- indexer_service/protocol/src/convert.rs | 2 +- indexer_service/protocol/src/lib.rs | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/indexer_service/protocol/src/convert.rs b/indexer_service/protocol/src/convert.rs index b6d9d348..8c6de2f4 100644 --- a/indexer_service/protocol/src/convert.rs +++ b/indexer_service/protocol/src/convert.rs @@ -606,7 +606,7 @@ impl TryFrom for Block { header: header.into(), body: body.try_into()?, bedrock_status: bedrock_status.into(), - bedrock_parent_id: MsgId(bedrock_parent_id), + bedrock_parent_id: MantleMsgId(bedrock_parent_id), }) } } diff --git a/indexer_service/protocol/src/lib.rs b/indexer_service/protocol/src/lib.rs index 627d8513..f12bdf5b 100644 --- a/indexer_service/protocol/src/lib.rs +++ b/indexer_service/protocol/src/lib.rs @@ -36,7 +36,7 @@ pub struct Block { pub header: BlockHeader, pub body: BlockBody, pub bedrock_status: BedrockStatus, - pub bedrock_parent_id: MsgId, + pub bedrock_parent_id: MantleMsgId, } #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] @@ -190,7 +190,7 @@ pub struct Hash( ); #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, JsonSchema)] -pub struct MsgId( +pub struct MantleMsgId( #[serde(with = "base64::arr")] #[schemars(with = 
"String", description = "base64-encoded Bedrock message id")] pub [u8; 32], From 2e071b538f4f9724eda20dfb895ceb38ea15dc4b Mon Sep 17 00:00:00 2001 From: Sergio Chouhy Date: Thu, 29 Jan 2026 15:56:06 -0300 Subject: [PATCH 20/20] drop retry pending block handle on Drop --- integration_tests/src/lib.rs | 29 +++++++++++++++++++++++------ 1 file changed, 23 insertions(+), 6 deletions(-) diff --git a/integration_tests/src/lib.rs b/integration_tests/src/lib.rs index 5d810166..21e1ca81 100644 --- a/integration_tests/src/lib.rs +++ b/integration_tests/src/lib.rs @@ -40,6 +40,7 @@ static LOGGER: LazyLock<()> = LazyLock::new(env_logger::init); pub struct TestContext { sequencer_server_handle: ServerHandle, sequencer_loop_handle: JoinHandle>, + sequencer_retry_pending_blocks_handle: JoinHandle>, indexer_loop_handle: Option>>, sequencer_client: SequencerClient, wallet: WalletCore, @@ -94,10 +95,15 @@ impl TestContext { debug!("Test context setup"); - let (sequencer_server_handle, sequencer_addr, sequencer_loop_handle, temp_sequencer_dir) = - Self::setup_sequencer(sequencer_config) - .await - .context("Failed to setup sequencer")?; + let ( + sequencer_server_handle, + sequencer_addr, + sequencer_loop_handle, + sequencer_retry_pending_blocks_handle, + temp_sequencer_dir, + ) = Self::setup_sequencer(sequencer_config) + .await + .context("Failed to setup sequencer")?; // Convert 0.0.0.0 to 127.0.0.1 for client connections // When binding to port 0, the server binds to 0.0.0.0: @@ -130,6 +136,7 @@ impl TestContext { Ok(Self { sequencer_server_handle, sequencer_loop_handle, + sequencer_retry_pending_blocks_handle, indexer_loop_handle, sequencer_client, wallet, @@ -140,6 +147,7 @@ impl TestContext { Ok(Self { sequencer_server_handle, sequencer_loop_handle, + sequencer_retry_pending_blocks_handle, indexer_loop_handle: None, sequencer_client, wallet, @@ -151,7 +159,13 @@ impl TestContext { async fn setup_sequencer( mut config: SequencerConfig, - ) -> Result<(ServerHandle, SocketAddr, JoinHandle>, TempDir)> { + ) -> Result<( + ServerHandle, + SocketAddr, + JoinHandle>, + JoinHandle>, + TempDir, + )> { let temp_sequencer_dir = tempfile::tempdir().context("Failed to create temp dir for sequencer home")?; @@ -167,13 +181,14 @@ impl TestContext { sequencer_server_handle, sequencer_addr, sequencer_loop_handle, - _retry_pending_blocks_handle, + sequencer_retry_pending_blocks_handle, ) = sequencer_runner::startup_sequencer(config).await?; Ok(( sequencer_server_handle, sequencer_addr, sequencer_loop_handle, + sequencer_retry_pending_blocks_handle, temp_sequencer_dir, )) } @@ -234,6 +249,7 @@ impl Drop for TestContext { let Self { sequencer_server_handle, sequencer_loop_handle, + sequencer_retry_pending_blocks_handle, indexer_loop_handle, sequencer_client: _, wallet: _, @@ -242,6 +258,7 @@ impl Drop for TestContext { } = self; sequencer_loop_handle.abort(); + sequencer_retry_pending_blocks_handle.abort(); if let Some(indexer_loop_handle) = indexer_loop_handle { indexer_loop_handle.abort(); }