fix start_from_config function

Sergio Chouhy 2026-01-27 16:03:21 -03:00
parent 561b4a2dcf
commit bc2350b349
3 changed files with 43 additions and 29 deletions
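In short, the fix makes start_from_config resume from the NSSA state persisted in RocksDB instead of rebuilding it by replaying every stored block at startup. A minimal sketch of the resulting load-or-init step, using names taken from the diff below (illustrative only, not the exact committed code):

// Prefer the state persisted by a previous run; otherwise build a fresh one
// from the genesis accounts and commitments, as before.
let state = store
    .get_nssa_state()
    .unwrap_or_else(|| nssa::V02State::new_with_genesis_accounts(&init_accs, &initial_commitments));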


@ -90,6 +90,10 @@ impl SequencerStore {
self.tx_hash_to_block_map.extend(new_transactions_map);
Ok(())
}
+pub fn get_nssa_state(&self) -> Option<V02State> {
+self.dbio.get_nssa_state().ok()
+}
}
pub(crate) fn block_to_transactions_map(block: &Block) -> HashMap<HashType, u64> {


@ -60,7 +60,7 @@ impl SequencerCore {
// Sequencer should panic if unable to open db,
// as fixing this issue may require actions non-native to program scope
-let block_store = SequencerStore::open_db_with_genesis(
+let store = SequencerStore::open_db_with_genesis(
&config.home.join("rocksdb"),
Some(genesis_block),
signing_key,
@ -86,7 +86,9 @@ impl SequencerCore {
.map(|acc_data| (acc_data.account_id.parse().unwrap(), acc_data.balance))
.collect();
-let mut state = nssa::V02State::new_with_genesis_accounts(&init_accs, &initial_commitments);
+let mut state = store.get_nssa_state().unwrap_or_else(|| {
+nssa::V02State::new_with_genesis_accounts(&init_accs, &initial_commitments)
+});
#[cfg(feature = "testnet")]
state.add_pinata_program(PINATA_BASE58.parse().unwrap());
@ -98,9 +100,9 @@ impl SequencerCore {
});
let channel_genesis_msg_id = MsgId::from([0; 32]);
-let mut this = Self {
+let sequencer_core = Self {
state,
-store: block_store,
+store,
mempool,
chain_height: config.genesis_id,
sequencer_config: config,
@ -108,28 +110,7 @@ impl SequencerCore {
last_bedrock_msg_id: channel_genesis_msg_id,
};
-this.sync_state_with_stored_blocks();
-(this, mempool_handle)
-}
-/// If there are stored blocks ahead of the current height, this method will load and process
-/// all transaction in them in the order they are stored. The NSSA state will be updated
-/// accordingly.
-fn sync_state_with_stored_blocks(&mut self) {
-let mut next_block_id = self.sequencer_config.genesis_id + 1;
-while let Ok(block) = self.store.get_block_at_id(next_block_id) {
-for encoded_transaction in block.body.transactions {
-let transaction = NSSATransaction::try_from(&encoded_transaction).unwrap();
-// Process transaction and update state
-self.execute_check_transaction_on_state(transaction)
-.unwrap();
-// Update the tx hash to block id map.
-self.store.insert(&encoded_transaction, next_block_id);
-}
-self.chain_height = next_block_id;
-next_block_id += 1;
-}
+(sequencer_core, mempool_handle)
}
fn execute_check_transaction_on_state(
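With the replay loop gone, the state handed back by get_nssa_state is only as fresh as the last write made through put_nssa_state_in_db, so the state is presumably persisted whenever a block is committed. A hedged sketch of that write side, assuming a RocksDBIO handle named dbio and an up-to-date state value are in scope, and that the batch is committed through the store's existing write path (not shown in this diff):

use rocksdb::WriteBatch;

// Stage the updated NSSA state in the same batch as the block write so that
// the state on disk always corresponds to the last committed block.
let mut batch = WriteBatch::default();
dbio.put_nssa_state_in_db(&state, &mut batch)?;
// ... add the block and metadata to `batch`, then commit it via the existing write path ...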


@ -38,7 +38,7 @@ pub const CF_BLOCK_NAME: &str = "cf_block";
/// Name of meta column family
pub const CF_META_NAME: &str = "cf_meta";
/// Name of state column family
-pub const CF_NSSA_STATE_NAME: &str = "cf_state";
+pub const CF_NSSA_STATE_NAME: &str = "cf_nssa_state";
pub type DbResult<T> = Result<T, DbError>;
@ -191,9 +191,9 @@ impl RocksDBIO {
}
pub fn put_nssa_state_in_db(&self, state: &V02State, batch: &mut WriteBatch) -> DbResult<()> {
-let cf_state = self.nssa_state_column();
+let cf_nssa_state = self.nssa_state_column();
batch.put_cf(
-&cf_state,
+&cf_nssa_state,
borsh::to_vec(&DB_NSSA_STATE_KEY).map_err(|err| {
DbError::borsh_cast_message(
err,
@ -356,6 +356,35 @@ impl RocksDBIO {
}
}
+pub fn get_nssa_state(&self) -> DbResult<V02State> {
+let cf_nssa_state = self.nssa_state_column();
+let res = self
+.db
+.get_cf(
+&cf_nssa_state,
+borsh::to_vec(&DB_NSSA_STATE_KEY).map_err(|err| {
+DbError::borsh_cast_message(
+err,
+Some("Failed to serialize NSSA state key".to_string()),
+)
+})?,
+)
+.map_err(|rerr| DbError::rocksdb_cast_message(rerr, None))?;
+if let Some(data) = res {
+Ok(borsh::from_slice::<V02State>(&data).map_err(|serr| {
+DbError::borsh_cast_message(
+serr,
+Some("Failed to deserialize NSSA state".to_string()),
+)
+})?)
+} else {
+Err(DbError::db_interaction_error(
+"NSSA state not found in the database".to_string(),
+))
+}
+}
pub fn delete_block(&self, block_id: u64) -> DbResult<()> {
let cf_block = self.block_column();
let key = borsh::to_vec(&block_id).map_err(|err| {
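Note that RocksDBIO::get_nssa_state reports a missing state as an Err, while the SequencerStore wrapper added in the first file collapses both that case and genuine read or decode failures into None via .ok(). A small sketch of what that wrapper amounts to, assuming a dbio: RocksDBIO handle is in scope (illustrative only):

// Equivalent to dbio.get_nssa_state().ok(): both "no state stored yet" and a
// real I/O or borsh error end up as None for the caller in start_from_config.
let state: Option<V02State> = match dbio.get_nssa_state() {
    Ok(state) => Some(state),
    Err(_err) => None,
};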