Mirror of https://github.com/logos-blockchain/lssa.git
Synced 2026-02-18 04:13:07 +00:00
fix: always store latest block hash in db
Parent: cb6fb881ac
Commit: 0a8d6f1eb4
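
In outline, the fix persists each block's hash under a dedicated metadata key at write time and has block production read that key back, instead of re-deriving the hash from the block stored at the current chain height. A rough sketch of that flow, using an in-memory map in place of RocksDB; the type and method names below are illustrative stand-ins, not the project's API:

use std::collections::HashMap;

/// Stand-in for the metadata column; the real code borsh-serializes
/// keys and values and stores them in RocksDB.
struct MetaStore {
    meta: HashMap<&'static str, [u8; 32]>,
}

impl MetaStore {
    /// Written for the genesis block and on every later block insert,
    /// so a value is always present once the DB is initialized.
    fn put_latest_block_hash(&mut self, hash: [u8; 32]) {
        self.meta.insert("latest_block_hash", hash);
    }

    fn latest_block_hash(&self) -> Option<[u8; 32]> {
        self.meta.get("latest_block_hash").copied()
    }
}

fn main() {
    let mut store = MetaStore { meta: HashMap::new() };
    store.put_latest_block_hash([0u8; 32]); // genesis
    store.put_latest_block_hash([7u8; 32]); // block 1
    assert_eq!(store.latest_block_hash(), Some([7u8; 32]));
}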
@@ -68,8 +68,8 @@ impl SequencerStore {
             None
         }
 
-    pub fn insert(&mut self, tx: &NSSATransaction, block_id: u64) {
-        self.tx_hash_to_block_map.insert(tx.hash(), block_id);
+    pub fn latest_block_hash(&self) -> Result<HashType> {
+        Ok(self.dbio.latest_block_hash()?)
     }
 
     pub fn genesis_id(&self) -> u64 {
@@ -144,4 +144,60 @@ mod tests {
         let retrieved_tx = node_store.get_transaction_by_hash(tx.hash());
         assert_eq!(Some(tx), retrieved_tx);
     }
+
+    #[test]
+    fn test_latest_block_hash_returns_genesis_hash_initially() {
+        let temp_dir = tempdir().unwrap();
+        let path = temp_dir.path();
+
+        let signing_key = sequencer_sign_key_for_testing();
+
+        let genesis_block_hashable_data = HashableBlockData {
+            block_id: 0,
+            prev_block_hash: HashType([0; 32]),
+            timestamp: 0,
+            transactions: vec![],
+        };
+
+        let genesis_block = genesis_block_hashable_data.into_pending_block(&signing_key, [0; 32]);
+        let genesis_hash = genesis_block.header.hash;
+
+        let node_store =
+            SequencerStore::open_db_with_genesis(path, Some(&genesis_block), signing_key).unwrap();
+
+        // Verify that initially the latest block hash equals genesis hash
+        let latest_hash = node_store.latest_block_hash().unwrap();
+        assert_eq!(latest_hash, genesis_hash);
+    }
+
+    #[test]
+    fn test_latest_block_hash_updates_after_new_block() {
+        let temp_dir = tempdir().unwrap();
+        let path = temp_dir.path();
+
+        let signing_key = sequencer_sign_key_for_testing();
+
+        let genesis_block_hashable_data = HashableBlockData {
+            block_id: 0,
+            prev_block_hash: HashType([0; 32]),
+            timestamp: 0,
+            transactions: vec![],
+        };
+
+        let genesis_block = genesis_block_hashable_data.into_pending_block(&signing_key, [0; 32]);
+        let mut node_store =
+            SequencerStore::open_db_with_genesis(path, Some(&genesis_block), signing_key).unwrap();
+
+        // Add a new block
+        let tx = common::test_utils::produce_dummy_empty_transaction();
+        let block = common::test_utils::produce_dummy_block(1, None, vec![tx.clone()]);
+        let block_hash = block.header.hash;
+
+        let dummy_state = V02State::new_with_genesis_accounts(&[], &[]);
+        node_store.update(&block, &dummy_state).unwrap();
+
+        // Verify that the latest block hash now equals the new block's hash
+        let latest_hash = node_store.latest_block_hash().unwrap();
+        assert_eq!(latest_hash, block_hash);
+    }
 }
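
Both new tests follow the existing store tests' pattern of opening the database in a throwaway directory so no on-disk state leaks between runs. A minimal sketch of that pattern with the tempfile crate (the store itself is omitted; names are illustrative):

use tempfile::tempdir;

fn main() -> std::io::Result<()> {
    // The directory, and any RocksDB files created under it, is removed
    // when `temp_dir` is dropped at the end of the test.
    let temp_dir = tempdir()?;
    let path = temp_dir.path();
    println!("a test DB would live under {}", path.display());
    Ok(())
}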
@@ -1,6 +1,6 @@
 use std::{fmt::Display, path::Path, time::Instant};
 
-use anyhow::{Result, anyhow};
+use anyhow::{Context as _, Result, anyhow};
 #[cfg(feature = "testnet")]
 use common::PINATA_BASE58;
 use common::{
@@ -214,7 +214,10 @@ impl<BC: BlockSettlementClientTrait, IC: IndexerClientTrait> SequencerCore<BC, I
             }
         }
 
-        let prev_block_hash = self.store.get_block_at_id(self.chain_height)?.header.hash;
+        let prev_block_hash = self
+            .store
+            .latest_block_hash()
+            .context("Failed to get latest block hash from store")?;
 
         let curr_time = chrono::Utc::now().timestamp_millis() as u64;
 
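
The new call site attaches context to the store error and converts it into the sequencer's anyhow-based error type, which is why the import hunk above switches to `use anyhow::{Context as _, Result, anyhow};`. A small self-contained illustration of that pattern; the function names here are made up:

use anyhow::{Context as _, Result};

fn read_latest_hash() -> std::io::Result<[u8; 32]> {
    Ok([0u8; 32]) // pretend this came from the database
}

fn build_block() -> Result<[u8; 32]> {
    // `.context(...)` converts the underlying error into `anyhow::Error`
    // and attaches a message that shows up in the error chain.
    read_latest_hash().context("Failed to get latest block hash from store")
}

fn main() -> Result<()> {
    let prev_block_hash = build_block()?;
    assert_eq!(prev_block_hash, [0u8; 32]);
    Ok(())
}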
@@ -233,16 +236,6 @@ impl<BC: BlockSettlementClientTrait, IC: IndexerClientTrait> SequencerCore<BC, I
 
         self.chain_height = new_block_height;
 
-        // TODO: Consider switching to `tracing` crate to have more structured and consistent logs
-        // e.g.
-        //
-        // ```
-        // info!(
-        //     num_txs = num_txs_in_block,
-        //     time = now.elapsed(),
-        //     "Created block"
-        // );
-        // ```
         log::info!(
             "Created block with {} transactions in {} seconds",
             hashable_data.transactions.len(),
@@ -29,6 +29,8 @@ pub const DB_META_LAST_BLOCK_IN_DB_KEY: &str = "last_block_in_db";
 pub const DB_META_FIRST_BLOCK_SET_KEY: &str = "first_block_set";
 /// Key base for storing metainformation about the last finalized block on Bedrock
 pub const DB_META_LAST_FINALIZED_BLOCK_ID: &str = "last_finalized_block_id";
+/// Key base for storing metainformation about the latest block hash
+pub const DB_META_LATEST_BLOCK_HASH_KEY: &str = "latest_block_hash";
 
 /// Key base for storing the NSSA state
 pub const DB_NSSA_STATE_KEY: &str = "nssa_state";
@@ -79,6 +81,7 @@ impl RocksDBIO {
             dbio.put_meta_is_first_block_set()?;
             dbio.put_meta_last_block_in_db(block_id)?;
             dbio.put_meta_last_finalized_block_id(None)?;
+            dbio.put_meta_latest_block_hash(block.header.hash)?;
 
             Ok(dbio)
         } else {
@@ -262,6 +265,30 @@ impl RocksDBIO {
         Ok(())
     }
 
+    fn put_meta_last_block_in_db_batch(
+        &self,
+        block_id: u64,
+        batch: &mut WriteBatch,
+    ) -> DbResult<()> {
+        let cf_meta = self.meta_column();
+        batch.put_cf(
+            &cf_meta,
+            borsh::to_vec(&DB_META_LAST_BLOCK_IN_DB_KEY).map_err(|err| {
+                DbError::borsh_cast_message(
+                    err,
+                    Some("Failed to serialize DB_META_LAST_BLOCK_IN_DB_KEY".to_string()),
+                )
+            })?,
+            borsh::to_vec(&block_id).map_err(|err| {
+                DbError::borsh_cast_message(
+                    err,
+                    Some("Failed to serialize last block id".to_string()),
+                )
+            })?,
+        );
+        Ok(())
+    }
+
     pub fn put_meta_last_finalized_block_id(&self, block_id: Option<u64>) -> DbResult<()> {
         let cf_meta = self.meta_column();
         self.db
@@ -301,6 +328,81 @@ impl RocksDBIO {
         Ok(())
     }
 
+    fn put_meta_latest_block_hash(&self, block_hash: common::HashType) -> DbResult<()> {
+        let cf_meta = self.meta_column();
+        self.db
+            .put_cf(
+                &cf_meta,
+                borsh::to_vec(&DB_META_LATEST_BLOCK_HASH_KEY).map_err(|err| {
+                    DbError::borsh_cast_message(
+                        err,
+                        Some("Failed to serialize DB_META_LATEST_BLOCK_HASH_KEY".to_string()),
+                    )
+                })?,
+                borsh::to_vec(&block_hash).map_err(|err| {
+                    DbError::borsh_cast_message(
+                        err,
+                        Some("Failed to serialize latest block hash".to_string()),
+                    )
+                })?,
+            )
+            .map_err(|rerr| DbError::rocksdb_cast_message(rerr, None))?;
+        Ok(())
+    }
+
+    fn put_meta_latest_block_hash_batch(
+        &self,
+        block_hash: common::HashType,
+        batch: &mut WriteBatch,
+    ) -> DbResult<()> {
+        let cf_meta = self.meta_column();
+        batch.put_cf(
+            &cf_meta,
+            borsh::to_vec(&DB_META_LATEST_BLOCK_HASH_KEY).map_err(|err| {
+                DbError::borsh_cast_message(
+                    err,
+                    Some("Failed to serialize DB_META_LATEST_BLOCK_HASH_KEY".to_string()),
+                )
+            })?,
+            borsh::to_vec(&block_hash).map_err(|err| {
+                DbError::borsh_cast_message(
+                    err,
+                    Some("Failed to serialize latest block hash".to_string()),
+                )
+            })?,
+        );
+        Ok(())
+    }
+
+    pub fn latest_block_hash(&self) -> DbResult<common::HashType> {
+        let cf_meta = self.meta_column();
+        let res = self
+            .db
+            .get_cf(
+                &cf_meta,
+                borsh::to_vec(&DB_META_LATEST_BLOCK_HASH_KEY).map_err(|err| {
+                    DbError::borsh_cast_message(
+                        err,
+                        Some("Failed to serialize DB_META_LATEST_BLOCK_HASH_KEY".to_string()),
+                    )
+                })?,
+            )
+            .map_err(|rerr| DbError::rocksdb_cast_message(rerr, None))?;
+
+        if let Some(data) = res {
+            Ok(borsh::from_slice::<common::HashType>(&data).map_err(|err| {
+                DbError::borsh_cast_message(
+                    err,
+                    Some("Failed to deserialize latest block hash".to_string()),
+                )
+            })?)
+        } else {
+            Err(DbError::db_interaction_error(
+                "Latest block hash not found".to_string(),
+            ))
+        }
+    }
+
     pub fn put_block(&self, block: &Block, first: bool, batch: &mut WriteBatch) -> DbResult<()> {
         let cf_block = self.block_column();
 
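
Both the metadata key and the 32-byte hash value are borsh-encoded before they hit RocksDB and decoded on the way out. A minimal round-trip sketch, assuming the borsh crate with its derive feature and a stand-in newtype instead of `common::HashType`:

use borsh::{BorshDeserialize, BorshSerialize};

// Stand-in for `common::HashType`, used here only to show the round trip.
#[derive(BorshSerialize, BorshDeserialize, Debug, PartialEq)]
struct Hash32([u8; 32]);

fn main() {
    let key = borsh::to_vec("latest_block_hash").expect("serialize key");
    let value = borsh::to_vec(&Hash32([7u8; 32])).expect("serialize hash");
    let decoded: Hash32 = borsh::from_slice(&value).expect("deserialize hash");
    assert_eq!(decoded, Hash32([7u8; 32]));
    println!("key: {} bytes, value: {} bytes", key.len(), value.len());
}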
@@ -308,7 +410,8 @@ impl RocksDBIO {
         let last_curr_block = self.get_meta_last_block_in_db()?;
 
         if block.header.block_id > last_curr_block {
-            self.put_meta_last_block_in_db(block.header.block_id)?;
+            self.put_meta_last_block_in_db_batch(block.header.block_id, batch)?;
+            self.put_meta_latest_block_hash_batch(block.header.hash, batch)?;
         }
     }
 
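
Routing both metadata updates through the same WriteBatch as the block write (the `_batch` variants above) means the last block id and the latest block hash are committed together with the block, so a crash mid-write cannot leave them out of sync. A simplified illustration with the rust-rocksdb crate, using the default column family, raw byte keys, and a throwaway path rather than the project's `RocksDBIO` wrapper and borsh-encoded keys:

use rocksdb::{DB, Options, WriteBatch};

fn main() -> Result<(), rocksdb::Error> {
    let mut opts = Options::default();
    opts.create_if_missing(true);
    let db = DB::open(&opts, "/tmp/latest-block-hash-demo")?;

    // Both metadata entries go into one batch, so RocksDB commits them
    // atomically: either both are written or neither is.
    let mut batch = WriteBatch::default();
    batch.put(b"last_block_in_db", 42u64.to_le_bytes());
    batch.put(b"latest_block_hash", [7u8; 32]);
    db.write(batch)?;

    assert!(db.get(b"latest_block_hash")?.is_some());
    Ok(())
}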