diff --git a/sequencer_core/src/block_store.rs b/sequencer_core/src/block_store.rs
index a0b07445..4404a34d 100644
--- a/sequencer_core/src/block_store.rs
+++ b/sequencer_core/src/block_store.rs
@@ -20,7 +20,7 @@ impl SequencerStore {
     /// ATTENTION: Will overwrite genesis block.
     pub fn open_db_with_genesis(
         location: &Path,
-        genesis_block: Option<Block>,
+        genesis_block: Option<&Block>,
         signing_key: nssa::PrivateKey,
     ) -> Result {
         let tx_hash_to_block_map = if let Some(block) = &genesis_block {
@@ -84,8 +84,8 @@ impl SequencerStore {
         self.dbio.get_all_blocks().map(|res| Ok(res?))
     }
 
-    pub(crate) fn update(&mut self, block: Block, state: &V02State) -> Result<()> {
-        let new_transactions_map = block_to_transactions_map(&block);
+    pub(crate) fn update(&mut self, block: &Block, state: &V02State) -> Result<()> {
+        let new_transactions_map = block_to_transactions_map(block);
         self.dbio.atomic_update(block, state)?;
         self.tx_hash_to_block_map.extend(new_transactions_map);
         Ok(())
@@ -129,7 +129,7 @@ mod tests {
         let genesis_block = genesis_block_hashable_data.into_pending_block(&signing_key, [0; 32]);
         // Start an empty node store
         let mut node_store =
-            SequencerStore::open_db_with_genesis(path, Some(genesis_block), signing_key).unwrap();
+            SequencerStore::open_db_with_genesis(path, Some(&genesis_block), signing_key).unwrap();
 
         let tx = common::test_utils::produce_dummy_empty_transaction();
         let block = common::test_utils::produce_dummy_block(1, None, vec![tx.clone()]);
@@ -139,7 +139,7 @@ mod tests {
         assert_eq!(None, retrieved_tx);
         // Add the block with the transaction
         let dummy_state = V02State::new_with_genesis_accounts(&[], &[]);
-        node_store.update(block, &dummy_state).unwrap();
+        node_store.update(&block, &dummy_state).unwrap();
         // Try again
         let retrieved_tx = node_store.get_transaction_by_hash(tx.hash());
         assert_eq!(Some(tx), retrieved_tx);
diff --git a/sequencer_core/src/lib.rs b/sequencer_core/src/lib.rs
index 5d8947b6..f54fb78f 100644
--- a/sequencer_core/src/lib.rs
+++ b/sequencer_core/src/lib.rs
@@ -70,7 +70,7 @@ impl SequencerCore {
         // as fixing this issue may require actions non-native to program scope
         let store = SequencerStore::open_db_with_genesis(
             &config.home.join("rocksdb"),
-            Some(genesis_block),
+            Some(&genesis_block),
             signing_key,
         )
         .unwrap();
@@ -118,6 +118,15 @@ impl SequencerCore {
                 .expect("Block settlement client should be constructible")
         });
 
+        let last_bedrock_msg_id = if let Some(client) = block_settlement_client.as_ref() {
+            let (_, msg_id) = client
+                .create_inscribe_tx(&genesis_block)
+                .expect("Inscription transaction with genesis block should be constructible");
+            msg_id.into()
+        } else {
+            channel_genesis_msg_id
+        };
+
         let indexer_client = Arc::new(
             jsonrpsee::ws_client::WsClientBuilder::default()
                 .build(config.indexer_rpc_url.clone())
@@ -133,7 +142,7 @@ impl SequencerCore {
             sequencer_config: config,
             block_settlement_client,
             indexer_client,
-            last_bedrock_msg_id: channel_genesis_msg_id,
+            last_bedrock_msg_id,
         };
 
         (sequencer_core, mempool_handle)
@@ -158,11 +167,9 @@ impl SequencerCore {
     }
 
     pub async fn produce_new_block_and_post_to_settlement_layer(&mut self) -> Result {
-        let block_data = self.produce_new_block_with_mempool_transactions()?;
+        let block = self.produce_new_block_with_mempool_transactions()?;
 
         if let Some(client) = self.block_settlement_client.as_mut() {
-            let block =
-                block_data.into_pending_block(self.store.signing_key(), self.last_bedrock_msg_id);
             let msg_id = client.submit_block_to_bedrock(&block).await?;
             self.last_bedrock_msg_id = msg_id.into();
             log::info!("Posted block data to Bedrock");
@@ -172,7 +179,7 @@ impl SequencerCore {
     }
 
     /// Produces new block from transactions in mempool
-    pub fn produce_new_block_with_mempool_transactions(&mut self) -> Result {
+    pub fn produce_new_block_with_mempool_transactions(&mut self) -> Result<Block> {
        let now = Instant::now();
 
         let new_block_height = self.chain_height + 1;
@@ -209,7 +216,7 @@ impl SequencerCore {
             .clone()
             .into_pending_block(self.store.signing_key(), self.last_bedrock_msg_id);
 
-        self.store.update(block, &self.state)?;
+        self.store.update(&block, &self.state)?;
 
         self.chain_height = new_block_height;
 
@@ -228,7 +235,7 @@ impl SequencerCore {
             hashable_data.transactions.len(),
             now.elapsed().as_secs()
         );
-        Ok(hashable_data)
+        Ok(block)
     }
 
     pub fn state(&self) -> &nssa::V02State {
@@ -692,7 +699,7 @@ mod tests {
        let block = sequencer.produce_new_block_with_mempool_transactions();
 
         assert!(block.is_ok());
-        assert_eq!(block.unwrap().block_id, genesis_height + 1);
+        assert_eq!(block.unwrap().header.block_id, genesis_height + 1);
     }
 
     #[tokio::test]
@@ -730,6 +737,7 @@ mod tests {
         let current_height = sequencer
             .produce_new_block_with_mempool_transactions()
             .unwrap()
+            .header
             .block_id;
 
         let block = sequencer.store.get_block_at_id(current_height).unwrap();
@@ -767,6 +775,7 @@ mod tests {
         let current_height = sequencer
             .produce_new_block_with_mempool_transactions()
             .unwrap()
+            .header
             .block_id;
         let block = sequencer.store.get_block_at_id(current_height).unwrap();
         assert_eq!(block.body.transactions, vec![tx.clone()]);
@@ -776,6 +785,7 @@ mod tests {
         let current_height = sequencer
             .produce_new_block_with_mempool_transactions()
             .unwrap()
+            .header
             .block_id;
         let block = sequencer.store.get_block_at_id(current_height).unwrap();
         assert!(block.body.transactions.is_empty());
@@ -810,6 +820,7 @@ mod tests {
         let current_height = sequencer
             .produce_new_block_with_mempool_transactions()
             .unwrap()
+            .header
             .block_id;
         let block = sequencer.store.get_block_at_id(current_height).unwrap();
         assert_eq!(block.body.transactions, vec![tx.clone()]);
diff --git a/storage/src/lib.rs b/storage/src/lib.rs
index b96e0d61..e24bc7ab 100644
--- a/storage/src/lib.rs
+++ b/storage/src/lib.rs
@@ -47,7 +47,7 @@ pub struct RocksDBIO {
 }
 
 impl RocksDBIO {
-    pub fn open_or_create(path: &Path, start_block: Option<Block>) -> DbResult {
+    pub fn open_or_create(path: &Path, start_block: Option<&Block>) -> DbResult {
         let mut cf_opts = Options::default();
         cf_opts.set_max_write_buffer_number(16);
         // ToDo: Add more column families for different data
@@ -208,7 +208,7 @@ impl RocksDBIO {
         Ok(())
     }
 
-    pub fn put_meta_first_block_in_db(&self, block: Block) -> DbResult<()> {
+    pub fn put_meta_first_block_in_db(&self, block: &Block) -> DbResult<()> {
         let cf_meta = self.meta_column();
         self.db
             .put_cf(
@@ -301,7 +301,7 @@ impl RocksDBIO {
         Ok(())
     }
 
-    pub fn put_block(&self, block: Block, first: bool, batch: &mut WriteBatch) -> DbResult<()> {
+    pub fn put_block(&self, block: &Block, first: bool, batch: &mut WriteBatch) -> DbResult<()> {
         let cf_block = self.block_column();
 
         if !first {
@@ -317,7 +317,7 @@ impl RocksDBIO {
             borsh::to_vec(&block.header.block_id).map_err(|err| {
                 DbError::borsh_cast_message(err, Some("Failed to serialize block id".to_string()))
             })?,
-            borsh::to_vec(&block).map_err(|err| {
+            borsh::to_vec(block).map_err(|err| {
                 DbError::borsh_cast_message(err, Some("Failed to serialize block data".to_string()))
             })?,
         );
@@ -427,7 +427,7 @@ impl RocksDBIO {
         })
     }
 
-    pub fn atomic_update(&self, block: Block, state: &V02State) -> DbResult<()> {
+    pub fn atomic_update(&self, block: &Block, state: &V02State) -> DbResult<()> {
         let block_id = block.header.block_id;
         let mut batch = WriteBatch::default();
         self.put_block(block, false, &mut batch)?;