Mirror of https://github.com/logos-blockchain/lssa.git (synced 2026-01-27 17:43:09 +00:00)

Merge pull request #286 from logos-blockchain/schouhy/adapt-sequencer-to-bedrock
Adapt sequencer to post block data to Bedrock

This change is contained in commit f4c81f5e78.
Cargo.lock (generated, 1078 changed lines): diff suppressed because it is too large.
@@ -34,6 +34,7 @@ sequencer_rpc = { path = "sequencer_rpc" }
 sequencer_runner = { path = "sequencer_runner" }
 wallet = { path = "wallet" }
 test_program_methods = { path = "test_program_methods" }
+bedrock_client = { path = "bedrock_client" }

 tokio = { version = "1.28.2", features = [
     "net",
@@ -78,7 +79,9 @@ base58 = "0.2.0"
 itertools = "0.14.0"
 url = "2.5.4"

-common-http-client = { git = "https://github.com/logos-blockchain/logos-blockchain.git" }
+logos-blockchain-common-http-client = { git = "https://github.com/logos-blockchain/logos-blockchain.git" }
+logos-blockchain-key-management-system-service = { git = "https://github.com/logos-blockchain/logos-blockchain.git" }
+logos-blockchain-core = { git = "https://github.com/logos-blockchain/logos-blockchain.git" }

 rocksdb = { version = "0.24.0", default-features = false, features = [
     "snappy",
@@ -6,4 +6,5 @@ edition = "2024"
 [dependencies]
 reqwest.workspace = true
 anyhow.workspace = true
-common-http-client.workspace = true
+logos-blockchain-common-http-client.workspace = true
+logos-blockchain-core.workspace = true
@@ -1,21 +1,32 @@
 use anyhow::Result;
-use common_http_client::CommonHttpClient;
-pub use common_http_client::{BasicAuthCredentials, Error};
-use reqwest::Client;
+pub use logos_blockchain_common_http_client::{BasicAuthCredentials, CommonHttpClient, Error};
+use logos_blockchain_core::mantle::SignedMantleTx;
+use reqwest::{Client, Url};

 // Simple wrapper
 // maybe extend in the future for our purposes
-pub struct BedrockClient(pub CommonHttpClient);
+pub struct BedrockClient {
+    http_client: CommonHttpClient,
+    node_url: Url,
+}

 impl BedrockClient {
-    pub fn new(auth: Option<BasicAuthCredentials>) -> Result<Self> {
+    pub fn new(auth: Option<BasicAuthCredentials>, node_url: Url) -> Result<Self> {
         let client = Client::builder()
             //Add more fields if needed
             .timeout(std::time::Duration::from_secs(60))
             .build()?;

-        Ok(BedrockClient(CommonHttpClient::new_with_client(
-            client, auth,
-        )))
+        let http_client = CommonHttpClient::new_with_client(client, auth);
+        Ok(Self {
+            http_client,
+            node_url,
+        })
     }
+
+    pub async fn post_transaction(&self, tx: SignedMantleTx) -> Result<(), Error> {
+        self.http_client
+            .post_transaction(self.node_url.clone(), tx)
+            .await
+    }
 }
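For orientation, a minimal sketch of how the reworked client is meant to be used; the node URL here is a placeholder and no basic-auth credentials are passed:

    // Illustrative sketch only: "http://localhost:8080" is a placeholder node URL.
    use bedrock_client::BedrockClient;
    use logos_blockchain_core::mantle::SignedMantleTx;
    use reqwest::Url;

    async fn post_to_bedrock(tx: SignedMantleTx) -> anyhow::Result<()> {
        let node_url = Url::parse("http://localhost:8080")?;
        // `None`: no BasicAuthCredentials in this sketch.
        let client = BedrockClient::new(None, node_url)?;
        client.post_transaction(tx).await?;
        Ok(())
    }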
@@ -23,7 +23,7 @@ pub type BlockHash = [u8; 32];
 pub type BlockId = u64;
 pub type TimeStamp = u64;

-#[derive(Debug, Clone)]
+#[derive(Debug, Clone, BorshSerialize, BorshDeserialize)]
 pub struct BlockHeader {
     pub block_id: BlockId,
     pub prev_block_hash: BlockHash,
@@ -32,18 +32,26 @@ pub struct BlockHeader {
     pub signature: nssa::Signature,
 }

-#[derive(Debug, Clone)]
+#[derive(Debug, Clone, BorshSerialize, BorshDeserialize)]
 pub struct BlockBody {
     pub transactions: Vec<EncodedTransaction>,
 }

-#[derive(Debug, Clone)]
+#[derive(Debug, Clone, BorshSerialize, BorshDeserialize)]
+pub enum BedrockStatus {
+    Pending,
+    Safe,
+    Finalized,
+}
+
+#[derive(Debug, BorshSerialize, BorshDeserialize)]
 pub struct Block {
     pub header: BlockHeader,
     pub body: BlockBody,
+    pub bedrock_status: BedrockStatus,
 }

-#[derive(Debug, PartialEq, Eq, BorshSerialize, BorshDeserialize)]
+#[derive(Debug, Clone, PartialEq, Eq, BorshSerialize, BorshDeserialize)]
 pub struct HashableBlockData {
     pub block_id: BlockId,
     pub prev_block_hash: BlockHash,
@@ -52,7 +60,7 @@ pub struct HashableBlockData {
 }

 impl HashableBlockData {
-    pub fn into_block(self, signing_key: &nssa::PrivateKey) -> Block {
+    pub fn into_pending_block(self, signing_key: &nssa::PrivateKey) -> Block {
         let data_bytes = borsh::to_vec(&self).unwrap();
         let signature = nssa::Signature::new(signing_key, &data_bytes);
         let hash = OwnHasher::hash(&data_bytes);
@@ -67,6 +75,7 @@ impl HashableBlockData {
             body: BlockBody {
                 transactions: self.transactions,
             },
+            bedrock_status: BedrockStatus::Pending,
         }
     }
 }
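The Borsh derives added above are what let whole Block values (now carrying a bedrock_status) be persisted and inscribed as raw bytes. A rough round-trip sketch, assuming BedrockStatus, Block and HashableBlockData are all exported from common::block and with the block data and signing key supplied by the caller:

    // Sketch only: the caller supplies the block data and signing key.
    use common::block::{BedrockStatus, Block, HashableBlockData};

    fn encode_pending_block(data: HashableBlockData, key: &nssa::PrivateKey) -> anyhow::Result<()> {
        // Newly produced blocks start out as Pending on Bedrock.
        let block: Block = data.into_pending_block(key);
        assert!(matches!(block.bedrock_status, BedrockStatus::Pending));

        // Borsh round trip, as the block store now relies on.
        let bytes = borsh::to_vec(&block)?;
        let decoded: Block = borsh::from_slice(&bytes)?;
        assert_eq!(decoded.header.block_id, block.header.block_id);
        Ok(())
    }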
@@ -30,7 +30,7 @@ pub fn produce_dummy_block(
         transactions,
     };

-    block_data.into_block(&sequencer_sign_key_for_testing())
+    block_data.into_pending_block(&sequencer_sign_key_for_testing())
 }

 pub fn produce_dummy_empty_transaction() -> EncodedTransaction {
@@ -155,4 +155,4 @@
       37,
       37
     ]
-}
+}
@@ -185,6 +185,7 @@ impl TpsTestManager {
             initial_accounts: initial_public_accounts,
             initial_commitments: vec![initial_commitment],
             signing_key: [37; 32],
+            bedrock_config: None,
         }
     }
 }
@@ -17,6 +17,12 @@ serde_json.workspace = true
 tempfile.workspace = true
 chrono.workspace = true
 log.workspace = true
+bedrock_client.workspace = true
+logos-blockchain-key-management-system-service.workspace = true
+logos-blockchain-core.workspace = true
+rand.workspace = true
+reqwest.workspace = true
+borsh.workspace = true

 [features]
 default = []
sequencer_core/src/block_settlement_client.rs (new file, 117 lines)
@@ -0,0 +1,117 @@
use std::{fs, path::Path};

use anyhow::{Context, Result, anyhow};
use bedrock_client::BedrockClient;
use common::block::HashableBlockData;
use logos_blockchain_core::mantle::{
    MantleTx, Op, OpProof, SignedMantleTx, Transaction, TxHash, ledger,
    ops::channel::{ChannelId, MsgId, inscribe::InscriptionOp},
};
use logos_blockchain_key_management_system_service::keys::{
    ED25519_SECRET_KEY_SIZE, Ed25519Key, Ed25519PublicKey,
};

use crate::config::BedrockConfig;

/// A component that posts block data to logos blockchain
pub struct BlockSettlementClient {
    bedrock_client: BedrockClient,
    bedrock_signing_key: Ed25519Key,
    bedrock_channel_id: ChannelId,
    last_message_id: MsgId,
}

impl BlockSettlementClient {
    pub fn try_new(home: &Path, config: &BedrockConfig) -> Result<Self> {
        let bedrock_signing_key = load_or_create_signing_key(&home.join("bedrock_signing_key"))
            .context("Failed to load or create signing key")?;
        let bedrock_channel_id = ChannelId::from(config.channel_id);
        let bedrock_client = BedrockClient::new(None, config.node_url.clone())
            .context("Failed to initialize bedrock client")?;
        let channel_genesis_msg = MsgId::from([0; 32]);
        Ok(Self {
            bedrock_client,
            bedrock_signing_key,
            bedrock_channel_id,
            last_message_id: channel_genesis_msg,
        })
    }

    /// Create and sign a transaction for inscribing data
    pub fn create_inscribe_tx(&self, data: Vec<u8>) -> (SignedMantleTx, MsgId) {
        let verifying_key_bytes = self.bedrock_signing_key.public_key().to_bytes();
        let verifying_key =
            Ed25519PublicKey::from_bytes(&verifying_key_bytes).expect("valid ed25519 public key");

        let inscribe_op = InscriptionOp {
            channel_id: self.bedrock_channel_id,
            inscription: data,
            parent: self.last_message_id,
            signer: verifying_key,
        };
        let inscribe_op_id = inscribe_op.id();

        let ledger_tx = ledger::Tx::new(vec![], vec![]);

        let inscribe_tx = MantleTx {
            ops: vec![Op::ChannelInscribe(inscribe_op)],
            ledger_tx,
            // Altruistic test config
            storage_gas_price: 0,
            execution_gas_price: 0,
        };

        let tx_hash = inscribe_tx.hash();
        let signature_bytes = self
            .bedrock_signing_key
            .sign_payload(tx_hash.as_signing_bytes().as_ref())
            .to_bytes();
        let signature =
            logos_blockchain_key_management_system_service::keys::Ed25519Signature::from_bytes(
                &signature_bytes,
            );

        let signed_mantle_tx = SignedMantleTx {
            ops_proofs: vec![OpProof::Ed25519Sig(signature)],
            ledger_tx_proof: empty_ledger_signature(&tx_hash),
            mantle_tx: inscribe_tx,
        };
        (signed_mantle_tx, inscribe_op_id)
    }

    /// Post a transaction to the node and wait for inclusion
    pub async fn post_and_wait(&mut self, block_data: &HashableBlockData) -> Result<u64> {
        let inscription_data = borsh::to_vec(&block_data)?;
        let (tx, new_msg_id) = self.create_inscribe_tx(inscription_data);

        // Post the transaction
        self.bedrock_client.post_transaction(tx).await?;

        self.last_message_id = new_msg_id;

        Ok(block_data.block_id)
    }
}

/// Load signing key from file or generate a new one if it doesn't exist
fn load_or_create_signing_key(path: &Path) -> Result<Ed25519Key> {
    if path.exists() {
        let key_bytes = fs::read(path)?;
        let key_array: [u8; ED25519_SECRET_KEY_SIZE] = key_bytes
            .try_into()
            .map_err(|_| anyhow!("Found key with incorrect length"))?;
        Ok(Ed25519Key::from_bytes(&key_array))
    } else {
        let mut key_bytes = [0u8; ED25519_SECRET_KEY_SIZE];
        rand::RngCore::fill_bytes(&mut rand::thread_rng(), &mut key_bytes);
        fs::write(path, key_bytes)?;
        Ok(Ed25519Key::from_bytes(&key_bytes))
    }
}

fn empty_ledger_signature(
    tx_hash: &TxHash,
) -> logos_blockchain_key_management_system_service::keys::ZkSignature {
    logos_blockchain_key_management_system_service::keys::ZkKey::multi_sign(&[], tx_hash.as_ref())
        .expect("multi-sign with empty key set works")
}
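A rough sketch of how this component is driven from inside sequencer_core (the module is private, so this is written as crate-internal code; the home directory, config, and block data are supplied by the caller):

    // Sketch of crate-internal usage; the real wiring happens in SequencerCore below.
    use std::path::Path;

    use common::block::HashableBlockData;

    use crate::{block_settlement_client::BlockSettlementClient, config::BedrockConfig};

    async fn settle_one_block(
        home: &Path,
        config: &BedrockConfig,
        block_data: &HashableBlockData,
    ) -> anyhow::Result<u64> {
        // Loads (or generates) the Ed25519 key at <home>/bedrock_signing_key.
        let mut client = BlockSettlementClient::try_new(home, config)?;
        // Borsh-encodes the block data, wraps it in a channel inscription, signs it and posts it.
        client.post_and_wait(block_data).await
    }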
@@ -46,7 +46,7 @@ impl SequencerBlockStore {
     }

     pub fn get_block_at_id(&self, id: u64) -> Result<Block> {
-        Ok(self.dbio.get_block(id)?.into_block(&self.signing_key))
+        Ok(self.dbio.get_block(id)?)
     }

     pub fn put_block_at_id(&mut self, block: Block) -> Result<()> {
@@ -113,7 +113,7 @@ mod tests {
             transactions: vec![],
         };

-        let genesis_block = genesis_block_hashable_data.into_block(&signing_key);
+        let genesis_block = genesis_block_hashable_data.into_pending_block(&signing_key);
         // Start an empty node store
         let mut node_store =
             SequencerBlockStore::open_db_with_genesis(path, Some(genesis_block), signing_key)
@@ -5,6 +5,7 @@ use std::{
 };

 use anyhow::Result;
+use reqwest::Url;
 use serde::{Deserialize, Serialize};

 #[derive(Debug, Serialize, Deserialize, Clone)]
@@ -47,6 +48,16 @@ pub struct SequencerConfig {
     pub initial_commitments: Vec<CommitmentsInitialData>,
     /// Sequencer own signing key
     pub signing_key: [u8; 32],
+    /// Bedrock configuration options
+    pub bedrock_config: Option<BedrockConfig>,
 }

+#[derive(Clone, Serialize, Deserialize)]
+pub struct BedrockConfig {
+    /// Bedrock channel ID
+    pub channel_id: [u8; 32],
+    /// Bedrock Url
+    pub node_url: Url,
+}
+
 impl SequencerConfig {
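Because BedrockConfig derives Serialize/Deserialize, the new section can be read straight out of the sequencer's JSON config. A minimal parsing sketch; the URL and the all-ones channel id are placeholder values mirroring the test config further down, and reqwest::Url is assumed to deserialize from a plain string here:

    // Sketch only: placeholder channel id and node URL.
    use sequencer_core::config::BedrockConfig;

    fn parse_bedrock_config() -> anyhow::Result<BedrockConfig> {
        let raw = r#"{
            "channel_id": [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1],
            "node_url": "http://localhost:8080"
        }"#;
        Ok(serde_json::from_str(raw)?)
    }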
@@ -13,8 +13,9 @@ use log::warn;
 use mempool::{MemPool, MemPoolHandle};
 use serde::{Deserialize, Serialize};

-use crate::block_store::SequencerBlockStore;
+use crate::{block_settlement_client::BlockSettlementClient, block_store::SequencerBlockStore};

+mod block_settlement_client;
 pub mod block_store;
 pub mod config;
@@ -24,6 +25,7 @@ pub struct SequencerCore {
     mempool: MemPool<EncodedTransaction>,
     sequencer_config: SequencerConfig,
     chain_height: u64,
+    block_settlement_client: Option<BlockSettlementClient>,
 }

 #[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
@@ -51,7 +53,7 @@ impl SequencerCore {
         };

         let signing_key = nssa::PrivateKey::try_new(config.signing_key).unwrap();
-        let genesis_block = hashable_data.into_block(&signing_key);
+        let genesis_block = hashable_data.into_pending_block(&signing_key);

         // Sequencer should panic if unable to open db,
         // as fixing this issue may require actions non-native to program scope
@@ -87,12 +89,18 @@ impl SequencerCore {
         state.add_pinata_program(PINATA_BASE58.parse().unwrap());

         let (mempool, mempool_handle) = MemPool::new(config.mempool_max_size);
+        let block_settlement_client = config.bedrock_config.as_ref().map(|bedrock_config| {
+            BlockSettlementClient::try_new(&config.home, bedrock_config)
+                .expect("Block settlement client should be constructible")
+        });

         let mut this = Self {
             state,
             block_store,
             mempool,
             chain_height: config.genesis_id,
             sequencer_config: config,
+            block_settlement_client,
         };

         this.sync_state_with_stored_blocks();
@@ -137,9 +145,21 @@ impl SequencerCore {
         Ok(tx)
     }

+    pub async fn produce_new_block_and_post_to_settlement_layer(&mut self) -> Result<u64> {
+        let block_data = self.produce_new_block_with_mempool_transactions()?;
+
+        if let Some(block_settlement) = self.block_settlement_client.as_mut() {
+            block_settlement.post_and_wait(&block_data).await?;
+            log::info!("Posted block data to Bedrock");
+        }
+
+        Ok(self.chain_height)
+    }
+
     /// Produces new block from transactions in mempool
-    pub fn produce_new_block_with_mempool_transactions(&mut self) -> Result<u64> {
+    pub fn produce_new_block_with_mempool_transactions(&mut self) -> Result<HashableBlockData> {
         let now = Instant::now();

         let new_block_height = self.chain_height + 1;

         let mut valid_transactions = vec![];
@@ -167,8 +187,6 @@ impl SequencerCore {

         let curr_time = chrono::Utc::now().timestamp_millis() as u64;

-        let num_txs_in_block = valid_transactions.len();
-
         let hashable_data = HashableBlockData {
             block_id: new_block_height,
             transactions: valid_transactions,
@@ -176,7 +194,9 @@ impl SequencerCore {
             timestamp: curr_time,
         };

-        let block = hashable_data.into_block(self.block_store.signing_key());
+        let block = hashable_data
+            .clone()
+            .into_pending_block(self.block_store.signing_key());

         self.block_store.put_block_at_id(block)?;

@@ -194,11 +214,10 @@ impl SequencerCore {
         // ```
         log::info!(
             "Created block with {} transactions in {} seconds",
-            num_txs_in_block,
+            hashable_data.transactions.len(),
             now.elapsed().as_secs()
         );

-        Ok(self.chain_height)
+        Ok(hashable_data)
     }

     pub fn state(&self) -> &nssa::V02State {
@@ -277,6 +296,7 @@ mod tests {
             initial_accounts,
             initial_commitments: vec![],
             signing_key: *sequencer_sign_key_for_testing().value(),
+            bedrock_config: None,
         }
     }
@@ -619,9 +639,9 @@ mod tests {
         let tx = common::test_utils::produce_dummy_empty_transaction();
         mempool_handle.push(tx).await.unwrap();

-        let block_id = sequencer.produce_new_block_with_mempool_transactions();
-        assert!(block_id.is_ok());
-        assert_eq!(block_id.unwrap(), genesis_height + 1);
+        let block = sequencer.produce_new_block_with_mempool_transactions();
+        assert!(block.is_ok());
+        assert_eq!(block.unwrap().block_id, genesis_height + 1);
     }

     #[tokio::test]
@@ -658,7 +678,8 @@ mod tests {
         // Create block
         let current_height = sequencer
             .produce_new_block_with_mempool_transactions()
-            .unwrap();
+            .unwrap()
+            .block_id;
         let block = sequencer
             .block_store
             .get_block_at_id(current_height)
@@ -697,7 +718,8 @@ mod tests {
         mempool_handle.push(tx.clone()).await.unwrap();
         let current_height = sequencer
             .produce_new_block_with_mempool_transactions()
-            .unwrap();
+            .unwrap()
+            .block_id;
         let block = sequencer
             .block_store
             .get_block_at_id(current_height)
@@ -708,7 +730,8 @@ mod tests {
         mempool_handle.push(tx.clone()).await.unwrap();
         let current_height = sequencer
             .produce_new_block_with_mempool_transactions()
-            .unwrap();
+            .unwrap()
+            .block_id;
        let block = sequencer
            .block_store
            .get_block_at_id(current_height)
@@ -743,7 +766,8 @@ mod tests {
         mempool_handle.push(tx.clone()).await.unwrap();
         let current_height = sequencer
             .produce_new_block_with_mempool_transactions()
-            .unwrap();
+            .unwrap()
+            .block_id;
         let block = sequencer
             .block_store
             .get_block_at_id(current_height)
@@ -388,6 +388,7 @@ mod tests {
             initial_accounts,
             initial_commitments: vec![],
             signing_key: *sequencer_sign_key_for_testing().value(),
+            bedrock_config: None,
         }
     }
@@ -154,5 +154,9 @@
       37,
       37,
       37
-    ]
-}
+    ],
+    "bedrock_config": {
+        "channel_id": [1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1],
+        "node_url": "http://localhost:8080"
+    }
+}
@@ -50,7 +50,9 @@ pub async fn startup_sequencer(
     let id = {
         let mut state = seq_core_wrapped.lock().await;

-        state.produce_new_block_with_mempool_transactions()?
+        state
+            .produce_new_block_and_post_to_settlement_layer()
+            .await?
     };

     info!("Block with id {id} created");
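With this change the runner always goes through the settlement-aware entry point; when bedrock_config is None the call simply produces the block locally and skips the Bedrock post. A rough sketch of a production loop built on it, assuming the core sits behind a tokio async mutex and that tokio's timer feature is available (the cadence and log messages are placeholders):

    // Sketch only: the 5-second cadence and messages are illustrative.
    use std::{sync::Arc, time::Duration};

    use sequencer_core::SequencerCore;
    use tokio::sync::Mutex;

    async fn block_production_loop(seq_core: Arc<Mutex<SequencerCore>>) {
        loop {
            tokio::time::sleep(Duration::from_secs(5)).await;
            let result = {
                let mut core = seq_core.lock().await;
                // Builds a block from mempool transactions and, if a settlement
                // client is configured, inscribes it on Bedrock.
                core.produce_new_block_and_post_to_settlement_layer().await
            };
            match result {
                Ok(id) => log::info!("Block with id {id} created"),
                Err(err) => log::warn!("Block production failed: {err}"),
            }
        }
    }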
@@ -1,6 +1,6 @@
 use std::{path::Path, sync::Arc};

-use common::block::{Block, HashableBlockData};
+use common::block::Block;
 use error::DbError;
 use rocksdb::{
     BoundColumnFamily, ColumnFamilyDescriptor, DBWithThreadMode, MultiThreaded, Options,
@@ -26,6 +26,8 @@ pub const DB_META_FIRST_BLOCK_IN_DB_KEY: &str = "first_block_in_db";
 pub const DB_META_LAST_BLOCK_IN_DB_KEY: &str = "last_block_in_db";
 /// Key base for storing metainformation which describe if first block has been set
 pub const DB_META_FIRST_BLOCK_SET_KEY: &str = "first_block_set";
+/// Key base for storing metainformation about the last finalized block on Bedrock
+pub const DB_META_LAST_FINALIZED_BLOCK_ID: &str = "last_finalized_block_id";

 /// Key base for storing snapshot which describe block id
 pub const DB_SNAPSHOT_BLOCK_ID_KEY: &str = "block_id";
@@ -75,6 +77,7 @@ impl RocksDBIO {
             dbio.put_meta_first_block_in_db(block)?;
             dbio.put_meta_is_first_block_set()?;
             dbio.put_meta_last_block_in_db(block_id)?;
+            dbio.put_meta_last_finalized_block_id(None)?;

             Ok(dbio)
         } else {
@@ -232,6 +235,28 @@ impl RocksDBIO {
         Ok(())
     }

+    pub fn put_meta_last_finalized_block_id(&self, block_id: Option<u64>) -> DbResult<()> {
+        let cf_meta = self.meta_column();
+        self.db
+            .put_cf(
+                &cf_meta,
+                borsh::to_vec(&DB_META_LAST_FINALIZED_BLOCK_ID).map_err(|err| {
+                    DbError::borsh_cast_message(
+                        err,
+                        Some("Failed to serialize DB_META_LAST_FINALIZED_BLOCK_ID".to_string()),
+                    )
+                })?,
+                borsh::to_vec(&block_id).map_err(|err| {
+                    DbError::borsh_cast_message(
+                        err,
+                        Some("Failed to serialize last block id".to_string()),
+                    )
+                })?,
+            )
+            .map_err(|rerr| DbError::rocksdb_cast_message(rerr, None))?;
+        Ok(())
+    }
+
     pub fn put_meta_is_first_block_set(&self) -> DbResult<()> {
         let cf_meta = self.meta_column();
         self.db
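This PR only ever writes the marker (initialised to None when the database is created); a matching read is not part of the diff, but would presumably mirror the getter pattern used elsewhere in RocksDBIO, roughly:

    // Hypothetical getter, not in this PR; it mirrors put_meta_last_finalized_block_id
    // and would live in the same impl RocksDBIO block.
    pub fn get_meta_last_finalized_block_id(&self) -> DbResult<Option<u64>> {
        let cf_meta = self.meta_column();
        let raw = self
            .db
            .get_cf(
                &cf_meta,
                borsh::to_vec(&DB_META_LAST_FINALIZED_BLOCK_ID).map_err(|err| {
                    DbError::borsh_cast_message(
                        err,
                        Some("Failed to serialize DB_META_LAST_FINALIZED_BLOCK_ID".to_string()),
                    )
                })?,
            )
            .map_err(|rerr| DbError::rocksdb_cast_message(rerr, None))?;

        match raw {
            Some(bytes) => borsh::from_slice::<Option<u64>>(&bytes).map_err(|serr| {
                DbError::borsh_cast_message(
                    serr,
                    Some("Failed to deserialize last finalized block id".to_string()),
                )
            }),
            None => Ok(None),
        }
    }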
@@ -269,7 +294,7 @@ impl RocksDBIO {
                         Some("Failed to serialize block id".to_string()),
                     )
                 })?,
-                borsh::to_vec(&HashableBlockData::from(block)).map_err(|err| {
+                borsh::to_vec(&block).map_err(|err| {
                     DbError::borsh_cast_message(
                         err,
                         Some("Failed to serialize block data".to_string()),
@@ -280,7 +305,7 @@ impl RocksDBIO {
         Ok(())
     }

-    pub fn get_block(&self, block_id: u64) -> DbResult<HashableBlockData> {
+    pub fn get_block(&self, block_id: u64) -> DbResult<Block> {
         let cf_block = self.block_column();
         let res = self
             .db
@@ -296,14 +321,12 @@ impl RocksDBIO {
             .map_err(|rerr| DbError::rocksdb_cast_message(rerr, None))?;

         if let Some(data) = res {
-            Ok(
-                borsh::from_slice::<HashableBlockData>(&data).map_err(|serr| {
-                    DbError::borsh_cast_message(
-                        serr,
-                        Some("Failed to deserialize block data".to_string()),
-                    )
-                })?,
-            )
+            Ok(borsh::from_slice::<Block>(&data).map_err(|serr| {
+                DbError::borsh_cast_message(
+                    serr,
+                    Some("Failed to deserialize block data".to_string()),
+                )
+            })?)
         } else {
             Err(DbError::db_interaction_error(
                 "Block on this id not found".to_string(),