2026-02-13 01:03:47 +04:00
|
|
|
import asyncio
|
2025-10-15 20:53:52 +02:00
|
|
|
import logging
|
2026-02-05 19:45:42 +04:00
|
|
|
from asyncio import create_task
|
2025-10-03 22:27:30 +02:00
|
|
|
from contextlib import asynccontextmanager
|
2025-10-30 11:48:34 +01:00
|
|
|
from typing import TYPE_CHECKING, AsyncGenerator, AsyncIterator, List
|
2025-10-03 22:27:30 +02:00
|
|
|
|
|
|
|
|
from db.blocks import BlockRepository
|
|
|
|
|
from db.clients import SqliteClient
|
|
|
|
|
from db.transaction import TransactionRepository
|
2025-10-30 11:48:34 +01:00
|
|
|
from models.block import Block
|
2025-11-03 13:17:19 +01:00
|
|
|
from node.api.builder import build_node_api
|
|
|
|
|
from node.manager.builder import build_node_manager
|
2025-10-03 22:27:30 +02:00
|
|
|
|
|
|
|
|
if TYPE_CHECKING:
    # Imported only for static type checking; "NBE" is referenced solely in
    # string annotations below, so core.app is never imported at runtime here
    # (avoids a runtime dependency/import cycle on the app module).
    from core.app import NBE


# Module-level logger named after this module, per the stdlib logging convention.
logger = logging.getLogger(__name__)
|
|
|
|
|
|
2025-10-03 22:27:30 +02:00
|
|
|
|
2026-02-05 19:45:42 +04:00
|
|
|
async def backfill_to_lib(app: "NBE") -> None:
    """
    Fetch the LIB (Last Irreversible Block) from the node and backfill by walking the chain backwards.

    The walk follows parent links rather than slot ranges, so pruned or missing
    blocks do not break the traversal. On any failure the whole attempt is
    retried forever with exponential backoff (capped at ``max_delay``).
    """
    delay, max_delay = 1.0, 60.0

    while True:
        try:
            info = await app.state.node_api.get_info()
            logger.info(f"Node info: LIB={info.lib}, tip={info.tip}, slot={info.slot}, height={info.height}")

            # Walk backwards from the LIB hash until we reach known blocks.
            await backfill_chain_from_hash(app, info.lib)
        except Exception as error:
            # Log with traceback, then back off before the next attempt.
            logger.exception(f"Error during initial backfill to LIB: {error}")
            logger.info(f"Retrying backfill in {delay:.0f}s...")
            await asyncio.sleep(delay)
            delay = min(delay * 2, max_delay)
        else:
            # Backfill completed without raising — we are done.
            return
|
2026-02-05 19:45:42 +04:00
|
|
|
|
|
|
|
|
|
|
|
|
|
async def backfill_chain_from_hash(app: "NBE", block_hash: str) -> None:
    """
    Walk the chain backwards from ``block_hash``, collecting blocks until we
    reach one already stored locally or a genesis block (whose parent the node
    does not know), then insert the collected blocks oldest-first.
    """
    pending: List[Block] = []
    cursor = block_hash

    while True:
        # Stop as soon as the local DB already knows this block.
        if (await app.state.block_repository.get_by_hash(bytes.fromhex(cursor))).is_some:
            logger.debug(f"Block {cursor[:16]}... already exists, stopping chain walk")
            break

        # Ask the node for the block; a miss means we walked past genesis.
        serializer = await app.state.node_api.get_block_by_hash(cursor)
        if serializer is None:
            logger.info(f"Block {cursor[:16]}... not found on node (likely genesis parent), stopping chain walk")
            break

        block = serializer.into_block()
        pending.append(block)
        logger.debug(f"Queued block at slot {block.slot} (hash={cursor[:16]}...) for insertion")

        # Continue the walk at the parent.
        cursor = block.parent_block.hex()

    if not pending:
        logger.info("No new blocks to backfill")
        return

    # Insert parents before children: the walk collected newest-first.
    pending.reverse()

    # Capture slot range before insert (blocks get detached from session after commit)
    first_slot = pending[0].slot
    last_slot = pending[-1].slot
    count = len(pending)

    logger.info(f"Backfilling {count} blocks from chain walk...")

    # The oldest queued block may have no stored parent; allow it as a chain root.
    await app.state.block_repository.create(*pending, allow_chain_root=True)
    logger.info(f"Backfilled {count} blocks (slots {first_slot} to {last_slot})")
|
|
|
|
|
|
|
|
|
|
|
2025-10-03 22:27:30 +02:00
|
|
|
@asynccontextmanager
async def node_lifespan(app: "NBE") -> AsyncGenerator[None, None]:
    """
    Application lifespan: build the node manager/API and repositories, start
    the node, backfill to LIB, launch the new-block subscription task, then
    tear everything down on exit.

    Populates ``app.state`` with: ``node_manager``, ``node_api``, ``db_client``,
    ``block_repository``, ``transaction_repository`` and
    ``subscription_to_updates_handle`` (the background subscription task).
    """
    app.state.node_manager = build_node_manager(app.settings)
    app.state.node_api = build_node_api(app.settings)

    db_client = SqliteClient()
    app.state.db_client = db_client
    app.state.block_repository = BlockRepository(db_client)
    app.state.transaction_repository = TransactionRepository(db_client)

    try:
        logger.info("Starting node...")
        await app.state.node_manager.start()
        logger.info("Node started.")

        # Backfill to LIB on startup
        await backfill_to_lib(app)

        app.state.subscription_to_updates_handle = create_task(subscribe_to_new_blocks(app))

        yield
    finally:
        # Cancel the subscription task so it does not outlive the application;
        # previously it was left running (and holding the blocks stream) after
        # shutdown. getattr() guards the case where startup failed before the
        # task was created.
        handle = getattr(app.state, "subscription_to_updates_handle", None)
        if handle is not None:
            handle.cancel()
            try:
                await handle
            except asyncio.CancelledError:
                pass

        logger.info("Stopping node...")
        await app.state.node_manager.stop()
        logger.info("Node stopped.")
|
2025-10-03 22:27:30 +02:00
|
|
|
|
|
|
|
|
|
2025-10-15 20:53:52 +02:00
|
|
|
async def _gracefully_close_stream(stream: AsyncIterator) -> None:
|
|
|
|
|
aclose = getattr(stream, "aclose", None)
|
|
|
|
|
if aclose is not None:
|
2025-10-03 22:27:30 +02:00
|
|
|
try:
|
2025-10-15 20:53:52 +02:00
|
|
|
await aclose()
|
2025-10-03 22:27:30 +02:00
|
|
|
except Exception as e:
|
2025-10-15 20:53:52 +02:00
|
|
|
logger.error(f"Error while closing the new blocks stream: {e}")
|
2025-10-03 22:27:30 +02:00
|
|
|
|
|
|
|
|
|
2025-10-15 20:53:52 +02:00
|
|
|
async def subscribe_to_new_blocks(app: "NBE"):
    """
    Consume the node's new-blocks stream while the app is running, storing each
    block and backfilling missing ancestor chains on demand. The stream is
    always closed gracefully on exit.
    """
    logger.info("Subscription to new blocks started.")
    stream = app.state.node_api.get_blocks_stream()

    try:
        while app.state.is_running:
            # --- pull the next block off the stream -----------------------
            try:
                serializer = await anext(stream)
            except TimeoutError:
                # No block within the timeout window; poll the running flag again.
                continue
            except StopAsyncIteration:
                logger.error("Subscription to the new blocks stream ended unexpectedly. Please restart the node.")
                break
            except Exception as error:
                logger.exception(f"Error while fetching new blocks: {error}")
                continue

            # --- persist the block, backfilling ancestry if needed --------
            try:
                block = serializer.into_block()
                repo = app.state.block_repository

                if not (await repo.get_by_hash(block.parent_block)).is_some:
                    # Parent unknown locally: walk the chain back from it first.
                    logger.info(f"Parent block not found for block at slot {block.slot}. Initiating chain backfill...")
                    await backfill_chain_from_hash(app, block.parent_block.hex())

                    if not (await repo.get_by_hash(block.parent_block)).is_some:
                        # Backfill could not recover the parent — drop this block.
                        logger.warning(
                            f"Parent block still not found after backfill for block at slot {block.slot}. Skipping block."
                        )
                        continue

                # Capture the slot before create() detaches the block from the session.
                slot = block.slot
                await app.state.block_repository.create(block)
                logger.debug(f"Stored block at slot {slot}")
            except Exception as error:
                logger.exception(f"Error while storing new block: {error}")
    finally:
        await _gracefully_close_stream(stream)

    logger.info("Subscription to new blocks finished.")
|