mirror of
https://github.com/logos-blockchain/logos-blockchain-block-explorer-template.git
synced 2026-02-17 03:23:13 +00:00
Merge pull request #4 from logos-blockchain/drusu/update-for-devnet
update types with latest fixes from blockchain and add a few features for devnet
This commit is contained in:
commit
781552e2af
@@ -15,6 +15,23 @@ if TYPE_CHECKING:
     from core.app import NBE
 
 
+async def list_blocks(
+    request: NBERequest,
+    page: int = Query(0, ge=0),
+    page_size: int = Query(10, ge=1, le=100, alias="page-size"),
+) -> Response:
+    blocks, total_count = await request.app.state.block_repository.get_paginated(page, page_size)
+    total_pages = (total_count + page_size - 1) // page_size  # ceiling division
+
+    return JSONResponse({
+        "blocks": [BlockRead.from_block(block).model_dump(mode="json") for block in blocks],
+        "page": page,
+        "page_size": page_size,
+        "total_count": total_count,
+        "total_pages": total_pages,
+    })
+
+
 async def _get_blocks_stream_serialized(app: "NBE", block_from: Option[Block]) -> AsyncIterator[List[BlockRead]]:
     _stream = app.state.block_repository.updates_stream(block_from)
     async for blocks in _stream:
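
Side note: the `total_pages` expression is integer ceiling division, so no float round-trip through `math.ceil` is needed. A quick standalone check of the arithmetic:

def total_pages(total_count: int, page_size: int) -> int:
    # (n + d - 1) // d == ceil(n / d) for any positive divisor d
    return (total_count + page_size - 1) // page_size

assert total_pages(0, 10) == 0     # empty table: no pages
assert total_pages(95, 10) == 10   # a partial last page still counts
assert total_pages(100, 10) == 10  # an exact multiple adds no extra page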
@@ -18,6 +18,7 @@ def create_v1_router() -> APIRouter:
     router.add_api_route("/transactions/{transaction_hash:str}", transactions.get, methods=["GET"])
 
     router.add_api_route("/blocks/stream", blocks.stream, methods=["GET"])
+    router.add_api_route("/blocks/list", blocks.list_blocks, methods=["GET"])
     router.add_api_route("/blocks/{block_hash:str}", blocks.get, methods=["GET"])
 
     return router
@@ -12,6 +12,7 @@ class BlockRead(NbeSchema):
     hash: HexBytes
     parent_block_hash: HexBytes
     slot: int
+    height: int
     block_root: HexBytes
     proof_of_leadership: ProofOfLeadership
     transactions: List[Transaction]
@@ -23,6 +24,7 @@ class BlockRead(NbeSchema):
             hash=block.hash,
             parent_block_hash=block.parent_block,
             slot=block.slot,
+            height=block.height,
             block_root=block.block_root,
             proof_of_leadership=block.proof_of_leadership,
             transactions=block.transactions,
@@ -51,7 +51,6 @@ class NBEState(State):
     block_repository: BlockRepository
     transaction_repository: TransactionRepository
     subscription_to_updates_handle: Task
-    backfill_handle: Task
 
     @property
     def is_running(self) -> bool:
@@ -64,7 +63,6 @@ class NBEState(State):
     async def _wait_tasks_finished(self):
        await gather(
            self.subscription_to_updates_handle,
-            self.backfill_handle,
            return_exceptions=True,
        )
@@ -11,7 +11,14 @@ def dehexify(data: str) -> bytes:
     return bytes.fromhex(data)
 
 
+def validate_hex_bytes(data: str | bytes) -> bytes:
+    if isinstance(data, bytes):
+        return data
+    return bytes.fromhex(data)
+
+
 HexBytes = Annotated[
     bytes,
+    BeforeValidator(validate_hex_bytes),
     PlainSerializer(hexify, return_type=str, when_used="json"),
 ]
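
With `BeforeValidator(validate_hex_bytes)` in the `Annotated` chain, a `HexBytes` field accepts either raw bytes or a hex string on input, while `PlainSerializer(hexify, ...)` still emits hex in JSON. A minimal round-trip sketch of the same pattern (the `Demo` model is hypothetical; only the alias mirrors the code above):

from typing import Annotated
from pydantic import BaseModel, BeforeValidator, PlainSerializer

def hexify(data: bytes) -> str:
    return data.hex()

def validate_hex_bytes(data: str | bytes) -> bytes:
    if isinstance(data, bytes):
        return data
    return bytes.fromhex(data)

HexBytes = Annotated[
    bytes,
    BeforeValidator(validate_hex_bytes),
    PlainSerializer(hexify, return_type=str, when_used="json"),
]

class Demo(BaseModel):  # hypothetical model, for illustration only
    hash: HexBytes

assert Demo(hash="00ff").hash == b"\x00\xff"       # hex string input
assert Demo(hash=b"\x00\xff").hash == b"\x00\xff"  # raw bytes pass through
assert Demo(hash="00ff").model_dump_json() == '{"hash":"00ff"}'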
src/db/blocks.py
@@ -1,6 +1,6 @@
 import logging
 from asyncio import sleep
-from typing import AsyncIterator, List
+from typing import AsyncIterator, Dict, List
 
 from rusty_results import Empty, Option, Some
 from sqlalchemy import Result, Select
@@ -11,30 +11,135 @@ from db.clients import DbClient
 from models.block import Block
 
 
 logger = logging.getLogger(__name__)
 
 
 def get_latest_statement(limit: int, *, output_ascending: bool = True) -> Select:
-    # Fetch the latest N blocks in descending slot order
-    base = select(Block).order_by(Block.slot.desc(), Block.id.desc()).limit(limit)
+    # Fetch the latest N blocks in descending height order
+    base = select(Block).order_by(Block.height.desc()).limit(limit)
     if not output_ascending:
         return base
 
     # Reorder for output
     inner = base.subquery()
     latest = aliased(Block, inner)
-    return select(latest).options().order_by(latest.slot.asc(), latest.id.asc())  # type: ignore[arg-type]
+    return select(latest).options().order_by(latest.height.asc())  # type: ignore[arg-type]
 
 
 class BlockRepository:
     """
     FIXME: Assumes slots are sequential and one block per slot
     """
 
     def __init__(self, client: DbClient):
         self.client = client
 
-    async def create(self, *blocks: Block) -> None:
+    async def create(self, *blocks: Block, allow_chain_root: bool = False) -> None:
+        """
+        Insert blocks into the database with proper height calculation.
+
+        Args:
+            blocks: Blocks to insert
+            allow_chain_root: If True, allow the first block (by slot) to be a chain root
+                even if its parent doesn't exist. Used during chain-walk backfills.
+        """
+        if not blocks:
+            return
+
         with self.client.session() as session:
-            session.add_all(list(blocks))
-            session.commit()
+            # Collect all unique parent hashes we need to look up
+            parent_hashes = {block.parent_block for block in blocks}
+
+            # Fetch existing parent blocks to get their heights
+            parent_heights: Dict[bytes, int] = {}
+            if parent_hashes:
+                statement = select(Block).where(Block.hash.in_(parent_hashes))
+                existing_parents = session.exec(statement).all()
+                for parent in existing_parents:
+                    parent_heights[parent.hash] = parent.height
+
+            # Also check if any of the blocks we're inserting are parents of others
+            blocks_by_hash = {block.hash: block for block in blocks}
+
+            # Find the chain root candidate (lowest slot block whose parent isn't in the batch)
+            chain_root_hash = None
+            if allow_chain_root:
+                sorted_blocks = sorted(blocks, key=lambda b: b.slot)
+                for block in sorted_blocks:
+                    if block.parent_block not in blocks_by_hash and block.parent_block not in parent_heights:
+                        chain_root_hash = block.hash
+                        break
+
+            # Handle blocks in batch that depend on each other
+            # Resolve dependencies iteratively, skipping orphans
+            resolved = set()
+            orphans = set()
+            max_iterations = len(blocks) * 2  # Prevent infinite loops
+            iterations = 0
+
+            while iterations < max_iterations:
+                iterations += 1
+                made_progress = False
+
+                for block in blocks:
+                    if block.hash in resolved or block.hash in orphans:
+                        continue
+
+                    if block.parent_block in parent_heights:
+                        # Parent found in DB or already resolved
+                        block.height = parent_heights[block.parent_block] + 1
+                        parent_heights[block.hash] = block.height
+                        resolved.add(block.hash)
+                        made_progress = True
+                    elif block.parent_block in blocks_by_hash:
+                        parent = blocks_by_hash[block.parent_block]
+                        if parent.hash in resolved:
+                            # Parent in same batch and already resolved
+                            block.height = parent.height + 1
+                            parent_heights[block.hash] = block.height
+                            resolved.add(block.hash)
+                            made_progress = True
+                        elif parent.hash in orphans:
+                            # Parent is an orphan, so this block is also an orphan
+                            orphans.add(block.hash)
+                            made_progress = True
+                        # else: parent not yet resolved, try again next iteration
+                    else:
+                        # Parent not found anywhere
+                        if block.slot == 0 or block.hash == chain_root_hash:
+                            # Genesis block or chain root - no parent requirement
+                            block.height = 0
+                            parent_heights[block.hash] = block.height
+                            resolved.add(block.hash)
+                            made_progress = True
+                            if block.hash == chain_root_hash:
+                                logger.info(
+                                    f"Chain root block: hash={block.hash.hex()[:16]}..., "
+                                    f"slot={block.slot}, height=0"
+                                )
+                        else:
+                            # Orphan block - parent doesn't exist
+                            logger.warning(
+                                f"Dropping orphaned block: hash={block.hash.hex()}, "
+                                f"slot={block.slot}, parent={block.parent_block.hex()} (parent not found)"
+                            )
+                            orphans.add(block.hash)
+                            made_progress = True
+
+                # If no progress was made and we still have unresolved blocks, break
+                if not made_progress:
+                    break
+
+            # Check for any blocks that couldn't be resolved (circular dependencies or other issues)
+            unresolved = set(block.hash for block in blocks) - resolved - orphans
+            for block in blocks:
+                if block.hash in unresolved:
+                    logger.warning(
+                        f"Dropping unresolvable block: hash={block.hash.hex()}, "
+                        f"slot={block.slot}, parent={block.parent_block.hex()}"
+                    )
+
+            # Only add resolved blocks
+            blocks_to_add = [block for block in blocks if block.hash in resolved]
+            if blocks_to_add:
+                session.add_all(blocks_to_add)
+                session.commit()
 
     async def get_by_id(self, block_id: int) -> Option[Block]:
         statement = select(Block).where(Block.id == block_id)
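
The resolution loop above is a bounded fixed-point iteration: a block whose parent appears later in the batch is retried on a later pass, and `max_iterations = len(blocks) * 2` guarantees termination even if a cycle sneaks in. A reduced in-memory sketch of the same idea (plain dicts instead of the ORM; purely illustrative):

def resolve_heights(batch: list[tuple[bytes, bytes]], db_heights: dict[bytes, int]) -> dict[bytes, int]:
    """batch maps block hash -> parent hash; db_heights seeds known parents."""
    heights = dict(db_heights)
    pending = dict(batch)
    for _ in range(2 * len(batch) or 1):  # bounded, like max_iterations
        progressed = False
        for blk, parent in list(pending.items()):
            if parent in heights:
                heights[blk] = heights[parent] + 1
                del pending[blk]
                progressed = True
        if not progressed:
            break  # leftovers are orphans / unresolvable
    return {h: v for h, v in heights.items() if h not in db_heights}

# A child listed before its parent still resolves on the second pass.
out = resolve_heights([(b"child", b"parent"), (b"parent", b"root")], {b"root": 7})
assert out == {b"parent": 8, b"child": 9}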
@@ -68,7 +173,7 @@ class BlockRepository:
             return b
 
     async def get_earliest(self) -> Option[Block]:
-        statement = select(Block).order_by(Block.slot.asc()).limit(1)
+        statement = select(Block).order_by(Block.height.asc()).limit(1)
 
         with self.client.session() as session:
             results: Result[Block] = session.exec(statement)
@@ -77,25 +182,47 @@ class BlockRepository:
         else:
             return Empty()
 
+    async def get_paginated(self, page: int, page_size: int) -> tuple[List[Block], int]:
+        """
+        Get blocks with pagination, ordered by height descending (newest first).
+        Returns a tuple of (blocks, total_count).
+        """
+        offset = page * page_size
+
+        with self.client.session() as session:
+            # Get total count
+            from sqlalchemy import func
+            count_statement = select(func.count()).select_from(Block)
+            total_count = session.exec(count_statement).one()
+
+            # Get paginated blocks
+            statement = (
+                select(Block)
+                .order_by(Block.height.desc())
+                .offset(offset)
+                .limit(page_size)
+            )
+            blocks = session.exec(statement).all()
+
+            return blocks, total_count
+
     async def updates_stream(
         self, block_from: Option[Block], *, timeout_seconds: int = 1
     ) -> AsyncIterator[List[Block]]:
-        slot_cursor: int = block_from.map(lambda block: block.slot).unwrap_or(0)
-        id_cursor: int = block_from.map(lambda block: block.id + 1).unwrap_or(0)
+        height_cursor: int = block_from.map(lambda block: block.height + 1).unwrap_or(0)
 
         while True:
             statement = (
                 select(Block)
-                .where(Block.slot >= slot_cursor, Block.id >= id_cursor)
-                .order_by(Block.slot.asc(), Block.id.asc())
+                .where(Block.height >= height_cursor)
+                .order_by(Block.height.asc())
             )
 
             with self.client.session() as session:
                 blocks: List[Block] = session.exec(statement).all()
 
             if len(blocks) > 0:
-                slot_cursor = blocks[-1].slot
-                id_cursor = blocks[-1].id + 1
+                height_cursor = blocks[-1].height + 1
                 yield blocks
             else:
                 await sleep(timeout_seconds)
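
`updates_stream` is now keyed on a single monotonic `height_cursor` instead of the old (slot, id) pair, which matches the assumption that heights strictly increase along the stored chain. A consumer sketch, assuming an already-constructed `BlockRepository` instance:

from rusty_results import Empty

async def follow_chain(repository) -> None:  # repository: BlockRepository (assumed)
    # Empty() means "no starting block": the stream begins at height 0.
    async for batch in repository.updates_stream(Empty()):
        for block in batch:
            print(f"height={block.height} hash={block.hash.hex()[:16]}...")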
@@ -1,4 +1,3 @@
-import logging
 from asyncio import sleep
 from typing import AsyncIterator, List
 
@@ -13,11 +12,11 @@ from models.transactions.transaction import Transaction
 
 
 def get_latest_statement(limit: int, *, output_ascending: bool, preload_relationships: bool) -> Select:
-    # Join with Block to order by Block's slot and fetch the latest N transactions in descending order
+    # Join with Block to order by Block's height and fetch the latest N transactions in descending order
     base = (
-        select(Transaction, Block.slot.label("block__slot"), Block.id.label("block__id"))
+        select(Transaction, Block.height.label("block__height"))
         .join(Block, Transaction.block_id == Block.id)
-        .order_by(Block.slot.desc(), Block.id.desc(), Transaction.id.desc())
+        .order_by(Block.height.desc(), Transaction.id.desc())
         .limit(limit)
     )
     if not output_ascending:
@@ -26,7 +25,7 @@ def get_latest_statement(limit: int, *, output_ascending: bool, preload_relationships: bool) -> Select:
     # Reorder for output
     inner = base.subquery()
     latest = aliased(Transaction, inner)
-    statement = select(latest).order_by(inner.c.block__slot.asc(), inner.c.block__id.asc(), latest.id.asc())
+    statement = select(latest).order_by(inner.c.block__height.asc(), latest.id.asc())
     if preload_relationships:
         statement = statement.options(selectinload(latest.block))
     return statement
@@ -76,8 +75,7 @@ class TransactionRepository:
     async def updates_stream(
         self, transaction_from: Option[Transaction], *, timeout_seconds: int = 1
     ) -> AsyncIterator[List[Transaction]]:
-        slot_cursor = transaction_from.map(lambda transaction: transaction.block.slot).unwrap_or(0)
-        block_id_cursor = transaction_from.map(lambda transaction: transaction.block.id).unwrap_or(0)
+        height_cursor = transaction_from.map(lambda transaction: transaction.block.height).unwrap_or(0)
         transaction_id_cursor = transaction_from.map(lambda transaction: transaction.id + 1).unwrap_or(0)
 
         while True:
@@ -86,19 +84,17 @@ class TransactionRepository:
                 .options(selectinload(Transaction.block))
                 .join(Block, Transaction.block_id == Block.id)
                 .where(
-                    Block.slot >= slot_cursor,
-                    Block.id >= block_id_cursor,
+                    Block.height >= height_cursor,
                     Transaction.id >= transaction_id_cursor,
                 )
-                .order_by(Block.slot.asc(), Block.id.asc(), Transaction.id.asc())
+                .order_by(Block.height.asc(), Transaction.id.asc())
             )
 
             with self.client.session() as session:
                 transactions: List[Transaction] = session.exec(statement).all()
 
             if len(transactions) > 0:
-                slot_cursor = transactions[-1].block.slot
-                block_id_cursor = transactions[-1].block.id
+                height_cursor = transactions[-1].block.height
                 transaction_id_cursor = transactions[-1].id + 1
                 yield transactions
             else:
@@ -24,6 +24,7 @@ class Block(TimestampedModel, table=True):
     hash: HexBytes = Field(nullable=False, unique=True)
     parent_block: HexBytes = Field(nullable=False)
     slot: int = Field(nullable=False)
+    height: int = Field(nullable=False, default=0)
     block_root: HexBytes = Field(nullable=False)
     proof_of_leadership: ProofOfLeadership = Field(
         sa_column=Column(PydanticJsonColumn(ProofOfLeadership), nullable=False)
@@ -1,2 +1 @@
 from .proof_of_leadership import ProofOfLeadership
-from .public import Public
@@ -3,7 +3,6 @@ from typing import Optional, Union
 
 from core.models import NbeSchema
 from core.types import HexBytes
-from models.header.public import Public
 
 
 class ProofOfLeadershipType(Enum):
@@ -19,7 +18,6 @@ class Groth16ProofOfLeadership(NbeProofOfLeadership):
     entropy_contribution: HexBytes
     leader_key: HexBytes
     proof: HexBytes
-    public: Optional[Public]
     voucher_cm: HexBytes
 
 
@@ -1,10 +0,0 @@
-from core.models import NbeSchema
-from core.types import HexBytes
-
-
-class Public(NbeSchema):
-    aged_root: HexBytes
-    epoch_nonce: HexBytes
-    latest_root: HexBytes
-    slot: int
-    total_stake: int
@@ -1,8 +1,9 @@
 from abc import ABC, abstractmethod
-from typing import TYPE_CHECKING, AsyncIterator, List
+from typing import TYPE_CHECKING, AsyncIterator, Optional
 
 from node.api.serializers.block import BlockSerializer
 from node.api.serializers.health import HealthSerializer
+from node.api.serializers.info import InfoSerializer
 
 if TYPE_CHECKING:
     from core.app import NBESettings
@@ -18,9 +19,13 @@ class NodeApi(ABC):
         pass
 
     @abstractmethod
-    async def get_blocks(self, **kwargs) -> List[BlockSerializer]:
+    async def get_info(self) -> InfoSerializer:
         pass
 
     @abstractmethod
-    async def get_blocks_stream(self) -> AsyncIterator[List[BlockSerializer]]:
+    async def get_block_by_hash(self, block_hash: str) -> Optional[BlockSerializer]:
         pass
+
+    @abstractmethod
+    async def get_blocks_stream(self) -> AsyncIterator[BlockSerializer]:
+        pass
@@ -1,23 +1,18 @@
 from asyncio import sleep
-from random import choices, random
-from typing import TYPE_CHECKING, AsyncIterator, List
+from random import random
+from typing import TYPE_CHECKING, AsyncIterator, Optional
 
 from rusty_results import Some
 
 from node.api.base import NodeApi
 from node.api.serializers.block import BlockSerializer
 from node.api.serializers.health import HealthSerializer
+from node.api.serializers.info import InfoSerializer
 
 if TYPE_CHECKING:
     from core.app import NBESettings
 
 
-def get_weighted_amount() -> int:
-    items = [1, 2, 3]
-    weights = [0.6, 0.3, 0.1]
-    return choices(items, weights=weights, k=1)[0]
-
-
 class FakeNodeApi(NodeApi):
     def __init__(self, _settings: "NBESettings"):
         self.current_slot: int = 0
@@ -28,12 +23,18 @@ class FakeNodeApi(NodeApi):
         else:
             return HealthSerializer.from_healthy()
 
-    async def get_blocks(self, **kwargs) -> List[BlockSerializer]:
-        n = get_weighted_amount()
-        assert n >= 1
-        blocks = [BlockSerializer.from_random() for _ in range(n)]
-        self.current_slot = max(blocks, key=lambda block: block.slot).slot
-        return blocks
+    async def get_info(self) -> InfoSerializer:
+        return InfoSerializer(
+            lib="0" * 64,
+            tip="0" * 64,
+            slot=self.current_slot,
+            height=0,
+            mode="Fake",
+        )
+
+    async def get_block_by_hash(self, block_hash: str) -> Optional[BlockSerializer]:
+        # Fake API doesn't track blocks by hash
+        return None
 
     async def get_blocks_stream(self) -> AsyncIterator[BlockSerializer]:
         while True:
@@ -1,6 +1,6 @@
 import json
 import logging
-from typing import TYPE_CHECKING, AsyncIterator, List, Optional
+from typing import TYPE_CHECKING, AsyncIterator, Optional
 from urllib.parse import urljoin, urlunparse
 
 import httpx
@@ -12,6 +12,7 @@ from core.authentication import Authentication
 from node.api.base import NodeApi
 from node.api.serializers.block import BlockSerializer
 from node.api.serializers.health import HealthSerializer
+from node.api.serializers.info import InfoSerializer
 
 if TYPE_CHECKING:
     from core.app import NBESettings
@@ -23,9 +24,8 @@ logger = logging.getLogger(__name__)
 class HttpNodeApi(NodeApi):
     # Paths can't have a leading slash since they are relative to the base URL
     ENDPOINT_INFO = "cryptarchia/info"
     ENDPOINT_TRANSACTIONS = "cryptarchia/transactions"
-    ENDPOINT_BLOCKS = "cryptarchia/blocks"
     ENDPOINT_BLOCKS_STREAM = "cryptarchia/events/blocks/stream"
+    ENDPOINT_BLOCK_BY_HASH = "storage/block"
 
     def __init__(self, settings: "NBESettings"):
         self.host: str = settings.node_api_host
@@ -70,19 +70,40 @@ class HttpNodeApi(NodeApi):
         else:
             return HealthSerializer.from_unhealthy()
 
-    async def get_blocks(self, slot_from: int, slot_to: int) -> List[BlockSerializer]:
-        query_string = f"slot_from={slot_from}&slot_to={slot_to}"
-        endpoint = urljoin(self.base_url, self.ENDPOINT_BLOCKS)
-        url = f"{endpoint}?{query_string}"
+    async def get_info(self) -> InfoSerializer:
+        url = urljoin(self.base_url, self.ENDPOINT_INFO)
         response = requests.get(url, auth=self.authentication, timeout=60)
-        python_json = response.json()
-        blocks = [BlockSerializer.model_validate(item) for item in python_json]
-        return blocks
+        response.raise_for_status()
+        return InfoSerializer.model_validate(response.json())
+
+    async def get_block_by_hash(self, block_hash: str) -> Optional[BlockSerializer]:
+        url = urljoin(self.base_url, self.ENDPOINT_BLOCK_BY_HASH)
+        response = requests.post(
+            url,
+            auth=self.authentication,
+            timeout=60,
+            json=block_hash,
+        )
+        if response.status_code == 404:
+            return None
+        response.raise_for_status()
+        json_data = response.json()
+        if json_data is None:
+            logger.warning(f"Block {block_hash} returned null from API")
+            return None
+        block = BlockSerializer.model_validate(json_data)
+        # The storage endpoint doesn't include the block hash in the response,
+        # so we set it from the request body
+        if not block.header.hash:
+            block.header.hash = bytes.fromhex(block_hash)
+        return block
 
     async def get_blocks_stream(self) -> AsyncIterator[BlockSerializer]:
         url = urljoin(self.base_url, self.ENDPOINT_BLOCKS_STREAM)
         auth = self.authentication.map(lambda _auth: _auth.for_httpx()).unwrap_or(None)
-        async with httpx.AsyncClient(timeout=self.timeout, auth=auth) as client:
+        # Use no read timeout for streaming - blocks may arrive infrequently
+        stream_timeout = httpx.Timeout(connect=self.timeout, read=None, write=self.timeout, pool=self.timeout)
+        async with httpx.AsyncClient(timeout=stream_timeout, auth=auth) as client:
            async with client.stream("GET", url) as response:
                response.raise_for_status()  # TODO: Result
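
The dedicated `httpx.Timeout` disables only the read phase: connect, write, and pool acquisition still fail fast, while the NDJSON stream may idle indefinitely between blocks. The shape of that configuration in isolation (the 10-second base value is an assumption; the real one comes from settings):

import httpx

BASE_TIMEOUT = 10.0  # assumed; stands in for self.timeout in the client above

# read=None disables only the read timeout; other phases keep the base value.
stream_timeout = httpx.Timeout(connect=BASE_TIMEOUT, read=None, write=BASE_TIMEOUT, pool=BASE_TIMEOUT)

async def tail(url: str) -> None:
    async with httpx.AsyncClient(timeout=stream_timeout) as client:
        async with client.stream("GET", url) as response:
            response.raise_for_status()
            async for line in response.aiter_lines():
                print(line)  # one NDJSON document per line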
@@ -15,7 +15,7 @@ from utils.random import random_hash
 
 
 class HeaderSerializer(NbeSerializer, FromRandom):
-    hash: BytesFromHex = Field(alias="id", description="Hash id in hex format.")
+    hash: BytesFromHex = Field(default=b"", alias="id", description="Hash id in hex format.")
     parent_block: BytesFromHex = Field(description="Hash in hex format.")
     slot: int = Field(description="Integer in u64 format.")
     block_root: BytesFromHex = Field(description="Hash in hex format.")
@@ -4,14 +4,14 @@ from typing import List, Self
 from pydantic import Field
 
 from core.models import NbeSerializer
-from node.api.serializers.fields import BytesFromIntArray
+from node.api.serializers.fields import BytesFromHex
 from node.api.serializers.note import NoteSerializer
 from utils.protocols import FromRandom
 from utils.random import random_bytes
 
 
 class LedgerTransactionSerializer(NbeSerializer, FromRandom):
-    inputs: List[BytesFromIntArray] = Field(description="Fr integer.")
+    inputs: List[BytesFromHex] = Field(description="Fr integer.")
     outputs: List[NoteSerializer]
 
     @classmethod
@@ -21,7 +21,7 @@ class LedgerTransactionSerializer(NbeSerializer, FromRandom):
 
         return cls.model_validate(
             {
-                "inputs": [list(random_bytes(2048)) for _ in range(n_inputs)],
+                "inputs": [random_bytes(32).hex() for _ in range(n_inputs)],
                 "outputs": [NoteSerializer.from_random() for _ in range(n_outputs)],
             }
         )
@@ -10,7 +10,6 @@ from models.header.proof_of_leadership import (
     ProofOfLeadership,
 )
 from node.api.serializers.fields import BytesFromHex, BytesFromIntArray
-from node.api.serializers.public import PublicSerializer
 from utils.protocols import EnforceSubclassFromRandom
 from utils.random import random_bytes
 
@@ -27,17 +26,14 @@ class Groth16LeaderProofSerializer(ProofOfLeadershipSerializer, NbeSerializer):
     proof: BytesFromIntArray = Field(
         description="Bytes in Integer Array format.",
     )
-    public: Optional[PublicSerializer] = Field(description="Only received if Node is running in dev mode.")
     voucher_cm: BytesFromHex = Field(description="Hash.")
 
     def into_proof_of_leadership(self) -> ProofOfLeadership:
-        public = self.public.into_public() if self.public else None
         return Groth16ProofOfLeadership.model_validate(
             {
                 "entropy_contribution": self.entropy_contribution,
                 "leader_key": self.leader_key,
                 "proof": self.proof,
-                "public": public,
                 "voucher_cm": self.voucher_cm,
             }
         )
@@ -49,7 +45,6 @@ class Groth16LeaderProofSerializer(ProofOfLeadershipSerializer, NbeSerializer):
                 "entropy_contribution": random_bytes(32).hex(),
                 "leader_key": random_bytes(32).hex(),
                 "proof": list(random_bytes(128)),
-                "public": PublicSerializer.from_random(slot),
                 "voucher_cm": random_bytes(32).hex(),
             }
         )
@@ -1,42 +0,0 @@
-from random import randint
-from typing import Self
-
-from pydantic import Field
-from rusty_results import Option
-
-from core.models import NbeSerializer
-from models.header.public import Public
-from node.api.serializers.fields import BytesFromHex
-from utils.protocols import FromRandom
-from utils.random import random_bytes
-
-
-class PublicSerializer(NbeSerializer, FromRandom):
-    aged_root: BytesFromHex = Field(description="Fr integer in hex format.")
-    epoch_nonce: BytesFromHex = Field(description="Fr integer in hex format.")
-    latest_root: BytesFromHex = Field(description="Fr integer in hex format.")
-    slot: int = Field(description="Integer in u64 format.")
-    total_stake: int = Field(description="Integer in u64 format.")
-
-    def into_public(self) -> Public:
-        return Public.model_validate(
-            {
-                "aged_root": self.aged_root,
-                "epoch_nonce": self.epoch_nonce,
-                "latest_root": self.latest_root,
-                "slot": self.slot,
-                "total_stake": self.total_stake,
-            }
-        )
-
-    @classmethod
-    def from_random(cls, slot: Option[int]) -> Self:
-        cls.model_validate(
-            {
-                "aged_root": random_bytes(32).hex(),
-                "epoch_nonce": random_bytes(32).hex(),
-                "latest_root": random_bytes(32).hex(),
-                "slot": slot.unwrap_or(randint(0, 10_000)),
-                "total_stake": randint(0, 10_000),
-            }
-        )
@@ -1,11 +1,10 @@
 from typing import List, Self
 
 from pydantic import Field
-from rusty_results import Option
 
 from core.models import NbeSerializer
 from models.transactions.transaction import Transaction
-from node.api.serializers.fields import BytesFromHex
+from node.api.serializers.fields import BytesFromIntArray
 from node.api.serializers.proof import (
     OperationProofSerializer,
     OperationProofSerializerField,
@@ -15,14 +14,31 @@ from utils.protocols import FromRandom
 from utils.random import random_bytes
 
 
+class Groth16ProofSerializer(NbeSerializer, FromRandom):
+    pi_a: BytesFromIntArray
+    pi_b: BytesFromIntArray
+    pi_c: BytesFromIntArray
+
+    def to_bytes(self) -> bytes:
+        return self.pi_a + self.pi_b + self.pi_c
+
+    @classmethod
+    def from_random(cls) -> Self:
+        return cls.model_validate(
+            {
+                "pi_a": list(random_bytes(32)),
+                "pi_b": list(random_bytes(64)),
+                "pi_c": list(random_bytes(32)),
+            }
+        )
+
+
 class SignedTransactionSerializer(NbeSerializer, FromRandom):
     transaction: TransactionSerializer = Field(alias="mantle_tx", description="Transaction.")
     operations_proofs: List[OperationProofSerializerField] = Field(
         alias="ops_proofs", description="List of OperationProof. Order should match `Self::transaction::operations`."
     )
-    ledger_transaction_proof: BytesFromHex = Field(
-        alias="ledger_tx_proof", description="Hash.", min_length=128, max_length=128
-    )
+    ledger_transaction_proof: Groth16ProofSerializer = Field(alias="ledger_tx_proof", description="Groth16 proof.")
 
     def into_transaction(self) -> Transaction:
         operations_contents = self.transaction.operations_contents
@@ -48,7 +64,7 @@ class SignedTransactionSerializer(NbeSerializer, FromRandom):
                 "operations": operations,
                 "inputs": ledger_transaction.inputs,
                 "outputs": outputs,
-                "proof": self.ledger_transaction_proof,
+                "proof": self.ledger_transaction_proof.to_bytes(),
                 "execution_gas_price": self.transaction.execution_gas_price,
                 "storage_gas_price": self.transaction.storage_gas_price,
             }
@@ -60,5 +76,5 @@ class SignedTransactionSerializer(NbeSerializer, FromRandom):
         n = len(transaction.operations_contents)
         operations_proofs = [OperationProofSerializer.from_random() for _ in range(n)]
         return cls.model_validate(
-            {"mantle_tx": transaction, "ops_proofs": operations_proofs, "ledger_tx_proof": random_bytes(128).hex()}
+            {"mantle_tx": transaction, "ops_proofs": operations_proofs, "ledger_tx_proof": Groth16ProofSerializer.from_random()}
         )
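
`to_bytes()` flattens the three Groth16 components back into the single proof blob downstream code expects: pi_a (32 bytes) + pi_b (64 bytes) + pi_c (32 bytes) = 128 bytes, consistent with the fixed 128-length constraint the old `ledger_tx_proof` field carried. Checking the arithmetic in isolation:

from os import urandom

pi_a, pi_b, pi_c = urandom(32), urandom(64), urandom(32)
proof = pi_a + pi_b + pi_c  # same concatenation as to_bytes()
assert len(proof) == 128
assert proof[:32] == pi_a and proof[32:96] == pi_b and proof[96:] == pi_c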
@@ -1,16 +1,13 @@
 import logging
-from asyncio import TaskGroup, create_task, sleep
+from asyncio import create_task
 from contextlib import asynccontextmanager
 from typing import TYPE_CHECKING, AsyncGenerator, AsyncIterator, List
 
-from rusty_results import Option
-
 from db.blocks import BlockRepository
 from db.clients import SqliteClient
 from db.transaction import TransactionRepository
 from models.block import Block
 from node.api.builder import build_node_api
 from node.api.serializers.block import BlockSerializer
 from node.manager.builder import build_node_manager
 
 if TYPE_CHECKING:
@@ -19,6 +16,69 @@ if TYPE_CHECKING:
 logger = logging.getLogger(__name__)
 
 
+async def backfill_to_lib(app: "NBE") -> None:
+    """
+    Fetch the LIB (Last Irreversible Block) from the node and backfill by walking the chain backwards.
+    This traverses parent links instead of querying by slot range, which handles pruned/missing blocks.
+    """
+    try:
+        info = await app.state.node_api.get_info()
+        logger.info(f"Node info: LIB={info.lib}, tip={info.tip}, slot={info.slot}, height={info.height}")
+
+        await backfill_chain_from_hash(app, info.lib)
+
+    except Exception as error:
+        logger.exception(f"Error during initial backfill to LIB: {error}")
+        # Don't raise - we can still try to subscribe to new blocks
+
+
+async def backfill_chain_from_hash(app: "NBE", block_hash: str) -> None:
+    """
+    Walk the chain backwards from block_hash, fetching blocks until we hit
+    a block we already have or a genesis block (parent doesn't exist).
+    """
+    blocks_to_insert: List[Block] = []
+    current_hash = block_hash
+
+    while True:
+        # Check if we already have this block
+        existing = await app.state.block_repository.get_by_hash(bytes.fromhex(current_hash))
+        if existing.is_some:
+            logger.debug(f"Block {current_hash[:16]}... already exists, stopping chain walk")
+            break
+
+        # Fetch the block from the node
+        block_serializer = await app.state.node_api.get_block_by_hash(current_hash)
+        if block_serializer is None:
+            logger.info(f"Block {current_hash[:16]}... not found on node (likely genesis parent), stopping chain walk")
+            break
+
+        block = block_serializer.into_block()
+        blocks_to_insert.append(block)
+        logger.debug(f"Queued block at slot {block.slot} (hash={current_hash[:16]}...) for insertion")
+
+        # Move to parent
+        current_hash = block.parent_block.hex()
+
+    if not blocks_to_insert:
+        logger.info("No new blocks to backfill")
+        return
+
+    # Reverse so we insert from oldest to newest (parent before child)
+    blocks_to_insert.reverse()
+
+    # Capture slot range before insert (blocks get detached from session after commit)
+    first_slot = blocks_to_insert[0].slot
+    last_slot = blocks_to_insert[-1].slot
+    block_count = len(blocks_to_insert)
+
+    logger.info(f"Backfilling {block_count} blocks from chain walk...")
+
+    # Insert all blocks, allowing the first one to be a chain root if its parent doesn't exist
+    await app.state.block_repository.create(*blocks_to_insert, allow_chain_root=True)
+    logger.info(f"Backfilled {block_count} blocks (slots {first_slot} to {last_slot})")
+
+
 @asynccontextmanager
 async def node_lifespan(app: "NBE") -> AsyncGenerator[None]:
     app.state.node_manager = build_node_manager(app.settings)
@@ -34,8 +94,10 @@ async def node_lifespan(app: "NBE") -> AsyncGenerator[None]:
         await app.state.node_manager.start()
         logger.info("Node started.")
 
-        app.state.subscription_to_updates_handle = create_task(subscribe_to_updates(app))
-        app.state.backfill = create_task(backfill(app))
+        # Backfill to LIB on startup
+        await backfill_to_lib(app)
+
+        app.state.subscription_to_updates_handle = create_task(subscribe_to_new_blocks(app))
 
         yield
     finally:
@@ -44,35 +106,6 @@
         logger.info("Node stopped.")
 
 
-# ================
-# BACKFILLING
-# ================
-# Legend:
-#   BT = Block and/or Transaction
-# Steps:
-#   1. Subscribe to new BT and store them in the database.
-#   2. Backfill gaps between the earliest received BT from subscription (step 1.) and the latest BT in the database.
-#   3. Backfill gaps between the earliest BT in the database and genesis BT (slot 0).
-# Assumptions:
-#   - BT are always filled correctly.
-#   - There's at most 1 gap in the BT sequence: From genesis to earliest received BT from subscription.
-#   - Slots are populated fully or not at all (no partial slots).
-# Notes:
-#   - Upsert always.
-
-# ================
-# Fake
-_SUBSCRIPTION_START_SLOT = 5  # Simplification for now.
-# ================
-
-
-async def subscribe_to_updates(app: "NBE") -> None:
-    logger.info("✅ Subscription to new blocks and transactions started.")
-    async with TaskGroup() as tg:
-        tg.create_task(subscribe_to_new_blocks(app))
-    logger.info("Subscription to new blocks and transactions finished.")
-
-
 async def _gracefully_close_stream(stream: AsyncIterator) -> None:
     aclose = getattr(stream, "aclose", None)
     if aclose is not None:
@@ -83,15 +116,17 @@
 
 
 async def subscribe_to_new_blocks(app: "NBE"):
-    blocks_stream: AsyncGenerator[BlockSerializer] = app.state.node_api.get_blocks_stream()  # type: ignore[call-arg]
+    logger.info("Subscription to new blocks started.")
+    blocks_stream = app.state.node_api.get_blocks_stream()
 
     try:
         while app.state.is_running:
             try:
-                block_serializer = await anext(blocks_stream)  # TODO: Use anext's Sentinel?
+                block_serializer = await anext(blocks_stream)
             except TimeoutError:
                 continue
             except StopAsyncIteration:
-                logger.error(f"Subscription to the new blocks stream ended unexpectedly. Please restart the node.")
+                logger.error("Subscription to the new blocks stream ended unexpectedly. Please restart the node.")
                 break
             except Exception as error:
                 logger.exception(f"Error while fetching new blocks: {error}")
@@ -99,52 +134,31 @@
 
             try:
                 block = block_serializer.into_block()
+
+                # Check if parent exists in DB
+                parent_exists = (await app.state.block_repository.get_by_hash(block.parent_block)).is_some
+
+                if not parent_exists:
+                    # Need to backfill the chain from this block's parent
+                    logger.info(f"Parent block not found for block at slot {block.slot}. Initiating chain backfill...")
+                    await backfill_chain_from_hash(app, block.parent_block.hex())
+
+                    # Re-check if parent now exists after backfill
+                    parent_exists = (await app.state.block_repository.get_by_hash(block.parent_block)).is_some
+                    if not parent_exists:
+                        logger.warning(f"Parent block still not found after backfill for block at slot {block.slot}. Skipping block.")
+                        continue
+
+                # Capture values before create() detaches the block from the session
+                block_slot = block.slot
+
+                # Now we have the parent, store the block
                 await app.state.block_repository.create(block)
+                logger.debug(f"Stored block at slot {block_slot}")
+
             except Exception as error:
                 logger.exception(f"Error while storing new block: {error}")
     finally:
         await _gracefully_close_stream(blocks_stream)
-
-
-async def backfill(app: "NBE") -> None:
-    logger.info("Backfilling started.")
-    async with TaskGroup() as tg:
-        tg.create_task(backfill_blocks(app, db_hit_interval_seconds=3))
-    logger.info("✅ Backfilling finished.")
-
-
-async def get_earliest_block_slot(app: "NBE") -> Option[int]:
-    earliest_block: Option[Block] = await app.state.block_repository.get_earliest()
-    return earliest_block.map(lambda block: block.slot)
-
-
-async def backfill_blocks(app: "NBE", *, db_hit_interval_seconds: int, batch_size: int = 50):
-    """
-    FIXME: This is a very naive implementation:
-        - One block per slot.
-        - There's at most one gap to backfill (from genesis to earliest block).
-    FIXME: First block received is slot=2
-    """
-    logger.info("Checking for block gaps to backfill...")
-    # Hit the database until we get a block
-    while (earliest_block_slot_option := await get_earliest_block_slot(app)).is_empty:
-        logger.debug("No blocks were found in the database yet. Waiting...")
-        await sleep(db_hit_interval_seconds)
-    earliest_block_slot: int = earliest_block_slot_option.unwrap()
-
-    if earliest_block_slot == 0:
-        logger.info("No blocks to backfill.")
-        return
-
-    slot_to = earliest_block_slot - 1
-    logger.info(f"Backfilling blocks from slot {slot_to} down to 0...")
-    while slot_to > 0:
-        slot_from = max(0, slot_to - batch_size)
-        blocks_serializers: List[BlockSerializer] = await app.state.node_api.get_blocks(
-            slot_from=slot_from, slot_to=slot_to
-        )
-        blocks: List[Block] = [block_serializer.into_block() for block_serializer in blocks_serializers]
-        logger.debug(f"Backfilling {len(blocks)} blocks from slot {slot_from} to {slot_to}...")
-        await app.state.block_repository.create(*blocks)
-        slot_to = slot_from - 1
-    logger.info("Backfilling blocks completed.")
+        logger.info("Subscription to new blocks finished.")
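
The chain walk replaces the old slot-range backfill with parent-pointer traversal, so gaps and pruned slots cannot strand the process. The control flow reduced to its essentials (an in-memory stand-in for the node and the DB; purely illustrative):

def chain_walk(tip: str, node: dict[str, str], db: set[str]) -> list[str]:
    """node maps block hash -> parent hash; db holds already-stored hashes."""
    to_insert, current = [], tip
    while current not in db:
        parent = node.get(current)
        if parent is None:  # unknown to the node as well: treat as chain root
            break
        to_insert.append(current)
        current = parent
    to_insert.reverse()  # oldest first, parent before child, as create() expects
    return to_insert

node = {"c": "b", "b": "a", "a": "genesis"}  # child -> parent
assert chain_walk("c", node, db={"a"}) == ["b", "c"]       # stops at a known block
assert chain_walk("c", node, db=set()) == ["a", "b", "c"]  # walks down to the root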
@@ -17,7 +17,7 @@ function AppShell(props) {
   return h(
     Fragment,
     null,
-    h('header', null, h('h1', null, 'Nomos Block Explorer'), h(HealthPill, null)),
+    h('header', null, h('h1', null, 'λ Blockchain Block Explorer'), h(HealthPill, null)),
     props.children,
   );
 }
@@ -1,158 +1,243 @@
-// static/pages/BlocksTable.js
+// static/components/BlocksTable.js
 import { h } from 'preact';
-import { useEffect, useRef } from 'preact/hooks';
+import { useEffect, useState, useCallback, useRef } from 'preact/hooks';
 import { PAGE, API } from '../lib/api.js';
 import { TABLE_SIZE } from '../lib/constants.js';
-import { streamNdjson, ensureFixedRowCount, shortenHex } from '../lib/utils.js';
+import { shortenHex, streamNdjson } from '../lib/utils.js';
 
+const normalize = (raw) => {
+  const header = raw.header ?? null;
+  const txLen = Array.isArray(raw.transactions)
+    ? raw.transactions.length
+    : Array.isArray(raw.txs)
+      ? raw.txs.length
+      : 0;
+
+  return {
+    id: Number(raw.id ?? 0),
+    height: Number(raw.height ?? 0),
+    slot: Number(raw.slot ?? header?.slot ?? 0),
+    hash: raw.hash ?? header?.hash ?? '',
+    parent: raw.parent_block_hash ?? header?.parent_block ?? raw.parent_block ?? '',
+    root: raw.block_root ?? header?.block_root ?? '',
+    transactionCount: txLen,
+  };
+};
+
 export default function BlocksTable() {
-  const bodyRef = useRef(null);
-  const countRef = useRef(null);
+  const [blocks, setBlocks] = useState([]);
+  const [page, setPage] = useState(0);
+  const [totalPages, setTotalPages] = useState(0);
+  const [totalCount, setTotalCount] = useState(0);
+  const [loading, setLoading] = useState(true);
+  const [error, setError] = useState(null);
+  const [live, setLive] = useState(true); // Start in live mode
+
   const abortRef = useRef(null);
   const seenKeysRef = useRef(new Set());
 
-  useEffect(() => {
-    const body = bodyRef.current;
-    const counter = countRef.current;
+  // Fetch paginated blocks
+  const fetchBlocks = useCallback(async (pageNum) => {
+    // Stop any live stream
+    abortRef.current?.abort();
+    seenKeysRef.current.clear();
 
-    // 5 columns: Hash | Slot | Parent | Block Root | Transactions
-    ensureFixedRowCount(body, 5, TABLE_SIZE);
+    setLoading(true);
+    setError(null);
+    try {
+      const res = await fetch(API.BLOCKS_LIST(pageNum, TABLE_SIZE));
+      if (!res.ok) throw new Error(`HTTP ${res.status}`);
+      const data = await res.json();
+      setBlocks(data.blocks.map(normalize));
+      setTotalPages(data.total_pages);
+      setTotalCount(data.total_count);
+      setPage(data.page);
+    } catch (e) {
+      setError(e.message);
+    } finally {
+      setLoading(false);
+    }
+  }, []);
 
+  // Start live streaming
+  const startLiveStream = useCallback(() => {
+    abortRef.current?.abort();
+    abortRef.current = new AbortController();
+    seenKeysRef.current.clear();
+    setBlocks([]);
+    setLoading(true);
+    setError(null);
 
-    const pruneAndPad = () => {
-      // remove any placeholder rows that snuck in
-      for (let i = body.rows.length - 1; i >= 0; i--) {
-        if (body.rows[i].classList.contains('ph')) body.deleteRow(i);
-      }
-      // keep at most TABLE_SIZE non-placeholder rows
-      while ([...body.rows].filter((r) => !r.classList.contains('ph')).length > TABLE_SIZE) {
-        const last = body.rows[body.rows.length - 1];
-        const key = last?.dataset?.key;
-        if (key) seenKeysRef.current.delete(key);
-        body.deleteRow(-1);
-      }
-      // pad with placeholders to TABLE_SIZE (5 cols)
-      ensureFixedRowCount(body, 5, TABLE_SIZE);
-      const real = [...body.rows].filter((r) => !r.classList.contains('ph')).length;
-      counter.textContent = String(real);
-    };
-
-    const navigateToBlockDetail = (blockHash) => {
-      history.pushState({}, '', PAGE.BLOCK_DETAIL(blockHash));
-      window.dispatchEvent(new PopStateEvent('popstate'));
-    };
-
-    const appendRow = (b, key) => {
-      const tr = document.createElement('tr');
-      tr.dataset.key = key;
-
-      // Hash (clickable, replaces ID)
-      const tdId = document.createElement('td');
-      const linkId = document.createElement('a');
-      linkId.className = 'linkish mono';
-      linkId.href = PAGE.BLOCK_DETAIL(b.hash);
-      linkId.textContent = shortenHex(b.hash);
-      linkId.title = b.hash;
-      linkId.addEventListener('click', (e) => {
-        e.preventDefault();
-        navigateToBlockDetail(b.hash);
-      });
-      tdId.appendChild(linkId);
-
-      // Slot
-      const tdSlot = document.createElement('td');
-      const spSlot = document.createElement('span');
-      spSlot.className = 'mono';
-      spSlot.textContent = String(b.slot);
-      tdSlot.appendChild(spSlot);
-
-      // Parent (block.parent_block_hash)
-      const tdParent = document.createElement('td');
-      const linkParent = document.createElement('a');
-      linkParent.className = 'linkish mono';
-      linkParent.href = PAGE.BLOCK_DETAIL(b.parent);
-      linkParent.textContent = shortenHex(b.parent);
-      linkParent.title = b.parent;
-      linkParent.addEventListener('click', (e) => {
-        e.preventDefault();
-        navigateToBlockDetail(b.parent, e);
-      });
-      tdParent.appendChild(linkParent);
-
-      // Block Root
-      const tdRoot = document.createElement('td');
-      const spRoot = document.createElement('span');
-      spRoot.className = 'mono';
-      spRoot.title = b.root;
-      spRoot.textContent = shortenHex(b.root);
-      tdRoot.appendChild(spRoot);
-
-      // Transactions (array length)
-      const tdCount = document.createElement('td');
-      const spCount = document.createElement('span');
-      spCount.className = 'mono';
-      spCount.textContent = String(b.transactionCount);
-      tdCount.appendChild(spCount);
-
-      tr.append(tdId, tdSlot, tdParent, tdRoot, tdCount);
-      body.insertBefore(tr, body.firstChild);
-      pruneAndPad();
-    };
-
-    const normalize = (raw) => {
-      // New backend:
-      //   { id, hash, slot, block_root, parent_block_hash, transactions: [...] }
-      //   Back-compat (header.* / raw.parent_block) just in case.
-      const header = raw.header ?? null;
-      const txLen = Array.isArray(raw.transactions)
-        ? raw.transactions.length
-        : Array.isArray(raw.txs)
-          ? raw.txs.length
-          : 0;
-
-      return {
-        id: Number(raw.id ?? 0),
-        slot: Number(raw.slot ?? header?.slot ?? 0),
-        hash: raw.hash ?? header?.hash ?? '',
-        parent: raw.parent_block_hash ?? header?.parent_block ?? raw.parent_block ?? '',
-        root: raw.block_root ?? header?.block_root ?? '',
-        transactionCount: txLen,
-      };
-    };
+    let liveBlocks = [];
 
     streamNdjson(
       `${API.BLOCKS_STREAM}?prefetch-limit=${encodeURIComponent(TABLE_SIZE)}`,
       (raw) => {
         const b = normalize(raw);
         const key = `${b.id}:${b.slot}`;
-        if (seenKeysRef.current.has(key)) {
-          pruneAndPad();
-          return;
-        }
+        if (seenKeysRef.current.has(key)) return;
         seenKeysRef.current.add(key);
-        appendRow(b, key);
+
+        // Add to front, keep max TABLE_SIZE
+        liveBlocks = [b, ...liveBlocks].slice(0, TABLE_SIZE);
+        setBlocks([...liveBlocks]);
+        setTotalCount(liveBlocks.length);
+        setLoading(false);
       },
       {
         signal: abortRef.current.signal,
         onError: (e) => {
-          console.error('Blocks stream error:', e);
+          if (e?.name !== 'AbortError') {
+            console.error('Blocks stream error:', e);
+            setError(e?.message || 'Stream error');
+          }
         },
       },
     );
-
-    return () => abortRef.current?.abort();
   }, []);
+
+  // Handle live mode changes
+  useEffect(() => {
+    if (live) {
+      startLiveStream();
+    }
+    return () => abortRef.current?.abort();
+  }, [live, startLiveStream]);
 
+  // Go to a page (turns off live mode)
+  const goToPage = (newPage) => {
+    if (newPage >= 0) {
+      setLive(false);
+      fetchBlocks(newPage);
+    }
+  };
+
+  // Toggle live mode
+  const toggleLive = () => {
+    if (!live) {
+      setLive(true);
+      setPage(0);
+    }
+  };
+
+  const navigateToBlockDetail = (blockHash) => {
+    history.pushState({}, '', PAGE.BLOCK_DETAIL(blockHash));
+    window.dispatchEvent(new PopStateEvent('popstate'));
+  };
+
+  const renderRow = (b, idx) => {
+    return h(
+      'tr',
+      { key: b.id || idx },
+      // Hash
+      h(
+        'td',
+        null,
+        h(
+          'a',
+          {
+            class: 'linkish mono',
+            href: PAGE.BLOCK_DETAIL(b.hash),
+            title: b.hash,
+            onClick: (e) => {
+              e.preventDefault();
+              navigateToBlockDetail(b.hash);
+            },
+          },
+          shortenHex(b.hash),
+        ),
+      ),
+      // Height
+      h('td', null, h('span', { class: 'mono' }, String(b.height))),
+      // Slot
+      h('td', null, h('span', { class: 'mono' }, String(b.slot))),
+      // Parent
+      h(
+        'td',
+        null,
+        h(
+          'a',
+          {
+            class: 'linkish mono',
+            href: PAGE.BLOCK_DETAIL(b.parent),
+            title: b.parent,
+            onClick: (e) => {
+              e.preventDefault();
+              navigateToBlockDetail(b.parent);
+            },
+          },
+          shortenHex(b.parent),
+        ),
+      ),
+      // Block Root
+      h('td', null, h('span', { class: 'mono', title: b.root }, shortenHex(b.root))),
+      // Transactions
+      h('td', null, h('span', { class: 'mono' }, String(b.transactionCount))),
+    );
+  };
+
+  const renderPlaceholderRow = (idx) => {
+    return h(
+      'tr',
+      { key: `ph-${idx}`, class: 'ph' },
+      h('td', null, '\u00A0'),
+      h('td', null, '\u00A0'),
+      h('td', null, '\u00A0'),
+      h('td', null, '\u00A0'),
+      h('td', null, '\u00A0'),
+      h('td', null, '\u00A0'),
+    );
+  };
+
+  const rows = [];
+  for (let i = 0; i < TABLE_SIZE; i++) {
+    if (i < blocks.length) {
+      rows.push(renderRow(blocks[i], i));
+    } else {
+      rows.push(renderPlaceholderRow(i));
+    }
+  }
+
+  // Live button styles
+  const liveButtonStyle = live
+    ? `
+      cursor: pointer;
+      background: #ff4444;
+      color: white;
+      border: none;
+      animation: live-pulse 1.5s ease-in-out infinite;
+    `
+    : `
+      cursor: pointer;
+      background: var(--bg-secondary, #333);
+      color: var(--muted, #888);
+      border: 1px solid var(--border, #444);
+    `;
 
   return h(
     'div',
     { class: 'card' },
+    // Inject keyframes for the pulse animation
+    h('style', null, `
+      @keyframes live-pulse {
+        0%, 100% { box-shadow: 0 0 4px #ff4444, 0 0 8px #ff4444; }
+        50% { box-shadow: 0 0 8px #ff4444, 0 0 16px #ff4444, 0 0 24px #ff6666; }
+      }
+    `),
     h(
       'div',
-      { class: 'card-header' },
-      h('div', null, h('strong', null, 'Blocks '), h('span', { class: 'pill', ref: countRef }, '0')),
-      h('div', { style: 'color:var(--muted); fontSize:12px;' }),
+      { class: 'card-header', style: 'display:flex; justify-content:space-between; align-items:center;' },
+      h('div', null, h('strong', null, 'Blocks '), h('span', { class: 'pill' }, String(totalCount))),
+      h(
+        'button',
+        {
+          class: 'pill',
+          style: liveButtonStyle,
+          onClick: toggleLive,
+          title: live ? 'Live updates enabled' : 'Click to enable live updates',
+        },
+        live ? 'LIVE \u2022' : 'LIVE',
+      ),
    ),
    h(
      'div',
@@ -163,11 +248,12 @@ export default function BlocksTable() {
         h(
           'colgroup',
           null,
-          h('col', { style: 'width:240px' }), // Hash
-          h('col', { style: 'width:90px' }), // Slot
-          h('col', { style: 'width:240px' }), // Parent
-          h('col', { style: 'width:240px' }), // Block Root
-          h('col', { style: 'width:120px' }), // Transactions
+          h('col', { style: 'width:200px' }), // Hash
+          h('col', { style: 'width:70px' }), // Height
+          h('col', { style: 'width:80px' }), // Slot
+          h('col', { style: 'width:200px' }), // Parent
+          h('col', { style: 'width:200px' }), // Block Root
+          h('col', { style: 'width:100px' }), // Transactions
         ),
         h(
           'thead',
@@ -176,14 +262,50 @@ export default function BlocksTable() {
           'tr',
           null,
           h('th', null, 'Hash'),
+          h('th', null, 'Height'),
           h('th', null, 'Slot'),
           h('th', null, 'Parent'),
           h('th', null, 'Block Root'),
           h('th', null, 'Transactions'),
         ),
       ),
-      h('tbody', { ref: bodyRef }),
+      h('tbody', null, ...rows),
      ),
    ),
+    // Pagination controls
+    h(
+      'div',
+      {
+        class: 'card-footer',
+        style: 'display:flex; justify-content:space-between; align-items:center; padding:8px 14px; border-top:1px solid var(--border);',
+      },
+      h(
+        'button',
+        {
+          class: 'pill',
+          disabled: page === 0 || loading,
+          onClick: () => goToPage(page - 1),
+          style: 'cursor:pointer;',
+        },
+        'Previous',
+      ),
+      h(
+        'span',
+        { style: 'color:var(--muted); font-size:13px;' },
+        live ? 'Streaming live blocks...' : totalPages > 0 ? `Page ${page + 1} of ${totalPages}` : 'No blocks',
+      ),
+      h(
+        'button',
+        {
+          class: 'pill',
+          disabled: (!live && page >= totalPages - 1) || loading,
+          onClick: () => live ? goToPage(0) : goToPage(page + 1),
+          style: 'cursor:pointer;',
+        },
+        'Next',
+      ),
+    ),
+    // Error display
+    error && h('div', { style: 'padding:8px 14px; color:#ff8a8a;' }, `Error: ${error}`),
   );
 }
@@ -1,10 +1,10 @@
 <!doctype html>
 <html lang="en">
   <head>
-    <title>Nomos Block Explorer</title>
+    <title>λ Blockchain Block Explorer</title>
     <meta charset="utf-8" />
     <meta name="viewport" content="width=device-width, initial-scale=1" />
-    <meta name="description" content="Lightweight Nomos block Explorer UI" />
+    <meta name="description" content="Lightweight λ Blockchain block Explorer UI" />
 
     <!-- Styles -->
     <link rel="stylesheet" href="/static/styles.css" />
@@ -10,6 +10,8 @@ const TRANSACTIONS_STREAM = joinUrl(API_PREFIX, 'transactions/stream');
 
 const BLOCK_DETAIL_BY_HASH = (hash) => joinUrl(API_PREFIX, 'blocks', encodeHash(hash));
 const BLOCKS_STREAM = joinUrl(API_PREFIX, 'blocks/stream');
+const BLOCKS_LIST = (page, pageSize) =>
+  `${joinUrl(API_PREFIX, 'blocks/list')}?page=${encodeURIComponent(page)}&page-size=${encodeURIComponent(pageSize)}`;
 
 export const API = {
   HEALTH_ENDPOINT,
@@ -17,6 +19,7 @@ export const API = {
   TRANSACTIONS_STREAM,
   BLOCK_DETAIL_BY_HASH,
   BLOCKS_STREAM,
+  BLOCKS_LIST,
 };
 
 const BLOCK_DETAIL = (hash) => joinUrl('/blocks', encodeHash(hash));
@@ -133,6 +133,7 @@ export default function BlockDetailPage({ parameters }) {
   const transactions = Array.isArray(block?.transactions) ? block.transactions : [];
 
   // Prefer new top-level fields; fallback to legacy header.*
+  const height = block?.height ?? null;
   const slot = block?.slot ?? header?.slot ?? null;
   const blockRoot = block?.block_root ?? header?.block_root ?? '';
   const currentBlockHash = block?.hash ?? header?.hash ?? '';
@@ -185,6 +186,7 @@ export default function BlockDetailPage({ parameters }) {
       h(
         'div',
         { style: 'margin-left:auto; display:flex; gap:8px; flex-wrap:wrap;' },
+        height != null && h('span', { class: 'pill', title: 'Height' }, `Height ${String(height)}`),
         slot != null && h('span', { class: 'pill', title: 'Slot' }, `Slot ${String(slot)}`),
       ),
     ),
@@ -218,7 +218,7 @@ function InputsTable({ inputs }) {
       h('col', { style: 'width:80px' }), // #
       h('col', null), // Value
     ),
-    h('thead', null, h('tr', null, h('th', { style: 'text-align:center;' }, '#'), h('th', null, 'Value'))),
+    h('thead', null, h('tr', null, h('th', { style: 'text-align:center;' }, '#'), h('th', null, 'Note ID'))),
     h(
       'tbody',
       null,
@@ -299,7 +299,6 @@ function OutputsTable({ outputs }) {
 function Ledger({ ledger }) {
   const inputs = Array.isArray(ledger?.inputs) ? ledger.inputs : [];
   const outputs = Array.isArray(ledger?.outputs) ? ledger.outputs : [];
-  const totalInputValue = inputs.reduce((s, v) => s + toNumber(v), 0);
   const totalOutputValue = toNumber(ledger?.totalOutputValue);
 
   return h(
@@ -318,11 +317,6 @@ function Ledger({ ledger }) {
       { style: 'display:flex; alignItems:center; gap:8px;' },
       h('b', null, 'Inputs'),
      h('span', { class: 'pill' }, String(inputs.length)),
-      h(
-        'span',
-        { class: 'amount', style: 'margin-left:auto;' },
-        `Total: ${toLocaleNum(totalInputValue)}`,
-      ),
    ),
    h(InputsTable, { inputs }),
  ),
third_party/requests.py (vendored)
@@ -9,3 +9,10 @@ def get(url, params=None, auth: Option[Authentication] = None, **kwargs):
     if auth.is_some:
         headers["Authorization"] = auth.unwrap().for_requests()
     return requests.get(url, params, headers=headers, **kwargs)
+
+
+def post(url, data=None, json=None, auth: Option[Authentication] = None, **kwargs):
+    headers = kwargs.get("headers", {})
+    if auth.is_some:
+        headers["Authorization"] = auth.unwrap().for_requests()
+    return requests.post(url, data=data, json=json, headers=headers, **kwargs)
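
Usage mirrors the vendored `get` wrapper: callers pass an `Option`-typed `auth` rather than a bare `None`. A hedged sketch (assuming the module imports as `third_party.requests`; URL and hash are placeholders):

from rusty_results import Empty

from third_party import requests  # the vendored wrapper above (assumed import path)

# Empty() means "no credentials": no Authorization header is attached.
response = requests.post(
    "http://localhost:18080/storage/block",  # placeholder node URL
    json="00" * 32,  # the block-by-hash endpoint posts the hash as a JSON string
    auth=Empty(),
)
response.raise_for_status()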