Decouple models from node api.

This commit is contained in:
Alejandro Cabeza Romero 2025-10-30 11:48:34 +01:00
parent 68c5e45804
commit 7f1a543681
No known key found for this signature in database
GPG Key ID: DA3D14AE478030FD
53 changed files with 1695 additions and 616 deletions

View File

@ -1 +1 @@
3.13 3.14

View File

@ -1,7 +1,7 @@
[project] [project]
name = "nomos-block-explorer" name = "nomos-block-explorer"
version = "0.1.0" version = "0.1.0"
requires-python = ">=3.13,<3.14" requires-python = ">=3.14,<3.15"
dependencies = [ dependencies = [
"fastapi~=0.118.0", "fastapi~=0.118.0",
"httpx>=0.28.1", "httpx>=0.28.1",

View File

@ -18,7 +18,7 @@ def _into_ndjson_data(data: Data) -> bytes:
return data.model_dump_ndjson() return data.model_dump_ndjson()
async def into_ndjson_stream(stream: Stream, bootstrap_data: Data = None) -> AsyncIterable[bytes]: async def into_ndjson_stream(stream: Stream, *, bootstrap_data: Data = None) -> AsyncIterable[bytes]:
if bootstrap_data is not None: if bootstrap_data is not None:
ndjson_data = _into_ndjson_data(bootstrap_data) ndjson_data = _into_ndjson_data(bootstrap_data)
if ndjson_data: if ndjson_data:

View File

@ -1,38 +1,32 @@
from http.client import NOT_FOUND from http.client import NOT_FOUND
from typing import TYPE_CHECKING, AsyncIterator, List, Optional from typing import TYPE_CHECKING, AsyncIterator, List
from fastapi import Path, Query from fastapi import Path, Query
from rusty_results import Empty, Option, Some
from starlette.responses import JSONResponse, Response from starlette.responses import JSONResponse, Response
from api.streams import into_ndjson_stream from api.streams import into_ndjson_stream
from api.v1.serializers.blocks import BlockRead from api.v1.serializers.blocks import BlockRead
from core.api import NBERequest, NDJsonStreamingResponse from core.api import NBERequest, NDJsonStreamingResponse
from node.models.blocks import Block from models.block import Block
if TYPE_CHECKING: if TYPE_CHECKING:
from core.app import NBE from core.app import NBE
async def _get_latest(request: NBERequest, limit: int) -> List[BlockRead]: async def _get_blocks_stream_serialized(app: "NBE", block_from: Option[Block]) -> AsyncIterator[List[BlockRead]]:
blocks = await request.app.state.block_repository.get_latest(limit=limit, ascending=True) _stream = app.state.block_repository.updates_stream(block_from)
return [BlockRead.from_block(block) for block in blocks]
async def _prefetch_blocks(request: NBERequest, prefetch_limit: int) -> List[BlockRead]:
return [] if prefetch_limit == 0 else await _get_latest(request, prefetch_limit)
async def _updates_stream(app: "NBE", latest_block: Optional[Block]) -> AsyncIterator[List[BlockRead]]:
_stream = app.state.block_repository.updates_stream(block_from=latest_block)
async for blocks in _stream: async for blocks in _stream:
yield [BlockRead.from_block(block) for block in blocks] yield [BlockRead.from_block(block) for block in blocks]
async def stream(request: NBERequest, prefetch_limit: int = Query(0, alias="prefetch-limit", ge=0)) -> Response: async def stream(request: NBERequest, prefetch_limit: int = Query(0, alias="prefetch-limit", ge=0)) -> Response:
bootstrap_blocks: List[BlockRead] = await _prefetch_blocks(request, prefetch_limit) latest_blocks = await request.app.state.block_repository.get_latest(prefetch_limit)
latest_block = bootstrap_blocks[-1] if bootstrap_blocks else None latest_block = Some(latest_blocks[-1]) if latest_blocks else Empty()
updates_stream: AsyncIterator[List[BlockRead]] = _updates_stream(request.app, latest_block) bootstrap_blocks: List[BlockRead] = [BlockRead.from_block(block) for block in latest_blocks]
ndjson_blocks_stream = into_ndjson_stream(stream=updates_stream, bootstrap_data=bootstrap_blocks)
blocks_stream: AsyncIterator[List[BlockRead]] = _get_blocks_stream_serialized(request.app, latest_block)
ndjson_blocks_stream = into_ndjson_stream(blocks_stream, bootstrap_data=bootstrap_blocks)
return NDJsonStreamingResponse(ndjson_blocks_stream) return NDJsonStreamingResponse(ndjson_blocks_stream)

View File

@ -5,22 +5,24 @@ from starlette.responses import JSONResponse, Response
from api.streams import into_ndjson_stream from api.streams import into_ndjson_stream
from core.api import NBERequest, NDJsonStreamingResponse from core.api import NBERequest, NDJsonStreamingResponse
from models.health import Health
from node.api.base import NodeApi from node.api.base import NodeApi
from node.models.health import Health from node.api.serializers.health import HealthSerializer
async def get(request: NBERequest) -> Response: async def get(request: NBERequest) -> Response:
response = await request.app.state.node_api.get_health_check() response = await request.app.state.node_api.get_health()
return JSONResponse(response) return JSONResponse(response)
async def _health_iterator(node_api: NodeApi) -> AsyncIterator[Health]: async def _create_health_stream(node_api: NodeApi, *, poll_interval_seconds: int = 10) -> AsyncIterator[Health]:
while True: while True:
yield await node_api.get_health_check() health_serializer: HealthSerializer = await node_api.get_health()
await sleep(10) yield health_serializer.into_health()
await sleep(poll_interval_seconds)
async def stream(request: NBERequest) -> Response: async def stream(request: NBERequest) -> Response:
_stream = _health_iterator(request.app.state.node_api) health_stream = _create_health_stream(request.app.state.node_api)
health_stream = into_ndjson_stream(stream=_stream) ndjson_health_stream = into_ndjson_stream(health_stream)
return NDJsonStreamingResponse(health_stream) return NDJsonStreamingResponse(ndjson_health_stream)

View File

@ -1,21 +1,29 @@
from typing import List, Self from typing import List, Self
from core.models import NbeSchema from core.models import NbeSchema
from node.models.blocks import Block, Header from core.types import HexBytes
from node.models.transactions import Transaction from models.block import Block
from models.header.proof_of_leadership import ProofOfLeadership
from models.transactions.transaction import Transaction
class BlockRead(NbeSchema): class BlockRead(NbeSchema):
id: int id: int
hash: HexBytes
parent_block_hash: HexBytes
slot: int slot: int
header: Header block_root: HexBytes
proof_of_leadership: ProofOfLeadership
transactions: List[Transaction] transactions: List[Transaction]
@classmethod @classmethod
def from_block(cls, block: Block) -> Self: def from_block(cls, block: Block) -> Self:
return cls( return cls(
id=block.id, id=block.id,
slot=block.header.slot, hash=block.hash,
header=block.header, parent_block_hash=block.parent_block,
slot=block.slot,
block_root=block.block_root,
proof_of_leadership=block.proof_of_leadership,
transactions=block.transactions, transactions=block.transactions,
) )

View File

@ -1,14 +1,21 @@
from typing import List, Self from typing import List, Self
from core.models import NbeSchema from core.models import NbeSchema
from node.models.transactions import Gas, LedgerTransaction, Transaction from core.types import HexBytes
from models.aliases import Gas
from models.transactions.notes import Note
from models.transactions.operations.operation import Operation
from models.transactions.transaction import Transaction
class TransactionRead(NbeSchema): class TransactionRead(NbeSchema):
id: int id: int
block_id: int block_id: int
operations: List[str] hash: HexBytes
ledger_transaction: LedgerTransaction operations: List[Operation]
inputs: List[HexBytes]
outputs: List[Note]
proof: HexBytes
execution_gas_price: Gas execution_gas_price: Gas
storage_gas_price: Gas storage_gas_price: Gas
@ -16,9 +23,12 @@ class TransactionRead(NbeSchema):
def from_transaction(cls, transaction: Transaction) -> Self: def from_transaction(cls, transaction: Transaction) -> Self:
return cls( return cls(
id=transaction.id, id=transaction.id,
block_id=transaction.block_id, block_id=transaction.block.id,
hash=transaction.hash,
operations=transaction.operations, operations=transaction.operations,
ledger_transaction=transaction.ledger_transaction, inputs=transaction.inputs,
outputs=transaction.outputs,
proof=transaction.proof,
execution_gas_price=transaction.execution_gas_price, execution_gas_price=transaction.execution_gas_price,
storage_gas_price=transaction.storage_gas_price, storage_gas_price=transaction.storage_gas_price,
) )

View File

@ -1,35 +1,38 @@
from http.client import NOT_FOUND from http.client import NOT_FOUND
from typing import TYPE_CHECKING, AsyncIterator, List, Optional from typing import TYPE_CHECKING, AsyncIterator, List
from fastapi import Path, Query from fastapi import Path, Query
from rusty_results import Empty, Option, Some
from starlette.responses import JSONResponse, Response from starlette.responses import JSONResponse, Response
from api.streams import into_ndjson_stream from api.streams import into_ndjson_stream
from api.v1.serializers.transactions import TransactionRead from api.v1.serializers.transactions import TransactionRead
from core.api import NBERequest, NDJsonStreamingResponse from core.api import NBERequest, NDJsonStreamingResponse
from node.models.transactions import Transaction from models.transactions.transaction import Transaction
if TYPE_CHECKING: if TYPE_CHECKING:
from core.app import NBE from core.app import NBE
async def _updates_stream( async def _get_transactions_stream_serialized(
app: "NBE", latest_transaction: Optional[Transaction] app: "NBE", transaction_from: Option[Transaction]
) -> AsyncIterator[List[TransactionRead]]: ) -> AsyncIterator[List[TransactionRead]]:
_stream = app.state.transaction_repository.updates_stream(transaction_from=latest_transaction) _stream = app.state.transaction_repository.updates_stream(transaction_from)
async for transactions in _stream: async for transactions in _stream:
yield [TransactionRead.from_transaction(transaction) for transaction in transactions] yield [TransactionRead.from_transaction(transaction) for transaction in transactions]
async def stream(request: NBERequest, prefetch_limit: int = Query(0, alias="prefetch-limit", ge=0)) -> Response: async def stream(request: NBERequest, prefetch_limit: int = Query(0, alias="prefetch-limit", ge=0)) -> Response:
latest_transactions: List[Transaction] = await request.app.state.transaction_repository.get_latest( latest_transactions: List[Transaction] = await request.app.state.transaction_repository.get_latest(
limit=prefetch_limit, ascending=True, preload_relationships=True prefetch_limit, ascending=True, preload_relationships=True
) )
latest_transaction = latest_transactions[-1] if latest_transactions else None latest_transaction = Some(latest_transactions[-1]) if latest_transactions else Empty()
latest_transaction_read = [TransactionRead.from_transaction(transaction) for transaction in latest_transactions] bootstrap_transactions = [TransactionRead.from_transaction(transaction) for transaction in latest_transactions]
updates_stream: AsyncIterator[List[TransactionRead]] = _updates_stream(request.app, latest_transaction) transactions_stream: AsyncIterator[List[TransactionRead]] = _get_transactions_stream_serialized(
ndjson_transactions_stream = into_ndjson_stream(stream=updates_stream, bootstrap_data=latest_transaction_read) request.app, latest_transaction
)
ndjson_transactions_stream = into_ndjson_stream(transactions_stream, bootstrap_data=bootstrap_transactions)
return NDJsonStreamingResponse(ndjson_transactions_stream) return NDJsonStreamingResponse(ndjson_transactions_stream)

View File

@ -28,6 +28,10 @@ class NbeSchema(NdjsonMixin, BaseModel):
return self.model_dump_json() return self.model_dump_json()
class NbeSerializer(NbeSchema):
pass
# --- SQLModel --- # --- SQLModel ---

View File

@ -54,6 +54,9 @@ class PydanticJsonColumn(TypeDecorator, Generic[T]):
cache_ok = True cache_ok = True
def __init__(self, model: type[T], *, many: bool = False) -> None: def __init__(self, model: type[T], *, many: bool = False) -> None:
"""
The passed model must be a non-list type. To specify a list of models, pass `many=True`.
"""
super().__init__() super().__init__()
self.many = many self.many = many
self._ta = _TypeAdapter(List[model] if many else model) self._ta = _TypeAdapter(List[model] if many else model)
@ -74,7 +77,8 @@ class PydanticJsonColumn(TypeDecorator, Generic[T]):
model_value = self._ta.validate_python(value) model_value = self._ta.validate_python(value)
# Dump to plain Python (dict/list) for the JSON column # Dump to plain Python (dict/list) for the JSON column
return self._ta.dump_python(model_value, mode="json") plain = self._ta.dump_python(model_value, mode="json")
return plain
# DB -> Python (on SELECT) # DB -> Python (on SELECT)
def process_result_value(self, value: Any, _dialect): def process_result_value(self, value: Any, _dialect):

13
src/core/types.py Normal file
View File

@ -0,0 +1,13 @@
from typing import Annotated
from pydantic import AfterValidator, BeforeValidator, PlainSerializer
def hexify(data: bytes) -> str:
    """Render raw bytes as a lowercase hexadecimal string for JSON output."""
    return "".join(f"{byte:02x}" for byte in data)
# Bytes that stay raw `bytes` in Python but serialize to a hex string whenever
# Pydantic dumps to JSON (`when_used="json"` leaves Python-mode dumps untouched).
HexBytes = Annotated[
    bytes,
    PlainSerializer(hexify, return_type=str, when_used="json"),
]

View File

@ -1,27 +1,26 @@
import logging
from asyncio import sleep from asyncio import sleep
from typing import AsyncIterator, List, Optional from typing import AsyncIterator, List
from rusty_results import Empty, Option, Some from rusty_results import Empty, Option, Some
from sqlalchemy import Result, Select from sqlalchemy import Result, Select
from sqlalchemy.orm import aliased from sqlalchemy.orm import aliased
from sqlmodel import select from sqlmodel import select
from core.db import jget, order_by_json
from db.clients import DbClient from db.clients import DbClient
from node.models.blocks import Block from models.block import Block
def get_latest_statement(limit: int, latest_ascending: bool = True) -> Select: def get_latest_statement(limit: int, *, output_ascending: bool = True) -> Select:
# Fetch latest # Fetch the latest N blocks in descending slot order
descending = order_by_json(Block.header, "$.slot", into_type="int", descending=True) base = select(Block).order_by(Block.slot.desc(), Block.id.desc()).limit(limit)
inner = select(Block).order_by(descending, Block.id.desc()).limit(limit).subquery() if not output_ascending:
return base
# Reorder # Reorder for output
inner = base.subquery()
latest = aliased(Block, inner) latest = aliased(Block, inner)
latest_order = order_by_json(latest.header, "$.slot", into_type="int", descending=(not latest_ascending)) return select(latest).options().order_by(latest.slot.asc(), latest.id.asc()) # type: ignore[arg-type]
id_order = latest.id.asc() if latest_ascending else latest.id.desc()
statement = select(latest).order_by(latest_order, id_order) # type: ignore[arg-type]
return statement
class BlockRepository: class BlockRepository:
@ -37,51 +36,66 @@ class BlockRepository:
session.add_all(list(blocks)) session.add_all(list(blocks))
session.commit() session.commit()
async def get_latest(self, limit: int, *, ascending: bool = True) -> List[Block]:
statement = get_latest_statement(limit, ascending)
with self.client.session() as session:
results: Result[Block] = session.exec(statement)
return results.all()
async def get_by_id(self, block_id: int) -> Option[Block]: async def get_by_id(self, block_id: int) -> Option[Block]:
statement = select(Block).where(Block.id == block_id) statement = select(Block).where(Block.id == block_id)
with self.client.session() as session: with self.client.session() as session:
result: Result[Block] = session.exec(statement) result: Result[Block] = session.exec(statement)
if (block := result.first()) is not None: if (block := result.one_or_none()) is not None:
return Some(block)
else:
return Empty()
async def get_by_hash(self, block_hash: str) -> Option[Block]:
statement = select(Block).where(Block.hash == block_hash)
with self.client.session() as session:
result: Result[Block] = session.exec(statement)
if (block := result.one_or_none()) is not None:
return Some(block)
else:
return Empty()
async def get_latest(self, limit: int, *, ascending: bool = True) -> List[Block]:
if limit == 0:
return []
statement = get_latest_statement(limit, output_ascending=ascending)
with self.client.session() as session:
results: Result[Block] = session.exec(statement)
b = results.all()
return b
async def get_earliest(self) -> Option[Block]:
statement = select(Block).order_by(Block.slot.asc()).limit(1)
with self.client.session() as session:
results: Result[Block] = session.exec(statement)
if (block := results.one_or_none()) is not None:
return Some(block) return Some(block)
else: else:
return Empty() return Empty()
async def updates_stream( async def updates_stream(
self, block_from: Optional[Block], *, timeout_seconds: int = 1 self, block_from: Option[Block], *, timeout_seconds: int = 1
) -> AsyncIterator[List[Block]]: ) -> AsyncIterator[List[Block]]:
# FIXME slot_cursor: int = block_from.map(lambda block: block.slot).unwrap_or(0)
slot_cursor = block_from.slot + 1 if block_from is not None else 0 id_cursor: int = block_from.map(lambda block: block.id + 1).unwrap_or(0)
block_slot_expression = jget(Block.header, "$.slot", into_type="int")
order = order_by_json(Block.header, "$.slot", into_type="int", descending=False)
while True: while True:
where_clause = block_slot_expression >= slot_cursor statement = (
statement = select(Block).where(where_clause).order_by(order) select(Block)
.where(Block.slot >= slot_cursor, Block.id >= id_cursor)
.order_by(Block.slot.asc(), Block.id.asc())
)
with self.client.session() as session: with self.client.session() as session:
blocks: List[Block] = session.exec(statement).all() blocks: List[Block] = session.exec(statement).all()
if len(blocks) > 0: if len(blocks) > 0:
slot_cursor = blocks[-1].slot + 1 slot_cursor = blocks[-1].slot
id_cursor = blocks[-1].id + 1
yield blocks yield blocks
else: else:
await sleep(timeout_seconds) await sleep(timeout_seconds)
async def get_earliest(self) -> Option[Block]:
order = order_by_json(Block.header, "$.slot", into_type="int", descending=False)
statement = select(Block).order_by(order).limit(1)
with self.client.session() as session:
results: Result[Block] = session.exec(statement)
if (block := results.first()) is not None:
return Some(block)
else:
return Empty()

View File

@ -1,39 +1,32 @@
import logging
from asyncio import sleep from asyncio import sleep
from typing import AsyncIterator, Iterable, List, Optional from typing import AsyncIterator, List
from rusty_results import Empty, Option, Some from rusty_results import Empty, Option, Some
from sqlalchemy import Result, Select from sqlalchemy import Result, Select
from sqlalchemy.orm import aliased, selectinload from sqlalchemy.orm import aliased, selectinload
from sqlmodel import select from sqlmodel import select
from core.db import jget, order_by_json
from db.clients import DbClient from db.clients import DbClient
from node.models.transactions import Transaction from models.block import Block
from models.transactions.transaction import Transaction
def get_latest_statement( def get_latest_statement(limit: int, *, output_ascending: bool, preload_relationships: bool) -> Select:
limit: int, output_ascending: bool = True, preload_relationships: bool = False, **kwargs # Join with Block to order by Block's slot and fetch the latest N transactions in descending order
) -> Select: base = (
from node.models.blocks import Block select(Transaction, Block.slot.label("block__slot"), Block.id.label("block__id"))
.join(Block, Transaction.block_id == Block.id)
# Join with Block to order by Block's slot .order_by(Block.slot.desc(), Block.id.desc(), Transaction.id.desc())
slot_expr = jget(Block.header, "$.slot", into_type="int").label("slot")
slot_desc = order_by_json(Block.header, "$.slot", into_type="int", descending=True)
inner = (
select(Transaction, slot_expr)
.join(Block, Transaction.block_id == Block.id, isouter=False)
.order_by(slot_desc, Block.id.desc())
.limit(limit) .limit(limit)
.subquery()
) )
if not output_ascending:
return base
# Reorder # Reorder for output
inner = base.subquery()
latest = aliased(Transaction, inner) latest = aliased(Transaction, inner)
output_slot_order = inner.c.slot.asc() if output_ascending else inner.c.slot.desc() statement = select(latest).order_by(inner.c.block__slot.asc(), inner.c.block__id.asc(), latest.id.asc())
output_id_order = (
latest.id.asc() if output_ascending else latest.id.desc()
) # TODO: Double check it's Transaction.id
statement = select(latest).order_by(output_slot_order, output_id_order)
if preload_relationships: if preload_relationships:
statement = statement.options(selectinload(latest.block)) statement = statement.options(selectinload(latest.block))
return statement return statement
@ -43,54 +36,70 @@ class TransactionRepository:
def __init__(self, client: DbClient): def __init__(self, client: DbClient):
self.client = client self.client = client
async def create(self, transaction: Iterable[Transaction]) -> None: async def create(self, *transaction: Transaction) -> None:
with self.client.session() as session: with self.client.session() as session:
session.add_all(transaction) session.add_all(list(transaction))
session.commit() session.commit()
async def get_latest(self, limit: int, *, ascending: bool = True, **kwargs) -> List[Transaction]:
statement = get_latest_statement(limit, ascending, **kwargs)
with self.client.session() as session:
results: Result[Transaction] = session.exec(statement)
return results.all()
async def get_by_id(self, transaction_id: int) -> Option[Transaction]: async def get_by_id(self, transaction_id: int) -> Option[Transaction]:
statement = select(Transaction).where(Transaction.id == transaction_id) statement = select(Transaction).where(Transaction.id == transaction_id)
with self.client.session() as session: with self.client.session() as session:
result: Result[Transaction] = session.exec(statement) result: Result[Transaction] = session.exec(statement)
if (transaction := result.first()) is not None: if (transaction := result.one_or_none()) is not None:
return Some(transaction) return Some(transaction)
else: else:
return Empty() return Empty()
async def updates_stream( async def get_by_hash(self, transaction_hash: str) -> Option[Transaction]:
self, transaction_from: Optional[Transaction], *, timeout_seconds: int = 1 statement = select(Transaction).where(Transaction.hash == transaction_hash)
) -> AsyncIterator[List[Transaction]]:
from node.models.blocks import Block
slot_cursor: int = transaction_from.block.slot + 1 if transaction_from is not None else 0 with self.client.session() as session:
slot_expression = jget(Block.header, "$.slot", into_type="int") result: Result[Transaction] = session.exec(statement)
slot_order = order_by_json(Block.header, "$.slot", into_type="int", descending=False) if (transaction := result.one_or_none()) is not None:
return Some(transaction)
else:
return Empty()
async def get_latest(
self, limit: int, *, ascending: bool = False, preload_relationships: bool = False
) -> List[Transaction]:
if limit == 0:
return []
statement = get_latest_statement(limit, output_ascending=ascending, preload_relationships=preload_relationships)
with self.client.session() as session:
results: Result[Transaction] = session.exec(statement)
return results.all()
async def updates_stream(
self, transaction_from: Option[Transaction], *, timeout_seconds: int = 1
) -> AsyncIterator[List[Transaction]]:
slot_cursor = transaction_from.map(lambda transaction: transaction.block.slot).unwrap_or(0)
block_id_cursor = transaction_from.map(lambda transaction: transaction.block.id).unwrap_or(0)
transaction_id_cursor = transaction_from.map(lambda transaction: transaction.id + 1).unwrap_or(0)
while True: while True:
where_clause_slot = slot_expression >= slot_cursor
where_clause_id = Transaction.id > transaction_from.id if transaction_from is not None else True
statement = ( statement = (
select(Transaction) select(Transaction, Block.slot, Block.id)
.options(selectinload(Transaction.block)) .options(selectinload(Transaction.block))
.join(Block, Transaction.block_id == Block.id) .join(Block, Transaction.block_id == Block.id)
.where(where_clause_slot, where_clause_id) .where(
.order_by(slot_order, Block.id.asc(), Transaction.id.asc()) Block.slot >= slot_cursor,
Block.id >= block_id_cursor,
Transaction.id >= transaction_id_cursor,
)
.order_by(Block.slot.asc(), Block.id.asc(), Transaction.id.asc())
) )
with self.client.session() as session: with self.client.session() as session:
transactions: List[Transaction] = session.exec(statement).all() transactions: List[Transaction] = session.exec(statement).all()
if len(transactions) > 0: if len(transactions) > 0:
slot_cursor = transactions[-1].block.slot + 1 slot_cursor = transactions[-1].block.slot
block_id_cursor = transactions[-1].block.id
transaction_id_cursor = transactions[-1].id + 1
yield transactions yield transactions
else: else:
await sleep(timeout_seconds) await sleep(timeout_seconds)

4
src/models/__init__.py Normal file
View File

@ -0,0 +1,4 @@
from .block import Block
from .header import ProofOfLeadership
from .health import Health
from .transactions import Transaction

4
src/models/aliases.py Normal file
View File

@ -0,0 +1,4 @@
from core.types import HexBytes
# "Fr" values are transported as hex-encoded bytes — presumably field elements; TODO confirm.
Fr = HexBytes
# Gas prices/amounts are plain integers.
Gas = int

78
src/models/block.py Normal file
View File

@ -0,0 +1,78 @@
import logging
import os
import random
from typing import TYPE_CHECKING, Any, List, Self
from pydantic.config import ExtraValues
from sqlalchemy import Column
from sqlmodel import Field, Relationship
from core.models import TimestampedModel
from core.sqlmodel import PydanticJsonColumn
from core.types import HexBytes
from models.header.proof_of_leadership import ProofOfLeadership
if TYPE_CHECKING:
from models.transactions.transaction import Transaction
logger = logging.getLogger(__name__)
def _should_randomize_transactions():
is_debug = os.getenv("DEBUG", "False").lower() == "true"
is_debug__randomize_transactions = os.getenv("DEBUG__RANDOMIZE_TRANSACTIONS", "False").lower() == "true"
return is_debug and is_debug__randomize_transactions
class Block(TimestampedModel, table=True):
    """A persisted chain block: flattened header columns plus its transactions."""

    __tablename__ = "block"

    # --- Columns --- #
    hash: HexBytes = Field(nullable=False, unique=True)  # block hash, unique per row
    parent_block: HexBytes = Field(nullable=False)  # hash of the parent block
    slot: int = Field(nullable=False)
    block_root: HexBytes = Field(nullable=False)
    # Stored as a JSON column holding the serialized ProofOfLeadership model.
    proof_of_leadership: ProofOfLeadership = Field(
        sa_column=Column(PydanticJsonColumn(ProofOfLeadership), nullable=False)
    )

    # --- Relationships --- #
    transactions: List["Transaction"] = Relationship(
        back_populates="block",
        sa_relationship_kwargs={"lazy": "selectin"},
    )

    def __str__(self) -> str:
        return f"Block(slot={self.slot})"

    def __repr__(self) -> str:
        # Bugfix: the flattened model exposes `parent_block` directly; the old
        # `self.header["parent_block"]` lookup would raise AttributeError since
        # this model no longer has a `header` attribute.
        return f"<Block(id={self.id}, created_at={self.created_at}, slot={self.slot}, parent={self.parent_block})>"

    def with_transactions(self, transactions: List["Transaction"]) -> Self:
        """Attach `transactions` to this block and return `self` for fluent chaining."""
        self.transactions = transactions
        return self

    @classmethod
    def model_validate_json(
        cls,
        json_data: str | bytes | bytearray,
        *,
        strict: bool | None = None,
        extra: ExtraValues | None = None,
        context: Any | None = None,
        by_alias: bool | None = None,
        by_name: bool | None = None,
    ) -> Self:
        """Deserialize a Block; in debug mode, replace its transactions with random ones."""
        self = super().model_validate_json(
            json_data, strict=strict, extra=extra, context=context, by_alias=by_alias, by_name=by_name
        )
        if _should_randomize_transactions():
            # Local import to avoid a circular import with the Transaction model.
            from models.transactions.transaction import Transaction

            logger.debug("DEBUG and DEBUG__RANDOMIZE_TRANSACTIONS are enabled, randomizing Block's transactions.")
            # Bugfix: `random.randint(0, 1) <= 0.3` was a 50% coin flip (randint
            # yields the int 0 or 1); comparing against a fraction only makes
            # sense for `random.random()`, giving the intended ~30% empty-block rate.
            n_transactions = 0 if random.random() <= 0.3 else random.randint(1, 5)
            self.transactions = [Transaction.from_random() for _ in range(n_transactions)]
        return self

View File

@ -0,0 +1,2 @@
from .proof_of_leadership import ProofOfLeadership
from .public import Public

View File

@ -0,0 +1,26 @@
from enum import Enum
from typing import Optional, Union
from core.models import NbeSchema
from core.types import HexBytes
from models.header.public import Public
class ProofOfLeadershipType(Enum):
    # Discriminator for proof-of-leadership variants; Groth16 is the only one so far.
    GROTH16 = "GROTH16"
class NbeProofOfLeadership(NbeSchema):
    """Base schema for proof-of-leadership variants; `type` discriminates."""

    type: ProofOfLeadershipType
class Groth16ProofOfLeadership(NbeProofOfLeadership):
    """Groth16 proof-of-leadership payload."""

    type: ProofOfLeadershipType = ProofOfLeadershipType.GROTH16
    entropy_contribution: HexBytes
    leader_key: HexBytes
    proof: HexBytes
    # Public inputs of the proof; optional — presumably absent for some proofs (TODO confirm).
    public: Optional[Public]
    voucher_cm: HexBytes
ProofOfLeadership = Union[Groth16ProofOfLeadership]

View File

@ -0,0 +1,10 @@
from core.models import NbeSchema
from core.types import HexBytes
class Public(NbeSchema):
    """Public inputs accompanying a proof of leadership."""

    aged_root: HexBytes
    epoch_nonce: HexBytes
    latest_root: HexBytes
    slot: int
    total_stake: int

11
src/models/health.py Normal file
View File

@ -0,0 +1,11 @@
from core.models import NbeSchema
class Health(NbeSchema):
    """Node health status flag."""

    # True when the node reports itself healthy.
    healthy: bool

    def __str__(self):
        if self.healthy:
            return "Healthy"
        return "Unhealthy"

    def __repr__(self):
        return f"<Health(healthy={self.healthy})>"

View File

@ -0,0 +1,3 @@
from .notes import Note
from .operations import Operation
from .transaction import Transaction

View File

@ -0,0 +1,7 @@
from core.models import NbeSchema
from core.types import HexBytes
class Note(NbeSchema):
    """A transaction output note: a value bound to a public key."""

    value: int
    public_key: HexBytes

View File

@ -0,0 +1,3 @@
from .contents import OperationContent
from .operation import Operation
from .proofs import OperationProof

View File

@ -0,0 +1,80 @@
from enum import Enum
from typing import List, Optional
from core.models import NbeSchema
from core.types import HexBytes
class ContentType(Enum):
    """Discriminator for the operation-content variants below."""

    CHANNEL_INSCRIBE = "ChannelInscribe"
    CHANNEL_BLOB = "ChannelBlob"
    CHANNEL_SET_KEYS = "ChannelSetKeys"
    SDP_DECLARE = "SDPDeclare"
    SDP_WITHDRAW = "SDPWithdraw"
    SDP_ACTIVE = "SDPActive"
    LEADER_CLAIM = "LeaderClaim"
class NbeContent(NbeSchema):
    """Base schema for operation contents; `type` discriminates the variant."""

    type: ContentType
class ChannelInscribe(NbeContent):
    """Inscribe data onto a channel."""

    type: ContentType = ContentType.CHANNEL_INSCRIBE
    channel_id: HexBytes
    inscription: HexBytes
    parent: HexBytes
    signer: HexBytes
class ChannelBlob(NbeContent):
    """Publish a blob to a channel."""

    type: ContentType = ContentType.CHANNEL_BLOB
    channel: HexBytes
    blob: HexBytes
    blob_size: int
    da_storage_gas_price: int
    parent: HexBytes
    signer: HexBytes
class ChannelSetKeys(NbeContent):
    """Replace the key set of a channel."""

    type: ContentType = ContentType.CHANNEL_SET_KEYS
    channel: HexBytes
    # Consistency fix: every other byte field in this module serializes via
    # HexBytes; plain `bytes` would not hex-encode on JSON dumps.
    # NOTE(review): confirm keys are raw key bytes, not text.
    keys: List[HexBytes]
class SDPDeclareServiceType(Enum):
    """Service type being declared — presumably blend node / data availability; TODO confirm."""

    BN = "BN"
    DA = "DA"
class SDPDeclare(NbeContent):
    """Declare a service provider with its locators and identifiers."""

    type: ContentType = ContentType.SDP_DECLARE
    service_type: SDPDeclareServiceType
    # Consistency fix: use HexBytes so locators hex-encode on JSON dumps like
    # the other byte fields. NOTE(review): confirm locators are raw bytes, not text.
    locators: List[HexBytes]
    provider_id: HexBytes
    zk_id: HexBytes
    locked_note_id: HexBytes
class SDPWithdraw(NbeContent):
    """Withdraw a prior service declaration."""

    type: ContentType = ContentType.SDP_WITHDRAW
    declaration_id: HexBytes
    nonce: HexBytes
class SDPActive(NbeContent):
    """Mark a declared service provider as active."""

    type: ContentType = ContentType.SDP_ACTIVE
    declaration_id: HexBytes
    nonce: HexBytes
    # Consistency fix: hex-encode on JSON dumps like the other byte fields.
    # NOTE(review): confirm metadata is opaque bytes, not text.
    metadata: Optional[HexBytes]
class LeaderClaim(NbeContent):
    """Claim leader rewards."""

    type: ContentType = ContentType.LEADER_CLAIM
    rewards_root: HexBytes
    voucher_nullifier: HexBytes
    mantle_tx_hash: HexBytes
OperationContent = ChannelInscribe | ChannelBlob | ChannelSetKeys | SDPDeclare | SDPWithdraw | SDPActive | LeaderClaim

View File

@ -0,0 +1,8 @@
from core.models import NbeSchema
from models.transactions.operations.contents import NbeContent, OperationContent
from models.transactions.operations.proofs import OperationProof
class Operation(NbeSchema):
    """A single transaction operation: a typed content payload plus its proof."""

    # Bugfix: annotate with the `OperationContent` union rather than the
    # `NbeContent` base class — validating stored JSON against the base class
    # would coerce every variant down to `NbeContent` and silently drop its
    # variant-specific fields when operations round-trip through the JSON column
    # (`proof` already uses the union pattern via `OperationProof`).
    content: OperationContent
    proof: OperationProof

View File

@ -0,0 +1,33 @@
from enum import Enum
from core.models import NbeSchema
from core.types import HexBytes
class SignatureType(Enum):
    """Discriminator for operation-proof signature variants."""

    ED25519 = "Ed25519"
    ZK = "Zk"
    ZK_AND_ED25519 = "ZkAndEd25519"
class NbeSignature(NbeSchema):
    """Base schema for signatures; `type` discriminates the variant."""

    type: SignatureType
class Ed25519Signature(NbeSignature):
    """Plain Ed25519 signature."""

    type: SignatureType = SignatureType.ED25519
    signature: HexBytes
class ZkSignature(NbeSignature):
    """Zero-knowledge signature."""

    type: SignatureType = SignatureType.ZK
    signature: HexBytes
class ZkAndEd25519Signature(NbeSignature):
    """Combined ZK + Ed25519 signature."""

    type: SignatureType = SignatureType.ZK_AND_ED25519
    zk_signature: HexBytes
    ed25519_signature: HexBytes
OperationProof = Ed25519Signature | ZkSignature | ZkAndEd25519Signature

View File

@ -0,0 +1,47 @@
import logging
from typing import List, Optional
from sqlalchemy import JSON, Column
from sqlmodel import Field, Relationship
from core.models import TimestampedModel
from core.sqlmodel import PydanticJsonColumn
from core.types import HexBytes
from models.aliases import Fr, Gas
from models.block import Block
from models.transactions.notes import Note
from models.transactions.operations.operation import Operation
logger = logging.getLogger(__name__)
class Transaction(TimestampedModel, table=True):
    """DB model for a transaction; operations/inputs/outputs are stored as JSON columns."""

    __tablename__ = "transaction"

    # --- Columns --- #
    # NOTE(review): Optional + nullable=False — None is only valid before flush; confirm intended.
    block_id: Optional[int] = Field(default=None, foreign_key="block.id", nullable=False)
    hash: HexBytes = Field(nullable=False, unique=True)
    operations: List[Operation] = Field(
        default_factory=list, sa_column=Column(PydanticJsonColumn(Operation, many=True), nullable=False)
    )
    inputs: List[Fr] = Field(default_factory=list, sa_column=Column(PydanticJsonColumn(Fr, many=True), nullable=False))
    outputs: List[Note] = Field(
        default_factory=list, sa_column=Column(PydanticJsonColumn(Note, many=True), nullable=False)
    )
    # Proof blob: length pinned to exactly 128 bytes.
    proof: HexBytes = Field(min_length=128, max_length=128, nullable=False)
    execution_gas_price: Gas
    storage_gas_price: Gas

    # --- Relationships --- #
    # selectin loading: transactions fetched with their block in one extra query.
    block: Optional[Block] = Relationship(
        back_populates="transactions",
        sa_relationship_kwargs={"lazy": "selectin"},
    )

    def __str__(self) -> str:
        return f"Transaction({self.operations})"

    def __repr__(self) -> str:
        return f"<Transaction(id={self.id}, created_at={self.created_at}, operations={self.operations})>"

View File

@ -1,20 +1,19 @@
from abc import ABC, abstractmethod from abc import ABC, abstractmethod
from typing import AsyncIterator, List from typing import AsyncIterator, List
from node.models.blocks import Block from node.api.serializers.block import BlockSerializer
from node.models.health import Health from node.api.serializers.health import HealthSerializer
from node.models.transactions import Transaction
class NodeApi(ABC): class NodeApi(ABC):
@abstractmethod @abstractmethod
async def get_health_check(self) -> Health: async def get_health(self) -> HealthSerializer:
pass pass
@abstractmethod @abstractmethod
async def get_blocks(self, **kwargs) -> List[Block]: async def get_blocks(self, **kwargs) -> List[BlockSerializer]:
pass pass
@abstractmethod @abstractmethod
async def get_blocks_stream(self) -> AsyncIterator[List[Block]]: async def get_blocks_stream(self) -> AsyncIterator[List[BlockSerializer]]:
pass pass

View File

@ -1,10 +1,12 @@
from asyncio import sleep
from random import choices, random from random import choices, random
from typing import AsyncIterator, List from typing import AsyncIterator, List
from rusty_results import Some
from node.api.base import NodeApi from node.api.base import NodeApi
from node.models.blocks import Block from node.api.serializers.block import BlockSerializer
from node.models.health import Health from node.api.serializers.health import HealthSerializer
from node.models.transactions import Transaction
def get_weighted_amount() -> int: def get_weighted_amount() -> int:
@ -14,15 +16,24 @@ def get_weighted_amount() -> int:
class FakeNodeApi(NodeApi): class FakeNodeApi(NodeApi):
async def get_health_check(self) -> Health: def __init__(self):
self.current_slot: int = 0
async def get_health(self) -> HealthSerializer:
if random() < 0.1: if random() < 0.1:
return Health.from_unhealthy() return HealthSerializer.from_unhealthy()
else: else:
return Health.from_healthy() return HealthSerializer.from_healthy()
async def get_blocks(self) -> List[Block]: async def get_blocks(self, **kwargs) -> List[BlockSerializer]:
return [Block.from_random() for _ in range(1)] n = get_weighted_amount()
assert n >= 1
blocks = [BlockSerializer.from_random() for _ in range(n)]
self.current_slot = max(blocks, key=lambda block: block.slot).slot
return blocks
async def get_blocks_stream(self) -> AsyncIterator[Block]: async def get_blocks_stream(self) -> AsyncIterator[BlockSerializer]:
while True: while True:
yield Block.from_random() yield BlockSerializer.from_random(slot=Some(self.current_slot))
self.current_slot += 1
await sleep(3)

View File

@ -6,9 +6,8 @@ import httpx
import requests import requests
from node.api.base import NodeApi from node.api.base import NodeApi
from node.models.blocks import Block from node.api.serializers.block import BlockSerializer
from node.models.health import Health from node.api.serializers.health import HealthSerializer
from node.models.transactions import Transaction
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
@ -29,24 +28,24 @@ class HttpNodeApi(NodeApi):
def base_url(self): def base_url(self):
return f"{self.protocol}://{self.host}:{self.port}" return f"{self.protocol}://{self.host}:{self.port}"
async def get_health_check(self) -> Health: async def get_health(self) -> HealthSerializer:
url = urljoin(self.base_url, self.ENDPOINT_INFO) url = urljoin(self.base_url, self.ENDPOINT_INFO)
response = requests.get(url, timeout=60) response = requests.get(url, timeout=60)
if response.status_code == 200: if response.status_code == 200:
return Health.from_healthy() return HealthSerializer.from_healthy()
else: else:
return Health.from_unhealthy() return HealthSerializer.from_unhealthy()
async def get_blocks(self, slot_from: int, slot_to: int) -> List[Block]: async def get_blocks(self, slot_from: int, slot_to: int) -> List[BlockSerializer]:
query_string = f"slot_from={slot_from}&slot_to={slot_to}" query_string = f"slot_from={slot_from}&slot_to={slot_to}"
endpoint = urljoin(self.base_url, self.ENDPOINT_BLOCKS) endpoint = urljoin(self.base_url, self.ENDPOINT_BLOCKS)
url = f"{endpoint}?{query_string}" url = f"{endpoint}?{query_string}"
response = requests.get(url, timeout=60) response = requests.get(url, timeout=60)
python_json = response.json() python_json = response.json()
blocks = [Block.model_validate(item) for item in python_json] blocks = [BlockSerializer.model_validate(item) for item in python_json]
return blocks return blocks
async def get_blocks_stream(self) -> AsyncIterator[Block]: async def get_blocks_stream(self) -> AsyncIterator[BlockSerializer]:
url = urljoin(self.base_url, self.ENDPOINT_BLOCKS_STREAM) url = urljoin(self.base_url, self.ENDPOINT_BLOCKS_STREAM)
async with httpx.AsyncClient(timeout=self.timeout) as client: async with httpx.AsyncClient(timeout=self.timeout) as client:
@ -56,6 +55,12 @@ class HttpNodeApi(NodeApi):
async for line in response.aiter_lines(): async for line in response.aiter_lines():
if not line: if not line:
continue continue
block = Block.model_validate_json(line) try:
block = BlockSerializer.model_validate_json(line)
except Exception as e:
import traceback
traceback.print_exc()
raise e
logger.debug(f"Received new block from Node: {block}") logger.debug(f"Received new block from Node: {block}")
yield block yield block

View File

@ -0,0 +1,34 @@
from random import randint
from typing import List, Self
from rusty_results import Empty, Option
from core.models import NbeSerializer
from models.block import Block
from node.api.serializers.header import HeaderSerializer
from node.api.serializers.signed_transaction import SignedTransactionSerializer
from utils.protocols import FromRandom
class BlockSerializer(NbeSerializer, FromRandom):
    """Node-API block: a header plus its signed transactions."""

    header: HeaderSerializer
    transactions: List[SignedTransactionSerializer]

    def into_block(self) -> Block:
        """Convert into the domain `Block`, attaching the converted transactions."""
        transactions = [transaction.into_transaction() for transaction in self.transactions]
        return Block.model_validate(
            {
                "hash": self.header.hash,
                "parent_block": self.header.parent_block,
                "slot": self.header.slot,
                "block_root": self.header.block_root,
                "proof_of_leadership": self.header.proof_of_leadership.into_proof_of_leadership(),
            }
        ).with_transactions(transactions)

    @classmethod
    def from_random(cls, *, slot: Option[int] | None = None) -> Self:
        """Build a random block; `slot` overrides the random slot when Some.

        Fix: compare against None explicitly instead of `slot or Empty()`,
        which depended on the truthiness of `Option` values.
        """
        slot = Empty() if slot is None else slot
        # Coin flip: a single transaction half the time, otherwise 2-5.
        n = 1 if randint(0, 1) == 0 else randint(2, 5)
        transactions = [SignedTransactionSerializer.from_random() for _ in range(n)]
        return cls.model_validate({"header": HeaderSerializer.from_random(slot=slot), "transactions": transactions})

View File

@ -0,0 +1,37 @@
from typing import Annotated
from pydantic import BeforeValidator, PlainSerializer, ValidationError
def bytes_from_intarray(data: list[int]) -> bytes:
    """Deserialize a list of integers (one per byte) into bytes.

    Raises ValueError on non-list input or non-integer items, so pydantic
    reports it as a validation error.
    """
    if not isinstance(data, list):
        raise ValueError(f"Unsupported data type for bytes deserialization. Expected list, got {type(data).__name__}.")
    if any(not isinstance(item, int) for item in data):
        raise ValueError("List items must be integers.")
    return bytes(data)
def bytes_from_hex(data: str) -> bytes:
    """Deserialize a hex string into bytes (ValueError on non-string input)."""
    if isinstance(data, str):
        return bytes.fromhex(data)
    raise ValueError(
        f"Unsupported data type for bytes deserialization. Expected string, got {type(data).__name__}."
    )
def bytes_from_int(data: int) -> bytes:
if not isinstance(data, int):
raise ValueError(
f"Unsupported data type for bytes deserialization. Expected integer, got {type(data).__name__}."
)
return data.to_bytes((data.bit_length() + 7) // 8) # TODO: Ensure endianness is correct.
def bytes_into_hex(data: bytes) -> str:
    """Serialize bytes into a lowercase hex string."""
    return "".join(f"{byte:02x}" for byte in data)
# Pydantic field types: validate from the given wire format on input,
# always serialize back out as a hex string.
BytesFromIntArray = Annotated[bytes, BeforeValidator(bytes_from_intarray), PlainSerializer(bytes_into_hex)]
BytesFromHex = Annotated[bytes, BeforeValidator(bytes_from_hex), PlainSerializer(bytes_into_hex)]
BytesFromInt = Annotated[bytes, BeforeValidator(bytes_from_int), PlainSerializer(bytes_into_hex)]

View File

@ -0,0 +1,34 @@
from random import randint
from typing import Self
from pydantic import Field
from rusty_results import Option, Some
from core.models import NbeSerializer
from node.api.serializers.fields import BytesFromHex
from node.api.serializers.proof_of_leadership import (
ProofOfLeadershipSerializer,
ProofOfLeadershipSerializerField,
)
from utils.protocols import FromRandom
from utils.random import random_hash
class HeaderSerializer(NbeSerializer, FromRandom):
    """Node-API block header; `hash` arrives under the wire alias `id`."""

    hash: BytesFromHex = Field(alias="id", description="Hash id in hex format.")
    parent_block: BytesFromHex = Field(description="Hash in hex format.")
    slot: int = Field(description="Integer in u64 format.")
    block_root: BytesFromHex = Field(description="Hash in hex format.")
    proof_of_leadership: ProofOfLeadershipSerializerField

    @classmethod
    def from_random(cls, *, slot: Option[int]) -> Self:
        """Build a random header; `slot` overrides the random slot when Some."""
        return cls.model_validate(
            {
                "id": random_hash().hex(),
                "parent_block": random_hash().hex(),
                "slot": slot.unwrap_or_else(lambda: randint(0, 10_000)),
                "block_root": random_hash().hex(),
                "proof_of_leadership": ProofOfLeadershipSerializer.from_random(slot=slot),
            }
        )

View File

@ -0,0 +1,19 @@
from typing import Any, Self
from core.models import NbeSerializer
from models.health import Health
class HealthSerializer(NbeSerializer):
    """Node-API health payload, convertible into the domain `Health` model."""

    is_healthy: bool

    def into_health(self) -> Health:
        """Map onto the domain model's `healthy` field."""
        return Health.model_validate({"healthy": self.is_healthy})

    @classmethod
    def _from_flag(cls, flag: bool) -> Self:
        # Shared constructor for the two factory methods below.
        return cls.model_validate({"is_healthy": flag})

    @classmethod
    def from_healthy(cls) -> Self:
        """Build a healthy report."""
        return cls._from_flag(True)

    @classmethod
    def from_unhealthy(cls) -> Self:
        """Build an unhealthy report."""
        return cls._from_flag(False)

View File

@ -0,0 +1,27 @@
from random import randint
from typing import List, Self
from pydantic import Field
from core.models import NbeSerializer
from node.api.serializers.fields import BytesFromIntArray
from node.api.serializers.note import NoteSerializer
from utils.protocols import FromRandom
from utils.random import random_bytes
class LedgerTransactionSerializer(NbeSerializer, FromRandom):
    """Node-API ledger transaction: Fr inputs and note outputs."""

    inputs: List[BytesFromIntArray] = Field(description="Fr integer.")
    outputs: List[NoteSerializer]

    @classmethod
    def from_random(cls) -> Self:
        """Build a random instance; inputs and outputs are each empty half the time."""
        inputs_count = randint(1, 5) if randint(0, 1) == 1 else 0
        outputs_count = randint(1, 5) if randint(0, 1) == 1 else 0
        payload = {
            "inputs": [list(random_bytes(2048)) for _ in range(inputs_count)],
            "outputs": [NoteSerializer.from_random() for _ in range(outputs_count)],
        }
        return cls.model_validate(payload)

View File

@ -0,0 +1,27 @@
from random import randint
from typing import Self
from pydantic import Field
from core.models import NbeSerializer
from models.transactions.notes import Note
from node.api.serializers.fields import BytesFromHex
from utils.protocols import FromRandom
from utils.random import random_bytes
class NoteSerializer(NbeSerializer, FromRandom):
    """Node-API note; the public key arrives under the wire alias `pk`."""

    value: int = Field(description="Integer in u64 format.")
    public_key: BytesFromHex = Field(alias="pk", description="Fr integer.")

    def into_note(self) -> Note:
        """Convert into the domain `Note` model."""
        return Note.model_validate(
            {
                "value": self.value,
                "public_key": self.public_key,
            }
        )

    @classmethod
    def from_random(cls) -> Self:
        """Build a random, schema-valid note."""
        return cls.model_validate({"value": randint(1, 100), "pk": random_bytes(32).hex()})

View File

@ -0,0 +1,233 @@
from abc import ABC, abstractmethod
from enum import Enum
from random import choice, randint
from typing import Annotated, List, Optional, Self, Union
from pydantic import Field
from core.models import NbeSerializer
from models.transactions.operations.contents import (
ChannelBlob,
ChannelInscribe,
ChannelSetKeys,
LeaderClaim,
NbeContent,
SDPActive,
SDPDeclare,
SDPWithdraw,
)
from node.api.serializers.fields import BytesFromHex, BytesFromInt, BytesFromIntArray
from utils.protocols import EnforceSubclassFromRandom
from utils.random import random_bytes
class OperationContentSerializer(NbeSerializer, EnforceSubclassFromRandom, ABC):
    """Base class for operation-content wire formats."""

    @abstractmethod
    def into_operation_content(self) -> NbeContent:
        """Convert this serializer into the corresponding domain content model."""
        raise NotImplementedError
class ChannelInscribeSerializer(OperationContentSerializer):
    """Wire format for a channel-inscribe operation; maps onto `ChannelInscribe`."""

    channel_id: BytesFromIntArray = Field(description="Bytes as a 32-integer array.")
    inscription: BytesFromIntArray = Field(description="Bytes as an integer array.")
    parent: BytesFromIntArray = Field(description="Bytes as a 32-integer array.")
    signer: BytesFromHex = Field(description="Public Key in hex format.")

    def into_operation_content(self) -> ChannelInscribe:
        """Map field-for-field onto the domain model."""
        return ChannelInscribe.model_validate(
            {
                "channel_id": self.channel_id,
                "inscription": self.inscription,
                "parent": self.parent,
                "signer": self.signer,
            }
        )

    @classmethod
    def from_random(cls) -> Self:
        """Build a random, schema-valid instance."""
        return cls.model_validate(
            {
                "channel_id": list(random_bytes(32)),
                "inscription": list(random_bytes(32)),
                "parent": list(random_bytes(32)),
                "signer": random_bytes(32).hex(),
            }
        )
class ChannelBlobSerializer(OperationContentSerializer):
    """Wire format for a channel-blob operation; maps onto `ChannelBlob`."""

    channel: BytesFromIntArray = Field(description="Bytes as a 32-integer array.")
    blob: BytesFromIntArray = Field(description="Bytes as a 32-integer array.")
    blob_size: int
    da_storage_gas_price: int
    parent: BytesFromIntArray = Field(description="Bytes as a 32-integer array.")
    signer: BytesFromHex = Field(description="Public Key in hex format.")

    def into_operation_content(self) -> ChannelBlob:
        """Map field-for-field onto the domain model."""
        return ChannelBlob.model_validate(
            {
                "channel": self.channel,
                "blob": self.blob,
                "blob_size": self.blob_size,
                "da_storage_gas_price": self.da_storage_gas_price,
                "parent": self.parent,
                "signer": self.signer,
            }
        )

    @classmethod
    def from_random(cls) -> Self:
        """Build a random, schema-valid instance."""
        return cls.model_validate(
            {
                "channel": list(random_bytes(32)),
                "blob": list(random_bytes(32)),
                "blob_size": randint(1, 1_024),
                "da_storage_gas_price": randint(1, 10_000),
                "parent": list(random_bytes(32)),
                "signer": random_bytes(32).hex(),
            }
        )
class ChannelSetKeysSerializer(OperationContentSerializer):
    """Wire format for a channel-set-keys operation; maps onto `ChannelSetKeys`."""

    channel: BytesFromIntArray = Field(description="Bytes as a 32-integer array.")
    keys: List[BytesFromHex] = Field(description="List of Public Keys in hex format.")

    def into_operation_content(self) -> ChannelSetKeys:
        """Map field-for-field onto the domain model."""
        return ChannelSetKeys.model_validate(
            {
                "channel": self.channel,
                "keys": self.keys,
            }
        )

    @classmethod
    def from_random(cls) -> Self:
        """Build a random, schema-valid instance."""
        # `randint(0, 1) <= 0.5` is a coin flip: true only when the draw is 0.
        n = 1 if randint(0, 1) <= 0.5 else randint(1, 5)
        return cls.model_validate(
            {
                "channel": list(random_bytes(32)),
                "keys": [random_bytes(32).hex() for _ in range(n)],
            }
        )
class SDPDeclareServiceType(Enum):
    """Service type of an SDP declaration (values mirror the node's wire format)."""

    BN = "BN"
    DA = "DA"
class SDPDeclareSerializer(OperationContentSerializer):
    """Wire format for an SDP-declare operation; maps onto `SDPDeclare`."""

    service_type: SDPDeclareServiceType
    locators: List[BytesFromHex]
    provider_id: BytesFromIntArray = Field(description="Bytes as an integer array.")
    zk_id: BytesFromHex = Field(description="Fr integer.")
    locked_note_id: BytesFromHex = Field(description="Fr integer.")

    def into_operation_content(self) -> SDPDeclare:
        """Map onto the domain model, passing the enum's raw value."""
        return SDPDeclare.model_validate(
            {
                "service_type": self.service_type.value,
                "locators": self.locators,
                "provider_id": self.provider_id,
                "zk_id": self.zk_id,
                "locked_note_id": self.locked_note_id,
            }
        )

    @classmethod
    def from_random(cls) -> Self:
        """Build a random, schema-valid instance."""
        # `randint(0, 1) <= 0.5` is a coin flip: true only when the draw is 0.
        n = 1 if randint(0, 1) <= 0.5 else randint(1, 5)
        return cls.model_validate(
            {
                "service_type": choice(list(SDPDeclareServiceType)).value,
                "locators": [random_bytes(32).hex() for _ in range(n)],
                "zk_id": random_bytes(32).hex(),
                "provider_id": list(random_bytes(32)),
                "locked_note_id": random_bytes(32).hex(),
            }
        )
class SDPWithdrawSerializer(OperationContentSerializer):
    """Wire format for an SDP-withdraw operation; maps onto `SDPWithdraw`."""

    declaration_id: BytesFromIntArray = Field(description="Bytes as a 32-integer array.")
    nonce: BytesFromInt

    def into_operation_content(self) -> SDPWithdraw:
        """Map field-for-field onto the domain model."""
        return SDPWithdraw.model_validate(
            {
                "declaration_id": self.declaration_id,
                "nonce": self.nonce,
            }
        )

    @classmethod
    def from_random(cls) -> Self:
        """Build a random, schema-valid instance (nonce from 8 random bytes)."""
        return cls.model_validate(
            {
                "declaration_id": list(random_bytes(32)),
                "nonce": int.from_bytes(random_bytes(8)),
            }
        )
class SDPActiveSerializer(OperationContentSerializer):
    """Wire format for an SDP-active operation; maps onto `SDPActive`."""

    declaration_id: BytesFromIntArray = Field(description="Bytes as a 32-integer array.")
    nonce: BytesFromInt
    metadata: Optional[BytesFromIntArray] = Field(description="Bytes as an integer array.")

    def into_operation_content(self) -> SDPActive:
        """Map field-for-field onto the domain model."""
        return SDPActive.model_validate(
            {
                "declaration_id": self.declaration_id,
                "nonce": self.nonce,
                "metadata": self.metadata,
            }
        )

    @classmethod
    def from_random(cls) -> Self:
        """Build a random, schema-valid instance; metadata is absent half the time."""
        return cls.model_validate(
            {
                "declaration_id": list(random_bytes(32)),
                "nonce": int.from_bytes(random_bytes(8)),
                "metadata": None if randint(0, 1) <= 0.5 else list(random_bytes(32)),
            }
        )
class LeaderClaimSerializer(OperationContentSerializer):
    """Wire format for a leader-claim operation; maps onto `LeaderClaim`."""

    rewards_root: BytesFromInt = Field(description="Fr integer.")
    voucher_nullifier: BytesFromInt = Field(description="Fr integer.")
    mantle_tx_hash: BytesFromInt = Field(description="Fr integer.")

    def into_operation_content(self) -> LeaderClaim:
        """Map field-for-field onto the domain model."""
        return LeaderClaim.model_validate(
            {
                "rewards_root": self.rewards_root,
                "voucher_nullifier": self.voucher_nullifier,
                "mantle_tx_hash": self.mantle_tx_hash,
            }
        )

    @classmethod
    def from_random(cls) -> Self:
        """Build a random, schema-valid instance (fields from 8 random bytes each)."""
        return cls.model_validate(
            {
                "rewards_root": int.from_bytes(random_bytes(8)),
                "voucher_nullifier": int.from_bytes(random_bytes(8)),
                "mantle_tx_hash": int.from_bytes(random_bytes(8)),
            }
        )
# Closed union of operation-content wire formats; `left_to_right` tries each
# variant in declaration order during validation.
type OperationContentSerializerVariants = Union[
    ChannelInscribeSerializer,
    ChannelBlobSerializer,
    ChannelSetKeysSerializer,
    SDPDeclareSerializer,
    SDPWithdrawSerializer,
    SDPActiveSerializer,
    LeaderClaimSerializer,
]
OperationContentSerializerField = Annotated[OperationContentSerializerVariants, Field(union_mode="left_to_right")]

View File

@ -0,0 +1,82 @@
from abc import ABC, abstractmethod
from typing import Annotated, Self, Union
from pydantic import Field, RootModel
from core.models import NbeSerializer
from models.transactions.operations.proofs import (
Ed25519Signature,
NbeSignature,
ZkAndEd25519Signature,
ZkSignature,
)
from node.api.serializers.fields import BytesFromHex
from utils.protocols import EnforceSubclassFromRandom
from utils.random import random_bytes
class OperationProofSerializer(EnforceSubclassFromRandom, ABC):
    """Base class for operation-proof wire formats."""

    @abstractmethod
    def into_operation_proof(self) -> NbeSignature:
        """Convert this serializer into the corresponding domain signature model.

        Fix: this is an instance method (all implementations use `self`), so the
        parameter is named `self` — the previous `cls` misleadingly suggested a
        classmethod without the decorator.
        """
        raise NotImplementedError
# TODO: Differentiate between Ed25519SignatureSerializer and ZkSignatureSerializer
class Ed25519SignatureSerializer(OperationProofSerializer, RootModel[str]):
    """A bare hex string on the wire, parsed as an Ed25519 signature."""

    root: BytesFromHex

    def into_operation_proof(self) -> NbeSignature:
        """Wrap the raw bytes in the domain `Ed25519Signature` model."""
        return Ed25519Signature.model_validate(
            {
                "signature": self.root,
            }
        )

    @classmethod
    def from_random(cls, *args, **kwargs) -> Self:
        # 64 random bytes, hex-encoded.
        return cls.model_validate(random_bytes(64).hex())
class ZkSignatureSerializer(OperationProofSerializer, RootModel[str]):
    """A bare hex string on the wire, parsed as a ZK signature."""

    root: BytesFromHex

    def into_operation_proof(self) -> NbeSignature:
        """Wrap the raw bytes in the domain `ZkSignature` model."""
        return ZkSignature.model_validate(
            {
                "signature": self.root,
            }
        )

    @classmethod
    def from_random(cls, *args, **kwargs) -> Self:
        # 32 random bytes, hex-encoded.
        return cls.model_validate(random_bytes(32).hex())
class ZkAndEd25519SignaturesSerializer(OperationProofSerializer, NbeSerializer):
    """Combined proof carrying both a ZK and an Ed25519 signature."""

    zk_signature: BytesFromHex = Field(alias="zk_sig")
    ed25519_signature: BytesFromHex = Field(alias="ed25519_sig")

    def into_operation_proof(self) -> NbeSignature:
        """Wrap both signatures in the domain `ZkAndEd25519Signature` model."""
        return ZkAndEd25519Signature.model_validate(
            {
                "zk_signature": self.zk_signature,
                "ed25519_signature": self.ed25519_signature,
            }
        )

    @classmethod
    def from_random(cls, *args, **kwargs) -> Self:
        """Build a random, schema-valid instance.

        Fix: construct via `cls` (not the hard-coded class name) so subclasses
        get instances of their own type, matching the sibling serializers.
        """
        return cls.model_validate(
            {
                "zk_sig": random_bytes(32).hex(),
                "ed25519_sig": random_bytes(32).hex(),
            }
        )
# Closed union of proof wire formats; `left_to_right` tries each variant in
# declaration order during validation.
OperationProofSerializerVariants = Union[
    Ed25519SignatureSerializer, ZkSignatureSerializer, ZkAndEd25519SignaturesSerializer
]
OperationProofSerializerField = Annotated[OperationProofSerializerVariants, Field(union_mode="left_to_right")]

View File

@ -0,0 +1,74 @@
from abc import ABC, abstractmethod
from typing import Annotated, Optional, Self, Union
from pydantic import Field
from rusty_results import Option
from core.models import NbeSerializer
from models.header.proof_of_leadership import (
Groth16ProofOfLeadership,
ProofOfLeadership,
)
from node.api.serializers.fields import BytesFromHex, BytesFromIntArray
from node.api.serializers.public import PublicSerializer
from utils.protocols import EnforceSubclassFromRandom
from utils.random import random_bytes
class ProofOfLeadershipSerializer(NbeSerializer, EnforceSubclassFromRandom, ABC):
    """Base class for proof-of-leadership wire formats."""

    @abstractmethod
    def into_proof_of_leadership(self) -> ProofOfLeadership:
        """Convert this serializer into the domain `ProofOfLeadership` model."""
        raise NotImplementedError
class Groth16LeaderProofSerializer(ProofOfLeadershipSerializer, NbeSerializer):
    """Node-API Groth16 leadership proof; maps onto `Groth16ProofOfLeadership`."""

    entropy_contribution: BytesFromHex = Field(description="Fr integer.")
    leader_key: BytesFromIntArray = Field(description="Bytes in Integer Array format.")
    proof: BytesFromIntArray = Field(
        description="Bytes in Integer Array format.",
    )
    public: Optional[PublicSerializer] = Field(description="Only received if Node is running in dev mode.")
    voucher_cm: BytesFromHex = Field(description="Hash.")

    def into_proof_of_leadership(self) -> ProofOfLeadership:
        """Convert into the domain model; `public` stays None outside dev mode."""
        public = self.public.into_public() if self.public else None
        return Groth16ProofOfLeadership.model_validate(
            {
                "entropy_contribution": self.entropy_contribution,
                "leader_key": self.leader_key,
                "proof": self.proof,
                "public": public,
                "voucher_cm": self.voucher_cm,
            }
        )

    @classmethod
    def from_random(cls, *, slot: Option[int]) -> Self:
        """Build a random proof; `slot` is forwarded to the random public inputs."""
        return cls.model_validate(
            {
                "entropy_contribution": random_bytes(32).hex(),
                "leader_key": list(random_bytes(32)),
                "proof": list(random_bytes(128)),
                "public": PublicSerializer.from_random(slot),
                "voucher_cm": random_bytes(32).hex(),
            }
        )
# Fake Variant that never resolves to allow union type checking to work
# TODO: Remove this when another Variant is added
# NOTE(review): mid-module import — conventionally this belongs at the top of the file.
from pydantic import BeforeValidator


def _always_fail(_):
    # Unconditionally reject, so this variant never matches during union validation.
    raise ValueError("Never matches.")


_NeverType = Annotated[object, BeforeValidator(_always_fail)]

ProofOfLeadershipVariants = Union[
    Groth16LeaderProofSerializer, _NeverType
]  # TODO: Remove _NeverType when another Variant is added
ProofOfLeadershipSerializerField = Annotated[ProofOfLeadershipVariants, Field(union_mode="left_to_right")]

View File

@ -0,0 +1,42 @@
from random import randint
from typing import Self
from pydantic import Field
from rusty_results import Option
from core.models import NbeSerializer
from models.header.public import Public
from node.api.serializers.fields import BytesFromHex
from utils.protocols import FromRandom
from utils.random import random_bytes
class PublicSerializer(NbeSerializer, FromRandom):
    """Node-API public inputs of a leadership proof; maps onto `Public`."""

    aged_root: BytesFromHex = Field(description="Fr integer in hex format.")
    epoch_nonce: BytesFromHex = Field(description="Fr integer in hex format.")
    latest_root: BytesFromHex = Field(description="Fr integer in hex format.")
    slot: int = Field(description="Integer in u64 format.")
    total_stake: int = Field(description="Integer in u64 format.")

    def into_public(self) -> Public:
        """Map field-for-field onto the domain `Public` model."""
        return Public.model_validate(
            {
                "aged_root": self.aged_root,
                "epoch_nonce": self.epoch_nonce,
                "latest_root": self.latest_root,
                "slot": self.slot,
                "total_stake": self.total_stake,
            }
        )

    @classmethod
    def from_random(cls, slot: Option[int]) -> Self:
        """Build a random instance; `slot` overrides the random slot when Some.

        Fix: the previous implementation validated the model but never returned
        it, so every caller received ``None``.
        """
        return cls.model_validate(
            {
                "aged_root": random_bytes(32).hex(),
                "epoch_nonce": random_bytes(32).hex(),
                "latest_root": random_bytes(32).hex(),
                "slot": slot.unwrap_or(randint(0, 10_000)),
                "total_stake": randint(0, 10_000),
            }
        )

View File

@ -0,0 +1,64 @@
from typing import List, Self
from pydantic import Field
from rusty_results import Option
from core.models import NbeSerializer
from models.transactions.transaction import Transaction
from node.api.serializers.fields import BytesFromHex
from node.api.serializers.proof import (
OperationProofSerializer,
OperationProofSerializerField,
)
from node.api.serializers.transaction import TransactionSerializer
from utils.protocols import FromRandom
from utils.random import random_bytes
class SignedTransactionSerializer(NbeSerializer, FromRandom):
    """Node-API signed transaction: a transaction plus its operation/ledger proofs."""

    transaction: TransactionSerializer = Field(alias="mantle_tx", description="Transaction.")
    operations_proofs: List[OperationProofSerializerField] = Field(
        alias="ops_proofs", description="List of OperationProof. Order should match `Self::transaction::operations`."
    )
    ledger_transaction_proof: BytesFromHex = Field(
        alias="ledger_tx_proof", description="Hash.", min_length=128, max_length=128
    )

    def into_transaction(self) -> Transaction:
        """Zip operation contents with their proofs and build the domain `Transaction`.

        Raises:
            ValueError: if the proof count does not match the content count.
        """
        operations_contents = self.transaction.operations_contents
        if len(operations_contents) != len(self.operations_proofs):
            raise ValueError(
                f"Number of operations ({len(operations_contents)}) does not match number of operation proofs ({len(self.operations_proofs)})."
            )
        operations = [
            {
                "content": content.into_operation_content(),
                "proof": proof.into_operation_proof(),
            }
            for content, proof in zip(operations_contents, self.operations_proofs)
        ]
        ledger_transaction = self.transaction.ledger_transaction
        outputs = [output.into_note() for output in ledger_transaction.outputs]
        return Transaction.model_validate(
            {
                "hash": self.transaction.hash,
                "operations": operations,
                "inputs": ledger_transaction.inputs,
                "outputs": outputs,
                "proof": self.ledger_transaction_proof,
                "execution_gas_price": self.transaction.execution_gas_price,
                "storage_gas_price": self.transaction.storage_gas_price,
            }
        )

    @classmethod
    def from_random(cls) -> Self:
        """Build a random signed transaction with one proof per operation content."""
        transaction = TransactionSerializer.from_random()
        n = len(transaction.operations_contents)
        # NOTE(review): `OperationProofSerializer` is an ABC and does not visibly
        # define `from_random` — confirm `EnforceSubclassFromRandom` provides one.
        operations_proofs = [OperationProofSerializer.from_random() for _ in range(n)]
        return cls.model_validate(
            {"mantle_tx": transaction, "ops_proofs": operations_proofs, "ledger_tx_proof": random_bytes(128).hex()}
        )

View File

@ -0,0 +1,36 @@
from random import randint
from typing import List, Self
from pydantic import Field
from core.models import NbeSerializer
from node.api.serializers.fields import BytesFromHex
from node.api.serializers.ledger_transaction import LedgerTransactionSerializer
from node.api.serializers.operation import (
OperationContentSerializer,
OperationContentSerializerField,
)
from utils.protocols import FromRandom
from utils.random import random_bytes
class TransactionSerializer(NbeSerializer, FromRandom):
    """Node-API (mantle) transaction; wire aliases: `ops`, `ledger_tx`."""

    hash: BytesFromHex = Field(description="Hash id in hex format.")
    operations_contents: List[OperationContentSerializerField] = Field(alias="ops")
    ledger_transaction: LedgerTransactionSerializer = Field(alias="ledger_tx")
    execution_gas_price: int = Field(description="Integer in u64 format.")
    storage_gas_price: int = Field(description="Integer in u64 format.")

    @classmethod
    def from_random(cls) -> Self:
        """Build a random transaction; operations are empty half the time."""
        # `randint(0, 1) <= 0.5` is a coin flip: true only when the draw is 0.
        n = 0 if randint(0, 1) <= 0.5 else randint(1, 5)
        operations_contents = [OperationContentSerializer.from_random() for _ in range(n)]
        return cls.model_validate(
            {
                "hash": random_bytes(32).hex(),
                "ops": operations_contents,
                "ledger_tx": LedgerTransactionSerializer.from_random(),
                "execution_gas_price": randint(1, 10_000),
                "storage_gas_price": randint(1, 10_000),
            }
        )

View File

@ -1,19 +1,20 @@
import logging import logging
from asyncio import TaskGroup, create_task, sleep from asyncio import TaskGroup, create_task, sleep
from contextlib import asynccontextmanager from contextlib import asynccontextmanager
from typing import TYPE_CHECKING, AsyncGenerator, AsyncIterator from typing import TYPE_CHECKING, AsyncGenerator, AsyncIterator, List
from rusty_results import Option from rusty_results import Option
from db.blocks import BlockRepository from db.blocks import BlockRepository
from db.clients import SqliteClient from db.clients import SqliteClient
from db.transaction import TransactionRepository from db.transaction import TransactionRepository
from models.block import Block
from models.transactions.transaction import Transaction
from node.api.fake import FakeNodeApi from node.api.fake import FakeNodeApi
from node.api.http import HttpNodeApi from node.api.http import HttpNodeApi
from node.api.serializers.block import BlockSerializer
from node.manager.docker import DockerModeManager from node.manager.docker import DockerModeManager
from node.manager.fake import FakeNodeManager from node.manager.fake import FakeNodeManager
from node.models.blocks import Block
from node.models.transactions import Transaction
if TYPE_CHECKING: if TYPE_CHECKING:
from core.app import NBE from core.app import NBE
@ -27,8 +28,8 @@ async def node_lifespan(app: "NBE") -> AsyncGenerator[None]:
app.state.node_manager = FakeNodeManager() app.state.node_manager = FakeNodeManager()
# app.state.node_manager = DockerModeManager(app.settings.node_compose_filepath) # app.state.node_manager = DockerModeManager(app.settings.node_compose_filepath)
# app.state.node_api = FakeNodeApi() app.state.node_api = FakeNodeApi()
app.state.node_api = HttpNodeApi(host="127.0.0.1", port=18080) # app.state.node_api = HttpNodeApi(host="127.0.0.1", port=18080)
app.state.db_client = db_client app.state.db_client = db_client
app.state.block_repository = BlockRepository(db_client) app.state.block_repository = BlockRepository(db_client)
@ -88,21 +89,34 @@ async def _gracefully_close_stream(stream: AsyncIterator) -> None:
async def subscribe_to_new_blocks(app: "NBE"): async def subscribe_to_new_blocks(app: "NBE"):
blocks_stream: AsyncGenerator[Block] = app.state.node_api.get_blocks_stream() # type: ignore[call-arg] blocks_stream: AsyncGenerator[BlockSerializer] = app.state.node_api.get_blocks_stream() # type: ignore[call-arg]
try: try:
while app.state.is_running: while app.state.is_running:
try: try:
block = await anext(blocks_stream) # TODO: Use anext's Sentinel? block_serializer = await anext(blocks_stream) # TODO: Use anext's Sentinel?
except StopAsyncIteration:
logger.error("Subscription to the new blocks stream ended unexpectedly. Please restart the node.")
break
except TimeoutError: except TimeoutError:
continue continue
except StopAsyncIteration:
import traceback
traceback.print_exc()
logger.error("Subscription to the new blocks stream ended unexpectedly. Please restart the node.")
break
except Exception as e: except Exception as e:
import traceback
traceback.print_exc()
logger.error(f"Error while fetching new blocks: {e}") logger.error(f"Error while fetching new blocks: {e}")
continue continue
await app.state.block_repository.create(block) try:
block = block_serializer.into_block()
await app.state.block_repository.create(block)
except Exception as e:
import traceback
traceback.print_exc()
logger.error(f"Error while saving new block: {e}")
finally: finally:
await _gracefully_close_stream(blocks_stream) await _gracefully_close_stream(blocks_stream)
@ -146,7 +160,10 @@ async def backfill_blocks(app: "NBE", *, db_hit_interval_seconds: int, batch_siz
logger.info(f"Backfilling blocks from slot {slot_to} down to 0...") logger.info(f"Backfilling blocks from slot {slot_to} down to 0...")
while slot_to > 0: while slot_to > 0:
slot_from = max(0, slot_to - batch_size) slot_from = max(0, slot_to - batch_size)
blocks = await app.state.node_api.get_blocks(slot_from=slot_from, slot_to=slot_to) blocks_serializers: List[BlockSerializer] = await app.state.node_api.get_blocks(
slot_from=slot_from, slot_to=slot_to
)
blocks: List[Block] = [block_serializer.into_block() for block_serializer in blocks_serializers]
logger.debug(f"Backfilling {len(blocks)} blocks from slot {slot_from} to {slot_to}...") logger.debug(f"Backfilling {len(blocks)} blocks from slot {slot_from} to {slot_to}...")
await app.state.block_repository.create(*blocks) await app.state.block_repository.create(*blocks)
slot_to = slot_from - 1 slot_to = slot_from - 1

View File

@ -1,137 +0,0 @@
import logging
import os
import random
from typing import TYPE_CHECKING, Any, List, Self
from pydantic.config import ExtraValues
from pydantic_core.core_schema import computed_field
from sqlalchemy import Column
from sqlmodel import Field, Relationship
from core.models import NbeSchema, TimestampedModel
from core.sqlmodel import PydanticJsonColumn
from utils.random import random_hash
if TYPE_CHECKING:
from node.models.transactions import Transaction
def _is_debug__randomize_transactions():
is_debug = os.getenv("DEBUG", "False").lower() == "true"
is_debug__randomize_transactions = os.getenv("DEBUG__RANDOMIZE_TRANSACTIONS", "False").lower() == "true"
return is_debug and is_debug__randomize_transactions
logger = logging.getLogger(__name__)
class Public(NbeSchema):
    """Public inputs of a proof of leadership (consensus state snapshot)."""

    aged_root: str
    epoch_nonce: str
    latest_root: str
    slot: int
    total_stake: float

    @classmethod
    def from_random(cls, slot: int | None = None) -> "Public":
        """Build a Public with random roots/nonce for the given (or a random) slot.

        Bug fix: the original checked `if slot is not None`, which overwrote a
        caller-supplied slot with a random one and left `slot=None` (invalid for
        the `slot: int` field) when omitted. Randomize only when no slot is given.
        """
        if slot is None:
            slot = random.randint(1, 100)
        return Public(
            aged_root=random_hash(),
            epoch_nonce=random_hash(),
            latest_root=random_hash(),
            slot=slot,
            total_stake=100.0,
        )
class ProofOfLeadership(NbeSchema):
    """Proof that a leader was eligible to produce a block in a given slot."""

    entropy_contribution: str
    leader_key: List[int]
    proof: List[int]
    public: Public
    voucher_cm: str

    @classmethod
    def from_random(cls, slot: int | None = None) -> "ProofOfLeadership":
        """Build a ProofOfLeadership with random contents (test/debug helper).

        Fix: the original bound a lambda to a name (PEP 8 E731) and annotated
        the optional slot as plain `int`; use a local `def` and `int | None`.
        """

        def random_hash_as_list() -> List[int]:
            # 64 random bytes as a list of ints, mirroring random_hash()'s length.
            return [random.randint(0, 255) for _ in range(64)]

        return ProofOfLeadership(
            entropy_contribution=random_hash(),
            leader_key=random_hash_as_list(),
            proof=random_hash_as_list(),
            public=Public.from_random(slot),
            voucher_cm=random_hash(),
        )
class Header(NbeSchema):
    """Block header as reported by the node API."""

    block_root: str
    parent_block: str
    proof_of_leadership: ProofOfLeadership
    slot: int

    @classmethod
    def from_random(cls, slot_from: int = 1, slot_to: int = 100) -> "Header":
        """Build a Header with random hashes and a slot in [slot_from, slot_to]."""
        chosen_slot = random.randint(slot_from, slot_to)
        proof = ProofOfLeadership.from_random(chosen_slot)
        return Header(
            slot=chosen_slot,
            block_root=random_hash(),
            parent_block=random_hash(),
            proof_of_leadership=proof,
        )
class Block(TimestampedModel, table=True):
    """Persisted block: the node-reported header plus its transactions."""

    __tablename__ = "block"

    # The header is stored as a JSON column and rehydrated into a Header model.
    header: Header = Field(sa_column=Column(PydanticJsonColumn(Header), nullable=False))
    transactions: List["Transaction"] = Relationship(
        back_populates="block",
        sa_relationship_kwargs={
            "lazy": "selectin",
            "cascade": "all, delete-orphan",
        },
    )

    @property
    def slot(self) -> int:
        """Slot number, sourced from the header."""
        return self.header.slot

    def __str__(self) -> str:
        return f"Block(slot={self.slot})"

    def __repr__(self) -> str:
        # Bug fix: `header` is a pydantic model, not a dict — the original
        # `self.header["parent_block"]` raised TypeError on every repr().
        return f"<Block(id={self.id}, created_at={self.created_at}, slot={self.slot}, parent={self.header.parent_block})>"

    @classmethod
    def model_validate_json(
        cls,
        json_data: str | bytes | bytearray,
        *,
        strict: bool | None = None,
        extra: ExtraValues | None = None,
        context: Any | None = None,
        by_alias: bool | None = None,
        by_name: bool | None = None,
    ) -> Self:
        """Deserialize a Block; in debug mode, attach random transactions."""
        self = super().model_validate_json(
            json_data, strict=strict, extra=extra, context=context, by_alias=by_alias, by_name=by_name
        )
        if _is_debug__randomize_transactions():
            from node.models.transactions import Transaction

            logger.debug("DEBUG and DEBUG__RANDOMIZE_TRANSACTIONS is enabled, randomizing Block's transactions.")
            # Bug fix: `random.randint(0, 1) <= 0.5` compared an int to a fraction
            # (True only for 0, i.e. a coin flip); use random.random() so the
            # comparison against a probability is meaningful.
            n = 0 if random.random() <= 0.5 else random.randint(1, 10)
            self.transactions = [Transaction.from_random() for _ in range(n)]
        return self

    @classmethod
    def from_random(cls, slot_from: int = 1, slot_to: int = 100) -> "Block":
        """Build a Block with a random header and 0-5 random transactions."""
        # Bug fix: Transaction is imported only under TYPE_CHECKING, so the
        # original raised NameError at runtime; import it locally, matching
        # what model_validate_json does.
        from node.models.transactions import Transaction

        # Bug fix: `random.randint(0, 1) < 0.3` is True only for 0 (a 50% chance),
        # not the presumably intended 30%; use random.random() for a real probability.
        n = 0 if random.random() < 0.3 else random.randint(1, 5)
        transactions = [Transaction.from_random() for _ in range(n)]
        return Block(
            header=Header.from_random(slot_from, slot_to),
            transactions=transactions,
        )

View File

@ -1,19 +0,0 @@
from core.models import IdNbeModel
class Health(IdNbeModel):
    """Boolean health status of the node."""

    healthy: bool

    @classmethod
    def from_healthy(cls) -> "Health":
        """Alternate constructor: a healthy status."""
        return cls(healthy=True)

    @classmethod
    def from_unhealthy(cls) -> "Health":
        """Alternate constructor: an unhealthy status."""
        return cls(healthy=False)

    def __str__(self):
        if self.healthy:
            return "Healthy"
        return "Unhealthy"

    def __repr__(self):
        return "<Health(healthy={})>".format(self.healthy)

View File

@ -1,92 +0,0 @@
import random
from enum import StrEnum
from typing import TYPE_CHECKING, List, Optional
from sqlalchemy import JSON, Column
from sqlmodel import Field, Relationship
from core.models import NbeSchema, TimestampedModel
from core.sqlmodel import PydanticJsonColumn
from utils.random import random_address
if TYPE_CHECKING:
from node.models.blocks import Block
# Type aliases mirroring the node's transaction domain vocabulary.
Value = int  # amount carried by a Note
Fr = int  # field element used as a ledger-transaction input
Gas = float  # gas price (execution / storage)
PublicKey = bytes  # raw public-key bytes identifying a Note's owner
class Operation(StrEnum):
CHANNEL_INSCRIBE = ("ChannelInscribe",) # (InscriptionOp)
CHANNEL_BLOB = ("ChannelBlob",) # (BlobOp)
CHANNEL_SET_KEYS = ("ChannelSetKeys",) # (SetKeysOp)
NATIVE = ("Native",) # (NativeOp)
SDP_DECLARE = ("SDPDeclare",) # (SDPDeclareOp)
SDP_WITHDRAW = ("SDPWithdraw",) # (SDPWithdrawOp)
SDP_ACTIVE = ("SDPActive",) # (SDPActiveOp)
LEADER_CLAIM = ("LeaderClaim",) # (LeaderClaimOp)
class Note(NbeSchema):
    """A transaction output: a value owned by a public key."""

    value: Value
    public_key: PublicKey

    @classmethod
    def from_random(cls) -> "Note":
        """Build a Note with a random value and a random owner key."""
        random_value = random.randint(1, 100)
        random_owner = random_address().encode("utf-8")
        return Note(value=random_value, public_key=random_owner)
class LedgerTransaction(NbeSchema):
    """Ledger-level transaction payload (the node's `Tx`): input field elements and output notes."""

    inputs: List[Fr] = Field(default_factory=list, sa_column=Column(JSON, nullable=False))
    outputs: List[Note] = Field(default_factory=list, sa_column=Column(JSON, nullable=False))

    @classmethod
    def from_random(cls) -> "LedgerTransaction":
        """Build a LedgerTransaction with ten random inputs and ten random outputs."""
        random_inputs = [random.randint(1, 100) for _ in range(10)]
        random_outputs = [Note.from_random() for _ in range(10)]
        return LedgerTransaction(inputs=random_inputs, outputs=random_outputs)
class Transaction(TimestampedModel, table=True):
    """
    MantleTx: the node's mantle transaction, persisted with its operations,
    ledger payload, and gas prices.
    """

    __tablename__ = "transaction"

    # FK to the containing block; indexed for per-block transaction lookups.
    block_id: int = Field(foreign_key="block.id", nullable=False, index=True)
    # Serialized as "ops" on the wire (pydantic alias); stored as a JSON column.
    operations: List[str] = Field(alias="ops", default_factory=list, sa_column=Column(JSON, nullable=False))
    # NOTE(review): default_factory=dict for a LedgerTransaction-typed field looks
    # off — presumably pydantic/PydanticJsonColumn coerces the empty dict; confirm.
    ledger_transaction: LedgerTransaction = Field(
        default_factory=dict, sa_column=Column(PydanticJsonColumn(LedgerTransaction), nullable=False)
    )
    execution_gas_price: Gas
    storage_gas_price: Gas
    # Back-reference to the owning Block (see Block.transactions).
    block: Optional["Block"] = Relationship(back_populates="transactions")

    def __str__(self) -> str:
        return f"Transaction({self.operations})"

    def __repr__(self) -> str:
        return f"<Transaction(id={self.id}, created_at={self.created_at}, operations={self.operations})>"

    @classmethod
    def from_random(cls) -> "Transaction":
        """Build a Transaction with 1-3 random operation labels and random gas prices."""
        n = random.randint(1, 3)
        operations = [random.choice(list(Operation)).value for _ in range(n)]
        return Transaction(
            operations=operations,
            ledger_transaction=LedgerTransaction.from_random(),
            execution_gas_price=random.random(),
            storage_gas_price=random.random(),
        )

35
src/utils/protocols.py Normal file
View File

@ -0,0 +1,35 @@
from abc import ABC, abstractmethod
from random import choice
from typing import Self
class FromRandom(ABC):
@classmethod
@abstractmethod
def from_random(cls, *args, **kwargs) -> Self:
raise NotImplementedError
# TODO: Unnecessarily complex.
class EnforceSubclassFromRandom(FromRandom, ABC):
    """Base whose `from_random` delegates to a randomly chosen direct subclass.

    Classes two or more levels below this base ("grandchildren") must provide
    their own `from_random`; `__init_subclass__` enforces this at definition time.
    """

    @classmethod
    def from_random(cls, *args, **kwargs) -> Self:
        """Instantiate a random direct subclass via its own `from_random`.

        Raises:
            TypeError: when the class has no subclasses to delegate to.
        """
        subclasses = cls.__subclasses__()
        if not subclasses:
            raise TypeError("No subclasses were found.")
        return choice(subclasses).from_random(*args, **kwargs)

    def __init_subclass__(cls, **kwargs):
        super().__init_subclass__(**kwargs)
        # Distance to the base in the MRO.
        try:
            distance = cls.mro().index(EnforceSubclassFromRandom)
        except ValueError:
            return  # Not a descendant (shouldn't happen here)
        # Require an override only for grandchildren (two or more levels below).
        # Bug fix: the original used `not hasattr(cls, "from_random")`, which is
        # always False because `from_random` is inherited from this base — the
        # enforcement never fired. Check the class's own __dict__ instead.
        if distance >= 2 and "from_random" not in cls.__dict__:
            raise TypeError(
                f"Class {cls.__name__} is a grandchild of EnforceSubclassFromRandom. Therefore, it must implement `from_random()`."
            )

View File

@ -1,13 +1,18 @@
import random import random
from typing import List
def random_hex(length: int) -> str: def random_bytes(length: int) -> bytes:
return f"0x{random.getrandbits(length * 4):0{length}x}" return bytes((random.randint(0, 255) for _ in range(length)))
def random_hash() -> str: def random_address() -> bytes:
return random_hex(64) return random_bytes(40)
def random_address() -> str: def random_hash() -> bytes:
return random_hex(40) return random_bytes(64)
def as_list(data: bytes) -> List[int]:
return list(data)

View File

@ -14,24 +14,26 @@ export default function BlocksTable() {
const body = bodyRef.current; const body = bodyRef.current;
const counter = countRef.current; const counter = countRef.current;
// 5 columns now (ID, Slot, Root, Parent, Transactions) // 6 columns: ID | Slot | Hash | Parent | Block Root | Transactions
ensureFixedRowCount(body, 5, TABLE_SIZE); ensureFixedRowCount(body, 6, TABLE_SIZE);
abortRef.current?.abort(); abortRef.current?.abort();
abortRef.current = new AbortController(); abortRef.current = new AbortController();
const pruneAndPad = () => { const pruneAndPad = () => {
// remove any placeholder rows that snuck in
for (let i = body.rows.length - 1; i >= 0; i--) { for (let i = body.rows.length - 1; i >= 0; i--) {
if (body.rows[i].classList.contains('ph')) body.deleteRow(i); if (body.rows[i].classList.contains('ph')) body.deleteRow(i);
} }
// keep at most TABLE_SIZE non-placeholder rows
while ([...body.rows].filter((r) => !r.classList.contains('ph')).length > TABLE_SIZE) { while ([...body.rows].filter((r) => !r.classList.contains('ph')).length > TABLE_SIZE) {
const last = body.rows[body.rows.length - 1]; const last = body.rows[body.rows.length - 1];
const key = last?.dataset?.key; const key = last?.dataset?.key;
if (key) seenKeysRef.current.delete(key); if (key) seenKeysRef.current.delete(key);
body.deleteRow(-1); body.deleteRow(-1);
} }
// keep placeholders in sync with 5 columns // pad with placeholders to TABLE_SIZE (6 cols)
ensureFixedRowCount(body, 5, TABLE_SIZE); ensureFixedRowCount(body, 6, TABLE_SIZE);
const real = [...body.rows].filter((r) => !r.classList.contains('ph')).length; const real = [...body.rows].filter((r) => !r.classList.contains('ph')).length;
counter.textContent = String(real); counter.textContent = String(real);
}; };
@ -64,15 +66,15 @@ export default function BlocksTable() {
spSlot.textContent = String(b.slot); spSlot.textContent = String(b.slot);
tdSlot.appendChild(spSlot); tdSlot.appendChild(spSlot);
// Root // Hash
const tdRoot = document.createElement('td'); const tdHash = document.createElement('td');
const spRoot = document.createElement('span'); const spHash = document.createElement('span');
spRoot.className = 'mono'; spHash.className = 'mono';
spRoot.title = b.root; spHash.title = b.hash;
spRoot.textContent = shortenHex(b.root); spHash.textContent = shortenHex(b.hash);
tdRoot.appendChild(spRoot); tdHash.appendChild(spHash);
// Parent // Parent (block.parent_block_hash)
const tdParent = document.createElement('td'); const tdParent = document.createElement('td');
const spParent = document.createElement('span'); const spParent = document.createElement('span');
spParent.className = 'mono'; spParent.className = 'mono';
@ -80,6 +82,14 @@ export default function BlocksTable() {
spParent.textContent = shortenHex(b.parent); spParent.textContent = shortenHex(b.parent);
tdParent.appendChild(spParent); tdParent.appendChild(spParent);
// Block Root
const tdRoot = document.createElement('td');
const spRoot = document.createElement('span');
spRoot.className = 'mono';
spRoot.title = b.root;
spRoot.textContent = shortenHex(b.root);
tdRoot.appendChild(spRoot);
// Transactions (array length) // Transactions (array length)
const tdCount = document.createElement('td'); const tdCount = document.createElement('td');
const spCount = document.createElement('span'); const spCount = document.createElement('span');
@ -87,13 +97,16 @@ export default function BlocksTable() {
spCount.textContent = String(b.transactionCount); spCount.textContent = String(b.transactionCount);
tdCount.appendChild(spCount); tdCount.appendChild(spCount);
tr.append(tdId, tdSlot, tdRoot, tdParent, tdCount); tr.append(tdId, tdSlot, tdHash, tdParent, tdRoot, tdCount);
body.insertBefore(tr, body.firstChild); body.insertBefore(tr, body.firstChild);
pruneAndPad(); pruneAndPad();
}; };
const normalize = (raw) => { const normalize = (raw) => {
const header = raw.header ?? raw; // New backend:
// { id, hash, slot, block_root, parent_block_hash, transactions: [...] }
// Back-compat (header.* / raw.parent_block) just in case.
const header = raw.header ?? null;
const txLen = Array.isArray(raw.transactions) const txLen = Array.isArray(raw.transactions)
? raw.transactions.length ? raw.transactions.length
: Array.isArray(raw.txs) : Array.isArray(raw.txs)
@ -102,9 +115,10 @@ export default function BlocksTable() {
return { return {
id: Number(raw.id ?? 0), id: Number(raw.id ?? 0),
slot: Number(header?.slot ?? raw.slot ?? 0), slot: Number(raw.slot ?? header?.slot ?? 0),
root: header?.block_root ?? raw.block_root ?? '', hash: raw.hash ?? header?.hash ?? '',
parent: header?.parent_block ?? raw.parent_block ?? '', parent: raw.parent_block_hash ?? header?.parent_block ?? raw.parent_block ?? '',
root: raw.block_root ?? header?.block_root ?? '',
transactionCount: txLen, transactionCount: txLen,
}; };
}; };
@ -152,8 +166,9 @@ export default function BlocksTable() {
null, null,
h('col', { style: 'width:80px' }), // ID h('col', { style: 'width:80px' }), // ID
h('col', { style: 'width:90px' }), // Slot h('col', { style: 'width:90px' }), // Slot
h('col', { style: 'width:240px' }), // Root h('col', { style: 'width:240px' }), // Hash
h('col', { style: 'width:240px' }), // Parent h('col', { style: 'width:240px' }), // Parent
h('col', { style: 'width:240px' }), // Block Root
h('col', { style: 'width:120px' }), // Transactions h('col', { style: 'width:120px' }), // Transactions
), ),
h( h(
@ -164,8 +179,9 @@ export default function BlocksTable() {
null, null,
h('th', null, 'ID'), h('th', null, 'ID'),
h('th', null, 'Slot'), h('th', null, 'Slot'),
h('th', null, 'Block Root'), h('th', null, 'Hash'),
h('th', null, 'Parent'), h('th', null, 'Parent'),
h('th', null, 'Block Root'),
h('th', null, 'Transactions'), h('th', null, 'Transactions'),
), ),
), ),

View File

@ -1,139 +1,167 @@
// static/pages/TransactionsTable.js
import { h } from 'preact'; import { h } from 'preact';
import { useEffect, useRef } from 'preact/hooks'; import { useEffect, useRef } from 'preact/hooks';
import { API, TABLE_SIZE } from '../lib/api.js?dev=1'; import { API, TABLE_SIZE } from '../lib/api.js?dev=1';
import { import {
streamNdjson, streamNdjson,
ensureFixedRowCount, ensureFixedRowCount,
shortenHex, shortenHex, // (kept in case you want to use later)
formatTimestamp,
withBenignFilter, withBenignFilter,
} from '../lib/utils.js?dev=1'; } from '../lib/utils.js?dev=1';
const OPERATIONS_PREVIEW_LIMIT = 2; const OPERATIONS_PREVIEW_LIMIT = 2;
// ---------- small DOM helpers ----------
function createSpan(className, text, title) { function createSpan(className, text, title) {
const element = document.createElement('span'); const el = document.createElement('span');
if (className) element.className = className; if (className) el.className = className;
if (title) element.title = title; if (title) el.title = title;
element.textContent = text; el.textContent = text;
return element; return el;
} }
function createLink(href, text, title) { function createLink(href, text, title) {
const element = document.createElement('a'); const el = document.createElement('a');
element.className = 'linkish mono'; el.className = 'linkish mono';
element.href = href; el.href = href;
if (title) element.title = title; if (title) el.title = title;
element.textContent = text; el.textContent = text;
return element; return el;
} }
// ---------- coercion / formatting helpers ----------
const toNumber = (v) => {
if (v == null) return 0;
if (typeof v === 'number') return v;
if (typeof v === 'bigint') return Number(v);
if (typeof v === 'string') {
const s = v.trim();
if (/^0x[0-9a-f]+$/i.test(s)) return Number(BigInt(s));
const n = Number(s);
return Number.isFinite(n) ? n : 0;
}
if (typeof v === 'object' && v !== null && 'value' in v) return toNumber(v.value);
return 0;
};
const opLabel = (op) => {
if (op == null) return 'op';
if (typeof op === 'string' || typeof op === 'number') return String(op);
if (typeof op !== 'object') return String(op);
if (typeof op.type === 'string') return op.type;
if (typeof op.kind === 'string') return op.kind;
if (op.content) {
if (typeof op.content.type === 'string') return op.content.type;
if (typeof op.content.kind === 'string') return op.content.kind;
}
const keys = Object.keys(op);
return keys.length ? keys[0] : 'op';
};
function formatOperationsPreview(ops) {
if (!ops?.length) return '—';
const labels = ops.map(opLabel);
if (labels.length <= OPERATIONS_PREVIEW_LIMIT) return labels.join(', ');
const head = labels.slice(0, OPERATIONS_PREVIEW_LIMIT).join(', ');
const remainder = labels.length - OPERATIONS_PREVIEW_LIMIT;
return `${head} +${remainder}`;
}
// ---------- normalize API → view model ----------
function normalizeTransaction(raw) { function normalizeTransaction(raw) {
// Defensive parsing and intent-revealing structure // { id, block_id, hash, operations:[Operation], inputs:[HexBytes], outputs:[Note], proof, execution_gas_price, storage_gas_price, created_at? }
const operations = Array.isArray(raw?.ops) ? raw.ops : Array.isArray(raw?.operations) ? raw.operations : []; const ops = Array.isArray(raw?.operations) ? raw.operations : Array.isArray(raw?.ops) ? raw.ops : [];
const ledgerOutputs = Array.isArray(raw?.ledger_transaction?.outputs) ? raw.ledger_transaction.outputs : []; const outputs = Array.isArray(raw?.outputs) ? raw.outputs : [];
const totalOutputValue = outputs.reduce((sum, note) => sum + toNumber(note?.value), 0);
const totalOutputValue = ledgerOutputs.reduce((sum, note) => sum + Number(note?.value ?? 0), 0);
return { return {
id: raw?.id ?? '', id: raw?.id ?? '',
operations, operations: ops,
createdAt: raw?.created_at ?? raw?.timestamp ?? '', executionGasPrice: toNumber(raw?.execution_gas_price),
executionGasPrice: Number(raw?.execution_gas_price ?? 0), storageGasPrice: toNumber(raw?.storage_gas_price),
storageGasPrice: Number(raw?.storage_gas_price ?? 0), numberOfOutputs: outputs.length,
numberOfOutputs: ledgerOutputs.length,
totalOutputValue, totalOutputValue,
}; };
} }
function formatOperationsPreview(operations) { // ---------- row builder ----------
if (operations.length === 0) return '—'; function buildTransactionRow(tx) {
if (operations.length <= OPERATIONS_PREVIEW_LIMIT) return operations.join(', '); const tr = document.createElement('tr');
const head = operations.slice(0, OPERATIONS_PREVIEW_LIMIT).join(', ');
const remainder = operations.length - OPERATIONS_PREVIEW_LIMIT;
return `${head} +${remainder}`;
}
function buildTransactionRow(transactionData) {
const row = document.createElement('tr');
// ID // ID
const cellId = document.createElement('td'); const tdId = document.createElement('td');
cellId.className = 'mono'; tdId.className = 'mono';
cellId.appendChild( tdId.appendChild(createLink(`/transactions/${tx.id}`, String(tx.id), String(tx.id)));
createLink(`/transactions/${transactionData.id}`, String(transactionData.id), String(transactionData.id)),
// Operations (preview)
const tdOps = document.createElement('td');
const preview = formatOperationsPreview(tx.operations);
tdOps.appendChild(
createSpan('', preview, Array.isArray(tx.operations) ? tx.operations.map(opLabel).join(', ') : ''),
); );
// Operations // Outputs (count / total)
const cellOperations = document.createElement('td'); const tdOut = document.createElement('td');
const operationsPreview = formatOperationsPreview(transactionData.operations); tdOut.className = 'amount';
cellOperations.appendChild(createSpan('', operationsPreview, transactionData.operations.join(', '))); tdOut.textContent = `${tx.numberOfOutputs} / ${tx.totalOutputValue.toLocaleString(undefined, { maximumFractionDigits: 8 })}`;
// Outputs (count / total value)
const cellOutputs = document.createElement('td');
cellOutputs.className = 'amount';
cellOutputs.textContent = `${transactionData.numberOfOutputs} / ${transactionData.totalOutputValue.toLocaleString(undefined, { maximumFractionDigits: 8 })}`;
// Gas (execution / storage) // Gas (execution / storage)
const cellGas = document.createElement('td'); const tdGas = document.createElement('td');
cellGas.className = 'mono'; tdGas.className = 'mono';
cellGas.textContent = `${transactionData.executionGasPrice.toLocaleString()} / ${transactionData.storageGasPrice.toLocaleString()}`; tdGas.textContent = `${tx.executionGasPrice.toLocaleString()} / ${tx.storageGasPrice.toLocaleString()}`;
// Time tr.append(tdId, tdOps, tdOut, tdGas);
const cellTime = document.createElement('td'); return tr;
const timeSpan = createSpan('mono', formatTimestamp(transactionData.createdAt), String(transactionData.createdAt));
cellTime.appendChild(timeSpan);
row.append(cellId, cellOperations, cellOutputs, cellGas, cellTime);
return row;
} }
// ---------- component ----------
export default function TransactionsTable() { export default function TransactionsTable() {
const tableBodyRef = useRef(null); const bodyRef = useRef(null);
const counterRef = useRef(null); const countRef = useRef(null);
const abortControllerRef = useRef(null); const abortRef = useRef(null);
const totalCountRef = useRef(0); const totalCountRef = useRef(0);
useEffect(() => { useEffect(() => {
const tableBodyElement = tableBodyRef.current; const body = bodyRef.current;
const counterElement = counterRef.current; const counter = countRef.current;
ensureFixedRowCount(tableBodyElement, 4, TABLE_SIZE);
abortControllerRef.current?.abort(); // 4 columns: ID | Operations | Outputs | Gas
abortControllerRef.current = new AbortController(); ensureFixedRowCount(body, 4, TABLE_SIZE);
abortRef.current?.abort();
abortRef.current = new AbortController();
const url = `${API.TRANSACTIONS_STREAM}?prefetch-limit=${encodeURIComponent(TABLE_SIZE)}`; const url = `${API.TRANSACTIONS_STREAM}?prefetch-limit=${encodeURIComponent(TABLE_SIZE)}`;
streamNdjson( streamNdjson(
url, url,
(rawTransaction) => { (raw) => {
try { try {
const transactionData = normalizeTransaction(rawTransaction); const tx = normalizeTransaction(raw);
const row = buildTransactionRow(transactionData); const row = buildTransactionRow(tx);
body.insertBefore(row, body.firstChild);
tableBodyElement.insertBefore(row, tableBodyElement.firstChild); while (body.rows.length > TABLE_SIZE) body.deleteRow(-1);
while (tableBodyElement.rows.length > TABLE_SIZE) tableBodyElement.deleteRow(-1); counter.textContent = String(++totalCountRef.current);
counterElement.textContent = String(++totalCountRef.current); } catch (err) {
} catch (error) { console.error('Failed to render transaction row:', err, raw);
// Fail fast per row, but do not break the stream
console.error('Failed to render transaction row:', error);
} }
}, },
{ {
signal: abortControllerRef.current.signal, signal: abortRef.current.signal,
onError: withBenignFilter( onError: withBenignFilter(
(error) => console.error('Transaction stream error:', error), (err) => console.error('Transactions stream error:', err),
abortControllerRef.current.signal, abortRef.current.signal,
), ),
}, },
).catch((error) => { ).catch((err) => {
if (!abortControllerRef.current.signal.aborted) { if (!abortRef.current.signal.aborted) {
console.error('Transactions stream connection error:', error); console.error('Transactions stream connection error:', err);
} }
}); });
return () => abortControllerRef.current?.abort(); return () => abortRef.current?.abort();
}, []); }, []);
return h( return h(
@ -142,7 +170,7 @@ export default function TransactionsTable() {
h( h(
'div', 'div',
{ class: 'card-header' }, { class: 'card-header' },
h('div', null, h('strong', null, 'Transactions '), h('span', { class: 'pill', ref: counterRef }, '0')), h('div', null, h('strong', null, 'Transactions '), h('span', { class: 'pill', ref: countRef }, '0')),
h('div', { style: 'color:var(--muted); font-size:12px;' }), h('div', { style: 'color:var(--muted); font-size:12px;' }),
), ),
h( h(
@ -156,9 +184,8 @@ export default function TransactionsTable() {
null, null,
h('col', { style: 'width:120px' }), // ID h('col', { style: 'width:120px' }), // ID
h('col', null), // Operations h('col', null), // Operations
h('col', { style: 'width:180px' }), // Outputs (count / total) h('col', { style: 'width:200px' }), // Outputs (count / total)
h('col', { style: 'width:180px' }), // Gas (execution / storage) h('col', { style: 'width:200px' }), // Gas (execution / storage)
h('col', { style: 'width:180px' }), // Time
), ),
h( h(
'thead', 'thead',
@ -170,10 +197,9 @@ export default function TransactionsTable() {
h('th', null, 'Operations'), h('th', null, 'Operations'),
h('th', null, 'Outputs (count / total)'), h('th', null, 'Outputs (count / total)'),
h('th', null, 'Gas (execution / storage)'), h('th', null, 'Gas (execution / storage)'),
h('th', null, 'Time'),
), ),
), ),
h('tbody', { ref: tableBodyRef }), h('tbody', { ref: bodyRef }),
), ),
), ),
); );

View File

@ -5,24 +5,41 @@ import { API, PAGE } from '../lib/api.js?dev=1';
const OPERATIONS_PREVIEW_LIMIT = 2; const OPERATIONS_PREVIEW_LIMIT = 2;
// Helpers // ---- Helpers ----
const opLabel = (op) => {
if (op == null) return 'op';
if (typeof op === 'string' || typeof op === 'number') return String(op);
if (typeof op !== 'object') return String(op);
if (typeof op.type === 'string') return op.type;
if (typeof op.kind === 'string') return op.kind;
if (op.content) {
if (typeof op.content.type === 'string') return op.content.type;
if (typeof op.content.kind === 'string') return op.content.kind;
}
const keys = Object.keys(op);
return keys.length ? keys[0] : 'op';
};
function opsToPills(ops, limit = OPERATIONS_PREVIEW_LIMIT) { function opsToPills(ops, limit = OPERATIONS_PREVIEW_LIMIT) {
const arr = Array.isArray(ops) ? ops : []; const arr = Array.isArray(ops) ? ops : [];
if (!arr.length) return h('span', { style: 'color:var(--muted); white-space:nowrap;' }, '—'); if (!arr.length) return h('span', { style: 'color:var(--muted); white-space:nowrap;' }, '—');
const shown = arr.slice(0, limit);
const extra = arr.length - shown.length; const labels = arr.map(opLabel);
const shown = labels.slice(0, limit);
const extra = labels.length - shown.length;
return h( return h(
'div', 'div',
{ style: 'display:flex; gap:6px; flex-wrap:nowrap; align-items:center; white-space:nowrap;' }, { style: 'display:flex; gap:6px; flex-wrap:nowrap; align-items:center; white-space:nowrap;' },
...shown.map((op, i) => ...shown.map((label, i) =>
h('span', { key: `${op}-${i}`, class: 'pill', title: op, style: 'flex:0 0 auto;' }, op), h('span', { key: `${label}-${i}`, class: 'pill', title: label, style: 'flex:0 0 auto;' }, label),
), ),
extra > 0 && h('span', { class: 'pill', title: `${extra} more`, style: 'flex:0 0 auto;' }, `+${extra}`), extra > 0 && h('span', { class: 'pill', title: `${extra} more`, style: 'flex:0 0 auto;' }, `+${extra}`),
); );
} }
function computeOutputsSummary(ledgerTransaction) { function computeOutputsSummaryFromTx(tx) {
const outputs = Array.isArray(ledgerTransaction?.outputs) ? ledgerTransaction.outputs : []; const outputs = Array.isArray(tx?.outputs) ? tx.outputs : [];
const count = outputs.length; const count = outputs.length;
const total = outputs.reduce((sum, o) => sum + Number(o?.value ?? 0), 0); const total = outputs.reduce((sum, o) => sum + Number(o?.value ?? 0), 0);
return { count, total }; return { count, total };
@ -112,9 +129,15 @@ export default function BlockDetailPage({ parameters }) {
}; };
}, [blockId, isValidId]); }, [blockId, isValidId]);
const header = block?.header ?? {}; const header = block?.header ?? {}; // back-compat only
const transactions = Array.isArray(block?.transactions) ? block.transactions : []; const transactions = Array.isArray(block?.transactions) ? block.transactions : [];
const slot = block?.slot ?? header.slot;
// Prefer new top-level fields; fallback to legacy header.*
const slot = block?.slot ?? header?.slot ?? null;
const blockRoot = block?.block_root ?? header?.block_root ?? '';
const blockHash = block?.hash ?? header?.hash ?? '';
const parentId = block?.parent_id ?? null;
const parentHash = block?.parent_block_hash ?? header?.parent_block ?? '';
return h( return h(
'main', 'main',
@ -170,6 +193,23 @@ export default function BlockDetailPage({ parameters }) {
'div', 'div',
{ style: 'padding:12px 14px; display:grid; grid-template-columns: 120px 1fr; gap:8px 12px;' }, { style: 'padding:12px 14px; display:grid; grid-template-columns: 120px 1fr; gap:8px 12px;' },
// Hash (pill + copy)
h('div', null, h('b', null, 'Hash:')),
h(
'div',
{ style: 'display:flex; gap:8px; flex-wrap:wrap; align-items:flex-start;' },
h(
'span',
{
class: 'pill mono',
title: blockHash,
style: 'max-width:100%; overflow-wrap:anywhere; word-break:break-word;',
},
String(blockHash),
),
h(CopyPill, { text: blockHash }),
),
// Root (pill + copy) // Root (pill + copy)
h('div', null, h('b', null, 'Root:')), h('div', null, h('b', null, 'Root:')),
h( h(
@ -179,40 +219,40 @@ export default function BlockDetailPage({ parameters }) {
'span', 'span',
{ {
class: 'pill mono', class: 'pill mono',
title: header.block_root ?? '', title: blockRoot,
style: 'max-width:100%; overflow-wrap:anywhere; word-break:break-word;', style: 'max-width:100%; overflow-wrap:anywhere; word-break:break-word;',
}, },
String(header.block_root ?? ''), String(blockRoot),
), ),
h(CopyPill, { text: header.block_root }), h(CopyPill, { text: blockRoot }),
), ),
// Parent (pill + copy) // Parent (id link OR parent hash) + copy
h('div', null, h('b', null, 'Parent:')), h('div', null, h('b', null, 'Parent:')),
h( h(
'div', 'div',
{ style: 'display:flex; gap:8px; flex-wrap:wrap; align-items:flex-start;' }, { style: 'display:flex; gap:8px; flex-wrap:wrap; align-items:flex-start;' },
block?.parent_id parentId != null
? h( ? h(
'a', 'a',
{ {
class: 'pill mono linkish', class: 'pill mono linkish',
href: PAGE.BLOCK_DETAIL(block.parent_id), href: PAGE.BLOCK_DETAIL(parentId),
title: String(block.parent_id), title: String(parentId),
style: 'max-width:100%; overflow-wrap:anywhere; word-break:break-word;', style: 'max-width:100%; overflow-wrap:anywhere; word-break:break-word;',
}, },
String(block.parent_id), String(parentId),
) )
: h( : h(
'span', 'span',
{ {
class: 'pill mono', class: 'pill mono',
title: header.parent_block ?? '', title: parentHash,
style: 'max-width:100%; overflow-wrap:anywhere; word-break:break-word;', style: 'max-width:100%; overflow-wrap:anywhere; word-break:break-word;',
}, },
String(header.parent_block ?? ''), String(parentHash || '—'),
), ),
h(CopyPill, { text: block?.parent_id ?? header.parent_block }), h(CopyPill, { text: parentId ?? parentHash }),
), ),
), ),
), ),
@ -234,7 +274,6 @@ export default function BlockDetailPage({ parameters }) {
'table', 'table',
{ {
class: 'table--transactions', class: 'table--transactions',
// Fill card by default; expand + scroll if content is wider
style: 'min-width:100%; width:max-content; table-layout:auto; border-collapse:collapse;', style: 'min-width:100%; width:max-content; table-layout:auto; border-collapse:collapse;',
}, },
h( h(
@ -265,10 +304,10 @@ export default function BlockDetailPage({ parameters }) {
'tbody', 'tbody',
null, null,
...transactions.map((t) => { ...transactions.map((t) => {
const operations = Array.isArray(t?.operations) ? t.operations : []; const { count, total } = computeOutputsSummaryFromTx(t);
const { count, total } = computeOutputsSummary(t?.ledger_transaction);
const executionGas = Number(t?.execution_gas_price ?? 0); const executionGas = Number(t?.execution_gas_price ?? 0);
const storageGas = Number(t?.storage_gas_price ?? 0); const storageGas = Number(t?.storage_gas_price ?? 0);
const ops = Array.isArray(t?.operations) ? t.operations : [];
return h( return h(
'tr', 'tr',
@ -309,7 +348,7 @@ export default function BlockDetailPage({ parameters }) {
h( h(
'td', 'td',
{ style: 'text-align:left; padding:8px 10px; white-space:nowrap;' }, { style: 'text-align:left; padding:8px 10px; white-space:nowrap;' },
opsToPills(operations), opsToPills(ops),
), ),
); );
}), }),

View File

@ -7,9 +7,10 @@ import { API } from '../lib/api.js?dev=1';
const isNumber = (v) => typeof v === 'number' && !Number.isNaN(v); const isNumber = (v) => typeof v === 'number' && !Number.isNaN(v);
const toLocaleNum = (n, opts = {}) => Number(n ?? 0).toLocaleString(undefined, { maximumFractionDigits: 8, ...opts }); const toLocaleNum = (n, opts = {}) => Number(n ?? 0).toLocaleString(undefined, { maximumFractionDigits: 8, ...opts });
// Try to render bytes in a readable way without guessing too hard // Best-effort pretty bytes/hex/string
function renderBytes(value) { function renderBytes(value) {
if (typeof value === 'string') return value; // hex/base64/etc. if (value == null) return '';
if (typeof value === 'string') return value; // hex/base64/plain
if (Array.isArray(value) && value.every((x) => Number.isInteger(x) && x >= 0 && x <= 255)) { if (Array.isArray(value) && value.every((x) => Number.isInteger(x) && x >= 0 && x <= 255)) {
return '0x' + value.map((b) => b.toString(16).padStart(2, '0')).join(''); return '0x' + value.map((b) => b.toString(16).padStart(2, '0')).join('');
} }
@ -20,24 +21,106 @@ function renderBytes(value) {
} }
} }
// ————— normalizer (robust to partial data) ————— const opLabel = (op) => {
function normalizeTransaction(raw) { if (op == null) return 'op';
const ops = Array.isArray(raw?.operations) ? raw.operations : []; if (typeof op === 'string' || typeof op === 'number') return String(op);
const lt = raw?.ledger_transaction ?? {}; if (typeof op !== 'object') return String(op);
const inputs = Array.isArray(lt?.inputs) ? lt.inputs : []; if (typeof op.type === 'string') return op.type;
const outputs = Array.isArray(lt?.outputs) ? lt.outputs : []; if (typeof op.kind === 'string') return op.kind;
if (op.content) {
if (typeof op.content.type === 'string') return op.content.type;
if (typeof op.content.kind === 'string') return op.content.kind;
}
const keys = Object.keys(op);
return keys.length ? keys[0] : 'op';
};
const totalOutputValue = outputs.reduce((sum, note) => sum + Number(note?.value ?? 0), 0); function opsToPills(ops, limit = 6) {
const arr = Array.isArray(ops) ? ops : [];
if (!arr.length) return h('span', { style: 'color:var(--muted); whiteSpace: "nowrap";' }, '—');
const labels = arr.map(opLabel);
const shown = labels.slice(0, limit);
const extra = labels.length - shown.length;
return h(
'div',
{ style: 'display:flex; gap:6px; flexWrap:"wrap"; alignItems:"center"' },
...shown.map((label, i) =>
h('span', { key: `${label}-${i}`, class: 'pill', title: label, style: 'flex:0 0 auto;' }, label),
),
extra > 0 && h('span', { class: 'pill', title: `${extra} more`, style: 'flex:0 0 auto;' }, `+${extra}`),
);
}
// Coerce a loosely-typed value into a plain number.
// Accepts numbers, bigints, decimal or 0x-hex strings, and objects
// carrying a `value` field (unwrapped recursively); anything else — and
// anything unparseable — coerces to 0.
const toNumber = (v) => {
  if (v == null) return 0;
  switch (typeof v) {
    case 'number':
      return v;
    case 'bigint':
      return Number(v);
    case 'string': {
      const trimmed = v.trim();
      if (/^0x[0-9a-f]+$/i.test(trimmed)) return Number(BigInt(trimmed));
      const parsed = Number(trimmed);
      return Number.isFinite(parsed) ? parsed : 0;
    }
    case 'object':
      return 'value' in v ? toNumber(v.value) : 0;
    default:
      return 0;
  }
};
// Pill-styled anchor that copies `text` to the clipboard on click or on
// keyboard activation (Enter / Space). Clipboard failures are silently
// ignored (best-effort copy; the Clipboard API may be unavailable or denied).
function CopyPill({ text, label = 'Copy' }) {
  const copy = async (event) => {
    event.preventDefault();
    try {
      await navigator.clipboard.writeText(String(text ?? ''));
    } catch {}
  };
  const handleKeyDown = (event) => {
    const activates = event.key === 'Enter' || event.key === ' ';
    if (activates) {
      event.preventDefault();
      copy(event);
    }
  };
  return h(
    'a',
    {
      class: 'pill linkish mono',
      style: 'cursor:pointer; user-select:none;',
      href: '#',
      onClick: copy,
      onKeyDown: handleKeyDown,
      tabIndex: 0,
      role: 'button',
    },
    label,
  );
}
// ————— normalizer for new TransactionRead —————
// { id, block_id, hash, operations:[Operation], inputs:[HexBytes], outputs:[Note{public_key:HexBytes,value:int}],
// proof, execution_gas_price, storage_gas_price }
function normalizeTransaction(raw) {
const ops = Array.isArray(raw?.operations) ? raw.operations : Array.isArray(raw?.ops) ? raw.ops : [];
const inputs = Array.isArray(raw?.inputs) ? raw.inputs : [];
const outputs = Array.isArray(raw?.outputs) ? raw.outputs : [];
const totalOutputValue = outputs.reduce((sum, note) => sum + toNumber(note?.value), 0);
return { return {
id: raw?.id ?? '', id: raw?.id ?? '',
blockId: raw?.block_id ?? null, blockId: raw?.block_id ?? null,
operations: ops.map(String), hash: renderBytes(raw?.hash),
proof: renderBytes(raw?.proof),
operations: ops, // keep objects, well label in UI
executionGasPrice: isNumber(raw?.execution_gas_price) executionGasPrice: isNumber(raw?.execution_gas_price)
? raw.execution_gas_price ? raw.execution_gas_price
: Number(raw?.execution_gas_price ?? 0), : toNumber(raw?.execution_gas_price),
storageGasPrice: isNumber(raw?.storage_gas_price) ? raw.storage_gas_price : Number(raw?.storage_gas_price ?? 0), storageGasPrice: isNumber(raw?.storage_gas_price) ? raw.storage_gas_price : toNumber(raw?.storage_gas_price),
ledger: { inputs, outputs, totalOutputValue }, ledger: {
inputs: inputs.map((v) => renderBytes(v)),
outputs: outputs.map((n) => ({
public_key: renderBytes(n?.public_key),
value: toNumber(n?.value),
})),
totalOutputValue,
},
}; };
} }
@ -59,8 +142,7 @@ function Summary({ tx }) {
'div', 'div',
{ style: 'display:grid; gap:8px;' }, { style: 'display:grid; gap:8px;' },
// (ID removed) // Block link
tx.blockId != null && tx.blockId != null &&
h( h(
'div', 'div',
@ -73,6 +155,34 @@ function Summary({ tx }) {
), ),
), ),
// Hash + copy
h(
'div',
null,
h('b', null, 'Hash: '),
h(
'span',
{ class: 'pill mono', title: tx.hash, style: 'max-width:100%; overflow-wrap:anywhere;' },
String(tx.hash || ''),
),
h(CopyPill, { text: tx.hash }),
),
// Proof + copy (if present)
tx.proof &&
h(
'div',
null,
h('b', null, 'Proof: '),
h(
'span',
{ class: 'pill mono', title: tx.proof, style: 'max-width:100%; overflow-wrap:anywhere;' },
String(tx.proof),
),
h(CopyPill, { text: tx.proof }),
),
// Gas
h( h(
'div', 'div',
null, null,
@ -86,26 +196,14 @@ function Summary({ tx }) {
h('span', { class: 'mono' }, toLocaleNum(tx.storageGasPrice)), h('span', { class: 'mono' }, toLocaleNum(tx.storageGasPrice)),
), ),
h( // Operations (labels as pills)
'div', h('div', null, h('b', null, 'Operations: '), opsToPills(tx.operations)),
null,
h('b', null, 'Operations: '),
tx.operations?.length
? h(
'span',
{ style: 'display:inline-flex; gap:6px; flex-wrap:wrap; vertical-align:middle;' },
...tx.operations.map((op, i) => h('span', { key: i, class: 'pill', title: op }, op)),
)
: h('span', { style: 'color:var(--muted)' }, '—'),
),
), ),
); );
} }
function InputsTable({ inputs }) { function InputsTable({ inputs }) {
if (!inputs?.length) { if (!inputs?.length) return h('div', { style: 'color:var(--muted)' }, '—');
return h('div', { style: 'color:var(--muted)' }, '—');
}
return h( return h(
'div', 'div',
@ -117,7 +215,7 @@ function InputsTable({ inputs }) {
'colgroup', 'colgroup',
null, null,
h('col', { style: 'width:80px' }), // # h('col', { style: 'width:80px' }), // #
h('col', null), // Value (fills) h('col', null), // Value
), ),
h('thead', null, h('tr', null, h('th', { style: 'text-align:center;' }, '#'), h('th', null, 'Value'))), h('thead', null, h('tr', null, h('th', { style: 'text-align:center;' }, '#'), h('th', null, 'Value'))),
h( h(
@ -145,9 +243,7 @@ function InputsTable({ inputs }) {
} }
function OutputsTable({ outputs }) { function OutputsTable({ outputs }) {
if (!outputs?.length) { if (!outputs?.length) return h('div', { style: 'color:var(--muted)' }, '—');
return h('div', { style: 'color:var(--muted)' }, '—');
}
return h( return h(
'div', 'div',
@ -158,9 +254,9 @@ function OutputsTable({ outputs }) {
h( h(
'colgroup', 'colgroup',
null, null,
h('col', { style: 'width:80px' }), // # (compact, centered) h('col', { style: 'width:80px' }), // #
h('col', null), // Public Key (fills) h('col', null), // Public Key
h('col', { style: 'width:180px' }), // Value (compact, right) h('col', { style: 'width:180px' }), // Value
), ),
h( h(
'thead', 'thead',
@ -169,8 +265,8 @@ function OutputsTable({ outputs }) {
'tr', 'tr',
null, null,
h('th', { style: 'text-align:center;' }, '#'), h('th', { style: 'text-align:center;' }, '#'),
h('th', null, 'Public Key'), // ← back to Public Key second h('th', null, 'Public Key'),
h('th', { style: 'text-align:right;' }, 'Value'), // ← Value last h('th', { style: 'text-align:right;' }, 'Value'),
), ),
), ),
h( h(
@ -180,26 +276,18 @@ function OutputsTable({ outputs }) {
h( h(
'tr', 'tr',
{ key: idx }, { key: idx },
// # (index)
h('td', { style: 'text-align:center;' }, String(idx)), h('td', { style: 'text-align:center;' }, String(idx)),
// Public Key (fills, wraps)
h( h(
'td', 'td',
null, null,
h( h(
'span', 'span',
{ { class: 'mono', style: 'display:inline-block; overflow-wrap:anywhere;' },
class: 'mono', String(note.public_key ?? ''),
style: 'display:inline-block; overflow-wrap:anywhere; word-break:break-word;',
title: renderBytes(note?.public_key),
},
renderBytes(note?.public_key),
), ),
h('span', { class: 'sr-only' }, ' '),
), ),
h('td', { class: 'amount', style: 'text-align:right;' }, toLocaleNum(note.value)),
// Value (right-aligned)
h('td', { class: 'amount', style: 'text-align:right;' }, toLocaleNum(note?.value)),
), ),
), ),
), ),
@ -208,25 +296,25 @@ function OutputsTable({ outputs }) {
} }
function Ledger({ ledger }) { function Ledger({ ledger }) {
const { inputs, outputs, totalOutputValue } = ledger; const inputs = Array.isArray(ledger?.inputs) ? ledger.inputs : [];
const outputs = Array.isArray(ledger?.outputs) ? ledger.outputs : [];
// Sum inputs as integers (Fr is declared as int in your schema) const totalInputValue = inputs.reduce((s, v) => s + toNumber(v), 0);
const totalInputValue = inputs.reduce((sum, v) => sum + Number(v ?? 0), 0); const totalOutputValue = toNumber(ledger?.totalOutputValue);
return h( return h(
SectionCard, SectionCard,
{ title: 'Ledger Transaction' }, { title: 'Ledger' },
h( h(
'div', 'div',
{ style: 'display:grid; gap:16px;' }, { style: 'display:grid; gap:16px;' },
// Inputs (with Total on the right) // Inputs
h( h(
'div', 'div',
null, null,
h( h(
'div', 'div',
{ style: 'display:flex; align-items:center; gap:8px;' }, { style: 'display:flex; alignItems:center; gap:8px;' },
h('b', null, 'Inputs'), h('b', null, 'Inputs'),
h('span', { class: 'pill' }, String(inputs.length)), h('span', { class: 'pill' }, String(inputs.length)),
h( h(
@ -238,13 +326,13 @@ function Ledger({ ledger }) {
h(InputsTable, { inputs }), h(InputsTable, { inputs }),
), ),
// Outputs (unchanged header total) // Outputs
h( h(
'div', 'div',
null, null,
h( h(
'div', 'div',
{ style: 'display:flex; align-items:center; gap:8px;' }, { style: 'display:flex; alignItems:center; gap:8px;' },
h('b', null, 'Outputs'), h('b', null, 'Outputs'),
h('span', { class: 'pill' }, String(outputs.length)), h('span', { class: 'pill' }, String(outputs.length)),
h( h(
@ -317,7 +405,7 @@ export default function TransactionDetail({ parameters }) {
h( h(
'header', 'header',
{ style: 'display:flex; gap:12px; align-items:center; margin:12px 0;' }, { style: 'display:flex; gap:12px; alignItems:center; margin:12px 0;' },
h('a', { class: 'linkish', href: '/' }, '← Back'), h('a', { class: 'linkish', href: '/' }, '← Back'),
h('h1', { style: 'margin:0' }, pageTitle), h('h1', { style: 'margin:0' }, pageTitle),
), ),