Decouple models from node api.

This commit is contained in:
Alejandro Cabeza Romero 2025-10-30 11:48:34 +01:00
parent 68c5e45804
commit 7f1a543681
No known key found for this signature in database
GPG Key ID: DA3D14AE478030FD
53 changed files with 1695 additions and 616 deletions

View File

@ -1 +1 @@
3.13
3.14

View File

@ -1,7 +1,7 @@
[project]
name = "nomos-block-explorer"
version = "0.1.0"
requires-python = ">=3.13,<3.14"
requires-python = ">=3.14,<3.15"
dependencies = [
"fastapi~=0.118.0",
"httpx>=0.28.1",

View File

@ -18,7 +18,7 @@ def _into_ndjson_data(data: Data) -> bytes:
return data.model_dump_ndjson()
async def into_ndjson_stream(stream: Stream, bootstrap_data: Data = None) -> AsyncIterable[bytes]:
async def into_ndjson_stream(stream: Stream, *, bootstrap_data: Data = None) -> AsyncIterable[bytes]:
if bootstrap_data is not None:
ndjson_data = _into_ndjson_data(bootstrap_data)
if ndjson_data:

View File

@ -1,38 +1,32 @@
from http.client import NOT_FOUND
from typing import TYPE_CHECKING, AsyncIterator, List, Optional
from typing import TYPE_CHECKING, AsyncIterator, List
from fastapi import Path, Query
from rusty_results import Empty, Option, Some
from starlette.responses import JSONResponse, Response
from api.streams import into_ndjson_stream
from api.v1.serializers.blocks import BlockRead
from core.api import NBERequest, NDJsonStreamingResponse
from node.models.blocks import Block
from models.block import Block
if TYPE_CHECKING:
from core.app import NBE
async def _get_latest(request: NBERequest, limit: int) -> List[BlockRead]:
blocks = await request.app.state.block_repository.get_latest(limit=limit, ascending=True)
return [BlockRead.from_block(block) for block in blocks]
async def _prefetch_blocks(request: NBERequest, prefetch_limit: int) -> List[BlockRead]:
return [] if prefetch_limit == 0 else await _get_latest(request, prefetch_limit)
async def _updates_stream(app: "NBE", latest_block: Optional[Block]) -> AsyncIterator[List[BlockRead]]:
_stream = app.state.block_repository.updates_stream(block_from=latest_block)
async def _get_blocks_stream_serialized(app: "NBE", block_from: Option[Block]) -> AsyncIterator[List[BlockRead]]:
_stream = app.state.block_repository.updates_stream(block_from)
async for blocks in _stream:
yield [BlockRead.from_block(block) for block in blocks]
async def stream(request: NBERequest, prefetch_limit: int = Query(0, alias="prefetch-limit", ge=0)) -> Response:
bootstrap_blocks: List[BlockRead] = await _prefetch_blocks(request, prefetch_limit)
latest_block = bootstrap_blocks[-1] if bootstrap_blocks else None
updates_stream: AsyncIterator[List[BlockRead]] = _updates_stream(request.app, latest_block)
ndjson_blocks_stream = into_ndjson_stream(stream=updates_stream, bootstrap_data=bootstrap_blocks)
latest_blocks = await request.app.state.block_repository.get_latest(prefetch_limit)
latest_block = Some(latest_blocks[-1]) if latest_blocks else Empty()
bootstrap_blocks: List[BlockRead] = [BlockRead.from_block(block) for block in latest_blocks]
blocks_stream: AsyncIterator[List[BlockRead]] = _get_blocks_stream_serialized(request.app, latest_block)
ndjson_blocks_stream = into_ndjson_stream(blocks_stream, bootstrap_data=bootstrap_blocks)
return NDJsonStreamingResponse(ndjson_blocks_stream)

View File

@ -5,22 +5,24 @@ from starlette.responses import JSONResponse, Response
from api.streams import into_ndjson_stream
from core.api import NBERequest, NDJsonStreamingResponse
from models.health import Health
from node.api.base import NodeApi
from node.models.health import Health
from node.api.serializers.health import HealthSerializer
async def get(request: NBERequest) -> Response:
response = await request.app.state.node_api.get_health_check()
response = await request.app.state.node_api.get_health()
return JSONResponse(response)
async def _health_iterator(node_api: NodeApi) -> AsyncIterator[Health]:
async def _create_health_stream(node_api: NodeApi, *, poll_interval_seconds: int = 10) -> AsyncIterator[Health]:
while True:
yield await node_api.get_health_check()
await sleep(10)
health_serializer: HealthSerializer = await node_api.get_health()
yield health_serializer.into_health()
await sleep(poll_interval_seconds)
async def stream(request: NBERequest) -> Response:
_stream = _health_iterator(request.app.state.node_api)
health_stream = into_ndjson_stream(stream=_stream)
return NDJsonStreamingResponse(health_stream)
health_stream = _create_health_stream(request.app.state.node_api)
ndjson_health_stream = into_ndjson_stream(health_stream)
return NDJsonStreamingResponse(ndjson_health_stream)

View File

@ -1,21 +1,29 @@
from typing import List, Self
from core.models import NbeSchema
from node.models.blocks import Block, Header
from node.models.transactions import Transaction
from core.types import HexBytes
from models.block import Block
from models.header.proof_of_leadership import ProofOfLeadership
from models.transactions.transaction import Transaction
class BlockRead(NbeSchema):
id: int
hash: HexBytes
parent_block_hash: HexBytes
slot: int
header: Header
block_root: HexBytes
proof_of_leadership: ProofOfLeadership
transactions: List[Transaction]
@classmethod
def from_block(cls, block: Block) -> Self:
return cls(
id=block.id,
slot=block.header.slot,
header=block.header,
hash=block.hash,
parent_block_hash=block.parent_block,
slot=block.slot,
block_root=block.block_root,
proof_of_leadership=block.proof_of_leadership,
transactions=block.transactions,
)

View File

@ -1,14 +1,21 @@
from typing import List, Self
from core.models import NbeSchema
from node.models.transactions import Gas, LedgerTransaction, Transaction
from core.types import HexBytes
from models.aliases import Gas
from models.transactions.notes import Note
from models.transactions.operations.operation import Operation
from models.transactions.transaction import Transaction
class TransactionRead(NbeSchema):
id: int
block_id: int
operations: List[str]
ledger_transaction: LedgerTransaction
hash: HexBytes
operations: List[Operation]
inputs: List[HexBytes]
outputs: List[Note]
proof: HexBytes
execution_gas_price: Gas
storage_gas_price: Gas
@ -16,9 +23,12 @@ class TransactionRead(NbeSchema):
def from_transaction(cls, transaction: Transaction) -> Self:
return cls(
id=transaction.id,
block_id=transaction.block_id,
block_id=transaction.block.id,
hash=transaction.hash,
operations=transaction.operations,
ledger_transaction=transaction.ledger_transaction,
inputs=transaction.inputs,
outputs=transaction.outputs,
proof=transaction.proof,
execution_gas_price=transaction.execution_gas_price,
storage_gas_price=transaction.storage_gas_price,
)

View File

@ -1,35 +1,38 @@
from http.client import NOT_FOUND
from typing import TYPE_CHECKING, AsyncIterator, List, Optional
from typing import TYPE_CHECKING, AsyncIterator, List
from fastapi import Path, Query
from rusty_results import Empty, Option, Some
from starlette.responses import JSONResponse, Response
from api.streams import into_ndjson_stream
from api.v1.serializers.transactions import TransactionRead
from core.api import NBERequest, NDJsonStreamingResponse
from node.models.transactions import Transaction
from models.transactions.transaction import Transaction
if TYPE_CHECKING:
from core.app import NBE
async def _updates_stream(
app: "NBE", latest_transaction: Optional[Transaction]
async def _get_transactions_stream_serialized(
app: "NBE", transaction_from: Option[Transaction]
) -> AsyncIterator[List[TransactionRead]]:
_stream = app.state.transaction_repository.updates_stream(transaction_from=latest_transaction)
_stream = app.state.transaction_repository.updates_stream(transaction_from)
async for transactions in _stream:
yield [TransactionRead.from_transaction(transaction) for transaction in transactions]
async def stream(request: NBERequest, prefetch_limit: int = Query(0, alias="prefetch-limit", ge=0)) -> Response:
latest_transactions: List[Transaction] = await request.app.state.transaction_repository.get_latest(
limit=prefetch_limit, ascending=True, preload_relationships=True
prefetch_limit, ascending=True, preload_relationships=True
)
latest_transaction = latest_transactions[-1] if latest_transactions else None
latest_transaction_read = [TransactionRead.from_transaction(transaction) for transaction in latest_transactions]
latest_transaction = Some(latest_transactions[-1]) if latest_transactions else Empty()
bootstrap_transactions = [TransactionRead.from_transaction(transaction) for transaction in latest_transactions]
updates_stream: AsyncIterator[List[TransactionRead]] = _updates_stream(request.app, latest_transaction)
ndjson_transactions_stream = into_ndjson_stream(stream=updates_stream, bootstrap_data=latest_transaction_read)
transactions_stream: AsyncIterator[List[TransactionRead]] = _get_transactions_stream_serialized(
request.app, latest_transaction
)
ndjson_transactions_stream = into_ndjson_stream(transactions_stream, bootstrap_data=bootstrap_transactions)
return NDJsonStreamingResponse(ndjson_transactions_stream)

View File

@ -28,6 +28,10 @@ class NbeSchema(NdjsonMixin, BaseModel):
return self.model_dump_json()
class NbeSerializer(NbeSchema):
pass
# --- SQLModel ---

View File

@ -54,6 +54,9 @@ class PydanticJsonColumn(TypeDecorator, Generic[T]):
cache_ok = True
def __init__(self, model: type[T], *, many: bool = False) -> None:
"""
The passed model must be a non-list type. To specify a list of models, pass `many=True`.
"""
super().__init__()
self.many = many
self._ta = _TypeAdapter(List[model] if many else model)
@ -74,7 +77,8 @@ class PydanticJsonColumn(TypeDecorator, Generic[T]):
model_value = self._ta.validate_python(value)
# Dump to plain Python (dict/list) for the JSON column
return self._ta.dump_python(model_value, mode="json")
plain = self._ta.dump_python(model_value, mode="json")
return plain
# DB -> Python (on SELECT)
def process_result_value(self, value: Any, _dialect):

13
src/core/types.py Normal file
View File

@ -0,0 +1,13 @@
from typing import Annotated
from pydantic import AfterValidator, BeforeValidator, PlainSerializer
def hexify(data: bytes) -> str:
    """Render *data* as lowercase hexadecimal text, two digits per byte."""
    return "".join(f"{byte:02x}" for byte in data)
# Bytes that stay raw `bytes` in Python but serialize to a lowercase hex string
# in JSON output. `when_used="json"` leaves python-mode dumps untouched.
HexBytes = Annotated[
    bytes,
    PlainSerializer(hexify, return_type=str, when_used="json"),
]

View File

@ -1,27 +1,26 @@
import logging
from asyncio import sleep
from typing import AsyncIterator, List, Optional
from typing import AsyncIterator, List
from rusty_results import Empty, Option, Some
from sqlalchemy import Result, Select
from sqlalchemy.orm import aliased
from sqlmodel import select
from core.db import jget, order_by_json
from db.clients import DbClient
from node.models.blocks import Block
from models.block import Block
def get_latest_statement(limit: int, latest_ascending: bool = True) -> Select:
# Fetch latest
descending = order_by_json(Block.header, "$.slot", into_type="int", descending=True)
inner = select(Block).order_by(descending, Block.id.desc()).limit(limit).subquery()
def get_latest_statement(limit: int, *, output_ascending: bool = True) -> Select:
# Fetch the latest N blocks in descending slot order
base = select(Block).order_by(Block.slot.desc(), Block.id.desc()).limit(limit)
if not output_ascending:
return base
# Reorder
# Reorder for output
inner = base.subquery()
latest = aliased(Block, inner)
latest_order = order_by_json(latest.header, "$.slot", into_type="int", descending=(not latest_ascending))
id_order = latest.id.asc() if latest_ascending else latest.id.desc()
statement = select(latest).order_by(latest_order, id_order) # type: ignore[arg-type]
return statement
return select(latest).options().order_by(latest.slot.asc(), latest.id.asc()) # type: ignore[arg-type]
class BlockRepository:
@ -37,51 +36,66 @@ class BlockRepository:
session.add_all(list(blocks))
session.commit()
async def get_latest(self, limit: int, *, ascending: bool = True) -> List[Block]:
statement = get_latest_statement(limit, ascending)
with self.client.session() as session:
results: Result[Block] = session.exec(statement)
return results.all()
async def get_by_id(self, block_id: int) -> Option[Block]:
statement = select(Block).where(Block.id == block_id)
with self.client.session() as session:
result: Result[Block] = session.exec(statement)
if (block := result.first()) is not None:
if (block := result.one_or_none()) is not None:
return Some(block)
else:
return Empty()
async def get_by_hash(self, block_hash: str) -> Option[Block]:
statement = select(Block).where(Block.hash == block_hash)
with self.client.session() as session:
result: Result[Block] = session.exec(statement)
if (block := result.one_or_none()) is not None:
return Some(block)
else:
return Empty()
async def get_latest(self, limit: int, *, ascending: bool = True) -> List[Block]:
if limit == 0:
return []
statement = get_latest_statement(limit, output_ascending=ascending)
with self.client.session() as session:
results: Result[Block] = session.exec(statement)
b = results.all()
return b
async def get_earliest(self) -> Option[Block]:
statement = select(Block).order_by(Block.slot.asc()).limit(1)
with self.client.session() as session:
results: Result[Block] = session.exec(statement)
if (block := results.one_or_none()) is not None:
return Some(block)
else:
return Empty()
async def updates_stream(
self, block_from: Optional[Block], *, timeout_seconds: int = 1
self, block_from: Option[Block], *, timeout_seconds: int = 1
) -> AsyncIterator[List[Block]]:
# FIXME
slot_cursor = block_from.slot + 1 if block_from is not None else 0
block_slot_expression = jget(Block.header, "$.slot", into_type="int")
order = order_by_json(Block.header, "$.slot", into_type="int", descending=False)
slot_cursor: int = block_from.map(lambda block: block.slot).unwrap_or(0)
id_cursor: int = block_from.map(lambda block: block.id + 1).unwrap_or(0)
while True:
where_clause = block_slot_expression >= slot_cursor
statement = select(Block).where(where_clause).order_by(order)
statement = (
select(Block)
.where(Block.slot >= slot_cursor, Block.id >= id_cursor)
.order_by(Block.slot.asc(), Block.id.asc())
)
with self.client.session() as session:
blocks: List[Block] = session.exec(statement).all()
if len(blocks) > 0:
slot_cursor = blocks[-1].slot + 1
slot_cursor = blocks[-1].slot
id_cursor = blocks[-1].id + 1
yield blocks
else:
await sleep(timeout_seconds)
async def get_earliest(self) -> Option[Block]:
order = order_by_json(Block.header, "$.slot", into_type="int", descending=False)
statement = select(Block).order_by(order).limit(1)
with self.client.session() as session:
results: Result[Block] = session.exec(statement)
if (block := results.first()) is not None:
return Some(block)
else:
return Empty()

View File

@ -1,39 +1,32 @@
import logging
from asyncio import sleep
from typing import AsyncIterator, Iterable, List, Optional
from typing import AsyncIterator, List
from rusty_results import Empty, Option, Some
from sqlalchemy import Result, Select
from sqlalchemy.orm import aliased, selectinload
from sqlmodel import select
from core.db import jget, order_by_json
from db.clients import DbClient
from node.models.transactions import Transaction
from models.block import Block
from models.transactions.transaction import Transaction
def get_latest_statement(
limit: int, output_ascending: bool = True, preload_relationships: bool = False, **kwargs
) -> Select:
from node.models.blocks import Block
# Join with Block to order by Block's slot
slot_expr = jget(Block.header, "$.slot", into_type="int").label("slot")
slot_desc = order_by_json(Block.header, "$.slot", into_type="int", descending=True)
inner = (
select(Transaction, slot_expr)
.join(Block, Transaction.block_id == Block.id, isouter=False)
.order_by(slot_desc, Block.id.desc())
def get_latest_statement(limit: int, *, output_ascending: bool, preload_relationships: bool) -> Select:
# Join with Block to order by Block's slot and fetch the latest N transactions in descending order
base = (
select(Transaction, Block.slot.label("block__slot"), Block.id.label("block__id"))
.join(Block, Transaction.block_id == Block.id)
.order_by(Block.slot.desc(), Block.id.desc(), Transaction.id.desc())
.limit(limit)
.subquery()
)
if not output_ascending:
return base
# Reorder
# Reorder for output
inner = base.subquery()
latest = aliased(Transaction, inner)
output_slot_order = inner.c.slot.asc() if output_ascending else inner.c.slot.desc()
output_id_order = (
latest.id.asc() if output_ascending else latest.id.desc()
) # TODO: Double check it's Transaction.id
statement = select(latest).order_by(output_slot_order, output_id_order)
statement = select(latest).order_by(inner.c.block__slot.asc(), inner.c.block__id.asc(), latest.id.asc())
if preload_relationships:
statement = statement.options(selectinload(latest.block))
return statement
@ -43,54 +36,70 @@ class TransactionRepository:
def __init__(self, client: DbClient):
self.client = client
async def create(self, transaction: Iterable[Transaction]) -> None:
async def create(self, *transaction: Transaction) -> None:
with self.client.session() as session:
session.add_all(transaction)
session.add_all(list(transaction))
session.commit()
async def get_latest(self, limit: int, *, ascending: bool = True, **kwargs) -> List[Transaction]:
statement = get_latest_statement(limit, ascending, **kwargs)
with self.client.session() as session:
results: Result[Transaction] = session.exec(statement)
return results.all()
async def get_by_id(self, transaction_id: int) -> Option[Transaction]:
statement = select(Transaction).where(Transaction.id == transaction_id)
with self.client.session() as session:
result: Result[Transaction] = session.exec(statement)
if (transaction := result.first()) is not None:
if (transaction := result.one_or_none()) is not None:
return Some(transaction)
else:
return Empty()
async def updates_stream(
self, transaction_from: Optional[Transaction], *, timeout_seconds: int = 1
) -> AsyncIterator[List[Transaction]]:
from node.models.blocks import Block
async def get_by_hash(self, transaction_hash: str) -> Option[Transaction]:
statement = select(Transaction).where(Transaction.hash == transaction_hash)
slot_cursor: int = transaction_from.block.slot + 1 if transaction_from is not None else 0
slot_expression = jget(Block.header, "$.slot", into_type="int")
slot_order = order_by_json(Block.header, "$.slot", into_type="int", descending=False)
with self.client.session() as session:
result: Result[Transaction] = session.exec(statement)
if (transaction := result.one_or_none()) is not None:
return Some(transaction)
else:
return Empty()
async def get_latest(
self, limit: int, *, ascending: bool = False, preload_relationships: bool = False
) -> List[Transaction]:
if limit == 0:
return []
statement = get_latest_statement(limit, output_ascending=ascending, preload_relationships=preload_relationships)
with self.client.session() as session:
results: Result[Transaction] = session.exec(statement)
return results.all()
async def updates_stream(
self, transaction_from: Option[Transaction], *, timeout_seconds: int = 1
) -> AsyncIterator[List[Transaction]]:
slot_cursor = transaction_from.map(lambda transaction: transaction.block.slot).unwrap_or(0)
block_id_cursor = transaction_from.map(lambda transaction: transaction.block.id).unwrap_or(0)
transaction_id_cursor = transaction_from.map(lambda transaction: transaction.id + 1).unwrap_or(0)
while True:
where_clause_slot = slot_expression >= slot_cursor
where_clause_id = Transaction.id > transaction_from.id if transaction_from is not None else True
statement = (
select(Transaction)
select(Transaction, Block.slot, Block.id)
.options(selectinload(Transaction.block))
.join(Block, Transaction.block_id == Block.id)
.where(where_clause_slot, where_clause_id)
.order_by(slot_order, Block.id.asc(), Transaction.id.asc())
.where(
Block.slot >= slot_cursor,
Block.id >= block_id_cursor,
Transaction.id >= transaction_id_cursor,
)
.order_by(Block.slot.asc(), Block.id.asc(), Transaction.id.asc())
)
with self.client.session() as session:
transactions: List[Transaction] = session.exec(statement).all()
if len(transactions) > 0:
slot_cursor = transactions[-1].block.slot + 1
slot_cursor = transactions[-1].block.slot
block_id_cursor = transactions[-1].block.id
transaction_id_cursor = transactions[-1].id + 1
yield transactions
else:
await sleep(timeout_seconds)

4
src/models/__init__.py Normal file
View File

@ -0,0 +1,4 @@
from .block import Block
from .header import ProofOfLeadership
from .health import Health
from .transactions import Transaction

4
src/models/aliases.py Normal file
View File

@ -0,0 +1,4 @@
from core.types import HexBytes
# Field element, hex-encoded in JSON — presumably a curve scalar ("Fr"); TODO confirm.
Fr = HexBytes
# Gas prices/amounts are plain integers.
Gas = int

78
src/models/block.py Normal file
View File

@ -0,0 +1,78 @@
import logging
import os
import random
from typing import TYPE_CHECKING, Any, List, Self
from pydantic.config import ExtraValues
from sqlalchemy import Column
from sqlmodel import Field, Relationship
from core.models import TimestampedModel
from core.sqlmodel import PydanticJsonColumn
from core.types import HexBytes
from models.header.proof_of_leadership import ProofOfLeadership
if TYPE_CHECKING:
from models.transactions.transaction import Transaction
logger = logging.getLogger(__name__)
def _should_randomize_transactions():
is_debug = os.getenv("DEBUG", "False").lower() == "true"
is_debug__randomize_transactions = os.getenv("DEBUG__RANDOMIZE_TRANSACTIONS", "False").lower() == "true"
return is_debug and is_debug__randomize_transactions
class Block(TimestampedModel, table=True):
    """Persisted block row: header fields flattened into columns, plus the
    transactions it contains."""

    __tablename__ = "block"

    # --- Columns --- #
    hash: HexBytes = Field(nullable=False, unique=True)
    parent_block: HexBytes = Field(nullable=False)
    slot: int = Field(nullable=False)
    block_root: HexBytes = Field(nullable=False)
    # Stored as a JSON column validated through the ProofOfLeadership union.
    proof_of_leadership: ProofOfLeadership = Field(
        sa_column=Column(PydanticJsonColumn(ProofOfLeadership), nullable=False)
    )

    # --- Relationships --- #
    # Eagerly loaded via selectin, presumably so transactions are usable after
    # the session closes — confirm against repository usage.
    transactions: List["Transaction"] = Relationship(
        back_populates="block",
        sa_relationship_kwargs={"lazy": "selectin"},
    )

    def __str__(self) -> str:
        return f"Block(slot={self.slot})"

    def __repr__(self) -> str:
        # Fix: the previous implementation read self.header["parent_block"], but
        # the `header` field no longer exists on this model (it was flattened
        # into columns), so repr() raised AttributeError.
        return f"<Block(id={self.id}, created_at={self.created_at}, slot={self.slot}, parent={self.parent_block})>"

    def with_transactions(self, transactions: List["Transaction"]) -> Self:
        """Attach *transactions* to this block and return self (fluent helper)."""
        self.transactions = transactions
        return self

    @classmethod
    def model_validate_json(
        cls,
        json_data: str | bytes | bytearray,
        *,
        strict: bool | None = None,
        extra: ExtraValues | None = None,
        context: Any | None = None,
        by_alias: bool | None = None,
        by_name: bool | None = None,
    ) -> Self:
        """Validate JSON into a Block; when the debug env flags are enabled,
        replace its transactions with randomly generated ones."""
        self = super().model_validate_json(
            json_data, strict=strict, extra=extra, context=context, by_alias=by_alias, by_name=by_name
        )
        if _should_randomize_transactions():
            from models.transactions.transaction import Transaction

            logger.debug("DEBUG and DEBUG__RANDOMIZE_TRANSACTIONS are enabled, randomizing Block's transactions.")
            # Fix: `random.randint(0, 1) <= 0.3` compared an int (0 or 1) to 0.3,
            # which emptied the block 50% of the time; use a real probability so
            # roughly 30% of randomized blocks come out empty.
            n_transactions = 0 if random.random() <= 0.3 else random.randint(1, 5)
            self.transactions = [Transaction.from_random() for _ in range(n_transactions)]
        return self

View File

@ -0,0 +1,2 @@
from .proof_of_leadership import ProofOfLeadership
from .public import Public

View File

@ -0,0 +1,26 @@
from enum import Enum
from typing import Optional, Union
from core.models import NbeSchema
from core.types import HexBytes
from models.header.public import Public
class ProofOfLeadershipType(Enum):
    """Tag identifying the proof-of-leadership variant carried by a header."""

    GROTH16 = "GROTH16"  # currently the only supported proof system
class NbeProofOfLeadership(NbeSchema):
    """Base shape shared by all proof-of-leadership variants."""
    # Discriminator; concrete subclasses pin this to their own variant.
    type: ProofOfLeadershipType
class Groth16ProofOfLeadership(NbeProofOfLeadership):
    """Groth16 proof of leadership as stored in a block's header."""
    type: ProofOfLeadershipType = ProofOfLeadershipType.GROTH16
    entropy_contribution: HexBytes
    leader_key: HexBytes
    proof: HexBytes
    # May be None when the proof carries no public inputs — TODO confirm with the node API.
    public: Optional[Public]
    voucher_cm: HexBytes
# Single-variant union today; presumably kept as a Union so additional proof
# systems can be added without changing annotations elsewhere.
ProofOfLeadership = Union[Groth16ProofOfLeadership]

View File

@ -0,0 +1,10 @@
from core.models import NbeSchema
from core.types import HexBytes
class Public(NbeSchema):
    """Public inputs of a Groth16 proof of leadership (see Groth16ProofOfLeadership.public)."""
    aged_root: HexBytes
    epoch_nonce: HexBytes
    latest_root: HexBytes
    slot: int
    total_stake: int

11
src/models/health.py Normal file
View File

@ -0,0 +1,11 @@
from core.models import NbeSchema
class Health(NbeSchema):
    """Single boolean health status for the node."""

    healthy: bool

    def __str__(self) -> str:
        if self.healthy:
            return "Healthy"
        return "Unhealthy"

    def __repr__(self) -> str:
        return f"<Health(healthy={self.healthy})>"

View File

@ -0,0 +1,3 @@
from .notes import Note
from .operations import Operation
from .transaction import Transaction

View File

@ -0,0 +1,7 @@
from core.models import NbeSchema
from core.types import HexBytes
class Note(NbeSchema):
    """A transaction output note: a value bound to a public key."""
    value: int
    public_key: HexBytes

View File

@ -0,0 +1,3 @@
from .contents import OperationContent
from .operation import Operation
from .proofs import OperationProof

View File

@ -0,0 +1,80 @@
from enum import Enum
from typing import List, Optional
from core.models import NbeSchema
from core.types import HexBytes
class ContentType(Enum):
    """String tag identifying each operation-content variant."""

    # Channel operations
    CHANNEL_INSCRIBE = "ChannelInscribe"
    CHANNEL_BLOB = "ChannelBlob"
    CHANNEL_SET_KEYS = "ChannelSetKeys"
    # Service declaration protocol (SDP) operations
    SDP_DECLARE = "SDPDeclare"
    SDP_WITHDRAW = "SDPWithdraw"
    SDP_ACTIVE = "SDPActive"
    # Leader reward claim
    LEADER_CLAIM = "LeaderClaim"
class NbeContent(NbeSchema):
    """Base shape shared by every operation-content variant."""
    # Discriminator; concrete subclasses pin this to their own ContentType.
    type: ContentType
class ChannelInscribe(NbeContent):
    """Channel inscription payload."""
    type: ContentType = ContentType.CHANNEL_INSCRIBE
    channel_id: HexBytes
    inscription: HexBytes
    parent: HexBytes
    signer: HexBytes
class ChannelBlob(NbeContent):
    """Channel blob payload (carries DA storage pricing)."""
    type: ContentType = ContentType.CHANNEL_BLOB
    channel: HexBytes
    blob: HexBytes
    blob_size: int
    da_storage_gas_price: int
    parent: HexBytes
    signer: HexBytes
class ChannelSetKeys(NbeContent):
    """Replace the key set of a channel."""
    type: ContentType = ContentType.CHANNEL_SET_KEYS
    channel: HexBytes
    # NOTE(review): plain `bytes`, unlike the HexBytes used elsewhere — these
    # will not hex-encode on JSON dump; confirm this is intended.
    keys: List[bytes]
class SDPDeclareServiceType(Enum):
    # Service kinds a provider can declare.
    BN = "BN"
    DA = "DA"
class SDPDeclare(NbeContent):
    """Declare a service provider."""
    type: ContentType = ContentType.SDP_DECLARE
    service_type: SDPDeclareServiceType
    # NOTE(review): plain `bytes` (no hex JSON encoding) — confirm intended.
    locators: List[bytes]
    provider_id: HexBytes
    zk_id: HexBytes
    locked_note_id: HexBytes
class SDPWithdraw(NbeContent):
    """Withdraw a prior service declaration."""
    type: ContentType = ContentType.SDP_WITHDRAW
    declaration_id: HexBytes
    nonce: HexBytes
class SDPActive(NbeContent):
    """Mark a declaration as active, with optional metadata."""
    type: ContentType = ContentType.SDP_ACTIVE
    declaration_id: HexBytes
    nonce: HexBytes
    metadata: Optional[bytes]
class LeaderClaim(NbeContent):
    """Claim leader rewards against a rewards root."""
    type: ContentType = ContentType.LEADER_CLAIM
    rewards_root: HexBytes
    voucher_nullifier: HexBytes
    mantle_tx_hash: HexBytes
# Union of all concrete content variants. No pydantic discriminator is
# configured here — TODO confirm how variant resolution behaves on validation.
OperationContent = ChannelInscribe | ChannelBlob | ChannelSetKeys | SDPDeclare | SDPWithdraw | SDPActive | LeaderClaim

View File

@ -0,0 +1,8 @@
from core.models import NbeSchema
from models.transactions.operations.contents import NbeContent
from models.transactions.operations.proofs import OperationProof
class Operation(NbeSchema):
    """One operation inside a transaction: typed content plus its authorizing proof."""
    # NOTE(review): annotated with the base class NbeContent rather than the
    # OperationContent union exported by .contents — pydantic may validate
    # incoming JSON down to the base model and drop subclass fields; confirm
    # whether `content: OperationContent` was intended.
    content: NbeContent
    proof: OperationProof

View File

@ -0,0 +1,33 @@
from enum import Enum
from core.models import NbeSchema
from core.types import HexBytes
class SignatureType(Enum):
    """Tag for the supported operation-proof signature schemes."""

    ED25519 = "Ed25519"  # plain Ed25519 signature
    ZK = "Zk"  # Zk signature
    ZK_AND_ED25519 = "ZkAndEd25519"  # both signatures combined
class NbeSignature(NbeSchema):
    """Base shape shared by the operation-proof signature variants."""
    # Discriminator; concrete subclasses pin this to their own SignatureType.
    type: SignatureType
class Ed25519Signature(NbeSignature):
    type: SignatureType = SignatureType.ED25519
    signature: HexBytes
class ZkSignature(NbeSignature):
    type: SignatureType = SignatureType.ZK
    signature: HexBytes
class ZkAndEd25519Signature(NbeSignature):
    # Carries both signature kinds side by side.
    type: SignatureType = SignatureType.ZK_AND_ED25519
    zk_signature: HexBytes
    ed25519_signature: HexBytes
# Union of all supported proof variants for an operation.
OperationProof = Ed25519Signature | ZkSignature | ZkAndEd25519Signature

View File

@ -0,0 +1,47 @@
import logging
from typing import List, Optional
from sqlalchemy import JSON, Column
from sqlmodel import Field, Relationship
from core.models import TimestampedModel
from core.sqlmodel import PydanticJsonColumn
from core.types import HexBytes
from models.aliases import Fr, Gas
from models.block import Block
from models.transactions.notes import Note
from models.transactions.operations.operation import Operation
logger = logging.getLogger(__name__)
class Transaction(TimestampedModel, table=True):
    """Persisted transaction row, linked to the block that contains it."""
    __tablename__ = "transaction"
    # --- Columns --- #
    # FK to block.id; Optional only so the object can exist before insert —
    # the column itself is NOT NULL.
    block_id: Optional[int] = Field(default=None, foreign_key="block.id", nullable=False)
    hash: HexBytes = Field(nullable=False, unique=True)
    # JSON columns validated through their pydantic models via PydanticJsonColumn.
    operations: List[Operation] = Field(
        default_factory=list, sa_column=Column(PydanticJsonColumn(Operation, many=True), nullable=False)
    )
    inputs: List[Fr] = Field(default_factory=list, sa_column=Column(PydanticJsonColumn(Fr, many=True), nullable=False))
    outputs: List[Note] = Field(
        default_factory=list, sa_column=Column(PydanticJsonColumn(Note, many=True), nullable=False)
    )
    # Proof is constrained to exactly 128 bytes.
    proof: HexBytes = Field(min_length=128, max_length=128, nullable=False)
    execution_gas_price: Gas
    storage_gas_price: Gas
    # --- Relationships --- #
    # Eagerly loaded via selectin — presumably so transaction.block is usable
    # outside the session; confirm against repository usage.
    block: Optional[Block] = Relationship(
        back_populates="transactions",
        sa_relationship_kwargs={"lazy": "selectin"},
    )
    def __str__(self) -> str:
        return f"Transaction({self.operations})"
    def __repr__(self) -> str:
        return f"<Transaction(id={self.id}, created_at={self.created_at}, operations={self.operations})>"

View File

@ -1,20 +1,19 @@
from abc import ABC, abstractmethod
from typing import AsyncIterator, List
from node.models.blocks import Block
from node.models.health import Health
from node.models.transactions import Transaction
from node.api.serializers.block import BlockSerializer
from node.api.serializers.health import HealthSerializer
class NodeApi(ABC):
@abstractmethod
async def get_health_check(self) -> Health:
async def get_health(self) -> HealthSerializer:
pass
@abstractmethod
async def get_blocks(self, **kwargs) -> List[Block]:
async def get_blocks(self, **kwargs) -> List[BlockSerializer]:
pass
@abstractmethod
async def get_blocks_stream(self) -> AsyncIterator[List[Block]]:
async def get_blocks_stream(self) -> AsyncIterator[List[BlockSerializer]]:
pass

View File

@ -1,10 +1,12 @@
from asyncio import sleep
from random import choices, random
from typing import AsyncIterator, List
from rusty_results import Some
from node.api.base import NodeApi
from node.models.blocks import Block
from node.models.health import Health
from node.models.transactions import Transaction
from node.api.serializers.block import BlockSerializer
from node.api.serializers.health import HealthSerializer
def get_weighted_amount() -> int:
@ -14,15 +16,24 @@ def get_weighted_amount() -> int:
class FakeNodeApi(NodeApi):
async def get_health_check(self) -> Health:
def __init__(self):
self.current_slot: int = 0
async def get_health(self) -> HealthSerializer:
if random() < 0.1:
return Health.from_unhealthy()
return HealthSerializer.from_unhealthy()
else:
return Health.from_healthy()
return HealthSerializer.from_healthy()
async def get_blocks(self) -> List[Block]:
return [Block.from_random() for _ in range(1)]
async def get_blocks(self, **kwargs) -> List[BlockSerializer]:
n = get_weighted_amount()
assert n >= 1
blocks = [BlockSerializer.from_random() for _ in range(n)]
self.current_slot = max(blocks, key=lambda block: block.slot).slot
return blocks
async def get_blocks_stream(self) -> AsyncIterator[Block]:
async def get_blocks_stream(self) -> AsyncIterator[BlockSerializer]:
while True:
yield Block.from_random()
yield BlockSerializer.from_random(slot=Some(self.current_slot))
self.current_slot += 1
await sleep(3)

View File

@ -6,9 +6,8 @@ import httpx
import requests
from node.api.base import NodeApi
from node.models.blocks import Block
from node.models.health import Health
from node.models.transactions import Transaction
from node.api.serializers.block import BlockSerializer
from node.api.serializers.health import HealthSerializer
logger = logging.getLogger(__name__)
@ -29,24 +28,24 @@ class HttpNodeApi(NodeApi):
def base_url(self):
return f"{self.protocol}://{self.host}:{self.port}"
async def get_health_check(self) -> Health:
async def get_health(self) -> HealthSerializer:
url = urljoin(self.base_url, self.ENDPOINT_INFO)
response = requests.get(url, timeout=60)
if response.status_code == 200:
return Health.from_healthy()
return HealthSerializer.from_healthy()
else:
return Health.from_unhealthy()
return HealthSerializer.from_unhealthy()
async def get_blocks(self, slot_from: int, slot_to: int) -> List[Block]:
async def get_blocks(self, slot_from: int, slot_to: int) -> List[BlockSerializer]:
query_string = f"slot_from={slot_from}&slot_to={slot_to}"
endpoint = urljoin(self.base_url, self.ENDPOINT_BLOCKS)
url = f"{endpoint}?{query_string}"
response = requests.get(url, timeout=60)
python_json = response.json()
blocks = [Block.model_validate(item) for item in python_json]
blocks = [BlockSerializer.model_validate(item) for item in python_json]
return blocks
async def get_blocks_stream(self) -> AsyncIterator[Block]:
async def get_blocks_stream(self) -> AsyncIterator[BlockSerializer]:
url = urljoin(self.base_url, self.ENDPOINT_BLOCKS_STREAM)
async with httpx.AsyncClient(timeout=self.timeout) as client:
@ -56,6 +55,12 @@ class HttpNodeApi(NodeApi):
async for line in response.aiter_lines():
if not line:
continue
block = Block.model_validate_json(line)
try:
block = BlockSerializer.model_validate_json(line)
except Exception as e:
import traceback
traceback.print_exc()
raise e
logger.debug(f"Received new block from Node: {block}")
yield block

View File

@ -0,0 +1,34 @@
from random import randint
from typing import List, Self
from rusty_results import Empty, Option
from core.models import NbeSerializer
from models.block import Block
from node.api.serializers.header import HeaderSerializer
from node.api.serializers.signed_transaction import SignedTransactionSerializer
from utils.protocols import FromRandom
class BlockSerializer(NbeSerializer, FromRandom):
    """Wire-format representation of a block as returned by the node API."""

    header: HeaderSerializer
    transactions: List[SignedTransactionSerializer]

    def into_block(self) -> Block:
        """Convert this serializer into the domain ``Block`` model, attaching
        its converted transactions."""
        transactions = [transaction.into_transaction() for transaction in self.transactions]
        return Block.model_validate(
            {
                "hash": self.header.hash,
                "parent_block": self.header.parent_block,
                "slot": self.header.slot,
                "block_root": self.header.block_root,
                "proof_of_leadership": self.header.proof_of_leadership.into_proof_of_leadership(),
            }
        ).with_transactions(transactions)

    @classmethod
    def from_random(cls, *, slot: Option[int] = None) -> Self:
        """Build a random block.

        Args:
            slot: Optional slot override; ``None`` (the default) means "pick a
                random slot". Note the annotation says ``Option[int]`` but the
                sentinel is ``None`` — kept for interface compatibility.
        """
        # Explicit None check instead of `slot or Empty()`: the latter would
        # also replace a falsy-but-present Option (e.g. an explicit Empty()).
        if slot is None:
            slot = Empty()
        # 50% chance of a single transaction, otherwise 2-5 of them.
        n = 1 if randint(0, 1) == 0 else randint(2, 5)
        transactions = [SignedTransactionSerializer.from_random() for _ in range(n)]
        return cls.model_validate({"header": HeaderSerializer.from_random(slot=slot), "transactions": transactions})

View File

@ -0,0 +1,37 @@
from typing import Annotated
from pydantic import BeforeValidator, PlainSerializer, ValidationError
def bytes_from_intarray(data: list[int]) -> bytes:
    """Deserialize a list of integers (each 0-255) into ``bytes``.

    Raises:
        ValueError: If *data* is not a list, or contains non-integer items.
    """
    if not isinstance(data, list):
        raise ValueError(f"Unsupported data type for bytes deserialization. Expected list, got {type(data).__name__}.")
    if any(not isinstance(item, int) for item in data):
        raise ValueError("List items must be integers.")
    return bytes(data)
def bytes_from_hex(data: str) -> bytes:
    """Deserialize a hex string into ``bytes``.

    Raises:
        ValueError: If *data* is not a string (or, via ``bytes.fromhex``, not
            valid hexadecimal).
    """
    if isinstance(data, str):
        return bytes.fromhex(data)
    raise ValueError(
        f"Unsupported data type for bytes deserialization. Expected string, got {type(data).__name__}."
    )
def bytes_from_int(data: int) -> bytes:
if not isinstance(data, int):
raise ValueError(
f"Unsupported data type for bytes deserialization. Expected integer, got {type(data).__name__}."
)
return data.to_bytes((data.bit_length() + 7) // 8) # TODO: Ensure endianness is correct.
def bytes_into_hex(data: bytes) -> str:
    """Serialize raw bytes into a lowercase hex string."""
    return "".join(map("{:02x}".format, data))
# Reusable annotated byte types: each pairs an input deserializer
# (BeforeValidator) with hex output serialization (PlainSerializer).
BytesFromIntArray = Annotated[bytes, BeforeValidator(bytes_from_intarray), PlainSerializer(bytes_into_hex)]
BytesFromHex = Annotated[bytes, BeforeValidator(bytes_from_hex), PlainSerializer(bytes_into_hex)]
BytesFromInt = Annotated[bytes, BeforeValidator(bytes_from_int), PlainSerializer(bytes_into_hex)]

View File

@ -0,0 +1,34 @@
from random import randint
from typing import Self
from pydantic import Field
from rusty_results import Option, Some
from core.models import NbeSerializer
from node.api.serializers.fields import BytesFromHex
from node.api.serializers.proof_of_leadership import (
ProofOfLeadershipSerializer,
ProofOfLeadershipSerializerField,
)
from utils.protocols import FromRandom
from utils.random import random_hash
class HeaderSerializer(NbeSerializer, FromRandom):
    """Wire-format block header; field aliases follow the node's JSON schema."""

    hash: BytesFromHex = Field(alias="id", description="Hash id in hex format.")
    parent_block: BytesFromHex = Field(description="Hash in hex format.")
    slot: int = Field(description="Integer in u64 format.")
    block_root: BytesFromHex = Field(description="Hash in hex format.")
    proof_of_leadership: ProofOfLeadershipSerializerField

    @classmethod
    def from_random(cls, *, slot: Option[int]) -> Self:
        """Build a random header, pinning the slot when ``slot`` is ``Some``."""
        payload = {
            "id": random_hash().hex(),
            "parent_block": random_hash().hex(),
            "slot": slot.unwrap_or_else(lambda: randint(0, 10_000)),
            "block_root": random_hash().hex(),
            "proof_of_leadership": ProofOfLeadershipSerializer.from_random(slot=slot),
        }
        return cls.model_validate(payload)

View File

@ -0,0 +1,19 @@
from typing import Any, Self
from core.models import NbeSerializer
from models.health import Health
class HealthSerializer(NbeSerializer):
    """Wire-format health status reported by the node."""

    is_healthy: bool

    def into_health(self) -> Health:
        """Convert into the domain ``Health`` model."""
        return Health.model_validate({"healthy": self.is_healthy})

    @classmethod
    def _from_status(cls, healthy: bool) -> Self:
        # Shared constructor backing the two factory methods below.
        return cls.model_validate({"is_healthy": healthy})

    @classmethod
    def from_healthy(cls) -> Self:
        return cls._from_status(True)

    @classmethod
    def from_unhealthy(cls) -> Self:
        return cls._from_status(False)

View File

@ -0,0 +1,27 @@
from random import randint
from typing import List, Self
from pydantic import Field
from core.models import NbeSerializer
from node.api.serializers.fields import BytesFromIntArray
from node.api.serializers.note import NoteSerializer
from utils.protocols import FromRandom
from utils.random import random_bytes
class LedgerTransactionSerializer(NbeSerializer, FromRandom):
    """Wire-format ledger transaction: input commitments and output notes."""

    inputs: List[BytesFromIntArray] = Field(description="Fr integer.")
    outputs: List[NoteSerializer]

    @classmethod
    def from_random(cls) -> Self:
        """Build a random ledger transaction: 50% chance of no inputs
        (respectively outputs), otherwise 1-5 of each."""
        # Was `randint(0, 1) <= 0.5`, an int compared against a float that only
        # ever matched 0 — spelled out as an explicit coin flip.
        n_inputs = 0 if randint(0, 1) == 0 else randint(1, 5)
        n_outputs = 0 if randint(0, 1) == 0 else randint(1, 5)
        return cls.model_validate(
            {
                "inputs": [list(random_bytes(2048)) for _ in range(n_inputs)],
                "outputs": [NoteSerializer.from_random() for _ in range(n_outputs)],
            }
        )

View File

@ -0,0 +1,27 @@
from random import randint
from typing import Self
from pydantic import Field
from core.models import NbeSerializer
from models.transactions.notes import Note
from node.api.serializers.fields import BytesFromHex
from utils.protocols import FromRandom
from utils.random import random_bytes
class NoteSerializer(NbeSerializer, FromRandom):
    """Wire-format note: a value owned by a public key."""

    value: int = Field(description="Integer in u64 format.")
    public_key: BytesFromHex = Field(alias="pk", description="Fr integer.")

    def into_note(self) -> Note:
        """Convert into the domain ``Note`` model."""
        payload = {"value": self.value, "public_key": self.public_key}
        return Note.model_validate(payload)

    @classmethod
    def from_random(cls) -> Self:
        value = randint(1, 100)
        public_key = random_bytes(32).hex()
        return cls.model_validate({"value": value, "pk": public_key})

View File

@ -0,0 +1,233 @@
from abc import ABC, abstractmethod
from enum import Enum
from random import choice, randint
from typing import Annotated, List, Optional, Self, Union
from pydantic import Field
from core.models import NbeSerializer
from models.transactions.operations.contents import (
ChannelBlob,
ChannelInscribe,
ChannelSetKeys,
LeaderClaim,
NbeContent,
SDPActive,
SDPDeclare,
SDPWithdraw,
)
from node.api.serializers.fields import BytesFromHex, BytesFromInt, BytesFromIntArray
from utils.protocols import EnforceSubclassFromRandom
from utils.random import random_bytes
class OperationContentSerializer(NbeSerializer, EnforceSubclassFromRandom, ABC):
    """Base class for the wire-format payloads of transaction operations."""

    @abstractmethod
    def into_operation_content(self) -> NbeContent:
        """Convert this wire-format payload into its domain-model counterpart."""
        raise NotImplementedError
class ChannelInscribeSerializer(OperationContentSerializer):
    """Wire format for a channel inscription operation."""

    channel_id: BytesFromIntArray = Field(description="Bytes as a 32-integer array.")
    inscription: BytesFromIntArray = Field(description="Bytes as an integer array.")
    parent: BytesFromIntArray = Field(description="Bytes as a 32-integer array.")
    signer: BytesFromHex = Field(description="Public Key in hex format.")

    def into_operation_content(self) -> ChannelInscribe:
        """Convert into the domain ``ChannelInscribe`` model."""
        payload = {
            "channel_id": self.channel_id,
            "inscription": self.inscription,
            "parent": self.parent,
            "signer": self.signer,
        }
        return ChannelInscribe.model_validate(payload)

    @classmethod
    def from_random(cls) -> Self:
        def random_word() -> list:
            return list(random_bytes(32))

        return cls.model_validate(
            {
                "channel_id": random_word(),
                "inscription": random_word(),
                "parent": random_word(),
                "signer": random_bytes(32).hex(),
            }
        )
class ChannelBlobSerializer(OperationContentSerializer):
    """Wire format for publishing a data blob to a channel."""

    channel: BytesFromIntArray = Field(description="Bytes as a 32-integer array.")
    blob: BytesFromIntArray = Field(description="Bytes as a 32-integer array.")
    blob_size: int
    da_storage_gas_price: int
    parent: BytesFromIntArray = Field(description="Bytes as a 32-integer array.")
    signer: BytesFromHex = Field(description="Public Key in hex format.")

    def into_operation_content(self) -> ChannelBlob:
        """Convert into the domain ``ChannelBlob`` model."""
        payload = {
            "channel": self.channel,
            "blob": self.blob,
            "blob_size": self.blob_size,
            "da_storage_gas_price": self.da_storage_gas_price,
            "parent": self.parent,
            "signer": self.signer,
        }
        return ChannelBlob.model_validate(payload)

    @classmethod
    def from_random(cls) -> Self:
        def random_word() -> list:
            return list(random_bytes(32))

        return cls.model_validate(
            {
                "channel": random_word(),
                "blob": random_word(),
                "blob_size": randint(1, 1_024),
                "da_storage_gas_price": randint(1, 10_000),
                "parent": random_word(),
                "signer": random_bytes(32).hex(),
            }
        )
class ChannelSetKeysSerializer(OperationContentSerializer):
    """Wire format for replacing the set of keys authorized on a channel."""

    channel: BytesFromIntArray = Field(description="Bytes as a 32-integer array.")
    keys: List[BytesFromHex] = Field(description="List of Public Keys in hex format.")

    def into_operation_content(self) -> ChannelSetKeys:
        """Convert into the domain ``ChannelSetKeys`` model."""
        return ChannelSetKeys.model_validate(
            {
                "channel": self.channel,
                "keys": self.keys,
            }
        )

    @classmethod
    def from_random(cls) -> Self:
        """Build a random instance with 1-5 keys."""
        # Was `1 if randint(0, 1) <= 0.5 else randint(1, 5)`: an int compared
        # against a float, and both branches overlap at 1. Same distribution,
        # spelled as an explicit coin flip.
        n = 1 if randint(0, 1) == 0 else randint(1, 5)
        return cls.model_validate(
            {
                "channel": list(random_bytes(32)),
                "keys": [random_bytes(32).hex() for _ in range(n)],
            }
        )
class SDPDeclareServiceType(Enum):
    """Closed set of service types accepted by an SDP declare operation."""

    # NOTE(review): presumably "BN" = Blend Network and "DA" = Data Availability — confirm.
    BN = "BN"
    DA = "DA"
class SDPDeclareSerializer(OperationContentSerializer):
    """Wire format for an SDP service-declaration operation."""

    service_type: SDPDeclareServiceType
    locators: List[BytesFromHex]
    provider_id: BytesFromIntArray = Field(description="Bytes as an integer array.")
    zk_id: BytesFromHex = Field(description="Fr integer.")
    locked_note_id: BytesFromHex = Field(description="Fr integer.")

    def into_operation_content(self) -> SDPDeclare:
        """Convert into the domain ``SDPDeclare`` model."""
        payload = {
            "service_type": self.service_type.value,
            "locators": self.locators,
            "provider_id": self.provider_id,
            "zk_id": self.zk_id,
            "locked_note_id": self.locked_note_id,
        }
        return SDPDeclare.model_validate(payload)

    @classmethod
    def from_random(cls) -> Self:
        # 50% chance of a single locator, otherwise 1-5 of them.
        n = 1 if randint(0, 1) == 0 else randint(1, 5)
        return cls.model_validate(
            {
                "service_type": choice(list(SDPDeclareServiceType)).value,
                "locators": [random_bytes(32).hex() for _ in range(n)],
                "provider_id": list(random_bytes(32)),
                "zk_id": random_bytes(32).hex(),
                "locked_note_id": random_bytes(32).hex(),
            }
        )
class SDPWithdrawSerializer(OperationContentSerializer):
    """Wire format for withdrawing a previous SDP declaration."""

    declaration_id: BytesFromIntArray = Field(description="Bytes as a 32-integer array.")
    nonce: BytesFromInt

    def into_operation_content(self) -> SDPWithdraw:
        """Convert into the domain ``SDPWithdraw`` model."""
        payload = {"declaration_id": self.declaration_id, "nonce": self.nonce}
        return SDPWithdraw.model_validate(payload)

    @classmethod
    def from_random(cls) -> Self:
        declaration_id = list(random_bytes(32))
        nonce = int.from_bytes(random_bytes(8))
        return cls.model_validate({"declaration_id": declaration_id, "nonce": nonce})
class SDPActiveSerializer(OperationContentSerializer):
    """Wire format for marking an SDP declaration as active."""

    declaration_id: BytesFromIntArray = Field(description="Bytes as a 32-integer array.")
    nonce: BytesFromInt
    metadata: Optional[BytesFromIntArray] = Field(description="Bytes as an integer array.")

    def into_operation_content(self) -> SDPActive:
        """Convert into the domain ``SDPActive`` model."""
        payload = {
            "declaration_id": self.declaration_id,
            "nonce": self.nonce,
            "metadata": self.metadata,
        }
        return SDPActive.model_validate(payload)

    @classmethod
    def from_random(cls) -> Self:
        declaration_id = list(random_bytes(32))
        nonce = int.from_bytes(random_bytes(8))
        # 50/50: no metadata, or 32 random bytes.
        metadata = None if randint(0, 1) == 0 else list(random_bytes(32))
        return cls.model_validate(
            {"declaration_id": declaration_id, "nonce": nonce, "metadata": metadata}
        )
class LeaderClaimSerializer(OperationContentSerializer):
    """Wire format for a leader reward-claim operation."""

    rewards_root: BytesFromInt = Field(description="Fr integer.")
    voucher_nullifier: BytesFromInt = Field(description="Fr integer.")
    mantle_tx_hash: BytesFromInt = Field(description="Fr integer.")

    def into_operation_content(self) -> LeaderClaim:
        """Convert into the domain ``LeaderClaim`` model."""
        payload = {
            "rewards_root": self.rewards_root,
            "voucher_nullifier": self.voucher_nullifier,
            "mantle_tx_hash": self.mantle_tx_hash,
        }
        return LeaderClaim.model_validate(payload)

    @classmethod
    def from_random(cls) -> Self:
        def random_u64() -> int:
            return int.from_bytes(random_bytes(8))

        return cls.model_validate(
            {
                "rewards_root": random_u64(),
                "voucher_nullifier": random_u64(),
                "mantle_tx_hash": random_u64(),
            }
        )
# Closed union of every operation-content wire format; with
# `union_mode="left_to_right"` pydantic tries variants in declaration order
# and keeps the first that validates.
type OperationContentSerializerVariants = Union[
    ChannelInscribeSerializer,
    ChannelBlobSerializer,
    ChannelSetKeysSerializer,
    SDPDeclareSerializer,
    SDPWithdrawSerializer,
    SDPActiveSerializer,
    LeaderClaimSerializer,
]
OperationContentSerializerField = Annotated[OperationContentSerializerVariants, Field(union_mode="left_to_right")]

View File

@ -0,0 +1,82 @@
from abc import ABC, abstractmethod
from typing import Annotated, Self, Union
from pydantic import Field, RootModel
from core.models import NbeSerializer
from models.transactions.operations.proofs import (
Ed25519Signature,
NbeSignature,
ZkAndEd25519Signature,
ZkSignature,
)
from node.api.serializers.fields import BytesFromHex
from utils.protocols import EnforceSubclassFromRandom
from utils.random import random_bytes
class OperationProofSerializer(EnforceSubclassFromRandom, ABC):
    """Base class for operation-proof wire formats (signature variants)."""

    @abstractmethod
    def into_operation_proof(self) -> NbeSignature:
        """Convert this wire-format proof into its domain signature model.

        Fix: the first parameter was named ``cls`` although this is declared
        (and implemented by every subclass) as an instance method.
        """
        raise NotImplementedError
# TODO: Differentiate between Ed25519SignatureSerializer and ZkSignatureSerializer
class Ed25519SignatureSerializer(OperationProofSerializer, RootModel[str]):
    """Bare Ed25519 signature, transported as a single hex string."""

    root: BytesFromHex

    def into_operation_proof(self) -> NbeSignature:
        """Convert into the domain ``Ed25519Signature`` model."""
        payload = {"signature": self.root}
        return Ed25519Signature.model_validate(payload)

    @classmethod
    def from_random(cls, *args, **kwargs) -> Self:
        # 64 bytes: the size of an Ed25519 signature.
        return cls.model_validate(random_bytes(64).hex())
class ZkSignatureSerializer(OperationProofSerializer, RootModel[str]):
    """Bare ZK signature, transported as a single hex string."""

    root: BytesFromHex

    def into_operation_proof(self) -> NbeSignature:
        """Convert into the domain ``ZkSignature`` model."""
        payload = {"signature": self.root}
        return ZkSignature.model_validate(payload)

    @classmethod
    def from_random(cls, *args, **kwargs) -> Self:
        return cls.model_validate(random_bytes(32).hex())
class ZkAndEd25519SignaturesSerializer(OperationProofSerializer, NbeSerializer):
    """Combined ZK + Ed25519 signature pair."""

    zk_signature: BytesFromHex = Field(alias="zk_sig")
    ed25519_signature: BytesFromHex = Field(alias="ed25519_sig")

    def into_operation_proof(self) -> NbeSignature:
        """Convert into the domain ``ZkAndEd25519Signature`` model."""
        return ZkAndEd25519Signature.model_validate(
            {
                "zk_signature": self.zk_signature,
                "ed25519_signature": self.ed25519_signature,
            }
        )

    @classmethod
    def from_random(cls, *args, **kwargs) -> Self:
        # Fix: was hard-coded to `ZkAndEd25519SignaturesSerializer.model_validate`;
        # use `cls` so subclasses build instances of themselves, matching the
        # sibling serializers and the declared `Self` return type.
        return cls.model_validate(
            {
                "zk_sig": random_bytes(32).hex(),
                "ed25519_sig": random_bytes(32).hex(),
            }
        )
# Closed union of proof wire formats; `left_to_right` tries the single-string
# RootModel variants before the two-field object variant.
OperationProofSerializerVariants = Union[
    Ed25519SignatureSerializer, ZkSignatureSerializer, ZkAndEd25519SignaturesSerializer
]
OperationProofSerializerField = Annotated[OperationProofSerializerVariants, Field(union_mode="left_to_right")]

View File

@ -0,0 +1,74 @@
from abc import ABC, abstractmethod
from typing import Annotated, Optional, Self, Union
from pydantic import Field
from rusty_results import Option
from core.models import NbeSerializer
from models.header.proof_of_leadership import (
Groth16ProofOfLeadership,
ProofOfLeadership,
)
from node.api.serializers.fields import BytesFromHex, BytesFromIntArray
from node.api.serializers.public import PublicSerializer
from utils.protocols import EnforceSubclassFromRandom
from utils.random import random_bytes
class ProofOfLeadershipSerializer(NbeSerializer, EnforceSubclassFromRandom, ABC):
    """Base class for proof-of-leadership wire formats."""

    @abstractmethod
    def into_proof_of_leadership(self) -> ProofOfLeadership:
        """Convert this wire-format proof into its domain-model counterpart."""
        raise NotImplementedError
class Groth16LeaderProofSerializer(ProofOfLeadershipSerializer, NbeSerializer):
    """Groth16 proof-of-leadership wire format."""

    entropy_contribution: BytesFromHex = Field(description="Fr integer.")
    leader_key: BytesFromIntArray = Field(description="Bytes in Integer Array format.")
    proof: BytesFromIntArray = Field(
        description="Bytes in Integer Array format.",
    )
    # NOTE(review): `public` is Optional but has no default, so the key must be
    # present (possibly null) in every payload — confirm non-dev nodes send it.
    public: Optional[PublicSerializer] = Field(description="Only received if Node is running in dev mode.")
    voucher_cm: BytesFromHex = Field(description="Hash.")

    def into_proof_of_leadership(self) -> ProofOfLeadership:
        """Convert into the domain ``Groth16ProofOfLeadership`` model."""
        if self.public:
            public = self.public.into_public()
        else:
            public = None
        payload = {
            "entropy_contribution": self.entropy_contribution,
            "leader_key": self.leader_key,
            "proof": self.proof,
            "public": public,
            "voucher_cm": self.voucher_cm,
        }
        return Groth16ProofOfLeadership.model_validate(payload)

    @classmethod
    def from_random(cls, *, slot: Option[int]) -> Self:
        return cls.model_validate(
            {
                "entropy_contribution": random_bytes(32).hex(),
                "leader_key": list(random_bytes(32)),
                "proof": list(random_bytes(128)),
                "public": PublicSerializer.from_random(slot),
                "voucher_cm": random_bytes(32).hex(),
            }
        )
# Fake Variant that never resolves to allow union type checking to work
# TODO: Remove this when another Variant is added
from pydantic import BeforeValidator
def _always_fail(_):
raise ValueError("Never matches.")
# `_NeverType` fails validation for every input, so it only exists to give the
# Union a second member and keep the left_to_right union machinery exercised.
_NeverType = Annotated[object, BeforeValidator(_always_fail)]
#
ProofOfLeadershipVariants = Union[
    Groth16LeaderProofSerializer, _NeverType
]  # TODO: Remove _NeverType when another Variant is added
ProofOfLeadershipSerializerField = Annotated[ProofOfLeadershipVariants, Field(union_mode="left_to_right")]

View File

@ -0,0 +1,42 @@
from random import randint
from typing import Self
from pydantic import Field
from rusty_results import Option
from core.models import NbeSerializer
from models.header.public import Public
from node.api.serializers.fields import BytesFromHex
from utils.protocols import FromRandom
from utils.random import random_bytes
class PublicSerializer(NbeSerializer, FromRandom):
    """Wire-format public inputs of a proof of leadership."""

    aged_root: BytesFromHex = Field(description="Fr integer in hex format.")
    epoch_nonce: BytesFromHex = Field(description="Fr integer in hex format.")
    latest_root: BytesFromHex = Field(description="Fr integer in hex format.")
    slot: int = Field(description="Integer in u64 format.")
    total_stake: int = Field(description="Integer in u64 format.")

    def into_public(self) -> Public:
        """Convert into the domain ``Public`` model."""
        return Public.model_validate(
            {
                "aged_root": self.aged_root,
                "epoch_nonce": self.epoch_nonce,
                "latest_root": self.latest_root,
                "slot": self.slot,
                "total_stake": self.total_stake,
            }
        )

    @classmethod
    def from_random(cls, slot: Option[int]) -> Self:
        """Build random public inputs; ``slot`` overrides the random slot.

        Fix: the validated model was previously discarded (missing ``return``),
        so this method always returned ``None`` despite its ``Self`` annotation.
        """
        return cls.model_validate(
            {
                "aged_root": random_bytes(32).hex(),
                "epoch_nonce": random_bytes(32).hex(),
                "latest_root": random_bytes(32).hex(),
                "slot": slot.unwrap_or(randint(0, 10_000)),
                "total_stake": randint(0, 10_000),
            }
        )

View File

@ -0,0 +1,64 @@
from typing import List, Self
from pydantic import Field
from rusty_results import Option
from core.models import NbeSerializer
from models.transactions.transaction import Transaction
from node.api.serializers.fields import BytesFromHex
from node.api.serializers.proof import (
OperationProofSerializer,
OperationProofSerializerField,
)
from node.api.serializers.transaction import TransactionSerializer
from utils.protocols import FromRandom
from utils.random import random_bytes
class SignedTransactionSerializer(NbeSerializer, FromRandom):
    """Wire-format signed transaction: a transaction plus its proofs."""

    transaction: TransactionSerializer = Field(alias="mantle_tx", description="Transaction.")
    operations_proofs: List[OperationProofSerializerField] = Field(
        alias="ops_proofs", description="List of OperationProof. Order should match `Self::transaction::operations`."
    )
    ledger_transaction_proof: BytesFromHex = Field(
        alias="ledger_tx_proof", description="Hash.", min_length=128, max_length=128
    )

    def into_transaction(self) -> Transaction:
        """Convert into the domain ``Transaction`` model.

        Pairs each operation content with its proof positionally (the field
        description above says both lists are expected to share order).

        Raises:
            ValueError: If contents and proofs differ in length.
        """
        operations_contents = self.transaction.operations_contents
        if len(operations_contents) != len(self.operations_proofs):
            raise ValueError(
                f"Number of operations ({len(operations_contents)}) does not match number of operation proofs ({len(self.operations_proofs)})."
            )
        operations = [
            {
                "content": content.into_operation_content(),
                "proof": proof.into_operation_proof(),
            }
            for content, proof in zip(operations_contents, self.operations_proofs)
        ]
        ledger_transaction = self.transaction.ledger_transaction
        outputs = [output.into_note() for output in ledger_transaction.outputs]
        return Transaction.model_validate(
            {
                "hash": self.transaction.hash,
                "operations": operations,
                "inputs": ledger_transaction.inputs,
                "outputs": outputs,
                "proof": self.ledger_transaction_proof,
                "execution_gas_price": self.transaction.execution_gas_price,
                "storage_gas_price": self.transaction.storage_gas_price,
            }
        )

    @classmethod
    def from_random(cls) -> Self:
        """Build a random signed transaction with one proof per operation, so
        the positional pairing in ``into_transaction`` stays aligned."""
        transaction = TransactionSerializer.from_random()
        n = len(transaction.operations_contents)
        operations_proofs = [OperationProofSerializer.from_random() for _ in range(n)]
        return cls.model_validate(
            {"mantle_tx": transaction, "ops_proofs": operations_proofs, "ledger_tx_proof": random_bytes(128).hex()}
        )

View File

@ -0,0 +1,36 @@
from random import randint
from typing import List, Self
from pydantic import Field
from core.models import NbeSerializer
from node.api.serializers.fields import BytesFromHex
from node.api.serializers.ledger_transaction import LedgerTransactionSerializer
from node.api.serializers.operation import (
OperationContentSerializer,
OperationContentSerializerField,
)
from utils.protocols import FromRandom
from utils.random import random_bytes
class TransactionSerializer(NbeSerializer, FromRandom):
    """Wire-format transaction: operations plus a ledger transaction."""

    hash: BytesFromHex = Field(description="Hash id in hex format.")
    operations_contents: List[OperationContentSerializerField] = Field(alias="ops")
    ledger_transaction: LedgerTransactionSerializer = Field(alias="ledger_tx")
    execution_gas_price: int = Field(description="Integer in u64 format.")
    storage_gas_price: int = Field(description="Integer in u64 format.")

    @classmethod
    def from_random(cls) -> Self:
        """Build a random transaction: 50% chance of no operations, otherwise 1-5."""
        # Was `randint(0, 1) <= 0.5`, an int compared against a float that only
        # ever matched 0 — spelled out as an explicit coin flip.
        n = 0 if randint(0, 1) == 0 else randint(1, 5)
        operations_contents = [OperationContentSerializer.from_random() for _ in range(n)]
        return cls.model_validate(
            {
                "hash": random_bytes(32).hex(),
                "ops": operations_contents,
                "ledger_tx": LedgerTransactionSerializer.from_random(),
                "execution_gas_price": randint(1, 10_000),
                "storage_gas_price": randint(1, 10_000),
            }
        )

View File

@ -1,19 +1,20 @@
import logging
from asyncio import TaskGroup, create_task, sleep
from contextlib import asynccontextmanager
from typing import TYPE_CHECKING, AsyncGenerator, AsyncIterator
from typing import TYPE_CHECKING, AsyncGenerator, AsyncIterator, List
from rusty_results import Option
from db.blocks import BlockRepository
from db.clients import SqliteClient
from db.transaction import TransactionRepository
from models.block import Block
from models.transactions.transaction import Transaction
from node.api.fake import FakeNodeApi
from node.api.http import HttpNodeApi
from node.api.serializers.block import BlockSerializer
from node.manager.docker import DockerModeManager
from node.manager.fake import FakeNodeManager
from node.models.blocks import Block
from node.models.transactions import Transaction
if TYPE_CHECKING:
from core.app import NBE
@ -27,8 +28,8 @@ async def node_lifespan(app: "NBE") -> AsyncGenerator[None]:
app.state.node_manager = FakeNodeManager()
# app.state.node_manager = DockerModeManager(app.settings.node_compose_filepath)
# app.state.node_api = FakeNodeApi()
app.state.node_api = HttpNodeApi(host="127.0.0.1", port=18080)
app.state.node_api = FakeNodeApi()
# app.state.node_api = HttpNodeApi(host="127.0.0.1", port=18080)
app.state.db_client = db_client
app.state.block_repository = BlockRepository(db_client)
@ -88,21 +89,34 @@ async def _gracefully_close_stream(stream: AsyncIterator) -> None:
async def subscribe_to_new_blocks(app: "NBE"):
blocks_stream: AsyncGenerator[Block] = app.state.node_api.get_blocks_stream() # type: ignore[call-arg]
blocks_stream: AsyncGenerator[BlockSerializer] = app.state.node_api.get_blocks_stream() # type: ignore[call-arg]
try:
while app.state.is_running:
try:
block = await anext(blocks_stream) # TODO: Use anext's Sentinel?
except StopAsyncIteration:
logger.error("Subscription to the new blocks stream ended unexpectedly. Please restart the node.")
break
block_serializer = await anext(blocks_stream) # TODO: Use anext's Sentinel?
except TimeoutError:
continue
except StopAsyncIteration:
import traceback
traceback.print_exc()
logger.error("Subscription to the new blocks stream ended unexpectedly. Please restart the node.")
break
except Exception as e:
import traceback
traceback.print_exc()
logger.error(f"Error while fetching new blocks: {e}")
continue
try:
block = block_serializer.into_block()
await app.state.block_repository.create(block)
except Exception as e:
import traceback
traceback.print_exc()
logger.error(f"Error while saving new block: {e}")
finally:
await _gracefully_close_stream(blocks_stream)
@ -146,7 +160,10 @@ async def backfill_blocks(app: "NBE", *, db_hit_interval_seconds: int, batch_siz
logger.info(f"Backfilling blocks from slot {slot_to} down to 0...")
while slot_to > 0:
slot_from = max(0, slot_to - batch_size)
blocks = await app.state.node_api.get_blocks(slot_from=slot_from, slot_to=slot_to)
blocks_serializers: List[BlockSerializer] = await app.state.node_api.get_blocks(
slot_from=slot_from, slot_to=slot_to
)
blocks: List[Block] = [block_serializer.into_block() for block_serializer in blocks_serializers]
logger.debug(f"Backfilling {len(blocks)} blocks from slot {slot_from} to {slot_to}...")
await app.state.block_repository.create(*blocks)
slot_to = slot_from - 1

View File

@ -1,137 +0,0 @@
import logging
import os
import random
from typing import TYPE_CHECKING, Any, List, Self
from pydantic.config import ExtraValues
from pydantic_core.core_schema import computed_field
from sqlalchemy import Column
from sqlmodel import Field, Relationship
from core.models import NbeSchema, TimestampedModel
from core.sqlmodel import PydanticJsonColumn
from utils.random import random_hash
if TYPE_CHECKING:
from node.models.transactions import Transaction
def _is_debug__randomize_transactions():
is_debug = os.getenv("DEBUG", "False").lower() == "true"
is_debug__randomize_transactions = os.getenv("DEBUG__RANDOMIZE_TRANSACTIONS", "False").lower() == "true"
return is_debug and is_debug__randomize_transactions
logger = logging.getLogger(__name__)
class Public(NbeSchema):
aged_root: str
epoch_nonce: str
latest_root: str
slot: int
total_stake: float
@classmethod
def from_random(cls, slot: int = None) -> "Public":
if slot is not None:
slot = random.randint(1, 100)
return Public(
aged_root=random_hash(),
epoch_nonce=random_hash(),
latest_root=random_hash(),
slot=slot,
total_stake=100.0,
)
class ProofOfLeadership(NbeSchema):
entropy_contribution: str
leader_key: List[int]
proof: List[int]
public: Public
voucher_cm: str
@classmethod
def from_random(cls, slot: int = None) -> "ProofOfLeadership":
random_hash_as_list = lambda: [random.randint(0, 255) for _ in range(64)]
return ProofOfLeadership(
entropy_contribution=random_hash(),
leader_key=random_hash_as_list(),
proof=random_hash_as_list(),
public=Public.from_random(slot),
voucher_cm=random_hash(),
)
class Header(NbeSchema):
block_root: str
parent_block: str
proof_of_leadership: ProofOfLeadership
slot: int
@classmethod
def from_random(cls, slot_from: int = 1, slot_to: int = 100) -> "Header":
slot = random.randint(slot_from, slot_to)
return Header(
block_root=random_hash(),
parent_block=random_hash(),
proof_of_leadership=ProofOfLeadership.from_random(slot),
slot=slot,
)
class Block(TimestampedModel, table=True):
__tablename__ = "block"
header: Header = Field(sa_column=Column(PydanticJsonColumn(Header), nullable=False))
transactions: List["Transaction"] = Relationship(
back_populates="block",
sa_relationship_kwargs={
"lazy": "selectin",
"cascade": "all, delete-orphan",
},
)
@property
def slot(self) -> int:
return self.header.slot
def __str__(self) -> str:
return f"Block(slot={self.slot})"
def __repr__(self) -> str:
return f"<Block(id={self.id}, created_at={self.created_at}, slot={self.slot}, parent={self.header["parent_block"]})>"
@classmethod
def model_validate_json(
cls,
json_data: str | bytes | bytearray,
*,
strict: bool | None = None,
extra: ExtraValues | None = None,
context: Any | None = None,
by_alias: bool | None = None,
by_name: bool | None = None,
) -> Self:
self = super().model_validate_json(
json_data, strict=strict, extra=extra, context=context, by_alias=by_alias, by_name=by_name
)
if _is_debug__randomize_transactions():
from node.models.transactions import Transaction
logger.debug("DEBUG and DEBUG__RANDOMIZE_TRANSACTIONS is enabled, randomizing Block's transactions.")
n = 0 if random.randint(0, 1) <= 0.5 else random.randint(1, 10)
self.transactions = [Transaction.from_random() for _ in range(n)]
return self
@classmethod
def from_random(cls, slot_from: int = 1, slot_to: int = 100) -> "Block":
n = 0 if random.randint(0, 1) < 0.3 else random.randint(1, 5)
transactions = [Transaction.from_random() for _ in range(n)]
return Block(
header=Header.from_random(slot_from, slot_to),
transactions=transactions,
)

View File

@ -1,19 +0,0 @@
from core.models import IdNbeModel
class Health(IdNbeModel):
healthy: bool
@classmethod
def from_healthy(cls) -> "Health":
return cls(healthy=True)
@classmethod
def from_unhealthy(cls) -> "Health":
return cls(healthy=False)
def __str__(self):
return "Healthy" if self.healthy else "Unhealthy"
def __repr__(self):
return f"<Health(healthy={self.healthy})>"

View File

@ -1,92 +0,0 @@
import random
from enum import StrEnum
from typing import TYPE_CHECKING, List, Optional
from sqlalchemy import JSON, Column
from sqlmodel import Field, Relationship
from core.models import NbeSchema, TimestampedModel
from core.sqlmodel import PydanticJsonColumn
from utils.random import random_address
if TYPE_CHECKING:
from node.models.blocks import Block
Value = int
Fr = int
Gas = float
PublicKey = bytes
class Operation(StrEnum):
CHANNEL_INSCRIBE = ("ChannelInscribe",) # (InscriptionOp)
CHANNEL_BLOB = ("ChannelBlob",) # (BlobOp)
CHANNEL_SET_KEYS = ("ChannelSetKeys",) # (SetKeysOp)
NATIVE = ("Native",) # (NativeOp)
SDP_DECLARE = ("SDPDeclare",) # (SDPDeclareOp)
SDP_WITHDRAW = ("SDPWithdraw",) # (SDPWithdrawOp)
SDP_ACTIVE = ("SDPActive",) # (SDPActiveOp)
LEADER_CLAIM = ("LeaderClaim",) # (LeaderClaimOp)
class Note(NbeSchema):
value: Value
public_key: PublicKey
@classmethod
def from_random(cls) -> "Note":
return Note(
value=random.randint(1, 100),
public_key=random_address().encode("utf-8"),
)
class LedgerTransaction(NbeSchema):
"""
Tx
"""
inputs: List[Fr] = Field(default_factory=list, sa_column=Column(JSON, nullable=False))
outputs: List[Note] = Field(default_factory=list, sa_column=Column(JSON, nullable=False))
@classmethod
def from_random(cls) -> "LedgerTransaction":
return LedgerTransaction(
inputs=[random.randint(1, 100) for _ in range(10)],
outputs=[Note.from_random() for _ in range(10)],
)
class Transaction(TimestampedModel, table=True):
"""
MantleTx
"""
__tablename__ = "transaction"
block_id: int = Field(foreign_key="block.id", nullable=False, index=True)
operations: List[str] = Field(alias="ops", default_factory=list, sa_column=Column(JSON, nullable=False))
ledger_transaction: LedgerTransaction = Field(
default_factory=dict, sa_column=Column(PydanticJsonColumn(LedgerTransaction), nullable=False)
)
execution_gas_price: Gas
storage_gas_price: Gas
block: Optional["Block"] = Relationship(back_populates="transactions")
def __str__(self) -> str:
return f"Transaction({self.operations})"
def __repr__(self) -> str:
return f"<Transaction(id={self.id}, created_at={self.created_at}, operations={self.operations})>"
@classmethod
def from_random(cls) -> "Transaction":
n = random.randint(1, 3)
operations = [random.choice(list(Operation)).value for _ in range(n)]
return Transaction(
operations=operations,
ledger_transaction=LedgerTransaction.from_random(),
execution_gas_price=random.random(),
storage_gas_price=random.random(),
)

35
src/utils/protocols.py Normal file
View File

@ -0,0 +1,35 @@
from abc import ABC, abstractmethod
from random import choice
from typing import Self
class FromRandom(ABC):
@classmethod
@abstractmethod
def from_random(cls, *args, **kwargs) -> Self:
raise NotImplementedError
# TODO: Unnecessarily complex.
class EnforceSubclassFromRandom(FromRandom, ABC):
@classmethod
def from_random(cls, *args, **kwargs) -> Self:
subclasses = cls.__subclasses__()
if len(subclasses) == 0:
raise TypeError("No subclasses were found.")
return choice(subclasses).from_random(*args, **kwargs)
def __init_subclass__(cls, **kwargs):
super().__init_subclass__(**kwargs)
# Distance to the base in the MRO
try:
distance = cls.mro().index(EnforceSubclassFromRandom)
except ValueError:
return # Not a descendant (shouldn't happen here)
# Require override only for grandchildren (exactly two levels below)
if distance >= 2 and not hasattr(cls, "from_random"):
raise TypeError(
f"Class {cls.__name__} is a grandchild of EnforceSubclassFromRandom. Therefore, it must implement `from_random()`."
)

View File

@ -1,13 +1,18 @@
import random
from typing import List
def random_hex(length: int) -> str:
    """Return a random hex string of exactly `length` digits, prefixed with "0x"."""
    bits = random.getrandbits(length * 4)
    return f"0x{bits:0{length}x}"
def random_bytes(length: int) -> bytes:
    """Return `length` random bytes, each drawn uniformly from 0-255."""
    values = [random.randint(0, 255) for _ in range(length)]
    return bytes(values)
def random_hash() -> str:
return random_hex(64)
def random_address() -> bytes:
return random_bytes(40)
def random_address() -> str:
    """Return a random address rendered as a 40-hex-digit "0x…" string."""
    return random_hex(40)
def random_hash() -> bytes:
    """Return 64 random bytes sized like a hash digest."""
    return random_bytes(64)
def as_list(data: bytes) -> List[int]:
    """Expand a bytes value into the list of its integer byte values."""
    return [byte for byte in data]

View File

@ -14,24 +14,26 @@ export default function BlocksTable() {
const body = bodyRef.current;
const counter = countRef.current;
// 5 columns now (ID, Slot, Root, Parent, Transactions)
ensureFixedRowCount(body, 5, TABLE_SIZE);
// 6 columns: ID | Slot | Hash | Parent | Block Root | Transactions
ensureFixedRowCount(body, 6, TABLE_SIZE);
abortRef.current?.abort();
abortRef.current = new AbortController();
const pruneAndPad = () => {
// remove any placeholder rows that snuck in
for (let i = body.rows.length - 1; i >= 0; i--) {
if (body.rows[i].classList.contains('ph')) body.deleteRow(i);
}
// keep at most TABLE_SIZE non-placeholder rows
while ([...body.rows].filter((r) => !r.classList.contains('ph')).length > TABLE_SIZE) {
const last = body.rows[body.rows.length - 1];
const key = last?.dataset?.key;
if (key) seenKeysRef.current.delete(key);
body.deleteRow(-1);
}
// keep placeholders in sync with 5 columns
ensureFixedRowCount(body, 5, TABLE_SIZE);
// pad with placeholders to TABLE_SIZE (6 cols)
ensureFixedRowCount(body, 6, TABLE_SIZE);
const real = [...body.rows].filter((r) => !r.classList.contains('ph')).length;
counter.textContent = String(real);
};
@ -64,15 +66,15 @@ export default function BlocksTable() {
spSlot.textContent = String(b.slot);
tdSlot.appendChild(spSlot);
// Root
const tdRoot = document.createElement('td');
const spRoot = document.createElement('span');
spRoot.className = 'mono';
spRoot.title = b.root;
spRoot.textContent = shortenHex(b.root);
tdRoot.appendChild(spRoot);
// Hash
const tdHash = document.createElement('td');
const spHash = document.createElement('span');
spHash.className = 'mono';
spHash.title = b.hash;
spHash.textContent = shortenHex(b.hash);
tdHash.appendChild(spHash);
// Parent
// Parent (block.parent_block_hash)
const tdParent = document.createElement('td');
const spParent = document.createElement('span');
spParent.className = 'mono';
@ -80,6 +82,14 @@ export default function BlocksTable() {
spParent.textContent = shortenHex(b.parent);
tdParent.appendChild(spParent);
// Block Root
const tdRoot = document.createElement('td');
const spRoot = document.createElement('span');
spRoot.className = 'mono';
spRoot.title = b.root;
spRoot.textContent = shortenHex(b.root);
tdRoot.appendChild(spRoot);
// Transactions (array length)
const tdCount = document.createElement('td');
const spCount = document.createElement('span');
@ -87,13 +97,16 @@ export default function BlocksTable() {
spCount.textContent = String(b.transactionCount);
tdCount.appendChild(spCount);
tr.append(tdId, tdSlot, tdRoot, tdParent, tdCount);
tr.append(tdId, tdSlot, tdHash, tdParent, tdRoot, tdCount);
body.insertBefore(tr, body.firstChild);
pruneAndPad();
};
const normalize = (raw) => {
const header = raw.header ?? raw;
// New backend:
// { id, hash, slot, block_root, parent_block_hash, transactions: [...] }
// Back-compat (header.* / raw.parent_block) just in case.
const header = raw.header ?? null;
const txLen = Array.isArray(raw.transactions)
? raw.transactions.length
: Array.isArray(raw.txs)
@ -102,9 +115,10 @@ export default function BlocksTable() {
return {
id: Number(raw.id ?? 0),
slot: Number(header?.slot ?? raw.slot ?? 0),
root: header?.block_root ?? raw.block_root ?? '',
parent: header?.parent_block ?? raw.parent_block ?? '',
slot: Number(raw.slot ?? header?.slot ?? 0),
hash: raw.hash ?? header?.hash ?? '',
parent: raw.parent_block_hash ?? header?.parent_block ?? raw.parent_block ?? '',
root: raw.block_root ?? header?.block_root ?? '',
transactionCount: txLen,
};
};
@ -152,8 +166,9 @@ export default function BlocksTable() {
null,
h('col', { style: 'width:80px' }), // ID
h('col', { style: 'width:90px' }), // Slot
h('col', { style: 'width:240px' }), // Root
h('col', { style: 'width:240px' }), // Hash
h('col', { style: 'width:240px' }), // Parent
h('col', { style: 'width:240px' }), // Block Root
h('col', { style: 'width:120px' }), // Transactions
),
h(
@ -164,8 +179,9 @@ export default function BlocksTable() {
null,
h('th', null, 'ID'),
h('th', null, 'Slot'),
h('th', null, 'Block Root'),
h('th', null, 'Hash'),
h('th', null, 'Parent'),
h('th', null, 'Block Root'),
h('th', null, 'Transactions'),
),
),

View File

@ -1,139 +1,167 @@
// static/pages/TransactionsTable.js
import { h } from 'preact';
import { useEffect, useRef } from 'preact/hooks';
import { API, TABLE_SIZE } from '../lib/api.js?dev=1';
import {
streamNdjson,
ensureFixedRowCount,
shortenHex,
formatTimestamp,
shortenHex, // (kept in case you want to use later)
withBenignFilter,
} from '../lib/utils.js?dev=1';
const OPERATIONS_PREVIEW_LIMIT = 2;
// ---------- small DOM helpers ----------
function createSpan(className, text, title) {
const element = document.createElement('span');
if (className) element.className = className;
if (title) element.title = title;
element.textContent = text;
return element;
const el = document.createElement('span');
if (className) el.className = className;
if (title) el.title = title;
el.textContent = text;
return el;
}
function createLink(href, text, title) {
const element = document.createElement('a');
element.className = 'linkish mono';
element.href = href;
if (title) element.title = title;
element.textContent = text;
return element;
const el = document.createElement('a');
el.className = 'linkish mono';
el.href = href;
if (title) el.title = title;
el.textContent = text;
return el;
}
// ---------- coercion / formatting helpers ----------
const toNumber = (v) => {
if (v == null) return 0;
if (typeof v === 'number') return v;
if (typeof v === 'bigint') return Number(v);
if (typeof v === 'string') {
const s = v.trim();
if (/^0x[0-9a-f]+$/i.test(s)) return Number(BigInt(s));
const n = Number(s);
return Number.isFinite(n) ? n : 0;
}
if (typeof v === 'object' && v !== null && 'value' in v) return toNumber(v.value);
return 0;
};
const opLabel = (op) => {
if (op == null) return 'op';
if (typeof op === 'string' || typeof op === 'number') return String(op);
if (typeof op !== 'object') return String(op);
if (typeof op.type === 'string') return op.type;
if (typeof op.kind === 'string') return op.kind;
if (op.content) {
if (typeof op.content.type === 'string') return op.content.type;
if (typeof op.content.kind === 'string') return op.content.kind;
}
const keys = Object.keys(op);
return keys.length ? keys[0] : 'op';
};
function formatOperationsPreview(ops) {
if (!ops?.length) return '—';
const labels = ops.map(opLabel);
if (labels.length <= OPERATIONS_PREVIEW_LIMIT) return labels.join(', ');
const head = labels.slice(0, OPERATIONS_PREVIEW_LIMIT).join(', ');
const remainder = labels.length - OPERATIONS_PREVIEW_LIMIT;
return `${head} +${remainder}`;
}
// ---------- normalize API → view model ----------
function normalizeTransaction(raw) {
// Defensive parsing and intent-revealing structure
const operations = Array.isArray(raw?.ops) ? raw.ops : Array.isArray(raw?.operations) ? raw.operations : [];
// { id, block_id, hash, operations:[Operation], inputs:[HexBytes], outputs:[Note], proof, execution_gas_price, storage_gas_price, created_at? }
const ops = Array.isArray(raw?.operations) ? raw.operations : Array.isArray(raw?.ops) ? raw.ops : [];
const ledgerOutputs = Array.isArray(raw?.ledger_transaction?.outputs) ? raw.ledger_transaction.outputs : [];
const totalOutputValue = ledgerOutputs.reduce((sum, note) => sum + Number(note?.value ?? 0), 0);
const outputs = Array.isArray(raw?.outputs) ? raw.outputs : [];
const totalOutputValue = outputs.reduce((sum, note) => sum + toNumber(note?.value), 0);
return {
id: raw?.id ?? '',
operations,
createdAt: raw?.created_at ?? raw?.timestamp ?? '',
executionGasPrice: Number(raw?.execution_gas_price ?? 0),
storageGasPrice: Number(raw?.storage_gas_price ?? 0),
numberOfOutputs: ledgerOutputs.length,
operations: ops,
executionGasPrice: toNumber(raw?.execution_gas_price),
storageGasPrice: toNumber(raw?.storage_gas_price),
numberOfOutputs: outputs.length,
totalOutputValue,
};
}
function formatOperationsPreview(operations) {
if (operations.length === 0) return '—';
if (operations.length <= OPERATIONS_PREVIEW_LIMIT) return operations.join(', ');
const head = operations.slice(0, OPERATIONS_PREVIEW_LIMIT).join(', ');
const remainder = operations.length - OPERATIONS_PREVIEW_LIMIT;
return `${head} +${remainder}`;
}
function buildTransactionRow(transactionData) {
const row = document.createElement('tr');
// ---------- row builder ----------
function buildTransactionRow(tx) {
const tr = document.createElement('tr');
// ID
const cellId = document.createElement('td');
cellId.className = 'mono';
cellId.appendChild(
createLink(`/transactions/${transactionData.id}`, String(transactionData.id), String(transactionData.id)),
const tdId = document.createElement('td');
tdId.className = 'mono';
tdId.appendChild(createLink(`/transactions/${tx.id}`, String(tx.id), String(tx.id)));
// Operations (preview)
const tdOps = document.createElement('td');
const preview = formatOperationsPreview(tx.operations);
tdOps.appendChild(
createSpan('', preview, Array.isArray(tx.operations) ? tx.operations.map(opLabel).join(', ') : ''),
);
// Operations
const cellOperations = document.createElement('td');
const operationsPreview = formatOperationsPreview(transactionData.operations);
cellOperations.appendChild(createSpan('', operationsPreview, transactionData.operations.join(', ')));
// Outputs (count / total value)
const cellOutputs = document.createElement('td');
cellOutputs.className = 'amount';
cellOutputs.textContent = `${transactionData.numberOfOutputs} / ${transactionData.totalOutputValue.toLocaleString(undefined, { maximumFractionDigits: 8 })}`;
// Outputs (count / total)
const tdOut = document.createElement('td');
tdOut.className = 'amount';
tdOut.textContent = `${tx.numberOfOutputs} / ${tx.totalOutputValue.toLocaleString(undefined, { maximumFractionDigits: 8 })}`;
// Gas (execution / storage)
const cellGas = document.createElement('td');
cellGas.className = 'mono';
cellGas.textContent = `${transactionData.executionGasPrice.toLocaleString()} / ${transactionData.storageGasPrice.toLocaleString()}`;
const tdGas = document.createElement('td');
tdGas.className = 'mono';
tdGas.textContent = `${tx.executionGasPrice.toLocaleString()} / ${tx.storageGasPrice.toLocaleString()}`;
// Time
const cellTime = document.createElement('td');
const timeSpan = createSpan('mono', formatTimestamp(transactionData.createdAt), String(transactionData.createdAt));
cellTime.appendChild(timeSpan);
row.append(cellId, cellOperations, cellOutputs, cellGas, cellTime);
return row;
tr.append(tdId, tdOps, tdOut, tdGas);
return tr;
}
// ---------- component ----------
export default function TransactionsTable() {
const tableBodyRef = useRef(null);
const counterRef = useRef(null);
const abortControllerRef = useRef(null);
const bodyRef = useRef(null);
const countRef = useRef(null);
const abortRef = useRef(null);
const totalCountRef = useRef(0);
useEffect(() => {
const tableBodyElement = tableBodyRef.current;
const counterElement = counterRef.current;
ensureFixedRowCount(tableBodyElement, 4, TABLE_SIZE);
const body = bodyRef.current;
const counter = countRef.current;
abortControllerRef.current?.abort();
abortControllerRef.current = new AbortController();
// 4 columns: ID | Operations | Outputs | Gas
ensureFixedRowCount(body, 4, TABLE_SIZE);
abortRef.current?.abort();
abortRef.current = new AbortController();
const url = `${API.TRANSACTIONS_STREAM}?prefetch-limit=${encodeURIComponent(TABLE_SIZE)}`;
streamNdjson(
url,
(rawTransaction) => {
(raw) => {
try {
const transactionData = normalizeTransaction(rawTransaction);
const row = buildTransactionRow(transactionData);
const tx = normalizeTransaction(raw);
const row = buildTransactionRow(tx);
body.insertBefore(row, body.firstChild);
tableBodyElement.insertBefore(row, tableBodyElement.firstChild);
while (tableBodyElement.rows.length > TABLE_SIZE) tableBodyElement.deleteRow(-1);
counterElement.textContent = String(++totalCountRef.current);
} catch (error) {
// Fail fast per row, but do not break the stream
console.error('Failed to render transaction row:', error);
while (body.rows.length > TABLE_SIZE) body.deleteRow(-1);
counter.textContent = String(++totalCountRef.current);
} catch (err) {
console.error('Failed to render transaction row:', err, raw);
}
},
{
signal: abortControllerRef.current.signal,
signal: abortRef.current.signal,
onError: withBenignFilter(
(error) => console.error('Transaction stream error:', error),
abortControllerRef.current.signal,
(err) => console.error('Transactions stream error:', err),
abortRef.current.signal,
),
},
).catch((error) => {
if (!abortControllerRef.current.signal.aborted) {
console.error('Transactions stream connection error:', error);
).catch((err) => {
if (!abortRef.current.signal.aborted) {
console.error('Transactions stream connection error:', err);
}
});
return () => abortControllerRef.current?.abort();
return () => abortRef.current?.abort();
}, []);
return h(
@ -142,7 +170,7 @@ export default function TransactionsTable() {
h(
'div',
{ class: 'card-header' },
h('div', null, h('strong', null, 'Transactions '), h('span', { class: 'pill', ref: counterRef }, '0')),
h('div', null, h('strong', null, 'Transactions '), h('span', { class: 'pill', ref: countRef }, '0')),
h('div', { style: 'color:var(--muted); font-size:12px;' }),
),
h(
@ -156,9 +184,8 @@ export default function TransactionsTable() {
null,
h('col', { style: 'width:120px' }), // ID
h('col', null), // Operations
h('col', { style: 'width:180px' }), // Outputs (count / total)
h('col', { style: 'width:180px' }), // Gas (execution / storage)
h('col', { style: 'width:180px' }), // Time
h('col', { style: 'width:200px' }), // Outputs (count / total)
h('col', { style: 'width:200px' }), // Gas (execution / storage)
),
h(
'thead',
@ -170,10 +197,9 @@ export default function TransactionsTable() {
h('th', null, 'Operations'),
h('th', null, 'Outputs (count / total)'),
h('th', null, 'Gas (execution / storage)'),
h('th', null, 'Time'),
),
),
h('tbody', { ref: tableBodyRef }),
h('tbody', { ref: bodyRef }),
),
),
);

View File

@ -5,24 +5,41 @@ import { API, PAGE } from '../lib/api.js?dev=1';
const OPERATIONS_PREVIEW_LIMIT = 2;
// Helpers
// ---- Helpers ----
const opLabel = (op) => {
if (op == null) return 'op';
if (typeof op === 'string' || typeof op === 'number') return String(op);
if (typeof op !== 'object') return String(op);
if (typeof op.type === 'string') return op.type;
if (typeof op.kind === 'string') return op.kind;
if (op.content) {
if (typeof op.content.type === 'string') return op.content.type;
if (typeof op.content.kind === 'string') return op.content.kind;
}
const keys = Object.keys(op);
return keys.length ? keys[0] : 'op';
};
function opsToPills(ops, limit = OPERATIONS_PREVIEW_LIMIT) {
const arr = Array.isArray(ops) ? ops : [];
if (!arr.length) return h('span', { style: 'color:var(--muted); white-space:nowrap;' }, '—');
const shown = arr.slice(0, limit);
const extra = arr.length - shown.length;
const labels = arr.map(opLabel);
const shown = labels.slice(0, limit);
const extra = labels.length - shown.length;
return h(
'div',
{ style: 'display:flex; gap:6px; flex-wrap:nowrap; align-items:center; white-space:nowrap;' },
...shown.map((op, i) =>
h('span', { key: `${op}-${i}`, class: 'pill', title: op, style: 'flex:0 0 auto;' }, op),
...shown.map((label, i) =>
h('span', { key: `${label}-${i}`, class: 'pill', title: label, style: 'flex:0 0 auto;' }, label),
),
extra > 0 && h('span', { class: 'pill', title: `${extra} more`, style: 'flex:0 0 auto;' }, `+${extra}`),
);
}
function computeOutputsSummary(ledgerTransaction) {
const outputs = Array.isArray(ledgerTransaction?.outputs) ? ledgerTransaction.outputs : [];
function computeOutputsSummaryFromTx(tx) {
const outputs = Array.isArray(tx?.outputs) ? tx.outputs : [];
const count = outputs.length;
const total = outputs.reduce((sum, o) => sum + Number(o?.value ?? 0), 0);
return { count, total };
@ -112,9 +129,15 @@ export default function BlockDetailPage({ parameters }) {
};
}, [blockId, isValidId]);
const header = block?.header ?? {};
const header = block?.header ?? {}; // back-compat only
const transactions = Array.isArray(block?.transactions) ? block.transactions : [];
const slot = block?.slot ?? header.slot;
// Prefer new top-level fields; fallback to legacy header.*
const slot = block?.slot ?? header?.slot ?? null;
const blockRoot = block?.block_root ?? header?.block_root ?? '';
const blockHash = block?.hash ?? header?.hash ?? '';
const parentId = block?.parent_id ?? null;
const parentHash = block?.parent_block_hash ?? header?.parent_block ?? '';
return h(
'main',
@ -170,6 +193,23 @@ export default function BlockDetailPage({ parameters }) {
'div',
{ style: 'padding:12px 14px; display:grid; grid-template-columns: 120px 1fr; gap:8px 12px;' },
// Hash (pill + copy)
h('div', null, h('b', null, 'Hash:')),
h(
'div',
{ style: 'display:flex; gap:8px; flex-wrap:wrap; align-items:flex-start;' },
h(
'span',
{
class: 'pill mono',
title: blockHash,
style: 'max-width:100%; overflow-wrap:anywhere; word-break:break-word;',
},
String(blockHash),
),
h(CopyPill, { text: blockHash }),
),
// Root (pill + copy)
h('div', null, h('b', null, 'Root:')),
h(
@ -179,40 +219,40 @@ export default function BlockDetailPage({ parameters }) {
'span',
{
class: 'pill mono',
title: header.block_root ?? '',
title: blockRoot,
style: 'max-width:100%; overflow-wrap:anywhere; word-break:break-word;',
},
String(header.block_root ?? ''),
String(blockRoot),
),
h(CopyPill, { text: header.block_root }),
h(CopyPill, { text: blockRoot }),
),
// Parent (pill + copy)
// Parent (id link OR parent hash) + copy
h('div', null, h('b', null, 'Parent:')),
h(
'div',
{ style: 'display:flex; gap:8px; flex-wrap:wrap; align-items:flex-start;' },
block?.parent_id
parentId != null
? h(
'a',
{
class: 'pill mono linkish',
href: PAGE.BLOCK_DETAIL(block.parent_id),
title: String(block.parent_id),
href: PAGE.BLOCK_DETAIL(parentId),
title: String(parentId),
style: 'max-width:100%; overflow-wrap:anywhere; word-break:break-word;',
},
String(block.parent_id),
String(parentId),
)
: h(
'span',
{
class: 'pill mono',
title: header.parent_block ?? '',
title: parentHash,
style: 'max-width:100%; overflow-wrap:anywhere; word-break:break-word;',
},
String(header.parent_block ?? ''),
String(parentHash || '—'),
),
h(CopyPill, { text: block?.parent_id ?? header.parent_block }),
h(CopyPill, { text: parentId ?? parentHash }),
),
),
),
@ -234,7 +274,6 @@ export default function BlockDetailPage({ parameters }) {
'table',
{
class: 'table--transactions',
// Fill card by default; expand + scroll if content is wider
style: 'min-width:100%; width:max-content; table-layout:auto; border-collapse:collapse;',
},
h(
@ -265,10 +304,10 @@ export default function BlockDetailPage({ parameters }) {
'tbody',
null,
...transactions.map((t) => {
const operations = Array.isArray(t?.operations) ? t.operations : [];
const { count, total } = computeOutputsSummary(t?.ledger_transaction);
const { count, total } = computeOutputsSummaryFromTx(t);
const executionGas = Number(t?.execution_gas_price ?? 0);
const storageGas = Number(t?.storage_gas_price ?? 0);
const ops = Array.isArray(t?.operations) ? t.operations : [];
return h(
'tr',
@ -309,7 +348,7 @@ export default function BlockDetailPage({ parameters }) {
h(
'td',
{ style: 'text-align:left; padding:8px 10px; white-space:nowrap;' },
opsToPills(operations),
opsToPills(ops),
),
);
}),

View File

@ -7,9 +7,10 @@ import { API } from '../lib/api.js?dev=1';
const isNumber = (v) => typeof v === 'number' && !Number.isNaN(v);
const toLocaleNum = (n, opts = {}) => Number(n ?? 0).toLocaleString(undefined, { maximumFractionDigits: 8, ...opts });
// Try to render bytes in a readable way without guessing too hard
// Best-effort pretty bytes/hex/string
function renderBytes(value) {
if (typeof value === 'string') return value; // hex/base64/etc.
if (value == null) return '';
if (typeof value === 'string') return value; // hex/base64/plain
if (Array.isArray(value) && value.every((x) => Number.isInteger(x) && x >= 0 && x <= 255)) {
return '0x' + value.map((b) => b.toString(16).padStart(2, '0')).join('');
}
@ -20,24 +21,106 @@ function renderBytes(value) {
}
}
// ————— normalizer (robust to partial data) —————
function normalizeTransaction(raw) {
const ops = Array.isArray(raw?.operations) ? raw.operations : [];
const lt = raw?.ledger_transaction ?? {};
const inputs = Array.isArray(lt?.inputs) ? lt.inputs : [];
const outputs = Array.isArray(lt?.outputs) ? lt.outputs : [];
const opLabel = (op) => {
if (op == null) return 'op';
if (typeof op === 'string' || typeof op === 'number') return String(op);
if (typeof op !== 'object') return String(op);
if (typeof op.type === 'string') return op.type;
if (typeof op.kind === 'string') return op.kind;
if (op.content) {
if (typeof op.content.type === 'string') return op.content.type;
if (typeof op.content.kind === 'string') return op.content.kind;
}
const keys = Object.keys(op);
return keys.length ? keys[0] : 'op';
};
const totalOutputValue = outputs.reduce((sum, note) => sum + Number(note?.value ?? 0), 0);
function opsToPills(ops, limit = 6) {
const arr = Array.isArray(ops) ? ops : [];
if (!arr.length) return h('span', { style: 'color:var(--muted); whiteSpace: "nowrap";' }, '—');
const labels = arr.map(opLabel);
const shown = labels.slice(0, limit);
const extra = labels.length - shown.length;
return h(
'div',
{ style: 'display:flex; gap:6px; flexWrap:"wrap"; alignItems:"center"' },
...shown.map((label, i) =>
h('span', { key: `${label}-${i}`, class: 'pill', title: label, style: 'flex:0 0 auto;' }, label),
),
extra > 0 && h('span', { class: 'pill', title: `${extra} more`, style: 'flex:0 0 auto;' }, `+${extra}`),
);
}
const toNumber = (v) => {
if (v == null) return 0;
if (typeof v === 'number') return v;
if (typeof v === 'bigint') return Number(v);
if (typeof v === 'string') {
const s = v.trim();
if (/^0x[0-9a-f]+$/i.test(s)) return Number(BigInt(s));
const n = Number(s);
return Number.isFinite(n) ? n : 0;
}
if (typeof v === 'object' && v !== null && 'value' in v) return toNumber(v.value);
return 0;
};
function CopyPill({ text, label = 'Copy' }) {
const onCopy = async (e) => {
e.preventDefault();
try {
await navigator.clipboard.writeText(String(text ?? ''));
} catch {}
};
return h(
'a',
{
class: 'pill linkish mono',
style: 'cursor:pointer; user-select:none;',
href: '#',
onClick: onCopy,
onKeyDown: (e) => {
if (e.key === 'Enter' || e.key === ' ') {
e.preventDefault();
onCopy(e);
}
},
tabIndex: 0,
role: 'button',
},
label,
);
}
// ————— normalizer for new TransactionRead —————
// { id, block_id, hash, operations:[Operation], inputs:[HexBytes], outputs:[Note{public_key:HexBytes,value:int}],
// proof, execution_gas_price, storage_gas_price }
function normalizeTransaction(raw) {
const ops = Array.isArray(raw?.operations) ? raw.operations : Array.isArray(raw?.ops) ? raw.ops : [];
const inputs = Array.isArray(raw?.inputs) ? raw.inputs : [];
const outputs = Array.isArray(raw?.outputs) ? raw.outputs : [];
const totalOutputValue = outputs.reduce((sum, note) => sum + toNumber(note?.value), 0);
return {
id: raw?.id ?? '',
blockId: raw?.block_id ?? null,
operations: ops.map(String),
hash: renderBytes(raw?.hash),
proof: renderBytes(raw?.proof),
operations: ops, // keep objects, well label in UI
executionGasPrice: isNumber(raw?.execution_gas_price)
? raw.execution_gas_price
: Number(raw?.execution_gas_price ?? 0),
storageGasPrice: isNumber(raw?.storage_gas_price) ? raw.storage_gas_price : Number(raw?.storage_gas_price ?? 0),
ledger: { inputs, outputs, totalOutputValue },
: toNumber(raw?.execution_gas_price),
storageGasPrice: isNumber(raw?.storage_gas_price) ? raw.storage_gas_price : toNumber(raw?.storage_gas_price),
ledger: {
inputs: inputs.map((v) => renderBytes(v)),
outputs: outputs.map((n) => ({
public_key: renderBytes(n?.public_key),
value: toNumber(n?.value),
})),
totalOutputValue,
},
};
}
@ -59,8 +142,7 @@ function Summary({ tx }) {
'div',
{ style: 'display:grid; gap:8px;' },
// (ID removed)
// Block link
tx.blockId != null &&
h(
'div',
@ -73,6 +155,34 @@ function Summary({ tx }) {
),
),
// Hash + copy
h(
'div',
null,
h('b', null, 'Hash: '),
h(
'span',
{ class: 'pill mono', title: tx.hash, style: 'max-width:100%; overflow-wrap:anywhere;' },
String(tx.hash || ''),
),
h(CopyPill, { text: tx.hash }),
),
// Proof + copy (if present)
tx.proof &&
h(
'div',
null,
h('b', null, 'Proof: '),
h(
'span',
{ class: 'pill mono', title: tx.proof, style: 'max-width:100%; overflow-wrap:anywhere;' },
String(tx.proof),
),
h(CopyPill, { text: tx.proof }),
),
// Gas
h(
'div',
null,
@ -86,26 +196,14 @@ function Summary({ tx }) {
h('span', { class: 'mono' }, toLocaleNum(tx.storageGasPrice)),
),
h(
'div',
null,
h('b', null, 'Operations: '),
tx.operations?.length
? h(
'span',
{ style: 'display:inline-flex; gap:6px; flex-wrap:wrap; vertical-align:middle;' },
...tx.operations.map((op, i) => h('span', { key: i, class: 'pill', title: op }, op)),
)
: h('span', { style: 'color:var(--muted)' }, '—'),
),
// Operations (labels as pills)
h('div', null, h('b', null, 'Operations: '), opsToPills(tx.operations)),
),
);
}
function InputsTable({ inputs }) {
if (!inputs?.length) {
return h('div', { style: 'color:var(--muted)' }, '—');
}
if (!inputs?.length) return h('div', { style: 'color:var(--muted)' }, '—');
return h(
'div',
@ -117,7 +215,7 @@ function InputsTable({ inputs }) {
'colgroup',
null,
h('col', { style: 'width:80px' }), // #
h('col', null), // Value (fills)
h('col', null), // Value
),
h('thead', null, h('tr', null, h('th', { style: 'text-align:center;' }, '#'), h('th', null, 'Value'))),
h(
@ -145,9 +243,7 @@ function InputsTable({ inputs }) {
}
function OutputsTable({ outputs }) {
if (!outputs?.length) {
return h('div', { style: 'color:var(--muted)' }, '—');
}
if (!outputs?.length) return h('div', { style: 'color:var(--muted)' }, '—');
return h(
'div',
@ -158,9 +254,9 @@ function OutputsTable({ outputs }) {
h(
'colgroup',
null,
h('col', { style: 'width:80px' }), // # (compact, centered)
h('col', null), // Public Key (fills)
h('col', { style: 'width:180px' }), // Value (compact, right)
h('col', { style: 'width:80px' }), // #
h('col', null), // Public Key
h('col', { style: 'width:180px' }), // Value
),
h(
'thead',
@ -169,8 +265,8 @@ function OutputsTable({ outputs }) {
'tr',
null,
h('th', { style: 'text-align:center;' }, '#'),
h('th', null, 'Public Key'), // ← back to Public Key second
h('th', { style: 'text-align:right;' }, 'Value'), // ← Value last
h('th', null, 'Public Key'),
h('th', { style: 'text-align:right;' }, 'Value'),
),
),
h(
@ -180,26 +276,18 @@ function OutputsTable({ outputs }) {
h(
'tr',
{ key: idx },
// # (index)
h('td', { style: 'text-align:center;' }, String(idx)),
// Public Key (fills, wraps)
h(
'td',
null,
h(
'span',
{
class: 'mono',
style: 'display:inline-block; overflow-wrap:anywhere; word-break:break-word;',
title: renderBytes(note?.public_key),
},
renderBytes(note?.public_key),
{ class: 'mono', style: 'display:inline-block; overflow-wrap:anywhere;' },
String(note.public_key ?? ''),
),
h('span', { class: 'sr-only' }, ' '),
),
// Value (right-aligned)
h('td', { class: 'amount', style: 'text-align:right;' }, toLocaleNum(note?.value)),
h('td', { class: 'amount', style: 'text-align:right;' }, toLocaleNum(note.value)),
),
),
),
@ -208,25 +296,25 @@ function OutputsTable({ outputs }) {
}
function Ledger({ ledger }) {
const { inputs, outputs, totalOutputValue } = ledger;
// Sum inputs as integers (Fr is declared as int in your schema)
const totalInputValue = inputs.reduce((sum, v) => sum + Number(v ?? 0), 0);
const inputs = Array.isArray(ledger?.inputs) ? ledger.inputs : [];
const outputs = Array.isArray(ledger?.outputs) ? ledger.outputs : [];
const totalInputValue = inputs.reduce((s, v) => s + toNumber(v), 0);
const totalOutputValue = toNumber(ledger?.totalOutputValue);
return h(
SectionCard,
{ title: 'Ledger Transaction' },
{ title: 'Ledger' },
h(
'div',
{ style: 'display:grid; gap:16px;' },
// Inputs (with Total on the right)
// Inputs
h(
'div',
null,
h(
'div',
{ style: 'display:flex; align-items:center; gap:8px;' },
{ style: 'display:flex; alignItems:center; gap:8px;' },
h('b', null, 'Inputs'),
h('span', { class: 'pill' }, String(inputs.length)),
h(
@ -238,13 +326,13 @@ function Ledger({ ledger }) {
h(InputsTable, { inputs }),
),
// Outputs (unchanged header total)
// Outputs
h(
'div',
null,
h(
'div',
{ style: 'display:flex; align-items:center; gap:8px;' },
{ style: 'display:flex; alignItems:center; gap:8px;' },
h('b', null, 'Outputs'),
h('span', { class: 'pill' }, String(outputs.length)),
h(
@ -317,7 +405,7 @@ export default function TransactionDetail({ parameters }) {
h(
'header',
{ style: 'display:flex; gap:12px; align-items:center; margin:12px 0;' },
{ style: 'display:flex; gap:12px; alignItems:center; margin:12px 0;' },
h('a', { class: 'linkish', href: '/' }, '← Back'),
h('h1', { style: 'margin:0' }, pageTitle),
),