mirror of https://github.com/logos-blockchain/logos-blockchain-block-explorer-template.git
synced 2026-02-16 19:13:25 +00:00

display inscriptions in block explorer

commit 36aed1ddc3 (parent dda9e4e714)

@@ -1,5 +1,5 @@
 {
   "dev": {
-    "host": "http://localhost:18080"
+    "host": "http://localhost:8080"
   }
 }

@@ -26,7 +26,7 @@ class NBESettings(BaseSettings):
     node_manager: Literal["docker", "noop"] = Field(alias="NBE_NODE_MANAGER", default="noop")
 
     node_api_host: str = Field(alias="NBE_NODE_API_HOST", default="127.0.0.1")
-    node_api_port: int = Field(alias="NBE_NODE_API_PORT", default=18080)
+    node_api_port: int = Field(alias="NBE_NODE_API_PORT", default=8080)
     node_api_timeout: int = Field(alias="NBE_NODE_API_TIMEOUT", default=60)
     node_api_protocol: str = Field(alias="NBE_NODE_API_PROTOCOL", default="http")
     node_api_auth: Optional[Authentication] = Field(alias="NBE_NODE_API_AUTH", default=None)

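The values above are only fallbacks; each setting is meant to be overridden through the NBE_* environment variable named in its Field alias. A minimal sketch of that override pattern, written with plain os.environ rather than the project's pydantic wiring, purely for illustration:

    import os

    # Illustrative only: the explorer itself reads these through pydantic Field
    # aliases, but the effective precedence is the same (environment first,
    # default second).
    host = os.environ.get("NBE_NODE_API_HOST", "127.0.0.1")
    port = int(os.environ.get("NBE_NODE_API_PORT", "8080"))

    print(f"node API expected at http://{host}:{port}")
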
@@ -1,5 +1,5 @@
 from enum import Enum
-from typing import List, Optional
+from typing import List, Literal, Optional
 
 from core.models import NbeSchema
 from core.types import HexBytes

@@ -16,11 +16,11 @@ class ContentType(Enum):
 
 
 class NbeContent(NbeSchema):
-    type: ContentType
+    type: str
 
 
 class ChannelInscribe(NbeContent):
-    type: ContentType = ContentType.CHANNEL_INSCRIBE
+    type: Literal["ChannelInscribe"] = "ChannelInscribe"
     channel_id: HexBytes
     inscription: HexBytes
     parent: HexBytes

@@ -28,7 +28,7 @@ class ChannelInscribe(NbeContent):
 
 
 class ChannelBlob(NbeContent):
-    type: ContentType = ContentType.CHANNEL_BLOB
+    type: Literal["ChannelBlob"] = "ChannelBlob"
     channel: HexBytes
     blob: HexBytes
     blob_size: int

@@ -38,7 +38,7 @@ class ChannelBlob(NbeContent):
 
 
 class ChannelSetKeys(NbeContent):
-    type: ContentType = ContentType.CHANNEL_SET_KEYS
+    type: Literal["ChannelSetKeys"] = "ChannelSetKeys"
     channel: HexBytes
     keys: List[bytes]
 

@@ -49,7 +49,7 @@ class SDPDeclareServiceType(Enum):
 
 
 class SDPDeclare(NbeContent):
-    type: ContentType = ContentType.SDP_DECLARE
+    type: Literal["SDPDeclare"] = "SDPDeclare"
     service_type: SDPDeclareServiceType
     locators: List[bytes]
     provider_id: HexBytes

@@ -58,20 +58,20 @@ class SDPDeclare(NbeContent):
 
 
 class SDPWithdraw(NbeContent):
-    type: ContentType = ContentType.SDP_WITHDRAW
+    type: Literal["SDPWithdraw"] = "SDPWithdraw"
     declaration_id: HexBytes
     nonce: HexBytes
 
 
 class SDPActive(NbeContent):
-    type: ContentType = ContentType.SDP_ACTIVE
+    type: Literal["SDPActive"] = "SDPActive"
     declaration_id: HexBytes
     nonce: HexBytes
     metadata: Optional[bytes]
 
 
 class LeaderClaim(NbeContent):
-    type: ContentType = ContentType.LEADER_CLAIM
+    type: Literal["LeaderClaim"] = "LeaderClaim"
     rewards_root: HexBytes
     voucher_nullifier: HexBytes
     mantle_tx_hash: HexBytes

@@ -1,8 +1,12 @@
+from typing import Annotated
+
+from pydantic import Field
+
 from core.models import NbeSchema
-from models.transactions.operations.contents import NbeContent
+from models.transactions.operations.contents import OperationContent
 from models.transactions.operations.proofs import OperationProof
 
 
 class Operation(NbeSchema):
-    content: NbeContent
-    proof: OperationProof
+    content: Annotated[OperationContent, Field(discriminator="type")]
+    proof: Annotated[OperationProof, Field(discriminator="type")]

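For context, this change leans on pydantic's tagged unions: because every content model now carries a Literal "type" field, Field(discriminator="type") can pick the right subclass straight from the incoming JSON instead of trying each variant in turn. A minimal, self-contained sketch of the pattern (the class names here are illustrative, not the project's real models):

    from typing import Annotated, Literal, Union

    from pydantic import BaseModel, Field

    class Inscribe(BaseModel):
        type: Literal["ChannelInscribe"] = "ChannelInscribe"
        inscription: str

    class Blob(BaseModel):
        type: Literal["ChannelBlob"] = "ChannelBlob"
        blob_size: int

    # The discriminator reads the "type" key and dispatches to the matching model.
    Content = Annotated[Union[Inscribe, Blob], Field(discriminator="type")]

    class Operation(BaseModel):
        content: Content

    op = Operation.model_validate({"content": {"type": "ChannelBlob", "blob_size": 4}})
    assert isinstance(op.content, Blob)
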
@@ -1,4 +1,5 @@
 from enum import Enum
+from typing import Literal
 
 from core.models import NbeSchema
 from core.types import HexBytes

@@ -11,21 +12,21 @@ class SignatureType(Enum):
 
 
 class NbeSignature(NbeSchema):
-    type: SignatureType
+    type: str
 
 
 class Ed25519Signature(NbeSignature):
-    type: SignatureType = SignatureType.ED25519
+    type: Literal["Ed25519"] = "Ed25519"
     signature: HexBytes
 
 
 class ZkSignature(NbeSignature):
-    type: SignatureType = SignatureType.ZK
+    type: Literal["Zk"] = "Zk"
     signature: HexBytes
 
 
 class ZkAndEd25519Signature(NbeSignature):
-    type: SignatureType = SignatureType.ZK_AND_ED25519
+    type: Literal["ZkAndEd25519"] = "ZkAndEd25519"
     zk_signature: HexBytes
     ed25519_signature: HexBytes
 

@@ -1,9 +1,9 @@
 from abc import ABC, abstractmethod
 from enum import Enum
 from random import choice, randint
-from typing import Annotated, List, Optional, Self, Union
+from typing import Annotated, Any, List, Optional, Self, Union
 
-from pydantic import Field
+from pydantic import BeforeValidator, Field
 
 from core.models import NbeSerializer
 from models.transactions.operations.contents import (

@@ -28,9 +28,9 @@ class OperationContentSerializer(NbeSerializer, EnforceSubclassFromRandom, ABC):
 
 
 class ChannelInscribeSerializer(OperationContentSerializer):
-    channel_id: BytesFromIntArray = Field(description="Bytes as a 32-integer array.")
+    channel_id: BytesFromHex = Field(description="Channel ID in hex format.")
     inscription: BytesFromIntArray = Field(description="Bytes as an integer array.")
-    parent: BytesFromIntArray = Field(description="Bytes as a 32-integer array.")
+    parent: BytesFromHex = Field(description="Parent hash in hex format.")
     signer: BytesFromHex = Field(description="Public Key in hex format.")
 
     def into_operation_content(self) -> ChannelInscribe:

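BytesFromHex and BytesFromIntArray are the project's own annotated field types, imported from node.api.serializers.fields and not shown in this diff. A plausible sketch of how such coercing types are commonly built with pydantic, for illustration only (the real definitions may differ):

    from typing import Annotated

    from pydantic import BaseModel, BeforeValidator

    # Hypothetical stand-ins for the project's field types.
    BytesFromHex = Annotated[bytes, BeforeValidator(lambda v: bytes.fromhex(v) if isinstance(v, str) else v)]
    BytesFromIntArray = Annotated[bytes, BeforeValidator(lambda v: bytes(v) if isinstance(v, (list, tuple)) else v)]

    class Demo(BaseModel):
        channel_id: BytesFromHex
        inscription: BytesFromIntArray

    d = Demo.model_validate({"channel_id": "ab" * 32, "inscription": [104, 105]})
    assert d.channel_id == bytes.fromhex("ab" * 32)
    assert d.inscription == b"hi"
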
@@ -47,20 +47,20 @@ class ChannelInscribeSerializer(OperationContentSerializer):
     def from_random(cls) -> Self:
         return cls.model_validate(
             {
-                "channel_id": list(random_bytes(32)),
+                "channel_id": random_bytes(32).hex(),
                 "inscription": list(random_bytes(32)),
-                "parent": list(random_bytes(32)),
+                "parent": random_bytes(32).hex(),
                 "signer": random_bytes(32).hex(),
             }
         )
 
 
 class ChannelBlobSerializer(OperationContentSerializer):
-    channel: BytesFromIntArray = Field(description="Bytes as a 32-integer array.")
-    blob: BytesFromIntArray = Field(description="Bytes as a 32-integer array.")
+    channel: BytesFromHex = Field(description="Channel ID in hex format.")
+    blob: BytesFromIntArray = Field(description="Bytes as an integer array.")
     blob_size: int
     da_storage_gas_price: int
-    parent: BytesFromIntArray = Field(description="Bytes as a 32-integer array.")
+    parent: BytesFromHex = Field(description="Parent hash in hex format.")
     signer: BytesFromHex = Field(description="Public Key in hex format.")
 
     def into_operation_content(self) -> ChannelBlob:

@@ -79,18 +79,18 @@ class ChannelBlobSerializer(OperationContentSerializer):
     def from_random(cls) -> Self:
         return cls.model_validate(
             {
-                "channel": list(random_bytes(32)),
+                "channel": random_bytes(32).hex(),
                 "blob": list(random_bytes(32)),
                 "blob_size": randint(1, 1_024),
                 "da_storage_gas_price": randint(1, 10_000),
-                "parent": list(random_bytes(32)),
+                "parent": random_bytes(32).hex(),
                 "signer": random_bytes(32).hex(),
             }
         )
 
 
 class ChannelSetKeysSerializer(OperationContentSerializer):
-    channel: BytesFromIntArray = Field(description="Bytes as a 32-integer array.")
+    channel: BytesFromHex = Field(description="Channel ID in hex format.")
     keys: List[BytesFromHex] = Field(description="List of Public Keys in hex format.")
 
     def into_operation_content(self) -> ChannelSetKeys:

@@ -106,7 +106,7 @@ class ChannelSetKeysSerializer(OperationContentSerializer):
         n = 1 if randint(0, 1) <= 0.5 else randint(1, 5)
         return cls.model_validate(
             {
-                "channel": list(random_bytes(32)),
+                "channel": random_bytes(32).hex(),
                 "keys": [random_bytes(32).hex() for _ in range(n)],
             }
         )

@@ -120,7 +120,7 @@ class SDPDeclareServiceType(Enum):
 class SDPDeclareSerializer(OperationContentSerializer):
     service_type: SDPDeclareServiceType
     locators: List[BytesFromHex]
-    provider_id: BytesFromIntArray = Field(description="Bytes as an integer array.")
+    provider_id: BytesFromHex = Field(description="Provider ID in hex format.")
     zk_id: BytesFromHex = Field(description="Fr integer.")
     locked_note_id: BytesFromHex = Field(description="Fr integer.")
 

@@ -142,7 +142,7 @@ class SDPDeclareSerializer(OperationContentSerializer):
             {
                 "service_type": choice(list(SDPDeclareServiceType)).value,
                 "locators": [random_bytes(32).hex() for _ in range(n)],
-                "provider_id": list(random_bytes(32)),
+                "provider_id": random_bytes(32).hex(),
                 "zk_id": random_bytes(32).hex(),
                 "locked_note_id": random_bytes(32).hex(),
             }

@@ -150,7 +150,7 @@ class SDPDeclareSerializer(OperationContentSerializer):
 
 
 class SDPWithdrawSerializer(OperationContentSerializer):
-    declaration_id: BytesFromIntArray = Field(description="Bytes as a 32-integer array.")
+    declaration_id: BytesFromHex = Field(description="Declaration ID in hex format.")
     nonce: BytesFromInt
 
     def into_operation_content(self) -> SDPWithdraw:

@@ -165,14 +165,14 @@ class SDPWithdrawSerializer(OperationContentSerializer):
     def from_random(cls) -> Self:
         return cls.model_validate(
             {
-                "declaration_id": list(random_bytes(32)),
+                "declaration_id": random_bytes(32).hex(),
                 "nonce": int.from_bytes(random_bytes(8)),
             }
         )
 
 
 class SDPActiveSerializer(OperationContentSerializer):
-    declaration_id: BytesFromIntArray = Field(description="Bytes as a 32-integer array.")
+    declaration_id: BytesFromHex = Field(description="Declaration ID in hex format.")
     nonce: BytesFromInt
     metadata: Optional[BytesFromIntArray] = Field(description="Bytes as an integer array.")
 

@@ -189,7 +189,7 @@ class SDPActiveSerializer(OperationContentSerializer):
     def from_random(cls) -> Self:
         return cls.model_validate(
             {
-                "declaration_id": list(random_bytes(32)),
+                "declaration_id": random_bytes(32).hex(),
                 "nonce": int.from_bytes(random_bytes(8)),
                 "metadata": None if randint(0, 1) <= 0.5 else list(random_bytes(32)),
             }

@@ -221,6 +221,30 @@ class LeaderClaimSerializer(OperationContentSerializer):
         )
 
 
+OPCODE_TO_SERIALIZER: dict[int, type[OperationContentSerializer]] = {
+    0: ChannelInscribeSerializer,
+    1: ChannelBlobSerializer,
+    2: ChannelSetKeysSerializer,
+    3: SDPDeclareSerializer,
+    4: SDPWithdrawSerializer,
+    5: SDPActiveSerializer,
+    6: LeaderClaimSerializer,
+}
+
+
+def _parse_operation(data: Any) -> OperationContentSerializer:
+    if isinstance(data, OperationContentSerializer):
+        return data
+    if isinstance(data, dict) and "opcode" in data:
+        opcode = data["opcode"]
+        payload = data["payload"]
+        serializer_class = OPCODE_TO_SERIALIZER.get(opcode)
+        if serializer_class is None:
+            raise ValueError(f"Unknown operation opcode: {opcode}")
+        return serializer_class.model_validate(payload)
+    return data
+
+
 type OperationContentSerializerVariants = Union[
     ChannelInscribeSerializer,
     ChannelBlobSerializer,

@@ -230,4 +254,7 @@ type OperationContentSerializerVariants = Union[
     SDPActiveSerializer,
     LeaderClaimSerializer,
 ]
-OperationContentSerializerField = Annotated[OperationContentSerializerVariants, Field(union_mode="left_to_right")]
+OperationContentSerializerField = Annotated[
+    OperationContentSerializerVariants,
+    BeforeValidator(_parse_operation),
+]

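Operations can now arrive as an opcode/payload envelope, and _parse_operation together with the BeforeValidator above unwraps that envelope before the usual union validation runs. A toy, self-contained version of the same flow (the model names and opcode values below are placeholders, not the real serializers):

    from typing import Annotated, Any, Union

    from pydantic import BaseModel, BeforeValidator

    class InscribePayload(BaseModel):
        inscription: str

    class BlobPayload(BaseModel):
        blob_size: int

    OPCODES = {0: InscribePayload, 1: BlobPayload}

    def unwrap(data: Any) -> Any:
        # Mirrors _parse_operation: unwrap {"opcode": n, "payload": {...}} envelopes,
        # pass everything else through to the normal union validation.
        if isinstance(data, dict) and "opcode" in data:
            return OPCODES[data["opcode"]].model_validate(data["payload"])
        return data

    OpField = Annotated[Union[InscribePayload, BlobPayload], BeforeValidator(unwrap)]

    class Tx(BaseModel):
        ops: list[OpField]

    tx = Tx.model_validate({"ops": [{"opcode": 1, "payload": {"blob_size": 8}}]})
    assert isinstance(tx.ops[0], BlobPayload)
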
@@ -1,7 +1,7 @@
 from abc import ABC, abstractmethod
-from typing import Annotated, Self, Union
+from typing import Annotated, Any, Self, Union
 
-from pydantic import Field, RootModel
+from pydantic import BeforeValidator, Field, RootModel
 
 from core.models import NbeSerializer
 from models.transactions.operations.proofs import (

@@ -10,7 +10,7 @@ from models.transactions.operations.proofs import (
     ZkAndEd25519Signature,
     ZkSignature,
 )
-from node.api.serializers.fields import BytesFromHex
+from node.api.serializers.fields import BytesFromIntArray
 from utils.protocols import EnforceSubclassFromRandom
 from utils.random import random_bytes
 

@@ -21,11 +21,8 @@ class OperationProofSerializer(EnforceSubclassFromRandom, ABC):
         raise NotImplementedError
 
 
-# TODO: Differentiate between Ed25519SignatureSerializer and ZkSignatureSerializer
-
-
-class Ed25519SignatureSerializer(OperationProofSerializer, RootModel[str]):
-    root: BytesFromHex
+class Ed25519SignatureSerializer(OperationProofSerializer, RootModel[bytes]):
+    root: BytesFromIntArray
 
     def into_operation_proof(self) -> NbeSignature:
         return Ed25519Signature.model_validate(

@@ -36,11 +33,11 @@ class Ed25519SignatureSerializer(OperationProofSerializer, RootModel[str]):
 
     @classmethod
     def from_random(cls, *args, **kwargs) -> Self:
-        return cls.model_validate(random_bytes(64).hex())
+        return cls.model_validate(list(random_bytes(64)))
 
 
-class ZkSignatureSerializer(OperationProofSerializer, RootModel[str]):
-    root: BytesFromHex
+class ZkSignatureSerializer(OperationProofSerializer, RootModel[bytes]):
+    root: BytesFromIntArray
 
     def into_operation_proof(self) -> NbeSignature:
         return ZkSignature.model_validate(

@@ -51,12 +48,12 @@ class ZkSignatureSerializer(OperationProofSerializer, RootModel[str]):
 
     @classmethod
     def from_random(cls, *args, **kwargs) -> Self:
-        return cls.model_validate(random_bytes(32).hex())
+        return cls.model_validate(list(random_bytes(32)))
 
 
 class ZkAndEd25519SignaturesSerializer(OperationProofSerializer, NbeSerializer):
-    zk_signature: BytesFromHex = Field(alias="zk_sig")
-    ed25519_signature: BytesFromHex = Field(alias="ed25519_sig")
+    zk_signature: BytesFromIntArray = Field(alias="zk_sig")
+    ed25519_signature: BytesFromIntArray = Field(alias="ed25519_sig")
 
     def into_operation_proof(self) -> NbeSignature:
         return ZkAndEd25519Signature.model_validate(

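The proof serializers above switch from hex strings (RootModel[str] with BytesFromHex) to integer arrays (RootModel[bytes] with BytesFromIntArray), apparently matching how the node now encodes raw signature bytes in JSON. A small sketch of that root-model pattern, with an assumed BytesFromIntArray definition:

    from typing import Annotated

    from pydantic import BeforeValidator, RootModel

    # Assumed stand-in for the project's BytesFromIntArray type.
    BytesFromIntArray = Annotated[bytes, BeforeValidator(lambda v: bytes(v) if isinstance(v, list) else v)]

    class Ed25519SigPayload(RootModel[bytes]):
        # The whole JSON value *is* the signature: a 64-element integer array.
        root: BytesFromIntArray

    sig = Ed25519SigPayload.model_validate(list(range(64)))
    assert isinstance(sig.root, bytes) and len(sig.root) == 64
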
@@ -70,13 +67,33 @@ class ZkAndEd25519SignaturesSerializer(OperationProofSerializer, NbeSerializer):
     def from_random(cls, *args, **kwargs) -> Self:
         return ZkAndEd25519SignaturesSerializer.model_validate(
             {
-                "zk_sig": random_bytes(32).hex(),
-                "ed25519_sig": random_bytes(32).hex(),
+                "zk_sig": list(random_bytes(32)),
+                "ed25519_sig": list(random_bytes(64)),
             }
         )
 
 
+PROOF_TAG_TO_SERIALIZER = {
+    "Ed25519Sig": Ed25519SignatureSerializer,
+    "ZkSig": ZkSignatureSerializer,
+    "ZkAndEd25519Sigs": ZkAndEd25519SignaturesSerializer,
+}
+
+
+def _parse_proof(data: Any) -> OperationProofSerializer:
+    if isinstance(data, OperationProofSerializer):
+        return data
+    if isinstance(data, dict):
+        for tag, serializer_class in PROOF_TAG_TO_SERIALIZER.items():
+            if tag in data:
+                return serializer_class.model_validate(data[tag])
+    return data
+
+
 OperationProofSerializerVariants = Union[
     Ed25519SignatureSerializer, ZkSignatureSerializer, ZkAndEd25519SignaturesSerializer
 ]
-OperationProofSerializerField = Annotated[OperationProofSerializerVariants, Field(union_mode="left_to_right")]
+OperationProofSerializerField = Annotated[
+    OperationProofSerializerVariants,
+    BeforeValidator(_parse_proof),
+]

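Proofs are externally tagged, i.e. delivered as single-key objects such as {"ZkSig": [...]} or {"ZkAndEd25519Sigs": {...}}, and _parse_proof picks the serializer by that key. The same dispatch idea, reduced to a standalone sketch with made-up handlers:

    from typing import Any, Callable

    # Made-up handlers standing in for the real serializer classes.
    HANDLERS: dict[str, Callable[[Any], tuple[str, bytes]]] = {
        "Ed25519Sig": lambda payload: ("ed25519", bytes(payload)),
        "ZkSig": lambda payload: ("zk", bytes(payload)),
    }

    def parse_tagged(data: dict[str, Any]) -> tuple[str, bytes]:
        # Exactly one of the known tags is expected to be present.
        for tag, handler in HANDLERS.items():
            if tag in data:
                return handler(data[tag])
        raise ValueError(f"unknown proof tag(s): {sorted(data)}")

    assert parse_tagged({"ZkSig": [1, 2, 3]}) == ("zk", b"\x01\x02\x03")
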
@@ -1,3 +1,5 @@
+import hashlib
+import json
 from typing import List, Self
 
 from pydantic import Field

@@ -40,6 +42,11 @@ class SignedTransactionSerializer(NbeSerializer, FromRandom):
     )
     ledger_transaction_proof: Groth16ProofSerializer = Field(alias="ledger_tx_proof", description="Groth16 proof.")
 
+    def _compute_hash(self) -> bytes:
+        data = self.transaction.model_dump(mode="json")
+        canonical = json.dumps(data, sort_keys=True, separators=(",", ":"))
+        return hashlib.sha256(canonical.encode()).digest()
+
     def into_transaction(self) -> Transaction:
         operations_contents = self.transaction.operations_contents
         if len(operations_contents) != len(self.operations_proofs):

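_compute_hash gives the explorer a deterministic transaction identifier by hashing a canonical JSON rendering of the transaction: sorting the keys and fixing the separators makes the digest independent of key order and whitespace. The same idea in isolation:

    import hashlib
    import json

    def canonical_hash(payload: dict) -> bytes:
        # sort_keys + compact separators -> byte-identical JSON for equal payloads.
        canonical = json.dumps(payload, sort_keys=True, separators=(",", ":"))
        return hashlib.sha256(canonical.encode()).digest()

    assert canonical_hash({"b": 1, "a": 2}) == canonical_hash({"a": 2, "b": 1})
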
@@ -60,7 +67,7 @@ class SignedTransactionSerializer(NbeSerializer, FromRandom):
 
         return Transaction.model_validate(
             {
-                "hash": self.transaction.hash,
+                "hash": self._compute_hash(),
                 "operations": operations,
                 "inputs": ledger_transaction.inputs,
                 "outputs": outputs,

@@ -4,7 +4,6 @@ from typing import List, Self
 from pydantic import Field
 
 from core.models import NbeSerializer
-from node.api.serializers.fields import BytesFromHex
 from node.api.serializers.ledger_transaction import LedgerTransactionSerializer
 from node.api.serializers.operation import (
     OperationContentSerializer,

@@ -15,7 +14,6 @@ from utils.random import random_bytes
 
 
 class TransactionSerializer(NbeSerializer, FromRandom):
-    hash: BytesFromHex = Field(description="Hash id in hex format.")
     operations_contents: List[OperationContentSerializerField] = Field(alias="ops")
     ledger_transaction: LedgerTransactionSerializer = Field(alias="ledger_tx")
     execution_gas_price: int = Field(description="Integer in u64 format.")

@@ -27,7 +25,6 @@ class TransactionSerializer(NbeSerializer, FromRandom):
         operations_contents = [OperationContentSerializer.from_random() for _ in range(n)]
         return cls.model_validate(
             {
-                "hash": random_bytes(32).hex(),
                 "ops": operations_contents,
                 "ledger_tx": LedgerTransactionSerializer.from_random(),
                 "execution_gas_price": randint(1, 10_000),

@@ -1,3 +1,4 @@
+import asyncio
 import logging
 from asyncio import create_task
 from contextlib import asynccontextmanager

@@ -20,16 +21,24 @@ async def backfill_to_lib(app: "NBE") -> None:
     """
     Fetch the LIB (Last Irreversible Block) from the node and backfill by walking the chain backwards.
     This traverses parent links instead of querying by slot range, which handles pruned/missing blocks.
+    Retries indefinitely with exponential backoff on failure.
     """
-    try:
-        info = await app.state.node_api.get_info()
-        logger.info(f"Node info: LIB={info.lib}, tip={info.tip}, slot={info.slot}, height={info.height}")
+    delay = 1.0
+    max_delay = 60.0
 
-        await backfill_chain_from_hash(app, info.lib)
-    except Exception as error:
-        logger.exception(f"Error during initial backfill to LIB: {error}")
-        # Don't raise - we can still try to subscribe to new blocks
+    while True:
+        try:
+            info = await app.state.node_api.get_info()
+            logger.info(f"Node info: LIB={info.lib}, tip={info.tip}, slot={info.slot}, height={info.height}")
+
+            await backfill_chain_from_hash(app, info.lib)
+            return
+
+        except Exception as error:
+            logger.exception(f"Error during initial backfill to LIB: {error}")
+            logger.info(f"Retrying backfill in {delay:.0f}s...")
+            await asyncio.sleep(delay)
+            delay = min(delay * 2, max_delay)
 
 
 async def backfill_chain_from_hash(app: "NBE", block_hash: str) -> None:

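The backfill above now retries forever, doubling its wait after each failure and capping it at 60 seconds. The same shape expressed as a reusable helper (retry_forever is hypothetical, not part of the repository):

    import asyncio
    import logging

    logger = logging.getLogger(__name__)

    async def retry_forever(operation, *, base_delay: float = 1.0, max_delay: float = 60.0):
        """Call `operation` until it succeeds, sleeping base_delay, 2*base_delay, ...
        (capped at max_delay) between attempts."""
        delay = base_delay
        while True:
            try:
                return await operation()
            except Exception:
                logger.exception("operation failed, retrying in %.0fs", delay)
                await asyncio.sleep(delay)
                delay = min(delay * 2, max_delay)
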
@@ -45,26 +45,62 @@ const toNumber = (v) => {
   return 0;
 };
 
-const opLabel = (op) => {
-  if (op == null) return 'op';
-  if (typeof op === 'string' || typeof op === 'number') return String(op);
-  if (typeof op !== 'object') return String(op);
-  if (typeof op.type === 'string') return op.type;
-  if (typeof op.kind === 'string') return op.kind;
-  if (op.content) {
-    if (typeof op.content.type === 'string') return op.content.type;
-    if (typeof op.content.kind === 'string') return op.content.kind;
-  }
-  const keys = Object.keys(op);
-  return keys.length ? keys[0] : 'op';
-};
+function tryDecodeUtf8Hex(hex) {
+  if (typeof hex !== 'string' || hex.length === 0 || hex.length % 2 !== 0) return null;
+  try {
+    const bytes = new Uint8Array(hex.length / 2);
+    for (let i = 0; i < hex.length; i += 2) {
+      const b = parseInt(hex.substring(i, i + 2), 16);
+      if (Number.isNaN(b)) return null;
+      bytes[i / 2] = b;
+    }
+    const text = new TextDecoder('utf-8', { fatal: true }).decode(bytes);
+    if (/[\x20-\x7e]/.test(text)) return text;
+    return null;
+  } catch {
+    return null;
+  }
+}
+
+function opPreview(op) {
+  const content = op?.content ?? op;
+  const type = content?.type ?? (typeof op === 'string' ? op : 'op');
+
+  if (type === 'ChannelInscribe' && content) {
+    const chanShort = typeof content.channel_id === 'string' ? content.channel_id.slice(0, 8) : '?';
+    let inscPreview = '';
+    if (typeof content.inscription === 'string') {
+      const decoded = tryDecodeUtf8Hex(content.inscription);
+      if (decoded != null) {
+        inscPreview = decoded.length > 20 ? decoded.slice(0, 20) + '\u2026' : decoded;
+      } else {
+        inscPreview = content.inscription.slice(0, 12) + '\u2026';
+      }
+    }
+    return `${type}(${chanShort}\u2026, ${inscPreview})`;
+  }
+
+  if (type === 'ChannelBlob' && content) {
+    const chanShort = typeof content.channel === 'string' ? content.channel.slice(0, 8) : '?';
+    const size = content.blob_size != null ? `${content.blob_size}B` : '?';
+    return `${type}(${chanShort}\u2026, ${size})`;
+  }
+
+  if (type === 'ChannelSetKeys' && content) {
+    const chanShort = typeof content.channel === 'string' ? content.channel.slice(0, 8) : '?';
+    const nKeys = Array.isArray(content.keys) ? content.keys.length : '?';
+    return `${type}(${chanShort}\u2026, ${nKeys} keys)`;
+  }
+
+  return type;
+}
 
 function formatOperationsPreview(ops) {
   if (!ops?.length) return '—';
-  const labels = ops.map(opLabel);
-  if (labels.length <= OPERATIONS_PREVIEW_LIMIT) return labels.join(', ');
-  const head = labels.slice(0, OPERATIONS_PREVIEW_LIMIT).join(', ');
-  const remainder = labels.length - OPERATIONS_PREVIEW_LIMIT;
+  const previews = ops.map(opPreview);
+  if (previews.length <= OPERATIONS_PREVIEW_LIMIT) return previews.join(', ');
+  const head = previews.slice(0, OPERATIONS_PREVIEW_LIMIT).join(', ');
+  const remainder = previews.length - OPERATIONS_PREVIEW_LIMIT;
   return `${head} +${remainder}`;
 }

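tryDecodeUtf8Hex is what lets the explorer show human-readable inscriptions: a hex payload is only rendered as text when it decodes as UTF-8 and contains at least one printable ASCII character. An equivalent check in Python, for reference (the frontend code above is the authoritative version):

    def try_decode_utf8_hex(hex_str: str):
        """Return the decoded text, or None when the bytes are not printable UTF-8."""
        if not hex_str or len(hex_str) % 2 != 0:
            return None
        try:
            text = bytes.fromhex(hex_str).decode("utf-8")
        except (ValueError, UnicodeDecodeError):
            return None
        # Mirror the JS check: require at least one printable ASCII character.
        return text if any(" " <= ch <= "~" for ch in text) else None

    assert try_decode_utf8_hex("68656c6c6f") == "hello"
    assert try_decode_utf8_hex("ff") is None
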
@@ -98,22 +134,18 @@ function buildTransactionRow(tx) {
 
   // Operations (preview)
   const tdOps = document.createElement('td');
+  tdOps.style.whiteSpace = 'normal';
+  tdOps.style.lineHeight = '1.4';
   const preview = formatOperationsPreview(tx.operations);
-  tdOps.appendChild(
-    createSpan('', preview, Array.isArray(tx.operations) ? tx.operations.map(opLabel).join(', ') : ''),
-  );
+  const fullPreview = Array.isArray(tx.operations) ? tx.operations.map(opPreview).join(', ') : '';
+  tdOps.appendChild(createSpan('', preview, fullPreview));
 
   // Outputs (count / total)
   const tdOut = document.createElement('td');
   tdOut.className = 'amount';
   tdOut.textContent = `${tx.numberOfOutputs} / ${tx.totalOutputValue.toLocaleString(undefined, { maximumFractionDigits: 8 })}`;
 
-  // Gas (execution / storage)
-  const tdGas = document.createElement('td');
-  tdGas.className = 'mono';
-  tdGas.textContent = `${tx.executionGasPrice.toLocaleString()} / ${tx.storageGasPrice.toLocaleString()}`;
-
-  tr.append(tdId, tdOps, tdOut, tdGas);
+  tr.append(tdId, tdOps, tdOut);
   return tr;
 }

@@ -128,8 +160,8 @@ export default function TransactionsTable() {
     const body = bodyRef.current;
     const counter = countRef.current;
 
-    // 4 columns: Hash | Operations | Outputs | Gas
-    ensureFixedRowCount(body, 4, TABLE_SIZE);
+    // 3 columns: Hash | Operations | Outputs
+    ensureFixedRowCount(body, 3, TABLE_SIZE);
 
     abortRef.current?.abort();
     abortRef.current = new AbortController();

@@ -187,7 +219,6 @@ export default function TransactionsTable() {
       h('col', { style: 'width:240px' }), // Hash
       h('col', null), // Operations
       h('col', { style: 'width:200px' }), // Outputs (count / total)
-      h('col', { style: 'width:200px' }), // Gas (execution / storage)
     ),
     h(
       'thead',

@@ -198,7 +229,6 @@ export default function TransactionsTable() {
        h('th', null, 'Hash'),
        h('th', null, 'Operations'),
        h('th', null, 'Outputs (count / total)'),
-        h('th', null, 'Gas (execution / storage)'),
      ),
    ),
    h('tbody', { ref: bodyRef }),

@@ -296,6 +296,142 @@ function OutputsTable({ outputs }) {
   );
 }
 
+// ————— operations detail —————
+
+/** Try to decode a hex string as UTF-8. Returns the decoded string or null on failure. */
+function tryDecodeUtf8Hex(hex) {
+  if (typeof hex !== 'string' || hex.length === 0 || hex.length % 2 !== 0) return null;
+  try {
+    const bytes = new Uint8Array(hex.length / 2);
+    for (let i = 0; i < hex.length; i += 2) {
+      const b = parseInt(hex.substring(i, i + 2), 16);
+      if (Number.isNaN(b)) return null;
+      bytes[i / 2] = b;
+    }
+    const text = new TextDecoder('utf-8', { fatal: true }).decode(bytes);
+    // Only accept if it contains at least one printable non-control character
+    if (/[\x20-\x7e]/.test(text)) return text;
+    return null;
+  } catch {
+    return null;
+  }
+}
+
+/** Human-friendly label for a content field key */
+const fieldLabel = (key) =>
+  key
+    .replace(/_/g, ' ')
+    .replace(/\b\w/g, (c) => c.toUpperCase());
+
+/** Render the value of a single content field */
+function FieldValue({ value }) {
+  if (value == null) return h('span', { class: 'mono', style: 'color:var(--muted)' }, 'null');
+  if (typeof value === 'number') return h('span', { class: 'mono' }, toLocaleNum(value));
+  if (typeof value === 'string') {
+    // hex strings
+    if (value.length > 24) {
+      return h(
+        'span',
+        { style: 'display:flex; align-items:center; gap:6px;' },
+        h('span', { class: 'mono', style: 'overflow-wrap:anywhere; word-break:break-all;' }, value),
+        h(CopyPill, { text: value }),
+      );
+    }
+    return h('span', { class: 'mono' }, value);
+  }
+  if (Array.isArray(value)) {
+    return h(
+      'div',
+      { style: 'display:flex; flex-direction:column; gap:4px;' },
+      ...value.map((item, i) => h('div', { key: i }, h(FieldValue, { value: renderBytes(item) }))),
+    );
+  }
+  return h('span', { class: 'mono' }, renderBytes(value));
+}
+
+function InscriptionValue({ value }) {
+  const decoded = tryDecodeUtf8Hex(value);
+  if (decoded != null) {
+    return h(
+      'span',
+      { style: 'display:flex; align-items:center; gap:6px;' },
+      h('span', { style: 'overflow-wrap:anywhere; word-break:break-word;' }, decoded),
+      h(CopyPill, { text: decoded }),
+    );
+  }
+  return h(FieldValue, { value });
+}
+
+function OperationContent({ content }) {
+  // Get all fields except "type"
+  const entries = Object.entries(content).filter(([k]) => k !== 'type');
+  if (!entries.length) return h('div', { style: 'color:var(--muted)' }, 'No fields');
+
+  return h(
+    'div',
+    { style: 'display:grid; grid-template-columns:auto 1fr; gap:6px 12px; align-items:baseline;' },
+    ...entries.flatMap(([key, value]) => [
+      h('span', { style: 'color:var(--muted); font-size:13px; white-space:nowrap;' }, fieldLabel(key)),
+      key === 'inscription' ? h(InscriptionValue, { value }) : h(FieldValue, { value }),
+    ]),
+  );
+}
+
+function OperationProof({ proof }) {
+  if (!proof) return null;
+  const proofType = proof.type ?? 'Unknown';
+  const entries = Object.entries(proof).filter(([k]) => k !== 'type');
+
+  return h(
+    'div',
+    { style: 'margin-top:8px; padding-top:8px; border-top:1px solid #1f2435;' },
+    h('span', { style: 'color:var(--muted); font-size:12px;' }, `Proof: ${proofType}`),
+    entries.length > 0 &&
+      h(
+        'div',
+        { style: 'margin-top:4px; display:grid; grid-template-columns:auto 1fr; gap:4px 12px; align-items:baseline;' },
+        ...entries.flatMap(([key, value]) => [
+          h('span', { style: 'color:var(--muted); font-size:12px; white-space:nowrap;' }, fieldLabel(key)),
+          h('span', { class: 'mono', style: 'font-size:12px; overflow-wrap:anywhere; word-break:break-all;' }, renderBytes(value)),
+        ]),
+      ),
+  );
+}
+
+function OperationCard({ op, index }) {
+  const content = op?.content ?? op;
+  const proof = op?.proof ?? null;
+  const type = content?.type ?? opLabel(op);
+
+  return h(
+    'div',
+    { style: 'background:#0e1320; border:1px solid #1f2435; border-radius:8px; padding:12px 14px;' },
+    h(
+      'div',
+      { style: 'display:flex; align-items:center; gap:8px; margin-bottom:10px;' },
+      h('span', { class: 'pill', style: 'font-size:11px;' }, `#${index}`),
+      h('span', { class: 'pill', style: 'background:rgba(63,185,80,0.12); color:var(--accent);' }, type),
+    ),
+    h(OperationContent, { content }),
+    h(OperationProof, { proof }),
+  );
+}
+
+function Operations({ operations }) {
+  const ops = Array.isArray(operations) ? operations : [];
+  if (!ops.length) return null;
+
+  return h(
+    SectionCard,
+    { title: `Operations (${ops.length})` },
+    h(
+      'div',
+      { style: 'display:flex; flex-direction:column; gap:12px;' },
+      ...ops.map((op, i) => h(OperationCard, { key: i, op, index: i })),
+    ),
+  );
+}
+
 function Ledger({ ledger }) {
   const inputs = Array.isArray(ledger?.inputs) ? ledger.inputs : [];
   const outputs = Array.isArray(ledger?.outputs) ? ledger.outputs : [];

@@ -418,6 +554,6 @@ export default function TransactionDetail({ parameters }) {
     !tx && !err && h('p', null, 'Loading…'),
 
     // Success
-    tx && h(Fragment, null, h(Summary, { tx }), h(Ledger, { ledger: tx.ledger })),
+    tx && h(Fragment, null, h(Summary, { tx }), h(Operations, { operations: tx.operations }), h(Ledger, { ledger: tx.ledger })),
   );
 }