import json
import logging
from typing import TYPE_CHECKING, AsyncIterator, Optional
from urllib.parse import urljoin, urlunparse

import httpx
from pydantic import ValidationError
from rusty_results import Empty, Option, Some
from third_party import requests

from core.authentication import Authentication
from node.api.base import NodeApi
from node.api.serializers.block import BlockSerializer
from node.api.serializers.health import HealthSerializer
from node.api.serializers.info import InfoSerializer

if TYPE_CHECKING:
    from core.app import NBESettings

logger = logging.getLogger(__name__)


class HttpNodeApi(NodeApi):
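    """HTTP client for the node API.

    Wraps the node's REST endpoints (chain info, block storage, and the block
    event stream) behind the NodeApi interface, using connection settings from
    NBESettings.

    Example (sketch, assuming a configured NBESettings instance)::

        api = HttpNodeApi(settings)
        info = await api.get_info()
        async for block in api.get_blocks_stream():
            ...
    """
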
    # Paths can't have a leading slash since they are relative to the base URL
    ENDPOINT_INFO = "cryptarchia/info"
    ENDPOINT_BLOCKS_STREAM = "cryptarchia/events/blocks/stream"
    ENDPOINT_BLOCK_BY_HASH = "storage/block"

    def __init__(self, settings: "NBESettings"):
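        """Read host, port, protocol, timeout, and optional auth from settings."""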
        self.host: str = settings.node_api_host
        self.port: int = settings.node_api_port
        self.protocol: str = settings.node_api_protocol or "http"
        self.timeout: int = settings.node_api_timeout or 60
        self.authentication: Option[Authentication] = (
            Some(settings.node_api_auth) if settings.node_api_auth else Empty()
        )

    @property
    def base_url(self) -> str:
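        """Build the node base URL, keeping any path prefix embedded in the host.

        The prefix is given a trailing slash so that urljoin() resolves the
        endpoint paths relative to it rather than replacing it.
        """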
        if "/" in self.host:
            host, path = self.host.split("/", 1)
            path = f"/{path}"
            if not path.endswith("/"):
                path += "/"
        else:
            host = self.host
            path = ""

        network_location = f"{host}:{self.port}" if self.port else host

        url = urlunparse(
            (
                self.protocol,
                network_location,
                path,
                # The following are unused but required
                "",  # Params
                "",  # Query
                "",  # Fragment
            )
        )
        return url

    async def get_health(self) -> HealthSerializer:
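        """Check node liveness via the info endpoint (healthy on HTTP 200)."""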
        url = urljoin(self.base_url, self.ENDPOINT_INFO)
        response = requests.get(url, auth=self.authentication, timeout=self.timeout)
        if response.status_code == 200:
            return HealthSerializer.from_healthy()
        else:
            return HealthSerializer.from_unhealthy()

    async def get_info(self) -> InfoSerializer:
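        """Fetch and validate the node's chain info from the info endpoint."""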
        url = urljoin(self.base_url, self.ENDPOINT_INFO)
        response = requests.get(url, auth=self.authentication, timeout=self.timeout)
        response.raise_for_status()
        return InfoSerializer.model_validate(response.json())

    async def get_block_by_hash(self, block_hash: str) -> Optional[BlockSerializer]:
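        """Fetch a block from storage by hash; return None if it is missing."""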
        url = urljoin(self.base_url, f"{self.ENDPOINT_BLOCK_BY_HASH}/{block_hash}")
        response = requests.get(url, auth=self.authentication, timeout=self.timeout)
        if response.status_code == 404:
            return None
        response.raise_for_status()

        json_data = response.json()
        if json_data is None:
            logger.warning(f"Block {block_hash} returned null from API")
            return None

        block = BlockSerializer.model_validate(json_data)
        # The storage endpoint doesn't include the block hash in the response,
        # so we set it from the URL parameter
        if not block.header.hash:
            block.header.hash = bytes.fromhex(block_hash)
        return block

    async def get_blocks_stream(self) -> AsyncIterator[BlockSerializer]:
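        """Yield blocks from the node's block event stream as they arrive.

        Malformed or unparseable events are logged and skipped so a single bad
        line does not terminate the stream.
        """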
        url = urljoin(self.base_url, self.ENDPOINT_BLOCKS_STREAM)
        auth = self.authentication.map(lambda _auth: _auth.for_httpx()).unwrap_or(None)

        # Use no read timeout for streaming; blocks may arrive infrequently
        stream_timeout = httpx.Timeout(
            connect=self.timeout, read=None, write=self.timeout, pool=self.timeout
        )

        async with httpx.AsyncClient(timeout=stream_timeout, auth=auth) as client:
            async with client.stream("GET", url) as response:
                response.raise_for_status()  # TODO: Result

                async for line in response.aiter_lines():
                    if not line:
                        continue

                    try:
                        event = json.loads(line)
                        block = BlockSerializer.model_validate(event["block"])
                    except (ValidationError, KeyError, json.JSONDecodeError) as error:
                        logger.exception(error)
                        continue

                    logger.debug(f"Received new block from Node: {block}")
                    yield block