import logging
from typing import AsyncIterator, List
from urllib.parse import urljoin

import httpx
import requests

from node.api.base import NodeApi
from node.models.blocks import Block
from node.models.health import Health
from node.models.transactions import Transaction

logger = logging.getLogger(__name__)


class HttpNodeApi(NodeApi):
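    """HTTP implementation of the Node API.

    One-shot calls (`get_health_check`, `get_blocks`) use `requests`; the block
    stream uses an async `httpx` client.
    """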

    ENDPOINT_INFO = "/cryptarchia/info"
    ENDPOINT_TRANSACTIONS = "/cryptarchia/transactions"
    ENDPOINT_BLOCKS = "/cryptarchia/blocks"
    ENDPOINT_BLOCKS_STREAM = "/cryptarchia/blocks/stream"

    def __init__(self, host: str, port: int, protocol: str = "http", timeout: int = 60):
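        """Store connection settings; no network I/O happens at construction time."""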
        self.host: str = host
        self.port: int = port
        self.protocol: str = protocol
        self.timeout: int = timeout

    @property
    def base_url(self) -> str:
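        """Root URL of the node, built from protocol, host, and port."""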
        return f"{self.protocol}://{self.host}:{self.port}"

    async def get_health_check(self) -> Health:
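        """Call the info endpoint and map the HTTP status to a Health value."""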
        url = urljoin(self.base_url, self.ENDPOINT_INFO)
        response = requests.get(url, timeout=self.timeout)
        if response.status_code == 200:
            return Health.from_healthy()
        else:
            return Health.from_unhealthy()

    async def get_blocks(self, slot_from: int, slot_to: int) -> List[Block]:
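        """Fetch the blocks for the given slot range from the blocks endpoint."""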
        query_string = f"slot_from={slot_from}&slot_to={slot_to}"
        endpoint = urljoin(self.base_url, self.ENDPOINT_BLOCKS)
        url = f"{endpoint}?{query_string}"
        response = requests.get(url, timeout=self.timeout)
        python_json = response.json()
        blocks = [Block.model_validate(item) for item in python_json]
        return blocks

    async def get_blocks_stream(self) -> AsyncIterator[Block]:
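        """Yield blocks from the streaming endpoint as they arrive (one JSON document per line)."""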
        url = urljoin(self.base_url, self.ENDPOINT_BLOCKS_STREAM)

        async with httpx.AsyncClient(timeout=self.timeout) as client:
            async with client.stream("GET", url) as response:
                response.raise_for_status()  # TODO: Result

                async for line in response.aiter_lines():
                    if not line:
                        continue
                    block = Block.model_validate_json(line)
                    logger.debug(f"Received new block from Node: {block}")
                    yield block
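

if __name__ == "__main__":
    # Minimal local smoke test: a sketch only, assuming a node is reachable on
    # 127.0.0.1:18080 (hypothetical host/port, adjust to your deployment).
    import asyncio

    async def _demo() -> None:
        api = HttpNodeApi(host="127.0.0.1", port=18080)
        print(await api.get_health_check())
        async for block in api.get_blocks_stream():
            print(block)

    asyncio.run(_demo())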