Merge pull request #15 from logos-co/test-interaction-data-flow

Test/interaction data flow
This commit is contained in:
Roman Zajic 2025-04-25 14:52:56 +08:00 committed by GitHub
commit ad295831d3
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
11 changed files with 135 additions and 10 deletions

View File

@ -70,6 +70,14 @@ class REST(BaseClient):
response = self.rest_call("get", "da/blacklisted-peers")
return response.json()
def da_balancer_stats(self):
    """Query the node's DA balancer statistics endpoint and return the decoded JSON body."""
    return self.rest_call("get", "da/balancer-stats").json()
def da_monitor_stats(self):
    """Query the node's DA monitor statistics endpoint and return the decoded JSON body."""
    return self.rest_call("get", "da/monitor-stats").json()
def network_info(self):
    """Query the node's network information endpoint and return the decoded JSON body."""
    return self.rest_call("get", "network/info").json()

View File

@ -41,3 +41,4 @@ GATEWAY = get_env_var("GATEWAY", "172.19.0.1")
# Truthy when executing inside a CI pipeline (no default: unset means local run).
RUNNING_IN_CI = get_env_var("CI")
# Timeout (seconds) applied to REST API calls; defaults to 20.
API_REQUEST_TIMEOUT = get_env_var("API_REQUEST_TIMEOUT", 20)
# Whether to scan node logs for errors after tests; disabled by default.
CHECK_LOG_ERRORS = get_env_var("CHECK_LOG_ERRORS", False)
# Consensus slot duration (seconds) used by tests to wait for block production; defaults to 5.
CONSENSUS_SLOT_TIME = get_env_var("CONSENSUS_SLOT_TIME", 5)

View File

@ -166,3 +166,9 @@ class NomosNode:
def send_get_cryptarchia_headers_request(self, data):
    """Request cryptarchia headers from the node via its API client."""
    response = self._api.cryptarchia_headers(data)
    return response
def send_add_share_request(self, data):
    """Submit a DA share to the node via its API client."""
    response = self._api.da_add_share(data)
    return response
def send_add_blob_info_request(self, data):
    """Submit blob info to the node's mempool via its API client."""
    response = self._api.mempool_add_blobinfo(data)
    return response

View File

@ -4,7 +4,7 @@ import os
import pytest
from src.client.proxy_client import ProxyClient
from src.env_vars import CFGSYNC, NOMOS, NOMOS_EXECUTOR
from src.env_vars import CFGSYNC, NOMOS, NOMOS_EXECUTOR, CONSENSUS_SLOT_TIME
from src.libs.common import delay
from src.libs.custom_logger import get_custom_logger
from src.node.nomos_node import NomosNode
@ -72,7 +72,7 @@ class StepsCommon:
logger.error(f"REST service did not become ready in time: {ex}")
raise
delay(5)
delay(CONSENSUS_SLOT_TIME)
@pytest.fixture(scope="function")
def setup_4_node_cluster(self, request):
@ -97,7 +97,7 @@ class StepsCommon:
logger.error(f"REST service did not become ready in time: {ex}")
raise
delay(5)
delay(CONSENSUS_SLOT_TIME)
@pytest.fixture(scope="function")
def setup_proxy_clients(self, request):

View File

@ -126,3 +126,21 @@ class StepsDataAvailability(StepsCommon):
return response
return get_commitments()
@allure.step
def add_publish_share(self, node, da_share, **kwargs):
    """Publish *da_share* through *node*, retrying failed attempts.

    Keyword args:
        timeout_duration: total seconds to keep retrying (default 65).
        interval: seconds between attempts (default 0.1).
    """
    max_wait = kwargs.get("timeout_duration", 65)
    retry_interval = kwargs.get("interval", 0.1)

    @retry(stop=stop_after_delay(max_wait), wait=wait_fixed(retry_interval), reraise=True)
    def _attempt_publish():
        try:
            result = node.send_add_share_request(da_share)
        except Exception as ex:
            # Log before re-raising so tenacity's retries are traceable.
            logger.error(f"Exception while adding share: {ex}")
            raise
        else:
            return result

    return _attempt_publish()

34
src/steps/mempool.py Normal file
View File

@ -0,0 +1,34 @@
import allure
from tenacity import retry, stop_after_delay, wait_fixed
from src.libs.custom_logger import get_custom_logger
from src.steps.common import StepsCommon
logger = get_custom_logger(__name__)
def prepare_add_blob_info_request(blob_id, app_id, index):
    """Build the payload dict for a mempool add-blob-info request."""
    return {
        "id": blob_id,
        "metadata": {"app_id": app_id, "index": index},
    }
class StepsMempool(StepsCommon):
    """Test steps for interacting with a node's mempool."""

    @allure.step
    def add_dispersed_blob_info(self, node, blob_id, app_id, index, **kwargs):
        """Add blob info for a dispersed blob to *node*'s mempool, retrying on failure.

        Keyword args:
            timeout_duration: total seconds to keep retrying (default 65).
            interval: seconds between attempts (default 0.1).
        """
        max_wait = kwargs.get("timeout_duration", 65)
        poll_interval = kwargs.get("interval", 0.1)
        payload = prepare_add_blob_info_request(blob_id, app_id, index)

        @retry(stop=stop_after_delay(max_wait), wait=wait_fixed(poll_interval), reraise=True)
        def _submit():
            try:
                result = node.send_add_blob_info_request(payload)
            except Exception as ex:
                # Log before re-raising so tenacity's retries are traceable.
                logger.error(f"Exception while adding blob info to mempool: {ex}")
                raise
            else:
                return result

        return _submit()

View File

@ -2,6 +2,7 @@ import json
import pytest
from src.client.nomos_cli import NomosCli
from src.env_vars import CONSENSUS_SLOT_TIME
from src.libs.common import delay, to_app_id, to_index
from src.libs.custom_logger import get_custom_logger
from src.steps.da import StepsDataAvailability
@ -16,7 +17,7 @@ class TestDataIntegrity(StepsDataAvailability):
@pytest.mark.usefixtures("setup_4_node_cluster")
def test_da_identify_retrieve_missing_columns(self):
self.disperse_data(DATA_TO_DISPERSE[1], to_app_id(1), to_index(0))
delay(5)
delay(CONSENSUS_SLOT_TIME)
test_results = []
# Iterate through standard nodes 1-3 to get blob data for 1/2 columns
for node in self.main_nodes[1:4]:
@ -39,7 +40,7 @@ class TestDataIntegrity(StepsDataAvailability):
self.main_nodes[1].stop()
self.disperse_data(DATA_TO_DISPERSE[1], to_app_id(1), to_index(0))
delay(5)
delay(CONSENSUS_SLOT_TIME)
test_results = []
# Iterate through standard nodes 2-3 to get blob data for 1/2 columns
for node in self.main_nodes[2:4]:
@ -58,7 +59,7 @@ class TestDataIntegrity(StepsDataAvailability):
@pytest.mark.usefixtures("setup_2_node_cluster")
def test_da_sampling_determines_data_presence(self):
self.disperse_data(DATA_TO_DISPERSE[1], to_app_id(1), to_index(0))
delay(5)
delay(CONSENSUS_SLOT_TIME)
rcv_data = self.get_data_range(self.node2, to_app_id(1), to_index(0), to_index(5))
rcv_data_json = json.dumps(rcv_data)

View File

@ -3,6 +3,7 @@ import time
import pytest
from src.env_vars import CONSENSUS_SLOT_TIME
from src.libs.common import to_app_id, to_index, delay
from src.steps.da import StepsDataAvailability, logger
from src.test_data import DATA_TO_DISPERSE
@ -44,7 +45,7 @@ class TestHighLoadDos(StepsDataAvailability):
response = self.disperse_data(DATA_TO_DISPERSE[7], to_app_id(1), to_index(0))
assert response.status_code == 200, "Initial dispersal was not successful"
delay(5)
delay(CONSENSUS_SLOT_TIME)
start_time = time.time()
while time.time() - start_time < timeout:

View File

@ -1,5 +1,6 @@
import pytest
from src.env_vars import CONSENSUS_SLOT_TIME
from src.libs.common import delay, generate_text_data, to_app_id, to_index
from src.libs.custom_logger import get_custom_logger
from src.steps.da import StepsDataAvailability
@ -29,7 +30,7 @@ class TestLargeVolume(StepsDataAvailability):
assert response.status_code == 200
delay(5)
delay(CONSENSUS_SLOT_TIME)
self.get_data_range(self.node2, to_app_id(1), to_index(0), to_index(5), timeout_duration=20, interval=1)
@pytest.mark.usefixtures("setup_2_node_cluster")

View File

@ -1,5 +1,6 @@
import pytest
from src.env_vars import CONSENSUS_SLOT_TIME
from src.libs.common import to_app_id, to_index, delay
from src.libs.custom_logger import get_custom_logger
from src.steps.consensus import StepsConsensus
@ -47,7 +48,7 @@ class TestApiCompatibility(StepsDataAvailability, StepsConsensus, StepsStorage):
@pytest.mark.usefixtures("setup_2_node_cluster")
def test_da_consensus_compatibility(self):
self.disperse_data(DATA_TO_DISPERSE[2], to_app_id(1), to_index(0))
delay(5)
delay(CONSENSUS_SLOT_TIME)
index_shares = self.get_data_range(self.node2, to_app_id(1), to_index(0), to_index(5))
column_commitments, rows_commitments = extract_commitments(index_shares)
@ -75,7 +76,7 @@ class TestApiCompatibility(StepsDataAvailability, StepsConsensus, StepsStorage):
@pytest.mark.usefixtures("setup_4_node_cluster")
def test_da_cross_nodes_consensus_compatibility(self):
self.disperse_data(DATA_TO_DISPERSE[2], to_app_id(1), to_index(0))
delay(5)
delay(CONSENSUS_SLOT_TIME)
index_shares = self.get_data_range(self.node2, to_app_id(1), to_index(0), to_index(5))
column_commitments, rows_commitments = extract_commitments(index_shares)

View File

@ -0,0 +1,54 @@
import pytest
from src.env_vars import CONSENSUS_SLOT_TIME
from src.libs.common import to_app_id, to_index, delay, to_blob_id
from src.libs.custom_logger import get_custom_logger
from src.steps.consensus import StepsConsensus
from src.steps.da import StepsDataAvailability
from src.steps.mempool import StepsMempool
from src.steps.storage import StepsStorage
from src.test_data import DATA_TO_DISPERSE
logger = get_custom_logger(__name__)
def extract_da_shares(index_shares):
    """Flatten (index, shares) pairs into a single list, skipping empty share lists."""
    collected = []
    for _, shares in index_shares:
        if shares:
            collected.extend(shares)
    return collected
class TestInteractionDataFlow(StepsDataAvailability, StepsMempool):
main_nodes = []
@pytest.mark.usefixtures("setup_2_node_cluster")
def test_da_dispersal_integration(self):
    """Disperse data, then verify a tampered share does not get re-published."""
    self.disperse_data(DATA_TO_DISPERSE[3], to_app_id(1), to_index(0))
    delay(CONSENSUS_SLOT_TIME)

    initial_shares = extract_da_shares(self.get_data_range(self.node2, to_app_id(1), to_index(0), to_index(5)))
    assert len(initial_shares) == 2, "Two da_shares are expected"

    # Tamper with one retrieved share and attempt to publish it again.
    tampered_share = initial_shares[0]
    tampered_share["share_idx"] = 7
    self.add_publish_share(self.node2, tampered_share)
    delay(CONSENSUS_SLOT_TIME)

    final_shares = extract_da_shares(self.get_data_range(self.node2, to_app_id(1), to_index(0), to_index(8)))
    assert len(final_shares) < 3, "Modified da_share should not get published"
@pytest.mark.usefixtures("setup_2_node_cluster")
def test_da_mempool_interaction(self):
    # Disperse real data, then add unrelated blob info directly to node2's
    # mempool; retrieval of the dispersed shares should be unaffected.
    self.disperse_data(DATA_TO_DISPERSE[4], to_app_id(1), to_index(0))
    self.add_dispersed_blob_info(self.node2, to_blob_id(10), to_app_id(1), to_index(0))
    # Wait one consensus slot so the dispersal can be included in a block.
    delay(CONSENSUS_SLOT_TIME)
    index_shares = self.get_data_range(self.node2, to_app_id(1), to_index(0), to_index(5))
    da_shares = extract_da_shares(index_shares)
    assert len(da_shares) == 2, "Dispersal should not be affected by additional blob info added to mempool"