diff --git a/cluster_config/cfgsync-template.yaml b/cluster_config/cfgsync-template.yaml index 069440f..f95023a 100644 --- a/cluster_config/cfgsync-template.yaml +++ b/cluster_config/cfgsync-template.yaml @@ -28,6 +28,10 @@ mempool_publish_strategy: !SampleSubnetworks secs: 0 nanos: 100000000 +replication_settings: + seen_message_cache_size: 204800 + seen_message_ttl_secs: 900 + # Tracing tracing_settings: logger: Stdout diff --git a/src/api_clients/base_client.py b/src/api_clients/base_client.py index c0381e6..ddb68ee 100644 --- a/src/api_clients/base_client.py +++ b/src/api_clients/base_client.py @@ -40,4 +40,4 @@ class BaseClient: def print_request_size(self, data): body_size = len(data) if data else 0 body_kb = body_size / 1024 - logger.debug(f"Body size: {body_kb:.2f}kB") + logger.debug(f"Request body size: {body_kb:.2f}kB") diff --git a/src/env_vars.py b/src/env_vars.py index 863c0a1..8f1b14d 100644 --- a/src/env_vars.py +++ b/src/env_vars.py @@ -15,7 +15,7 @@ def get_env_var(var_name, default=None): # Configuration constants. 
Need to be upercase to appear in reports -DEFAULT_NOMOS_IMAGE = "ghcr.io/logos-co/nomos-node:testnet" +DEFAULT_NOMOS_IMAGE = "ghcr.io/logos-co/nomos:testnet" NOMOS_IMAGE = get_env_var("NOMOS_IMAGE", DEFAULT_NOMOS_IMAGE) DEFAULT_PROXY_IMAGE = "bitnami/configurable-http-proxy:latest" diff --git a/tests/data_integrity/test_data_integrity.py b/tests/data_integrity/test_data_integrity.py index bce91f7..0644660 100644 --- a/tests/data_integrity/test_data_integrity.py +++ b/tests/data_integrity/test_data_integrity.py @@ -1,6 +1,4 @@ import json -import random - import pytest from src.client.nomos_cli import NomosCli @@ -20,7 +18,7 @@ class TestDataIntegrity(StepsDataAvailability): self.disperse_data(DATA_TO_DISPERSE[1], to_app_id(1), to_index(0)) delay(5) test_results = [] - # Iterate through standard nodes to get blob data for 1/2 columns + # Iterate through standard nodes 1-3 to get blob data for 1/2 columns for node in self.main_nodes[1:4]: rcv_data = self.get_data_range(node, to_app_id(1), to_index(0), to_index(5)) rcv_data_json = json.dumps(rcv_data) @@ -34,6 +32,29 @@ class TestDataIntegrity(StepsDataAvailability): logger.info(f"Dispersed data received by : {test_results}") + @pytest.mark.usefixtures("setup_4_node_cluster") + def test_da_disperse_retrieve_one_node_stopped(self): + + # Stop nomos node 1 - dispersal and data retrieval should still be possible + self.main_nodes[1].stop() + + self.disperse_data(DATA_TO_DISPERSE[1], to_app_id(1), to_index(0)) + delay(5) + test_results = [] + # Iterate through standard nodes 2-3 to get blob data for 1/2 columns + for node in self.main_nodes[2:4]: + rcv_data = self.get_data_range(node, to_app_id(1), to_index(0), to_index(5)) + rcv_data_json = json.dumps(rcv_data) + + reconstructed_data = NomosCli(command="reconstruct").run(input_values=[rcv_data_json]) + + if DATA_TO_DISPERSE[1] == reconstructed_data: + test_results.append(node.name()) + + assert len(test_results) > 0, "Dispersed data were not received by any node" + + 
logger.info(f"Dispersed data received by : {test_results}") + @pytest.mark.usefixtures("setup_2_node_cluster") def test_da_sampling_determines_data_presence(self): self.disperse_data(DATA_TO_DISPERSE[1], to_app_id(1), to_index(0)) @@ -44,3 +65,10 @@ class TestDataIntegrity(StepsDataAvailability): decoded_data = NomosCli(command="reconstruct").run(input_values=[rcv_data_json], decode_only=True) assert DATA_TO_DISPERSE[1] == decoded_data, "Retrieved data are not same with original data" + + @pytest.mark.usefixtures("setup_2_node_cluster") + def test_da_disperse_empty_data(self): + empty_data = b"" + response = self.disperse_data(empty_data, to_app_id(1), to_index(0), utf8=False, padding=False, timeout_duration=0) + + assert response.status_code == 400, "Dispersal of empty data should be rejected as bad request" diff --git a/tests/networking_privacy/test_networking_privacy.py b/tests/networking_privacy/test_networking_privacy.py index 2886553..194d327 100644 --- a/tests/networking_privacy/test_networking_privacy.py +++ b/tests/networking_privacy/test_networking_privacy.py @@ -1,7 +1,7 @@ import pytest import psutil -from src.libs.common import delay, to_app_id, to_index, generate_random_bytes +from src.libs.common import delay, to_app_id, to_index, generate_random_bytes, generate_text_data from src.libs.custom_logger import get_custom_logger from src.steps.da import StepsDataAvailability from src.test_data import DATA_TO_DISPERSE @@ -74,3 +74,36 @@ class TestNetworkingPrivacy(StepsDataAvailability): overhead = (consumed - data_sent) / data_sent assert overhead < 400, "Dispersal overhead is too high" + + @pytest.mark.usefixtures("setup_4_node_cluster") + def test_consumed_bandwidth_large_data_dispersal(self): + net_io = psutil.net_io_counters() + prev_total = net_io.bytes_sent + net_io.bytes_recv + + data_to_disperse = generate_text_data(2048) # ~10kB + + successful_dispersals = 0 + for i in range(20): + try: + self.disperse_data(data_to_disperse, to_app_id(10), 
to_index(0)) + successful_dispersals += 1 + except Exception as ex: + logger.warning(f"Dispersal #{i} was not successful with error {ex}") + + if successful_dispersals == 10: + break + + delay(0.1) + + net_io = psutil.net_io_counters() + curr_total = net_io.bytes_sent + net_io.bytes_recv + + consumed = curr_total - prev_total + + assert successful_dispersals == 10, "Unable to finish 10 successful dispersals" + + data_sent = 2 * successful_dispersals * len(data_to_disperse) + overhead = (consumed - data_sent) / data_sent + + # Large data should have less transfer overhead + assert overhead < 300, "Dispersal overhead is too high"