From 098036ce675e8c652b5bf2d75070ce8331be9a6a Mon Sep 17 00:00:00 2001
From: Radoslaw Kaminski
Date: Tue, 11 Mar 2025 15:28:06 +0000
Subject: [PATCH] test large volume

---
 .gitignore                                |  4 ++
 src/api_clients/base_client.py            |  6 +++
 src/steps/common.py                       |  8 ++-
 tests/dos_robustness/test_large_volume.py | 59 +++++++++++++++++++++++
 4 files changed, 76 insertions(+), 1 deletion(-)
 create mode 100644 tests/dos_robustness/test_large_volume.py

diff --git a/.gitignore b/.gitignore
index a8cfc79..ff4e95e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -104,3 +104,7 @@ dmypy.json
 
 # Pyre type checker
 .pyre/
+
+log/
+kzgrs/
+cluster_config/cfgsync.yaml
\ No newline at end of file
diff --git a/src/api_clients/base_client.py b/src/api_clients/base_client.py
index b731e0f..c0381e6 100644
--- a/src/api_clients/base_client.py
+++ b/src/api_clients/base_client.py
@@ -9,6 +9,7 @@ logger = get_custom_logger(__name__)
 class BaseClient:
     def make_request(self, method, url, headers=None, data=None):
         self.log_request_as_curl(method, url, headers, data)
+        self.print_request_size(data)
         response = requests.request(method.upper(), url, headers=headers, data=data, timeout=API_REQUEST_TIMEOUT)
         try:
             response.raise_for_status()
@@ -35,3 +36,8 @@ class BaseClient:
         headers_str_for_log = " ".join([f'-H "{key}: {value}"' for key, value in headers.items()]) if headers else ""
         curl_cmd = f"curl -v -X {method.upper()} \"{url}\" {headers_str_for_log} -d '{data}'"
         logger.info(curl_cmd)
+
+    def print_request_size(self, data):
+        body_size = len(data) if data else 0
+        body_kb = body_size / 1024
+        logger.debug(f"Body size: {body_kb:.2f}kB")
diff --git a/src/steps/common.py b/src/steps/common.py
index b08e045..2b03786 100644
--- a/src/steps/common.py
+++ b/src/steps/common.py
@@ -72,7 +72,13 @@ class StepsCommon:
     @pytest.fixture(scope="function")
     def setup_4_node_cluster(self, request):
         logger.debug(f"Running fixture setup: {inspect.currentframe().f_code.co_name}")
-        prepare_cluster_config(4)
+
+        if hasattr(request, "param"):
+            subnet_size = request.param
+        else:
+            subnet_size = 2
+
+        prepare_cluster_config(4, subnet_size)
         self.node1 = NomosNode(CFGSYNC, "cfgsync")
         self.node2 = NomosNode(NOMOS, "nomos_node_0")
         self.node3 = NomosNode(NOMOS, "nomos_node_1")
diff --git a/tests/dos_robustness/test_large_volume.py b/tests/dos_robustness/test_large_volume.py
new file mode 100644
index 0000000..671ac86
--- /dev/null
+++ b/tests/dos_robustness/test_large_volume.py
@@ -0,0 +1,59 @@
+import random
+import pytest
+
+from src.libs.common import to_app_id, to_index
+from src.libs.custom_logger import get_custom_logger
+from src.steps.da import StepsDataAvailability
+
+logger = get_custom_logger(__name__)
+
+
+def generate_large_text_data(size):
+    """Generate large text data with random words"""
+    words = ["lorem", "ipsum", "dolor", "sit", "amet", "consectetur", "adipiscing", "elit", "sed", "do", "eiusmod", "tempor"]
+
+    result = []
+    target_size = size
+    current_size = 0
+
+    while current_size <= target_size:
+        word = random.choice(words)
+        result.append(word)
+        current_size = len(" ".join(result).encode("utf-8"))
+
+    data = " ".join(result)
+
+    while len(data.encode("utf-8")) > target_size:
+        data = data[:-1]
+
+    logger.debug(f"Raw data size: {len(data.encode('utf-8'))}\n\t{data}")
+
+    return data
+
+
+class TestLargeVolume(StepsDataAvailability):
+
+    @pytest.mark.usefixtures("setup_4_node_cluster")
+    @pytest.mark.parametrize("setup_4_node_cluster", [2048], indirect=True)
+    @pytest.mark.parametrize(
+        "raw_data_size",
+        [
+            50,
+            # 70,
+            # 256,
+            # 10 * 1024,
+            # 100 * 1024,
+            # 256 * 1024,
+        ],
+    )
+    def test_large_volume_dispersal(self, raw_data_size):
+        data = generate_large_text_data(raw_data_size)
+
+        try:
+            response = self.disperse_data(data, to_app_id(1), to_index(0), timeout_duration=0)
+            if response.status_code != 200:
+                print(response)
+        except Exception as ex:
+            raise Exception(f"Dispersal was not successful with error {ex}") from ex
+
+        assert response.status_code == 200