From 5f0db1c518e3b47b39ed922cd831385894ed0ae5 Mon Sep 17 00:00:00 2001
From: Radoslaw Kaminski
Date: Wed, 12 Mar 2025 11:52:01 +0000
Subject: [PATCH] refactor test case

---
 src/libs/common.py                        | 19 +++++++++
 tests/dos_robustness/test_large_volume.py | 42 ++++++-------------------
 2 files changed, 25 insertions(+), 36 deletions(-)

diff --git a/src/libs/common.py b/src/libs/common.py
index af03e25..25ab29f 100644
--- a/src/libs/common.py
+++ b/src/libs/common.py
@@ -54,6 +54,25 @@ def generate_random_bytes(n=31):
     return os.urandom(n)
 
 
+def generate_text_data(target_size):
+    words = ["lorem", "ipsum", "dolor", "sit", "amet", "consectetur", "adipiscing", "elit", "sed", "do", "eiusmod", "tempor"]
+    result = []
+
+    current_size = 0
+    while current_size <= target_size:
+        word = random.choice(words)
+        result.append(word)
+        current_size = len(" ".join(result).encode("utf-8"))
+
+    text_data = " ".join(result)
+    while len(text_data.encode("utf-8")) > target_size:
+        text_data = text_data[:-1]
+
+    logger.debug(f"Raw data size: {len(text_data.encode('utf-8'))}\n\t{text_data}")
+
+    return text_data
+
+
 def add_padding(orig_bytes):
     """
     Pads a list of bytes (integers in [0..255]) using a PKCS#7-like scheme:
diff --git a/tests/dos_robustness/test_large_volume.py b/tests/dos_robustness/test_large_volume.py
index 671ac86..37bfae2 100644
--- a/tests/dos_robustness/test_large_volume.py
+++ b/tests/dos_robustness/test_large_volume.py
@@ -1,58 +1,28 @@
-import random
 import pytest
 
-from src.libs.common import to_app_id, to_index
+from src.libs.common import generate_text_data, to_app_id, to_index
 from src.libs.custom_logger import get_custom_logger
 from src.steps.da import StepsDataAvailability
 
 logger = get_custom_logger(__name__)
 
 
-def generate_large_text_data(size):
-    """Generate large text data with random words"""
-    words = ["lorem", "ipsum", "dolor", "sit", "amet", "consectetur", "adipiscing", "elit", "sed", "do", "eiusmod", "tempor"]
-
-    result = []
-    target_size = size
-    current_size = 0
-
-    while current_size <= target_size:
-        word = random.choice(words)
-        result.append(word)
-        current_size = len(" ".join(result).encode("utf-8"))
-
-    data = " ".join(result)
-
-    while len(data.encode("utf-8")) > target_size:
-        data = data[:-1]
-
-    logger.debug(f"Raw data size: {len(data.encode("utf-8"))}\n\t{data}")
-
-    return data
-
-
 class TestLargeVolume(StepsDataAvailability):
     @pytest.mark.usefixtures("setup_4_node_cluster")
-    @pytest.mark.parametrize("setup_4_node_cluster", [2048], indirect=True)
     @pytest.mark.parametrize(
-        "raw_data_size",
+        "setup_4_node_cluster,raw_data_size",
         [
-            50,
-            # 70,
-            # 256,
-            # 10 * 1024,
-            # 100 * 1024,
-            # 256 * 1024,
+            ({"subnet_size": 32, "dispersal_factor": 8}, 70),  # => ~0.58kB
+            ({"subnet_size": 2048, "dispersal_factor": 512}, 51 * 1024),  # => ~244kB, spec limit: 248kB
         ],
+        indirect=["setup_4_node_cluster"],
     )
     def test_large_volume_dispersal(self, raw_data_size):
-        data = generate_large_text_data(raw_data_size)
+        data = generate_text_data(raw_data_size)
 
         try:
             response = self.disperse_data(data, to_app_id(1), to_index(0), timeout_duration=0)
-            if response.status_code != 200:
-                print(response)
         except Exception as ex:
             raise Exception(f"Dispersal was not successful with error {ex}")
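
Note on the moved helper: a minimal smoke check of generate_text_data, as a
hypothetical sketch. It assumes `random` is already imported at the top of
src/libs/common.py (this hunk does not add the import) and that the repository
root is on PYTHONPATH, as it is for the test run:

    # Standalone sketch, not part of the patch.
    from src.libs.common import generate_text_data

    for target in (70, 51 * 1024):
        data = generate_text_data(target)
        size = len(data.encode("utf-8"))
        # The helper appends whole words until it overshoots the target,
        # then trims one character at a time, so the final payload is
        # always <= target bytes.
        assert size <= target, (size, target)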
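
Note on the parametrization: listing setup_4_node_cluster in indirect=[...]
routes each tuple's dict to the fixture through request.param, while
raw_data_size still goes straight to the test function. A hypothetical fixture
shape for illustration only; the real setup_4_node_cluster fixture in this
repo will differ:

    import pytest

    @pytest.fixture
    def setup_4_node_cluster(request):
        # request.param is the dict from the parametrize tuple,
        # e.g. {"subnet_size": 32, "dispersal_factor": 8}.
        cfg = request.param
        # ... start the 4-node cluster configured with cfg ...
        yield
        # ... tear the cluster down ...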