test large volume

Radoslaw Kaminski 2025-03-11 15:28:06 +00:00
parent 90a30b635f
commit 098036ce67
4 changed files with 76 additions and 1 deletion

.gitignore
View File

@@ -104,3 +104,7 @@ dmypy.json
# Pyre type checker
.pyre/
log/
kzgrs/
cluster_config/cfgsync.yaml

View File

@@ -9,6 +9,7 @@ logger = get_custom_logger(__name__)
class BaseClient:
    def make_request(self, method, url, headers=None, data=None):
        self.log_request_as_curl(method, url, headers, data)
        self.print_request_size(data)
        response = requests.request(method.upper(), url, headers=headers, data=data, timeout=API_REQUEST_TIMEOUT)
        try:
            response.raise_for_status()
@@ -35,3 +36,8 @@ class BaseClient:
        headers_str_for_log = " ".join([f'-H "{key}: {value}"' for key, value in headers.items()]) if headers else ""
        curl_cmd = f"curl -v -X {method.upper()} \"{url}\" {headers_str_for_log} -d '{data}'"
        logger.info(curl_cmd)

    def print_request_size(self, data):
        body_size = len(data) if data else 0
        body_kb = body_size / 1024
        logger.debug(f"Body size: {body_kb:.2f}kB")

View File

@@ -72,7 +72,13 @@ class StepsCommon:
    @pytest.fixture(scope="function")
    def setup_4_node_cluster(self, request):
        logger.debug(f"Running fixture setup: {inspect.currentframe().f_code.co_name}")
        prepare_cluster_config(4)
        if hasattr(request, "param"):
            subnet_size = request.param
        else:
            subnet_size = 2
        prepare_cluster_config(4, subnet_size)
        self.node1 = NomosNode(CFGSYNC, "cfgsync")
        self.node2 = NomosNode(NOMOS, "nomos_node_0")
        self.node3 = NomosNode(NOMOS, "nomos_node_1")
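The request.param handling above lets a test override the subnet size through pytest's indirect parametrization, falling back to a subnet size of 2 when no parameter is given. A minimal consumer sketch, assuming a class-based test like the ones in this repo (the test name and the value 4096 are illustrative only):

@pytest.mark.usefixtures("setup_4_node_cluster")
@pytest.mark.parametrize("setup_4_node_cluster", [4096], indirect=True)
def test_with_wider_subnet(self):
    # the fixture receives request.param == 4096 and forwards it to prepare_cluster_config(4, 4096)
    ...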

View File

@@ -0,0 +1,59 @@
import random

import pytest

from src.libs.common import to_app_id, to_index
from src.libs.custom_logger import get_custom_logger
from src.steps.da import StepsDataAvailability

logger = get_custom_logger(__name__)


def generate_large_text_data(size):
    """Generate large text data with random words"""
    words = ["lorem", "ipsum", "dolor", "sit", "amet", "consectetur", "adipiscing", "elit", "sed", "do", "eiusmod", "tempor"]
    result = []
    target_size = size
    current_size = 0
    while current_size <= target_size:
        word = random.choice(words)
        result.append(word)
        current_size = len(" ".join(result).encode("utf-8"))
    data = " ".join(result)
    # trim one character at a time until the encoded payload is no larger than target_size bytes
    while len(data.encode("utf-8")) > target_size:
        data = data[:-1]
    logger.debug(f"Raw data size: {len(data.encode('utf-8'))}\n\t{data}")
    return data

class TestLargeVolume(StepsDataAvailability):
    @pytest.mark.usefixtures("setup_4_node_cluster")
    @pytest.mark.parametrize("setup_4_node_cluster", [2048], indirect=True)
    @pytest.mark.parametrize(
        "raw_data_size",
        [
            50,
            # 70,
            # 256,
            # 10 * 1024,
            # 100 * 1024,
            # 256 * 1024,
        ],
    )
    def test_large_volume_dispersal(self, raw_data_size):
        data = generate_large_text_data(raw_data_size)
        try:
            response = self.disperse_data(data, to_app_id(1), to_index(0), timeout_duration=0)
            if response.status_code != 200:
                logger.error(f"Dispersal returned status {response.status_code}")
        except Exception as ex:
            raise Exception(f"Dispersal was not successful with error {ex}") from ex
        assert response.status_code == 200
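Because the word list is pure ASCII, each character trimmed in the final loop of generate_large_text_data removes exactly one byte, so the returned payload is exactly size bytes long. A quick illustrative check of that property (not part of this change):

def test_generated_payload_size():
    # sketch: the generator should return exactly the requested number of UTF-8 bytes
    for size in (50, 256, 10 * 1024):
        payload = generate_large_text_data(size)
        assert len(payload.encode("utf-8")) == size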