refactor test case

This commit is contained in:
Radoslaw Kaminski 2025-03-12 11:52:01 +00:00
parent 77fe30a9d3
commit 5f0db1c518
No known key found for this signature in database
2 changed files with 25 additions and 36 deletions

View File

@@ -54,6 +54,25 @@ def generate_random_bytes(n=31):
return os.urandom(n)
def generate_text_data(target_size):
    """Build a space-separated string of random lorem-ipsum words whose
    UTF-8 encoding is as close as possible to, but never over, target_size bytes.

    :param target_size: desired maximum size of the encoded result, in bytes
    :return: the generated text string
    """
    words = ["lorem", "ipsum", "dolor", "sit", "amet", "consectetur", "adipiscing", "elit", "sed", "do", "eiusmod", "tempor"]
    result = []
    current_size = 0
    # Grow until the encoded text first exceeds target_size...
    while current_size <= target_size:
        word = random.choice(words)
        result.append(word)
        current_size = len(" ".join(result).encode("utf-8"))
    text_data = " ".join(result)
    # ...then trim one character at a time until it fits again.
    while len(text_data.encode("utf-8")) > target_size:
        text_data = text_data[:-1]
    # Lazy %-args instead of an f-string: the original nested double quotes
    # inside an f-string ({...encode("utf-8")...}), which is a SyntaxError on
    # Python < 3.12, and lazy formatting is the logging-module convention.
    logger.debug("Raw data size: %d\n\t%s", len(text_data.encode("utf-8")), text_data)
    return text_data
def add_padding(orig_bytes):
"""
Pads a list of bytes (integers in [0..255]) using a PKCS#7-like scheme:

View File

@@ -1,58 +1,28 @@
import random
import pytest
from src.libs.common import to_app_id, to_index
from src.libs.common import generate_text_data, to_app_id, to_index
from src.libs.custom_logger import get_custom_logger
from src.steps.da import StepsDataAvailability
logger = get_custom_logger(__name__)
def generate_large_text_data(size):
    """Generate large text data with random words.

    :param size: target maximum size of the UTF-8 encoded result, in bytes
    :return: the generated text string
    """
    words = ["lorem", "ipsum", "dolor", "sit", "amet", "consectetur", "adipiscing", "elit", "sed", "do", "eiusmod", "tempor"]
    result = []
    target_size = size
    current_size = 0
    # Append random words until the encoded text first exceeds the target...
    while current_size <= target_size:
        word = random.choice(words)
        result.append(word)
        current_size = len(" ".join(result).encode("utf-8"))
    data = " ".join(result)
    # ...then trim one character at a time until it fits again.
    while len(data.encode("utf-8")) > target_size:
        data = data[:-1]
    # Lazy %-args instead of an f-string: the original nested double quotes
    # inside an f-string ({len(data.encode("utf-8"))}), a SyntaxError on
    # Python < 3.12, and lazy formatting is the logging-module convention.
    logger.debug("Raw data size: %d\n\t%s", len(data.encode("utf-8")), data)
    return data
class TestLargeVolume(StepsDataAvailability):
    """Disperse increasingly large payloads through the 4-node cluster and
    verify dispersal succeeds (post-refactor version of this diff: cluster
    config and payload size are parametrized together)."""

    @pytest.mark.usefixtures("setup_4_node_cluster")
    @pytest.mark.parametrize(
        "setup_4_node_cluster,raw_data_size",
        [
            ({"subnet_size": 32, "dispersal_factor": 8}, 70),  # => ~0.58kB
            ({"subnet_size": 2048, "dispersal_factor": 512}, 51 * 1024),  # => ~244kB, spec limit: 248kB
        ],
        indirect=["setup_4_node_cluster"],
    )
    def test_large_volume_dispersal(self, raw_data_size):
        # Shared helper replaces the local generate_large_text_data duplicate.
        data = generate_text_data(raw_data_size)
        try:
            response = self.disperse_data(data, to_app_id(1), to_index(0), timeout_duration=0)
            if response.status_code != 200:
                # Use the module logger rather than a bare print so the
                # failure detail lands in the test logs.
                logger.error("Dispersal returned non-200 response: %s", response)
        except Exception as ex:
            # Chain the original exception so its traceback is preserved
            # (the original `raise Exception(f"...")` discarded it).
            raise Exception(f"Dispersal was not successful with error {ex}") from ex