test: client nodes initialization

This commit is contained in:
Roman 2025-03-05 03:37:11 +00:00
parent 917e1cfd9b
commit 7e11b4939c
No known key found for this signature in database
GPG Key ID: BB3828275C58EFF1
9 changed files with 129 additions and 56 deletions

View File

@ -9,13 +9,17 @@ class REST(BaseClient):
def __init__(self, rest_port):
self._rest_port = rest_port
def rest_call(self, method, endpoint, payload=None):
url = f"http://127.0.0.1:{self._rest_port}/{endpoint}"
def rest_call(self, method, endpoint, payload=None, host="127.0.0.1", port=None):
    """Issue a JSON REST request against a node's HTTP API.

    Falls back to this client's configured REST port when *port* is None.
    Returns whatever ``make_request`` returns (a response object).
    """
    target_port = self._rest_port if port is None else port
    return self.make_request(
        method,
        f"http://{host}:{target_port}/{endpoint}",
        headers={"Content-Type": "application/json", "Connection": "close"},
        data=payload,
    )
def rest_call_text(self, method, endpoint, payload=None):
url = f"http://127.0.0.1:{self._rest_port}/{endpoint}"
def rest_call_text(self, method, endpoint, payload=None, host="127.0.0.1", port=None):
    """Issue a REST request that accepts a plain-text response.

    Same routing rules as ``rest_call``: *port* defaults to this client's
    configured REST port when not supplied.
    """
    target_port = self._rest_port if port is None else port
    return self.make_request(
        method,
        f"http://{host}:{target_port}/{endpoint}",
        headers={"accept": "text/plain", "Connection": "close"},
        data=payload,
    )
@ -23,9 +27,9 @@ class REST(BaseClient):
status_response = self.rest_call("get", "cryptarchia/info")
return status_response.json()
def send_dispersal_request(self, data):
def send_dispersal_request(self, data, host=None, port=None):
    """POST *data* (JSON-serialized) to the node's disperse-data endpoint.

    Bug fix: *host* and *port* were accepted but silently ignored, so every
    dispersal always went to the default 127.0.0.1:<rest_port>. They are now
    forwarded to ``rest_call``; omitting them preserves the old behavior.
    """
    kwargs = {}
    if host is not None:
        kwargs["host"] = host
    if port is not None:
        kwargs["port"] = port
    return self.rest_call("post", "disperse-data", json.dumps(data), **kwargs)
def send_get_range(self, query):
def send_get_range(self, query, host=None, port=None):
    """POST a range *query* to da/get-range and return the decoded JSON body.

    Bug fix: *host* and *port* were accepted but silently ignored, so every
    query always hit the default 127.0.0.1:<rest_port>. They are now
    forwarded to ``rest_call``; omitting them preserves the old behavior.
    """
    kwargs = {}
    if host is not None:
        kwargs["host"] = host
    if port is not None:
        kwargs["port"] = port
    response = self.rest_call("post", "da/get-range", json.dumps(query), **kwargs)
    return response.json()

View File

@ -8,4 +8,11 @@ nomos_cli = {
"ports": [],
"entrypoint": "",
},
"client_node": {
"image": NOMOS_IMAGE,
"flags": [],
"volumes": [],
"ports": [],
"entrypoint": "",
},
}

View File

@ -3,14 +3,13 @@ import os
import re
from src.data_storage import DS
from src.libs.common import generate_log_prefix
from src.libs.common import generate_log_prefix, delay, remove_padding
from src.libs.custom_logger import get_custom_logger
from tenacity import retry, stop_after_delay, wait_fixed
from src.cli.cli_vars import nomos_cli
from src.docker_manager import DockerManager, stop, kill
from src.env_vars import DOCKER_LOG_DIR, NOMOS_CLI
from src.steps.da import remove_padding
logger = get_custom_logger(__name__)
@ -66,16 +65,19 @@ class NomosCli:
command=cmd,
)
DS.nomos_nodes.append(self)
DS.client_nodes.append(self)
match self._command:
case "reconstruct":
decode_only = kwargs.get("decode_only", False)
return self.reconstruct(input_values=input_values, decode_only=decode_only)
return self.reconstruct(decode_only=decode_only)
case "client_node":
delay(3600)
return None
case _:
return
return None
def reconstruct(self, input_values=None, decode_only=False):
def reconstruct(self, decode_only=False):
keywords = ["Reconstructed data"]
log_stream = self._container.logs(stream=True)
@ -98,7 +100,7 @@ class NomosCli:
result_bytes = remove_padding(result_bytes)
result = bytes(result_bytes).decode("utf-8")
DS.nomos_nodes.remove(self)
DS.client_nodes.remove(self)
return result

View File

@ -1,3 +1,4 @@
# We use this class for global variables
class DS:
    # Module-wide registries shared across tests (class attributes, mutated
    # in place — nodes append themselves on start and remove on teardown).
    # Full Nomos node handles.
    nomos_nodes = []
    # CLI client-node handles, tracked separately from full nodes.
    client_nodes = []

View File

@ -20,13 +20,12 @@ NOMOS_EXECUTOR = "nomos_executor"
CFGSYNC = "cfgsync"
DEFAULT_IMAGE = "ghcr.io/logos-co/nomos-node:testnet"
NOMOS_IMAGE = get_env_var("NOMOS_IMAGE", DEFAULT_IMAGE)
NODE_1 = get_env_var("NODE_1", NOMOS)
NODE_2 = get_env_var("NODE_2", NOMOS_EXECUTOR)
NODE_3 = get_env_var("NODE_3", CFGSYNC)
NOMOS_IMAGE = get_env_var("NOMOS_IMAGE", DEFAULT_IMAGE)
NOMOS_CLI = "/usr/bin/nomos-cli"
ADDITIONAL_NODES = get_env_var("ADDITIONAL_NODES", f"{NOMOS},{NOMOS}")

View File

@ -52,3 +52,44 @@ def generate_random_bytes(n=31):
if n < 0:
raise ValueError("Input must be an unsigned integer (non-negative)")
return os.urandom(n)
def add_padding(orig_bytes, block_size=31):
    """
    Pad a list of byte values (ints in [0..255]) using a PKCS#7-like scheme.

    Each appended byte equals the number of bytes added, so padding is
    self-describing. Data whose length is already a multiple of *block_size*
    gains a full extra block; this falls out naturally because
    ``block_size - (len % block_size)`` is always in [1, block_size] — the
    previous explicit ``padding_needed == 0`` branch was dead code and has
    been removed (behavior unchanged).

    Args:
        orig_bytes: list of ints to pad (not mutated; a new list is returned).
        block_size: pad to a multiple of this many bytes (default 31, the
            DA blob chunk size used elsewhere in this suite).

    Returns:
        A new list: orig_bytes followed by the padding bytes.
    """
    padding_needed = block_size - (len(orig_bytes) % block_size)
    return orig_bytes + [padding_needed] * padding_needed
def remove_padding(padded_bytes, block_size=31):
    """
    Remove PKCS#7-like padding from a list of byte values.

    The last byte declares the padding length; all padding bytes must carry
    that same value. *block_size* (default 31) was hoisted from a hard-coded
    constant so it stays consistent with ``add_padding``; omitting it
    preserves the old behavior.

    Args:
        padded_bytes: list of ints produced by ``add_padding``.
        block_size: maximum legal padding length (default 31).

    Raises:
        ValueError: if the input is empty or the padding is malformed.

    Returns:
        The original list of bytes without padding.
    """
    if not padded_bytes:
        raise ValueError("The input is empty, cannot remove padding.")
    padding_len = padded_bytes[-1]
    if padding_len < 1 or padding_len > block_size:
        raise ValueError("Invalid padding length.")
    # List comparison also rejects declared lengths longer than the input.
    if padded_bytes[-padding_len:] != [padding_len] * padding_len:
        raise ValueError("Invalid padding bytes.")
    return padded_bytes[:-padding_len]

View File

@ -4,6 +4,7 @@ import shutil
import pytest
from src.cli.nomos_cli import NomosCli
from src.env_vars import CFGSYNC, NOMOS, NOMOS_EXECUTOR
from src.libs.common import delay
from src.libs.custom_logger import get_custom_logger
@ -43,6 +44,7 @@ class StepsCommon:
def cluster_setup(self):
    # Per-test fixture body: reset the node registries so each test starts
    # with a clean slate (fixture decorator sits above this hunk).
    logger.debug(f"Running fixture setup: {inspect.currentframe().f_code.co_name}")
    # Full Nomos node handles started for this test.
    self.main_nodes = []
    # CLI client-node handles started for this test (see init_client_nodes).
    self.cli_nodes = []
@pytest.fixture(scope="function")
def setup_2_node_cluster(self, request):
@ -87,3 +89,18 @@ class StepsCommon:
raise
delay(5)
@pytest.fixture(scope="function")
def init_client_nodes(self, request):
    """Start a batch of client-mode CLI nodes and register them on self.cli_nodes.

    The count comes from indirect parametrization (``request.param``) when
    provided, otherwise defaults to 5. A one-second pause follows each start.
    """
    logger.debug(f"Running fixture init: {inspect.currentframe().f_code.co_name}")
    num_clients = request.param if hasattr(request, "param") else 5
    for _ in range(num_clients):
        self.cli_nodes.append(NomosCli(command="client_node"))
        delay(1)

View File

@ -2,53 +2,13 @@ import allure
from tenacity import retry, stop_after_delay, wait_fixed
from src.env_vars import NOMOS_EXECUTOR
from src.libs.common import add_padding
from src.libs.custom_logger import get_custom_logger
from src.steps.common import StepsCommon
logger = get_custom_logger(__name__)
def add_padding(orig_bytes):
    """
    Append PKCS#7-like padding to a list of byte values (ints in [0..255]).

    Every padding byte carries the padding length, so it is self-describing.
    Input whose length is already a multiple of the block size receives one
    full extra block of padding.
    """
    block_size = 31
    pad_len = block_size - len(orig_bytes) % block_size
    # Guard kept from the reference implementation: a zero remainder maps to
    # a whole block of padding.
    if pad_len == 0:
        pad_len = block_size
    return orig_bytes + [pad_len] * pad_len
def remove_padding(padded_bytes):
    """
    Strip PKCS#7-like padding from a list of byte values.

    The last byte declares how many padding bytes to drop; every one of those
    bytes must equal that declared length.

    Raises:
        ValueError: on empty input or malformed padding.

    Returns:
        The unpadded list of bytes.
    """
    if not padded_bytes:
        raise ValueError("The input is empty, cannot remove padding.")
    pad_len = padded_bytes[-1]
    if not 1 <= pad_len <= 31:
        raise ValueError("Invalid padding length.")
    # Full-slice comparison also rejects a declared length longer than the input.
    if padded_bytes[-pad_len:] != [pad_len] * pad_len:
        raise ValueError("Invalid padding bytes.")
    return padded_bytes[:-pad_len]
def prepare_dispersal_request(data, app_id, index, utf8=True, padding=True):
if utf8:
data_bytes = data.encode("utf-8")

View File

@ -9,6 +9,7 @@ from src.test_data import DATA_TO_DISPERSE
class TestHighLoadDos(StepsDataAvailability):
main_nodes = []
client_nodes = []
@pytest.mark.usefixtures("setup_2_node_cluster")
def test_sustained_high_rate_upload(self):
@ -110,3 +111,44 @@ class TestHighLoadDos(StepsDataAvailability):
assert failure_ratio_w < 0.20, f"Dispersal failure ratio {failure_ratio_w} too high"
assert failure_ratio_r < 0.20, f"Data download failure ratio {failure_ratio_r} too high"
@pytest.mark.usefixtures("setup_2_node_cluster", "init_client_nodes")
def test_sustained_high_rate_multiple_clients(self):
    # Sustained-load test: hammer dispersal and range-download for a fixed
    # window, then check that the failure ratios stay under 20%.
    # NOTE(review): the client nodes started by init_client_nodes are never
    # exercised in this body — all traffic goes through self.disperse_data /
    # self.get_data_range as in the single-client variant; confirm intent.
    timeout = 60  # seconds of sustained load
    start_time = time.time()
    successful_dispersals = 0
    unsuccessful_dispersals = 0
    successful_downloads = 0
    unsuccessful_downloads = 0
    while True:
        if time.time() - start_time > timeout:
            break
        delay(0.01)  # small pacing delay between iterations
        try:
            # timeout_duration=0: fire-and-check, do not wait for retries.
            response = self.disperse_data(DATA_TO_DISPERSE[6], to_app_id(1), to_index(0), timeout_duration=0)
            if response.status_code == 200:
                successful_dispersals += 1
            else:
                unsuccessful_dispersals += 1
        except Exception:
            # Any transport/assertion error counts as a failed dispersal.
            unsuccessful_dispersals += 1
        try:
            self.get_data_range(self.node2, to_app_id(1), to_index(0), to_index(5), timeout_duration=0)
            successful_downloads += 1
        except Exception:
            unsuccessful_downloads += 1
    # Guard against division by zero below.
    assert successful_dispersals > 0, "No successful dispersals"
    assert successful_downloads > 0, "No successful downloads"
    # Ratios are failures relative to successes, not to total attempts.
    failure_ratio_w = unsuccessful_dispersals / successful_dispersals
    failure_ratio_r = unsuccessful_downloads / successful_downloads
    logger.info(f"Unsuccessful dispersals ratio: {failure_ratio_w}")
    logger.info(f"Unsuccessful download ratio: {failure_ratio_r}")
    assert failure_ratio_w < 0.20, f"Dispersal failure ratio {failure_ratio_w} too high"
    assert failure_ratio_r < 0.20, f"Data download failure ratio {failure_ratio_r} too high"