Mirror of https://github.com/logos-blockchain/logos-blockchain-e2e-tests.git (synced 2026-01-02 13:13:08 +00:00)
test: spam protection with valid uploads

- move delay(5) to fixtures

parent 9e45d0d890
commit 768bc30201
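The commit message above moves the post-startup delay(5) out of individual test bodies and into the cluster fixtures (see the two StepsCommon hunks below). The fragment that follows is a minimal sketch of that pattern, not the repository's actual fixture: delay, get_custom_logger, and the fixture name setup_2_node_cluster are taken from the diff, while start_cluster_of_nodes is a hypothetical helper standing in for the real node-startup code.

import inspect

import pytest

from src.libs.common import delay
from src.libs.custom_logger import get_custom_logger

logger = get_custom_logger(__name__)


class StepsCommon:
    @pytest.fixture(scope="function")
    def setup_2_node_cluster(self, request):
        logger.debug(f"Running fixture setup: {inspect.currentframe().f_code.co_name}")
        # Hypothetical helper: start cfgsync plus a two-node cluster and wait
        # for each node's REST service to become ready.
        self.start_cluster_of_nodes(request, num_nodes=2)
        # The settle time now lives in the fixture, so tests no longer need
        # to begin with their own delay(5).
        delay(5)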
@@ -19,7 +19,7 @@ NOMOS = "nomos"
 NOMOS_EXECUTOR = "nomos_executor"
 CFGSYNC = "cfgsync"
 
-DEFAULT_IMAGE = "ghcr.io/logos-co/nomos-node:latest"
+DEFAULT_IMAGE = "ghcr.io/logos-co/nomos-node:testnet"
 
 NODE_1 = get_env_var("NODE_1", NOMOS)
 NODE_2 = get_env_var("NODE_2", NOMOS_EXECUTOR)
@@ -5,6 +5,7 @@ import shutil
 import pytest
 
 from src.env_vars import CFGSYNC, NOMOS, NOMOS_EXECUTOR
+from src.libs.common import delay
 from src.libs.custom_logger import get_custom_logger
 from src.node.nomos_node import NomosNode
 
@@ -65,6 +66,8 @@ class StepsCommon:
             logger.error(f"REST service did not become ready in time: {ex}")
             raise
 
+        delay(5)
+
     @pytest.fixture(scope="function")
     def setup_4_node_cluster(self, request):
         logger.debug(f"Running fixture setup: {inspect.currentframe().f_code.co_name}")
@@ -82,3 +85,5 @@ class StepsCommon:
         except Exception as ex:
             logger.error(f"REST service did not become ready in time: {ex}")
             raise
+
+        delay(5)
@@ -17,7 +17,6 @@ class TestDataIntegrity(StepsDataAvailability):
 
     @pytest.mark.usefixtures("setup_4_node_cluster")
     def test_da_identify_retrieve_missing_columns(self):
-        delay(5)
         self.disperse_data(DATA_TO_DISPERSE[1], to_app_id(1), to_index(0))
         delay(5)
         # Select one target node at random to get blob data for 1/2 columns
@@ -31,7 +30,6 @@ class TestDataIntegrity(StepsDataAvailability):
 
     @pytest.mark.usefixtures("setup_2_node_cluster")
     def test_da_sampling_determines_data_presence(self):
-        delay(5)
         self.disperse_data(DATA_TO_DISPERSE[1], to_app_id(1), to_index(0))
         delay(5)
         rcv_data = self.get_data_range(self.node2, to_app_id(1), to_index(0), to_index(5))
@@ -11,16 +11,31 @@ logger = get_custom_logger(__name__)
 class TestDosRobustness(StepsDataAvailability):
     main_nodes = []
 
-    @pytest.mark.parametrize("setup_2_node_cluster", [2], indirect=True)
-    def test_spam_protection_data_uploads(self, setup_2_node_cluster):
-        delay(5)
+    @pytest.mark.usefixtures("setup_2_node_cluster")
+    def test_spam_protection_valid_uploads(self, setup_2_node_cluster):
+        num_samples = len(DATA_TO_DISPERSE)
+        missing_dispersals = num_samples
+        for i in range(num_samples):
+            try:
+                self.disperse_data(DATA_TO_DISPERSE[i], to_app_id(1), to_index(0), timeout_duration=0)
+                missing_dispersals -= 1
+            except Exception as ex:
+                logger.error(f"Dispersal #{i+1} was not successful with error {ex}")
+                break
+
+            delay(0.1)
+
+        assert missing_dispersals == 0, f"{missing_dispersals} dispersals were not successful"
+
+    @pytest.mark.usefixtures("setup_2_node_cluster")
+    def test_spam_protection_single_burst(self, setup_2_node_cluster):
         successful_dispersals = 0
         for i in range(1000):
             try:
                 self.disperse_data(DATA_TO_DISPERSE[0], to_app_id(1), to_index(0), timeout_duration=0)
                 successful_dispersals = i
             except Exception as ex:
-                logger.debug(f"Dispersal #{i} was not successful with error {ex}")
+                logger.debug(f"Dispersal #{i+1} was not successful with error {ex}")
                 break
 
-        assert successful_dispersals < 1000, "More than 1000 consecutive dispersals were successful without any constraint"
+        assert successful_dispersals < 1000, "All 1000 consecutive dispersals were successful without any constraint"
@@ -14,7 +14,6 @@ class TestNetworkingPrivacy(StepsDataAvailability):
 
     @pytest.mark.parametrize("setup_2_node_cluster", [2], indirect=True)
     def test_consumed_bandwidth_dispersal(self, setup_2_node_cluster):
-        delay(5)
         net_io = psutil.net_io_counters()
         prev_total = net_io.bytes_sent + net_io.bytes_recv
 