test_: Code Migration from status-cli-tests with review comments

Commit 5b374662d6 (parent 5de833a617)
@@ -1,7 +1,3 @@
-Here’s the updated README with the additional prerequisites and instructions:
-
----
-
 ## Overview
 
 Functional tests for `status-go`
@@ -22,15 +18,11 @@ Functional tests for `status-go`
 3. **Set up a virtual environment (recommended):**
 - In `./tests-functional`, run:
 ```bash
-python -m venv .venv
+python3 -m venv .venv
 source .venv/bin/activate
 pip install -r requirements.txt
 ```
 - **Optional (for test development)**: Use Python virtual environment for better dependency management. You can follow the guide [here](https://akrabat.com/creating-virtual-environments-with-pyenv/)
-4. Install pre-commit hooks (optional):
-```bash
-pre-commit install
-```
 
 ## How to Run
 
@@ -86,7 +78,7 @@ To run the tests:
 
 ## Build Status Backend
 
-You can build the binary with the following command in the `status-go` root directory:
+You can manually build the binary with the following command in the `status-go` root directory:
 
 ```bash
 make status-backend
@@ -98,6 +90,4 @@ For further details on building and setting up `status-go` and `status-backend`,
 
 Location of the binary: `cmd/status-backend/status-backend`
 
----
-
-This README should cover your additional setup, installation, and testing instructions with clear steps for users. Let me know if there are any further modifications needed!
+In tests, the binary is built automatically and placed in the expected path. If a build already exists, a new one is not generated.
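
The auto-build behaviour described in that README line is implemented further down in this commit (`build_and_copy_binary` in `src.libs.common` plus a guard in `StatusNode.start`). Below is a condensed, hedged sketch of that flow, not part of the commit itself; the path constants are illustrative, the real ones are derived from `Path(__file__).resolve().parents[...]` in `common.py`:

```python
import shutil
import subprocess
from pathlib import Path

# Illustrative paths; adjust to where the repository root actually is.
GO_PROJECT_ROOT = Path(".").resolve()
BUILT_BINARY = GO_PROJECT_ROOT / "cmd/status-backend/status-backend"
TEST_BINARY = GO_PROJECT_ROOT / "tests-functional/status-backend"


def ensure_status_backend() -> Path:
    """Build status-backend once and reuse the copied binary on later runs."""
    if not TEST_BINARY.exists():
        # `make status-backend` drops the binary under cmd/status-backend/.
        subprocess.run(["make", "status-backend"], cwd=GO_PROJECT_ROOT, check=True)
        shutil.copy(BUILT_BINARY, TEST_BINARY)
    return TEST_BINARY
```
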
@@ -1,9 +1,11 @@
 import json
 import logging
 import time
+from src.libs.common import write_signal_to_file
 
 import websocket
 
+logger = logging.getLogger(__name__)
 
 class SignalClient:
 
@@ -13,16 +15,13 @@ class SignalClient:
         self.received_signals = {signal: [] for signal in self.await_signals}
 
     def on_message(self, ws, signal):
-        logger = logging.getLogger(__name__)
-
         signal_data = json.loads(signal)
         signal_type = signal_data.get("type")
 
-        logger.info(f"Received signal: {signal_data}")
+        write_signal_to_file(signal_data)
 
         if signal_type in self.await_signals:
             self.received_signals[signal_type].append(signal_data)
-            # logger.debug(f"Signal {signal_type} stored: {signal_data}")
 
     def wait_for_signal(self, signal_type, expected_event=None, timeout=20):
         logger = logging.getLogger(__name__)
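
For orientation (not part of the commit): every incoming signal is now persisted via `write_signal_to_file`, and signals whose type is in `await_signals` are also appended to `received_signals`, so a test can both block on a signal and inspect the collected payloads afterwards. A minimal, hedged usage sketch; the URL and port are placeholders, and the client's WebSocket loop is assumed to be running in a background thread as `StatusNode.start_signal_client` does later in this diff:

```python
from clients.signals import SignalClient

# Placeholder URL; StatusNode builds it as f"ws://localhost:{self.port}" further down.
await_signals = ["history.request.started", "history.request.completed"]
client = SignalClient("ws://localhost:8545", await_signals)

# ... start the client's WebSocket loop in a thread and exercise the node under test ...

# Blocks until the signal arrives (raises TimeoutError otherwise), as used by StatusNode.wait_for_signal.
client.wait_for_signal("history.request.completed", timeout=20)

# All matching payloads were appended by on_message and can be asserted on directly.
completed = client.received_signals["history.request.completed"]
assert len(completed) >= 1
```
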
@@ -29,6 +29,10 @@ PROJECT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), "../.."))
 STATUS_BACKEND_URL = os.getenv("STATUS_BACKEND_URL", "http://127.0.0.1")
 API_REQUEST_TIMEOUT = int(os.getenv("API_REQUEST_TIMEOUT", "15"))
 
+SOURCE_DIR = os.path.join(PROJECT_ROOT, "cmd/status-backend")
+DEST_DIR = os.path.join(PROJECT_ROOT, "tests-functional")
+BINARY_PATH = os.path.join(SOURCE_DIR, "status-backend")
+
 # Paths relative to project root
 DATA_DIR = os.path.join(PROJECT_ROOT, "tests-functional/local")
 LOCAL_DATA_DIR1 = create_unique_data_dir(DATA_DIR, random.randint(1, 100))
@@ -47,4 +51,4 @@ LATENCY_CMD = "sudo tc qdisc add dev eth0 root netem delay 1s 100ms distribution
 PACKET_LOSS_CMD = "sudo tc qdisc add dev eth0 root netem loss 50%"
 LOW_BANDWIDTH_CMD = "sudo tc qdisc add dev eth0 root tbf rate 1kbit burst 1kbit"
 REMOVE_TC_CMD = "sudo tc qdisc del dev eth0 root"
-NUM_CONTACT_REQUESTS = 5
+NUM_CONTACT_REQUESTS = int(os.getenv("NUM_CONTACT_REQUESTS", "5"))
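
Note that these values are resolved with `os.getenv(...)` at import time, so overrides must be in the environment before this config module is imported, for example exported in the shell or set early in a conftest. A small sketch of the pattern; the module name `config` is a placeholder for wherever these constants actually live:

```python
import os

# Override before the config module is imported; os.getenv() is evaluated at import time.
os.environ.setdefault("API_REQUEST_TIMEOUT", "30")
os.environ.setdefault("NUM_CONTACT_REQUESTS", "10")

import config  # placeholder module name for the constants shown above

assert isinstance(config.NUM_CONTACT_REQUESTS, int)
```
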
@@ -1,6 +1,56 @@
+anyio==4.6.0
+attrs==23.1.0
+black==24.3.0
+certifi==2023.11.17
+cfgv==3.4.0
+charset-normalizer==3.3.2
+click==8.1.7
+clients==1.5
 deepdiff==5.5.0
-jsonschema~=3.2.0
-pytest==6.2.4
+distlib==0.3.8
+exceptiongroup==1.2.2
+execnet==2.0.2
+filelock==3.13.1
+genson==1.2.2
+h11==0.14.0
+httpcore==1.0.6
+httpx==0.27.2
+identify==2.5.33
+idna==3.7
+importlib_metadata==8.5.0
+iniconfig==2.0.0
+jsonschema==3.2.0
+libs==0.0.10
+mypy-extensions==1.0.0
+nodeenv==1.8.0
+ordered-set==4.0.2
+packaging==23.2
+pathspec==0.12.1
+platformdirs==4.1.0
+pluggy==1.5.0
+pre-commit==3.6.2
+py==1.11.0
+pyright==1.1.352
+pyrsistent==0.20.0
+pytest==8.3.3
+pytest-dependency==0.6.0
+pytest-instafail==0.5.0
+pytest-rerunfailures==13.0
+pytest-timeout==2.2.0
+pytest-xdist==3.5.0
+python-dotenv==1.0.1
+PyYAML==6.0.1
 requests==2.31.0
-genson~=1.2.2
-websocket-client~=1.4.2
+signals==0.0.2
+six==1.16.0
+sniffio==1.3.1
+tenacity==8.2.3
+toml==0.10.2
+tomli==2.0.2
+typeguard==4.1.5
+typing-inspect==0.9.0
+typing_extensions==4.9.0
+urllib3==2.1.0
+virtualenv==20.25.0
+websocket-client==1.4.2
+zipp==3.20.2
@@ -5,6 +5,7 @@ from src.libs.custom_logger import get_custom_logger
 
 logger = get_custom_logger(__name__)
 
+
 class BaseAPIClient:
     def __init__(self, base_url):
         self.base_url = base_url
@@ -1,28 +1,65 @@
+import json
 from time import sleep
 from src.libs.custom_logger import get_custom_logger
+import subprocess
+import shutil
 import os
-import allure
 import uuid
+from datetime import datetime
+from pathlib import Path
 
 logger = get_custom_logger(__name__)
+GO_PROJECT_ROOT = Path(__file__).resolve().parents[3]
+SOURCE_DIR = GO_PROJECT_ROOT / "cmd/status-backend"
-def attach_allure_file(file):
-    logger.debug(f"Attaching file {file}")
-    allure.attach.file(file, name=os.path.basename(file), attachment_type=allure.attachment_type.TEXT)
+DEST_DIR = GO_PROJECT_ROOT / "tests-functional"
+BINARY_PATH = SOURCE_DIR / "status-backend"
+REPORTS_DIR = DEST_DIR / "reports"
+REPORTS_DIR.mkdir(parents=True, exist_ok=True)
+timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
+LOG_FILE_PATH = REPORTS_DIR / f"signals_log_{timestamp}.json"
 
 
 def delay(num_seconds):
     logger.debug(f"Sleeping for {num_seconds} seconds")
     sleep(num_seconds)
 
 
 def create_unique_data_dir(base_dir: str, index: int) -> str:
-    """Generate a unique data directory for each node instance."""
     unique_id = str(uuid.uuid4())[:8]
     unique_dir = os.path.join(base_dir, f"data_{index}_{unique_id}")
     os.makedirs(unique_dir, exist_ok=True)
     return unique_dir
 
 
 def get_project_root() -> str:
-    """Returns the root directory of the project."""
     return os.path.abspath(os.path.join(os.path.dirname(__file__), "../../.."))
 
 
+def write_signal_to_file(signal_data):
+    with open(LOG_FILE_PATH, "a") as file:
+        json.dump(signal_data, file)
+        file.write("\n")
+
+
+def build_and_copy_binary():
+    logger.info(f"Building status-backend binary in {GO_PROJECT_ROOT}")
+    result = subprocess.run(["make", "status-backend"], cwd=GO_PROJECT_ROOT, capture_output=True, text=True)
+
+    if result.returncode != 0:
+        logger.info("Build failed with the following output:")
+        logger.info(result.stderr)
+        return False
+
+    if not os.path.exists(BINARY_PATH):
+        logger.info("Binary build failed or not found! Exiting.")
+        return False
+
+    logger.info(f"Copying binary to {DEST_DIR}")
+    shutil.copy(BINARY_PATH, DEST_DIR)
+
+    if os.path.exists(os.path.join(DEST_DIR, "status-backend")):
+        logger.info("Binary successfully copied to tests-functional directory.")
+        return True
+    else:
+        logger.info("Failed to copy binary to the tests-functional directory.")
+        return False
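
Because `write_signal_to_file` appends one JSON object per line, each `signals_log_<timestamp>.json` under `tests-functional/reports/` is a JSON Lines file rather than a single JSON document. A minimal reader sketch for post-run inspection (the helper name is illustrative, not part of the commit):

```python
import json
from pathlib import Path


def read_signal_log(path):
    """Parse a JSON-Lines signal log written by write_signal_to_file."""
    with open(path) as fh:
        return [json.loads(line) for line in fh if line.strip()]


reports_dir = Path("tests-functional/reports")
latest = max(reports_dir.glob("signals_log_*.json"), default=None)
if latest:
    types = [signal.get("type", "unknown") for signal in read_signal_log(latest)]
    print(f"{latest.name}: {len(types)} signals, types={sorted(set(types))}")
```
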
@@ -8,7 +8,8 @@ def log_length_filter(max_length):
     def filter(self, record):
         if len(record.getMessage()) > max_length:
             logging.getLogger(record.name).log(
-                record.levelno, f"Log line was discarded because it's longer than max_log_line_length={max_log_line_length}"
+                record.levelno,
+                f"Log line was discarded because it's longer than max_log_line_length={max_log_line_length}"
             )
             return False
         return True
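
For context, `log_length_filter` is a filter factory: records longer than `max_length` are replaced by the short notice above and then suppressed (`filter` returns `False`). A hedged usage sketch; it assumes the factory returns a `logging.Filter`-compatible object, as the nested `filter(self, record)` method suggests, and that it is importable from `src.libs.custom_logger`:

```python
from src.libs.custom_logger import get_custom_logger, log_length_filter  # import location assumed

logger = get_custom_logger(__name__)

# Assumption: log_length_filter(max_length) returns an object exposing filter(record),
# so it can be attached like any logging.Filter instance.
logger.addFilter(log_length_filter(max_length=1000))

logger.info("short message")   # passes the filter unchanged
logger.info("x" * 5000)        # suppressed; a short notice is logged in its place
```
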
@@ -17,7 +17,6 @@ class StatusNodeRPC(BaseAPIClient):
         reraise=True
     )
     def send_rpc_request(self, method, params=None, timeout=API_REQUEST_TIMEOUT):
-        """Send JSON-RPC requests, used for standard JSON-RPC API calls."""
         payload = {"jsonrpc": "2.0", "method": method, "params": params or [], "id": 1}
         logger.info(f"Sending JSON-RPC request to {self.base_url} with payload: {payload}")
 
@@ -35,7 +34,6 @@ class StatusNodeRPC(BaseAPIClient):
         reraise=True
     )
     def initialize_application(self, data_dir, timeout=API_REQUEST_TIMEOUT):
-        """Send a direct POST request to the InitializeApplication endpoint."""
         payload = {"dataDir": data_dir}
         logger.info(f"Sending direct POST request to InitializeApplication with payload: {payload}")
 
@@ -53,7 +51,6 @@ class StatusNodeRPC(BaseAPIClient):
         reraise=True
     )
     def create_account_and_login(self, account_data, timeout=API_REQUEST_TIMEOUT):
-        """Send a direct POST request to CreateAccountAndLogin endpoint."""
         payload = {
             "rootDataDir": account_data.get("rootDataDir"),
             "displayName": account_data.get("displayName", "test1"),
@@ -76,7 +73,6 @@ class StatusNodeRPC(BaseAPIClient):
         reraise=True
     )
     def start_messenger(self, timeout=API_REQUEST_TIMEOUT):
-        """Send JSON-RPC request to start Waku messenger."""
         payload = {
             "jsonrpc": "2.0",
             "method": "wakuext_startMessenger",
@@ -1,5 +1,4 @@
 import os
-import asyncio
 import random
 import shutil
 import signal
@@ -13,11 +12,17 @@ from conftest import option
 from src.libs.custom_logger import get_custom_logger
 from src.node.rpc_client import StatusNodeRPC
 from clients.signals import SignalClient
+from src.libs.common import build_and_copy_binary
+from pathlib import Path
 
 logger = get_custom_logger(__name__)
 
+PROJECT_ROOT = Path(__file__).resolve().parents[2]
+
 
 class StatusNode:
+    binary_built = False
+
     def __init__(self, name=None, port=None, pubkey=None):
         self.data_dir = None
         try:
@@ -37,9 +42,7 @@ class StatusNode:
         self.api = StatusNodeRPC(self.port, self.name)
 
     def setup_method(self):
-        # Set up RPC client
         self.rpc_client = RpcClient(option.rpc_url_statusd)
-        # Set up WebSocket signal client
         await_signals = ["history.request.started", "history.request.completed"]
         self.signal_client = SignalClient(option.ws_url_statusd, await_signals)
 
@@ -49,7 +52,6 @@ class StatusNode:
         websocket_thread.start()
 
     def initialize_node(self, name, port, data_dir, account_data):
-        """Centralized method to initialize a node."""
         self.name = name
         self.port = port
         self.start(data_dir)
@@ -59,14 +61,19 @@ class StatusNode:
         self.pubkey = self.get_pubkey(account_data["displayName"])
 
     def start_node(self, command):
-        """Start the node using a subprocess command."""
         logger.info(f"Starting node with command: {command}")
         self.process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True)
         self.pid = self.process.pid
         self.log_thread = self.capture_process_logs(self.process, self.logs)
 
     def start(self, data_dir, capture_logs=True):
-        """Start the status-backend node and initialize it before subscribing to signals."""
+        dest_binary_path = Path(PROJECT_ROOT) / "status-backend"
+
+        if not StatusNode.binary_built and not dest_binary_path.exists():
+            if not build_and_copy_binary():
+                raise RuntimeError("Failed to build or copy the status-backend binary.")
+            StatusNode.binary_built = True
+
         self.capture_logs = capture_logs
         self.data_dir = data_dir
         command = ["./status-backend", f"--address=localhost:{self.port}"]
@@ -77,17 +84,14 @@ class StatusNode:
         self.start_signal_client()
 
     def create_account_and_login(self, account_data):
-        """Create an account and log in using the status-backend."""
         logger.info(f"Creating account and logging in for node {self.name}")
         self.api.create_account_and_login(account_data)
 
     def start_messenger(self):
-        """Start the Waku messenger."""
         logger.info(f"Starting Waku messenger for node {self.name}")
         self.api.start_messenger()
 
     def start_signal_client(self):
-        """Start a SignalClient for the given node to listen for WebSocket signals."""
         ws_url = f"ws://localhost:{self.port}"
         await_signals = ["history.request.started", "history.request.completed"]
         self.signal_client = SignalClient(ws_url, await_signals)
@@ -98,10 +102,9 @@ class StatusNode:
         logger.info("WebSocket client started and subscribed to signals.")
 
     def wait_fully_started(self):
-        """Wait until the node logs indicate that the server has started."""
         logger.info(f"Waiting for {self.name} to fully start...")
         start_time = time.time()
-        while time.time() - start_time < 20:
+        while time.time() - start_time < 30:
             if any("server started" in log for log in self.logs):
                 logger.info(f"Node {self.name} has fully started.")
                 return
@@ -109,8 +112,6 @@ class StatusNode:
         raise TimeoutError(f"Node {self.name} did not fully start in time.")
 
     def capture_process_logs(self, process, logs):
-        """Capture logs from a subprocess."""
-
         def read_output():
             while True:
                 line = process.stdout.readline()
|
||||||
return log_thread
|
return log_thread
|
||||||
|
|
||||||
def random_node_name(self, length=10):
|
def random_node_name(self, length=10):
|
||||||
"""Generate a random node name."""
|
|
||||||
allowed_chars = string.ascii_lowercase + string.digits + "_-"
|
allowed_chars = string.ascii_lowercase + string.digits + "_-"
|
||||||
return ''.join(random.choice(allowed_chars) for _ in range(length))
|
return ''.join(random.choice(allowed_chars) for _ in range(length))
|
||||||
|
|
||||||
def get_pubkey(self, display_name):
|
def get_pubkey(self, display_name):
|
||||||
"""Retrieve public-key based on display name from accounts_getAccounts response."""
|
|
||||||
response = self.api.send_rpc_request("accounts_getAccounts")
|
response = self.api.send_rpc_request("accounts_getAccounts")
|
||||||
|
|
||||||
accounts = response.get("result", [])
|
accounts = response.get("result", [])
|
||||||
for account in accounts:
|
for account in accounts:
|
||||||
if account.get("name") == display_name:
|
if account.get("name") == display_name:
|
||||||
|
@@ -140,11 +138,9 @@ class StatusNode:
         raise ValueError(f"Public key not found for display name: {display_name}")
 
     def wait_for_signal(self, signal_type, expected_event=None, timeout=20):
-        """Wait for a signal using the signal client and validate against expected event details."""
         return self.signal_client.wait_for_signal(signal_type, expected_event, timeout)
 
     def stop(self, remove_local_data=True):
-        """Stop the status-backend process."""
         if self.process:
             logger.info(f"Stopping node with name: {self.name}")
             self.process.kill()
@@ -12,19 +12,6 @@ logger = get_custom_logger(__name__)
 
 
 class StepsCommon:
-    @pytest.fixture(scope="function", autouse=False)
-    def start_1_node(self):
-        account_data = {
-            **ACCOUNT_PAYLOAD_DEFAULTS,
-            "rootDataDir": LOCAL_DATA_DIR1,
-            "displayName": "first_node_user"
-        }
-        random_port = str(random.randint(1024, 65535))
-
-        self.first_node = StatusNode()
-        self.first_node.initialize_node("first_node", random_port, LOCAL_DATA_DIR1, account_data)
-        self.first_node_pubkey = self.first_node.get_pubkey()
-
     @pytest.fixture(scope="function", autouse=False)
     def start_2_nodes(self):
         logger.debug(f"Running fixture setup: {inspect.currentframe().f_code.co_name}")
@@ -64,7 +51,6 @@ class StepsCommon:
 
     @contextmanager
    def add_latency(self):
-        """Add network latency"""
         logger.debug("Adding network latency")
         subprocess.Popen(LATENCY_CMD, shell=True)
         try:
@@ -75,7 +61,6 @@ class StepsCommon:
 
     @contextmanager
     def add_packet_loss(self):
-        """Add packet loss"""
         logger.debug("Adding packet loss")
         subprocess.Popen(PACKET_LOSS_CMD, shell=True)
         try:
@@ -86,7 +71,6 @@ class StepsCommon:
 
     @contextmanager
     def add_low_bandwidth(self):
-        """Add low bandwidth"""
         logger.debug("Adding low bandwidth")
         subprocess.Popen(LOW_BANDWIDTH_CMD, shell=True)
         try:
@@ -108,10 +92,12 @@ class StepsCommon:
     def send_with_timestamp(self, send_method, id, message):
         timestamp = datetime.now().strftime("%H:%M:%S")
         response = send_method(id, message)
-        response_messages = response["result"]["messages"]
+        response_messages = response.get("result", {}).get("messages", [])
         message_id = None
+
         for m in response_messages:
             if m["text"] == message:
                 message_id = m["id"]
                 break
-        return timestamp, message_id
+
+        return timestamp, message_id, response
@@ -53,7 +53,6 @@ class TestContactRequest(StepsCommon):
 
             nodes.append((first_node, second_node, account_data_first["displayName"], index))
 
-        # Validate contact requests
         missing_contact_requests = []
         for first_node, second_node, display_name, index in nodes:
             result = self.send_and_wait_for_message((first_node, second_node), display_name, index, timeout_secs)
@@ -83,18 +82,12 @@ class TestContactRequest(StepsCommon):
         first_node_pubkey = first_node.get_pubkey(display_name)
         contact_request_message = f"contact_request_{index}"
 
-        timestamp, message_id = self.send_with_timestamp(
+        timestamp, message_id, response = self.send_with_timestamp(
             second_node.send_contact_request, first_node_pubkey, contact_request_message
         )
 
-        response = second_node.send_contact_request(first_node_pubkey, contact_request_message)
-
-        expected_event_started = {"requestId": "", "peerId": "", "batchIndex": 0, "numBatches": 1}
-        expected_event_completed = {"requestId": "", "peerId": "", "batchIndex": 0}
-
         try:
-            first_node.wait_for_signal("history.request.started", expected_event_started, timeout)
-            first_node.wait_for_signal("history.request.completed", expected_event_completed, timeout)
+            first_node.wait_for_signal("history.request.started", None, timeout)
+            first_node.wait_for_signal("history.request.completed", None, timeout)
         except TimeoutError as e:
             logging.error(f"Signal validation failed: {str(e)}")
             return timestamp, message_id, contact_request_message, None