test_: status cli to status backend

Florin Barbu 2024-11-11 16:40:05 +02:00
parent 7ee45bab1c
commit 22b47bf4ea
35 changed files with 1183 additions and 398 deletions

pyrightconfig.json (new file)

@ -0,0 +1,11 @@
{
"include": ["tests-functional"],
"reportMissingImports": true,
"reportOptionalMemberAccess": false,
"reportGeneralTypeIssues": false,
"reportInvalidStringEscapeSequence": false,
"reportWildcardImportFromLibrary": false,
"venvPath": ".",
"venv": ".venv",
"typeCheckingMode": "off"
}


@ -0,0 +1,11 @@
repos:
- repo: https://github.com/psf/black
rev: 23.7.0
hooks:
- id: black
args: [--line-length=150]
- repo: https://github.com/RobertCraigie/pyright-python
rev: v1.1.326
hooks:
- id: pyright


@ -7,15 +7,22 @@ Functional tests for status-go
 - [Overview](#overview)
 - [How to Install](#how-to-install)
 - [How to Run](#how-to-run)
 - [Running Tests](#running-tests)
 - [Implementation details](#implementation-details)
 ## How to Install
-* Install [Docker](https://docs.docker.com/engine/install/) and [Docker Compose](https://docs.docker.com/compose/install/)
-* Install [Python 3.10.14](https://www.python.org/downloads/)
-* In `./tests-functional`, run `pip install -r requirements.txt`
-* **Optional (for test development)**: Use Python virtual environment for better dependency management. You can follow the guide [here](https://akrabat.com/creating-virtual-environments-with-pyenv/):
+1. Install [Docker](https://docs.docker.com/engine/install/) and [Docker Compose](https://docs.docker.com/compose/install/)
+2. Install [Python 3.10.14](https://www.python.org/downloads/)
+3. **Set up a virtual environment (recommended):**
+   - In `./tests-functional`, run:
+   ```bash
+   python3 -m venv .venv
+   source .venv/bin/activate
+   pip install -r requirements.txt
+   ```
+   - **Optional (for test development)**: Use Python virtual environment for better dependency management. You can follow the guide [here](https://akrabat.com/creating-virtual-environments-with-pyenv/)
 ## How to Run


@ -1,48 +0,0 @@
import json
import logging
import time
import websocket
class SignalClient:
def __init__(self, ws_url, await_signals):
self.url = f"{ws_url}/signals"
self.await_signals = await_signals
self.received_signals = {
signal: [] for signal in self.await_signals
}
def on_message(self, ws, signal):
signal = json.loads(signal)
if signal.get("type") in self.await_signals:
self.received_signals[signal["type"]].append(signal)
def wait_for_signal(self, signal_type, timeout=20):
start_time = time.time()
while not self.received_signals.get(signal_type):
if time.time() - start_time >= timeout:
raise TimeoutError(
f"Signal {signal_type} is not received in {timeout} seconds")
time.sleep(0.2)
logging.debug(f"Signal {signal_type} is received in {round(time.time() - start_time)} seconds")
return self.received_signals[signal_type][0]
def _on_error(self, ws, error):
logging.error(f"Error: {error}")
def _on_close(self, ws, close_status_code, close_msg):
logging.info(f"Connection closed: {close_status_code}, {close_msg}")
def _on_open(self, ws):
logging.info("Connection opened")
def _connect(self):
ws = websocket.WebSocketApp(self.url,
on_message=self.on_message,
on_error=self._on_error,
on_close=self._on_close)
ws.on_open = self._on_open
ws.run_forever()


@ -9,24 +9,20 @@
"HTTPPort": 3333, "HTTPPort": 3333,
"HTTPVirtualHosts": ["*", "status-go"], "HTTPVirtualHosts": ["*", "status-go"],
"APIModules": "eth,admin,wallet,accounts,waku,wakuext,ethclient", "APIModules": "eth,admin,wallet,accounts,waku,wakuext,ethclient",
"WalletConfig": { "WalletConfig": {"Enabled": true},
"Enabled": true "WakuConfig": {"Enabled": false},
},
"WakuConfig": {
"Enabled": false
},
"Networks": [ "Networks": [
{ {
"ChainID": 31337, "ChainID": 31337,
"ChainName": "Anvil", "ChainName": "Anvil",
"RPCURL": "http://anvil:8545", "RPCURL": "http://anvil:8545",
"ShortName": "eth", "ShortName": "eth",
"NativeCurrencyName": "Ether", "NativeCurrencyName": "Ether",
"NativeCurrencySymbol": "ETH", "NativeCurrencySymbol": "ETH",
"NativeCurrencyDecimals": 18, "NativeCurrencyDecimals": 18,
"IsTest": false, "IsTest": false,
"Layer": 1, "Layer": 1,
"Enabled": true "Enabled": true,
} }
] ],
} }


@ -1,8 +1,11 @@
+import inspect
 import os
 import threading
 from dataclasses import dataclass
 import pytest as pytest
+from src.libs.custom_logger import get_custom_logger
+logger = get_custom_logger(__name__)
 def pytest_addoption(parser):
@ -58,30 +61,27 @@ def pytest_configure(config):
 option.base_dir = os.path.dirname(os.path.abspath(__file__))
-@pytest.fixture(scope="session", autouse=True)
+@pytest.fixture(scope="session", autouse=False)
 def init_status_backend():
+logger.info(f"Running fixture setup: {inspect.currentframe().f_code.co_name}")
 await_signals = [
 "mediaserver.started",
 "node.started",
 "node.ready",
 "node.login",
 "wallet",  # TODO: a test per event of a different type
 ]
-from clients.status_backend import StatusBackend
-backend_client = StatusBackend(
-await_signals=await_signals
-)
-websocket_thread = threading.Thread(
-target=backend_client._connect
-)
+from src.node.clients.status_backend import StatusBackend
+backend_client = StatusBackend(await_signals=await_signals)
+websocket_thread = threading.Thread(target=backend_client._connect)
 websocket_thread.daemon = True
 websocket_thread.start()
-backend_client.init_status_backend()
+backend_client.init_status_backend(data_dir="/")
 backend_client.restore_account_and_wait_for_rpc_client_to_start()
 yield backend_client
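Since the fixture is no longer `autouse`, suites that need a running status-backend now opt in explicitly. A minimal sketch of that pattern (the test class is hypothetical; it mirrors the updated RPC tests later in this commit):

```python
# Sketch only: how a test module requests the now non-autouse fixture.
import pytest
from src.steps.common import StatusBackendTestCase


@pytest.mark.usefixtures("init_status_backend")
@pytest.mark.rpc
class TestExampleRpc(StatusBackendTestCase):
    def test_get_keypairs(self):
        # self.rpc_client is created in StatusBackendTestCase.setup_class
        self.rpc_client.rpc_valid_request("accounts_getKeypairs", [])
```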


@ -1,23 +0,0 @@
from dataclasses import dataclass
@dataclass
class Account:
address: str
private_key: str
password: str
passphrase: str
user_1 = Account(
address="0xf39fd6e51aad88f6f4ce6ab8827279cfffb92266",
private_key="0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80",
password="Strong12345",
passphrase="test test test test test test test test test test test junk"
)
user_2 = Account(
address="0x70997970c51812dc3a010c7d01b50e0d17dc79c8",
private_key="0x59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d",
password="Strong12345",
passphrase="test test test test test test test test test test nest junk"
)


@ -1,9 +1,11 @@
 [pytest]
-addopts = -s -v --tb=short
+addopts = -s -v --instafail --tb=short --color=auto
 log_cli=true
 log_level=INFO
+log_file = log/test.log
+log_cli_format = %(asctime)s %(name)s %(levelname)s %(message)s
+log_file_format = %(asctime)s %(name)s %(levelname)s %(message)s
+timeout = 300
 markers =
 rpc
 wallet
@ -14,3 +16,5 @@ markers =
 init
 transaction
 create_account
+filterwarnings =
+ignore::DeprecationWarning


@ -4,3 +4,9 @@ pytest==6.2.4
 requests==2.31.0
 genson~=1.2.2
 websocket-client~=1.4.2
+tenacity~=9.0.0
+black~=24.10.0
+pyright~=1.1.388
+pytest-instafail==0.5.0
+pre-commit~=4.0.1
+pytest-timeout~=2.2.0


@ -0,0 +1,57 @@
import os
import random
from dataclasses import dataclass
import uuid
def create_unique_data_dir(base_dir: str, index: int) -> str:
unique_id = str(uuid.uuid4())[:8]
unique_dir = os.path.join(base_dir, f"data_{index}_{unique_id}")
os.makedirs(unique_dir, exist_ok=True)
return unique_dir
@dataclass
class Account:
address: str
private_key: str
password: str
passphrase: str
user_1 = Account(
address="0xf39fd6e51aad88f6f4ce6ab8827279cfffb92266",
private_key="0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80",
password="Strong12345",
passphrase="test test test test test test test test test test test junk",
)
user_2 = Account(
address="0x70997970c51812dc3a010c7d01b50e0d17dc79c8",
private_key="0x59c6995e998f97a5a0044966f0945389dc9e86dae88c7a8412f4603b6b78690d",
password="Strong12345",
passphrase="test test test test test test test test test test nest junk",
)
PROJECT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), "../.."))
STATUS_BACKEND_URL = os.getenv("STATUS_BACKEND_URL", "http://127.0.0.1")
API_REQUEST_TIMEOUT = int(os.getenv("API_REQUEST_TIMEOUT", "15"))
SOURCE_DIR = os.path.join(PROJECT_ROOT, "build/bin")
DEST_DIR = os.path.join(PROJECT_ROOT, "tests-functional")
BINARY_PATH = os.path.join(SOURCE_DIR, "status-backend")
DATA_DIR = os.path.join(PROJECT_ROOT, "tests-functional/local")
SIGNALS_DIR = os.path.join(DEST_DIR, "signals")
LOCAL_DATA_DIR1 = create_unique_data_dir(DATA_DIR, random.randint(1, 100))
LOCAL_DATA_DIR2 = create_unique_data_dir(DATA_DIR, random.randint(1, 100))
RESOURCES_FOLDER = os.path.join(PROJECT_ROOT, "resources")
ACCOUNT_PAYLOAD_DEFAULTS = {
"displayName": "user",
"password": "test_password",
"customizationColor": "primary",
}
NUM_CONTACT_REQUESTS = int(os.getenv("NUM_CONTACT_REQUESTS", "5"))
NUM_MESSAGES = int(os.getenv("NUM_MESSAGES", "20"))
DELAY_BETWEEN_MESSAGES = int(os.getenv("DELAY_BETWEEN_MESSAGES", "1"))
EVENT_SIGNAL_TIMEOUT_SEC = int(os.getenv("EVENT_SIGNAL_TIMEOUT_SEC", "5"))
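For context, these defaults are merged into per-node `CreateAccountAndLogin` payloads by the steps and tests added below; a small sketch:

```python
# Sketch only: building an account payload from the defaults above,
# the same way src/steps/common.py does for each node.
from src.constants import ACCOUNT_PAYLOAD_DEFAULTS, LOCAL_DATA_DIR1

account_data = {
    **ACCOUNT_PAYLOAD_DEFAULTS,        # displayName / password / customizationColor
    "rootDataDir": LOCAL_DATA_DIR1,    # unique per-node data dir created above
    "displayName": "first_node_user",  # per-node override
}
```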


@ -0,0 +1,29 @@
import requests
import json
from tenacity import retry, stop_after_delay, wait_fixed
from src.libs.custom_logger import get_custom_logger
logger = get_custom_logger(__name__)
class BaseAPIClient:
def __init__(self, base_url):
self.base_url = base_url
@retry(stop=stop_after_delay(10), wait=wait_fixed(0.5), reraise=True)
def send_post_request(self, endpoint, payload=None, headers=None, timeout=10):
if headers is None:
headers = {"Content-Type": "application/json"}
if payload is None:
payload = {}
url = f"{self.base_url}/{endpoint}"
logger.info(f"Sending POST request to {url} with payload: {json.dumps(payload)}")
try:
response = requests.post(url, headers=headers, data=json.dumps(payload), timeout=timeout)
response.raise_for_status()
logger.info(f"Response received: {response.status_code} - {response.text}")
return response.json()
except requests.exceptions.RequestException as e:
logger.error(f"Request to {url} failed: {str(e)}")
raise
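A minimal usage sketch for this helper; the module path and endpoint below are assumptions for illustration, only the retry and logging behaviour comes from the class above:

```python
# Sketch only: module path and endpoint are illustrative assumptions.
from src.node.clients.base_api_client import BaseAPIClient

client = BaseAPIClient("http://127.0.0.1:3333/statusgo")
# Failed requests are retried for up to 10 s (every 0.5 s), then re-raised.
result = client.send_post_request("InitializeApplication", payload={"dataDir": "/tmp/status-data"})
```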


@ -0,0 +1,50 @@
import json
from time import sleep
from src.libs.custom_logger import get_custom_logger
import subprocess
import shutil
import os
from datetime import datetime
from src.constants import PROJECT_ROOT, BINARY_PATH, DEST_DIR, SIGNALS_DIR
from pathlib import Path
logger = get_custom_logger(__name__)
Path(SIGNALS_DIR).mkdir(parents=True, exist_ok=True)
def delay(num_seconds):
logger.debug(f"Sleeping for {num_seconds} seconds")
sleep(num_seconds)
def write_signal_to_file(signal_data):
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
signal_file_path = os.path.join(SIGNALS_DIR, f"signals_log_{timestamp}.json")
with open(signal_file_path, "a+") as file:
json.dump(signal_data, file)
file.write("\n")
def build_and_copy_binary():
logger.info(f"Building status-backend binary in {PROJECT_ROOT}")
result = subprocess.run(["make", "status-backend"], cwd=PROJECT_ROOT, capture_output=True, text=True)
if result.returncode != 0:
logger.info("Build failed with the following output:")
logger.info(result.stderr)
return False
if not os.path.exists(BINARY_PATH):
logger.info("Binary build failed or not found! Exiting.")
return False
logger.info(f"Copying binary to {DEST_DIR}")
shutil.copy(BINARY_PATH, DEST_DIR)
if os.path.exists(os.path.join(DEST_DIR, "status-backend")):
logger.info("Binary successfully copied to tests-functional directory.")
return True
else:
logger.info("Failed to copy binary to the tests-functional directory.")
return False


@ -0,0 +1,25 @@
import logging
max_log_line_length = 10000
def log_length_filter(max_length):
class logLengthFilter(logging.Filter):
def filter(self, record):
if len(record.getMessage()) > max_length:
logging.getLogger(record.name).log(
record.levelno,
f"Log line not shown in stdout because it's longer than max_log_line_length={max_log_line_length}. Please check the log/test.log for the full log",
)
return False
return True
return logLengthFilter()
def get_custom_logger(name):
logging.getLogger("urllib3").setLevel(logging.WARNING)
logging.getLogger("docker").setLevel(logging.WARNING)
logger = logging.getLogger(name)
logger.addFilter(log_length_filter(max_log_line_length))
return logger
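Usage is the same as a plain `logging` logger; the filter only swaps over-long messages for a short notice (sketch):

```python
# Sketch only: the pattern every new module in this commit follows.
from src.libs.custom_logger import get_custom_logger

logger = get_custom_logger(__name__)
logger.info("normal message")   # logged as usual
logger.info("x" * 20_000)       # replaced by a short "not shown" notice by the filter
```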


@ -0,0 +1,68 @@
import json
import time
from src.libs.common import write_signal_to_file
import websocket
from src.libs.custom_logger import get_custom_logger
logger = get_custom_logger(__name__)
class SignalClient:
def __init__(self, ws_url, await_signals):
self.url = f"{ws_url}/signals"
self.await_signals = await_signals
self.received_signals = {signal: [] for signal in self.await_signals}
def on_message(self, ws, signal):
signal_data = json.loads(signal)
signal_type = signal_data.get("type")
write_signal_to_file(signal_data)
if signal_type in self.await_signals:
self.received_signals[signal_type].append(signal_data)
def wait_for_signal(self, signal_type, timeout=20):
start_time = time.time()
while not self.received_signals.get(signal_type):
if time.time() - start_time >= timeout:
raise TimeoutError(f"Signal {signal_type} not received in {timeout} seconds")
time.sleep(0.2)
logger.info(f"Signal {signal_type} received in {round(time.time() - start_time)} seconds")
return self.received_signals[signal_type][0]
def wait_for_complete_signal(self, signal_type, timeout=5):
start_time = time.time()
events = []
while time.time() - start_time < timeout:
if self.received_signals.get(signal_type):
events.extend(self.received_signals[signal_type])
self.received_signals[signal_type] = []
time.sleep(0.2)
if events:
logger.info(f"Collected {len(events)} events of type {signal_type} within {timeout} seconds")
return events
raise TimeoutError(f"No signals of type {signal_type} received in {timeout} seconds")
def _on_error(self, ws, error):
logger.error(f"WebSocket error: {error}")
def _on_close(self, ws, close_status_code, close_msg):
logger.info(f"WebSocket connection closed: {close_status_code}, {close_msg}")
def _on_open(self, ws):
logger.info("WebSocket connection opened")
def _connect(self):
ws = websocket.WebSocketApp(
self.url,
on_message=self.on_message,
on_error=self._on_error,
on_close=self._on_close,
)
ws.on_open = self._on_open
ws.run_forever()
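For reference, the client is driven the same way conftest.py and StatusNode do it in this commit: `_connect()` blocks in `run_forever()`, so it runs in a daemon thread while the test waits on specific signals. A sketch, assuming a status-backend serving `/signals` on port 3333:

```python
# Sketch only: port 3333 is an assumption for illustration.
import threading
from src.node.clients.signals import SignalClient

client = SignalClient("ws://127.0.0.1:3333", await_signals=["node.ready", "messages.new"])

ws_thread = threading.Thread(target=client._connect)  # run_forever() blocks
ws_thread.daemon = True
ws_thread.start()

ready = client.wait_for_signal("node.ready", timeout=20)       # first matching signal
events = client.wait_for_complete_signal("messages.new")       # all signals seen in the window
```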


@ -1,19 +1,20 @@
 import json
-import logging
 import time
 from datetime import datetime
 from json import JSONDecodeError
 import jsonschema
 import requests
+from tenacity import retry, stop_after_attempt, stop_after_delay, wait_fixed
-from clients.signals import SignalClient
+from src.node.clients.signals import SignalClient
 from conftest import option
-from constants import user_1
+from src.constants import API_REQUEST_TIMEOUT, user_1
+from src.libs.custom_logger import get_custom_logger
+logger = get_custom_logger(__name__)
 class RpcClient:
 def __init__(self, rpc_url, client=requests.Session()):
 self.client = client
 self.rpc_url = rpc_url
@ -22,11 +23,9 @@ class RpcClient:
 try:
 return response.json()[key]
 except json.JSONDecodeError:
-raise AssertionError(
-f"Invalid JSON in response: {response.content}")
+raise AssertionError(f"Invalid JSON in response: {response.content}")
 except KeyError:
-raise AssertionError(
-f"Key '{key}' not found in the JSON response: {response.content}")
+raise AssertionError(f"Key '{key}' not found in the JSON response: {response.content}")
 def verify_is_valid_json_rpc_response(self, response, _id=None):
 assert response.status_code == 200, f"Got response {response.content}, status code {response.status_code}"
@ -36,9 +35,7 @@ class RpcClient:
 if _id:
 try:
 if _id != response.json()["id"]:
-raise AssertionError(
-f"got id: {response.json()['id']} instead of expected id: {_id}"
-)
+raise AssertionError(f"got id: {response.json()['id']} instead of expected id: {_id}")
 except KeyError:
 raise AssertionError(f"no id in response {response.json()}")
 return response
@ -48,17 +45,18 @@ class RpcClient:
 assert response.content
 self._check_decode_and_key_errors_in_response(response, "error")
-def rpc_request(self, method, params=[], request_id=13, url=None):
+@retry(stop=stop_after_delay(10), wait=wait_fixed(0.5), reraise=True)
+def rpc_request(self, method, params=[], request_id=13, url=None, timeout=API_REQUEST_TIMEOUT):
 url = url if url else self.rpc_url
 data = {"jsonrpc": "2.0", "method": method, "id": request_id}
 if params:
 data["params"] = params
-logging.info(f"Sending POST request to url {url} with data: {json.dumps(data, sort_keys=True, indent=4)}")
-response = self.client.post(url, json=data)
+logger.info(f"Sending POST request to url {url} with data: {json.dumps(data, sort_keys=True, indent=4)}")
+response = self.client.post(url, json=data, timeout=timeout)
 try:
-logging.info(f"Got response: {json.dumps(response.json(), sort_keys=True, indent=4)}")
+logger.info(f"Got response: {json.dumps(response.json(), sort_keys=True, indent=4)}")
 except JSONDecodeError:
-logging.info(f"Got response: {response.content}")
+logger.info(f"Got response: {response.content}")
 return response
 def rpc_valid_request(self, method, params=[], _id=None, url=None):
@ -68,64 +66,73 @@ class RpcClient:
 def verify_json_schema(self, response, method):
 with open(f"{option.base_dir}/schemas/{method}", "r") as schema:
-jsonschema.validate(instance=response,
-schema=json.load(schema))
+jsonschema.validate(instance=response, schema=json.load(schema))
 class StatusBackend(RpcClient, SignalClient):
-def __init__(self, await_signals=list()):
-self.api_url = f"{option.rpc_url_status_backend}/statusgo"
-self.ws_url = f"{option.ws_url_status_backend}"
-self.rpc_url = f"{option.rpc_url_status_backend}/statusgo/CallRPC"
+def __init__(self, api_url=None, ws_url=None, await_signals=list()):
+self.api_url = f"{api_url if api_url else option.rpc_url_status_backend}/statusgo"
+self.ws_url = f"{ws_url if ws_url else option.ws_url_status_backend}"
+self.rpc_url = f"{api_url if api_url else option.rpc_url_status_backend}/statusgo/CallRPC"
 RpcClient.__init__(self, self.rpc_url)
 SignalClient.__init__(self, self.ws_url, await_signals)
-def api_request(self, method, data, url=None):
+def api_request(self, method, data, timeout=API_REQUEST_TIMEOUT, url=None):
 url = url if url else self.api_url
 url = f"{url}/{method}"
-logging.info(f"Sending POST request to url {url} with data: {json.dumps(data, sort_keys=True, indent=4)}")
-response = requests.post(url, json=data)
-logging.info(f"Got response: {response.content}")
+logger.info(f"Sending POST request to url {url} with data: {json.dumps(data, sort_keys=True, indent=4)}")
+response = requests.post(url, json=data, timeout=timeout)
+logger.info(f"Got response: {response.content}")
 return response
 def verify_is_valid_api_response(self, response):
 assert response.status_code == 200, f"Got response {response.content}, status code {response.status_code}"
 assert response.content
-logging.info(f"Got response: {response.content}")
+logger.info(f"Got response: {response.content}")
 try:
 assert not response.json()["error"]
 except json.JSONDecodeError:
-raise AssertionError(
-f"Invalid JSON in response: {response.content}")
+raise AssertionError(f"Invalid JSON in response: {response.content}")
 except KeyError:
 pass
-def api_valid_request(self, method, data):
-response = self.api_request(method, data)
+def api_valid_request(self, method, data, timeout=API_REQUEST_TIMEOUT):
+response = self.api_request(method, data, timeout)
 self.verify_is_valid_api_response(response)
-return response
+return response.json()
-def init_status_backend(self, data_dir="/"):
-method = "InitializeApplication"
-data = {
-"dataDir": data_dir
-}
-return self.api_valid_request(method, data)
+@retry(stop=stop_after_attempt(3), wait=wait_fixed(1), reraise=True)
+def init_status_backend(self, data_dir, timeout=API_REQUEST_TIMEOUT):
+payload = {"dataDir": data_dir}
+logger.info(f"Sending direct POST request to InitializeApplication with payload: {payload}")
+response = self.api_valid_request("InitializeApplication", payload, timeout=timeout)
+if response.get("error"):
+logger.error(f"InitializeApplication request failed with error: {response['error']}")
+raise RuntimeError(f"Failed to initialize application: {response['error']}")
+return response
-def create_account_and_login(self, display_name="Mr_Meeseeks", password=user_1.password):
-data_dir = f"dataDir_{datetime.now().strftime('%Y%m%d_%H%M%S')}"
-method = "CreateAccountAndLogin"
-data = {
-"rootDataDir": data_dir,
-"kdfIterations": 256000,
-"displayName": display_name,
-"password": password,
-"customizationColor": "primary"
-}
-return self.api_valid_request(method, data)
+@retry(stop=stop_after_attempt(3), wait=wait_fixed(1), reraise=True)
+def create_account_and_login(self, account_data, timeout=API_REQUEST_TIMEOUT):
+payload = {
+"rootDataDir": account_data.get("rootDataDir"),
+"displayName": account_data.get("displayName", "test1"),
+"password": account_data.get("password", "test1"),
+"customizationColor": account_data.get("customizationColor", "primary"),
+}
+logger.info(f"Sending direct POST request to CreateAccountAndLogin with payload: {payload}")
+response = self.api_valid_request("CreateAccountAndLogin", payload, timeout=timeout)
+if response.get("error"):
+logger.error(f"CreateAccountAndLogin request failed with error: {response['error']}")
+raise RuntimeError(f"Failed to create account and login: {response['error']}")
+return response
 def restore_account_and_login(self, display_name="Mr_Meeseeks", user=user_1):
 method = "RestoreAccountAndLogin"
@ -149,9 +156,9 @@ class StatusBackend(RpcClient, SignalClient):
 "NativeCurrencyDecimals": 18,
 "IsTest": False,
 "Layer": 1,
-"Enabled": True
+"Enabled": True,
 }
-]
+],
 }
 return self.api_valid_request(method, data)
@ -161,7 +168,7 @@ class StatusBackend(RpcClient, SignalClient):
 # ToDo: change this part for waiting for `node.login` signal when websockets are migrated to StatusBackend
 while time.time() - start_time <= timeout:
 try:
-self.rpc_valid_request(method='accounts_getKeypairs')
+self.rpc_valid_request(method="accounts_getKeypairs")
 return
 except AssertionError:
 time.sleep(3)
@ -172,13 +179,15 @@ class StatusBackend(RpcClient, SignalClient):
 response = self.rpc_request(method, params)
 json_response = response.json()
-if 'error' in json_response:
-assert json_response['error']['code'] == -32000
-assert json_response['error']['message'] == "messenger already started"
+if "error" in json_response:
+assert json_response["error"]["code"] == -32000
+assert json_response["error"]["message"] == "messenger already started"
 return
 self.verify_is_valid_json_rpc_response(response)
+return response
 def start_wallet(self, params=[]):
 method = "wallet_startWallet"
 response = self.rpc_request(method, params)
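The reworked client is driven as in conftest.py and StatusNode: subscribe to signals first, then initialize, create or restore an account, and issue RPC calls. A sketch (URLs and the data dir are illustrative):

```python
# Sketch only: URLs and paths are illustrative.
import threading
from src.node.clients.status_backend import StatusBackend

backend = StatusBackend(
    api_url="http://127.0.0.1:3333",
    ws_url="ws://127.0.0.1:3333",
    await_signals=["node.started", "node.ready", "node.login"],
)
threading.Thread(target=backend._connect, daemon=True).start()  # signal subscription

backend.init_status_backend(data_dir="/")                       # retried up to 3 times
backend.create_account_and_login({"rootDataDir": "/tmp/status-data", "displayName": "user"})
backend.wait_for_signal("node.login")
backend.rpc_valid_request("accounts_getKeypairs")
```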


@ -0,0 +1,158 @@
import os
import random
import shutil
import signal
import string
import subprocess
import threading
import time
from src.libs.custom_logger import get_custom_logger
from src.node.clients.status_backend import StatusBackend
from src.libs.common import build_and_copy_binary
from pathlib import Path
logger = get_custom_logger(__name__)
PROJECT_ROOT = Path(__file__).resolve().parents[2]
class StatusNode:
binary_built = False
def __init__(self, name=None, port=None):
self.name = self.random_node_name() if not name else name.lower()
self.port = str(random.randint(1024, 65535)) if not port else port
self.pubkey = None
self.process = None
self.log_thread = None
self.capture_logs = True
self.logs = []
self.pid = None
await_signals = [
"history.request.started",
"messages.new",
"message.delivered",
"history.request.completed",
]
self.status_api = StatusBackend(api_url=f"http://127.0.0.1:{self.port}", ws_url=f"ws://localhost:{self.port}", await_signals=await_signals)
def initialize_node(self, name, port, data_dir, account_data):
self.name = name
self.port = port
self.start(data_dir)
self.wait_fully_started()
self.create_account_and_login(account_data)
self.start_messenger()
self.pubkey = self.get_pubkey(account_data["displayName"])
def start_node(self, command):
logger.info(f"Starting node with command: {command}")
self.process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True)
self.pid = self.process.pid
self.log_thread = self.capture_process_logs(self.process, self.logs)
def start(self, data_dir, capture_logs=True):
dest_binary_path = Path(PROJECT_ROOT) / "status-backend"
if not StatusNode.binary_built and not dest_binary_path.exists():
if not build_and_copy_binary():
raise RuntimeError("Failed to build or copy the status-backend binary.")
StatusNode.binary_built = True
self.capture_logs = capture_logs
command = ["./status-backend", f"--address=localhost:{self.port}"]
self.start_node(command)
self.wait_fully_started()
self.status_api.init_status_backend(data_dir)
self.start_signal_client()
def create_account_and_login(self, account_data):
logger.info(f"Creating account and logging in for node {self.name}")
self.status_api.create_account_and_login(account_data)
def start_messenger(self):
logger.info(f"Starting Waku messenger for node {self.name}")
self.status_api.start_messenger()
def start_signal_client(self):
websocket_thread = threading.Thread(target=self.status_api._connect)
websocket_thread.daemon = True
websocket_thread.start()
logger.info("WebSocket client started and subscribed to signals.")
def wait_fully_started(self):
logger.info(f"Waiting for {self.name} to fully start...")
start_time = time.time()
while time.time() - start_time < 30:
if any("status-backend started" in log for log in self.logs):
logger.info(f"Node {self.name} has fully started.")
return
time.sleep(0.5)
raise TimeoutError(f"Node {self.name} did not fully start in time.")
def capture_process_logs(self, process, logs):
def read_output():
while True:
line = process.stdout.readline()
if not line:
break
logs.append(line.strip())
logger.debug(f"{self.name.upper()} - {line.strip()}")
log_thread = threading.Thread(target=read_output)
log_thread.daemon = True
log_thread.start()
return log_thread
def random_node_name(self, length=10):
allowed_chars = string.ascii_lowercase + string.digits + "_-"
return "".join(random.choice(allowed_chars) for _ in range(length))
def get_pubkey(self, display_name):
response = self.status_api.rpc_request("accounts_getAccounts")
accounts = response.json().get("result", [])
for account in accounts:
if account.get("name") == display_name:
return account.get("public-key")
raise ValueError(f"Public key not found for display name: {display_name}")
def wait_for_signal(self, signal_type, timeout=20):
return self.status_api.wait_for_signal(signal_type, timeout)
def wait_for_complete_signal(self, signal_type, timeout=20):
return self.status_api.wait_for_complete_signal(signal_type, timeout)
def stop(self, remove_local_data=True):
if self.process:
logger.info(f"Stopping node with name: {self.name}")
self.process.kill()
if self.capture_logs:
self.log_thread.join()
if remove_local_data:
node_dir = f"test-{self.name}"
if os.path.exists(node_dir):
try:
shutil.rmtree(node_dir)
except Exception as ex:
logger.warning(f"Couldn't delete node dir {node_dir} because of {str(ex)}")
self.process = None
def send_contact_request(self, pubkey, message):
params = [{"id": pubkey, "message": message}]
return self.status_api.rpc_request("wakuext_sendContactRequest", params)
def send_message(self, pubkey, message):
params = [{"id": pubkey, "message": message}]
return self.status_api.rpc_request("wakuext_sendOneToOneMessage", params)
def pause_process(self):
if self.pid:
logger.info(f"Pausing node with pid: {self.pid}")
os.kill(self.pid, signal.SIGTSTP)
def resume_process(self):
if self.pid:
logger.info(f"Resuming node with pid: {self.pid}")
os.kill(self.pid, signal.SIGCONT)
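Putting the pieces together, a node is typically driven the way the steps and tests below do (sketch):

```python
# Sketch only: mirrors how src/steps/common.py and the tests drive StatusNode.
from src.node.status_node import StatusNode
from src.constants import ACCOUNT_PAYLOAD_DEFAULTS, LOCAL_DATA_DIR1

node = StatusNode(name="first_node")
node.start(data_dir=LOCAL_DATA_DIR1)  # builds/copies the status-backend binary on first use
node.create_account_and_login({**ACCOUNT_PAYLOAD_DEFAULTS, "rootDataDir": LOCAL_DATA_DIR1})
node.start_messenger()
pubkey = node.get_pubkey(ACCOUNT_PAYLOAD_DEFAULTS["displayName"])
# ... exchange contact requests / messages with a second node ...
node.stop()
```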


@ -7,7 +7,6 @@ from conftest import option
 class CustomSchemaBuilder(SchemaBuilder):
 def __init__(self, schema_name):
 super().__init__()
 self.path = f"{option.base_dir}/schemas/{schema_name}"


@ -0,0 +1,244 @@
from contextlib import contextmanager
import inspect
import subprocess
import pytest
import json
import threading
import time
from collections import namedtuple
from src.libs.common import delay
from src.libs.custom_logger import get_custom_logger
from src.node.status_node import StatusNode
from datetime import datetime
from src.constants import *
from src.node.clients.signals import SignalClient
from src.node.clients.status_backend import RpcClient, StatusBackend
from conftest import option
logger = get_custom_logger(__name__)
class StatusDTestCase:
network_id = 31337
def setup_method(self):
self.rpc_client = RpcClient(option.rpc_url_statusd)
class StatusBackendTestCase:
def setup_class(self):
self.rpc_client = StatusBackend()
self.network_id = 31337
class WalletTestCase(StatusBackendTestCase):
def wallet_create_multi_transaction(self, **kwargs):
method = "wallet_createMultiTransaction"
transfer_tx_data = {
"data": "",
"from": user_1.address,
"gas": "0x5BBF",
"input": "",
"maxFeePerGas": "0xbcc0f04fd",
"maxPriorityFeePerGas": "0xbcc0f04fd",
"to": user_2.address,
"type": "0x02",
"value": "0x5af3107a4000",
}
for key, new_value in kwargs.items():
if key in transfer_tx_data:
transfer_tx_data[key] = new_value
else:
logger.info(f"Warning: The key '{key}' does not exist in the transferTx parameters and will be ignored.")
params = [
{
"fromAddress": user_1.address,
"fromAmount": "0x5af3107a4000",
"fromAsset": "ETH",
"type": 0, # MultiTransactionSend
"toAddress": user_2.address,
"toAsset": "ETH",
},
[
{
"bridgeName": "Transfer",
"chainID": 31337,
"transferTx": transfer_tx_data,
}
],
f"{option.password}",
]
return self.rpc_client.rpc_request(method, params)
def send_valid_multi_transaction(self, **kwargs):
response = self.wallet_create_multi_transaction(**kwargs)
tx_hash = None
self.rpc_client.verify_is_valid_json_rpc_response(response)
try:
tx_hash = response.json()["result"]["hashes"][str(self.network_id)][0]
except (KeyError, json.JSONDecodeError):
raise Exception(response.content)
return tx_hash
class TransactionTestCase(WalletTestCase):
def setup_method(self):
self.tx_hash = self.send_valid_multi_transaction()
class EthRpcTestCase(WalletTestCase):
@pytest.fixture(autouse=True, scope="class")
def tx_data(self):
tx_hash = self.send_valid_multi_transaction()
self.wait_until_tx_not_pending(tx_hash)
receipt = self.get_transaction_receipt(tx_hash)
try:
block_number = receipt.json()["result"]["blockNumber"]
block_hash = receipt.json()["result"]["blockHash"]
except (KeyError, json.JSONDecodeError):
raise Exception(receipt.content)
tx_data = namedtuple("TxData", ["tx_hash", "block_number", "block_hash"])
return tx_data(tx_hash, block_number, block_hash)
def get_block_header(self, block_number):
method = "ethclient_headerByNumber"
params = [self.network_id, block_number]
return self.rpc_client.rpc_valid_request(method, params)
def get_transaction_receipt(self, tx_hash):
method = "ethclient_transactionReceipt"
params = [self.network_id, tx_hash]
return self.rpc_client.rpc_valid_request(method, params)
def wait_until_tx_not_pending(self, tx_hash, timeout=10):
method = "ethclient_transactionByHash"
params = [self.network_id, tx_hash]
response = self.rpc_client.rpc_valid_request(method, params)
start_time = time.time()
while response.json()["result"]["isPending"] == True:
time_passed = time.time() - start_time
if time_passed >= timeout:
raise TimeoutError(f"Tx {tx_hash} is still pending after {timeout} seconds")
time.sleep(0.5)
response = self.rpc_client.rpc_valid_request(method, params)
return response.json()["result"]["tx"]
class SignalTestCase(StatusDTestCase):
await_signals = []
def setup_method(self):
super().setup_method()
self.signal_client = SignalClient(option.ws_url_statusd, self.await_signals)
websocket_thread = threading.Thread(target=self.signal_client._connect)
websocket_thread.daemon = True
websocket_thread.start()
class StepsCommon:
@pytest.fixture(scope="function", autouse=False)
def start_2_nodes(self):
logger.debug(f"Running fixture setup: {inspect.currentframe().f_code.co_name}")
self.first_node_display_name = "first_node_user"
self.second_node_display_name = "second_node_user"
account_data_first = {
**ACCOUNT_PAYLOAD_DEFAULTS,
"rootDataDir": LOCAL_DATA_DIR1,
"displayName": self.first_node_display_name,
}
account_data_second = {
**ACCOUNT_PAYLOAD_DEFAULTS,
"rootDataDir": LOCAL_DATA_DIR2,
"displayName": self.second_node_display_name,
}
self.first_node = StatusNode(name="first_node")
self.first_node.start(data_dir=LOCAL_DATA_DIR1)
self.first_node.wait_fully_started()
self.second_node = StatusNode(name="second_node")
self.second_node.start(data_dir=LOCAL_DATA_DIR2)
self.second_node.wait_fully_started()
self.first_node.create_account_and_login(account_data_first)
self.second_node.create_account_and_login(account_data_second)
delay(4)
self.first_node.start_messenger()
delay(1)
self.second_node.start_messenger()
self.first_node_pubkey = self.first_node.get_pubkey(self.first_node_display_name)
self.second_node_pubkey = self.second_node.get_pubkey(self.second_node_display_name)
logger.debug(f"First Node Public Key: {self.first_node_pubkey}")
logger.debug(f"Second Node Public Key: {self.second_node_pubkey}")
@contextmanager
def add_latency(self):
logger.debug("Entering context manager: add_latency")
subprocess.Popen(
"sudo tc qdisc add dev eth0 root netem delay 1s 100ms distribution normal",
shell=True,
)
try:
yield
finally:
logger.debug(f"Exiting context manager: add_latency")
subprocess.Popen("sudo tc qdisc del dev eth0 root", shell=True)
@contextmanager
def add_packet_loss(self):
logger.debug("Entering context manager: add_packet_loss")
subprocess.Popen("sudo tc qdisc add dev eth0 root netem loss 50%", shell=True)
try:
yield
finally:
logger.debug(f"Exiting context manager: add_packet_loss")
subprocess.Popen("sudo tc qdisc del dev eth0 root netem", shell=True)
@contextmanager
def add_low_bandwidth(self):
logger.debug("Entering context manager: add_low_bandwidth")
subprocess.Popen("sudo tc qdisc add dev eth0 root tbf rate 1kbit burst 1kbit", shell=True)
try:
yield
finally:
logger.debug(f"Exiting context manager: add_low_bandwidth")
subprocess.Popen("sudo tc qdisc del dev eth0 root", shell=True)
@contextmanager
def node_pause(self, node):
logger.debug("Entering context manager: node_pause")
node.pause_process()
try:
yield
finally:
logger.debug(f"Exiting context manager: node_pause")
node.resume_process()
def send_with_timestamp(self, send_method, id, message):
timestamp = datetime.now().strftime("%H:%M:%S")
response = send_method(id, message)
response_messages = response.json().get("result", {}).get("messages", [])
message_id = None
for m in response_messages:
if m["text"] == message:
message_id = m["id"]
break
return timestamp, message_id, response
def accept_contact_request(self, sending_node=None, receiving_node_pk=None):
if not sending_node:
sending_node = self.second_node
if not receiving_node_pk:
receiving_node_pk = self.first_node_pubkey
sending_node.send_contact_request(receiving_node_pk, "hi")
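Test suites below inherit these steps and combine the `start_2_nodes` fixture with the network-impairment context managers; a sketch of that shape (hypothetical test class):

```python
# Sketch only: hypothetical test, shaped like the suites added in this commit.
import pytest
from src.steps.common import StepsCommon


@pytest.mark.usefixtures("start_2_nodes")
class TestExampleMessaging(StepsCommon):
    def test_send_one_message(self):
        self.accept_contact_request()
        with self.add_latency():  # optional network impairment around the send
            timestamp, message_id, response = self.send_with_timestamp(
                self.second_node.send_message, self.first_node_pubkey, "hello"
            )
        assert message_id is not None
```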


@ -0,0 +1,53 @@
from src.libs.custom_logger import get_custom_logger
logger = get_custom_logger(__name__)
class MessageValidator:
def __init__(self, response, contact_request=False):
self.response = response.json()
self.contact_request = contact_request
def validate_response_structure(self):
assert self.response.get("jsonrpc") == "2.0", "Invalid JSON-RPC version"
assert "result" in self.response, "Missing 'result' in response"
def validate_chat_data(self, expected_chat_id, expected_display_name, expected_text):
chats = self.response["result"].get("chats", [])
assert len(chats) > 0, "No chats found in the response"
chat = chats[0]
actual_chat_id = chat.get("id")
assert actual_chat_id == expected_chat_id, f"Chat ID mismatch: Expected '{expected_chat_id}', found '{actual_chat_id}'"
actual_chat_name = chat.get("name")
assert actual_chat_name.startswith("0x"), f"Invalid chat name format: Expected name to start with '0x', found '{actual_chat_name}'"
last_message = chat.get("lastMessage", {})
actual_text = last_message.get("text")
display_name = last_message.get("displayName")
assert actual_text == expected_text, f"Message text mismatch: Expected '{expected_text}', found '{actual_text}'"
assert display_name == expected_display_name.strip(), f"DisplayName mismatch: Expected '{expected_display_name}', found '{display_name}'"
if self.contact_request:
actual_contact_request_state = last_message.get("contactRequestState")
assert actual_contact_request_state == 1, f"Unexpected contact request state: Expected '1', found '{actual_contact_request_state}'"
assert "compressedKey" in last_message, "Missing 'compressedKey' in last message"
def validate_event_against_response(self, event, fields_to_validate):
chats_in_event = event.get("event", {}).get("chats", [])
assert len(chats_in_event) > 0, "No chats found in the event"
response_chat = self.response["result"]["chats"][0]
event_chat = chats_in_event[0]
for response_field, event_field in fields_to_validate.items():
response_value = response_chat.get("lastMessage", {}).get(response_field)
event_value = event_chat.get("lastMessage", {}).get(event_field)
assert response_value == event_value, f"Mismatch for '{response_field}': Expected '{response_value}', found '{event_value}'"
def run_all_validations(self, expected_chat_id, expected_display_name, expected_text):
self.validate_response_structure()
self.validate_chat_data(expected_chat_id, expected_display_name, expected_text)
logger.info("All validations passed for the response.")


@ -1,21 +1,18 @@
 import random
 import pytest
-from test_cases import StatusBackendTestCase
+from src.steps.common import StatusBackendTestCase
+@pytest.mark.usefixtures("init_status_backend")
 @pytest.mark.accounts
 @pytest.mark.rpc
 class TestAccounts(StatusBackendTestCase):
 @pytest.mark.parametrize(
 "method, params",
 [
 ("accounts_getKeypairs", []),
 ("accounts_hasPairedDevices", []),
-("accounts_remainingAccountCapacity", [])
+("accounts_remainingAccountCapacity", []),
 ],
 )
 def test_(self, method, params):


@ -1,142 +0,0 @@
import json
import logging
import threading
import time
from collections import namedtuple
import pytest
from clients.signals import SignalClient
from clients.status_backend import RpcClient, StatusBackend
from conftest import option
from constants import user_1, user_2
class StatusDTestCase:
network_id = 31337
def setup_method(self):
self.rpc_client = RpcClient(
option.rpc_url_statusd
)
class StatusBackendTestCase:
def setup_class(self):
self.rpc_client = StatusBackend()
self.network_id = 31337
class WalletTestCase(StatusBackendTestCase):
def wallet_create_multi_transaction(self, **kwargs):
method = "wallet_createMultiTransaction"
transfer_tx_data = {
"data": "",
"from": user_1.address,
"gas": "0x5BBF",
"input": "",
"maxFeePerGas": "0xbcc0f04fd",
"maxPriorityFeePerGas": "0xbcc0f04fd",
"to": user_2.address,
"type": "0x02",
"value": "0x5af3107a4000",
}
for key, new_value in kwargs.items():
if key in transfer_tx_data:
transfer_tx_data[key] = new_value
else:
logging.info(
f"Warning: The key '{key}' does not exist in the transferTx parameters and will be ignored.")
params = [
{
"fromAddress": user_1.address,
"fromAmount": "0x5af3107a4000",
"fromAsset": "ETH",
"type": 0, # MultiTransactionSend
"toAddress": user_2.address,
"toAsset": "ETH",
},
[
{
"bridgeName": "Transfer",
"chainID": 31337,
"transferTx": transfer_tx_data
}
],
f"{option.password}",
]
return self.rpc_client.rpc_request(method, params)
def send_valid_multi_transaction(self, **kwargs):
response = self.wallet_create_multi_transaction(**kwargs)
tx_hash = None
self.rpc_client.verify_is_valid_json_rpc_response(response)
try:
tx_hash = response.json(
)["result"]["hashes"][str(self.network_id)][0]
except (KeyError, json.JSONDecodeError):
raise Exception(response.content)
return tx_hash
class TransactionTestCase(WalletTestCase):
def setup_method(self):
self.tx_hash = self.send_valid_multi_transaction()
class EthRpcTestCase(WalletTestCase):
@pytest.fixture(autouse=True, scope='class')
def tx_data(self):
tx_hash = self.send_valid_multi_transaction()
self.wait_until_tx_not_pending(tx_hash)
receipt = self.get_transaction_receipt(tx_hash)
try:
block_number = receipt.json()["result"]["blockNumber"]
block_hash = receipt.json()["result"]["blockHash"]
except (KeyError, json.JSONDecodeError):
raise Exception(receipt.content)
tx_data = namedtuple("TxData", ["tx_hash", "block_number", "block_hash"])
return tx_data(tx_hash, block_number, block_hash)
def get_block_header(self, block_number):
method = "ethclient_headerByNumber"
params = [self.network_id, block_number]
return self.rpc_client.rpc_valid_request(method, params)
def get_transaction_receipt(self, tx_hash):
method = "ethclient_transactionReceipt"
params = [self.network_id, tx_hash]
return self.rpc_client.rpc_valid_request(method, params)
def wait_until_tx_not_pending(self, tx_hash, timeout=10):
method = "ethclient_transactionByHash"
params = [self.network_id, tx_hash]
response = self.rpc_client.rpc_valid_request(method, params)
start_time = time.time()
while response.json()["result"]["isPending"] == True:
time_passed = time.time() - start_time
if time_passed >= timeout:
raise TimeoutError(
f"Tx {tx_hash} is still pending after {timeout} seconds")
time.sleep(0.5)
response = self.rpc_client.rpc_valid_request(method, params)
return response.json()["result"]["tx"]
class SignalTestCase(StatusDTestCase):
await_signals = []
def setup_method(self):
super().setup_method()
self.signal_client = SignalClient(option.ws_url_statusd, self.await_signals)
websocket_thread = threading.Thread(target=self.signal_client._connect)
websocket_thread.daemon = True
websocket_thread.start()


@ -0,0 +1,133 @@
from uuid import uuid4
from src.constants import *
from src.libs.common import delay
from src.libs.custom_logger import get_custom_logger
from src.node.status_node import StatusNode
from src.steps.common import StepsCommon
from src.validators.message_validator import MessageValidator
logger = get_custom_logger(__name__)
class TestContactRequest(StepsCommon):
def test_contact_request_baseline(self):
timeout_secs = EVENT_SIGNAL_TIMEOUT_SEC
num_contact_requests = 1
nodes = []
for index in range(num_contact_requests):
first_node = StatusNode(name=f"first_node_{index}")
second_node = StatusNode(name=f"second_node_{index}")
data_dir_first = create_unique_data_dir(os.path.join(PROJECT_ROOT, DATA_DIR), index)
data_dir_second = create_unique_data_dir(os.path.join(PROJECT_ROOT, DATA_DIR), index)
delay(2)
first_node.start(data_dir=data_dir_first)
second_node.start(data_dir=data_dir_second)
account_data_first = {
"rootDataDir": data_dir_first,
"displayName": f"test_user_first_{index}",
"password": f"test_password_first_{index}",
"customizationColor": "primary",
}
account_data_second = {
"rootDataDir": data_dir_second,
"displayName": f"test_user_second_{index}",
"password": f"test_password_second_{index}",
"customizationColor": "primary",
}
first_node.create_account_and_login(account_data_first)
second_node.create_account_and_login(account_data_second)
delay(5)
first_node.start_messenger()
second_node.start_messenger()
first_node.pubkey = first_node.get_pubkey(account_data_first["displayName"])
second_node.pubkey = second_node.get_pubkey(account_data_second["displayName"])
first_node.wait_fully_started()
second_node.wait_fully_started()
nodes.append((first_node, second_node, account_data_first["displayName"], account_data_second["displayName"], index))
missing_contact_requests = []
for first_node, second_node, first_node_display_name, second_node_display_name, index in nodes:
result = self.send_and_wait_for_message((first_node, second_node), first_node_display_name, second_node_display_name, index, timeout_secs)
timestamp, message_id, contact_request_message, response = result
if not response:
missing_contact_requests.append((timestamp, contact_request_message, message_id))
if missing_contact_requests:
formatted_missing_requests = [f"Timestamp: {ts}, Message: {msg}, ID: {mid}" for ts, msg, mid in missing_contact_requests]
raise AssertionError(
f"{len(missing_contact_requests)} contact requests out of {num_contact_requests} didn't reach the peer node: "
+ "\n".join(formatted_missing_requests)
)
def send_and_wait_for_message(self, nodes, first_node_display_name, second_node_display_name, index, timeout=10):
first_node, second_node = nodes
first_node_pubkey = first_node.get_pubkey(first_node_display_name)
contact_request_message = f"contact_request_{index}"
timestamp, message_id, response = self.send_with_timestamp(second_node.send_contact_request, first_node_pubkey, contact_request_message)
validator = MessageValidator(response)
validator.run_all_validations(first_node_pubkey, second_node_display_name, contact_request_message)
try:
messages_new_events = first_node.wait_for_complete_signal("messages.new", timeout)
messages_new_event = None
for event in messages_new_events:
if "chats" in event.get("event", {}):
messages_new_event = event
try:
validator.validate_event_against_response(
messages_new_event,
fields_to_validate={
"text": "text",
"displayName": "displayName",
"id": "id",
},
)
break
except AssertionError as validation_error:
logger.error(f"Validation failed for event: {messages_new_event}, Error: {validation_error}")
continue
if messages_new_event is None:
raise ValueError("No 'messages.new' event with 'chats' data found within the timeout period.")
except (TimeoutError, ValueError) as e:
logger.error(f"Signal validation failed: {str(e)}")
return timestamp, message_id, contact_request_message, None
first_node.stop()
second_node.stop()
return timestamp, message_id, contact_request_message, response
def test_contact_request_with_latency(self):
with self.add_latency():
self.test_contact_request_baseline()
def test_contact_request_with_packet_loss(self):
with self.add_packet_loss():
self.test_contact_request_baseline()
def test_contact_request_with_low_bandwidth(self):
with self.add_low_bandwidth():
self.test_contact_request_baseline()
def test_contact_request_with_node_pause(self, start_2_nodes):
with self.node_pause(self.second_node):
message = str(uuid4())
self.first_node.send_contact_request(self.second_node_pubkey, message)
delay(30)
assert self.second_node.wait_for_signal("messages.new")
assert self.first_node.wait_for_signal("message.delivered")


@ -1,6 +1,6 @@
 import pytest
-from test_cases import EthRpcTestCase
+from src.steps.common import EthRpcTestCase
 def validate_header(header, block_number, block_hash):
@ -24,10 +24,10 @@ def validate_receipt(receipt, tx_hash, block_number, block_hash):
 assert receipt["blockHash"] == block_hash
+@pytest.mark.usefixtures("init_status_backend")
 @pytest.mark.rpc
 @pytest.mark.ethclient
 class TestEth(EthRpcTestCase):
 def test_block_number(self):
 self.rpc_client.rpc_valid_request("ethclient_blockNumber", [self.network_id])
@ -35,13 +35,17 @@ class TestEth(EthRpcTestCase):
 self.rpc_client.rpc_valid_request("ethclient_suggestGasPrice", [self.network_id])
 def test_header_by_number(self, tx_data):
-response = self.rpc_client.rpc_valid_request("ethclient_headerByNumber",
-[self.network_id, tx_data.block_number])
+response = self.rpc_client.rpc_valid_request("ethclient_headerByNumber", [self.network_id, tx_data.block_number])
 validate_header(response.json()["result"], tx_data.block_number, tx_data.block_hash)
 def test_block_by_number(self, tx_data):
 response = self.rpc_client.rpc_valid_request("ethclient_blockByNumber", [self.network_id, tx_data.block_number])
-validate_block(response.json()["result"], tx_data.block_number, tx_data.block_hash, tx_data.tx_hash)
+validate_block(
+response.json()["result"],
+tx_data.block_number,
+tx_data.block_hash,
+tx_data.tx_hash,
+)
 def test_header_by_hash(self, tx_data):
 response = self.rpc_client.rpc_valid_request("ethclient_headerByHash", [self.network_id, tx_data.block_hash])
@ -49,7 +53,12 @@ class TestEth(EthRpcTestCase):
 def test_block_by_hash(self, tx_data):
 response = self.rpc_client.rpc_valid_request("ethclient_blockByHash", [self.network_id, tx_data.block_hash])
-validate_block(response.json()["result"], tx_data.block_number, tx_data.block_hash, tx_data.tx_hash)
+validate_block(
+response.json()["result"],
+tx_data.block_number,
+tx_data.block_hash,
+tx_data.tx_hash,
+)
 def test_transaction_by_hash(self, tx_data):
 response = self.rpc_client.rpc_valid_request("ethclient_transactionByHash", [self.network_id, tx_data.tx_hash])
@ -57,4 +66,9 @@ class TestEth(EthRpcTestCase):
 def test_transaction_receipt(self, tx_data):
 response = self.rpc_client.rpc_valid_request("ethclient_transactionReceipt", [self.network_id, tx_data.tx_hash])
-validate_receipt(response.json()["result"], tx_data.tx_hash, tx_data.block_number, tx_data.block_hash)
+validate_receipt(
+response.json()["result"],
+tx_data.tx_hash,
+tx_data.block_number,
+tx_data.block_hash,
+)


@ -1,10 +1,10 @@
 import pytest
+@pytest.mark.usefixtures("init_status_backend")
 @pytest.mark.create_account
 @pytest.mark.rpc
 class TestInitialiseApp:
 @pytest.mark.init
 def test_init_app(self, init_status_backend):
 # this test is going to fail on every call except first since status-backend will be already initialized
@ -13,10 +13,9 @@ class TestInitialiseApp:
 assert backend_client is not None
 backend_client.verify_json_schema(
-backend_client.wait_for_signal("mediaserver.started"), "signal_mediaserver_started")
-backend_client.verify_json_schema(
-backend_client.wait_for_signal("node.started"), "signal_node_started")
-backend_client.verify_json_schema(
-backend_client.wait_for_signal("node.ready"), "signal_node_ready")
-backend_client.verify_json_schema(
-backend_client.wait_for_signal("node.login"), "signal_node_login")
+backend_client.wait_for_signal("mediaserver.started"),
+"signal_mediaserver_started",
+)
+backend_client.verify_json_schema(backend_client.wait_for_signal("node.started"), "signal_node_started")
+backend_client.verify_json_schema(backend_client.wait_for_signal("node.ready"), "signal_node_ready")
+backend_client.verify_json_schema(backend_client.wait_for_signal("node.login"), "signal_node_login")


@@ -0,0 +1,138 @@
from uuid import uuid4

import pytest

from src.constants import *
from src.libs.common import delay
from src.libs.custom_logger import get_custom_logger
from src.steps.common import StepsCommon
from src.validators.message_validator import MessageValidator

logger = get_custom_logger(__name__)


@pytest.mark.usefixtures("start_2_nodes")
class TestOneToOneMessages(StepsCommon):
    def test_one_to_one_message_baseline(self):
        timeout_secs = EVENT_SIGNAL_TIMEOUT_SEC
        num_messages = NUM_MESSAGES

        node_test_data = [
            (
                self.first_node,
                self.second_node,
                self.second_node_display_name,
                self.first_node_display_name,
            ),
            (
                self.second_node,
                self.first_node,
                self.first_node_display_name,
                self.second_node_display_name,
            ),
        ]

        messages = []
        self.accept_contact_request()

        missing_messages = []

        for i in range(num_messages):
            (
                sending_node,
                receiving_node,
                receiving_display_name,
                sending_node_display_name,
            ) = node_test_data[i % 2]

            result = self.send_and_wait_for_message(
                sending_node,
                receiving_node,
                receiving_display_name,
                sending_node_display_name,
                i,
                timeout_secs,
            )
            timestamp, message_text, message_id, response = result

            if not response:
                missing_messages.append((timestamp, message_text, message_id, sending_node.name))
            else:
                messages.append((timestamp, message_text, message_id, sending_node.name))

        self.first_node.stop()
        self.second_node.stop()

        if missing_messages:
            formatted_missing_messages = [f"Timestamp: {ts}, Message: {msg}, ID: {mid}, Sender: {snd}" for ts, msg, mid, snd in missing_messages]
            raise AssertionError(
                f"{len(missing_messages)} messages out of {num_messages} were not received: " + "\n".join(formatted_missing_messages)
            )

    def send_and_wait_for_message(
        self,
        sending_node,
        receiving_node,
        receiving_display_name,
        sending_node_display_name,
        index,
        timeout=10,
    ):
        receiving_node_pubkey = receiving_node.get_pubkey(receiving_display_name)
        message_text = f"message_from_{sending_node.name}_{index}"

        timestamp, message_id, response = self.send_with_timestamp(sending_node.send_message, receiving_node_pubkey, message_text)

        validator = MessageValidator(response)
        validator.run_all_validations(
            expected_chat_id=receiving_node_pubkey,
            expected_display_name=sending_node_display_name,
            expected_text=message_text,
        )

        try:
            messages_new_events = receiving_node.wait_for_complete_signal("messages.new", timeout)
            receiving_node.wait_for_signal("message.delivered", timeout)

            messages_new_event = None
            for event in messages_new_events:
                if "chats" in event.get("event", {}):
                    messages_new_event = event
                    try:
                        validator.validate_event_against_response(
                            messages_new_event,
                            fields_to_validate={
                                "text": "text",
                                "displayName": "displayName",
                                "id": "id",
                            },
                        )
                        break
                    except AssertionError as validation_error:
                        logger.error(f"Validation failed for event: {messages_new_event}, Error: {validation_error}")
                        continue

            if messages_new_event is None:
                raise ValueError("No 'messages.new' event with 'chats' data found within the timeout period.")
        except (TimeoutError, ValueError) as e:
            logger.error(f"Signal validation failed: {str(e)}")
            return timestamp, message_text, message_id, None

        return timestamp, message_text, message_id, response

    def test_one_to_one_message_with_latency(self):
        with self.add_latency():
            self.test_one_to_one_message_baseline()

    def test_one_to_one_message_with_packet_loss(self):
        with self.add_packet_loss():
            self.test_one_to_one_message_baseline()

    def test_one_to_one_message_with_low_bandwidth(self):
        with self.add_low_bandwidth():
            self.test_one_to_one_message_baseline()

    def test_one_to_one_message_with_node_pause_30_seconds(self):
        self.accept_contact_request()
        with self.node_pause(self.first_node):
            message = str(uuid4())
            self.second_node.send_message(self.first_node_pubkey, message)
            delay(30)
        assert self.first_node.wait_for_signal("messages.new")
        assert self.second_node.wait_for_signal("message.delivered")
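The `add_latency`, `add_packet_loss`, `add_low_bandwidth`, and `node_pause` helpers come from `StepsCommon`, which is not shown in this diff. As a rough illustration of the idea only, a latency helper could be a context manager that shells out to `tc netem` inside the node's Docker container; the container name, interface, and delay below are placeholder assumptions, not the project's actual implementation.

```python
# Hypothetical sketch only - not the StepsCommon implementation from this repo.
import subprocess
from contextlib import contextmanager


@contextmanager
def add_latency(container_name="status-backend-node", interface="eth0", delay_ms=500):
    """Add fixed egress delay inside a node's container via tc/netem, removing it on exit."""
    add_cmd = ["docker", "exec", container_name, "tc", "qdisc", "add", "dev", interface, "root", "netem", "delay", f"{delay_ms}ms"]
    del_cmd = ["docker", "exec", container_name, "tc", "qdisc", "del", "dev", interface, "root", "netem"]
    subprocess.run(add_cmd, check=True)
    try:
        yield
    finally:
        subprocess.run(del_cmd, check=False)
```

A packet-loss or bandwidth variant would swap the `netem` arguments (for example `loss 20%`), which is presumably how the three impairment tests above can all reuse the same baseline test body.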

View File

@@ -3,10 +3,11 @@ import uuid
 import pytest

 from conftest import option
-from constants import user_1, user_2
-from test_cases import SignalTestCase
+from src.constants import user_1, user_2
+from src.steps.common import SignalTestCase


+@pytest.mark.usefixtures("init_status_backend")
 @pytest.mark.rpc
 @pytest.mark.transaction
 @pytest.mark.wallet
@@ -16,11 +17,10 @@ class TestTransactionFromRoute(SignalTestCase):
         "wallet.router.sign-transactions",
         "wallet.router.sending-transactions-started",
         "wallet.transaction.status-changed",
-        "wallet.router.transactions-sent"
+        "wallet.router.transactions-sent",
     ]

     def test_tx_from_route(self):
         _uuid = str(uuid.uuid4())
         amount_in = "0xde0b6b3a7640000"
@@ -39,41 +39,30 @@ class TestTransactionFromRoute(SignalTestCase):
                 "disabledFromChainIDs": [10, 42161],
                 "disabledToChainIDs": [10, 42161],
                 "gasFeeMode": 1,
-                "fromLockedAmount": {}
+                "fromLockedAmount": {},
             }
         ]

         response = self.rpc_client.rpc_valid_request(method, params)
         routes = self.signal_client.wait_for_signal("wallet.suggested.routes")
-        assert routes['event']['Uuid'] == _uuid
+        assert routes["event"]["Uuid"] == _uuid

         method = "wallet_buildTransactionsFromRoute"
-        params = [
-            {
-                "uuid": _uuid,
-                "slippagePercentage": 0
-            }
-        ]
+        params = [{"uuid": _uuid, "slippagePercentage": 0}]

         response = self.rpc_client.rpc_valid_request(method, params)
-        wallet_router_sign_transactions = self.signal_client.wait_for_signal(
-            "wallet.router.sign-transactions")
-        assert wallet_router_sign_transactions['event']['signingDetails']['signOnKeycard'] == False
-        transaction_hashes = wallet_router_sign_transactions['event']['signingDetails']['hashes']
+        wallet_router_sign_transactions = self.signal_client.wait_for_signal("wallet.router.sign-transactions")
+        assert wallet_router_sign_transactions["event"]["signingDetails"]["signOnKeycard"] == False
+        transaction_hashes = wallet_router_sign_transactions["event"]["signingDetails"]["hashes"]
         assert transaction_hashes, "Transaction hashes are empty!"

         tx_signatures = {}

         for hash in transaction_hashes:
             method = "wallet_signMessage"
-            params = [
-                hash,
-                user_1.address,
-                option.password
-            ]
+            params = [hash, user_1.address, option.password]

             response = self.rpc_client.rpc_valid_request(method, params)
@@ -83,22 +72,16 @@ class TestTransactionFromRoute(SignalTestCase):
             signature = {
                 "r": tx_signature[:64],
                 "s": tx_signature[64:128],
-                "v": tx_signature[128:]
+                "v": tx_signature[128:],
             }

             tx_signatures[hash] = signature

         method = "wallet_sendRouterTransactionsWithSignatures"
-        params = [
-            {
-                "uuid": _uuid,
-                "Signatures": tx_signatures
-            }
-        ]
+        params = [{"uuid": _uuid, "Signatures": tx_signatures}]

         response = self.rpc_client.rpc_valid_request(method, params)
-        tx_status = self.signal_client.wait_for_signal(
-            "wallet.transaction.status-changed")
+        tx_status = self.signal_client.wait_for_signal("wallet.transaction.status-changed")

         assert tx_status["event"]["chainId"] == 31337
         assert tx_status["event"]["status"] == "Success"
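For reference, an Ethereum ECDSA signature is 65 bytes (r = 32, s = 32, v = 1), so once hex-encoded and stripped of any `0x` prefix it is 130 characters long; the loop above slices it into the `r`, `s`, and `v` components that `wallet_sendRouterTransactionsWithSignatures` expects. A standalone illustration with a dummy value:

```python
# Dummy 130-character hex string standing in for a real signature returned by wallet_signMessage.
tx_signature = "aa" * 32 + "bb" * 32 + "1b"

signature = {
    "r": tx_signature[:64],     # first 32 bytes
    "s": tx_signature[64:128],  # next 32 bytes
    "v": tx_signature[128:],    # final recovery byte
}

assert len(signature["r"]) == 64
assert len(signature["s"]) == 64
assert len(signature["v"]) == 2
```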

View File

@@ -6,11 +6,11 @@ from typing import Optional
 import pytest

 from conftest import option
-from test_cases import StatusBackendTestCase
+from src.steps.common import StatusBackendTestCase


+@pytest.mark.usefixtures("init_status_backend")
 class TestRpc(StatusBackendTestCase):
     @pytest.mark.parametrize(
         "method, params",
         [
@@ -40,17 +40,11 @@ class TestRpcMessaging(StatusBackendTestCase):
         # get chat public key
         for user in self.user_1, self.user_2:
-            response = self.rpc_client.rpc_request(
-                "accounts_getAccounts", [], _id, url=user.rpc_url
-            )
+            response = self.rpc_client.rpc_request("accounts_getAccounts", [], _id, url=user.rpc_url)
             self.rpc_client.verify_is_valid_json_rpc_response(response)
             user.chat_public_key = next(
-                (
-                    item["public-key"]
-                    for item in response.json()["result"]
-                    if item["chat"]
-                ),
+                (item["public-key"] for item in response.json()["result"] if item["chat"]),
                 None,
             )
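The reformat above only collapses the generator passed to `next()` onto one line; behaviour is unchanged. In isolation, the idiom returns the first account flagged as the chat account and falls back to the `None` default instead of raising `StopIteration` (the account dicts below are dummies, not real `accounts_getAccounts` output):

```python
# Dummy data illustrating the next()-with-default idiom used above.
accounts = [
    {"public-key": "0xwalletkey", "chat": False},
    {"public-key": "0xchatkey", "chat": True},
]

chat_public_key = next((item["public-key"] for item in accounts if item["chat"]), None)
assert chat_public_key == "0xchatkey"

# With no matching account the default is returned rather than raising StopIteration.
assert next((item["public-key"] for item in [] if item["chat"]), None) is None
```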

View File

@@ -2,18 +2,25 @@ import random
 import pytest

-from test_cases import StatusBackendTestCase
+from src.steps.common import StatusBackendTestCase


+@pytest.mark.usefixtures("init_status_backend")
 class TestProfile(StatusBackendTestCase):
     @pytest.mark.parametrize(
         "method, params",
         [
             ("wakuext_setDisplayName", ["new valid username"]),
             ("wakuext_setBio", ["some valid bio"]),
-            ("wakuext_setCustomizationColor", [{'customizationColor': 'magenta',
-                                                'keyUid': '0xea42dd9a4e668b0b76c7a5210ca81576d51cd19cdd0f6a0c22196219dc423f29'}]),
+            (
+                "wakuext_setCustomizationColor",
+                [
+                    {
+                        "customizationColor": "magenta",
+                        "keyUid": "0xea42dd9a4e668b0b76c7a5210ca81576d51cd19cdd0f6a0c22196219dc423f29",
+                    }
+                ],
+            ),
             ("wakuext_setUserStatus", [3, ""]),
             ("wakuext_setSyncingOnMobileNetwork", [{"enabled": False}]),
             ("wakuext_togglePeerSyncing", [{"enabled": True}]),

View File

@@ -5,24 +5,24 @@ import jsonschema
 import pytest

 from conftest import option
-from test_cases import StatusBackendTestCase, TransactionTestCase
+from src.steps.common import StatusBackendTestCase, TransactionTestCase


+@pytest.mark.usefixtures("init_status_backend")
 @pytest.mark.wallet
 @pytest.mark.tx
 @pytest.mark.rpc
 class TestTransactionRpc(TransactionTestCase):
     @pytest.mark.parametrize(
         "method, params",
         [
             (
                 "wallet_checkRecentHistoryForChainIDs",
                 [[31337], ["0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266"]],
             ),
             (
                 "wallet_getPendingTransactionsForIdentities",
                 [[{"chainId": None, "hash": None}]],
             ),
         ],
     )
@@ -41,43 +41,49 @@ class TestTransactionRpc(TransactionTestCase):
         self.rpc_client.verify_is_valid_json_rpc_response(response)

         # how to create schema:
-        # from schema_builder import CustomSchemaBuilder
+        # from src.schema_builder import CustomSchemaBuilder
         # CustomSchemaBuilder(method).create_schema(response.json())

-        with open(f"{option.base_dir}/schemas/wallet_createMultiTransaction/transferTx_positive", "r") as schema:
+        with open(
+            f"{option.base_dir}/schemas/wallet_createMultiTransaction/transferTx_positive",
+            "r",
+        ) as schema:
             jsonschema.validate(instance=response.json(), schema=json.load(schema))

     @pytest.mark.parametrize(
         "method, changed_values, expected_error_code, expected_error_text",
         [
             (
                 "transferTx_value_not_enough_balance",
-                {'value': '0x21e438ea8139cd35004'}, -32000, "Insufficient funds for gas",
+                {"value": "0x21e438ea8139cd35004"},
+                -32000,
+                "Insufficient funds for gas",
             ),
             (
                 "transferTx_from_from_invalid_string",
-                {'from': 'some_invalid_address'}, -32602, "cannot unmarshal hex string without 0x prefix",
+                {"from": "some_invalid_address"},
+                -32602,
+                "cannot unmarshal hex string without 0x prefix",
             ),
         ],
     )
-    def test_create_multi_transaction_validation(self, method,
-                                                 changed_values,
-                                                 expected_error_code, expected_error_text):
+    def test_create_multi_transaction_validation(self, method, changed_values, expected_error_code, expected_error_text):
         response = self.wallet_create_multi_transaction(**changed_values)
         self.rpc_client.verify_is_json_rpc_error(response)
-        actual_error_code, actual_error_text = response.json()['error']['code'], response.json()['error']['message']
-        assert expected_error_code == actual_error_code, \
-            f"got code: {actual_error_code} instead of expected: {expected_error_code}"
-        assert expected_error_text in actual_error_text, \
-            f"got error: {actual_error_text} that does not include: {expected_error_text}"
+        actual_error_code, actual_error_text = (
+            response.json()["error"]["code"],
+            response.json()["error"]["message"],
+        )
+        assert expected_error_code == actual_error_code, f"got code: {actual_error_code} instead of expected: {expected_error_code}"
+        assert expected_error_text in actual_error_text, f"got error: {actual_error_text} that does not include: {expected_error_text}"
         self.rpc_client.verify_json_schema(response.json(), "wallet_createMultiTransaction/transferTx_error")


+@pytest.mark.usefixtures("init_status_backend")
 @pytest.mark.wallet
 @pytest.mark.rpc
 class TestRpc(StatusBackendTestCase):
     @pytest.mark.parametrize(
         "method, params",
         [
@@ -85,7 +91,7 @@ class TestRpc(StatusBackendTestCase):
             ("wallet_getEthereumChains", []),
             ("wallet_getTokenList", []),
             ("wallet_getCryptoOnRamps", []),
-            ("wallet_getCachedCurrencyFormats", [])
+            ("wallet_getCachedCurrencyFormats", []),
         ],
     )
     def test_(self, method, params):