parent dd140dcb74
commit 776d2841fe

@@ -1,3 +1,4 @@
+import json
 import requests
 from abc import ABC, abstractmethod
 from src.env_vars import API_REQUEST_TIMEOUT

@@ -8,7 +9,7 @@ logger = get_custom_logger(__name__)

 class BaseClient(ABC):
     def make_request(self, method, url, headers=None, data=None):
         logger.info(f"{method.upper()} call: {url} with payload: {data}")
+        self.log_request_as_curl(method, url, headers, data)
         response = requests.request(method.upper(), url, headers=headers, data=data, timeout=API_REQUEST_TIMEOUT)
         try:
             response.raise_for_status()

@@ -22,6 +23,20 @@ class BaseClient(ABC):
         logger.info(f"Response status code: {response.status_code}. Response content: {response.content}")
         return response

+    def log_request_as_curl(self, method, url, headers, data):
+        if data:
+            try:
+                data_dict = json.loads(data)
+                if "timestamp" in data_dict:
+                    data_dict["timestamp"] = "TIMESTAMP_PLACEHOLDER"
+                data = json.dumps(data_dict)
+                data = data.replace('"TIMESTAMP_PLACEHOLDER"', "'$(date +%s%N)'")
+            except json.JSONDecodeError:
+                logger.error("Invalid JSON data provided")
+        headers_str_for_log = " ".join([f'-H "{key}: {value}"' for key, value in headers.items()]) if headers else ""
+        curl_cmd = f"curl -v -X {method.upper()} \"{url}\" {headers_str_for_log} -d '{data}'"
+        logger.info(curl_cmd)
+
     @abstractmethod
     def info(self):
         pass

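For illustration, here is a minimal standalone sketch of the curl command the new log_request_as_curl method could log; the URL, headers, and payload below are invented sample values, not taken from this commit:

import json

# Hypothetical sample request (illustrative values only).
method = "post"
url = "http://127.0.0.1:21161/relay/v1/messages/my-topic"
headers = {"Content-Type": "application/json"}
data = json.dumps({"payload": "SGVsbG8=", "timestamp": 1700000000000000000})

# Mirror the substitution the new method performs: the JSON timestamp is
# swapped for a shell expression so the logged command stays replayable later.
data_dict = json.loads(data)
if "timestamp" in data_dict:
    data_dict["timestamp"] = "TIMESTAMP_PLACEHOLDER"
data = json.dumps(data_dict)
data = data.replace('"TIMESTAMP_PLACEHOLDER"', "'$(date +%s%N)'")

headers_str_for_log = " ".join([f'-H "{key}: {value}"' for key, value in headers.items()]) if headers else ""
print(f"curl -v -X {method.upper()} \"{url}\" {headers_str_for_log} -d '{data}'")
# Expected output (single line):
# curl -v -X POST "http://127.0.0.1:21161/relay/v1/messages/my-topic" -H "Content-Type: application/json" -d '{"payload": "SGVsbG8=", "timestamp": '$(date +%s%N)'}'
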
@@ -42,13 +42,15 @@ class DockerManager:
             cli_args.append(f"--{key}={value}") # Add a single command

         port_bindings = {f"{port}/tcp": ("", port) for port in ports}
-        logger.debug(f"Starting container with image {image_name}")
-        logger.debug(f"Using args {cli_args}")
+        port_bindings_for_log = " ".join(f"-p {port}:{port}" for port in ports)
+        cli_args_str_for_log = " ".join(cli_args)
+        logger.debug(f"docker run -i -t {port_bindings_for_log} {image_name} {cli_args_str_for_log}")
         container = self._client.containers.run(
             image_name, command=cli_args, ports=port_bindings, detach=True, remove=True, auto_remove=True, volumes=volumes
         )

         network = self._client.networks.get(NETWORK_NAME)
+        logger.debug(f"docker network connect --ip {container_ip} {NETWORK_NAME} {container.id}")
         network.connect(container, ipv4_address=container_ip)

         logger.debug(f"Container started with ID {container.short_id}. Setting up logs at {log_path}")

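Similarly, a small self-contained sketch of the docker command lines the new DockerManager debug statements would print; the image name, ports, arguments, network name, and addresses are made-up examples, not values from this commit:

# Hypothetical inputs (illustrative values only).
image_name = "wakuorg/nwaku:latest"
ports = [21161, 21162]
cli_args = ["--relay=true", "--rest=true"]
container_ip = "172.18.111.226"
NETWORK_NAME = "waku"
container_id = "f3a1b2c4d5e6"  # stands in for container.id

# Same string building as the new debug lines above.
port_bindings_for_log = " ".join(f"-p {port}:{port}" for port in ports)
cli_args_str_for_log = " ".join(cli_args)
print(f"docker run -i -t {port_bindings_for_log} {image_name} {cli_args_str_for_log}")
# docker run -i -t -p 21161:21161 -p 21162:21162 wakuorg/nwaku:latest --relay=true --rest=true
print(f"docker network connect --ip {container_ip} {NETWORK_NAME} {container_id}")
# docker network connect --ip 172.18.111.226 waku f3a1b2c4d5e6
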
@@ -37,7 +37,7 @@ class StepsFilter:
     def setup_main_filter_node(self):
         logger.debug(f"Running fixture setup: {inspect.currentframe().f_code.co_name}")
         self.node2 = WakuNode(NODE_2, f"node2_{self.test_id}")
-        self.node2.start(relay="false", filter="true", discv5_bootstrap_node=self.enr_uri, filternode=self.multiaddr_with_id)
+        self.node2.start(relay="false", discv5_bootstrap_node=self.enr_uri, filternode=self.multiaddr_with_id)
         self.main_nodes.append(self.node2)

     @pytest.fixture(scope="function")

@@ -72,7 +72,7 @@ class StepsFilter:
             pytest.skip("ADDITIONAL_NODES/node_list is empty, cannot run test")
         for index, node in enumerate(nodes):
             node = WakuNode(node, f"node{index + 3}_{self.test_id}")
-            node.start(relay="false", filter="true", discv5_bootstrap_node=self.enr_uri, filternode=self.multiaddr_with_id)
+            node.start(relay="false", discv5_bootstrap_node=self.enr_uri, filternode=self.multiaddr_with_id)
             self.optional_nodes.append(node)

     @allure.step

@@ -111,9 +111,9 @@ class StepsRelay:
         sender.send_relay_message(message, pubsub_topic)
         delay(message_propagation_delay)
         for index, peer in enumerate(peer_list):
-            logger.debug(f"Checking that peer NODE_{index + 2}:{peer.image} can find the published message")
+            logger.debug(f"Checking that peer NODE_{index + 1}:{peer.image} can find the published message")
             get_messages_response = peer.get_relay_messages(pubsub_topic)
-            assert get_messages_response, f"Peer NODE_{index + 2}:{peer.image} couldn't find any messages"
+            assert get_messages_response, f"Peer NODE_{index + 1}:{peer.image} couldn't find any messages"
             assert len(get_messages_response) == 1, f"Expected 1 message but got {len(get_messages_response)}"
             waku_message = WakuMessage(get_messages_response)
             waku_message.assert_received_message(message)

@@ -9,7 +9,6 @@ logger = get_custom_logger(__name__)

 @pytest.mark.usefixtures("setup_main_relay_node", "setup_main_filter_node")
 class TestFilterMultipleNodes(StepsFilter):
-    @pytest.mark.xfail("nwaku" in NODE_2, reason="Bug reported: https://github.com/waku-org/nwaku/issues/2512")
     def test_all_nodes_subscribed_to_the_topic(self):
         self.setup_optional_filter_nodes()
         self.wait_for_subscriptions_on_main_nodes([self.test_content_topic])