metrics tests (#68)

This commit is contained in:
fbarbu15 2024-08-30 14:15:45 +03:00 committed by GitHub
parent 4855cd710c
commit 089b8eefce
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
6 changed files with 337 additions and 5 deletions

View File

@ -302,11 +302,14 @@ class WakuNode:
ws_address = next((addr for addr in addresses if "/ws" not in addr), None) ws_address = next((addr for addr in addresses if "/ws" not in addr), None)
if ws_address: if ws_address:
identifier = ws_address.split("/p2p/")[-1] identifier = ws_address.split("/p2p/")[-1]
new_address = f"/ip4/{self._ext_ip}/tcp/{self._tcp_port}/p2p/{identifier}" new_address = f"{self.get_tcp_address()}/p2p/{identifier}"
return new_address return new_address
else: else:
raise AttributeError("No '/ws' address found") raise AttributeError("No '/ws' address found")
def get_tcp_address(self):
    """Return this node's TCP multiaddress, e.g. ``/ip4/1.2.3.4/tcp/60000``.

    Built from the externally reachable IP and the TCP port assigned at
    node start; used by tests to match ``peerId`` labels in metrics.
    """
    host = self._ext_ip
    port = self._tcp_port
    return f"/ip4/{host}/tcp/{port}"
def info(self): def info(self):
return self._api.info() return self._api.info()

View File

@ -1,14 +1,17 @@
import re
from src.libs.custom_logger import get_custom_logger from src.libs.custom_logger import get_custom_logger
import allure import allure
from tenacity import retry, stop_after_delay, wait_fixed from tenacity import retry, stop_after_delay, wait_fixed
from src.test_data import METRICS_WITH_INITIAL_VALUE_ZERO
logger = get_custom_logger(__name__) logger = get_custom_logger(__name__)
class StepsMetrics: class StepsMetrics:
@allure.step @allure.step
def check_metric(self, node, metric_name, expected_value): def check_metric(self, node, metric_name, expected_value, exact=False):
logger.debug(f"Checking metric: {metric_name} has {expected_value}") logger.debug(f"Checking metric: {metric_name} has {expected_value}")
response = node.get_metrics() response = node.get_metrics()
lines = response.split("\n") lines = response.split("\n")
@ -22,7 +25,10 @@ class StepsMetrics:
if actual_value is None: if actual_value is None:
raise AttributeError(f"Metric '{metric_name}' not found") raise AttributeError(f"Metric '{metric_name}' not found")
logger.debug(f"Found metric: {metric_name} with value {actual_value}") logger.debug(f"Found metric: {metric_name} with value {actual_value}")
assert actual_value == expected_value, f"Expected value for '{metric_name}' is {expected_value}, but got {actual_value}" if exact:
assert actual_value == expected_value, f"Expected value for '{metric_name}' is {expected_value}, but got {actual_value}"
else:
assert actual_value >= expected_value, f"Expected value for '{metric_name}' is >= {expected_value}, but got {actual_value}"
@allure.step @allure.step
def wait_for_metric(self, node, metric_name, expected_value, timeout_duration=90): def wait_for_metric(self, node, metric_name, expected_value, timeout_duration=90):
@ -31,3 +37,32 @@ class StepsMetrics:
self.check_metric(node, metric_name, expected_value) self.check_metric(node, metric_name, expected_value)
check_metric_with_retry() check_metric_with_retry()
def validate_initial_metrics(self, node):
    """Scrape *node*'s metrics endpoint and validate a freshly started node's values.

    Metrics listed in METRICS_WITH_INITIAL_VALUE_ZERO must be present with a
    value of exactly 0.0; every other scraped metric must be strictly positive.
    All violations are collected first so a single scrape reports every
    problem at once instead of failing on the first one.

    Raises:
        AssertionError: listing every missing or out-of-range metric.
    """
    metrics_data = node.get_metrics()
    # Match "name{optional labels} value" exposition lines. The value part
    # accepts an optional sign and scientific notation (e.g. 1.5e+06), which
    # Prometheus clients may emit; the previous pattern ([0-9]+\.?[0-9]*)
    # silently skipped such lines and then mis-reported the metric as missing.
    metric_pattern = re.compile(
        r"^(?P<metric_name>[a-zA-Z0-9_:]+(?:{[^}]+})?)\s+(?P<value>[-+]?[0-9]*\.?[0-9]+(?:[eE][-+]?[0-9]+)?)$",
        re.MULTILINE,
    )
    # Map each metric (including its label set, if any) to its float value.
    metrics_dict = {match.group("metric_name"): float(match.group("value")) for match in metric_pattern.finditer(metrics_data)}
    errors = []
    # Metrics expected to start at zero must be present and exactly 0.0.
    for metric in METRICS_WITH_INITIAL_VALUE_ZERO:
        if metric not in metrics_dict:
            errors.append(f"Metric {metric} is missing from the metrics data")
        elif metrics_dict[metric] != 0.0:
            errors.append(f"Expected {metric} to be 0.0, but got {metrics_dict[metric]}")
    # Every other metric should already have accumulated a positive value.
    for metric, value in metrics_dict.items():
        if metric not in METRICS_WITH_INITIAL_VALUE_ZERO and value <= 0.0:
            errors.append(f"Expected {metric} to have a positive value, but got {value}")
    assert not errors, "Metrics validation failed:\n" + "\n".join(errors)
    logger.debug("All metrics are present and have valid values.")

View File

@ -33,9 +33,10 @@ class StepsStore(StepsCommon):
@pytest.fixture(scope="function", autouse=False) @pytest.fixture(scope="function", autouse=False)
def node_setup(self, store_setup): def node_setup(self, store_setup):
logger.debug(f"Running fixture setup: {inspect.currentframe().f_code.co_name}")
self.setup_first_publishing_node(store="true", relay="true") self.setup_first_publishing_node(store="true", relay="true")
self.setup_first_store_node(store="true", relay="true") self.setup_first_store_node(store="true", relay="true")
self.subscribe_to_pubsub_topics_via_relay() self.subscribe_to_pubsub_topics_via_relay(node=self.main_publishing_nodes)
@allure.step @allure.step
def start_publishing_node(self, image, node_index, **kwargs): def start_publishing_node(self, image, node_index, **kwargs):

View File

@ -177,7 +177,6 @@ LOG_ERROR_KEYWORDS = [
"abort", "abort",
"segfault", "segfault",
"corrupt", "corrupt",
"unreachable",
"terminated", "terminated",
"oom", "oom",
"unhandled", "unhandled",
@ -195,3 +194,183 @@ LOG_ERROR_KEYWORDS = [
"race condition", "race condition",
"double free", "double free",
] ]
# Metrics that a freshly started nwaku node is expected to expose with a value
# of exactly 0.0 (no traffic, no peers yet). Entries with a label set (e.g.
# histogram buckets like '...{le="1.0"}') must match the scraped metric name
# including its labels verbatim. Consumed by StepsMetrics.validate_initial_metrics.
METRICS_WITH_INITIAL_VALUE_ZERO = [
    # libp2p connection/dial counters
    "libp2p_peers",
    "libp2p_failed_upgrades_incoming_total",
    "libp2p_failed_upgrades_outgoing_total",
    "libp2p_total_dial_attempts_total",
    "libp2p_successful_dials_total",
    "libp2p_failed_dials_total",
    # RLN message validation counters and duration histograms
    "waku_rln_messages_total_total",
    "waku_rln_spam_messages_total_total",
    "waku_rln_valid_messages_total_sum",
    "waku_rln_valid_messages_total_count",
    'waku_rln_valid_messages_total_bucket{le="1.0"}',
    'waku_rln_valid_messages_total_bucket{le="2.0"}',
    'waku_rln_valid_messages_total_bucket{le="3.0"}',
    'waku_rln_valid_messages_total_bucket{le="4.0"}',
    'waku_rln_valid_messages_total_bucket{le="5.0"}',
    'waku_rln_valid_messages_total_bucket{le="+Inf"}',
    "waku_rln_proof_verification_total_total",
    "waku_rln_number_registered_memberships",
    "waku_rln_proof_verification_duration_seconds",
    "waku_rln_proof_generation_duration_seconds",
    "waku_rln_instance_creation_duration_seconds",
    "waku_rln_membership_insertion_duration_seconds",
    "waku_rln_membership_credentials_import_duration_seconds",
    # libp2p pubsub / gossipsub state and counters
    "libp2p_pubsub_sig_verify_success_total",
    "libp2p_pubsub_sig_verify_failure_total",
    "libp2p_pubsub_disconnects_over_non_priority_queue_limit_total",
    "libp2p_pubsub_peers",
    "libp2p_pubsub_topics",
    "libp2p_pubsub_subscriptions_total",
    "libp2p_pubsub_unsubscriptions_total",
    "libp2p_pubsub_validation_success_total",
    "libp2p_pubsub_validation_failure_total",
    "libp2p_pubsub_validation_ignore_total",
    "libp2p_pubsub_broadcast_iwant_total",
    "libp2p_pubsub_received_iwant_total",
    "libp2p_gossipsub_cache_window_size",
    'libp2p_gossipsub_peers_per_topic_mesh{topic="other"}',
    'libp2p_gossipsub_peers_per_topic_fanout{topic="other"}',
    'libp2p_gossipsub_peers_per_topic_gossipsub{topic="other"}',
    "libp2p_gossipsub_under_dout_topics",
    "libp2p_gossipsub_no_peers_topics",
    "libp2p_gossipsub_low_peers_topics",
    "libp2p_gossipsub_healthy_peers_topics",
    "libp2p_gossipsub_failed_publish_total",
    "libp2p_gossipsub_invalid_topic_subscription_total",
    "libp2p_gossipsub_duplicate_during_validation_total",
    "libp2p_gossipsub_idontwant_saved_messages_total",
    "libp2p_gossipsub_duplicate_total",
    "libp2p_gossipsub_received_total",
    # rendezvous protocol counters
    "libp2p_rendezvous_register_total",
    "libp2p_rendezvous_discover_total",
    "libp2p_rendezvous_registered",
    "libp2p_rendezvous_namespaces",
    # peer store and REST server counters
    "waku_peer_store_size",
    "waku_total_unique_peers",
    "presto_server_missing_requests_count",
    "presto_server_invalid_requests_count",
    # archive (store) counters and duration histograms
    'waku_archive_messages{type="stored"}',
    "waku_archive_queries",
    "waku_archive_insert_duration_seconds_sum",
    "waku_archive_insert_duration_seconds_count",
    'waku_archive_insert_duration_seconds_bucket{le="0.005"}',
    'waku_archive_insert_duration_seconds_bucket{le="0.01"}',
    'waku_archive_insert_duration_seconds_bucket{le="0.025"}',
    'waku_archive_insert_duration_seconds_bucket{le="0.05"}',
    'waku_archive_insert_duration_seconds_bucket{le="0.075"}',
    'waku_archive_insert_duration_seconds_bucket{le="0.1"}',
    'waku_archive_insert_duration_seconds_bucket{le="0.25"}',
    'waku_archive_insert_duration_seconds_bucket{le="0.5"}',
    'waku_archive_insert_duration_seconds_bucket{le="0.75"}',
    'waku_archive_insert_duration_seconds_bucket{le="1.0"}',
    'waku_archive_insert_duration_seconds_bucket{le="2.5"}',
    'waku_archive_insert_duration_seconds_bucket{le="5.0"}',
    'waku_archive_insert_duration_seconds_bucket{le="7.5"}',
    'waku_archive_insert_duration_seconds_bucket{le="10.0"}',
    'waku_archive_insert_duration_seconds_bucket{le="+Inf"}',
    "waku_archive_query_duration_seconds_sum",
    "waku_archive_query_duration_seconds_count",
    'waku_archive_query_duration_seconds_bucket{le="0.005"}',
    'waku_archive_query_duration_seconds_bucket{le="0.01"}',
    'waku_archive_query_duration_seconds_bucket{le="0.025"}',
    'waku_archive_query_duration_seconds_bucket{le="0.05"}',
    'waku_archive_query_duration_seconds_bucket{le="0.075"}',
    'waku_archive_query_duration_seconds_bucket{le="0.1"}',
    'waku_archive_query_duration_seconds_bucket{le="0.25"}',
    'waku_archive_query_duration_seconds_bucket{le="0.5"}',
    'waku_archive_query_duration_seconds_bucket{le="0.75"}',
    'waku_archive_query_duration_seconds_bucket{le="1.0"}',
    'waku_archive_query_duration_seconds_bucket{le="2.5"}',
    'waku_archive_query_duration_seconds_bucket{le="5.0"}',
    'waku_archive_query_duration_seconds_bucket{le="7.5"}',
    'waku_archive_query_duration_seconds_bucket{le="10.0"}',
    'waku_archive_query_duration_seconds_bucket{le="+Inf"}',
    # legacy archive counters and duration histograms
    "waku_legacy_archive_queries",
    "waku_legacy_archive_insert_duration_seconds_sum",
    "waku_legacy_archive_insert_duration_seconds_count",
    'waku_legacy_archive_insert_duration_seconds_bucket{le="0.005"}',
    'waku_legacy_archive_insert_duration_seconds_bucket{le="0.01"}',
    'waku_legacy_archive_insert_duration_seconds_bucket{le="0.025"}',
    'waku_legacy_archive_insert_duration_seconds_bucket{le="0.05"}',
    'waku_legacy_archive_insert_duration_seconds_bucket{le="0.075"}',
    'waku_legacy_archive_insert_duration_seconds_bucket{le="0.1"}',
    'waku_legacy_archive_insert_duration_seconds_bucket{le="0.25"}',
    'waku_legacy_archive_insert_duration_seconds_bucket{le="0.5"}',
    'waku_legacy_archive_insert_duration_seconds_bucket{le="0.75"}',
    'waku_legacy_archive_insert_duration_seconds_bucket{le="1.0"}',
    'waku_legacy_archive_insert_duration_seconds_bucket{le="2.5"}',
    'waku_legacy_archive_insert_duration_seconds_bucket{le="5.0"}',
    'waku_legacy_archive_insert_duration_seconds_bucket{le="7.5"}',
    'waku_legacy_archive_insert_duration_seconds_bucket{le="10.0"}',
    'waku_legacy_archive_insert_duration_seconds_bucket{le="+Inf"}',
    "waku_legacy_archive_query_duration_seconds_sum",
    "waku_legacy_archive_query_duration_seconds_count",
    'waku_legacy_archive_query_duration_seconds_bucket{le="0.005"}',
    'waku_legacy_archive_query_duration_seconds_bucket{le="0.01"}',
    'waku_legacy_archive_query_duration_seconds_bucket{le="0.025"}',
    'waku_legacy_archive_query_duration_seconds_bucket{le="0.05"}',
    'waku_legacy_archive_query_duration_seconds_bucket{le="0.075"}',
    'waku_legacy_archive_query_duration_seconds_bucket{le="0.1"}',
    'waku_legacy_archive_query_duration_seconds_bucket{le="0.25"}',
    'waku_legacy_archive_query_duration_seconds_bucket{le="0.5"}',
    'waku_legacy_archive_query_duration_seconds_bucket{le="0.75"}',
    'waku_legacy_archive_query_duration_seconds_bucket{le="1.0"}',
    'waku_legacy_archive_query_duration_seconds_bucket{le="2.5"}',
    'waku_legacy_archive_query_duration_seconds_bucket{le="5.0"}',
    'waku_legacy_archive_query_duration_seconds_bucket{le="7.5"}',
    'waku_legacy_archive_query_duration_seconds_bucket{le="10.0"}',
    'waku_legacy_archive_query_duration_seconds_bucket{le="+Inf"}',
    # store and filter protocol counters
    "waku_legacy_store_queries",
    "waku_store_queries",
    "waku_filter_subscriptions",
    "waku_filter_handle_message_duration_seconds_sum",
    "waku_filter_handle_message_duration_seconds_count",
    'waku_filter_handle_message_duration_seconds_bucket{le="0.005"}',
    'waku_filter_handle_message_duration_seconds_bucket{le="0.01"}',
    'waku_filter_handle_message_duration_seconds_bucket{le="0.025"}',
    'waku_filter_handle_message_duration_seconds_bucket{le="0.05"}',
    'waku_filter_handle_message_duration_seconds_bucket{le="0.075"}',
    'waku_filter_handle_message_duration_seconds_bucket{le="0.1"}',
    'waku_filter_handle_message_duration_seconds_bucket{le="0.25"}',
    'waku_filter_handle_message_duration_seconds_bucket{le="0.5"}',
    'waku_filter_handle_message_duration_seconds_bucket{le="0.75"}',
    'waku_filter_handle_message_duration_seconds_bucket{le="1.0"}',
    'waku_filter_handle_message_duration_seconds_bucket{le="2.5"}',
    'waku_filter_handle_message_duration_seconds_bucket{le="5.0"}',
    'waku_filter_handle_message_duration_seconds_bucket{le="7.5"}',
    'waku_filter_handle_message_duration_seconds_bucket{le="10.0"}',
    'waku_filter_handle_message_duration_seconds_bucket{le="+Inf"}',
    # discovery (discv5 / peer exchange / DNS) counters
    "discovery_session_lru_cache_hits_total",
    "discovery_session_lru_cache_misses_total",
    "discovery_session_decrypt_failures_total",
    "discovery_unsolicited_messages_total",
    "discovery_enr_auto_update_total",
    "waku_discv5_discovered",
    "waku_px_peers_received_total",
    "waku_px_peers_received_unknown",
    "waku_px_peers_sent",
    "waku_px_peers_cached",
    # message size histogram
    "waku_histogram_message_size_sum",
    "waku_histogram_message_size_count",
    'waku_histogram_message_size_bucket{le="0.0"}',
    'waku_histogram_message_size_bucket{le="5.0"}',
    'waku_histogram_message_size_bucket{le="15.0"}',
    'waku_histogram_message_size_bucket{le="50.0"}',
    'waku_histogram_message_size_bucket{le="75.0"}',
    'waku_histogram_message_size_bucket{le="100.0"}',
    'waku_histogram_message_size_bucket{le="125.0"}',
    'waku_histogram_message_size_bucket{le="150.0"}',
    'waku_histogram_message_size_bucket{le="300.0"}',
    'waku_histogram_message_size_bucket{le="700.0"}',
    'waku_histogram_message_size_bucket{le="1000.0"}',
    'waku_histogram_message_size_bucket{le="+Inf"}',
    # per-protocol peer gauges
    "waku_lightpush_peers",
    "waku_filter_peers",
    "waku_store_peers",
    "waku_px_peers",
    "waku_dnsdisc_discovered",
]

View File

View File

@ -0,0 +1,114 @@
import pytest
from src.env_vars import DEFAULT_NWAKU
from src.libs.common import delay
from src.node.waku_node import WakuNode
from src.steps.filter import StepsFilter
from src.steps.light_push import StepsLightPush
from src.steps.metrics import StepsMetrics
from src.steps.relay import StepsRelay
from src.steps.store import StepsStore
class TestMetrics(StepsRelay, StepsMetrics, StepsFilter, StepsLightPush, StepsStore):
    """End-to-end checks that a node exposes the expected Prometheus metrics
    after startup and after exercising relay, filter, lightpush, and store."""

    def test_metrics_initial_value(self):
        # Start a fresh node with all protocols enabled and verify every metric
        # reports its expected initial value (zero-list vs. positive — see
        # StepsMetrics.validate_initial_metrics).
        node = WakuNode(DEFAULT_NWAKU, f"node1_{self.test_id}")
        node.start(relay="true", filter="true", store="true", lightpush="true")
        delay(5)  # give the node time to initialize and register its metrics
        self.validate_initial_metrics(node)

    @pytest.mark.usefixtures("setup_main_relay_nodes", "subscribe_main_relay_nodes", "relay_warm_up")
    def test_metrics_after_relay_publish(self):
        # Publish one relay message and confirm peer/topic/message counters moved.
        self.node1.send_relay_message(self.create_message(), self.test_pubsub_topic)
        delay(0.5)
        self.node2.get_relay_messages(self.test_pubsub_topic)
        delay(5)  # allow metric counters to be updated after the exchange
        for node in self.main_nodes:
            if node.is_nwaku():  # these metric names are nwaku-specific
                self.check_metric(node, "libp2p_peers", 1)
                self.check_metric(node, "libp2p_pubsub_peers", 1)
                self.check_metric(node, "libp2p_pubsub_topics", 1)
                self.check_metric(node, "libp2p_pubsub_subscriptions_total", 1)
                self.check_metric(node, 'libp2p_gossipsub_peers_per_topic_mesh{topic="other"}', 1)
                self.check_metric(node, "waku_peer_store_size", 1)
                self.check_metric(node, "waku_histogram_message_size_count", 1)
                self.check_metric(node, 'waku_node_messages_total{type="relay"}', 1)

    @pytest.mark.usefixtures("setup_main_relay_node", "setup_main_filter_node", "subscribe_main_nodes")
    def test_metrics_after_filter_get(self):
        # Relay-publish on node1, fetch via filter on node2, then verify both
        # the relay-side and filter-side counters.
        message = self.create_message()
        self.node1.send_relay_message(message, self.test_pubsub_topic)
        delay(0.5)
        self.get_filter_messages(message["contentTopic"], pubsub_topic=self.test_pubsub_topic, node=self.node2)
        delay(5)  # allow metric counters to be updated
        self.check_metric(self.node1, "libp2p_peers", 1)
        self.check_metric(self.node1, "libp2p_pubsub_peers", 1)
        self.check_metric(self.node1, "libp2p_pubsub_topics", 1)
        self.check_metric(self.node1, "libp2p_pubsub_subscriptions_total", 1)
        self.check_metric(self.node1, "waku_peer_store_size", 1)
        self.check_metric(self.node1, "waku_histogram_message_size_count", 1)
        self.check_metric(self.node1, 'waku_node_messages_total{type="relay"}', 1)
        self.check_metric(self.node1, 'waku_filter_requests{type="SUBSCRIBE"}', 1)
        if self.node2.is_nwaku():  # service-peer gauge labels are nwaku-specific
            # peerId label carries node1's TCP multiaddress (see WakuNode.get_tcp_address)
            self.check_metric(
                self.node2, f'waku_service_peers{{protocol="/vac/waku/filter-subscribe/2.0.0-beta1",peerId="{self.node1.get_tcp_address()}"}}', 1
            )
            self.check_metric(self.node2, "libp2p_peers", 1)
            self.check_metric(self.node2, "libp2p_total_dial_attempts_total", 1)
            self.check_metric(self.node2, "waku_peer_store_size", 1)

    def test_metrics_after_light_push(self):
        # Push a message through a lightpush client and verify counters on both
        # the lightpush client and the relay node that received the message.
        self.setup_first_receiving_node()
        self.setup_second_receiving_node(lightpush="false", relay="true")
        self.setup_first_lightpush_node()
        self.subscribe_to_pubsub_topics_via_relay()
        payload = self.create_payload(self.test_pubsub_topic)
        self.light_push_node1.send_light_push_message(payload)
        delay(0.5)
        self.receiving_node1.get_relay_messages(self.test_pubsub_topic)
        delay(5)  # allow metric counters to be updated
        if self.light_push_node1.is_nwaku():
            # lightpush client should track receiving_node1 as its service peer
            self.check_metric(
                self.light_push_node1,
                f'waku_service_peers{{protocol="/vac/waku/lightpush/2.0.0-beta1",peerId="{self.receiving_node1.get_tcp_address()}"}}',
                1,
            )
            self.check_metric(self.light_push_node1, "libp2p_peers", 1)
            self.check_metric(self.light_push_node1, "waku_peer_store_size", 1)
        if self.receiving_node1.is_nwaku():
            self.check_metric(self.receiving_node1, "libp2p_peers", 1)
            self.check_metric(self.receiving_node1, "libp2p_pubsub_peers", 1)
            self.check_metric(self.receiving_node1, "libp2p_pubsub_topics", 1)
            self.check_metric(self.receiving_node1, "libp2p_pubsub_subscriptions_total", 1)
            self.check_metric(self.receiving_node1, "waku_peer_store_size", 1)
            self.check_metric(self.receiving_node1, "waku_histogram_message_size_count", 1)
            self.check_metric(self.receiving_node1, 'waku_node_messages_total{type="relay"}', 1)

    def test_metrics_after_store_get(self, node_setup):
        # Publish via relay, confirm the message is stored, then verify counters
        # on the publishing node and the store node.
        self.publish_message(message=self.create_message())
        self.check_published_message_is_stored(page_size=50, ascending="true")
        delay(5)  # allow metric counters to be updated
        self.check_metric(self.publishing_node1, "libp2p_peers", 1)
        self.check_metric(self.publishing_node1, "libp2p_pubsub_peers", 1)
        self.check_metric(self.publishing_node1, "libp2p_pubsub_topics", 1)
        self.check_metric(self.publishing_node1, "libp2p_pubsub_subscriptions_total", 1)
        self.check_metric(self.publishing_node1, "waku_peer_store_size", 1)
        self.check_metric(self.publishing_node1, "waku_histogram_message_size_count", 1)
        self.check_metric(self.publishing_node1, 'waku_node_messages_total{type="relay"}', 1)
        if self.store_node1.is_nwaku():
            # store node should track the publisher as a service peer for both
            # the legacy store protocol and the v3 store-query protocol
            self.check_metric(
                self.store_node1,
                f'waku_service_peers{{protocol="/vac/waku/store/2.0.0-beta4",peerId="{self.publishing_node1.get_tcp_address()}"}}',
                1,
            )
            self.check_metric(
                self.store_node1,
                f'waku_service_peers{{protocol="/vac/waku/store-query/3.0.0",peerId="{self.publishing_node1.get_tcp_address()}"}}',
                1,
            )
            self.check_metric(self.store_node1, "libp2p_peers", 1)
            self.check_metric(self.store_node1, "libp2p_pubsub_peers", 1)
            self.check_metric(self.store_node1, "libp2p_pubsub_topics", 1)
            self.check_metric(self.store_node1, "libp2p_pubsub_subscriptions_total", 1)
            self.check_metric(self.store_node1, "waku_peer_store_size", 1)
            self.check_metric(self.store_node1, "waku_histogram_message_size_count", 1)
            self.check_metric(self.store_node1, 'waku_node_messages_total{type="relay"}', 1)