From 6398249ad224f385f1192395812e05101408ebdc Mon Sep 17 00:00:00 2001
From: Florin Barbu
Date: Tue, 21 May 2024 13:43:21 +0300
Subject: [PATCH] more tests

---
 src/node/waku_node.py            |   2 +-
 src/test_data.py                 |  15 +++++
 tests/store/test_cursor.py       | 101 +++++++++++++++++++++++++++++--
 tests/store/test_ephemeral.py    |   2 -
 tests/store/test_get_messages.py |   4 +-
 tests/store/test_page_size.py    |  35 +++++++++++
 tests/store/test_reliability.py  |  16 ++--
 tests/store/test_sorting.py      |  23 +++++++
 tests/store/test_time_filter.py  |  34 +++++++----
 9 files changed, 204 insertions(+), 28 deletions(-)
 create mode 100644 tests/store/test_page_size.py
 create mode 100644 tests/store/test_sorting.py

diff --git a/src/node/waku_node.py b/src/node/waku_node.py
index a5cb1758..f925cd6c 100644
--- a/src/node/waku_node.py
+++ b/src/node/waku_node.py
@@ -302,7 +302,7 @@ class WakuNode:
         cursor=None,
         page_size=None,
         ascending=None,
-        store_v=None,
+        store_v="v3",
         **kwargs,
     ):
         return self._api.get_store_messages(
diff --git a/src/test_data.py b/src/test_data.py
index bf52edd2..b0a62928 100644
--- a/src/test_data.py
+++ b/src/test_data.py
@@ -149,4 +149,19 @@ SAMPLE_TIMESTAMPS = [
     {"description": "Missing", "value": None, "valid_for": ["gowaku"]},
 ]
 
+
+STORE_TIMESTAMPS_PASS = [
+    {"description": "3 sec Past", "value": int((datetime.now() - timedelta(seconds=3)).timestamp() * 1e9)},
+    {"description": "1 sec Past", "value": int((datetime.now() - timedelta(seconds=1)).timestamp() * 1e9)},
+    {"description": "0.1 sec Past", "value": int((datetime.now() - timedelta(seconds=0.1)).timestamp() * 1e9)},
+    {"description": "0.1 sec Future", "value": int((datetime.now() + timedelta(seconds=0.1)).timestamp() * 1e9)},
+    {"description": "2 sec Future", "value": int((datetime.now() + timedelta(seconds=2)).timestamp() * 1e9)},
+    {"description": "10 sec Future", "value": int((datetime.now() + timedelta(seconds=10)).timestamp() * 1e9)},
+]
+
+STORE_TIMESTAMPS_FAIL = [
+    {"description": "20 sec Past", "value": int((datetime.now() - timedelta(seconds=20)).timestamp() * 1e9)},
+    {"description": "40 sec Future", "value": int((datetime.now() + timedelta(seconds=40)).timestamp() * 1e9)},
+]
+
 PUBSUB_TOPICS_RLN = ["/waku/2/rs/1/0"]
diff --git a/tests/store/test_cursor.py b/tests/store/test_cursor.py
index 0db71fc4..8ada2afc 100644
--- a/tests/store/test_cursor.py
+++ b/tests/store/test_cursor.py
@@ -1,13 +1,106 @@
 import pytest
+from src.env_vars import NODE_1
 from src.libs.common import to_base64
 from src.steps.store import StepsStore
 
 
+@pytest.mark.xfail("nwaku" in NODE_1, reason="Bug reported: https://github.com/waku-org/nwaku/issues/2715")
 @pytest.mark.usefixtures("node_setup")
 class TestCursor(StepsStore):
+    # we implicitly test the reusability of the cursor for multiple nodes
+
     def test_get_multiple_2000_store_messages(self):
-        for i in range(110):
-            self.publish_message(message=self.create_message(payload=to_base64(f"Message_{i}")), message_propagation_delay=0.001)
+        expected_message_hash_list = []
+        for i in range(2000):
+            message = self.create_message(payload=to_base64(f"Message_{i}"))
+            self.publish_message(message=message, message_propagation_delay=0.01)
+            expected_message_hash_list.append(self.compute_message_hash(self.test_pubsub_topic, message))
+        store_response = {"paginationCursor": {"data": ""}}
+        response_message_hash_list = []
+        while "paginationCursor" in store_response:
+            cursor = store_response["paginationCursor"]["data"]
+            store_response = self.store_node1.get_store_messages(pubsub_topic=self.test_pubsub_topic, page_size=100, ascending="true", cursor=cursor)
+            for message in store_response["messages"]:
+                response_message_hash_list.append(message["messageHash"]["data"])
+        assert len(expected_message_hash_list) == len(response_message_hash_list), "Message count mismatch"
+        assert expected_message_hash_list == response_message_hash_list, "Message hash mismatch"
+
+    @pytest.mark.parametrize("cursor_index, message_count", [[2, 4], [3, 20], [10, 40], [19, 20], [19, 50], [110, 120]])
+    def test_different_cursor_and_indexes(self, cursor_index, message_count):
+        message_hash_list = []
+        cursor = ""
+        cursor_index = cursor_index if cursor_index < 100 else 100
+        for i in range(message_count):
+            message = self.create_message(payload=to_base64(f"Message_{i}"))
+            self.publish_message(message=message, message_propagation_delay=0.01)
+            message_hash_list.append(self.compute_message_hash(self.test_pubsub_topic, message))
         for node in self.store_nodes:
-            store_response = node.get_store_messages(pubsubTopic=self.test_pubsub_topic, page_size=50, ascending="true", store_v="v3")
-            print(len(store_response["messages"]))
+            store_response = node.get_store_messages(pubsub_topic=self.test_pubsub_topic, page_size=cursor_index, ascending="true")
+            assert len(store_response["messages"]) == cursor_index
+            cursor = store_response["paginationCursor"]["data"]
+        for node in self.store_nodes:
+            store_response = node.get_store_messages(pubsub_topic=self.test_pubsub_topic, page_size=100, ascending="true", cursor=cursor)
+            assert len(store_response["messages"]) == message_count - cursor_index
+            for index, message_hash in enumerate(store_response["messages"]):
+                assert message_hash["messageHash"]["data"] == message_hash_list[cursor_index + index], f"Message hash at index {index} doesn't match"
+
+    def test_passing_cursor_not_returned_in_paginationCursor(self):
+        cursor = ""
+        for i in range(10):
+            self.publish_message(message=self.create_message(payload=to_base64(f"Message_{i}")), message_propagation_delay=0.01)
+        for node in self.store_nodes:
+            store_response = node.get_store_messages(pubsub_topic=self.test_pubsub_topic, page_size=5, ascending="true")
+            # retrieving the cursor with the message hash of the 3rd message stored
+            cursor = store_response["messages"][2]["messageHash"]["data"]
+        for node in self.store_nodes:
+            store_response = node.get_store_messages(pubsub_topic=self.test_pubsub_topic, page_size=100, ascending="true", cursor=cursor)
+            assert len(store_response["messages"]) == 7, "Message count mismatch"
+
+    def test_passing_cursor_of_the_last_message_from_the_store(self):
+        cursor = ""
+        for i in range(10):
+            self.publish_message(message=self.create_message(payload=to_base64(f"Message_{i}")), message_propagation_delay=0.01)
+        for node in self.store_nodes:
+            store_response = node.get_store_messages(pubsub_topic=self.test_pubsub_topic, page_size=10, ascending="true")
+            # retrieving the cursor with the message hash of the last message stored
+            cursor = store_response["messages"][9]["messageHash"]["data"]
+        for node in self.store_nodes:
+            store_response = node.get_store_messages(pubsub_topic=self.test_pubsub_topic, page_size=100, ascending="true", cursor=cursor)
+            assert len(store_response["messages"]) == 0, "Message count mismatch"
+
+    @pytest.mark.xfail("nwaku" in NODE_1, reason="Bug reported: https://github.com/waku-org/nwaku/issues/2716")
+    def test_passing_cursor_of_non_existing_message_from_the_store(self):
+        for i in range(4):
+            self.publish_message(message=self.create_message(payload=to_base64(f"Message_{i}")), message_propagation_delay=0.01)
+        for node in self.store_nodes:
+            store_response = node.get_store_messages(pubsub_topic=self.test_pubsub_topic, page_size=10, ascending="true")
+        # creating a cursor to a message that doesn't exist
+        wrong_message = self.create_message(payload=to_base64("test"))
+        cursor = self.compute_message_hash(self.test_pubsub_topic, wrong_message)
+        for node in self.store_nodes:
+            store_response = node.get_store_messages(pubsub_topic=self.test_pubsub_topic, page_size=100, ascending="true", cursor=cursor)
+            assert len(store_response["messages"]) == 0, "Message count mismatch"
+
+    @pytest.mark.xfail("nwaku" in NODE_1, reason="Bug reported: https://github.com/waku-org/nwaku/issues/2717")
+    def test_passing_invalid_cursor(self):
+        for i in range(4):
+            self.publish_message(message=self.create_message(payload=to_base64(f"Message_{i}")), message_propagation_delay=0.01)
+        for node in self.store_nodes:
+            store_response = node.get_store_messages(pubsub_topic=self.test_pubsub_topic, page_size=10, ascending="true")
+        # creating an invalid base64 cursor
+        cursor = to_base64("test")
+        for node in self.store_nodes:
+            store_response = node.get_store_messages(pubsub_topic=self.test_pubsub_topic, page_size=100, ascending="true", cursor=cursor)
+            assert len(store_response["messages"]) == 0, "Message count mismatch"
+
+    @pytest.mark.xfail("nwaku" in NODE_1, reason="Bug reported: https://github.com/waku-org/nwaku/issues/2717")
+    def test_passing_non_base64_cursor(self):
+        for i in range(4):
+            self.publish_message(message=self.create_message(payload=to_base64(f"Message_{i}")), message_propagation_delay=0.01)
+        for node in self.store_nodes:
+            store_response = node.get_store_messages(pubsub_topic=self.test_pubsub_topic, page_size=10, ascending="true")
+        # creating a non-base64 cursor
+        cursor = "test"
+        for node in self.store_nodes:
+            store_response = node.get_store_messages(pubsub_topic=self.test_pubsub_topic, page_size=100, ascending="true", cursor=cursor)
+            assert len(store_response["messages"]) == 0, "Message count mismatch"
diff --git a/tests/store/test_ephemeral.py b/tests/store/test_ephemeral.py
index fb99372c..d96da561 100644
--- a/tests/store/test_ephemeral.py
+++ b/tests/store/test_ephemeral.py
@@ -1,8 +1,6 @@
 import pytest
 from src.libs.custom_logger import get_custom_logger
-from src.libs.common import to_base64
 from src.steps.store import StepsStore
-from src.test_data import SAMPLE_INPUTS, VALID_PUBSUB_TOPICS
 
 logger = get_custom_logger(__name__)
 
diff --git a/tests/store/test_get_messages.py b/tests/store/test_get_messages.py
index f15f2fbb..ec8eda31 100644
--- a/tests/store/test_get_messages.py
+++ b/tests/store/test_get_messages.py
@@ -82,7 +82,7 @@ class TestGetMessages(StepsStore):
             self.publish_message(message=message)
             message_hash_list.append(self.compute_message_hash(self.test_pubsub_topic, message))
         for node in self.store_nodes:
-            store_response = node.get_store_messages(pubsubTopic=self.test_pubsub_topic, page_size=50, ascending="true", store_v="v3")
+            store_response = node.get_store_messages(pubsub_topic=self.test_pubsub_topic, page_size=50, ascending="true")
             assert len(store_response["messages"]) == len(SAMPLE_INPUTS)
             for index, message_hash in enumerate(store_response["messages"]):
-                assert message_hash["message_hash"]["data"] == message_hash_list[index], f"Message hash at index {index} doesn't match"
+                assert message_hash["messageHash"]["data"] == message_hash_list[index], f"Message hash at index {index} doesn't match"
diff --git a/tests/store/test_page_size.py b/tests/store/test_page_size.py
new file mode 100644
index 00000000..4c74ece2
--- /dev/null
+++ b/tests/store/test_page_size.py
@@ -0,0 +1,35 @@
+import pytest
+from src.libs.common import to_base64
+from src.steps.store import StepsStore
+
+
+@pytest.mark.usefixtures("node_setup")
+class TestPageSize(StepsStore):
+    def test_default_page_size(self):
+        for i in range(30):
+            self.publish_message(message=self.create_message(payload=to_base64(f"Message_{i}")), message_propagation_delay=0.01)
+        for node in self.store_nodes:
+            store_response = node.get_store_messages(pubsub_topic=self.test_pubsub_topic, ascending="true")
+            assert len(store_response["messages"]) == 20, "Message count mismatch"
+
+    def test_page_size_0_defaults_to_20(self):
+        for i in range(30):
+            self.publish_message(message=self.create_message(payload=to_base64(f"Message_{i}")), message_propagation_delay=0.01)
+        for node in self.store_nodes:
+            store_response = node.get_store_messages(pubsub_topic=self.test_pubsub_topic, page_size=0, ascending="true")
+            assert len(store_response["messages"]) == 20, "Message count mismatch"
+
+    def test_max_page_size(self):
+        for i in range(200):
+            self.publish_message(message=self.create_message(payload=to_base64(f"Message_{i}")), message_propagation_delay=0.01)
+        for node in self.store_nodes:
+            store_response = node.get_store_messages(pubsub_topic=self.test_pubsub_topic, page_size=200, ascending="true")
+            assert len(store_response["messages"]) == 100, "Message count mismatch"
+
+    @pytest.mark.parametrize("page_size", [1, 11, 39, 81, 99])
+    def test_different_page_size(self, page_size):
+        for i in range(page_size + 1):
+            self.publish_message(message=self.create_message(payload=to_base64(f"Message_{i}")), message_propagation_delay=0.01)
+        for node in self.store_nodes:
+            store_response = node.get_store_messages(pubsub_topic=self.test_pubsub_topic, page_size=page_size, ascending="true")
+            assert len(store_response["messages"]) == page_size, "Message count mismatch"
diff --git a/tests/store/test_reliability.py b/tests/store/test_reliability.py
index 161a2bf5..25cbd6f2 100644
--- a/tests/store/test_reliability.py
+++ b/tests/store/test_reliability.py
@@ -12,7 +12,7 @@ class TestReliability(StepsStore):
         self.publish_message()
         self.check_published_message_is_stored(page_size=5, ascending="true")
         self.publishing_node1.stop()
-        store_response = self.store_node1.get_store_messages(pubsub_topic=self.test_pubsub_topic, page_size=5, ascending="true", store_v="v3")
+        store_response = self.store_node1.get_store_messages(pubsub_topic=self.test_pubsub_topic, page_size=5, ascending="true")
         assert len(store_response["messages"]) == 1
 
     def test_publishing_node_restarts(self):
@@ -25,7 +25,7 @@ class TestReliability(StepsStore):
         self.publish_message()
         self.check_published_message_is_stored(page_size=5, ascending="true")
         for node in self.store_nodes:
-            store_response = node.get_store_messages(pubsub_topic=self.test_pubsub_topic, page_size=5, ascending="true", store_v="v3")
+            store_response = node.get_store_messages(pubsub_topic=self.test_pubsub_topic, page_size=5, ascending="true")
             assert len(store_response["messages"]) == 2
 
     def test_store_node_restarts(self):
@@ -37,7 +37,7 @@ class TestReliability(StepsStore):
         self.publish_message()
         self.check_published_message_is_stored(page_size=5, ascending="true")
         for node in self.store_nodes:
-            store_response = node.get_store_messages(pubsub_topic=self.test_pubsub_topic, page_size=5, ascending="true", store_v="v3")
+            store_response = node.get_store_messages(pubsub_topic=self.test_pubsub_topic, page_size=5, ascending="true")
             assert len(store_response["messages"]) == 2
 
     def test_publishing_node_paused_and_unpaused(self):
@@ -50,7 +50,7 @@ class TestReliability(StepsStore):
         self.publish_message()
         self.check_published_message_is_stored(page_size=5, ascending="true")
         for node in self.store_nodes:
-            store_response = node.get_store_messages(pubsub_topic=self.test_pubsub_topic, page_size=5, ascending="true", store_v="v3")
+            store_response = node.get_store_messages(pubsub_topic=self.test_pubsub_topic, page_size=5, ascending="true")
             assert len(store_response["messages"]) == 2
 
     def test_store_node_paused_and_unpaused(self):
@@ -63,7 +63,7 @@ class TestReliability(StepsStore):
         self.publish_message()
         self.check_published_message_is_stored(page_size=5, ascending="true")
         for node in self.store_nodes:
-            store_response = node.get_store_messages(pubsub_topic=self.test_pubsub_topic, page_size=5, ascending="true", store_v="v3")
+            store_response = node.get_store_messages(pubsub_topic=self.test_pubsub_topic, page_size=5, ascending="true")
             assert len(store_response["messages"]) == 2
 
     def test_message_relayed_while_store_node_is_paused(self):
@@ -75,7 +75,7 @@ class TestReliability(StepsStore):
         self.store_node1.ensure_ready()
         self.check_published_message_is_stored(page_size=5, ascending="true")
         for node in self.store_nodes:
-            store_response = node.get_store_messages(pubsub_topic=self.test_pubsub_topic, page_size=5, ascending="true", store_v="v3")
+            store_response = node.get_store_messages(pubsub_topic=self.test_pubsub_topic, page_size=5, ascending="true")
             assert len(store_response["messages"]) == 2
 
     def test_message_relayed_while_store_node_is_stopped(self):
@@ -89,7 +89,7 @@ class TestReliability(StepsStore):
         self.subscribe_to_pubsub_topics_via_relay(node=self.store_node1)
         delay(1)
         for node in self.store_nodes:
-            store_response = node.get_store_messages(pubsub_topic=self.test_pubsub_topic, page_size=5, ascending="true", store_v="v3")
+            store_response = node.get_store_messages(pubsub_topic=self.test_pubsub_topic, page_size=5, ascending="true")
             assert len(store_response["messages"]) == 2
 
     def test_message_relayed_before_store_node_is_started(self):
@@ -97,7 +97,7 @@ class TestReliability(StepsStore):
         self.check_published_message_is_stored(page_size=5, ascending="true")
         self.setup_second_store_node(store="true", relay="true")
         self.subscribe_to_pubsub_topics_via_relay()
-        store_response = self.store_node2.get_store_messages(pubsub_topic=self.test_pubsub_topic, page_size=5, ascending="true", store_v="v3")
+        store_response = self.store_node2.get_store_messages(pubsub_topic=self.test_pubsub_topic, page_size=5, ascending="true")
         assert len(store_response["messages"]) == 0
         self.publish_message()
         self.check_published_message_is_stored(page_size=5, ascending="true")
diff --git a/tests/store/test_sorting.py b/tests/store/test_sorting.py
new file mode 100644
index 00000000..1cff2c4a
--- /dev/null
+++ b/tests/store/test_sorting.py
@@ -0,0 +1,23 @@
+import pytest
+from src.libs.common import to_base64
+from src.steps.store import StepsStore
+
+
+@pytest.mark.usefixtures("node_setup")
+class TestSorting(StepsStore):
+    @pytest.mark.parametrize("ascending", ["true", "false"])
+    def test_store_sort_ascending(self, ascending):
+        expected_message_hash_list = []
+        for i in range(10):
+            message = self.create_message(payload=to_base64(f"Message_{i}"))
+            self.publish_message(message=message, message_propagation_delay=0.01)
+            expected_message_hash_list.append(self.compute_message_hash(self.test_pubsub_topic, message))
+        for node in self.store_nodes:
+            store_response = node.get_store_messages(pubsub_topic=self.test_pubsub_topic, page_size=5, ascending=ascending)
+            response_message_hash_list = []
+            for message in store_response["messages"]:
+                response_message_hash_list.append(message["messageHash"]["data"])
+            if ascending == "true":
+                assert response_message_hash_list == expected_message_hash_list[:5], "Message hash mismatch for ascending order"
+            else:
+                assert response_message_hash_list == expected_message_hash_list[5:], "Message hash mismatch for descending order"
diff --git a/tests/store/test_time_filter.py b/tests/store/test_time_filter.py
index 8d67e419..429e2a0b 100644
--- a/tests/store/test_time_filter.py
+++ b/tests/store/test_time_filter.py
@@ -2,26 +2,38 @@ import pytest
 from datetime import timedelta, datetime
 from src.libs.custom_logger import get_custom_logger
 from src.steps.store import StepsStore
+from src.test_data import STORE_TIMESTAMPS_PASS, STORE_TIMESTAMPS_FAIL
 
 logger = get_custom_logger(__name__)
 
+## tests with time filters
+
 
 @pytest.mark.usefixtures("node_setup")
 class TestTimeFilter(StepsStore):
     def test_messages_with_timestamps_close_to_now(self):
         failed_timestamps = []
-        sample_ts = [
-            int((datetime.now() - timedelta(seconds=2)).timestamp() * 1e9),
-            int((datetime.now() + timedelta(seconds=2)).timestamp() * 1e9),
-            int((datetime.now() + timedelta(seconds=10)).timestamp() * 1e9),
-        ]
-        for timestamp in sample_ts:
-            logger.debug(f"Running test with timestamp {timestamp}")
-            message = self.create_message(timestamp=timestamp)
+        for timestamp in STORE_TIMESTAMPS_PASS:
+            logger.debug(f'Running test with payload {timestamp["description"]}')
+            message = self.create_message(timestamp=timestamp["value"])
             try:
                 self.publish_message(message=message)
-                self.check_published_message_is_stored(page_size=5, ascending="true")
+                self.check_published_message_is_stored(page_size=20, ascending="true")
             except Exception as ex:
-                logger.error(f"Timestamp {timestamp} failed: {str(ex)}")
-                failed_timestamps.append(timestamp)
+                logger.error(f'Payload {timestamp["description"]} failed: {str(ex)}')
+                failed_timestamps.append(timestamp["description"])
         assert not failed_timestamps, f"Timestamps failed: {failed_timestamps}"
+
+    def test_messages_with_timestamps_far_from_now(self):
+        success_timestamps = []
+        for timestamp in STORE_TIMESTAMPS_FAIL:
+            logger.debug(f'Running test with payload {timestamp["description"]}')
+            message = self.create_message(timestamp=timestamp["value"])
+            try:
+                self.publish_message(message=message)
+                self.check_store_returns_empty_response()
+            except Exception as ex:
+                logger.error(f'Payload {timestamp["description"]} succeeded where it should have failed: {str(ex)}')
+                success_timestamps.append(timestamp["description"])
+        assert not success_timestamps, f"Timestamps succeeded: {success_timestamps}"
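Reviewer note (illustrative, not part of the patch): the cursor tests above all drive the same store-v3 pagination pattern — request a page, read the "paginationCursor" key from the JSON response, and feed its "data" value back as the cursor of the next request until the key disappears. A minimal sketch of that loop, assuming a node object exposing get_store_messages() with the same keyword arguments and response shape used in these tests; fetch_all_message_hashes is a hypothetical helper name, not part of the suite:

    # Hypothetical helper mirroring test_get_multiple_2000_store_messages above.
    # "node" stands in for a WakuNode-like client (e.g. the store_node1 fixture).
    def fetch_all_message_hashes(node, pubsub_topic, page_size=100):
        hashes = []
        # seed with an empty cursor so the first request starts at the beginning
        store_response = {"paginationCursor": {"data": ""}}
        while "paginationCursor" in store_response:
            cursor = store_response["paginationCursor"]["data"]
            store_response = node.get_store_messages(
                pubsub_topic=pubsub_topic, page_size=page_size, ascending="true", cursor=cursor
            )
            for message in store_response["messages"]:
                hashes.append(message["messageHash"]["data"])
        return hashes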