diff --git a/src/node/waku_node.py b/src/node/waku_node.py index 377baae5..b2b1f560 100644 --- a/src/node/waku_node.py +++ b/src/node/waku_node.py @@ -291,7 +291,19 @@ class WakuNode: return self._api.get_filter_messages(content_topic, pubsub_topic) def get_store_messages( - self, peerAddr, includeData, pubsubTopic, contentTopics, startTime, endTime, hashes, cursor, pageSize, ascending, store_v, **kwargs + self, + peerAddr=None, + includeData=None, + pubsubTopic=None, + contentTopics=None, + startTime=None, + endTime=None, + hashes=None, + cursor=None, + pageSize=None, + ascending=None, + store_v=None, + **kwargs, ): return self._api.get_store_messages( peerAddr=peerAddr, diff --git a/src/steps/common.py b/src/steps/common.py index 54de19d3..1ce4db9b 100644 --- a/src/steps/common.py +++ b/src/steps/common.py @@ -41,10 +41,10 @@ class StepsCommon: def compute_message_hash(self, pubsub_topic, msg): ctx = hashlib.sha256() ctx.update(pubsub_topic.encode("utf-8")) - payload_bytes = base64.b64decode(msg["payload"]) - ctx.update(payload_bytes) + ctx.update(base64.b64decode(msg["payload"])) ctx.update(msg["contentTopic"].encode("utf-8")) - timestamp_bytes = int(msg["timestamp"]).to_bytes(8, byteorder="big") - ctx.update(timestamp_bytes) + if "meta" in msg: + ctx.update(base64.b64decode(msg["meta"])) + ctx.update(int(msg["timestamp"]).to_bytes(8, byteorder="big")) hash_bytes = ctx.digest() return base64.b64encode(hash_bytes).decode("utf-8") diff --git a/src/steps/store.py b/src/steps/store.py index 74f1ff31..7121588f 100644 --- a/src/steps/store.py +++ b/src/steps/store.py @@ -173,10 +173,10 @@ class StepsStore(StepsCommon): if store_v == "v1": waku_message.assert_received_message(self.message) else: + expected_hash = self.compute_message_hash(pubsubTopic, self.message) assert ( - self.compute_message_hash(pubsubTopic, self.message) - == self.store_response["messages"][store_message_index]["message_hash"]["data"] - ) + expected_hash == 
self.store_response["messages"][store_message_index]["message_hash"]["data"] + ), f"Message hash returned by store doesn't match the computed message hash {expected_hash}" @allure.step def check_store_returns_empty_response(self, pubsub_topic=None): diff --git a/tests/store/test_get_messages.py b/tests/store/test_get_messages.py index 2f122616..99caf483 100644 --- a/tests/store/test_get_messages.py +++ b/tests/store/test_get_messages.py @@ -2,7 +2,7 @@ import pytest from src.libs.custom_logger import get_custom_logger from src.libs.common import to_base64 from src.steps.store import StepsStore -from src.test_data import SAMPLE_INPUTS +from src.test_data import SAMPLE_INPUTS, VALID_PUBSUB_TOPICS logger = get_custom_logger(__name__) @@ -16,7 +16,12 @@ class TestGetMessages(StepsStore): self.setup_first_store_node(store="true", relay="true") self.subscribe_to_pubsub_topics_via_relay() - def test_store_messages_with_valid_payloads(self): + # only one test for store v1, all other tests are using the new store v3 + def test_legacy_store_v1(self): + self.publish_message_via("relay") + self.check_published_message_is_stored(pubsubTopic=self.test_pubsub_topic, pageSize=5, ascending="true", store_v="v1") + + def test_get_store_messages_with_different_payloads(self): failed_payloads = [] for payload in SAMPLE_INPUTS: logger.debug(f'Running test with payload {payload["description"]}') @@ -27,8 +32,56 @@ class TestGetMessages(StepsStore): except Exception as e: logger.error(f'Payload {payload["description"]} failed: {str(e)}') failed_payloads.append(payload["description"]) + self.check_published_message_is_stored(pubsubTopic=self.test_pubsub_topic, pageSize=50, ascending="true", store_v="v1") assert not failed_payloads, f"Payloads failed: {failed_payloads}" - def test_store_v1(self): - self.publish_message_via("relay") - self.check_published_message_is_stored(pubsubTopic=self.test_pubsub_topic, pageSize=5, ascending="true", store_v="v1") + def 
test_get_multiple_store_messages(self):
+        message_hash_list = []
+        for payload in SAMPLE_INPUTS:
+            message = self.create_message(payload=to_base64(payload["value"]))
+            self.publish_message_via("relay", message=message)
+            message_hash_list.append(self.compute_message_hash(self.test_pubsub_topic, message))
+        for node in self.store_nodes:
+            store_response = node.get_store_messages(pubsubTopic=self.test_pubsub_topic, pageSize=50, ascending="true", store_v="v3")
+            assert len(store_response["messages"]) == len(SAMPLE_INPUTS)
+            for index, message_hash in enumerate(store_response["messages"]):
+                assert message_hash["message_hash"]["data"] == message_hash_list[index], f"Message hash at index {index} doesn't match"
+
+    def test_get_store_messages_with_different_content_topics(self):
+        failed_content_topics = []
+        for content_topic in SAMPLE_INPUTS:
+            logger.debug(f'Running test with content topic {content_topic["description"]}')
+            message = self.create_message(contentTopic=content_topic["value"])
+            try:
+                self.publish_message_via("relay", message=message)
+                self.check_published_message_is_stored(pubsubTopic=self.test_pubsub_topic, pageSize=50, ascending="true")
+            except Exception as e:
+                logger.error(f'ContentTopic {content_topic["description"]} failed: {str(e)}')
+                failed_content_topics.append(content_topic["description"])
+        assert not failed_content_topics, f"ContentTopics failed: {failed_content_topics}"
+
+    def test_get_store_messages_with_different_pubsub_topics(self):
+        self.subscribe_to_pubsub_topics_via_relay(pubsub_topics=VALID_PUBSUB_TOPICS)
+        failed_pubsub_topics = []
+        for pubsub_topic in VALID_PUBSUB_TOPICS:
+            logger.debug(f"Running test with pubsub topic {pubsub_topic}")
+            try:
+                self.publish_message_via("relay", pubsub_topic=pubsub_topic)
+                self.check_published_message_is_stored(pubsubTopic=pubsub_topic, pageSize=50, ascending="true")
+            except Exception as e:
+                logger.error(f"PubsubTopic {pubsub_topic} failed: {str(e)}")
+                failed_pubsub_topics.append(pubsub_topic)
+        assert not failed_pubsub_topics, f"PubsubTopics failed: {failed_pubsub_topics}"
+
+    def test_get_store_message_with_meta(self):
+        message = self.create_message(meta=to_base64(self.test_payload))
+        self.publish_message_via("relay", message=message)
+        self.check_published_message_is_stored(pubsubTopic=self.test_pubsub_topic, pageSize=50, ascending="true")
+
+    def test_get_store_duplicate_messages(self):
+        message = self.create_message()
+        self.publish_message_via("relay", message=message)
+        self.publish_message_via("relay", message=message)
+        self.check_published_message_is_stored(pubsubTopic=self.test_pubsub_topic, pageSize=50, ascending="true")
+        # only one message is stored
+        assert len(self.store_response["messages"]) == 1