adjustments

Florin Barbu 2024-05-22 18:48:15 +03:00
parent 9149b9b7f3
commit 6d3875fdbd
No known key found for this signature in database
GPG Key ID: 593D6DBC6D9E5095
9 changed files with 98 additions and 48 deletions

View File

@@ -131,6 +131,41 @@ class StepsStore(StepsCommon):
delay(message_propagation_delay)
return self.message
def get_messages_from_store(
self,
node=None,
peer_addr=None,
include_data=None,
pubsub_topic=None,
content_topics=None,
start_time=None,
end_time=None,
hashes=None,
cursor=None,
page_size=None,
ascending="true",
store_v="v3",
**kwargs,
):
if pubsub_topic is None:
pubsub_topic = self.test_pubsub_topic
if node.is_gowaku() and content_topics is None:
content_topics = self.test_content_topic
return node.get_store_messages(
peer_addr=peer_addr,
include_data=include_data,
pubsub_topic=pubsub_topic,
content_topics=content_topics,
start_time=start_time,
end_time=end_time,
hashes=hashes,
cursor=cursor,
page_size=page_size,
ascending=ascending,
store_v=store_v,
**kwargs,
)
@allure.step
def check_published_message_is_stored(
self,
@@ -161,7 +196,8 @@ class StepsStore(StepsCommon):
store_node = store_node
for node in store_node:
logger.debug(f"Checking that peer {node.image} can find the stored message")
self.store_response = node.get_store_messages(
self.store_response = self.get_messages_from_store(
node=node,
peer_addr=peer_addr,
include_data=include_data,
pubsub_topic=pubsub_topic,
@@ -185,9 +221,11 @@ class StepsStore(StepsCommon):
waku_message.assert_received_message(message_to_check)
else:
expected_hash = self.compute_message_hash(pubsub_topic, message_to_check)
assert (
expected_hash == self.store_response["messages"][store_message_index]["messageHash"]["data"]
), f"Message hash returned by store doesn't match the computed message hash {expected_hash}"
if node.is_nwaku():
actual = self.store_response["messages"][store_message_index]["messageHash"]
else:
actual = self.store_response["messages"][store_message_index]["message_hash"]
assert expected_hash == actual, f"Message hash returned by store doesn't match the computed message hash {expected_hash}"
@allure.step
def check_store_returns_empty_response(self, pubsub_topic=None):

View File

@@ -98,6 +98,18 @@ CONTENT_TOPICS_SHARD_7 = [
VALID_PUBSUB_TOPICS = ["/waku/2/rs/0/0", "/waku/2/rs/0/1", "/waku/2/rs/0/9", "/waku/2/rs/0/25", "/waku/2/rs/0/1000"]
PUBSUB_TOPICS_STORE = [
"/waku/2/rs/0/0",
"/waku/2/rs/0/1",
"/waku/2/rs/0/2",
"/waku/2/rs/0/3",
"/waku/2/rs/0/4",
"/waku/2/rs/0/5",
"/waku/2/rs/0/6",
"/waku/2/rs/0/7",
"/waku/2/rs/0/8",
]
INVALID_PUBSUB_TOPICS = ["/test/2/rs/0/1", "/waku/3/rs/0/1", "/waku/2/test/0/1", "/waku/2/rs/0/b", "/waku/2/rs/0"]
PUBSUB_TOPICS_DIFFERENT_CLUSTERS = [

View File

@@ -17,7 +17,6 @@ class TestApiFlags(StepsStore):
self.publish_message()
self.check_published_message_is_stored(store_node=self.store_node1, peer_addr=self.multiaddr_list[0])
@pytest.mark.xfail("nwaku" in NODE_1, reason="Bug reported: https://github.com/waku-org/nwaku/issues/2715")
def test_store_with_hashes(self):
message_hash_list = []
for payload in SAMPLE_INPUTS:
@@ -28,10 +27,9 @@ class TestApiFlags(StepsStore):
for message_hash in message_hash_list:
store_response = node.get_store_messages(pubsub_topic=self.test_pubsub_topic, hashes=message_hash, page_size=50, ascending="true")
assert len(store_response["messages"]) == 1
assert store_response["messages"][0]["messageHash"]["data"] == message_hash
assert store_response["messages"][0]["messageHash"] == message_hash
@pytest.mark.xfail("nwaku" in NODE_1, reason="Bug reported: https://github.com/waku-org/nwaku/issues/2715")
def test_store_with_mulitple_hashes(self):
def test_store_with_multiple_hashes(self):
message_hash_list = []
for payload in SAMPLE_INPUTS:
message = self.create_message(payload=to_base64(payload["value"]))
@@ -42,12 +40,8 @@ class TestApiFlags(StepsStore):
pubsub_topic=self.test_pubsub_topic, hashes=f"{message_hash_list[0]},{message_hash_list[4]}", page_size=50, ascending="true"
)
assert len(store_response["messages"]) == 2
assert (
store_response["messages"][0]["messageHash"]["data"] == message_hash_list[0]
), "Incorrect messaged filtered based on multiple hashes"
assert (
store_response["messages"][1]["messageHash"]["data"] == message_hash_list[4]
), "Incorrect messaged filtered based on multiple hashes"
assert store_response["messages"][0]["messageHash"] == message_hash_list[0], "Incorrect messaged filtered based on multiple hashes"
assert store_response["messages"][1]["messageHash"] == message_hash_list[4], "Incorrect messaged filtered based on multiple hashes"
def test_store_include_data(self):
message_list = []
@@ -56,9 +50,10 @@ class TestApiFlags(StepsStore):
self.publish_message(message=message)
message_list.append(message)
for node in self.store_nodes:
store_response = node.get_store_messages(pubsub_topic=self.test_pubsub_topic, include_data="true", page_size=50, ascending="true")
store_response = self.get_messages_from_store(node, include_data="true", page_size=50)
assert len(store_response["messages"]) == len(SAMPLE_INPUTS)
for index, message in enumerate(store_response["messages"]):
assert message["message"]["payload"] == message_list[index]["payload"]
assert message["pubsubTopic"] == self.test_pubsub_topic
waku_message = WakuMessage([message["message"]])
waku_message.assert_received_message(message_list[index])

View File

@@ -4,7 +4,6 @@ from src.libs.common import to_base64
from src.steps.store import StepsStore
@pytest.mark.xfail("nwaku" in NODE_1, reason="Bug reported: https://github.com/waku-org/nwaku/issues/2715")
@pytest.mark.usefixtures("node_setup")
class TestCursor(StepsStore):
# we implicitly test the reusability of the cursor for multiple nodes
@@ -15,13 +14,13 @@ class TestCursor(StepsStore):
message = self.create_message(payload=to_base64(f"Message_{i}"))
self.publish_message(message=message)
expected_message_hash_list.append(self.compute_message_hash(self.test_pubsub_topic, message))
store_response = {"paginationCursor": {"data": ""}}
store_response = {"paginationCursor": ""}
response_message_hash_list = []
while "paginationCursor" in store_response:
cursor = store_response["paginationCursor"]["data"]
cursor = store_response["paginationCursor"]
store_response = self.store_node1.get_store_messages(pubsub_topic=self.test_pubsub_topic, page_size=100, ascending="true", cursor=cursor)
for message in store_response["messages"]:
response_message_hash_list.append(message["messageHash"]["data"])
response_message_hash_list.append(message["messageHash"])
assert len(expected_message_hash_list) == len(response_message_hash_list), "Message count mismatch"
assert expected_message_hash_list == response_message_hash_list, "Message hash mismatch"
@@ -37,12 +36,12 @@ class TestCursor(StepsStore):
for node in self.store_nodes:
store_response = node.get_store_messages(pubsub_topic=self.test_pubsub_topic, page_size=cursor_index, ascending="true")
assert len(store_response["messages"]) == cursor_index
cursor = store_response["paginationCursor"]["data"]
cursor = store_response["paginationCursor"]
for node in self.store_nodes:
store_response = node.get_store_messages(pubsub_topic=self.test_pubsub_topic, page_size=100, ascending="true", cursor=cursor)
assert len(store_response["messages"]) == message_count - cursor_index
for index, message_hash in enumerate(store_response["messages"]):
assert message_hash["messageHash"]["data"] == message_hash_list[cursor_index + index], f"Message hash at index {index} doesn't match"
assert message_hash["messageHash"] == message_hash_list[cursor_index + index], f"Message hash at index {index} doesn't match"
def test_passing_cursor_not_returned_in_paginationCursor(self):
cursor = ""
@@ -51,7 +50,7 @@ class TestCursor(StepsStore):
for node in self.store_nodes:
store_response = node.get_store_messages(pubsub_topic=self.test_pubsub_topic, page_size=5, ascending="true")
# retrieving the cursor with the message hash of the 3rd message stored
cursor = store_response["messages"][2]["messageHash"]["data"]
cursor = store_response["messages"][2]["messageHash"]
for node in self.store_nodes:
store_response = node.get_store_messages(pubsub_topic=self.test_pubsub_topic, page_size=100, ascending="true", cursor=cursor)
assert len(store_response["messages"]) == 7, "Message count mismatch"
@@ -63,7 +62,7 @@ class TestCursor(StepsStore):
for node in self.store_nodes:
store_response = node.get_store_messages(pubsub_topic=self.test_pubsub_topic, page_size=10, ascending="true")
# retrieving the cursor with the message hash of the last message stored
cursor = store_response["messages"][9]["messageHash"]["data"]
cursor = store_response["messages"][9]["messageHash"]
for node in self.store_nodes:
store_response = node.get_store_messages(pubsub_topic=self.test_pubsub_topic, page_size=100, ascending="true", cursor=cursor)
assert len(store_response["messages"]) == 0, "Message count mismatch"
@@ -81,7 +80,7 @@ class TestCursor(StepsStore):
store_response = node.get_store_messages(pubsub_topic=self.test_pubsub_topic, page_size=100, ascending="true", cursor=cursor)
assert len(store_response["messages"]) == 0, "Message count mismatch"
@pytest.mark.xfail("nwaku" in NODE_1, reason="Bug reported: https://github.com/waku-org/nwaku/issues/2717")
@pytest.mark.xfail("nwaku" in NODE_1, reason="Bug reported: https://github.com/waku-org/nwaku/issues/2716")
def test_passing_invalid_cursor(self):
for i in range(4):
self.publish_message(message=self.create_message(payload=to_base64(f"Message_{i}")))
@@ -93,7 +92,7 @@ class TestCursor(StepsStore):
store_response = node.get_store_messages(pubsub_topic=self.test_pubsub_topic, page_size=100, ascending="true", cursor=cursor)
assert len(store_response["messages"]) == 0, "Message count mismatch"
@pytest.mark.xfail("nwaku" in NODE_1, reason="Bug reported: https://github.com/waku-org/nwaku/issues/2717")
@pytest.mark.xfail("nwaku" in NODE_1, reason="Bug reported: https://github.com/waku-org/nwaku/issues/2716")
def test_passing_non_base64_cursor(self):
for i in range(4):
self.publish_message(message=self.create_message(payload=to_base64(f"Message_{i}")))
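As an aside, a minimal sketch (hypothetical, not part of this commit) of the cursor pagination pattern these tests exercise, assuming, as the loop at the top of this file implies, that the node omits paginationCursor from the last page of the v3 store response:

# drain every page from the store, following paginationCursor until the node stops returning it
response_hashes = []
store_response = {"paginationCursor": ""}
while "paginationCursor" in store_response:
    store_response = self.store_node1.get_store_messages(
        pubsub_topic=self.test_pubsub_topic, page_size=100, ascending="true", cursor=store_response["paginationCursor"]
    )
    # v3 responses return each hash as a flat string under "messageHash" (no nested "data" field)
    response_hashes.extend(message["messageHash"] for message in store_response["messages"])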

View File

@@ -2,7 +2,7 @@ import pytest
from src.libs.custom_logger import get_custom_logger
from src.libs.common import to_base64
from src.steps.store import StepsStore
from src.test_data import SAMPLE_INPUTS, VALID_PUBSUB_TOPICS
from src.test_data import CONTENT_TOPICS_DIFFERENT_SHARDS, SAMPLE_INPUTS, PUBSUB_TOPICS_STORE
logger = get_custom_logger(__name__)
@@ -32,22 +32,21 @@ class TestGetMessages(StepsStore):
def test_get_store_messages_with_different_content_topics(self):
failed_content_topics = []
for content_topic in SAMPLE_INPUTS:
logger.debug(f'Running test with content topic {content_topic["description"]}')
message = self.create_message(contentTopic=content_topic["value"])
for content_topic in CONTENT_TOPICS_DIFFERENT_SHARDS:
logger.debug(f"Running test with content topic {content_topic}")
message = self.create_message(contentTopic=content_topic)
try:
self.publish_message(message=message)
self.check_published_message_is_stored(page_size=50, ascending="true")
self.check_published_message_is_stored(page_size=50, content_topics=content_topic, ascending="true")
except Exception as e:
logger.error(f'ContentTopic {content_topic["description"]} failed: {str(e)}')
logger.error(f"ContentTopic {content_topic} failed: {str(e)}")
failed_content_topics.append(content_topic)
assert not failed_content_topics, f"ContentTopics failed: {failed_content_topics}"
assert len(self.store_response["messages"]) == len(SAMPLE_INPUTS)
def test_get_store_messages_with_different_pubsub_topics(self):
self.subscribe_to_pubsub_topics_via_relay(pubsub_topics=VALID_PUBSUB_TOPICS)
self.subscribe_to_pubsub_topics_via_relay(pubsub_topics=PUBSUB_TOPICS_STORE)
failed_pubsub_topics = []
for pubsub_topic in VALID_PUBSUB_TOPICS:
for pubsub_topic in PUBSUB_TOPICS_STORE:
logger.debug(f"Running test with pubsub topic {pubsub_topic}")
try:
self.publish_message(pubsub_topic=pubsub_topic)
@@ -82,12 +81,16 @@ class TestGetMessages(StepsStore):
self.publish_message(message=message)
message_hash_list.append(self.compute_message_hash(self.test_pubsub_topic, message))
for node in self.store_nodes:
store_response = node.get_store_messages(pubsub_topic=self.test_pubsub_topic, page_size=50, ascending="true")
store_response = self.get_messages_from_store(node, page_size=50)
assert len(store_response["messages"]) == len(SAMPLE_INPUTS)
for index, message_hash in enumerate(store_response["messages"]):
assert message_hash["messageHash"]["data"] == message_hash_list[index], f"Message hash at index {index} doesn't match"
if node.is_nwaku():
actual = message_hash["messageHash"]
else:
actual = message_hash["message_hash"]
assert actual == message_hash_list[index], f"Message hash at index {index} doesn't match"
def test_store_is_empty(self):
for node in self.store_nodes:
store_response = node.get_store_messages(pubsub_topic=self.test_pubsub_topic, page_size=50, ascending="true")
store_response = self.get_messages_from_store(node, page_size=50)
assert len(store_response["messages"]) == 0

View File

@@ -1,3 +1,5 @@
import pytest
from src.env_vars import NODE_2
from src.steps.store import StepsStore
@@ -30,6 +32,7 @@ class TestRunningNodes(StepsStore):
self.publish_message()
self.check_published_message_is_stored(page_size=5, ascending="true")
@pytest.mark.xfail("go-waku" in NODE_2, reason="Bug reported: https://github.com/waku-org/go-waku/issues/1106")
def test_main_node_only_relay__peer_relay_and_store(self):
self.setup_first_publishing_node(store="false", relay="true")
self.setup_first_store_node(store="true", relay="true")
@@ -37,6 +40,7 @@ class TestRunningNodes(StepsStore):
self.publish_message()
self.check_published_message_is_stored(page_size=5, ascending="true")
@pytest.mark.xfail("go-waku" in NODE_2, reason="Bug reported: https://github.com/waku-org/go-waku/issues/1106")
def test_main_node_only_relay__peer_only_store(self):
self.setup_first_publishing_node(store="false", relay="true")
self.setup_first_store_node(store="true", relay="false")
@@ -44,6 +48,7 @@ class TestRunningNodes(StepsStore):
self.publish_message()
self.check_store_returns_empty_response()
@pytest.mark.xfail("go-waku" in NODE_2, reason="Bug reported: https://github.com/waku-org/go-waku/issues/1106")
def test_main_node_only_relay__peer_only_relay(self):
self.setup_first_publishing_node(store="false", relay="true")
self.setup_first_store_node(store="false", relay="true")

View File

@@ -16,7 +16,7 @@ class TestSorting(StepsStore):
store_response = node.get_store_messages(pubsub_topic=self.test_pubsub_topic, page_size=5, ascending=ascending)
response_message_hash_list = []
for message in store_response["messages"]:
response_message_hash_list.append(message["messageHash"]["data"])
response_message_hash_list.append(message["messageHash"])
if ascending == "true":
assert response_message_hash_list == expected_message_hash_list[:5], "Message hash mismatch for ascending order"
else:

View File

@@ -64,7 +64,7 @@ class TestTimeFilter(StepsStore):
end_time=self.ts_pass[0]["value"] + 100000,
)
assert len(store_response["messages"]) == 1, "Message count mismatch"
assert store_response["messages"][0]["messageHash"]["data"] == message_hash_list[0], "Incorrect messaged filtered based on time"
assert store_response["messages"][0]["messageHash"] == message_hash_list[0], "Incorrect messaged filtered based on time"
def test_time_filter_matches_multiple_messages(self):
message_hash_list = []
@@ -82,9 +82,7 @@ class TestTimeFilter(StepsStore):
)
assert len(store_response["messages"]) == 5, "Message count mismatch"
for i in range(5):
assert (
store_response["messages"][i]["messageHash"]["data"] == message_hash_list[i]
), f"Incorrect messaged filtered based on time at index {i}"
assert store_response["messages"][i]["messageHash"] == message_hash_list[i], f"Incorrect messaged filtered based on time at index {i}"
def test_time_filter_matches_no_message(self):
message_hash_list = []
@@ -117,4 +115,4 @@ class TestTimeFilter(StepsStore):
end_time=self.ts_pass[0]["value"],
)
assert len(store_response["messages"]) == 1, "Message count mismatch"
assert store_response["messages"][0]["messageHash"]["data"] == message_hash_list[0], "Incorrect messaged filtered based on time"
assert store_response["messages"][0]["messageHash"] == message_hash_list[0], "Incorrect messaged filtered based on time"

View File

@@ -18,7 +18,7 @@ class TestTopics(StepsStore):
store_response = node.get_store_messages(content_topics=content_topic, page_size=20, ascending="true")
assert len(store_response["messages"]) == 1, "Message count mismatch"
assert (
store_response["messages"][0]["messageHash"]["data"] == self.message_hash_list[index]
store_response["messages"][0]["messageHash"] == self.message_hash_list[index]
), "Incorrect messaged filtered based on content topic"
def test_store_with_multiple_content_topics(self):
@@ -28,10 +28,10 @@ class TestTopics(StepsStore):
)
assert len(store_response["messages"]) == 2, "Message count mismatch"
assert (
store_response["messages"][0]["messageHash"]["data"] == self.message_hash_list[0]
store_response["messages"][0]["messageHash"] == self.message_hash_list[0]
), "Incorrect messaged filtered based on multiple content topics"
assert (
store_response["messages"][1]["messageHash"]["data"] == self.message_hash_list[4]
store_response["messages"][1]["messageHash"] == self.message_hash_list[4]
), "Incorrect messaged filtered based on multiple content topics"
def test_store_with_unknown_content_topic(self):
@@ -52,7 +52,7 @@ class TestTopics(StepsStore):
)
assert len(store_response["messages"]) == 1, "Message count mismatch"
assert (
store_response["messages"][0]["messageHash"]["data"] == self.message_hash_list[index]
store_response["messages"][0]["messageHash"] == self.message_hash_list[index]
), "Incorrect messaged filtered based on content topic"
def test_store_with_unknown_pubsub_topic_but_known_content_topic(self):
@@ -70,7 +70,7 @@ class TestTopics(StepsStore):
)
assert len(store_response["messages"]) == 1, "Message count mismatch"
assert (
store_response["messages"][0]["messageHash"]["data"] == self.message_hash_list[index]
store_response["messages"][0]["messageHash"] == self.message_hash_list[index]
), "Incorrect messaged filtered based on content topic"
def test_store_without_pubsub_topic_and_content_topic(self):