From 81e86ee030af31c331745855258f1ff0c83bb279 Mon Sep 17 00:00:00 2001 From: fbarbu15 Date: Wed, 1 Nov 2023 14:02:29 +0200 Subject: [PATCH 1/7] relay publish tests --- .github/pull_request_template.md | 20 +++ .github/workflows/linters.yml | 33 +++++ .github/workflows/test-on-win.yml | 43 +++++++ .gitignore | 103 +++++++++++++++ .pre-commit-config.yaml | 11 ++ LICENSE-APACHE-v2 | 202 ++++++++++++++++++++++++++++++ LICENSE-MIT | 21 ++++ README.md | 26 ++++ pyrightconfig.json | 10 ++ pytest.ini | 7 ++ requirements.txt | 10 ++ src/__init__.py | 0 src/data_classes.py | 24 ++++ src/data_storage.py | 3 + src/env_vars.py | 21 ++++ src/libs/__init__.py | 0 src/libs/common.py | 18 +++ src/node/__init__.py | 0 src/node/docker_mananger.py | 81 ++++++++++++ src/node/waku_node.py | 156 +++++++++++++++++++++++ src/steps/__init__.py | 0 src/steps/relay.py | 32 +++++ src/test_data.py | 42 +++++++ tests/__init__.py | 4 + tests/conftest.py | 30 +++++ tests/relay/__init__.py | 0 tests/relay/test_publish.py | 52 ++++++++ 27 files changed, 949 insertions(+) create mode 100644 .github/pull_request_template.md create mode 100644 .github/workflows/linters.yml create mode 100644 .github/workflows/test-on-win.yml create mode 100644 .gitignore create mode 100644 .pre-commit-config.yaml create mode 100644 LICENSE-APACHE-v2 create mode 100644 LICENSE-MIT create mode 100644 README.md create mode 100644 pyrightconfig.json create mode 100644 pytest.ini create mode 100644 requirements.txt create mode 100644 src/__init__.py create mode 100644 src/data_classes.py create mode 100644 src/data_storage.py create mode 100644 src/env_vars.py create mode 100644 src/libs/__init__.py create mode 100644 src/libs/common.py create mode 100644 src/node/__init__.py create mode 100644 src/node/docker_mananger.py create mode 100644 src/node/waku_node.py create mode 100644 src/steps/__init__.py create mode 100644 src/steps/relay.py create mode 100644 src/test_data.py create mode 100644 tests/__init__.py create mode 100644 tests/conftest.py create mode 100644 tests/relay/__init__.py create mode 100644 tests/relay/test_publish.py diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md new file mode 100644 index 0000000000..fb6a2667d9 --- /dev/null +++ b/.github/pull_request_template.md @@ -0,0 +1,20 @@ +## Problem + + + +## Solution + + + +## Notes + + + +- Resolves +- Related to diff --git a/.github/workflows/linters.yml b/.github/workflows/linters.yml new file mode 100644 index 0000000000..a73edbff75 --- /dev/null +++ b/.github/workflows/linters.yml @@ -0,0 +1,33 @@ +name: Code Linters + +on: + pull_request: + branches: + - master + +jobs: + pre-commit: + timeout-minutes: 10 + runs-on: ubuntu-latest + steps: + + - uses: actions/checkout@v4 + + - uses: actions/setup-python@v4 + with: + python-version: '3.12' + cache: 'pip' + + - name: Set up virtual environment + run: | + python -m venv .venv + echo ".venv/bin" >> $GITHUB_PATH # Add virtualenv to PATH for subsequent steps + + - name: Install dependencies based on requirements.txt + run: pip install -r requirements.txt + + - name: Install pre-commit + run: pip install pre-commit + + - name: Run pre-commit hooks + run: pre-commit run --all-files \ No newline at end of file diff --git a/.github/workflows/test-on-win.yml b/.github/workflows/test-on-win.yml new file mode 100644 index 0000000000..6e1d9fed5e --- /dev/null +++ b/.github/workflows/test-on-win.yml @@ -0,0 +1,43 @@ +name: Interop Tests + +on: + schedule: + - cron: '0 3 * * *' + pull_request: + branches: + - 
master + workflow_dispatch: + inputs: + node1: + required: false + type: string + default: "wakuorg/nwaku:deploy-wakuv2-test" + node2: + required: false + type: string + default: "wakuorg/go-waku:latest" + +env: + FORCE_COLOR: "1" + NODE_1: ${{ inputs.node1 }} + NODE_2: ${{ inputs.node2 }} + +jobs: + + tests: + name: Start self-hosted runners + runs-on: ubuntu-latest + timeout-minutes: 30 + steps: + + - uses: actions/checkout@v4 + + - uses: actions/setup-python@v4 + with: + python-version: '3.12' + cache: 'pip' + + - run: pip install -r requirements.txt + + - name: Run tests + run: pytest diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000000..fda8a5f799 --- /dev/null +++ b/.gitignore @@ -0,0 +1,103 @@ +# Custom +log/ +.vscode + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +*.egg-info/ +.installed.cfg +*.egg + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ + +# Jupyter Notebook +.ipynb_checkpoints + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv’s lock file may be unsuitable. +#Pipfile.lock + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.envrc +.venv +venv/ +ENV/ +env/ +ENV.bak/ +env.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000000..a52586110e --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,11 @@ +repos: +- repo: https://github.com/psf/black + rev: 23.7.0 + hooks: + - id: black + args: [--line-length=150] + +- repo: https://github.com/RobertCraigie/pyright-python + rev: v1.1.326 + hooks: + - id: pyright diff --git a/LICENSE-APACHE-v2 b/LICENSE-APACHE-v2 new file mode 100644 index 0000000000..318c8b1066 --- /dev/null +++ b/LICENSE-APACHE-v2 @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2018 Status Research & Development GmbH + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/LICENSE-MIT b/LICENSE-MIT new file mode 100644 index 0000000000..68faab7bf5 --- /dev/null +++ b/LICENSE-MIT @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2021 Status Research & Development GmbH + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/README.md b/README.md new file mode 100644 index 0000000000..016958b59c --- /dev/null +++ b/README.md @@ -0,0 +1,26 @@ +# waku-interop-tests + +Waku interop testing between various implementation of the [Waku v2 protocol](https://rfc.vac.dev/spec/10/). + +## Setup + +```shell +git clone git@github.com:waku-org/waku-interop-tests.git +cd waku-interop-tests +python -m venv .venv +source .venv/bin/activate +pip install -r requirements.txt +pytest +``` + +## License + +Licensed and distributed under either of + +- MIT license: [LICENSE-MIT](https://github.com/waku-org/js-waku/blob/master/LICENSE-MIT) or http://opensource.org/licenses/MIT + +or + +- Apache License, Version 2.0, ([LICENSE-APACHE-v2](https://github.com/waku-org/js-waku/blob/master/LICENSE-APACHE-v2) or http://www.apache.org/licenses/LICENSE-2.0) + +at your option. These files may not be copied, modified, or distributed except according to those terms. 
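A minimal usage sketch for the setup above (assuming the `NODE_1`/`NODE_2` variables read by `src/env_vars.py` and the relay suite added under `tests/relay/` in this patch; the exact image tags are illustrative, not prescribed):

```shell
# Hypothetical invocation, not part of the patch itself: override the default
# node images consumed by src/env_vars.py, then run only the relay publish tests.
NODE_1=wakuorg/nwaku:deploy-wakuv2-test \
NODE_2=wakuorg/go-waku:latest \
pytest tests/relay/test_publish.py
```

These are the same two variables that `.github/workflows/test-on-win.yml` forwards from its `workflow_dispatch` inputs, so a local run with overrides mirrors the CI job.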
diff --git a/pyrightconfig.json b/pyrightconfig.json new file mode 100644 index 0000000000..c730426126 --- /dev/null +++ b/pyrightconfig.json @@ -0,0 +1,10 @@ +{ + "include": ["src", "tests"], + "reportMissingImports": true, + "reportOptionalMemberAccess": false, + "reportGeneralTypeIssues": false, + "reportInvalidStringEscapeSequence": false, + "reportWildcardImportFromLibrary": false, + "venvPath": ".", + "venv": ".venv" +} \ No newline at end of file diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 0000000000..3412b31451 --- /dev/null +++ b/pytest.ini @@ -0,0 +1,7 @@ +[pytest] +addopts = --instafail --tb=short --color=auto +log_level = DEBUG +log_cli = True +log_file = log/test.log +log_cli_format = %(asctime)s %(name)s %(levelname)s %(message)s +log_file_format = %(asctime)s %(name)s %(levelname)s %(message)s diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000000..091f0201e6 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,10 @@ +black +docker +pre-commit +pyright +pytest +docker +pytest-instafail +pytest-xdist +requests +tenacity diff --git a/src/__init__.py b/src/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/data_classes.py b/src/data_classes.py new file mode 100644 index 0000000000..0dcff31387 --- /dev/null +++ b/src/data_classes.py @@ -0,0 +1,24 @@ +from dataclasses import dataclass +from typing import Optional + + +@dataclass +class KeyPair: + privateKey: str + publicKey: str + + +@dataclass +class MessageRpcQuery: + payload: str # Hex encoded data string without `0x` prefix. + contentTopic: Optional[str] = None + timestamp: Optional[int] = None # Unix epoch time in nanoseconds as a 64-bit integer value. + + +@dataclass +class MessageRpcResponse: + payload: str + contentTopic: Optional[str] = None + version: Optional[int] = None + timestamp: Optional[int] = None # Unix epoch time in nanoseconds as a 64-bit integer value. 
+ ephemeral: Optional[bool] = None diff --git a/src/data_storage.py b/src/data_storage.py new file mode 100644 index 0000000000..1b69a46311 --- /dev/null +++ b/src/data_storage.py @@ -0,0 +1,3 @@ +# We use this class for global variables +class DS: + waku_nodes = [] diff --git a/src/env_vars.py b/src/env_vars.py new file mode 100644 index 0000000000..eac9b3306a --- /dev/null +++ b/src/env_vars.py @@ -0,0 +1,21 @@ +import os +import logging + +logger = logging.getLogger(__name__) + + +def get_env_var(var_name, default): + env_var = os.getenv(var_name, default) + logger.debug(f"{var_name}: {env_var}") + return env_var + + +# Configuration constants +NODE_1 = get_env_var("NODE_1", "wakuorg/nwaku:deploy-wakuv2-test") +NODE_2 = get_env_var("NODE_2", "wakuorg/go-waku:latest") +LOG_DIR = get_env_var("LOG_DIR", "./log") +NETWORK_NAME = get_env_var("NETWORK_NAME", "waku") +SUBNET = get_env_var("SUBNET", "172.18.0.0/16") +IP_RANGE = get_env_var("IP_RANGE", "172.18.0.0/24") +GATEWAY = get_env_var("GATEWAY", "172.18.0.1") +DEFAULT_PUBSUBTOPIC = get_env_var("DEFAULT_PUBSUBTOPIC", "/waku/2/default-waku/proto") diff --git a/src/libs/__init__.py b/src/libs/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/libs/common.py b/src/libs/common.py new file mode 100644 index 0000000000..cd3d80705d --- /dev/null +++ b/src/libs/common.py @@ -0,0 +1,18 @@ +import base64 + + +def bytes_to_hex(byte_array): + return "".join(format(byte, "02x") for byte in byte_array) + + +def to_base64(input_data): + if isinstance(input_data, str): + input_bytes = input_data.encode() + elif isinstance(input_data, int): + input_bytes = str(input_data).encode() + elif isinstance(input_data, bytes): + input_bytes = input_data + else: + input_bytes = str(input_data).encode() + base64_encoded = base64.b64encode(input_bytes) + return base64_encoded.decode() diff --git a/src/node/__init__.py b/src/node/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/node/docker_mananger.py b/src/node/docker_mananger.py new file mode 100644 index 0000000000..d0bc36477c --- /dev/null +++ b/src/node/docker_mananger.py @@ -0,0 +1,81 @@ +import os +import logging +import random +import threading +import docker +from src.env_vars import NETWORK_NAME, SUBNET, IP_RANGE, GATEWAY +from docker.types import IPAMConfig, IPAMPool +from docker.errors import NotFound + +logger = logging.getLogger(__name__) + + +class DockerManager: + def __init__(self, image): + self._image = image + self._client = docker.from_env() + logger.debug(f"Docker client initialized with image {self._image}") + + def create_network(self, network_name=NETWORK_NAME): + logger.debug(f"Attempting to create or retrieve network '{network_name}'.") + networks = self._client.networks.list(names=[network_name]) + if networks: + logger.debug(f"Network '{network_name}' already exists.") + return networks[0] + + network = self._client.networks.create( + network_name, + driver="bridge", + ipam=IPAMConfig(driver="default", pool_configs=[IPAMPool(subnet=SUBNET, iprange=IP_RANGE, gateway=GATEWAY)]), + ) + logger.debug(f"Network '{network_name}' created.") + return network + + def start_container(self, image_name, ports, args, log_path, container_ip): + command = [f"--{key}={value}" for key, value in args.items()] + port_bindings = {f"{port}/tcp": ("", port) for port in ports} + logger.debug(f"Starting container with image '{image_name}'.") + + container = self._client.containers.run(image_name, command=command, ports=port_bindings, detach=True, 
auto_remove=True) + + network = self._client.networks.get(NETWORK_NAME) + network.connect(container, ipv4_address=container_ip) + + logger.debug(f"Container started with ID {container.short_id}. Setting up logs at '{log_path}'.") + log_thread = threading.Thread(target=self._log_container_output, args=(container, log_path)) + log_thread.daemon = True + log_thread.start() + + return container + + def _log_container_output(self, container, log_path): + os.makedirs(os.path.dirname(log_path), exist_ok=True) + with open(log_path, "wb+") as log_file: + for chunk in container.logs(stream=True): + log_file.write(chunk) + + def generate_ports(self, base_port=None, count=4): + if base_port is None: + base_port = random.randint(1024, 65535 - count) + ports = [base_port + i for i in range(count)] + logger.debug(f"Generated ports: {ports}") + return ports + + @staticmethod + def generate_random_ext_ip(): + base_ip_fragments = ["172", "18"] + ext_ip = ".".join(base_ip_fragments + [str(random.randint(0, 255)) for _ in range(2)]) + logger.debug(f"Generated random external IP: {ext_ip}") + return ext_ip + + def is_container_running(self, container): + try: + refreshed_container = self._client.containers.get(container.id) + return refreshed_container.status == "running" + except NotFound: + logger.error(f"Container with ID {container.id} not found.") + return False + + @property + def image(self): + return self._image diff --git a/src/node/waku_node.py b/src/node/waku_node.py new file mode 100644 index 0000000000..92b710f16f --- /dev/null +++ b/src/node/waku_node.py @@ -0,0 +1,156 @@ +import os +import logging +from time import time +import requests +import json +from tenacity import retry, stop_after_delay, wait_fixed +from dataclasses import asdict +from src.node.docker_mananger import DockerManager +from src.env_vars import LOG_DIR, DEFAULT_PUBSUBTOPIC +from src.data_storage import DS +from src.libs.common import bytes_to_hex + + +logger = logging.getLogger(__name__) + + +class WakuNode: + def __init__(self, docker_image, docker_log_sufix=""): + self._image_name = docker_image + self._log_path = os.path.join(LOG_DIR, f"docker_{self._image_name.replace('/', '_')}_{docker_log_sufix}.log") + self._docker_manager = DockerManager(self._image_name) + self._container = None + self._ext_ip = self._docker_manager.generate_random_ext_ip() + logger.debug(f"WakuNode instance initialized with log path: {self._log_path}") + + @retry(stop=stop_after_delay(5), wait=wait_fixed(0.1), reraise=True) + def start(self, **kwargs): + logger.debug("Starting Node...") + self._docker_manager.create_network() + ports = self._docker_manager.generate_ports() + self._rpc_port = ports[0] + self._websocket_port = ports[2] + + default_args = { + "listen-address": "0.0.0.0", + "rpc": "true", + "rpc-admin": "true", + "websocket-support": "true", + "log-level": "TRACE", + "websocket-port": str(ports[2]), + "rpc-port": str(ports[0]), + "tcp-port": str(ports[1]), + "discv5-udp-port": str(ports[3]), + "rpc-address": "0.0.0.0", + "topic": DEFAULT_PUBSUBTOPIC, + "nat": f"extip:{self._ext_ip}", + } + + if "go-waku" in self._docker_manager.image: + go_waku_args = { + "min-relay-peers-to-publish": "0", + "legacy-filter": "false", + "log-level": "DEBUG", + } + default_args.update(go_waku_args) + + for key, value in kwargs.items(): + key = key.replace("_", "-") + default_args[key] = value + + logger.debug(f"Starting container with args: {default_args}") + self._container = self._docker_manager.start_container(self._docker_manager.image, ports, 
default_args, self._log_path, self._ext_ip) + logger.debug(f"Started container from image {self._image_name}. RPC port: {self._rpc_port} and WebSocket port: {self._websocket_port}") + DS.waku_nodes.append(self) + try: + self.ensure_rpc_ready() + except Exception as e: + logger.error(f"RPC service did not become ready in time: {e}") + raise + + @retry(stop=stop_after_delay(5), wait=wait_fixed(0.1), reraise=True) + def stop(self): + if self._container: + logger.debug("Stopping container with id %s", self._container.short_id) + self._container.stop() + logger.debug("Container stopped.") + + @retry(stop=stop_after_delay(5), wait=wait_fixed(0.1), reraise=True) + def rpc_call(self, method, params=[]): + url = f"http://127.0.0.1:{self._rpc_port}" + headers = {"Content-Type": "application/json"} + payload = {"jsonrpc": "2.0", "method": method, "params": params, "id": 1} + logger.debug("RPC call payload %s", payload) + response = requests.post(url, data=json.dumps(payload), headers=headers) + return response.json() + + @retry(stop=stop_after_delay(5), wait=wait_fixed(0.05), reraise=True) + def ensure_rpc_ready(self): + self.info() + logger.debug("RPC service is ready.") + + def info(self): + return self.rpc_call("get_waku_v2_debug_v1_info", []) + + def set_subscriptions(self, pubsub_topics=[DEFAULT_PUBSUBTOPIC]): + return self.rpc_call("post_waku_v2_relay_v1_subscriptions", [pubsub_topics]) + + def send_message(self, message, pubsub_topic=DEFAULT_PUBSUBTOPIC): + if message.timestamp is None: + message.timestamp = int(time() * 1e9) + return self.rpc_call("post_waku_v2_relay_v1_message", [pubsub_topic, asdict(message)]) + + def get_messages(self, pubsub_topic=DEFAULT_PUBSUBTOPIC): + return self.rpc_call("get_waku_v2_relay_v1_messages", [pubsub_topic]) + + def get_asymmetric_key_pair(self): + response = self.rpc_call("get_waku_v2_private_v1_asymmetric_keypair", []) + seckey = response.get("seckey") + pubkey = response.get("pubkey") + private_key = response.get("privateKey") + public_key = response.get("publicKey") + if seckey: + return {"privateKey": seckey, "publicKey": pubkey} + else: + return {"privateKey": private_key, "publicKey": public_key} + + def post_asymmetric_message(self, message, public_key, pubsub_topic=None): + if not message.payload: + raise Exception("Attempting to send an empty message") + return self.rpc_call( + "post_waku_v2_private_v1_asymmetric_message", + [pubsub_topic or DEFAULT_PUBSUBTOPIC, message, "0x" + bytes_to_hex(public_key)], + ) + + def get_asymmetric_messages(self, private_key, pubsub_topic=None): + return self.rpc_call( + "get_waku_v2_private_v1_asymmetric_messages", + [pubsub_topic or DEFAULT_PUBSUBTOPIC, "0x" + bytes_to_hex(private_key)], + ) + + def get_symmetric_key(self): + return bytes.fromhex(self.rpc_call("get_waku_v2_private_v1_symmetric_key", [])) + + def post_symmetric_message(self, message, sym_key, pubsub_topic=None): + if not message.payload: + raise Exception("Attempting to send an empty message") + return self.rpc_call( + "post_waku_v2_private_v1_symmetric_message", + [pubsub_topic or DEFAULT_PUBSUBTOPIC, message, "0x" + bytes_to_hex(sym_key)], + ) + + def get_symmetric_messages(self, sym_key, pubsub_topic=None): + return self.rpc_call( + "get_waku_v2_private_v1_symmetric_messages", + [pubsub_topic or DEFAULT_PUBSUBTOPIC, "0x" + bytes_to_hex(sym_key)], + ) + + def get_peer_id(self): + # not implemented + peer_id = "" + return peer_id + + def get_multiaddr_with_id(self): + peer_id = self.get_peer_id() + multiaddr_with_id = 
f"/ip4/127.0.0.1/tcp/{self._websocket_port}/ws/p2p/{peer_id}" + return multiaddr_with_id diff --git a/src/steps/__init__.py b/src/steps/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/steps/relay.py b/src/steps/relay.py new file mode 100644 index 0000000000..26fb5eed69 --- /dev/null +++ b/src/steps/relay.py @@ -0,0 +1,32 @@ +import logging +from time import sleep +import pytest +from src.env_vars import NODE_1, NODE_2 +from src.node.waku_node import WakuNode +from tenacity import retry, stop_after_delay, wait_fixed + +logger = logging.getLogger(__name__) + + +class StepsRelay: + @pytest.fixture(scope="function", autouse=True) + def setup_nodes(self): + self.node1 = WakuNode(NODE_1) + self.node1.start(relay="true", discv5_discovery="true", peer_exchange="true") + enr_uri = self.node1.info()["result"]["enrUri"] + self.node2 = WakuNode(NODE_2) + self.node2.start(relay="true", discv5_discovery="true", discv5_bootstrap_node=enr_uri, peer_exchange="true") + self.node1.set_subscriptions() + self.node2.set_subscriptions() + self.default_content_topic = "/test/1/waku-relay" + self.default_payload = "Relay works!!" + + @retry(stop=stop_after_delay(5), wait=wait_fixed(0.1), reraise=True) + def check_published_message_reaches_peer(self, message): + self.node1.send_message(message) + sleep(0.1) + get_messages_response = self.node2.get_messages() + logger.debug("Got reponse from remote peer %s", get_messages_response) + assert get_messages_response["result"][0]["payload"] == message.payload + assert get_messages_response["result"][0]["contentTopic"] == message.contentTopic + assert get_messages_response["result"][0]["timestamp"] == message.timestamp diff --git a/src/test_data.py b/src/test_data.py new file mode 100644 index 0000000000..0a777099ef --- /dev/null +++ b/src/test_data.py @@ -0,0 +1,42 @@ +SAMPLE_INPUTS = [ + {"description": "A simple string.", "value": "Hello World!"}, + {"description": "An integer.", "value": "1234567890"}, + {"description": "A dictionary.", "value": '{"key": "value"}'}, + {"description": "Chinese characters.", "value": "这是一些中文"}, + {"description": "Emojis.", "value": "🚀🌟✨"}, + {"description": "Lorem ipsum text.", "value": "Lorem ipsum dolor sit amet"}, + {"description": "HTML content.", "value": "Hello"}, + {"description": "Cyrillic characters.", "value": "\u041f\u0440\u0438\u0432\u0435\u0442"}, + {"description": "Base64 encoded string.", "value": "Base64==dGVzdA=="}, + {"description": "Binary data.", "value": "d29ya2luZyB3aXRoIGJpbmFyeSBkYXRh: \x50\x51"}, + {"description": "Special characters with whitespace.", "value": "\t\nSpecial\tCharacters\n"}, + {"description": "Boolean false as a string.", "value": "False"}, + {"description": "A float number.", "value": "3.1415926535"}, + {"description": "A list.", "value": "[1, 2, 3, 4, 5]"}, + {"description": "Hexadecimal number as a string.", "value": "0xDEADBEEF"}, + {"description": "Email format.", "value": "user@example.com"}, + {"description": "URL format.", "value": "http://example.com"}, + {"description": "Date and time in ISO format.", "value": "2023-11-01T12:00:00Z"}, + {"description": "String with escaped quotes.", "value": '"Escaped" \\"quotes\\"'}, + {"description": "A regular expression.", "value": "Regular expression: ^[a-z0-9_-]{3,16}$"}, + {"description": "A very long string.", "value": "x" * 1000}, + {"description": "A JSON string.", "value": '{"name": "John", "age": 30, "city": "New York"}'}, + {"description": "A Unix path.", "value": "/usr/local/bin"}, + {"description": "A Windows 
path.", "value": "C:\\Windows\\System32"}, + {"description": "An SQL query.", "value": "SELECT * FROM users WHERE id = 1;"}, + {"description": "JavaScript code snippet.", "value": "function test() { console.log('Hello World'); }"}, + {"description": "A CSS snippet.", "value": "body { background-color: #fff; }"}, + {"description": "A Python one-liner.", "value": "print('Hello World')"}, + {"description": "An IP address.", "value": "192.168.1.1"}, + {"description": "A domain name.", "value": "www.example.com"}, + {"description": "A user agent string.", "value": "Mozilla/5.0 (Windows NT 10.0; Win64; x64)"}, + {"description": "A credit card number.", "value": "1234-5678-9012-3456"}, + {"description": "A phone number.", "value": "+1234567890"}, + {"description": "A UUID.", "value": "123e4567-e89b-12d3-a456-426614174000"}, + {"description": "A hashtag.", "value": "#helloWorld"}, + {"description": "A Twitter handle.", "value": "@username"}, + {"description": "A password.", "value": "P@ssw0rd!"}, + {"description": "A date in common format.", "value": "01/11/2023"}, + {"description": "A time string.", "value": "12:00:00"}, + {"description": "A mathematical equation.", "value": "E = mc^2"}, +] diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000000..d392987ae1 --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1,4 @@ +import logging + +logging.getLogger("urllib3").setLevel(logging.WARNING) +logging.getLogger("docker").setLevel(logging.WARNING) diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000000..441b21970c --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- +import logging +import pytest +from src.data_storage import DS + +logger = logging.getLogger(__name__) + + +# See https://docs.pytest.org/en/latest/example/simple.html#making-test-result-information-available-in-fixtures +@pytest.hookimpl(hookwrapper=True, tryfirst=True) +def pytest_runtest_makereport(item): + outcome = yield + rep = outcome.get_result() + if rep.when == "call": + setattr(item, "rep_call", rep) + return rep + return None + + +@pytest.fixture(scope="function", autouse=True) +def test_setup(request): + logger.debug("Running test: %s", request.node.name) + + +@pytest.fixture(scope="function", autouse=True) +def close_open_nodes(): + DS.waku_nodes = [] + yield + for node in DS.waku_nodes: + node.stop() diff --git a/tests/relay/__init__.py b/tests/relay/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/relay/test_publish.py b/tests/relay/test_publish.py new file mode 100644 index 0000000000..a44965a8e5 --- /dev/null +++ b/tests/relay/test_publish.py @@ -0,0 +1,52 @@ +import logging +from time import sleep + +from src.libs.common import to_base64 +from src.data_classes import MessageRpcQuery +from src.steps.relay import StepsRelay +from src.test_data import SAMPLE_INPUTS + +logger = logging.getLogger(__name__) + + +class TestRelayPublish(StepsRelay): + def test_publish_with_various_payloads(self): + failed_payloads = [] + for payload in SAMPLE_INPUTS: + logger.debug("Running test with payload %s", payload["description"]) + message = MessageRpcQuery(payload=to_base64(payload["value"]), contentTopic=self.default_content_topic) + try: + self.check_published_message_reaches_peer(message) + except Exception as e: + logger.error(f"Payload '{payload['description']}' failed: {str(e)}") + failed_payloads.append(payload) + assert not failed_payloads, f"Payloads failed: {failed_payloads}" + + def 
test_publish_with_various_content_topics(self): + failed_content_topics = [] + for content_topic in SAMPLE_INPUTS: + logger.debug("Running test with content topic %s", content_topic["description"]) + message = MessageRpcQuery(payload=to_base64(self.default_payload), contentTopic=content_topic["value"]) + try: + self.check_published_message_reaches_peer(message) + except Exception as e: + logger.error(f"ContentTopic '{content_topic['description']}' failed: {str(e)}") + failed_content_topics.append(content_topic) + assert not failed_content_topics, f"ContentTopics failed: {failed_content_topics}" + + # while True: + # message = MessageRpcQuery( + # payload="TTE=", + # contentTopic="/test/1/waku-filter", + # timestamp=int(time() * 1e9) + # ) + # node1.send_message(message) + # sleep(1) + # # print(node1.get_messages()) + # print(node2.get_messages()) + + # node1.stop() + # node2.stop() + + # info = node1.info() + # enr_uri = node1.info()["result"] From e56cb0784ae931af201f49b5f03e21fe91dd1a0a Mon Sep 17 00:00:00 2001 From: fbarbu15 Date: Wed, 1 Nov 2023 14:11:03 +0200 Subject: [PATCH 2/7] add missing defaults --- .github/workflows/test-on-win.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/test-on-win.yml b/.github/workflows/test-on-win.yml index 6e1d9fed5e..6590b2f1e5 100644 --- a/.github/workflows/test-on-win.yml +++ b/.github/workflows/test-on-win.yml @@ -19,13 +19,13 @@ on: env: FORCE_COLOR: "1" - NODE_1: ${{ inputs.node1 }} - NODE_2: ${{ inputs.node2 }} + NODE_1: ${{ inputs.node1 || 'wakuorg/nwaku:deploy-wakuv2-test' }} + NODE_2: ${{ inputs.node2 || 'wakuorg/go-waku:latest' }} jobs: tests: - name: Start self-hosted runners + name: tests runs-on: ubuntu-latest timeout-minutes: 30 steps: From e270742d5bd2cfa6387d6772ba32d9a254d0ad6f Mon Sep 17 00:00:00 2001 From: fbarbu15 Date: Wed, 1 Nov 2023 14:31:36 +0200 Subject: [PATCH 3/7] setup report --- .github/workflows/test-on-win.yml | 43 ------------------------------- requirements.txt | 2 +- tests/relay/test_publish.py | 19 +++----------- 3 files changed, 4 insertions(+), 60 deletions(-) delete mode 100644 .github/workflows/test-on-win.yml diff --git a/.github/workflows/test-on-win.yml b/.github/workflows/test-on-win.yml deleted file mode 100644 index 6590b2f1e5..0000000000 --- a/.github/workflows/test-on-win.yml +++ /dev/null @@ -1,43 +0,0 @@ -name: Interop Tests - -on: - schedule: - - cron: '0 3 * * *' - pull_request: - branches: - - master - workflow_dispatch: - inputs: - node1: - required: false - type: string - default: "wakuorg/nwaku:deploy-wakuv2-test" - node2: - required: false - type: string - default: "wakuorg/go-waku:latest" - -env: - FORCE_COLOR: "1" - NODE_1: ${{ inputs.node1 || 'wakuorg/nwaku:deploy-wakuv2-test' }} - NODE_2: ${{ inputs.node2 || 'wakuorg/go-waku:latest' }} - -jobs: - - tests: - name: tests - runs-on: ubuntu-latest - timeout-minutes: 30 - steps: - - - uses: actions/checkout@v4 - - - uses: actions/setup-python@v4 - with: - python-version: '3.12' - cache: 'pip' - - - run: pip install -r requirements.txt - - - name: Run tests - run: pytest diff --git a/requirements.txt b/requirements.txt index 091f0201e6..b11fd8deeb 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,9 +1,9 @@ +allure-pytest black docker pre-commit pyright pytest -docker pytest-instafail pytest-xdist requests diff --git a/tests/relay/test_publish.py b/tests/relay/test_publish.py index a44965a8e5..47d7343d5f 100644 --- a/tests/relay/test_publish.py +++ b/tests/relay/test_publish.py @@ -34,19 +34,6 
@@ class TestRelayPublish(StepsRelay): failed_content_topics.append(content_topic) assert not failed_content_topics, f"ContentTopics failed: {failed_content_topics}" - # while True: - # message = MessageRpcQuery( - # payload="TTE=", - # contentTopic="/test/1/waku-filter", - # timestamp=int(time() * 1e9) - # ) - # node1.send_message(message) - # sleep(1) - # # print(node1.get_messages()) - # print(node2.get_messages()) - - # node1.stop() - # node2.stop() - - # info = node1.info() - # enr_uri = node1.info()["result"] + def test_fail_for_report_puposes(self): + message = MessageRpcQuery(payload="", contentTopic="") + self.check_published_message_reaches_peer(message) From a98ae52d831542b72e2bac3f4e908fec59cfeb24 Mon Sep 17 00:00:00 2001 From: fbarbu15 Date: Wed, 1 Nov 2023 14:34:55 +0200 Subject: [PATCH 4/7] setup report --- .github/workflows/test.yml | 68 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 68 insertions(+) create mode 100644 .github/workflows/test.yml diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 0000000000..634f535216 --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,68 @@ +name: Interop Tests + +on: + schedule: + - cron: '0 3 * * *' + pull_request: + branches: + - master + workflow_dispatch: + inputs: + node1: + required: false + type: string + default: "wakuorg/nwaku:deploy-wakuv2-test" + node2: + required: false + type: string + default: "wakuorg/go-waku:latest" + +env: + FORCE_COLOR: "1" + NODE_1: ${{ inputs.node1 || 'wakuorg/nwaku:deploy-wakuv2-test' }} + NODE_2: ${{ inputs.node2 || 'wakuorg/go-waku:latest' }} + +jobs: + + tests: + name: tests + runs-on: ubuntu-latest + timeout-minutes: 30 + steps: + + - uses: actions/checkout@v4 + + - uses: actions/setup-python@v4 + with: + python-version: '3.12' + cache: 'pip' + + - run: pip install -r requirements.txt + + - name: Run tests + run: pytest -n 3 --reruns 5 --alluredir=allure-results + + - name: Get allure history + if: always() + uses: actions/checkout@v4 + with: + ref: gh-pages + path: gh-pages + + - name: Setup allure report + uses: simple-elf/allure-report-action@master + if: always() + id: allure-report + with: + allure_results: allure-results + gh_pages: gh-pages + allure_history: allure-history + keep_reports: 30 + + - name: Deploy report to Github Pages + uses: peaceiris/actions-gh-pages@v3 + if: always() + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + publish_branch: gh-pages + publish_dir: allure-history From 878448f654d6f5f03d49dec989dad359293ac4d7 Mon Sep 17 00:00:00 2001 From: fbarbu15 Date: Wed, 1 Nov 2023 14:36:16 +0200 Subject: [PATCH 5/7] add missing dependency --- .github/workflows/linters.yml | 2 +- requirements.txt | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/linters.yml b/.github/workflows/linters.yml index a73edbff75..07902a4d18 100644 --- a/.github/workflows/linters.yml +++ b/.github/workflows/linters.yml @@ -6,7 +6,7 @@ on: - master jobs: - pre-commit: + linters: timeout-minutes: 10 runs-on: ubuntu-latest steps: diff --git a/requirements.txt b/requirements.txt index b11fd8deeb..5800bd801b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -6,5 +6,6 @@ pyright pytest pytest-instafail pytest-xdist +pytest-rerunfailures requests tenacity From 7e76bedfc764a04138d133c693a1f7732490a845 Mon Sep 17 00:00:00 2001 From: fbarbu15 Date: Wed, 1 Nov 2023 16:44:42 +0200 Subject: [PATCH 6/7] improve reports --- .github/workflows/test.yml | 2 +- .gitignore | 1 + src/env_vars.py | 2 +- src/libs/common.py 
| 10 ++++++++++ src/node/waku_node.py | 4 ++-- src/steps/relay.py | 10 ++++++---- tests/conftest.py | 36 ++++++++++++++++++++++++++++++++++-- 7 files changed, 55 insertions(+), 10 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 634f535216..cb93d03157 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -40,7 +40,7 @@ jobs: - run: pip install -r requirements.txt - name: Run tests - run: pytest -n 3 --reruns 5 --alluredir=allure-results + run: pytest -n 3 --reruns 1 --alluredir=allure-results - name: Get allure history if: always() diff --git a/.gitignore b/.gitignore index fda8a5f799..5c9dbc1ae5 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,7 @@ # Custom log/ .vscode +allure-results/ # Byte-compiled / optimized / DLL files __pycache__/ diff --git a/src/env_vars.py b/src/env_vars.py index eac9b3306a..e226705227 100644 --- a/src/env_vars.py +++ b/src/env_vars.py @@ -10,7 +10,7 @@ def get_env_var(var_name, default): return env_var -# Configuration constants +# Configuration constants. Need to be upercase to appear in reports NODE_1 = get_env_var("NODE_1", "wakuorg/nwaku:deploy-wakuv2-test") NODE_2 = get_env_var("NODE_2", "wakuorg/go-waku:latest") LOG_DIR = get_env_var("LOG_DIR", "./log") diff --git a/src/libs/common.py b/src/libs/common.py index cd3d80705d..697e2881d3 100644 --- a/src/libs/common.py +++ b/src/libs/common.py @@ -1,4 +1,9 @@ +import logging +import os import base64 +import allure + +logger = logging.getLogger(__name__) def bytes_to_hex(byte_array): @@ -16,3 +21,8 @@ def to_base64(input_data): input_bytes = str(input_data).encode() base64_encoded = base64.b64encode(input_bytes) return base64_encoded.decode() + + +def attach_allure_file(file): + logger.debug("Attaching file %s", file) + allure.attach.file(file, name=os.path.basename(file), attachment_type=allure.attachment_type.TEXT) diff --git a/src/node/waku_node.py b/src/node/waku_node.py index 92b710f16f..7e5391bacd 100644 --- a/src/node/waku_node.py +++ b/src/node/waku_node.py @@ -15,9 +15,9 @@ logger = logging.getLogger(__name__) class WakuNode: - def __init__(self, docker_image, docker_log_sufix=""): + def __init__(self, docker_image, docker_log_prefix=""): self._image_name = docker_image - self._log_path = os.path.join(LOG_DIR, f"docker_{self._image_name.replace('/', '_')}_{docker_log_sufix}.log") + self._log_path = os.path.join(LOG_DIR, f"{docker_log_prefix}__{self._image_name.replace('/', '_')}.log") self._docker_manager = DockerManager(self._image_name) self._container = None self._ext_ip = self._docker_manager.generate_random_ext_ip() diff --git a/src/steps/relay.py b/src/steps/relay.py index 26fb5eed69..fbb0150cd1 100644 --- a/src/steps/relay.py +++ b/src/steps/relay.py @@ -1,6 +1,7 @@ import logging from time import sleep import pytest +import allure from src.env_vars import NODE_1, NODE_2 from src.node.waku_node import WakuNode from tenacity import retry, stop_after_delay, wait_fixed @@ -10,18 +11,19 @@ logger = logging.getLogger(__name__) class StepsRelay: @pytest.fixture(scope="function", autouse=True) - def setup_nodes(self): - self.node1 = WakuNode(NODE_1) + def setup_nodes(self, request): + self.node1 = WakuNode(NODE_1, request.cls.test_id) self.node1.start(relay="true", discv5_discovery="true", peer_exchange="true") enr_uri = self.node1.info()["result"]["enrUri"] - self.node2 = WakuNode(NODE_2) + self.node2 = WakuNode(NODE_2, request.cls.test_id) self.node2.start(relay="true", discv5_discovery="true", discv5_bootstrap_node=enr_uri, 
peer_exchange="true") self.node1.set_subscriptions() self.node2.set_subscriptions() self.default_content_topic = "/test/1/waku-relay" self.default_payload = "Relay works!!" - @retry(stop=stop_after_delay(5), wait=wait_fixed(0.1), reraise=True) + @allure.step + @retry(stop=stop_after_delay(2), wait=wait_fixed(0.2), reraise=True) def check_published_message_reaches_peer(self, message): self.node1.send_message(message) sleep(0.1) diff --git a/tests/conftest.py b/tests/conftest.py index 441b21970c..e12f04200d 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,6 +1,12 @@ # -*- coding: utf-8 -*- +import glob import logging +import os import pytest +from datetime import datetime +from uuid import uuid4 +from src.libs.common import attach_allure_file +import src.env_vars as env_vars from src.data_storage import DS logger = logging.getLogger(__name__) @@ -17,9 +23,35 @@ def pytest_runtest_makereport(item): return None +@pytest.fixture(scope="session", autouse=True) +def set_allure_env_variables(): + yield + if os.path.isdir("allure-results") and not os.path.isfile(os.path.join("allure-results", "environment.properties")): + with open(os.path.join("allure-results", "environment.properties"), "w") as outfile: + for attribute_name in dir(env_vars): + if attribute_name.isupper(): + attribute_value = getattr(env_vars, attribute_name) + outfile.write(f"{attribute_name}={attribute_value}\n") + + @pytest.fixture(scope="function", autouse=True) -def test_setup(request): - logger.debug("Running test: %s", request.node.name) +def test_id(request): + # setting up an unique test id to be used where needed + request.cls.test_id = f"{datetime.now().strftime('%Y-%m-%d_%H-%M-%S')}__{str(uuid4())}" + + +@pytest.fixture(scope="function", autouse=True) +def test_setup(request, test_id): + logger.debug("Running test: %s with id: %s", request.node.name, request.cls.test_id) + + +@pytest.fixture(scope="function", autouse=True) +def attach_logs_on_fail(request): + yield + if request.node.rep_call.failed: + logger.debug("Test failed, attempting to attach logs to the allure reports") + for file in glob.glob(os.path.join(env_vars.LOG_DIR, request.cls.test_id + "*")): + attach_allure_file(file) @pytest.fixture(scope="function", autouse=True) From 69e8be03713b0350763330db7b99757df9cd44c1 Mon Sep 17 00:00:00 2001 From: fbarbu15 Date: Fri, 3 Nov 2023 17:01:00 +0200 Subject: [PATCH 7/7] REST api support --- .github/workflows/test.yml | 9 ++ README.md | 11 ++- pytest.ini | 2 +- requirements.txt | 2 + src/data_classes.py | 28 +++---- src/env_vars.py | 14 ++-- src/node/api_clients/__init__.py | 0 src/node/api_clients/base_client.py | 44 ++++++++++ src/node/api_clients/rest.py | 31 +++++++ src/node/api_clients/rpc.py | 35 ++++++++ src/node/docker_mananger.py | 31 ++++--- src/node/waku_node.py | 123 ++++++++-------------------- src/steps/relay.py | 28 ++++--- tests/relay/test_publish.py | 13 +-- 14 files changed, 226 insertions(+), 145 deletions(-) create mode 100644 src/node/api_clients/__init__.py create mode 100644 src/node/api_clients/base_client.py create mode 100644 src/node/api_clients/rest.py create mode 100644 src/node/api_clients/rpc.py diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index cb93d03157..d6a9c08e7d 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -16,11 +16,20 @@ on: required: false type: string default: "wakuorg/go-waku:latest" + protocol: + description: "Protocol used to comunicate inside the network" + required: true + type: choice + default: "REST" + 
options: + - "REST" + - "RPC" env: FORCE_COLOR: "1" NODE_1: ${{ inputs.node1 || 'wakuorg/nwaku:deploy-wakuv2-test' }} NODE_2: ${{ inputs.node2 || 'wakuorg/go-waku:latest' }} + PROTOCOL: ${{ inputs.protocol || 'REST' }} jobs: diff --git a/README.md b/README.md index 016958b59c..eaf4d05ae9 100644 --- a/README.md +++ b/README.md @@ -1,8 +1,8 @@ # waku-interop-tests -Waku interop testing between various implementation of the [Waku v2 protocol](https://rfc.vac.dev/spec/10/). +Waku e2e and interop framework used to test various implementation of the [Waku v2 protocol](https://rfc.vac.dev/spec/10/). -## Setup +## Setup and contribute ```shell git clone git@github.com:waku-org/waku-interop-tests.git @@ -10,9 +10,16 @@ cd waku-interop-tests python -m venv .venv source .venv/bin/activate pip install -r requirements.txt +pre-commit install +(optional) Overwrite default vars from src/env_vars.py via cli env vars or by adding a .env file pytest ``` +## CI + +- Test runs via github actions +- [Allure Test Reports](https://waku-org.github.io/waku-interop-tests/3/) are published via github pages + ## License Licensed and distributed under either of diff --git a/pytest.ini b/pytest.ini index 3412b31451..313afa0aa3 100644 --- a/pytest.ini +++ b/pytest.ini @@ -1,5 +1,5 @@ [pytest] -addopts = --instafail --tb=short --color=auto +addopts = -s --instafail --tb=short --color=auto log_level = DEBUG log_cli = True log_file = log/test.log diff --git a/requirements.txt b/requirements.txt index 5800bd801b..38e3913adb 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,11 +1,13 @@ allure-pytest black docker +marshmallow-dataclass pre-commit pyright pytest pytest-instafail pytest-xdist pytest-rerunfailures +python-dotenv requests tenacity diff --git a/src/data_classes.py b/src/data_classes.py index 0dcff31387..4266a55ba9 100644 --- a/src/data_classes.py +++ b/src/data_classes.py @@ -1,24 +1,24 @@ -from dataclasses import dataclass +from dataclasses import dataclass, field +from marshmallow_dataclass import class_schema from typing import Optional -@dataclass -class KeyPair: - privateKey: str - publicKey: str - - @dataclass class MessageRpcQuery: - payload: str # Hex encoded data string without `0x` prefix. - contentTopic: Optional[str] = None - timestamp: Optional[int] = None # Unix epoch time in nanoseconds as a 64-bit integer value. + payload: str + contentTopic: str + timestamp: Optional[int] = None @dataclass class MessageRpcResponse: payload: str - contentTopic: Optional[str] = None - version: Optional[int] = None - timestamp: Optional[int] = None # Unix epoch time in nanoseconds as a 64-bit integer value. 
- ephemeral: Optional[bool] = None + contentTopic: str + version: Optional[int] + timestamp: int + ephemeral: Optional[bool] + rateLimitProof: Optional[dict] = field(default_factory=dict) + rate_limit_proof: Optional[dict] = field(default_factory=dict) + + +message_rpc_response_schema = class_schema(MessageRpcResponse)() diff --git a/src/env_vars.py b/src/env_vars.py index e226705227..396697e5af 100644 --- a/src/env_vars.py +++ b/src/env_vars.py @@ -1,17 +1,20 @@ import os -import logging +from dotenv import load_dotenv -logger = logging.getLogger(__name__) +load_dotenv() # This will load environment variables from a .env file if it exists -def get_env_var(var_name, default): +def get_env_var(var_name, default=None): env_var = os.getenv(var_name, default) - logger.debug(f"{var_name}: {env_var}") + if env_var is not None: + print(f"{var_name}: {env_var}") + else: + print(f"{var_name} is not set; using default value: {default}") return env_var # Configuration constants. Need to be upercase to appear in reports -NODE_1 = get_env_var("NODE_1", "wakuorg/nwaku:deploy-wakuv2-test") +NODE_1 = get_env_var("NODE_1", "wakuorg/nwaku:latest") NODE_2 = get_env_var("NODE_2", "wakuorg/go-waku:latest") LOG_DIR = get_env_var("LOG_DIR", "./log") NETWORK_NAME = get_env_var("NETWORK_NAME", "waku") @@ -19,3 +22,4 @@ SUBNET = get_env_var("SUBNET", "172.18.0.0/16") IP_RANGE = get_env_var("IP_RANGE", "172.18.0.0/24") GATEWAY = get_env_var("GATEWAY", "172.18.0.1") DEFAULT_PUBSUBTOPIC = get_env_var("DEFAULT_PUBSUBTOPIC", "/waku/2/default-waku/proto") +PROTOCOL = get_env_var("PROTOCOL", "REST") diff --git a/src/node/api_clients/__init__.py b/src/node/api_clients/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/node/api_clients/base_client.py b/src/node/api_clients/base_client.py new file mode 100644 index 0000000000..6be538f815 --- /dev/null +++ b/src/node/api_clients/base_client.py @@ -0,0 +1,44 @@ +import logging +import requests +from tenacity import retry, stop_after_delay, wait_fixed +from abc import ABC, abstractmethod + +logger = logging.getLogger(__name__) + + +class BaseClient(ABC): + # The retry decorator is applied to handle transient errors gracefully. This is particularly + # useful when running tests in parallel, where occasional network-related errors such as + # connection drops, timeouts, or temporary unavailability of a service can occur. Retrying + # ensures that such intermittent issues don't cause the tests to fail outright. 
+    @retry(stop=stop_after_delay(2), wait=wait_fixed(0.1), reraise=True)
+    def make_request(self, method, url, headers=None, data=None):
+        logger.debug("%s call: %s with payload: %s", method.upper(), url, data)
+        response = requests.request(method.upper(), url, headers=headers, data=data)
+        try:
+            response.raise_for_status()
+        except requests.HTTPError as http_err:
+            logger.error("HTTP error occurred: %s", http_err)
+            raise
+        except Exception as err:
+            logger.error("An error occurred: %s", err)
+            raise
+        else:
+            logger.info("Response status code: %s", response.status_code)
+            return response
+
+    @abstractmethod
+    def info(self):
+        pass
+
+    @abstractmethod
+    def set_subscriptions(self, pubsub_topics):
+        pass
+
+    @abstractmethod
+    def send_message(self, message, pubsub_topic):
+        pass
+
+    @abstractmethod
+    def get_messages(self, pubsub_topic):
+        pass
diff --git a/src/node/api_clients/rest.py b/src/node/api_clients/rest.py
new file mode 100644
index 0000000000..47ecbf79c9
--- /dev/null
+++ b/src/node/api_clients/rest.py
@@ -0,0 +1,31 @@
+import logging
+import json
+from dataclasses import asdict
+from urllib.parse import quote
+from src.node.api_clients.base_client import BaseClient
+
+logger = logging.getLogger(__name__)
+
+
+class REST(BaseClient):
+    def __init__(self, rest_port):
+        self._rest_port = rest_port
+
+    def rest_call(self, method, endpoint, payload=None):
+        url = f"http://127.0.0.1:{self._rest_port}/{endpoint}"
+        headers = {"Content-Type": "application/json"}
+        return self.make_request(method, url, headers=headers, data=payload)
+
+    def info(self):
+        info_response = self.rest_call("get", "debug/v1/info")
+        return info_response.json()
+
+    def set_subscriptions(self, pubsub_topics):
+        return self.rest_call("post", "relay/v1/subscriptions", json.dumps(pubsub_topics))
+
+    def send_message(self, message, pubsub_topic):
+        return self.rest_call("post", f"relay/v1/messages/{quote(pubsub_topic, safe='')}", json.dumps(asdict(message)))
+
+    def get_messages(self, pubsub_topic):
+        get_messages_response = self.rest_call("get", f"relay/v1/messages/{quote(pubsub_topic, safe='')}")
+        return get_messages_response.json()
diff --git a/src/node/api_clients/rpc.py b/src/node/api_clients/rpc.py
new file mode 100644
index 0000000000..2ed32c5e94
--- /dev/null
+++ b/src/node/api_clients/rpc.py
@@ -0,0 +1,35 @@
+import logging
+import json
+from dataclasses import asdict
+from src.node.api_clients.base_client import BaseClient
+
+logger = logging.getLogger(__name__)
+
+
+class RPC(BaseClient):
+    def __init__(self, rpc_port, image_name):
+        self._image_name = image_name
+        self._rpc_port = rpc_port
+
+    def rpc_call(self, endpoint, params=[]):
+        url = f"http://127.0.0.1:{self._rpc_port}"
+        headers = {"Content-Type": "application/json"}
+        payload = {"jsonrpc": "2.0", "method": endpoint, "params": params, "id": 1}
+        return self.make_request("post", url, headers=headers, data=json.dumps(payload))
+
+    def info(self):
+        info_response = self.rpc_call("get_waku_v2_debug_v1_info", [])
+        return info_response.json()["result"]
+
+    def set_subscriptions(self, pubsub_topics):
+        if "nwaku" in self._image_name:
+            return self.rpc_call("post_waku_v2_relay_v1_subscriptions", [pubsub_topics])
+        else:
+            return self.rpc_call("post_waku_v2_relay_v1_subscription", [pubsub_topics])
+
+    def send_message(self, message, pubsub_topic):
+        return self.rpc_call("post_waku_v2_relay_v1_message", [pubsub_topic, asdict(message)])
+
+    def get_messages(self, pubsub_topic):
+        get_messages_response = self.rpc_call("get_waku_v2_relay_v1_messages", [pubsub_topic])
+        return get_messages_response.json()["result"]
diff --git a/src/node/docker_mananger.py b/src/node/docker_mananger.py
index d0bc36477c..4e76bf300b 100644
--- a/src/node/docker_mananger.py
+++ b/src/node/docker_mananger.py
@@ -14,13 +14,13 @@ class DockerManager:
     def __init__(self, image):
         self._image = image
         self._client = docker.from_env()
-        logger.debug(f"Docker client initialized with image {self._image}")
+        logger.debug("Docker client initialized with image %s", self._image)
 
     def create_network(self, network_name=NETWORK_NAME):
-        logger.debug(f"Attempting to create or retrieve network '{network_name}'.")
+        logger.debug("Attempting to create or retrieve network %s", network_name)
         networks = self._client.networks.list(names=[network_name])
         if networks:
-            logger.debug(f"Network '{network_name}' already exists.")
+            logger.debug("Network %s already exists", network_name)
             return networks[0]
 
         network = self._client.networks.create(
@@ -28,20 +28,25 @@ class DockerManager:
             driver="bridge",
             ipam=IPAMConfig(driver="default", pool_configs=[IPAMPool(subnet=SUBNET, iprange=IP_RANGE, gateway=GATEWAY)]),
         )
-        logger.debug(f"Network '{network_name}' created.")
+        logger.debug("Network %s created", network_name)
         return network
 
     def start_container(self, image_name, ports, args, log_path, container_ip):
-        command = [f"--{key}={value}" for key, value in args.items()]
+        cli_args = []
+        for key, value in args.items():
+            if isinstance(value, list):  # Check if value is a list
+                cli_args.extend([f"--{key}={item}" for item in value])  # Add a command for each item in the list
+            else:
+                cli_args.append(f"--{key}={value}")  # Add a single command
         port_bindings = {f"{port}/tcp": ("", port) for port in ports}
-        logger.debug(f"Starting container with image '{image_name}'.")
-
-        container = self._client.containers.run(image_name, command=command, ports=port_bindings, detach=True, auto_remove=True)
+        logger.debug("Starting container with image %s", image_name)
+        logger.debug("Using args %s", cli_args)
+        container = self._client.containers.run(image_name, command=cli_args, ports=port_bindings, detach=True, remove=True, auto_remove=True)
         network = self._client.networks.get(NETWORK_NAME)
         network.connect(container, ipv4_address=container_ip)
-        logger.debug(f"Container started with ID {container.short_id}. Setting up logs at '{log_path}'.")
+        logger.debug("Container started with ID %s. Setting up logs at %s", container.short_id, log_path)
Setting up logs at %s", container.short_id, log_path) log_thread = threading.Thread(target=self._log_container_output, args=(container, log_path)) log_thread.daemon = True log_thread.start() @@ -54,18 +59,18 @@ class DockerManager: for chunk in container.logs(stream=True): log_file.write(chunk) - def generate_ports(self, base_port=None, count=4): + def generate_ports(self, base_port=None, count=5): if base_port is None: base_port = random.randint(1024, 65535 - count) ports = [base_port + i for i in range(count)] - logger.debug(f"Generated ports: {ports}") + logger.debug("Generated ports %s", ports) return ports @staticmethod def generate_random_ext_ip(): base_ip_fragments = ["172", "18"] ext_ip = ".".join(base_ip_fragments + [str(random.randint(0, 255)) for _ in range(2)]) - logger.debug(f"Generated random external IP: {ext_ip}") + logger.debug("Generated random external IP %s", ext_ip) return ext_ip def is_container_running(self, container): @@ -73,7 +78,7 @@ class DockerManager: refreshed_container = self._client.containers.get(container.id) return refreshed_container.status == "running" except NotFound: - logger.error(f"Container with ID {container.id} not found.") + logger.error("Container with ID %s not found", container.id) return False @property diff --git a/src/node/waku_node.py b/src/node/waku_node.py index 7e5391bacd..13fb3dd837 100644 --- a/src/node/waku_node.py +++ b/src/node/waku_node.py @@ -1,15 +1,11 @@ import os import logging -from time import time -import requests -import json from tenacity import retry, stop_after_delay, wait_fixed -from dataclasses import asdict +from src.node.api_clients.rpc import RPC +from src.node.api_clients.rest import REST from src.node.docker_mananger import DockerManager -from src.env_vars import LOG_DIR, DEFAULT_PUBSUBTOPIC +from src.env_vars import LOG_DIR, DEFAULT_PUBSUBTOPIC, PROTOCOL from src.data_storage import DS -from src.libs.common import bytes_to_hex - logger = logging.getLogger(__name__) @@ -21,29 +17,40 @@ class WakuNode: self._docker_manager = DockerManager(self._image_name) self._container = None self._ext_ip = self._docker_manager.generate_random_ext_ip() - logger.debug(f"WakuNode instance initialized with log path: {self._log_path}") + self._ports = self._docker_manager.generate_ports() + self._rest_port = self._ports[0] + self._rpc_port = self._ports[1] + self._websocket_port = self._ports[2] + logger.debug("WakuNode instance initialized with log path %s", self._log_path) + if PROTOCOL == "RPC": + self._api = RPC(self._rpc_port, self._image_name) + elif PROTOCOL == "REST": + self._api = REST(self._rest_port) + else: + raise ValueError(f"Unknown protocol: {PROTOCOL}") @retry(stop=stop_after_delay(5), wait=wait_fixed(0.1), reraise=True) def start(self, **kwargs): logger.debug("Starting Node...") self._docker_manager.create_network() - ports = self._docker_manager.generate_ports() - self._rpc_port = ports[0] - self._websocket_port = ports[2] default_args = { "listen-address": "0.0.0.0", "rpc": "true", "rpc-admin": "true", + "rest": "true", + "rest-admin": "true", "websocket-support": "true", "log-level": "TRACE", - "websocket-port": str(ports[2]), - "rpc-port": str(ports[0]), - "tcp-port": str(ports[1]), - "discv5-udp-port": str(ports[3]), + "websocket-port": str(self._ports[3]), + "rpc-port": self._rpc_port, + "rest-port": self._rest_port, + "tcp-port": str(self._ports[2]), + "discv5-udp-port": str(self._ports[4]), "rpc-address": "0.0.0.0", - "topic": DEFAULT_PUBSUBTOPIC, + "rest-address": "0.0.0.0", "nat": 
f"extip:{self._ext_ip}", + "pubsub-topic": DEFAULT_PUBSUBTOPIC, } if "go-waku" in self._docker_manager.image: @@ -58,14 +65,15 @@ class WakuNode: key = key.replace("_", "-") default_args[key] = value - logger.debug(f"Starting container with args: {default_args}") - self._container = self._docker_manager.start_container(self._docker_manager.image, ports, default_args, self._log_path, self._ext_ip) - logger.debug(f"Started container from image {self._image_name}. RPC port: {self._rpc_port} and WebSocket port: {self._websocket_port}") + self._container = self._docker_manager.start_container(self._docker_manager.image, self._ports, default_args, self._log_path, self._ext_ip) + logger.debug( + "Started container from image %s. RPC: %s REST: %s WebSocket: %s", self._image_name, self._rpc_port, self._rest_port, self._websocket_port + ) DS.waku_nodes.append(self) try: - self.ensure_rpc_ready() + self.ensure_ready() except Exception as e: - logger.error(f"RPC service did not become ready in time: {e}") + logger.error("%s service did not become ready in time: %s", PROTOCOL, e) raise @retry(stop=stop_after_delay(5), wait=wait_fixed(0.1), reraise=True) @@ -75,82 +83,19 @@ class WakuNode: self._container.stop() logger.debug("Container stopped.") - @retry(stop=stop_after_delay(5), wait=wait_fixed(0.1), reraise=True) - def rpc_call(self, method, params=[]): - url = f"http://127.0.0.1:{self._rpc_port}" - headers = {"Content-Type": "application/json"} - payload = {"jsonrpc": "2.0", "method": method, "params": params, "id": 1} - logger.debug("RPC call payload %s", payload) - response = requests.post(url, data=json.dumps(payload), headers=headers) - return response.json() - @retry(stop=stop_after_delay(5), wait=wait_fixed(0.05), reraise=True) - def ensure_rpc_ready(self): + def ensure_ready(self): self.info() logger.debug("RPC service is ready.") def info(self): - return self.rpc_call("get_waku_v2_debug_v1_info", []) + return self._api.info() def set_subscriptions(self, pubsub_topics=[DEFAULT_PUBSUBTOPIC]): - return self.rpc_call("post_waku_v2_relay_v1_subscriptions", [pubsub_topics]) + return self._api.set_subscriptions(pubsub_topics) def send_message(self, message, pubsub_topic=DEFAULT_PUBSUBTOPIC): - if message.timestamp is None: - message.timestamp = int(time() * 1e9) - return self.rpc_call("post_waku_v2_relay_v1_message", [pubsub_topic, asdict(message)]) + return self._api.send_message(message, pubsub_topic) def get_messages(self, pubsub_topic=DEFAULT_PUBSUBTOPIC): - return self.rpc_call("get_waku_v2_relay_v1_messages", [pubsub_topic]) - - def get_asymmetric_key_pair(self): - response = self.rpc_call("get_waku_v2_private_v1_asymmetric_keypair", []) - seckey = response.get("seckey") - pubkey = response.get("pubkey") - private_key = response.get("privateKey") - public_key = response.get("publicKey") - if seckey: - return {"privateKey": seckey, "publicKey": pubkey} - else: - return {"privateKey": private_key, "publicKey": public_key} - - def post_asymmetric_message(self, message, public_key, pubsub_topic=None): - if not message.payload: - raise Exception("Attempting to send an empty message") - return self.rpc_call( - "post_waku_v2_private_v1_asymmetric_message", - [pubsub_topic or DEFAULT_PUBSUBTOPIC, message, "0x" + bytes_to_hex(public_key)], - ) - - def get_asymmetric_messages(self, private_key, pubsub_topic=None): - return self.rpc_call( - "get_waku_v2_private_v1_asymmetric_messages", - [pubsub_topic or DEFAULT_PUBSUBTOPIC, "0x" + bytes_to_hex(private_key)], - ) - - def get_symmetric_key(self): - 
-        return bytes.fromhex(self.rpc_call("get_waku_v2_private_v1_symmetric_key", []))
-
-    def post_symmetric_message(self, message, sym_key, pubsub_topic=None):
-        if not message.payload:
-            raise Exception("Attempting to send an empty message")
-        return self.rpc_call(
-            "post_waku_v2_private_v1_symmetric_message",
-            [pubsub_topic or DEFAULT_PUBSUBTOPIC, message, "0x" + bytes_to_hex(sym_key)],
-        )
-
-    def get_symmetric_messages(self, sym_key, pubsub_topic=None):
-        return self.rpc_call(
-            "get_waku_v2_private_v1_symmetric_messages",
-            [pubsub_topic or DEFAULT_PUBSUBTOPIC, "0x" + bytes_to_hex(sym_key)],
-        )
-
-    def get_peer_id(self):
-        # not implemented
-        peer_id = ""
-        return peer_id
-
-    def get_multiaddr_with_id(self):
-        peer_id = self.get_peer_id()
-        multiaddr_with_id = f"/ip4/127.0.0.1/tcp/{self._websocket_port}/ws/p2p/{peer_id}"
-        return multiaddr_with_id
+        return self._api.get_messages(pubsub_topic)
diff --git a/src/steps/relay.py b/src/steps/relay.py
index fbb0150cd1..6ba381c596 100644
--- a/src/steps/relay.py
+++ b/src/steps/relay.py
@@ -1,7 +1,8 @@
 import logging
-from time import sleep
+from time import sleep, time
 import pytest
 import allure
+from src.data_classes import message_rpc_response_schema
 from src.env_vars import NODE_1, NODE_2
 from src.node.waku_node import WakuNode
 from tenacity import retry, stop_after_delay, wait_fixed
@@ -14,21 +15,24 @@ class StepsRelay:
     def setup_nodes(self, request):
         self.node1 = WakuNode(NODE_1, request.cls.test_id)
         self.node1.start(relay="true", discv5_discovery="true", peer_exchange="true")
-        enr_uri = self.node1.info()["result"]["enrUri"]
+        enr_uri = self.node1.info()["enrUri"]
         self.node2 = WakuNode(NODE_2, request.cls.test_id)
         self.node2.start(relay="true", discv5_discovery="true", discv5_bootstrap_node=enr_uri, peer_exchange="true")
-        self.node1.set_subscriptions()
-        self.node2.set_subscriptions()
-        self.default_content_topic = "/test/1/waku-relay"
-        self.default_payload = "Relay works!!"
+        self.test_pubsub_topic = "test"
+        self.test_content_topic = "/test/1/waku-relay"
+        self.test_payload = "Relay works!!"
+        self.node1.set_subscriptions([self.test_pubsub_topic])
+        self.node2.set_subscriptions([self.test_pubsub_topic])
 
     @allure.step
-    @retry(stop=stop_after_delay(2), wait=wait_fixed(0.2), reraise=True)
+    @retry(stop=stop_after_delay(20), wait=wait_fixed(0.5), reraise=True)
     def check_published_message_reaches_peer(self, message):
-        self.node1.send_message(message)
+        message.timestamp = int(time() * 1e9)
+        self.node1.send_message(message, self.test_pubsub_topic)
         sleep(0.1)
-        get_messages_response = self.node2.get_messages()
+        get_messages_response = self.node2.get_messages(self.test_pubsub_topic)
         logger.debug("Got reponse from remote peer %s", get_messages_response)
-        assert get_messages_response["result"][0]["payload"] == message.payload
-        assert get_messages_response["result"][0]["contentTopic"] == message.contentTopic
-        assert get_messages_response["result"][0]["timestamp"] == message.timestamp
+        received_message = message_rpc_response_schema.load(get_messages_response[0])
+        assert received_message.payload == message.payload
+        assert received_message.contentTopic == message.contentTopic
+        assert received_message.timestamp == message.timestamp
diff --git a/tests/relay/test_publish.py b/tests/relay/test_publish.py
index 47d7343d5f..0e88560e82 100644
--- a/tests/relay/test_publish.py
+++ b/tests/relay/test_publish.py
@@ -1,5 +1,4 @@
 import logging
-from time import sleep
 
 from src.libs.common import to_base64
 from src.data_classes import MessageRpcQuery
@@ -14,11 +13,11 @@ class TestRelayPublish(StepsRelay):
         failed_payloads = []
         for payload in SAMPLE_INPUTS:
             logger.debug("Running test with payload %s", payload["description"])
-            message = MessageRpcQuery(payload=to_base64(payload["value"]), contentTopic=self.default_content_topic)
+            message = MessageRpcQuery(payload=to_base64(payload["value"]), contentTopic=self.test_content_topic)
             try:
                 self.check_published_message_reaches_peer(message)
             except Exception as e:
-                logger.error(f"Payload '{payload['description']}' failed: {str(e)}")
+                logger.error("Payload %s failed: %s", payload["description"], str(e))
                 failed_payloads.append(payload)
         assert not failed_payloads, f"Payloads failed: {failed_payloads}"
 
@@ -26,14 +25,10 @@ class TestRelayPublish(StepsRelay):
         failed_content_topics = []
         for content_topic in SAMPLE_INPUTS:
             logger.debug("Running test with content topic %s", content_topic["description"])
-            message = MessageRpcQuery(payload=to_base64(self.default_payload), contentTopic=content_topic["value"])
+            message = MessageRpcQuery(payload=to_base64(self.test_payload), contentTopic=content_topic["value"])
             try:
                 self.check_published_message_reaches_peer(message)
             except Exception as e:
-                logger.error(f"ContentTopic '{content_topic['description']}' failed: {str(e)}")
+                logger.error("ContentTopic %s failed: %s", content_topic["description"], str(e))
                 failed_content_topics.append(content_topic)
         assert not failed_content_topics, f"ContentTopics failed: {failed_content_topics}"
-
-    def test_fail_for_report_puposes(self):
-        message = MessageRpcQuery(payload="", contentTopic="")
-        self.check_published_message_reaches_peer(message)