Merge pull request #1 from waku-org/tests/relay-publish
Tests/relay publish
commit 88b1fa3e90
@ -0,0 +1,20 @@
## Problem

<!--
Describe in detail the problem or scenario that this PR is fixing.

If this is a feature addition or change, then focus on WHY you are making the change.

If this is a bug fix, please describe why the old behavior was problematic.
-->

## Solution

<!-- describe the new behavior -->

## Notes

<!-- Remove items that are not relevant -->

- Resolves <issue number>
- Related to <link to specs>
@ -0,0 +1,33 @@
name: Code Linters

on:
  pull_request:
    branches:
      - master

jobs:
  linters:
    timeout-minutes: 10
    runs-on: ubuntu-latest
    steps:

      - uses: actions/checkout@v4

      - uses: actions/setup-python@v4
        with:
          python-version: '3.12'
          cache: 'pip'

      - name: Set up virtual environment
        run: |
          python -m venv .venv
          echo ".venv/bin" >> $GITHUB_PATH # Add virtualenv to PATH for subsequent steps

      - name: Install dependencies based on requirements.txt
        run: pip install -r requirements.txt

      - name: Install pre-commit
        run: pip install pre-commit

      - name: Run pre-commit hooks
        run: pre-commit run --all-files
@ -0,0 +1,77 @@
name: Interop Tests

on:
  schedule:
    - cron: '0 3 * * *'
  pull_request:
    branches:
      - master
  workflow_dispatch:
    inputs:
      node1:
        required: false
        type: string
        default: "wakuorg/nwaku:deploy-wakuv2-test"
      node2:
        required: false
        type: string
        default: "wakuorg/go-waku:latest"
      protocol:
        description: "Protocol used to communicate inside the network"
        required: true
        type: choice
        default: "REST"
        options:
          - "REST"
          - "RPC"

env:
  FORCE_COLOR: "1"
  NODE_1: ${{ inputs.node1 || 'wakuorg/nwaku:deploy-wakuv2-test' }}
  NODE_2: ${{ inputs.node2 || 'wakuorg/go-waku:latest' }}
  PROTOCOL: ${{ inputs.protocol || 'REST' }}

jobs:

  tests:
    name: tests
    runs-on: ubuntu-latest
    timeout-minutes: 30
    steps:

      - uses: actions/checkout@v4

      - uses: actions/setup-python@v4
        with:
          python-version: '3.12'
          cache: 'pip'

      - run: pip install -r requirements.txt

      - name: Run tests
        run: pytest -n 3 --reruns 1 --alluredir=allure-results

      - name: Get allure history
        if: always()
        uses: actions/checkout@v4
        with:
          ref: gh-pages
          path: gh-pages

      - name: Setup allure report
        uses: simple-elf/allure-report-action@master
        if: always()
        id: allure-report
        with:
          allure_results: allure-results
          gh_pages: gh-pages
          allure_history: allure-history
          keep_reports: 30

      - name: Deploy report to GitHub Pages
        uses: peaceiris/actions-gh-pages@v3
        if: always()
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          publish_branch: gh-pages
          publish_dir: allure-history
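The "Run tests" step above is also usable as-is for a local run; the Allure results it writes can then be viewed with the Allure commandline, assuming that CLI is installed separately (it is not part of requirements.txt):

```shell
pytest -n 3 --reruns 1 --alluredir=allure-results
allure serve allure-results  # assumes the Allure CLI is installed locally
```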
@ -0,0 +1,104 @@
# Custom
log/
.vscode
allure-results/

# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/

# Jupyter Notebook
.ipynb_checkpoints

# pyenv
.python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv’s lock file may be unsuitable.
#Pipfile.lock

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.envrc
.venv
venv/
ENV/
env/
ENV.bak/
env.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/
@ -0,0 +1,11 @@
repos:
  - repo: https://github.com/psf/black
    rev: 23.7.0
    hooks:
      - id: black
        args: [--line-length=150]

  - repo: https://github.com/RobertCraigie/pyright-python
    rev: v1.1.326
    hooks:
      - id: pyright
@ -0,0 +1,202 @@

                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by Licensor and
      subsequently incorporated within the Work.

   2. Grant of Copyright License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      copyright license to reproduce, prepare Derivative Works of,
      publicly display, publicly perform, sublicense, and distribute the
      Work and such Derivative Works in Source or Object form.

   3. Grant of Patent License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      (except as stated in this section) patent license to make, have made,
      use, offer to sell, sell, import, and otherwise transfer the Work,
      where such license applies only to those patent claims licensable
      by such Contributor that are necessarily infringed by their
      Contribution(s) alone or by combination of their Contribution(s)
      with the Work to which such Contribution(s) was submitted. If You
      institute patent litigation against any entity (including a
      cross-claim or counterclaim in a lawsuit) alleging that the Work
      or a Contribution incorporated within the Work constitutes direct
      or contributory patent infringement, then any patent licenses
      granted to You under this License for that Work shall terminate
      as of the date such litigation is filed.

   4. Redistribution. You may reproduce and distribute copies of the
      Work or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You
      meet the following conditions:

      (a) You must give any other recipients of the Work or
          Derivative Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works
          that You distribute, all copyright, patent, trademark, and
          attribution notices from the Source form of the Work,
          excluding those notices that do not pertain to any part of
          the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding those notices that do not
          pertain to any part of the Derivative Works, in at least one
          of the following places: within a NOTICE text file distributed
          as part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and
          do not modify the License. You may add Your own attribution
          notices within Derivative Works that You distribute, alongside
          or as an addendum to the NOTICE text from the Work, provided
          that such additional attribution notices cannot be construed
          as modifying the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions
      for use, reproduction, or distribution of Your modifications, or
      for any such Derivative Works as a whole, provided Your use,
      reproduction, and distribution of the Work otherwise complies with
      the conditions stated in this License.

   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.

   END OF TERMS AND CONDITIONS

   APPENDIX: How to apply the Apache License to your work.

      To apply the Apache License to your work, attach the following
      boilerplate notice, with the fields enclosed by brackets "[]"
      replaced with your own identifying information. (Don't include
      the brackets!) The text should be enclosed in the appropriate
      comment syntax for the file format. We also recommend that a
      file or class name and description of purpose be included on the
      same "printed page" as the copyright notice for easier
      identification within third-party archives.

   Copyright 2018 Status Research & Development GmbH

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
@ -0,0 +1,21 @@
The MIT License (MIT)

Copyright (c) 2021 Status Research & Development GmbH

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
README.md
@ -1 +1,33 @@
# waku-interop-tests

Waku e2e and interop framework used to test various implementations of the [Waku v2 protocol](https://rfc.vac.dev/spec/10/).

## Setup and contribute

```shell
git clone git@github.com:waku-org/waku-interop-tests.git
cd waku-interop-tests
python -m venv .venv
source .venv/bin/activate
pip install -r requirements.txt
pre-commit install
# (optional) overwrite default vars from src/env_vars.py via CLI env vars or by adding a .env file
pytest
```
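The defaults in `src/env_vars.py` (for example `NODE_1`, `NODE_2`, `PROTOCOL`) can be overridden either on the command line or from a `.env` file picked up by `python-dotenv`. A minimal sketch of such a file, with purely illustrative values:

```shell
# .env (illustrative; any variable defined in src/env_vars.py can be set here)
NODE_1=wakuorg/nwaku:latest
NODE_2=wakuorg/go-waku:latest
PROTOCOL=RPC
```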

## CI

- Tests run via GitHub Actions
- [Allure Test Reports](https://waku-org.github.io/waku-interop-tests/3/) are published via GitHub Pages

## License

Licensed and distributed under either of

- MIT license: [LICENSE-MIT](https://github.com/waku-org/js-waku/blob/master/LICENSE-MIT) or http://opensource.org/licenses/MIT

or

- Apache License, Version 2.0, ([LICENSE-APACHE-v2](https://github.com/waku-org/js-waku/blob/master/LICENSE-APACHE-v2) or http://www.apache.org/licenses/LICENSE-2.0)

at your option. These files may not be copied, modified, or distributed except according to those terms.
@ -0,0 +1,10 @@
{
    "include": ["src", "tests"],
    "reportMissingImports": true,
    "reportOptionalMemberAccess": false,
    "reportGeneralTypeIssues": false,
    "reportInvalidStringEscapeSequence": false,
    "reportWildcardImportFromLibrary": false,
    "venvPath": ".",
    "venv": ".venv"
}
@ -0,0 +1,7 @@
[pytest]
addopts = -s --instafail --tb=short --color=auto
log_level = DEBUG
log_cli = True
log_file = log/test.log
log_cli_format = %(asctime)s %(name)s %(levelname)s %(message)s
log_file_format = %(asctime)s %(name)s %(levelname)s %(message)s
@ -0,0 +1,13 @@
allure-pytest
black
docker
marshmallow-dataclass
pre-commit
pyright
pytest
pytest-instafail
pytest-xdist
pytest-rerunfailures
python-dotenv
requests
tenacity
@ -0,0 +1,24 @@
from dataclasses import dataclass, field
from marshmallow_dataclass import class_schema
from typing import Optional


@dataclass
class MessageRpcQuery:
    payload: str
    contentTopic: str
    timestamp: Optional[int] = None


@dataclass
class MessageRpcResponse:
    payload: str
    contentTopic: str
    version: Optional[int]
    timestamp: int
    ephemeral: Optional[bool]
    rateLimitProof: Optional[dict] = field(default_factory=dict)
    rate_limit_proof: Optional[dict] = field(default_factory=dict)


message_rpc_response_schema = class_schema(MessageRpcResponse)()
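As a usage sketch (with made-up values), the generated schema at the end of this module is what the relay steps later use to turn a dict returned by a node into a typed `MessageRpcResponse`:

```python
# Illustrative only: deserializing a node response dict with the generated schema.
raw = {
    "payload": "UmVsYXkgd29ya3MhIQ==",
    "contentTopic": "/test/1/waku-relay",
    "version": 0,
    "timestamp": 1700000000000000000,
    "ephemeral": False,
}
msg = message_rpc_response_schema.load(raw)
assert msg.contentTopic == "/test/1/waku-relay"
```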
@ -0,0 +1,3 @@
# We use this class for global variables
class DS:
    waku_nodes = []
@ -0,0 +1,25 @@
import os
from dotenv import load_dotenv

load_dotenv()  # This will load environment variables from a .env file if it exists


def get_env_var(var_name, default=None):
    env_var = os.getenv(var_name, default)
    if env_var is not None:
        print(f"{var_name}: {env_var}")
    else:
        print(f"{var_name} is not set; using default value: {default}")
    return env_var


# Configuration constants. Need to be uppercase to appear in reports
NODE_1 = get_env_var("NODE_1", "wakuorg/nwaku:latest")
NODE_2 = get_env_var("NODE_2", "wakuorg/go-waku:latest")
LOG_DIR = get_env_var("LOG_DIR", "./log")
NETWORK_NAME = get_env_var("NETWORK_NAME", "waku")
SUBNET = get_env_var("SUBNET", "172.18.0.0/16")
IP_RANGE = get_env_var("IP_RANGE", "172.18.0.0/24")
GATEWAY = get_env_var("GATEWAY", "172.18.0.1")
DEFAULT_PUBSUBTOPIC = get_env_var("DEFAULT_PUBSUBTOPIC", "/waku/2/default-waku/proto")
PROTOCOL = get_env_var("PROTOCOL", "REST")
@ -0,0 +1,28 @@
import logging
import os
import base64
import allure

logger = logging.getLogger(__name__)


def bytes_to_hex(byte_array):
    return "".join(format(byte, "02x") for byte in byte_array)


def to_base64(input_data):
    if isinstance(input_data, str):
        input_bytes = input_data.encode()
    elif isinstance(input_data, int):
        input_bytes = str(input_data).encode()
    elif isinstance(input_data, bytes):
        input_bytes = input_data
    else:
        input_bytes = str(input_data).encode()
    base64_encoded = base64.b64encode(input_bytes)
    return base64_encoded.decode()


def attach_allure_file(file):
    logger.debug("Attaching file %s", file)
    allure.attach.file(file, name=os.path.basename(file), attachment_type=allure.attachment_type.TEXT)
@ -0,0 +1,44 @@
import logging
import requests
from tenacity import retry, stop_after_delay, wait_fixed
from abc import ABC, abstractmethod

logger = logging.getLogger(__name__)


class BaseClient(ABC):
    # The retry decorator is applied to handle transient errors gracefully. This is particularly
    # useful when running tests in parallel, where occasional network-related errors such as
    # connection drops, timeouts, or temporary unavailability of a service can occur. Retrying
    # ensures that such intermittent issues don't cause the tests to fail outright.
    @retry(stop=stop_after_delay(2), wait=wait_fixed(0.1), reraise=True)
    def make_request(self, method, url, headers=None, data=None):
        logger.debug("%s call: %s with payload: %s", method.upper(), url, data)
        response = requests.request(method.upper(), url, headers=headers, data=data)
        try:
            response.raise_for_status()
        except requests.HTTPError as http_err:
            logger.error("HTTP error occurred: %s", http_err)
            raise
        except Exception as err:
            logger.error("An error occurred: %s", err)
            raise
        else:
            logger.info("Response status code: %s", response.status_code)
            return response

    @abstractmethod
    def info(self):
        pass

    @abstractmethod
    def set_subscriptions(self, pubsub_topics):
        pass

    @abstractmethod
    def send_message(self, message, pubsub_topic):
        pass

    @abstractmethod
    def get_messages(self, pubsub_topic):
        pass
@ -0,0 +1,31 @@
import logging
import json
from dataclasses import asdict
from urllib.parse import quote
from src.node.api_clients.base_client import BaseClient

logger = logging.getLogger(__name__)


class REST(BaseClient):
    def __init__(self, rest_port):
        self._rest_port = rest_port

    def rest_call(self, method, endpoint, payload=None):
        url = f"http://127.0.0.1:{self._rest_port}/{endpoint}"
        headers = {"Content-Type": "application/json"}
        return self.make_request(method, url, headers=headers, data=payload)

    def info(self):
        info_response = self.rest_call("get", "debug/v1/info")
        return info_response.json()

    def set_subscriptions(self, pubsub_topics):
        return self.rest_call("post", "relay/v1/subscriptions", json.dumps(pubsub_topics))

    def send_message(self, message, pubsub_topic):
        return self.rest_call("post", f"relay/v1/messages/{quote(pubsub_topic, safe='')}", json.dumps(asdict(message)))

    def get_messages(self, pubsub_topic):
        get_messages_response = self.rest_call("get", f"relay/v1/messages/{quote(pubsub_topic, safe='')}")
        return get_messages_response.json()
@ -0,0 +1,35 @@
import logging
import json
from dataclasses import asdict
from src.node.api_clients.base_client import BaseClient

logger = logging.getLogger(__name__)


class RPC(BaseClient):
    def __init__(self, rpc_port, image_name):
        self._image_name = image_name
        self._rpc_port = rpc_port

    def rpc_call(self, endpoint, params=[]):
        url = f"http://127.0.0.1:{self._rpc_port}"
        headers = {"Content-Type": "application/json"}
        payload = {"jsonrpc": "2.0", "method": endpoint, "params": params, "id": 1}
        return self.make_request("post", url, headers=headers, data=json.dumps(payload))

    def info(self):
        info_response = self.rpc_call("get_waku_v2_debug_v1_info", [])
        return info_response.json()["result"]

    def set_subscriptions(self, pubsub_topics):
        if "nwaku" in self._image_name:
            return self.rpc_call("post_waku_v2_relay_v1_subscriptions", [pubsub_topics])
        else:
            return self.rpc_call("post_waku_v2_relay_v1_subscription", [pubsub_topics])

    def send_message(self, message, pubsub_topic):
        return self.rpc_call("post_waku_v2_relay_v1_message", [pubsub_topic, asdict(message)])

    def get_messages(self, pubsub_topic):
        get_messages_response = self.rpc_call("get_waku_v2_relay_v1_messages", [pubsub_topic])
        return get_messages_response.json()["result"]
@ -0,0 +1,86 @@
import os
import logging
import random
import threading
import docker
from src.env_vars import NETWORK_NAME, SUBNET, IP_RANGE, GATEWAY
from docker.types import IPAMConfig, IPAMPool
from docker.errors import NotFound

logger = logging.getLogger(__name__)


class DockerManager:
    def __init__(self, image):
        self._image = image
        self._client = docker.from_env()
        logger.debug("Docker client initialized with image %s", self._image)

    def create_network(self, network_name=NETWORK_NAME):
        logger.debug("Attempting to create or retrieve network %s", network_name)
        networks = self._client.networks.list(names=[network_name])
        if networks:
            logger.debug("Network %s already exists", network_name)
            return networks[0]

        network = self._client.networks.create(
            network_name,
            driver="bridge",
            ipam=IPAMConfig(driver="default", pool_configs=[IPAMPool(subnet=SUBNET, iprange=IP_RANGE, gateway=GATEWAY)]),
        )
        logger.debug("Network %s created", network_name)
        return network

    def start_container(self, image_name, ports, args, log_path, container_ip):
        cli_args = []
        for key, value in args.items():
            if isinstance(value, list):  # Check if value is a list
                cli_args.extend([f"--{key}={item}" for item in value])  # Add a command for each item in the list
            else:
                cli_args.append(f"--{key}={value}")  # Add a single command
        port_bindings = {f"{port}/tcp": ("", port) for port in ports}
        logger.debug("Starting container with image %s", image_name)
        logger.debug("Using args %s", cli_args)
        container = self._client.containers.run(image_name, command=cli_args, ports=port_bindings, detach=True, remove=True, auto_remove=True)

        network = self._client.networks.get(NETWORK_NAME)
        network.connect(container, ipv4_address=container_ip)

        logger.debug("Container started with ID %s. Setting up logs at %s", container.short_id, log_path)
        log_thread = threading.Thread(target=self._log_container_output, args=(container, log_path))
        log_thread.daemon = True
        log_thread.start()

        return container

    def _log_container_output(self, container, log_path):
        os.makedirs(os.path.dirname(log_path), exist_ok=True)
        with open(log_path, "wb+") as log_file:
            for chunk in container.logs(stream=True):
                log_file.write(chunk)

    def generate_ports(self, base_port=None, count=5):
        if base_port is None:
            base_port = random.randint(1024, 65535 - count)
        ports = [base_port + i for i in range(count)]
        logger.debug("Generated ports %s", ports)
        return ports

    @staticmethod
    def generate_random_ext_ip():
        base_ip_fragments = ["172", "18"]
        ext_ip = ".".join(base_ip_fragments + [str(random.randint(0, 255)) for _ in range(2)])
        logger.debug("Generated random external IP %s", ext_ip)
        return ext_ip

    def is_container_running(self, container):
        try:
            refreshed_container = self._client.containers.get(container.id)
            return refreshed_container.status == "running"
        except NotFound:
            logger.error("Container with ID %s not found", container.id)
            return False

    @property
    def image(self):
        return self._image
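To make the flag construction in `start_container` concrete, here is a small sketch of the mapping it performs; the argument values are made up for illustration, and list values are expanded into one flag per item:

```python
# Illustrative only: how start_container turns an args dict into CLI flags.
args = {"rest": "true", "pubsub-topic": ["/waku/2/a/proto", "/waku/2/b/proto"]}

cli_args = []
for key, value in args.items():
    if isinstance(value, list):
        cli_args.extend([f"--{key}={item}" for item in value])
    else:
        cli_args.append(f"--{key}={value}")

# cli_args == ["--rest=true", "--pubsub-topic=/waku/2/a/proto", "--pubsub-topic=/waku/2/b/proto"]
```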
@ -0,0 +1,101 @@
import os
import logging
from tenacity import retry, stop_after_delay, wait_fixed
from src.node.api_clients.rpc import RPC
from src.node.api_clients.rest import REST
from src.node.docker_mananger import DockerManager
from src.env_vars import LOG_DIR, DEFAULT_PUBSUBTOPIC, PROTOCOL
from src.data_storage import DS

logger = logging.getLogger(__name__)


class WakuNode:
    def __init__(self, docker_image, docker_log_prefix=""):
        self._image_name = docker_image
        self._log_path = os.path.join(LOG_DIR, f"{docker_log_prefix}__{self._image_name.replace('/', '_')}.log")
        self._docker_manager = DockerManager(self._image_name)
        self._container = None
        self._ext_ip = self._docker_manager.generate_random_ext_ip()
        self._ports = self._docker_manager.generate_ports()
        self._rest_port = self._ports[0]
        self._rpc_port = self._ports[1]
        self._websocket_port = self._ports[2]
        logger.debug("WakuNode instance initialized with log path %s", self._log_path)
        if PROTOCOL == "RPC":
            self._api = RPC(self._rpc_port, self._image_name)
        elif PROTOCOL == "REST":
            self._api = REST(self._rest_port)
        else:
            raise ValueError(f"Unknown protocol: {PROTOCOL}")

    @retry(stop=stop_after_delay(5), wait=wait_fixed(0.1), reraise=True)
    def start(self, **kwargs):
        logger.debug("Starting Node...")
        self._docker_manager.create_network()

        default_args = {
            "listen-address": "0.0.0.0",
            "rpc": "true",
            "rpc-admin": "true",
            "rest": "true",
            "rest-admin": "true",
            "websocket-support": "true",
            "log-level": "TRACE",
            "websocket-port": str(self._ports[3]),
            "rpc-port": self._rpc_port,
            "rest-port": self._rest_port,
            "tcp-port": str(self._ports[2]),
            "discv5-udp-port": str(self._ports[4]),
            "rpc-address": "0.0.0.0",
            "rest-address": "0.0.0.0",
            "nat": f"extip:{self._ext_ip}",
            "pubsub-topic": DEFAULT_PUBSUBTOPIC,
        }

        if "go-waku" in self._docker_manager.image:
            go_waku_args = {
                "min-relay-peers-to-publish": "0",
                "legacy-filter": "false",
                "log-level": "DEBUG",
            }
            default_args.update(go_waku_args)

        for key, value in kwargs.items():
            key = key.replace("_", "-")
            default_args[key] = value

        self._container = self._docker_manager.start_container(self._docker_manager.image, self._ports, default_args, self._log_path, self._ext_ip)
        logger.debug(
            "Started container from image %s. RPC: %s REST: %s WebSocket: %s", self._image_name, self._rpc_port, self._rest_port, self._websocket_port
        )
        DS.waku_nodes.append(self)
        try:
            self.ensure_ready()
        except Exception as e:
            logger.error("%s service did not become ready in time: %s", PROTOCOL, e)
            raise

    @retry(stop=stop_after_delay(5), wait=wait_fixed(0.1), reraise=True)
    def stop(self):
        if self._container:
            logger.debug("Stopping container with id %s", self._container.short_id)
            self._container.stop()
            logger.debug("Container stopped.")

    @retry(stop=stop_after_delay(5), wait=wait_fixed(0.05), reraise=True)
    def ensure_ready(self):
        self.info()
        logger.debug("%s service is ready.", PROTOCOL)

    def info(self):
        return self._api.info()

    def set_subscriptions(self, pubsub_topics=[DEFAULT_PUBSUBTOPIC]):
        return self._api.set_subscriptions(pubsub_topics)

    def send_message(self, message, pubsub_topic=DEFAULT_PUBSUBTOPIC):
        return self._api.send_message(message, pubsub_topic)

    def get_messages(self, pubsub_topic=DEFAULT_PUBSUBTOPIC):
        return self._api.get_messages(pubsub_topic)
@ -0,0 +1,38 @@
import logging
from time import sleep, time
import pytest
import allure
from src.data_classes import message_rpc_response_schema
from src.env_vars import NODE_1, NODE_2
from src.node.waku_node import WakuNode
from tenacity import retry, stop_after_delay, wait_fixed

logger = logging.getLogger(__name__)


class StepsRelay:
    @pytest.fixture(scope="function", autouse=True)
    def setup_nodes(self, request):
        self.node1 = WakuNode(NODE_1, request.cls.test_id)
        self.node1.start(relay="true", discv5_discovery="true", peer_exchange="true")
        enr_uri = self.node1.info()["enrUri"]
        self.node2 = WakuNode(NODE_2, request.cls.test_id)
        self.node2.start(relay="true", discv5_discovery="true", discv5_bootstrap_node=enr_uri, peer_exchange="true")
        self.test_pubsub_topic = "test"
        self.test_content_topic = "/test/1/waku-relay"
        self.test_payload = "Relay works!!"
        self.node1.set_subscriptions([self.test_pubsub_topic])
        self.node2.set_subscriptions([self.test_pubsub_topic])

    @allure.step
    @retry(stop=stop_after_delay(20), wait=wait_fixed(0.5), reraise=True)
    def check_published_message_reaches_peer(self, message):
        message.timestamp = int(time() * 1e9)
        self.node1.send_message(message, self.test_pubsub_topic)
        sleep(0.1)
        get_messages_response = self.node2.get_messages(self.test_pubsub_topic)
        logger.debug("Got response from remote peer %s", get_messages_response)
        received_message = message_rpc_response_schema.load(get_messages_response[0])
        assert received_message.payload == message.payload
        assert received_message.contentTopic == message.contentTopic
        assert received_message.timestamp == message.timestamp
@ -0,0 +1,42 @@
SAMPLE_INPUTS = [
    {"description": "A simple string.", "value": "Hello World!"},
    {"description": "An integer.", "value": "1234567890"},
    {"description": "A dictionary.", "value": '{"key": "value"}'},
    {"description": "Chinese characters.", "value": "这是一些中文"},
    {"description": "Emojis.", "value": "🚀🌟✨"},
    {"description": "Lorem ipsum text.", "value": "Lorem ipsum dolor sit amet"},
    {"description": "HTML content.", "value": "<html><body>Hello</body></html>"},
    {"description": "Cyrillic characters.", "value": "\u041f\u0440\u0438\u0432\u0435\u0442"},
    {"description": "Base64 encoded string.", "value": "Base64==dGVzdA=="},
    {"description": "Binary data.", "value": "d29ya2luZyB3aXRoIGJpbmFyeSBkYXRh: \x50\x51"},
    {"description": "Special characters with whitespace.", "value": "\t\nSpecial\tCharacters\n"},
    {"description": "Boolean false as a string.", "value": "False"},
    {"description": "A float number.", "value": "3.1415926535"},
    {"description": "A list.", "value": "[1, 2, 3, 4, 5]"},
    {"description": "Hexadecimal number as a string.", "value": "0xDEADBEEF"},
    {"description": "Email format.", "value": "user@example.com"},
    {"description": "URL format.", "value": "http://example.com"},
    {"description": "Date and time in ISO format.", "value": "2023-11-01T12:00:00Z"},
    {"description": "String with escaped quotes.", "value": '"Escaped" \\"quotes\\"'},
    {"description": "A regular expression.", "value": "Regular expression: ^[a-z0-9_-]{3,16}$"},
    {"description": "A very long string.", "value": "x" * 1000},
    {"description": "A JSON string.", "value": '{"name": "John", "age": 30, "city": "New York"}'},
    {"description": "A Unix path.", "value": "/usr/local/bin"},
    {"description": "A Windows path.", "value": "C:\\Windows\\System32"},
    {"description": "An SQL query.", "value": "SELECT * FROM users WHERE id = 1;"},
    {"description": "JavaScript code snippet.", "value": "function test() { console.log('Hello World'); }"},
    {"description": "A CSS snippet.", "value": "body { background-color: #fff; }"},
    {"description": "A Python one-liner.", "value": "print('Hello World')"},
    {"description": "An IP address.", "value": "192.168.1.1"},
    {"description": "A domain name.", "value": "www.example.com"},
    {"description": "A user agent string.", "value": "Mozilla/5.0 (Windows NT 10.0; Win64; x64)"},
    {"description": "A credit card number.", "value": "1234-5678-9012-3456"},
    {"description": "A phone number.", "value": "+1234567890"},
    {"description": "A UUID.", "value": "123e4567-e89b-12d3-a456-426614174000"},
    {"description": "A hashtag.", "value": "#helloWorld"},
    {"description": "A Twitter handle.", "value": "@username"},
    {"description": "A password.", "value": "P@ssw0rd!"},
    {"description": "A date in common format.", "value": "01/11/2023"},
    {"description": "A time string.", "value": "12:00:00"},
    {"description": "A mathematical equation.", "value": "E = mc^2"},
]
@ -0,0 +1,4 @@
import logging

logging.getLogger("urllib3").setLevel(logging.WARNING)
logging.getLogger("docker").setLevel(logging.WARNING)
@ -0,0 +1,62 @@
# -*- coding: utf-8 -*-
import glob
import logging
import os
import pytest
from datetime import datetime
from uuid import uuid4
from src.libs.common import attach_allure_file
import src.env_vars as env_vars
from src.data_storage import DS

logger = logging.getLogger(__name__)


# See https://docs.pytest.org/en/latest/example/simple.html#making-test-result-information-available-in-fixtures
@pytest.hookimpl(hookwrapper=True, tryfirst=True)
def pytest_runtest_makereport(item):
    outcome = yield
    rep = outcome.get_result()
    if rep.when == "call":
        setattr(item, "rep_call", rep)
        return rep
    return None


@pytest.fixture(scope="session", autouse=True)
def set_allure_env_variables():
    yield
    if os.path.isdir("allure-results") and not os.path.isfile(os.path.join("allure-results", "environment.properties")):
        with open(os.path.join("allure-results", "environment.properties"), "w") as outfile:
            for attribute_name in dir(env_vars):
                if attribute_name.isupper():
                    attribute_value = getattr(env_vars, attribute_name)
                    outfile.write(f"{attribute_name}={attribute_value}\n")


@pytest.fixture(scope="function", autouse=True)
def test_id(request):
    # setting up a unique test id to be used where needed
    request.cls.test_id = f"{datetime.now().strftime('%Y-%m-%d_%H-%M-%S')}__{str(uuid4())}"


@pytest.fixture(scope="function", autouse=True)
def test_setup(request, test_id):
    logger.debug("Running test: %s with id: %s", request.node.name, request.cls.test_id)


@pytest.fixture(scope="function", autouse=True)
def attach_logs_on_fail(request):
    yield
    if request.node.rep_call.failed:
        logger.debug("Test failed, attempting to attach logs to the allure reports")
        for file in glob.glob(os.path.join(env_vars.LOG_DIR, request.cls.test_id + "*")):
            attach_allure_file(file)


@pytest.fixture(scope="function", autouse=True)
def close_open_nodes():
    DS.waku_nodes = []
    yield
    for node in DS.waku_nodes:
        node.stop()
@ -0,0 +1,34 @@
import logging

from src.libs.common import to_base64
from src.data_classes import MessageRpcQuery
from src.steps.relay import StepsRelay
from src.test_data import SAMPLE_INPUTS

logger = logging.getLogger(__name__)


class TestRelayPublish(StepsRelay):
    def test_publish_with_various_payloads(self):
        failed_payloads = []
        for payload in SAMPLE_INPUTS:
            logger.debug("Running test with payload %s", payload["description"])
            message = MessageRpcQuery(payload=to_base64(payload["value"]), contentTopic=self.test_content_topic)
            try:
                self.check_published_message_reaches_peer(message)
            except Exception as e:
                logger.error("Payload %s failed: %s", payload["description"], str(e))
                failed_payloads.append(payload)
        assert not failed_payloads, f"Payloads failed: {failed_payloads}"

    def test_publish_with_various_content_topics(self):
        failed_content_topics = []
        for content_topic in SAMPLE_INPUTS:
            logger.debug("Running test with content topic %s", content_topic["description"])
            message = MessageRpcQuery(payload=to_base64(self.test_payload), contentTopic=content_topic["value"])
            try:
                self.check_published_message_reaches_peer(message)
            except Exception as e:
                logger.error("ContentTopic %s failed: %s", content_topic["description"], str(e))
                failed_content_topics.append(content_topic)
        assert not failed_content_topics, f"ContentTopics failed: {failed_content_topics}"