Merge 13e292d7e46bf7bfbdce0e7370831a8d115c98f8 into 639dffc505f022b223dc409908b619c556387b08

This commit is contained in:
AYAHASSAN287 2026-04-14 19:41:22 +02:00 committed by GitHub
commit f8f11156a7
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
12 changed files with 450 additions and 92 deletions

View File

@ -32,18 +32,18 @@ env:
RLN_CREDENTIALS: ${{ secrets.RLN_CREDENTIALS }}
jobs:
tests:
name: tests
strategy:
fail-fast: false
matrix:
shard: [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17]
# total number of shards = 18 means tests will be split into 18 threads and run in parallel to increase execution speed
# command for sharding :
# pytest --shard-id=<shard_number> --num-shards=<total_shards>
# shard 16 for test_rln.py file as they shall run sequentially
# shard 17 for test_cursor_many_msgs.py as it takes time >7 mins
# total number of shards = 18 means tests will be split into 18 threads and run in parallel to increase execution speed
# command for sharding :
# pytest --shard-id=<shard_number> --num-shards=<total_shards>
# shard 16 for test_rln.py file as they shall run sequentially
# shard 17 for test_cursor_many_msgs.py as it takes time >7 mins
runs-on: ubuntu-latest
timeout-minutes: 120
outputs:
@ -65,60 +65,149 @@ jobs:
jobResult_15: ${{ steps.set_result.outputs.JOB_RESULT_15 }}
jobResult_16: ${{ steps.set_result.outputs.JOB_RESULT_16 }}
jobResult_17: ${{ steps.set_result.outputs.JOB_RESULT_17 }}
steps:
- uses: actions/checkout@v4
with:
submodules: recursive
- uses: actions/checkout@v4
- name: Remove unwanted software
uses: ./.github/actions/prune-vm
- name: Remove unwanted software
uses: ./.github/actions/prune-vm
- uses: actions/setup-python@v4
with:
python-version: "3.12"
cache: "pip"
- uses: actions/setup-python@v4
with:
python-version: '3.12'
cache: 'pip'
- name: Add python bindings to PYTHONPATH
run: echo "PYTHONPATH=$(pwd)/vendor/logos-delivery-python-bindings/waku:$PYTHONPATH" >> $GITHUB_ENV
- name: Install system deps for tc / nsenter
run: |
sudo apt-get update
sudo apt-get install -y \
util-linux \
iproute2 \
sudo \
ca-certificates \
curl
- name: Install system deps for tc / nsenter
run: |
sudo apt-get update
sudo apt-get install -y \
util-linux \
iproute2 \
sudo \
ca-certificates \
curl \
make \
gcc \
g++
- run: pip install -r requirements.txt
- name: Install Nim 2.2.4
run: |
set -euo pipefail
curl https://nim-lang.org/choosenim/init.sh -sSf | sh -s -- -y
echo "$HOME/.nimble/bin" >> "$GITHUB_PATH"
export PATH="$HOME/.nimble/bin:$PATH"
choosenim 2.2.4
nim --version
nimble --version
- name: Run tests
- run: pip install -r requirements.txt
run: |
if [ "${{ matrix.shard }}" == "16" ]; then
pytest tests/relay/test_rln.py --alluredir=allure-results-${{ matrix.shard }}
elif [ "${{ matrix.shard }}" == "17" ]; then
pytest tests/store/test_cursor_many_msgs.py --alluredir=allure-results-${{ matrix.shard }}
elif [ "${{ matrix.shard }}" != "17" ]; then
pytest --ignore=tests/relay/test_rln.py --ignore=tests/store/test_cursor_many_msgs.py --reruns 2 --shard-id=${{ matrix.shard }} --num-shards=16 --alluredir=allure-results-${{ matrix.shard }}
fi
- name: Build liblogosdelivery.so for python bindings
run: |
set -euo pipefail
- name: Upload allure results
if: always()
uses: actions/upload-artifact@v4
with:
name: allure-results-${{ matrix.shard }}
path: allure-results-${{ matrix.shard }}
export PATH="$HOME/.nimble/bin:$PATH"
- name: Set job result
id: set_result
if: always()
run: |
version="${{ matrix.shard }}"
echo "JOB_RESULT_${version}=${{ job.status }}" >> "$GITHUB_OUTPUT"
BINDINGS_DIR="$(pwd)/vendor/logos-delivery-python-bindings"
DELIVERY_DIR="$BINDINGS_DIR/vendor/logos-delivery"
mkdir -p "$BINDINGS_DIR/lib"
cd "$DELIVERY_DIR"
ln -sf waku.nimble waku.nims
# install Nim deps
nimble install -y
# do the real setup, do not fake .nimble-setup
make setup
# now build the shared library
make liblogosdelivery
SO_PATH="$(find . -type f -name 'liblogosdelivery.so' | head -n 1)"
if [ -z "$SO_PATH" ]; then
echo "liblogosdelivery.so was not built"
exit 1
fi
cp "$SO_PATH" "$BINDINGS_DIR/lib/liblogosdelivery.so"
echo "Built library:"
ls -l "$BINDINGS_DIR/lib/liblogosdelivery.so"
- name: Verify wrapper library
run: |
test -f vendor/logos-delivery-python-bindings/lib/liblogosdelivery.so
- name: Debug Python import paths
run: |
pwd
echo "PYTHONPATH=$PYTHONPATH"
find . -maxdepth 5 | grep wrapper || true
python - <<'PY'
import sys
print("sys.path:")
for p in sys.path:
print(p)
try:
import wrapper
print("wrapper import OK:", wrapper)
except Exception as e:
print("wrapper import failed:", e)
raise
PY
- name: Run tests
run: |
export PATH="$HOME/.nimble/bin:$PATH"
export PYTHONPATH="$(pwd)/vendor/logos-delivery-python-bindings/waku:$PYTHONPATH"
if [ "${{ matrix.shard }}" == "16" ]; then
pytest tests/relay/test_rln.py \
--ignore=vendor/logos-delivery-python-bindings/tests \
--alluredir=allure-results-${{ matrix.shard }}
elif [ "${{ matrix.shard }}" == "17" ]; then
pytest tests/store/test_cursor_many_msgs.py \
--ignore=vendor/logos-delivery-python-bindings/tests \
--alluredir=allure-results-${{ matrix.shard }}
else
pytest \
--ignore=vendor/logos-delivery-python-bindings/tests \
--ignore=tests/relay/test_rln.py \
--ignore=tests/store/test_cursor_many_msgs.py \
--reruns 2 \
--shard-id=${{ matrix.shard }} \
--num-shards=16 \
--alluredir=allure-results-${{ matrix.shard }}
fi
- name: Upload allure results
if: always()
uses: actions/upload-artifact@v4
with:
name: allure-results-${{ matrix.shard }}
path: allure-results-${{ matrix.shard }}
- name: Set job result
id: set_result
if: always()
run: |
version="${{ matrix.shard }}"
echo "JOB_RESULT_${version}=${{ job.status }}" >> "$GITHUB_OUTPUT"
aggregate-reports:
runs-on: ubuntu-latest
needs: [tests]
if: always()
steps:
- name: Download all allure results
uses: actions/download-artifact@v4
@ -148,59 +237,55 @@ jobs:
uses: peaceiris/actions-gh-pages@v3
if: always()
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
publish_branch: gh-pages
publish_dir: allure-history
destination_dir: ${{ env.CALLER }}
github_token: ${{ secrets.GITHUB_TOKEN }}
publish_branch: gh-pages
publish_dir: allure-history
destination_dir: ${{ env.CALLER }}
- name: Store output from matrix jobs
run: |
echo '${{ toJSON(needs.tests.outputs) }}' > results.json
echo '${{ toJSON(needs.tests.outputs) }}' > results.json
- name: Create job summary
if: always()
run: |
echo "## Run Information" >> $GITHUB_STEP_SUMMARY
echo "- **Event**: ${{ github.event_name }}" >> $GITHUB_STEP_SUMMARY
echo "- **Actor**: ${{ github.actor }}" >> $GITHUB_STEP_SUMMARY
echo "- **Node1**: ${{ env.NODE_1 }}" >> $GITHUB_STEP_SUMMARY
echo "- **Node2**: ${{ env.NODE_2 }}" >> $GITHUB_STEP_SUMMARY
echo "- **Additional Nodes**: ${{ env.ADDITIONAL_NODES }}" >> $GITHUB_STEP_SUMMARY
echo "## Test Results" >> $GITHUB_STEP_SUMMARY
echo "Allure report will be available at: https://logos-messaging.github.io/logos-delivery-interop-tests/${{ env.CALLER }}/${{ github.run_number }}" >> $GITHUB_STEP_SUMMARY
# Evaluate overall result
TESTS_RESULT="success"
for key in $(jq -r 'keys[]' results.json); do
result=$(jq -r --arg key "$key" '.[$key]' results.json)
echo "Key: $key, Value: $result"
# Check condition on the result
if [ "$result" != "success" ]; then
echo "Value 'success' not found at key: $key"
TESTS_RESULT="failure"
break
fi
done
# Notify Waku team
if [ "$TESTS_RESULT" != "success" ]; then
echo "There are failures with nwaku node. cc <@&1111608257824440330>" >> $GITHUB_STEP_SUMMARY
echo "## Run Information" >> $GITHUB_STEP_SUMMARY
echo "- **Event**: ${{ github.event_name }}" >> $GITHUB_STEP_SUMMARY
echo "- **Actor**: ${{ github.actor }}" >> $GITHUB_STEP_SUMMARY
echo "- **Node1**: ${{ env.NODE_1 }}" >> $GITHUB_STEP_SUMMARY
echo "- **Node2**: ${{ env.NODE_2 }}" >> $GITHUB_STEP_SUMMARY
echo "- **Additional Nodes**: ${{ env.ADDITIONAL_NODES }}" >> $GITHUB_STEP_SUMMARY
echo "## Test Results" >> $GITHUB_STEP_SUMMARY
echo "Allure report will be available at: https://logos-messaging.github.io/logos-delivery-interop-tests/${{ env.CALLER }}/${{ github.run_number }}" >> $GITHUB_STEP_SUMMARY
TESTS_RESULT="success"
for key in $(jq -r 'keys[]' results.json); do
result=$(jq -r --arg key "$key" '.[$key]' results.json)
echo "Key: $key, Value: $result"
if [ "$result" != "success" ]; then
echo "Value 'success' not found at key: $key"
TESTS_RESULT="failure"
break
fi
# Write result and summary to ENV
echo "TESTS_RESULT=$TESTS_RESULT" >> $GITHUB_ENV
{
echo 'JOB_SUMMARY<<EOF'
cat $GITHUB_STEP_SUMMARY
echo EOF
} >> $GITHUB_ENV
done
if [ "$TESTS_RESULT" != "success" ]; then
echo "There are failures with nwaku node. cc <@&1111608257824440330>" >> $GITHUB_STEP_SUMMARY
fi
echo "TESTS_RESULT=$TESTS_RESULT" >> $GITHUB_ENV
{
echo 'JOB_SUMMARY<<EOF'
cat $GITHUB_STEP_SUMMARY
echo EOF
} >> $GITHUB_ENV
- name: Send report to Discord
uses: rjstone/discord-webhook-notify@v1
if: always() && env.CALLER != 'manual'
with:
severity: ${{ env.TESTS_RESULT == 'success' && 'info' || 'error' }}
username: ${{ github.workflow }}
description: "## Job Result: ${{ env.TESTS_RESULT }}"
details: ${{ env.JOB_SUMMARY }}
webhookUrl: ${{ secrets.DISCORD_TEST_REPORTS_WH }}
severity: ${{ env.TESTS_RESULT == 'success' && 'info' || 'error' }}
username: ${{ github.workflow }}
description: "## Job Result: ${{ env.TESTS_RESULT }}"
details: ${{ env.JOB_SUMMARY }}
webhookUrl: ${{ secrets.DISCORD_TEST_REPORTS_WH }}

2
.gitignore vendored
View File

@ -104,3 +104,5 @@ dmypy.json
# Pyre type checker
.pyre/
third_party/logos-delivery-python-bindings

4
.gitmodules vendored Normal file
View File

@ -0,0 +1,4 @@
[submodule "vendor/logos-delivery-python-bindings"]
path = vendor/logos-delivery-python-bindings
url = https://github.com/logos-messaging/logos-delivery-python-bindings.git

View File

@ -2,9 +2,13 @@
addopts = --instafail --tb=short --color=auto
log_level = DEBUG
log_cli = True
norecursedirs =
vendor
nimbledeps
*.egg-info
log_file = log/test.log
log_cli_format = %(asctime)s.%(msecs)03d %(levelname)s [%(name)s] %(message)s
log_file_format = %(asctime)s.%(msecs)03d %(levelname)s [%(name)s] %(message)s
timeout = 300
markers =
smoke: marks tests as smoke test (deselect with '-m "not smoke"')
smoke: marks tests as smoke test (deselect with '-m "not smoke"')

View File

@ -40,3 +40,5 @@ typing_extensions==4.9.0
urllib3==2.2.2
virtualenv==20.25.0
pytest-shard==0.1.2
result==0.17.0
cffi

View File

@ -16,6 +16,7 @@ def get_env_var(var_name, default=None):
# Configuration constants. Need to be uppercase to appear in reports
DEFAULT_NWAKU = "wakuorg/nwaku:latest"
STRESS_ENABLED = False
USE_WRAPPERS = True
NODE_1 = get_env_var("NODE_1", DEFAULT_NWAKU)
NODE_2 = get_env_var("NODE_2", DEFAULT_NWAKU)
ADDITIONAL_NODES = get_env_var("ADDITIONAL_NODES", f"{DEFAULT_NWAKU},{DEFAULT_NWAKU},{DEFAULT_NWAKU}")

View File

@ -15,6 +15,7 @@ from src.node.docker_mananger import DockerManager
from src.env_vars import DOCKER_LOG_DIR
from src.data_storage import DS
from src.test_data import DEFAULT_CLUSTER_ID, LOG_ERROR_KEYWORDS, VALID_PUBSUB_TOPICS
from src.node.wrappers_manager import WrapperManager
logger = get_custom_logger(__name__)
@ -83,11 +84,25 @@ class WakuNode:
self._docker_manager = DockerManager(self._image_name)
self._container = None
self.start_args = {}
self._wrapper_node = None
logger.debug(f"WakuNode instance initialized with log path {self._log_path}")
@property
def _is_wrapper(self) -> bool:
    """True when this node was started through the python-bindings wrapper
    (set by _start_wrapper) rather than as a Docker container."""
    return self._wrapper_node is not None
@retry(stop=stop_after_delay(60), wait=wait_fixed(0.1), reraise=True)
def start(self, wait_for_node_sec=20, **kwargs):
@retry(stop=stop_after_delay(60), wait=wait_fixed(0.1), reraise=True)
def start(self, wait_for_node_sec=20, use_wrapper=False, **kwargs):
logger.debug("Starting Node...")
default_args, remove_container = self._prepare_start_context(**kwargs)
if use_wrapper:
self._start_wrapper(default_args, wait_for_node_sec)
else:
self._start_docker(default_args, remove_container, wait_for_node_sec)
def _prepare_start_context(self, **kwargs):
self._docker_manager.create_network()
self._ext_ip = self._docker_manager.generate_random_ext_ip()
self._ports = self._docker_manager.generate_ports()
@ -175,8 +190,12 @@ class WakuNode:
else:
logger.info(f"RLN credentials not set or credential store not available, starting without RLN")
logger.debug(f"Using volumes {self._volumes}")
self.start_args = dict(default_args)
return default_args, remove_container
def _start_docker(self, default_args, remove_container, wait_for_node_sec):
logger.debug(f"Using volumes {self._volumes}")
self._container = self._docker_manager.start_container(
self._docker_manager.image,
ports=self._ports,
@ -186,16 +205,61 @@ class WakuNode:
volumes=self._volumes,
remove_container=remove_container,
)
logger.debug(f"Started container from image {self._image_name}. REST: {self._rest_port}")
DS.waku_nodes.append(self)
delay(1)  # if we fire requests too soon after starting, the node will sometimes fail to start correctly
delay(1)
try:
self.ensure_ready(timeout_duration=wait_for_node_sec)
except Exception as ex:
logger.error(f"REST service did not become ready in time: {ex}")
raise
def _start_wrapper(self, default_args, wait_for_node_sec):
    """Start the node in-process via the python-bindings wrapper.

    Translates the docker-style CLI args into a wrapper config dict, creates
    and starts the wrapper node, registers this node for global teardown,
    then waits for the REST endpoint to become ready.

    Raises RuntimeError if the wrapper node fails to start; re-raises
    whatever ensure_ready raises if the REST service is not ready in time.
    """
    logger.debug("Starting node using wrappers")
    wrapper_config = self._default_args_to_wrapper_config(default_args)
    result = WrapperManager.create_and_start(config=wrapper_config, timeout_s=wait_for_node_sec)
    if result.is_err():
        raise RuntimeError(f"Failed to start wrapper node: {result.err()}")
    self._wrapper_node = result.ok_value
    logger.debug(f"Started wrapper node. REST: {self._rest_port}")
    # Register for cleanup alongside docker-started nodes.
    DS.waku_nodes.append(self)
    delay(1)  # brief grace period before firing REST requests
    try:
        self.ensure_ready(timeout_duration=wait_for_node_sec)
    except Exception as ex:
        logger.error(f"REST service did not become ready in time: {ex}")
        raise
def _default_args_to_wrapper_config(self, default_args):
    """Translate docker-style CLI args into the wrapper's nested config dict."""

    def _flag(key, fallback="false"):
        # CLI flag values arrive as "true"/"false" strings; compare
        # case-insensitively and treat anything else as False.
        return default_args.get(key, fallback).lower() == "true"

    networking = {
        "listenIpv4": default_args.get("listen-address", "0.0.0.0"),
        "p2pTcpPort": int(default_args["tcp-port"]),
        "discv5UdpPort": int(default_args["discv5-udp-port"]),
        "restPort": int(default_args["rest-port"]),
        "restAddress": default_args.get("rest-address", "0.0.0.0"),
    }

    # A single optional bootstrap node becomes a one-element list.
    bootstrap_nodes = []
    bootstrap = default_args.get("discv5-bootstrap-node")
    if bootstrap:
        bootstrap_nodes = [bootstrap]

    protocols = {
        "clusterId": int(default_args.get("cluster-id", DEFAULT_CLUSTER_ID)),
        "relay": _flag("relay"),
        "store": _flag("store"),
        "filter": _flag("filter"),
        "lightpush": _flag("lightpush"),
        "peerExchange": _flag("peer-exchange"),
        "discv5Discovery": _flag("discv5-discovery", "true"),
        "discv5BootstrapNodes": bootstrap_nodes,
    }

    return {
        "logLevel": default_args.get("log-level", "DEBUG"),
        "mode": "Core",
        "networkingConfig": networking,
        "protocolsConfig": protocols,
    }
@retry(stop=stop_after_delay(250), wait=wait_fixed(0.1), reraise=True)
def register_rln(self, **kwargs):
logger.debug("Registering RLN credentials...")
@ -230,6 +294,12 @@ class WakuNode:
@retry(stop=stop_after_delay(5), wait=wait_fixed(0.1), reraise=True)
def stop(self):
    """Stop the node, dispatching to the wrapper or docker teardown path.

    Retried for up to 5 seconds since shutdown can transiently fail.
    """
    if self._is_wrapper:
        self._stop_wrapper()
    else:
        self._stop_docker()
def _stop_docker(self):
if self._container:
logger.debug(f"Stopping container with id {self._container.short_id}")
self._container.stop()
@ -240,6 +310,14 @@ class WakuNode:
self._container = None
logger.debug("Container stopped.")
def _stop_wrapper(self):
    """Stop and destroy the in-process wrapper node.

    A failed stop is logged rather than raised (best-effort teardown);
    the reference is cleared either way, so _is_wrapper becomes False.
    """
    logger.debug("Stopping wrapper node")
    result = self._wrapper_node.stop_and_destroy()
    if result.is_err():
        logger.error(f"Failed to stop wrapper node: {result.err()}")
    self._wrapper_node = None
    logger.debug("Wrapper node stopped and destroyed.")
@retry(stop=stop_after_delay(5), wait=wait_fixed(0.1), reraise=True)
def kill(self):
if self._container:
@ -320,6 +398,33 @@ class WakuNode:
def get_tcp_address(self):
return f"/ip4/{self._ext_ip}/tcp/{self._tcp_port}"
def subscribe_content_topic(self, content_topic: str, *, timeout_s: float = 20.0):
    """Subscribe this node to a content topic.

    Routes to the wrapper binding when running in-process, otherwise to
    the REST relay auto-subscription API. Raises RuntimeError when the
    wrapper call returns an error result.
    """
    if not self._is_wrapper:
        return self._api.set_relay_auto_subscriptions([content_topic])
    outcome = self._wrapper_node.subscribe_content_topic(content_topic, timeout_s=timeout_s)
    if outcome.is_err():
        raise RuntimeError(f"subscribe_content_topic failed: {outcome.err()}")
    return outcome.ok_value
def unsubscribe_content_topic(self, content_topic: str, *, timeout_s: float = 20.0):
    """Unsubscribe this node from a content topic.

    Routes to the wrapper binding when running in-process, otherwise to
    the REST relay auto-subscription API. Raises RuntimeError when the
    wrapper call returns an error result.
    """
    if not self._is_wrapper:
        return self._api.delete_relay_auto_subscriptions([content_topic])
    outcome = self._wrapper_node.unsubscribe_content_topic(content_topic, timeout_s=timeout_s)
    if outcome.is_err():
        raise RuntimeError(f"unsubscribe_content_topic failed: {outcome.err()}")
    return outcome.ok_value
def send_message(self, message: dict, *, timeout_s: float = 20.0):
    """Publish a message from this node.

    Routes to the wrapper binding when running in-process, otherwise to
    the REST relay auto-sharding publish API. Raises RuntimeError when
    the wrapper call returns an error result.
    """
    if not self._is_wrapper:
        return self._api.send_relay_auto_message(message)
    outcome = self._wrapper_node.send_message(message, timeout_s=timeout_s)
    if outcome.is_err():
        raise RuntimeError(f"send_message failed: {outcome.err()}")
    return outcome.ok_value
def info(self):
    """Return node info from the REST API."""
    return self._api.info()

View File

@ -0,0 +1,76 @@
import sys
from pathlib import Path

from result import Result, Ok, Err

# Make the python-bindings package importable. The git submodule is checked
# out under vendor/ (see .gitmodules), so probe that location first;
# third_party/ is kept as a legacy fallback so older local checkouts keep
# working. CI additionally puts vendor/.../waku on PYTHONPATH.
_REPO_ROOT = Path(__file__).resolve().parents[2]
for _candidate in (
    _REPO_ROOT / "vendor" / "logos-delivery-python-bindings" / "waku",
    _REPO_ROOT / "third_party" / "logos-delivery-python-bindings" / "waku",
):
    if _candidate.is_dir() and str(_candidate) not in sys.path:
        sys.path.insert(0, str(_candidate))

from wrapper import NodeWrapper as _NodeWrapper  # type: ignore[import]


class WrapperManager:
    """Result-returning facade over the bindings' NodeWrapper.

    Every call delegates to the underlying node and returns a
    ``result.Result`` so callers can branch on ``is_ok()``/``is_err()``
    instead of handling binding-specific failures.
    """

    def __init__(self, node: _NodeWrapper):
        # Wraps an already-created NodeWrapper; use create() /
        # create_and_start() to obtain an instance.
        self._node = node

    @classmethod
    def create(
        cls,
        config: dict,
        event_cb=None,
        *,
        timeout_s: float = 20.0,
    ) -> Result["WrapperManager", str]:
        """Create (but do not start) a node; returns Err(message) on failure."""
        result = _NodeWrapper.create_node(config, event_cb, timeout_s=timeout_s)
        if result.is_err():
            return Err(result.err())
        return Ok(cls(result.ok_value))

    @classmethod
    def create_and_start(
        cls,
        config: dict,
        event_cb=None,
        *,
        timeout_s: float = 20.0,
    ) -> Result["WrapperManager", str]:
        """Create and start a node in one step; returns Err(message) on failure."""
        result = _NodeWrapper.create_and_start(config, event_cb, timeout_s=timeout_s)
        if result.is_err():
            return Err(result.err())
        return Ok(cls(result.ok_value))

    def __enter__(self) -> "WrapperManager":
        return self

    def __exit__(self, *_) -> None:
        # Best-effort teardown on context exit; any error is carried in the
        # (discarded) Result rather than raised.
        self.stop_and_destroy()

    def start_node(self, *, timeout_s: float = 20.0) -> Result[int, str]:
        return self._node.start_node(timeout_s=timeout_s)

    def stop_node(self, *, timeout_s: float = 20.0) -> Result[int, str]:
        return self._node.stop_node(timeout_s=timeout_s)

    def destroy(self, *, timeout_s: float = 20.0) -> Result[int, str]:
        return self._node.destroy(timeout_s=timeout_s)

    def stop_and_destroy(self, *, timeout_s: float = 20.0) -> Result[int, str]:
        return self._node.stop_and_destroy(timeout_s=timeout_s)

    def subscribe_content_topic(self, content_topic: str, *, timeout_s: float = 20.0) -> Result[int, str]:
        return self._node.subscribe_content_topic(content_topic, timeout_s=timeout_s)

    def unsubscribe_content_topic(self, content_topic: str, *, timeout_s: float = 20.0) -> Result[int, str]:
        return self._node.unsubscribe_content_topic(content_topic, timeout_s=timeout_s)

    def send_message(self, message: dict, *, timeout_s: float = 20.0) -> Result[str, str]:
        return self._node.send_message(message, timeout_s=timeout_s)

    def get_available_node_info_ids(self, *, timeout_s: float = 20.0) -> Result[list[str], str]:
        return self._node.get_available_node_info_ids(timeout_s=timeout_s)

    def get_node_info(self, node_info_id: str, *, timeout_s: float = 20.0) -> Result[dict, str]:
        return self._node.get_node_info(node_info_id, timeout_s=timeout_s)

    def get_available_configs(self, *, timeout_s: float = 20.0) -> Result[dict, str]:
        return self._node.get_available_configs(timeout_s=timeout_s)

View File

@ -0,0 +1,18 @@
class NodeStub:
    """Test double for a wrapper node.

    Each method records the arguments it was called with on the instance
    and returns a distinct Ok(<int>) value, so tests can verify both the
    pass-through of arguments and which method was invoked.

    NOTE(review): `Ok` is presumably `result.Ok` imported at module level
    outside this excerpt -- confirm the import exists in the full file.
    """

    def send_message(self, message, timeout_s=20.0):
        # Record the call arguments for later inspection by the test.
        self.message = message
        self.timeout_s = timeout_s
        return Ok(1)

    def get_available_node_info_ids(self, timeout_s=20.0):
        self.timeout_s = timeout_s
        return Ok(2)

    def get_node_info(self, node_info_id, timeout_s=20.0):
        self.node_info_id = node_info_id
        self.timeout_s = timeout_s
        return Ok(3)

    def get_available_configs(self, timeout_s=20.0):
        self.timeout_s = timeout_s
        return Ok(4)

View File

@ -0,0 +1,34 @@
import socket
import pytest
from src.test_data import DEFAULT_CLUSTER_ID
def _free_port():
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
s.bind(("", 0))
return s.getsockname()[1]
def build_node_config(**overrides):
    """Build a default wrapper-node config dict.

    Each call picks fresh free TCP/UDP/REST ports so parallel nodes do not
    collide; any keyword argument replaces the default of the same name.
    """
    defaults = {
        "logLevel": "DEBUG",
        "listenAddress": "0.0.0.0",
        "tcpPort": _free_port(),
        "discv5UdpPort": _free_port(),
        "restPort": _free_port(),
        "restAddress": "0.0.0.0",
        "clusterId": DEFAULT_CLUSTER_ID,
        "relay": True,
        "store": True,
        "filter": False,
        "lightpush": False,
        "peerExchange": False,
        "discv5Discovery": False,
    }
    # Later keys win, so overrides take precedence over the defaults.
    return {**defaults, **overrides}
@pytest.fixture
def node_config():
    # Fresh config per test so every node gets its own free ports.
    return build_node_config()

View File

@ -0,0 +1,26 @@
import pytest
from src.node.wrappers_manager import WrapperManager
@pytest.mark.smoke
class TestLogosDeliveryLifecycle:
    """Smoke tests for the wrapper-node create/start/stop/destroy lifecycle."""

    def _create_started_node(self, node_config):
        # Shared setup: return a node that is already created and started,
        # or fail the test immediately with the underlying error message.
        outcome = WrapperManager.create_and_start(config=node_config)
        assert outcome.is_ok(), f"Failed to create and start node: {outcome.err()}"
        return outcome.ok_value

    def test_create_and_start_node(self, node_config):
        # Combined teardown path: stop and destroy in a single call.
        node = self._create_started_node(node_config)
        teardown = node.stop_and_destroy()
        assert teardown.is_ok(), f"Failed to stop and destroy node: {teardown.err()}"

    def test_stop_node_without_destroy(self, node_config):
        # Split teardown path: stop first, destroy afterwards. destroy()
        # must run even when the stop assertion fails, hence the finally.
        node = self._create_started_node(node_config)
        try:
            stopped = node.stop_node()
            assert stopped.is_ok(), f"Failed to stop node: {stopped.err()}"
        finally:
            destroyed = node.destroy()
            assert destroyed.is_ok(), f"Failed to destroy node: {destroyed.err()}"

@ -0,0 +1 @@
Subproject commit fd4d0a5a7efcd56b395024f558afa416d7e27a2b