fix: add auto teardown

Roman 2025-01-14 17:22:17 +08:00
parent 88ddf78252
commit 4af7b1944f
GPG Key ID: B8FE070B54E11B75
5 changed files with 135 additions and 5 deletions

src/data_storage.py (new file, +3 lines)

@@ -0,0 +1,3 @@
# We use this class for global variables
class DS:
    nomos_nodes = []
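For context, DS is a simple shared registry: node code appends every started node to DS.nomos_nodes, and the autouse fixtures in tests/conftest.py (added below) read and reset that list during teardown. A minimal sketch of the pattern (illustrative only; node stands for any started NomosNode):

from src.data_storage import DS

# a node registers itself when it starts...
DS.nomos_nodes.append(node)

# ...and teardown later walks the registry, stopping every node,
# then resets it for the next test
for node in DS.nomos_nodes:
    node.stop()
DS.nomos_nodes = []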


@@ -1,5 +1,6 @@
import os
from src.data_storage import DS
from src.libs.custom_logger import get_custom_logger
from tenacity import retry, stop_after_delay, wait_fixed
@@ -7,6 +8,7 @@ from src.node.api_clients.rest import REST
from src.node.docker_mananger import DockerManager
from src.env_vars import DOCKER_LOG_DIR
from src.node.node_vars import nomos_nodes
from src.test_data import LOG_ERROR_KEYWORDS
logger = get_custom_logger(__name__)
@@ -76,9 +78,10 @@ class NomosNode:
            name=self._container_name,
        )
        logger.debug(f"Container returned {self._container}")
        logger.debug(f"Started container from image {self._image_name}. " f"REST: {getattr(self, '_tcp_port', 'N/A')}")
        DS.nomos_nodes.append(self)

    @retry(stop=stop_after_delay(5), wait=wait_fixed(0.1), reraise=True)
    def stop(self):
        if self._container:
@@ -132,3 +135,13 @@ class NomosNode:
    def info(self):
        return self._api.info()

    def check_nomos_log_errors(self, whitelist=None):
        keywords = LOG_ERROR_KEYWORDS
        # If a whitelist is provided, remove those keywords from the keywords list
        if whitelist:
            keywords = [keyword for keyword in keywords if keyword not in whitelist]
        matches = self._docker_manager.search_log_for_keywords(self._log_path, keywords, False)
        assert not matches, f"Found errors {matches}"
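For example, a test that deliberately triggers one of these messages can exclude that keyword via the whitelist while still failing on anything else (hypothetical usage; the keyword shown is only an example):

# tolerate "terminated" lines from an intentional restart, flag all other error keywords
node.check_nomos_log_errors(whitelist=["terminated"])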

src/test_data.py (new file, +26 lines)

@@ -0,0 +1,26 @@
from time import time
from datetime import datetime, timedelta

LOG_ERROR_KEYWORDS = [
    "crash",
    "fatal",
    "panic",
    "abort",
    "segfault",
    "corrupt",
    "terminated",
    "unhandled",
    "stacktrace",
    "deadlock",
    "SIGSEGV",
    "SIGABRT",
    "stack overflow",
    "index out of bounds",
    "nil pointer dereference",
    "goroutine exit",
    "nil pointer",
    "runtime error",
    "goexit",
    "race condition",
    "double free",
]

tests/conftest.py (new file, +92 lines)

@@ -0,0 +1,92 @@
# -*- coding: utf-8 -*-
import inspect
import glob
from src.libs.custom_logger import get_custom_logger
import os
import pytest
from datetime import datetime
from time import time
from uuid import uuid4
from src.libs.common import attach_allure_file
import src.env_vars as env_vars
from src.data_storage import DS
logger = get_custom_logger(__name__)
# See https://docs.pytest.org/en/latest/example/simple.html#making-test-result-information-available-in-fixtures
@pytest.hookimpl(hookwrapper=True, tryfirst=True)
def pytest_runtest_makereport(item):
    outcome = yield
    rep = outcome.get_result()
    if rep.when == "call":
        setattr(item, "rep_call", rep)
        return rep
    return None

@pytest.fixture(scope="session", autouse=True)
def set_allure_env_variables():
    yield
    if os.path.isdir("allure-results") and not os.path.isfile(os.path.join("allure-results", "environment.properties")):
        logger.debug(f"Running fixture teardown: {inspect.currentframe().f_code.co_name}")
        with open(os.path.join("allure-results", "environment.properties"), "w") as outfile:
            for attribute_name in dir(env_vars):
                if attribute_name.isupper():
                    attribute_value = getattr(env_vars, attribute_name)
                    outfile.write(f"{attribute_name}={attribute_value}\n")

@pytest.fixture(scope="function", autouse=True)
def test_id(request):
    # setting up a unique test id to be used where needed
    logger.debug(f"Running fixture setup: {inspect.currentframe().f_code.co_name}")
    request.cls.test_id = f"{datetime.now().strftime('%Y-%m-%d_%H-%M-%S')}__{str(uuid4())}"

@pytest.fixture(scope="function", autouse=True)
def test_setup(request, test_id):
    logger.debug(f"Running test: {request.node.name} with id: {request.cls.test_id}")
    yield
    logger.debug(f"Running fixture teardown: {inspect.currentframe().f_code.co_name}")
    for file in glob.glob(os.path.join(env_vars.DOCKER_LOG_DIR, "*")):
        if os.path.getmtime(file) < time() - 3600:
            logger.debug(f"Deleting old log file: {file}")
            try:
                os.remove(file)
            except:
                logger.error("Could not delete file")

@pytest.fixture(scope="function", autouse=True)
def attach_logs_on_fail(request):
    yield
    if env_vars.RUNNING_IN_CI and hasattr(request.node, "rep_call") and request.node.rep_call.failed:
        logger.debug(f"Running fixture teardown: {inspect.currentframe().f_code.co_name}")
        logger.debug("Test failed, attempting to attach logs to the allure reports")
        for file in glob.glob(os.path.join(env_vars.DOCKER_LOG_DIR, "*" + request.cls.test_id + "*")):
            attach_allure_file(file)

@pytest.fixture(scope="function", autouse=True)
def close_open_nodes(attach_logs_on_fail):
    DS.nomos_nodes = []
    yield
    logger.debug(f"Running fixture teardown: {inspect.currentframe().f_code.co_name}")
    crashed_containers = []
    for node in DS.nomos_nodes:
        try:
            node.stop()
        except Exception as ex:
            if "No such container" in str(ex):
                crashed_containers.append(node.image)
            logger.error(f"Failed to stop container because of error {ex}")
    assert not crashed_containers, f"Containers {crashed_containers} crashed during the test!!!"

@pytest.fixture(scope="function", autouse=True)
def check_nomos_log_errors():
    yield
    logger.debug(f"Running fixture teardown: {inspect.currentframe().f_code.co_name}")
    for node in DS.nomos_nodes:
        node.check_nomos_log_errors()


@@ -22,7 +22,3 @@ class Test2NodeClAlive:
        except Exception as ex:
            logger.error(f"REST service did not become ready in time: {ex}")
            raise

        self.node1.stop()
        self.node2.stop()
        self.node3.stop()
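With the autouse fixtures above in place, tests no longer stop nodes themselves: close_open_nodes stops everything registered in DS.nomos_nodes and check_nomos_log_errors scans each node's log after every test. A minimal sketch of what a test can look like after this change (hypothetical class and test name; it assumes nodes are started by the suite's existing setup, as in the class above):

class TestClusterAlive:
    def test_nodes_stay_healthy(self):
        # nodes created during setup registered themselves in DS.nomos_nodes,
        # so no explicit self.nodeX.stop() calls are needed here; teardown
        # and log-error checks run automatically after the test
        assert self.node1.info()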