write test generator diagnostics to a summary file

This commit is contained in:
Alex Stokes 2022-06-27 12:33:54 -06:00
parent 0ba5b3b5c5
commit 529ce301b8
No known key found for this signature in database
GPG Key ID: AFEAF4EC484AC206
2 changed files with 39 additions and 6 deletions

View File

@@ -1041,7 +1041,7 @@ setup(
     extras_require={
         "test": ["pytest>=4.4", "pytest-cov", "pytest-xdist"],
         "lint": ["flake8==3.7.7", "mypy==0.812", "pylint==2.12.2"],
-        "generator": ["python-snappy==0.5.4"],
+        "generator": ["python-snappy==0.5.4", "filelock"],
     },
     install_requires=[
         "eth-utils>=1.3.0,<2",

View File

@@ -3,7 +3,9 @@
 import time
 import shutil
 import argparse
 from pathlib import Path
+from filelock import FileLock
 import sys
+import json
 from typing import Iterable, AnyStr, Any, Callable
 import traceback
@@ -111,6 +113,8 @@ def run_generator(generator_name, test_providers: Iterable[TestProvider]):
     collected_test_count = 0
     generated_test_count = 0
     skipped_test_count = 0
+    test_identifiers = []
+
     provider_start = time.time()
     for tprov in test_providers:
         if not collect_only:
for tprov in test_providers: for tprov in test_providers:
if not collect_only: if not collect_only:
@@ -123,12 +127,10 @@ def run_generator(generator_name, test_providers: Iterable[TestProvider]):
                 / Path(test_case.runner_name) / Path(test_case.handler_name)
                 / Path(test_case.suite_name) / Path(test_case.case_name)
             )
-            incomplete_tag_file = case_dir / "INCOMPLETE"
-
             collected_test_count += 1
-            if collect_only:
-                print(f"Collected test at: {case_dir}")
-                continue
+            print(f"Collected test at: {case_dir}")

+            incomplete_tag_file = case_dir / "INCOMPLETE"
             if case_dir.exists():
                 if not args.force and not incomplete_tag_file.exists():
@@ -198,6 +200,15 @@ def run_generator(generator_name, test_providers: Iterable[TestProvider]):
                     shutil.rmtree(case_dir)
                 else:
                     generated_test_count += 1
+                    test_identifier = "::".join([
+                        test_case.preset_name,
+                        test_case.fork_name,
+                        test_case.runner_name,
+                        test_case.handler_name,
+                        test_case.suite_name,
+                        test_case.case_name
+                    ])
+                    test_identifiers.append(test_identifier)
                     # Only remove `INCOMPLETE` tag file
                     os.remove(incomplete_tag_file)
                 test_end = time.time()
@@ -216,6 +227,28 @@ def run_generator(generator_name, test_providers: Iterable[TestProvider]):
     if span > TIME_THRESHOLD_TO_PRINT:
         summary_message += f" in {span} seconds"
     print(summary_message)
+    diagnostics = {
+        "collected_test_count": collected_test_count,
+        "generated_test_count": generated_test_count,
+        "skipped_test_count": skipped_test_count,
+        "test_identifiers": test_identifiers,
+        "durations": [f"{span} seconds"],
+    }
+    diagnostics_path = Path(os.path.join(output_dir, "diagnostics.json"))
+    diagnostics_lock = FileLock(os.path.join(output_dir, "diagnostics.json.lock"))
+    with diagnostics_lock:
+        diagnostics_path.touch(exist_ok=True)
+        if os.path.getsize(diagnostics_path) == 0:
+            with open(diagnostics_path, "w+") as f:
+                json.dump(diagnostics, f)
+        else:
+            with open(diagnostics_path, "r+") as f:
+                existing_diagnostics = json.load(f)
+                for k, v in diagnostics.items():
+                    existing_diagnostics[k] += v
+            with open(diagnostics_path, "w+") as f:
+                json.dump(existing_diagnostics, f)
+    print(f"wrote diagnostics to {diagnostics_path}")


 def dump_yaml_fn(data: Any, name: str, file_mode: str, yaml_encoder: YAML):