From 69052ac75080db77169632af9cd91006df45038c Mon Sep 17 00:00:00 2001
From: protolambda
Date: Fri, 26 Jul 2019 19:19:36 +0200
Subject: [PATCH] Update testgen code, and if force is not on, test generation
 won't run if it already exists.

---
 test_libs/gen_helpers/gen_base/gen_runner.py | 70 +++++++++-------
 test_libs/gen_helpers/gen_base/gen_typing.py | 14 ++--
 test_libs/gen_helpers/gen_from_tests/gen.py  | 10 ++-
 test_libs/pyspec/eth2spec/test/context.py    |  7 +-
 test_libs/pyspec/eth2spec/test/utils.py      | 88 ++++++++++----------
 5 files changed, 102 insertions(+), 87 deletions(-)

diff --git a/test_libs/gen_helpers/gen_base/gen_runner.py b/test_libs/gen_helpers/gen_base/gen_runner.py
index b118f48d9..9a2d26664 100644
--- a/test_libs/gen_helpers/gen_base/gen_runner.py
+++ b/test_libs/gen_helpers/gen_base/gen_runner.py
@@ -31,18 +31,6 @@ def validate_configs_dir(path_str):
     if not path.is_dir():
         raise argparse.ArgumentTypeError("Config path must lead to a directory")
 
-    if not Path(path, "constant_presets").exists():
-        raise argparse.ArgumentTypeError("Constant Presets directory must exist")
-
-    if not Path(path, "constant_presets").is_dir():
-        raise argparse.ArgumentTypeError("Constant Presets path must lead to a directory")
-
-    if not Path(path, "fork_timelines").exists():
-        raise argparse.ArgumentTypeError("Fork Timelines directory must exist")
-
-    if not Path(path, "fork_timelines").is_dir():
-        raise argparse.ArgumentTypeError("Fork Timelines path must lead to a directory")
-
     return path
 
 
@@ -56,7 +44,7 @@ def run_generator(generator_name, test_providers: Iterable[TestProvider]):
         and the iterable should not be resumed after a pause with a change of that configuration.
     :return:
     """
-    
+
     parser = argparse.ArgumentParser(
         prog="gen-" + generator_name,
         description=f"Generate YAML test suite files for {generator_name}",
@@ -74,7 +62,7 @@ def run_generator(generator_name, test_providers: Iterable[TestProvider]):
         "--force",
         action="store_true",
         default=False,
-        help="if set overwrite test files if they exist",
+        help="if set re-generate and overwrite test files if they already exist",
     )
     parser.add_argument(
         "-c",
@@ -102,25 +90,43 @@ def run_generator(generator_name, test_providers: Iterable[TestProvider]):
         # loads configuration etc.
         config_name = tprov.prepare(args.configs_path)
         for test_case in tprov.make_cases():
-            case_dir = Path(output_dir) / Path(config_name) / Path(test_case.fork_name) \
-                / Path(test_case.runner_name) / Path(test_case.handler_name) \
-                / Path(test_case.suite_name) / Path(test_case.case_name)
-            print(f'Generating test: {case_dir}')
+            case_dir = Path(output_dir) / Path(config_name) / Path(test_case.fork_name) \
+                / Path(test_case.runner_name) / Path(test_case.handler_name) \
+                / Path(test_case.suite_name) / Path(test_case.case_name)
+            if case_dir.exists():
+                if not args.force:
+                    print(f'Skipping already existing test: {case_dir}')
+                    continue
+                print(f'Warning, output directory {case_dir} already exists,'
+                      f' old files are not deleted but will be overwritten when a new version is produced')
+
+            print(f'Generating test: {case_dir}')
+
             try:
                 case_dir.mkdir(parents=True, exist_ok=True)
+                meta = dict()
+                for (name, out_kind, data) in test_case.case_fn():
+                    if out_kind == "meta":
+                        meta[name] = data
+                    elif out_kind == "data" or out_kind == "ssz":
+                        try:
+                            out_path = case_dir / Path(name + '.yaml')
+                            with out_path.open(file_mode) as f:
+                                yaml.dump(data, f)
+                        except IOError as e:
+                            sys.exit(f'Error when dumping test "{case_dir}", part "{name}": {e}')
+                    # if out_kind == "ssz":
+                    #     # TODO write SSZ as binary file too.
+                    #     out_path = case_dir / Path(name + '.ssz')
+                # Once all meta data is collected (if any), write it to a meta data file.
+                if len(meta) != 0:
+                    try:
+                        out_path = case_dir / Path('meta.yaml')
+                        with out_path.open(file_mode) as f:
+                            yaml.dump(meta, f)
+                    except IOError as e:
+                        sys.exit(f'Error when dumping test "{case_dir}" meta data: {e}')
-            try:
-                for case_part in test_case.case_fn():
-                    if case_part.out_kind == "data" or case_part.out_kind == "ssz":
-                        try:
-                            out_path = case_dir / Path(case_part.name + '.yaml')
-                            with out_path.open(file_mode) as f:
-                                yaml.dump(case_part.data, f)
-                        except IOError as e:
-                            sys.exit(f'Error when dumping test "{case_dir}", part "{case_part.name}": {e}')
-                    # if out_kind == "ssz":
-                    #     # TODO write SSZ as binary file too.
-                    #     out_path = case_dir / Path(name + '.ssz')
-            except Exception as e:
-                print(f"ERROR: failed to generate vector(s) for test {case_dir}: {e}")
+            except Exception as e:
+                print(f"ERROR: failed to generate vector(s) for test {case_dir}: {e}")
 
     print(f"completed {generator_name}")

diff --git a/test_libs/gen_helpers/gen_base/gen_typing.py b/test_libs/gen_helpers/gen_base/gen_typing.py
index 91c4be74a..97ddfa713 100644
--- a/test_libs/gen_helpers/gen_base/gen_typing.py
+++ b/test_libs/gen_helpers/gen_base/gen_typing.py
@@ -2,17 +2,19 @@ from typing import (
     Any,
     Callable,
     Iterable,
+    NewType,
     Dict,
     Tuple,
 )
 from collections import namedtuple
 
-
-@dataclass
-class TestCasePart(object):
-    name: str  # name of the file
-    out_kind: str  # type of data ("data" for generic, "ssz" for SSZ encoded bytes)
-    data: Any
+# Elements: name, out_kind, data
+#
+# out_kind is the type of data:
+#  - "data" for generic
+#  - "ssz" for SSZ encoded bytes
+#  - "meta" for generic data to collect into a meta data dict.
+TestCasePart = NewType("TestCasePart", Tuple[str, str, Any])
 
 
 @dataclass

diff --git a/test_libs/gen_helpers/gen_from_tests/gen.py b/test_libs/gen_helpers/gen_from_tests/gen.py
index cc64fbf41..22496de6b 100644
--- a/test_libs/gen_helpers/gen_from_tests/gen.py
+++ b/test_libs/gen_helpers/gen_from_tests/gen.py
@@ -22,11 +22,17 @@ def generate_from_tests(runner_name: str, handler_name: str, src: Any,
     print("generating test vectors from tests source: %s" % src.__name__)
     for name in fn_names:
         tfn = getattr(src, name)
+
+        # strip off the `test_` prefix
+        case_name = name
+        if case_name.startswith('test_'):
+            case_name = case_name[5:]
+
         yield TestCase(
             fork_name=fork_name,
             runner_name=runner_name,
             handler_name=handler_name,
             suite_name='pyspec_tests',
-            case_name=name,
-            case_fn=lambda: tfn(generator_mode=True, phase=phase, bls_active=bls_active)
+            case_name=case_name,
+            case_fn=lambda: tfn(generator_mode=True, fork_name=fork_name, bls_active=bls_active)
         )

diff --git a/test_libs/pyspec/eth2spec/test/context.py b/test_libs/pyspec/eth2spec/test/context.py
index e7560afc6..2adb76da0 100644
--- a/test_libs/pyspec/eth2spec/test/context.py
+++ b/test_libs/pyspec/eth2spec/test/context.py
@@ -28,7 +28,9 @@ DEFAULT_BLS_ACTIVE = False
 
 
 def spectest_with_bls_switch(fn):
-    return bls_switch(spectest()(fn))
+    # The bls switch must be wrapped by spectest, so that the yielded data
+    # is fully consumed before the BLS setting is restored.
+    return spectest()(bls_switch(fn))
 
 
 # shorthand for decorating @with_state @spectest()
@@ -88,9 +90,8 @@ def bls_switch(fn):
     def entry(*args, **kw):
         old_state = bls.bls_active
         bls.bls_active = kw.pop('bls_active', DEFAULT_BLS_ACTIVE)
-        out = fn(*args, **kw)
+        yield from fn(*args, **kw)
         bls.bls_active = old_state
-        return out
     return entry
 
 

diff --git a/test_libs/pyspec/eth2spec/test/utils.py b/test_libs/pyspec/eth2spec/test/utils.py
index 253691764..4ecabb114 100644
--- a/test_libs/pyspec/eth2spec/test/utils.py
+++ b/test_libs/pyspec/eth2spec/test/utils.py
@@ -1,87 +1,87 @@
-from typing import Dict, Any, Callable, Iterable
+from typing import Dict, Any
 
 from eth2spec.debug.encode import encode
 from eth2spec.utils.ssz.ssz_typing import SSZValue
 
 
 def spectest(description: str = None):
+    """
+    Spectest decorator; it should always be the outermost decorator around functions that yield data,
+    to deal with silent iteration through the yielding function when in a pytest context (i.e. not in generator mode).
+    :param description: Optional description for the test to add to the metadata.
+    :return: Decorator.
+    """
     def runner(fn):
-        # this wraps the function, to hide that the function actually is yielding data, instead of returning once.
+        # this wraps the function, to yield type-annotated entries of data.
+        # Valid types are:
+        #   - "meta": all key-values with this type can be collected by the generator, to put somewhere together.
+        #   - "ssz": raw SSZ bytes
+        #   - "data": a Python structure to be encoded by the user.
         def entry(*args, **kw):
             # check generator mode, may be None/else.
             # "pop" removes it, so it is not passed to the inner function.
             if kw.pop('generator_mode', False) is True:
-                out = {}
-                if description is None:
-                    # fall back on function name for test description
-                    name = fn.__name__
-                    if name.startswith('test_'):
-                        name = name[5:]
-                    out['description'] = name
-                else:
+
+                if description is not None:
                     # description can be explicit
-                    out['description'] = description
-                has_contents = False
-                # put all generated data into a dict.
+                    yield 'description', 'meta', description
+
+                # transform the yielded data, and add type annotations
                 for data in fn(*args, **kw):
-                    has_contents = True
                     # If there is a type argument, encode it as that type.
                     if len(data) == 3:
                         (key, value, typ) = data
-                        out[key] = encode(value, typ)
+                        yield key, 'data', encode(value, typ)
+                        # TODO: add SSZ bytes as second output
                     else:
                         # Otherwise, try to infer the type, but keep it as-is if it's not a SSZ type or bytes.
                         (key, value) = data
                         if isinstance(value, (SSZValue, bytes)):
-                            out[key] = encode(value)
+                            yield key, 'data', encode(value)
+                            # TODO: add SSZ bytes as second output
                         elif isinstance(value, list) and all([isinstance(el, (SSZValue, bytes)) for el in value]):
-                            out[key] = [encode(el) for el in value]
+                            for i, el in enumerate(value):
+                                yield f'{key}_{i}', 'data', encode(el)
+                                # TODO: add SSZ bytes as second output
+                            yield f'{key}_count', 'meta', len(value)
                         else:
                             # not a ssz value.
                             # It could be vector or bytes still, but it is a rare case,
                             # and lists can't be inferred fully (generics lose element type).
                             # In such cases, explicitly state the type of the yielded value as a third yielded object.
-                            out[key] = value
-                if has_contents:
-                    return out
-                else:
-                    return None
+                            # The data will now just be yielded as any python data,
+                            # something that should be encodeable by the generator runner.
+                            yield key, 'data', value
             else:
-                # just complete the function, ignore all yielded data, we are not using it
+                # Just complete the function and ignore all yielded data;
+                # we are not using it (or processing it, so there is nearly zero efficiency loss).
+                # Pytest does not support yielded data in the outer function, so we need to wrap it like this.
                 for _ in fn(*args, **kw):
                     continue
                 return None
+
         return entry
+
     return runner
 
 
-def with_tags(tags: Dict[str, Any]):
+def with_meta_tags(tags: Dict[str, Any]):
     """
-    Decorator factory, adds tags (key, value) pairs to the output of the function.
+    Decorator factory, yields meta tags (key, value) pairs to the output of the function.
     Useful to build test-vector annotations with.
-    This decorator is applied after the ``spectest`` decorator is applied.
     :param tags: dict of tags
     :return: Decorator.
     """
     def runner(fn):
        def entry(*args, **kw):
-            fn_out = fn(*args, **kw)
-            # do not add tags if the function is not returning a dict at all (i.e. not in generator mode)
-            if fn_out is None:
-                return None
-            return {**tags, **fn_out}
+            yielded_any = False
+            for part in fn(*args, **kw):
+                yield part
+                yielded_any = True
+            # Do not add tags if the function did not yield anything (i.e. not in generator mode).
+            # As a pytest test, we do not want to yield anything (yielding is not supported by pytest).
+            if yielded_any:
+                for k, v in tags.items():
+                    yield k, 'meta', v
         return entry
     return runner
-
-
-def with_args(create_args: Callable[[], Iterable[Any]]):
-    """
-    Decorator factory, adds given extra arguments to the decorated function.
-    :param create_args: function to create arguments with.
-    :return: Decorator.
-    """
-    def runner(fn):
-        # this wraps the function, to hide that the function actually yielding data.
-        def entry(*args, **kw):
-            return fn(*(list(create_args()) + list(args)), **kw)
-        return entry
-    return runner
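
After this change, the contract between the decorators and gen_runner.py is a stream of plain (name, out_kind, data) tuples, where out_kind is one of "meta", "data" or "ssz": the runner writes each "data"/"ssz" part to its own <name>.yaml and gathers all "meta" parts into a single meta.yaml. A minimal sketch of that contract follows; example_case and consume are hypothetical illustrations, not code from the repository.

from typing import Any, Iterable, Tuple

TestCasePart = Tuple[str, str, Any]  # (name, out_kind, data), mirroring gen_typing.py


def example_case() -> Iterable[TestCasePart]:
    # Hypothetical case_fn: one "meta" part and one "data" part.
    yield 'description', 'meta', 'example case'
    yield 'pre', 'data', {'slot': 0}


def consume(parts: Iterable[TestCasePart]) -> None:
    # Mirrors the gen_runner loop: "meta" parts are collected into one dict,
    # while each "data"/"ssz" part would be written to its own <name>.yaml file.
    meta = dict()
    for name, out_kind, data in parts:
        if out_kind == 'meta':
            meta[name] = data
        elif out_kind == 'data' or out_kind == 'ssz':
            print(f'would write {name}.yaml: {data}')
    if len(meta) != 0:
        print(f'would write meta.yaml: {meta}')


consume(example_case())

Collecting "meta" parts until the end is what lets decorators such as spectest and with_meta_tags yield metadata at any point in the stream without caring about output file layout.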