Update testgen code; unless the force flag is set, test generation is skipped for test cases whose output already exists.

This commit is contained in:
protolambda 2019-07-26 19:19:36 +02:00
parent b73625fbf1
commit 69052ac750
No known key found for this signature in database
GPG Key ID: EC89FDBB2B4C7623
5 changed files with 102 additions and 87 deletions

View File

@ -31,18 +31,6 @@ def validate_configs_dir(path_str):
if not path.is_dir(): if not path.is_dir():
raise argparse.ArgumentTypeError("Config path must lead to a directory") raise argparse.ArgumentTypeError("Config path must lead to a directory")
if not Path(path, "constant_presets").exists():
raise argparse.ArgumentTypeError("Constant Presets directory must exist")
if not Path(path, "constant_presets").is_dir():
raise argparse.ArgumentTypeError("Constant Presets path must lead to a directory")
if not Path(path, "fork_timelines").exists():
raise argparse.ArgumentTypeError("Fork Timelines directory must exist")
if not Path(path, "fork_timelines").is_dir():
raise argparse.ArgumentTypeError("Fork Timelines path must lead to a directory")
return path return path
@ -74,7 +62,7 @@ def run_generator(generator_name, test_providers: Iterable[TestProvider]):
"--force", "--force",
action="store_true", action="store_true",
default=False, default=False,
help="if set overwrite test files if they exist", help="if set re-generate and overwrite test files if they already exist",
) )
parser.add_argument( parser.add_argument(
"-c", "-c",
@ -102,25 +90,43 @@ def run_generator(generator_name, test_providers: Iterable[TestProvider]):
# loads configuration etc. # loads configuration etc.
config_name = tprov.prepare(args.configs_path) config_name = tprov.prepare(args.configs_path)
for test_case in tprov.make_cases(): for test_case in tprov.make_cases():
case_dir = Path(output_dir) / Path(config_name) / Path(test_case.fork_name) \ case_dir = Path(output_dir) / Path(config_name) / Path(test_case.fork_name) \
/ Path(test_case.runner_name) / Path(test_case.handler_name) \ / Path(test_case.runner_name) / Path(test_case.handler_name) \
/ Path(test_case.suite_name) / Path(test_case.case_name) / Path(test_case.suite_name) / Path(test_case.case_name)
print(f'Generating test: {case_dir}')
if case_dir.exists():
if not args.force:
print(f'Skipping already existing test: {case_dir}')
continue
print(f'Warning, output directory {case_dir} already exist,'
f' old files are not deleted but will be overwritten when a new version is produced')
print(f'Generating test: {case_dir}')
try:
case_dir.mkdir(parents=True, exist_ok=True) case_dir.mkdir(parents=True, exist_ok=True)
meta = dict()
for (name, out_kind, data) in test_case.case_fn():
if out_kind == "meta":
meta[name] = data
elif out_kind == "data" or out_kind == "ssz":
try:
out_path = case_dir / Path(name + '.yaml')
with out_path.open(file_mode) as f:
yaml.dump(data, f)
except IOError as e:
sys.exit(f'Error when dumping test "{case_dir}", part "{name}": {e}')
# if out_kind == "ssz":
# # TODO write SSZ as binary file too.
# out_path = case_dir / Path(name + '.ssz')
# Once all meta data is collected (if any), write it to a meta data file.
if len(meta) != 0:
try:
out_path = case_dir / Path('meta.yaml')
with out_path.open(file_mode) as f:
yaml.dump(meta, f)
except IOError as e:
sys.exit(f'Error when dumping test "{case_dir}" meta data": {e}')
try: except Exception as e:
for case_part in test_case.case_fn(): print(f"ERROR: failed to generate vector(s) for test {case_dir}: {e}")
if case_part.out_kind == "data" or case_part.out_kind == "ssz":
try:
out_path = case_dir / Path(case_part.name + '.yaml')
with out_path.open(file_mode) as f:
yaml.dump(case_part.data, f)
except IOError as e:
sys.exit(f'Error when dumping test "{case_dir}", part "{case_part.name}": {e}')
# if out_kind == "ssz":
# # TODO write SSZ as binary file too.
# out_path = case_dir / Path(name + '.ssz')
except Exception as e:
print(f"ERROR: failed to generate vector(s) for test {case_dir}: {e}")
print(f"completed {generator_name}") print(f"completed {generator_name}")

View File

@ -2,17 +2,19 @@ from typing import (
Any, Any,
Callable, Callable,
Iterable, Iterable,
NewType,
Dict, Dict,
Tuple, Tuple,
) )
from collections import namedtuple from collections import namedtuple
# Elements: name, out_kind, data
@dataclass #
class TestCasePart(object): # out_kind is the type of data:
name: str # name of the file # - "data" for generic
out_kind: str # type of data ("data" for generic, "ssz" for SSZ encoded bytes) # - "ssz" for SSZ encoded bytes
data: Any # - "meta" for generic data to collect into a meta data dict.
TestCasePart = NewType("TestCasePart", Tuple[str, str, Any])
@dataclass @dataclass

View File

@ -22,11 +22,17 @@ def generate_from_tests(runner_name: str, handler_name: str, src: Any,
print("generating test vectors from tests source: %s" % src.__name__) print("generating test vectors from tests source: %s" % src.__name__)
for name in fn_names: for name in fn_names:
tfn = getattr(src, name) tfn = getattr(src, name)
# strip off the `test_`
case_name = name
if case_name.startswith('test_'):
case_name = case_name[5:]
yield TestCase( yield TestCase(
fork_name=fork_name, fork_name=fork_name,
runner_name=runner_name, runner_name=runner_name,
handler_name=handler_name, handler_name=handler_name,
suite_name='pyspec_tests', suite_name='pyspec_tests',
case_name=name, case_name=case_name,
case_fn=lambda: tfn(generator_mode=True, phase=phase, bls_active=bls_active) case_fn=lambda: tfn(generator_mode=True, fork_name=fork_name, bls_active=bls_active)
) )

View File

@ -28,7 +28,9 @@ DEFAULT_BLS_ACTIVE = False
def spectest_with_bls_switch(fn): def spectest_with_bls_switch(fn):
return bls_switch(spectest()(fn)) # Bls switch must be wrapped by spectest,
# to fully go through the yielded bls switch data, before setting back the BLS setting.
return spectest()(bls_switch(fn))
# shorthand for decorating @with_state @spectest() # shorthand for decorating @with_state @spectest()
@ -88,9 +90,8 @@ def bls_switch(fn):
def entry(*args, **kw): def entry(*args, **kw):
old_state = bls.bls_active old_state = bls.bls_active
bls.bls_active = kw.pop('bls_active', DEFAULT_BLS_ACTIVE) bls.bls_active = kw.pop('bls_active', DEFAULT_BLS_ACTIVE)
out = fn(*args, **kw) yield from fn(*args, **kw)
bls.bls_active = old_state bls.bls_active = old_state
return out
return entry return entry

View File

@ -1,87 +1,87 @@
from typing import Dict, Any, Callable, Iterable from typing import Dict, Any
from eth2spec.debug.encode import encode from eth2spec.debug.encode import encode
from eth2spec.utils.ssz.ssz_typing import SSZValue from eth2spec.utils.ssz.ssz_typing import SSZValue
def spectest(description: str = None): def spectest(description: str = None):
"""
Spectest decorator, should always be the most outer decorator around functions that yield data.
to deal with silent iteration through yielding function when in a pytest context (i.e. not in generator mode).
:param description: Optional description for the test to add to the metadata.
:return: Decorator.
"""
def runner(fn): def runner(fn):
# this wraps the function, to hide that the function actually is yielding data, instead of returning once. # this wraps the function, to yield type-annotated entries of data.
# Valid types are:
# - "meta": all key-values with this type can be collected by the generator, to put somewhere together.
# - "ssz": raw SSZ bytes
# - "data": a python structure to be encoded by the user.
def entry(*args, **kw): def entry(*args, **kw):
# check generator mode, may be None/else. # check generator mode, may be None/else.
# "pop" removes it, so it is not passed to the inner function. # "pop" removes it, so it is not passed to the inner function.
if kw.pop('generator_mode', False) is True: if kw.pop('generator_mode', False) is True:
out = {}
if description is None: if description is not None:
# fall back on function name for test description
name = fn.__name__
if name.startswith('test_'):
name = name[5:]
out['description'] = name
else:
# description can be explicit # description can be explicit
out['description'] = description yield 'description', 'meta', description
has_contents = False
# put all generated data into a dict. # transform the yielded data, and add type annotations
for data in fn(*args, **kw): for data in fn(*args, **kw):
has_contents = True
# If there is a type argument, encode it as that type. # If there is a type argument, encode it as that type.
if len(data) == 3: if len(data) == 3:
(key, value, typ) = data (key, value, typ) = data
out[key] = encode(value, typ) yield key, 'data', encode(value, typ)
# TODO: add SSZ bytes as second output
else: else:
# Otherwise, try to infer the type, but keep it as-is if it's not a SSZ type or bytes. # Otherwise, try to infer the type, but keep it as-is if it's not a SSZ type or bytes.
(key, value) = data (key, value) = data
if isinstance(value, (SSZValue, bytes)): if isinstance(value, (SSZValue, bytes)):
out[key] = encode(value) yield key, 'data', encode(value)
# TODO: add SSZ bytes as second output
elif isinstance(value, list) and all([isinstance(el, (SSZValue, bytes)) for el in value]): elif isinstance(value, list) and all([isinstance(el, (SSZValue, bytes)) for el in value]):
out[key] = [encode(el) for el in value] for i, el in enumerate(value):
yield f'{key}_{i}', 'data', encode(el)
# TODO: add SSZ bytes as second output
yield f'{key}_count', 'meta', len(value)
else: else:
# not a ssz value. # not a ssz value.
# It could be vector or bytes still, but it is a rare case, # It could be vector or bytes still, but it is a rare case,
# and lists can't be inferred fully (generics lose element type). # and lists can't be inferred fully (generics lose element type).
# In such cases, explicitly state the type of the yielded value as a third yielded object. # In such cases, explicitly state the type of the yielded value as a third yielded object.
out[key] = value # The data will now just be yielded as any python data,
if has_contents: # something that should be encodeable by the generator runner.
return out yield key, 'data', value
else:
return None
else: else:
# just complete the function, ignore all yielded data, we are not using it # Just complete the function, ignore all yielded data,
# we are not using it (or processing it, i.e. nearly zero efficiency loss)
# Pytest does not support yielded data in the outer function, so we need to wrap it like this.
for _ in fn(*args, **kw): for _ in fn(*args, **kw):
continue continue
return None return None
return entry return entry
return runner return runner
def with_tags(tags: Dict[str, Any]): def with_meta_tags(tags: Dict[str, Any]):
""" """
Decorator factory, adds tags (key, value) pairs to the output of the function. Decorator factory, yields meta tags (key, value) pairs to the output of the function.
Useful to build test-vector annotations with. Useful to build test-vector annotations with.
This decorator is applied after the ``spectest`` decorator is applied.
:param tags: dict of tags :param tags: dict of tags
:return: Decorator. :return: Decorator.
""" """
def runner(fn): def runner(fn):
def entry(*args, **kw): def entry(*args, **kw):
fn_out = fn(*args, **kw) yielded_any = False
# do not add tags if the function is not returning a dict at all (i.e. not in generator mode) for part in fn(*args, **kw):
if fn_out is None: yield part
return None yielded_any = True
return {**tags, **fn_out} # Do not add tags if the function is not returning a dict at all (i.e. not in generator mode).
# As a pytest, we do not want to be yielding anything (unsupported by pytest)
if yielded_any:
for k, v in tags:
yield k, 'meta', v
return entry return entry
return runner return runner
def with_args(create_args: Callable[[], Iterable[Any]]):
"""
Decorator factory, adds given extra arguments to the decorated function.
:param create_args: function to create arguments with.
:return: Decorator.
"""
def runner(fn):
# this wraps the function, to hide that the function actually yielding data.
def entry(*args, **kw):
return fn(*(list(create_args()) + list(args)), **kw)
return entry
return runner