From 7baf35a177f107b5dac6fef7a7c47fd3f350dfd9 Mon Sep 17 00:00:00 2001 From: Jon Herron Date: Thu, 13 Apr 2023 10:29:30 -0400 Subject: [PATCH] Executing workflows from the test json specs, sanity testing before we move on --- Makefile | 2 +- scripts/specs_json.py | 3 +- spiff_element_units/__init__.py | 2 -- tests/integration/__init__.py | 0 tests/integration/test_execute_specs_json.py | 36 ++++++++++++++++++++ 5 files changed, 38 insertions(+), 5 deletions(-) create mode 100644 tests/integration/__init__.py create mode 100644 tests/integration/test_execute_specs_json.py diff --git a/Makefile b/Makefile index 2ab9fd9..5abaa4c 100644 --- a/Makefile +++ b/Makefile @@ -26,7 +26,7 @@ dev-env: .PHONY: tests tests: - $(DEV_AS_ME) unittest-parallel -vs $(TESTS) -p test_*.py -t . + $(DEV_AS_ME) unittest-parallel -vs $(TESTS) -p test_\*.py -t . # # used to copy in/parse files from my process-models, probably will want to move these to diff --git a/scripts/specs_json.py b/scripts/specs_json.py index 504aff1..aab6906 100644 --- a/scripts/specs_json.py +++ b/scripts/specs_json.py @@ -32,8 +32,7 @@ def _to_dict(bpmn_file): return {k: SPEC_CONVERTER.convert(v) for k, v in specs.items()} def _write_dict_as_json(bpmn_file, dct): - # correct, this is not very robust - json_filename = bpmn_file.replace("process-models", "specs-json").replace(".bpmn", ".json") + json_filename = bpmn_file.replace("/process-models/", "/specs-json/").replace(".bpmn", ".json") os.makedirs(os.path.dirname(json_filename), exist_ok=True) with open(json_filename, "w") as f: f.write(json.dumps(dct, indent=4, sort_keys=True)) diff --git a/spiff_element_units/__init__.py b/spiff_element_units/__init__.py index feb40fd..e69de29 100644 --- a/spiff_element_units/__init__.py +++ b/spiff_element_units/__init__.py @@ -1,2 +0,0 @@ -def this_returns_true(): - return True diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py new file mode 100644 index 0000000..e69de29 diff --git 
a/tests/integration/test_execute_specs_json.py b/tests/integration/test_execute_specs_json.py new file mode 100644 index 0000000..b8403a1 --- /dev/null +++ b/tests/integration/test_execute_specs_json.py @@ -0,0 +1,36 @@ +import json + +from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer +from SpiffWorkflow.bpmn.workflow import BpmnWorkflow +from SpiffWorkflow.spiff.parser.process import SpiffBpmnParser +from SpiffWorkflow.spiff.serializer.config import SPIFF_SPEC_CONFIG + +from unittest import TestCase + +# TODO: most likely will want these moved to a test helper file +SPEC_CONVERTER = BpmnWorkflowSerializer.configure_workflow_spec_converter(SPIFF_SPEC_CONFIG) + +def _load_specs_json(relname): + with open(f"tests/data/specs-json/test-cases/{relname}") as f: + return json.load(f) + +def _converted_specs(specs, process_id): + converted_specs = {k: SPEC_CONVERTER.restore(v) for k, v in specs.items()} + top_level = converted_specs.pop(process_id) + subprocesses = converted_specs + return (top_level, subprocesses) + +def _workflow_from_specs_json(relname, process_id): + specs = _load_specs_json(relname) + top_level, subprocesses = _converted_specs(specs, process_id) + return BpmnWorkflow(top_level, subprocesses) + +# TODO: to leverage unittest-parallel, is it better to have many small test suites? +# does the number of files matter or just the TestCase classes? +class ExecuteSpecJsonFilesTest(TestCase): + def test_no_tasks_executes(self): + workflow = _workflow_from_specs_json("no-tasks/no-tasks.json", "no_tasks") + workflow.do_engine_steps() + + assert workflow.is_completed() + assert workflow.data == {}