From f14b4acb86bbece3cf9056481847b403f0a7bd54 Mon Sep 17 00:00:00 2001 From: burnettk Date: Fri, 21 Apr 2023 11:22:52 -0400 Subject: [PATCH] Squashed 'SpiffWorkflow/' changes from 96ad2a2b0..73886584b 73886584b Merge pull request #315 from sartography/feature/new-task-states a04fdd311 Merge remote-tracking branch 'origin/main' into feature/new-task-states 3d376bf9a documentation and comment updates 7e368ee4b copy edit e7b9fe91c typo eceef15a7 Called elements (#316) 5a0fd2774 add serialization migration 7211e67ee Merge pull request #314 from sartography/bugfix/data-object-references 403acc1f5 use same data objects & references in subprocesses after deserialization c54091dee add two more states to spiff fa04a14a7 clean up script engine a little bit 162a1c5f5 override create parser in spiff package to allow duplicate process names 98a1b37e0 Merge pull request #312 from sartography/bugfix/run-boundary-events-from-engine-steps 067d6a723 do not execute boundary events in catch 1c877dd76 send external events to top level workflow git-subtree-dir: SpiffWorkflow git-subtree-split: 73886584b17c7d11a9713d0c4526ed41e411fc45 --- .gitignore | 1 + Makefile | 2 +- SpiffWorkflow/bpmn/PythonScriptEngine.py | 35 +- .../bpmn/PythonScriptEngineEnvironment.py | 22 +- SpiffWorkflow/bpmn/parser/ProcessParser.py | 6 + .../bpmn/serializer/migration/version_1_2.py | 111 ++-- .../serializer/migration/version_migration.py | 3 + SpiffWorkflow/bpmn/serializer/workflow.py | 6 +- SpiffWorkflow/bpmn/specs/BpmnSpecMixin.py | 5 +- SpiffWorkflow/bpmn/specs/ScriptTask.py | 13 +- SpiffWorkflow/bpmn/specs/SubWorkflowTask.py | 12 +- .../bpmn/specs/events/IntermediateEvent.py | 14 +- .../bpmn/specs/events/event_definitions.py | 3 +- SpiffWorkflow/serializer/dict.py | 11 +- SpiffWorkflow/specs/base.py | 32 +- SpiffWorkflow/spiff/parser/process.py | 8 +- SpiffWorkflow/spiff/specs/service_task.py | 4 +- SpiffWorkflow/spiff/specs/spiff_task.py | 2 +- SpiffWorkflow/spiff/specs/subworkflow_task.py | 7 +- SpiffWorkflow/task.py | 63 +- tests/SpiffWorkflow/bpmn/ApprovalsTest.py | 16 + .../bpmn/BpmnWorkflowTestCase.py | 7 + .../bpmn/CallActivityEndEventTest.py | 8 +- .../bpmn/CallActivitySubProcessPropTest.py | 2 + tests/SpiffWorkflow/bpmn/CustomScriptTest.py | 5 +- .../bpmn/DataObjectReferenceTest.py | 3 + .../bpmn/ExclusiveGatewayNoDefaultTest.py | 13 +- tests/SpiffWorkflow/bpmn/IOSpecTest.py | 14 +- .../bpmn/InclusiveGatewayTest.py | 3 + .../SpiffWorkflow/bpmn/NestedProcessesTest.py | 6 +- .../bpmn/ParallelMultipleSplitsTest.py | 1 + tests/SpiffWorkflow/bpmn/ProcessParserTest.py | 24 + .../bpmn/PythonScriptEngineEnvironmentTest.py | 32 +- .../SpiffWorkflow/bpmn/ResetSubProcessTest.py | 15 +- .../bpmn/data/multiple_call_activities.bpmn | 62 ++ tests/SpiffWorkflow/bpmn/data/no-tasks.bpmn | 26 + .../SpiffWorkflow/bpmn/data/script-start.bpmn | 87 +++ .../data/serialization/v1.1-task-states.json | 563 ++++++++++++++++++ .../bpmn/data/single_call_activity.bpmn | 38 ++ .../bpmn/events/ActionManagementTest.py | 2 + .../bpmn/events/CallActivityEscalationTest.py | 1 + .../bpmn/events/MessageInterruptsSpTest.py | 7 +- .../bpmn/events/MessageInterruptsTest.py | 16 +- .../bpmn/events/MessageNonInterruptTest.py | 24 +- .../bpmn/events/MessageNonInterruptsSpTest.py | 16 +- .../SpiffWorkflow/bpmn/events/MessagesTest.py | 11 +- .../bpmn/events/MultipleThrowEventTest.py | 2 + .../events/NITimerDurationBoundaryTest.py | 4 +- .../bpmn/events/TransactionSubprocssTest.py | 1 + .../bpmn/serializer/VersionMigrationTest.py | 21 + 
.../camunda/CallActivityMessageTest.py | 2 +- .../ExternalMessageBoundaryEventTest.py | 2 +- .../camunda/NIMessageBoundaryTest.py | 1 + .../camunda/ResetTokenSubWorkflowTest.py | 5 +- .../SpiffWorkflow/camunda/SubWorkflowTest.py | 3 +- .../spiff/PrescriptPostscriptTest.py | 3 + 56 files changed, 1191 insertions(+), 215 deletions(-) create mode 100644 tests/SpiffWorkflow/bpmn/ProcessParserTest.py create mode 100644 tests/SpiffWorkflow/bpmn/data/multiple_call_activities.bpmn create mode 100644 tests/SpiffWorkflow/bpmn/data/no-tasks.bpmn create mode 100644 tests/SpiffWorkflow/bpmn/data/script-start.bpmn create mode 100644 tests/SpiffWorkflow/bpmn/data/serialization/v1.1-task-states.json create mode 100644 tests/SpiffWorkflow/bpmn/data/single_call_activity.bpmn diff --git a/.gitignore b/.gitignore index d16de165..c58f8c2e 100644 --- a/.gitignore +++ b/.gitignore @@ -277,3 +277,4 @@ coverage.xml .c9revisions .idea /venv +*~ \ No newline at end of file diff --git a/Makefile b/Makefile index 61cee6b5..a8b9819c 100644 --- a/Makefile +++ b/Makefile @@ -45,7 +45,7 @@ tests-par: echo " pip install unittest-parallel"; \ exit 1; \ fi - unittest-parallel --module-fixtures -vs tests/SpiffWorkflow -p \*Test.py -t . + unittest-parallel --module-fixtures -qbs tests/SpiffWorkflow -p \*Test.py -t . .PHONY : tests-cov tests-cov: diff --git a/SpiffWorkflow/bpmn/PythonScriptEngine.py b/SpiffWorkflow/bpmn/PythonScriptEngine.py index 882d76bd..6e9550d4 100644 --- a/SpiffWorkflow/bpmn/PythonScriptEngine.py +++ b/SpiffWorkflow/bpmn/PythonScriptEngine.py @@ -1,13 +1,11 @@ # -*- coding: utf-8 -*- import ast -import copy import sys import traceback import warnings from .PythonScriptEngineEnvironment import TaskDataEnvironment from ..exceptions import SpiffWorkflowException, WorkflowTaskException -from ..operators import Operator # Copyright (C) 2020 Kelly McDonald @@ -40,10 +38,12 @@ class PythonScriptEngine(object): """ def __init__(self, default_globals=None, scripting_additions=None, environment=None): + if default_globals is not None or scripting_additions is not None: warnings.warn(f'default_globals and scripting_additions are deprecated. ' f'Please provide an environment such as TaskDataEnvrionment', DeprecationWarning, stacklevel=2) + if environment is None: environment_globals = {} environment_globals.update(default_globals or {}) @@ -51,7 +51,6 @@ class PythonScriptEngine(object): self.environment = TaskDataEnvironment(environment_globals) else: self.environment = environment - self.error_tasks = {} def validate(self, expression): ast.parse(expression) @@ -62,12 +61,7 @@ class PythonScriptEngine(object): return the result. 
""" try: - if isinstance(expression, Operator): - # I am assuming that this takes care of some kind of XML - # expression judging from the contents of operators.py - return expression._matches(task) - else: - return self._evaluate(expression, task.data, external_methods) + return self._evaluate(expression, task.data, external_methods) except SpiffWorkflowException as se: se.add_note(f"Error evaluating expression '{expression}'") raise se @@ -75,15 +69,11 @@ class PythonScriptEngine(object): raise WorkflowTaskException(f"Error evaluating expression '{expression}'", task=task, exception=e) def execute(self, task, script, external_methods=None): - """ - Execute the script, within the context of the specified task - """ + """Execute the script, within the context of the specified task.""" try: - self.check_for_overwrite(task, external_methods or {}) - self._execute(script, task.data, external_methods or {}) + return self._execute(script, task.data, external_methods or {}) except Exception as err: wte = self.create_task_exec_exception(task, script, err) - self.error_tasks[task.id] = wte raise wte def call_service(self, operation_name, operation_params, task_data): @@ -120,21 +110,8 @@ class PythonScriptEngine(object): error_line = script.splitlines()[line_number - 1] return line_number, error_line - def check_for_overwrite(self, task, external_methods): - """It's possible that someone will define a variable with the - same name as a pre-defined script, rending the script un-callable. - This results in a nearly indecipherable error. Better to fail - fast with a sensible error message.""" - func_overwrites = set(self.environment.globals).intersection(task.data) - func_overwrites.update(set(external_methods).intersection(task.data)) - if len(func_overwrites) > 0: - msg = f"You have task data that overwrites a predefined " \ - f"function(s). Please change the following variable or " \ - f"field name(s) to something else: {func_overwrites}" - raise WorkflowTaskException(msg, task=task) - def _evaluate(self, expression, context, external_methods=None): return self.environment.evaluate(expression, context, external_methods) def _execute(self, script, context, external_methods=None): - self.environment.execute(script, context, external_methods) + return self.environment.execute(script, context, external_methods) diff --git a/SpiffWorkflow/bpmn/PythonScriptEngineEnvironment.py b/SpiffWorkflow/bpmn/PythonScriptEngineEnvironment.py index e09beb7c..1b07b478 100644 --- a/SpiffWorkflow/bpmn/PythonScriptEngineEnvironment.py +++ b/SpiffWorkflow/bpmn/PythonScriptEngineEnvironment.py @@ -1,6 +1,7 @@ import copy import warnings + class BasePythonScriptEngineEnvironment: def __init__(self, environment_globals=None): self.globals = environment_globals or {} @@ -11,7 +12,9 @@ class BasePythonScriptEngineEnvironment: def execute(self, script, context, external_methods=None): raise NotImplementedError("Subclass must implement this method") + class TaskDataEnvironment(BasePythonScriptEngineEnvironment): + def evaluate(self, expression, context, external_methods=None): my_globals = copy.copy(self.globals) # else we pollute all later evals. 
self._prepare_context(context) @@ -20,6 +23,7 @@ class TaskDataEnvironment(BasePythonScriptEngineEnvironment): return eval(expression, my_globals) def execute(self, script, context, external_methods=None): + self.check_for_overwrite(context, external_methods or {}) my_globals = copy.copy(self.globals) self._prepare_context(context) my_globals.update(external_methods or {}) @@ -28,12 +32,12 @@ class TaskDataEnvironment(BasePythonScriptEngineEnvironment): exec(script, context) finally: self._remove_globals_and_functions_from_context(context, external_methods) + return True def _prepare_context(self, context): pass - def _remove_globals_and_functions_from_context(self, context, - external_methods=None): + def _remove_globals_and_functions_from_context(self, context, external_methods=None): """When executing a script, don't leave the globals, functions and external methods in the context that we have modified.""" for k in list(context): @@ -43,6 +47,20 @@ class TaskDataEnvironment(BasePythonScriptEngineEnvironment): external_methods and k in external_methods: context.pop(k) + def check_for_overwrite(self, context, external_methods): + """It's possible that someone will define a variable with the + same name as a pre-defined script, rendering the script un-callable. + This results in a nearly indecipherable error. Better to fail + fast with a sensible error message.""" + func_overwrites = set(self.globals).intersection(context) + func_overwrites.update(set(external_methods).intersection(context)) + if len(func_overwrites) > 0: + msg = f"You have task data that overwrites a predefined " \ + f"function(s). Please change the following variable or " \ + f"field name(s) to something else: {func_overwrites}" + raise ValueError(msg) + + class Box(dict): """ Example: diff --git a/SpiffWorkflow/bpmn/parser/ProcessParser.py b/SpiffWorkflow/bpmn/parser/ProcessParser.py index 05c0d500..18ddf3b2 100644 --- a/SpiffWorkflow/bpmn/parser/ProcessParser.py +++ b/SpiffWorkflow/bpmn/parser/ProcessParser.py @@ -88,6 +88,12 @@ class ProcessParser(NodeParser): return message_names + def called_element_ids(self): + """ + Returns a list of ids referenced by `bpmn:callActivity` nodes. + """ + return self.xpath("./bpmn:callActivity/@calledElement") + def parse_node(self, node): """ Parses the specified child task node, and returns the task spec. This diff --git a/SpiffWorkflow/bpmn/serializer/migration/version_1_2.py b/SpiffWorkflow/bpmn/serializer/migration/version_1_2.py index 473095e6..079120e0 100644 --- a/SpiffWorkflow/bpmn/serializer/migration/version_1_2.py +++ b/SpiffWorkflow/bpmn/serializer/migration/version_1_2.py @@ -16,58 +16,64 @@ def convert_timer_expressions(dct): message = "Unable to convert time specifications for {spec}. This most likely because the values are set during workflow execution." + # Moving this code into helper functions to make sonarcloud STFU about this file. + # Don't really consider this better but whatever. 
+ + def convert_timedate(spec): + expr = spec['event_definition'].pop('dateTime') + try: + dt = eval(expr) + if isinstance(dt, datetime): + spec['event_definition']['expression'] = f"'{dt.isoformat()}'" + spec['event_definition']['typename'] = 'TimeDateEventDefinition' + elif isinstance(dt, timedelta): + spec['event_definition']['expression'] = f"'{td_to_iso(dt)}'" + spec['event_definition']['typename'] = 'DurationTimerEventDefinition' + except: + raise VersionMigrationError(message.format(spec=spec['name'])) + + def convert_cycle(spec, task): + expr = spec['event_definition'].pop('cycle_definition') + try: + repeat, duration = eval(expr) + spec['event_definition']['expression'] = f"'R{repeat}/{td_to_iso(duration)}'" + if task is not None: + cycles_complete = task['data'].pop('repeat_count', 0) + start_time = task['internal_data'].pop('start_time', None) + if start_time is not None: + dt = datetime.fromisoformat(start_time) + task['internal_data']['event_value'] = { + 'cycles': repeat - cycles_complete, + 'next': datetime.combine(dt.date(), dt.time(), LOCALTZ).isoformat(), + 'duration': duration.total_seconds(), + } + except: + raise VersionMigrationError(message.format(spec=spec['name'])) + + if spec['typename'] == 'StartEvent': + spec['outputs'].remove(spec['name']) + if task is not None: + children = [ dct['tasks'][c] for c in task['children'] ] + # Formerly cycles were handled by looping back and reusing the tasks so this removes the extra tasks + remove = [ c for c in children if c['task_spec'] == task['task_spec']][0] + for task_id in remove['children']: + child = dct['tasks'][task_id] + if child['task_spec'].startswith('return') or child['state'] != TaskState.COMPLETED: + dct['tasks'].pop(task_id) + else: + task['children'].append(task_id) + task['children'].remove(remove['id']) + dct['tasks'].pop(remove['id']) + has_timer = lambda ts: 'event_definition' in ts and ts['event_definition']['typename'] in [ 'CycleTimerEventDefinition', 'TimerEventDefinition'] for spec in [ ts for ts in dct['spec']['task_specs'].values() if has_timer(ts) ]: spec['event_definition']['name'] = spec['event_definition'].pop('label') if spec['event_definition']['typename'] == 'TimerEventDefinition': - expr = spec['event_definition'].pop('dateTime') - try: - dt = eval(expr) - if isinstance(dt, datetime): - spec['event_definition']['expression'] = f"'{dt.isoformat()}'" - spec['event_definition']['typename'] = 'TimeDateEventDefinition' - elif isinstance(dt, timedelta): - spec['event_definition']['expression'] = f"'{td_to_iso(dt)}'" - spec['event_definition']['typename'] = 'DurationTimerEventDefinition' - except: - raise VersionMigrationError(message.format(spec=spec['name'])) - + convert_timedate(spec) if spec['event_definition']['typename'] == 'CycleTimerEventDefinition': - tasks = [ t for t in dct['tasks'].values() if t['task_spec'] == spec['name'] ] task = tasks[0] if len(tasks) > 0 else None - - expr = spec['event_definition'].pop('cycle_definition') - try: - repeat, duration = eval(expr) - spec['event_definition']['expression'] = f"'R{repeat}/{td_to_iso(duration)}'" - if task is not None: - cycles_complete = task['data'].pop('repeat_count', 0) - start_time = task['internal_data'].pop('start_time', None) - if start_time is not None: - dt = datetime.fromisoformat(start_time) - task['internal_data']['event_value'] = { - 'cycles': repeat - cycles_complete, - 'next': datetime.combine(dt.date(), dt.time(), LOCALTZ).isoformat(), - 'duration': duration.total_seconds(), - } - except: - raise 
VersionMigrationError(message.format(spec=spec['name'])) - - if spec['typename'] == 'StartEvent': - spec['outputs'].remove(spec['name']) - if task is not None: - children = [ dct['tasks'][c] for c in task['children'] ] - # Formerly cycles were handled by looping back and reusing the tasks so this removes the extra tasks - remove = [ c for c in children if c['task_spec'] == task['task_spec']][0] - for task_id in remove['children']: - child = dct['tasks'][task_id] - if child['task_spec'].startswith('return') or child['state'] != TaskState.COMPLETED: - dct['tasks'].pop(task_id) - else: - task['children'].append(task_id) - task['children'].remove(remove['id']) - dct['tasks'].pop(remove['id']) + convert_cycle(spec, task) def add_default_condition_to_cond_task_specs(dct): @@ -122,3 +128,18 @@ def remove_loop_reset(dct): parent = dct['tasks'].get(task['parent']) parent['children'] = [c for c in parent['children'] if c != task['id']] dct['spec']['task_specs'].pop(spec['name']) + +def update_task_states(dct): + + def update(process): + for task in process['tasks'].values(): + if task['state'] == 32: + task['state'] = TaskState.COMPLETED + elif task['state'] == 64: + task['state'] = TaskState.CANCELLED + + root = dct['tasks'].get(dct['root']) + if root['state'] == 32: + update(dct) + for sp in dct['subprocesses'].values(): + update(sp) diff --git a/SpiffWorkflow/bpmn/serializer/migration/version_migration.py b/SpiffWorkflow/bpmn/serializer/migration/version_migration.py index 47e1fe4b..d10fcf0e 100644 --- a/SpiffWorkflow/bpmn/serializer/migration/version_migration.py +++ b/SpiffWorkflow/bpmn/serializer/migration/version_migration.py @@ -7,6 +7,7 @@ from .version_1_2 import ( create_data_objects_and_io_specs, check_multiinstance, remove_loop_reset, + update_task_states, ) def from_version_1_1(old): @@ -36,6 +37,7 @@ def from_version_1_1(old): create_data_objects_and_io_specs(new) check_multiinstance(new) remove_loop_reset(new) + update_task_states(new) new['VERSION'] = "1.2" return new @@ -53,6 +55,7 @@ def from_version_1_0(old): attributes based on the task states. 
""" new = deepcopy(old) + new['VERSION'] = "1.1" move_subprocesses_to_top(new) return from_version_1_1(new) diff --git a/SpiffWorkflow/bpmn/serializer/workflow.py b/SpiffWorkflow/bpmn/serializer/workflow.py index 5167847e..737180a2 100644 --- a/SpiffWorkflow/bpmn/serializer/workflow.py +++ b/SpiffWorkflow/bpmn/serializer/workflow.py @@ -248,7 +248,11 @@ class BpmnWorkflowSerializer: subprocess_spec = top.subprocess_specs[task_spec.spec] subprocess = self.wf_class(subprocess_spec, {}, name=task_spec.name, parent=process, deserializing=True) subprocess_dct = top_dct['subprocesses'].get(task_id, {}) - subprocess.data = self.data_converter.restore(subprocess_dct.pop('data')) + subprocess.spec.data_objects.update(process.spec.data_objects) + if len(subprocess.spec.data_objects) > 0: + subprocess.data = process.data + else: + subprocess.data = self.data_converter.restore(subprocess_dct.pop('data')) subprocess.success = subprocess_dct.pop('success') subprocess.task_tree = self.task_tree_from_dict(subprocess_dct, subprocess_dct.pop('root'), None, subprocess, top, top_dct) subprocess.completed_event.connect(task_spec._on_subworkflow_completed, task) diff --git a/SpiffWorkflow/bpmn/specs/BpmnSpecMixin.py b/SpiffWorkflow/bpmn/specs/BpmnSpecMixin.py index 414e702d..08661036 100644 --- a/SpiffWorkflow/bpmn/specs/BpmnSpecMixin.py +++ b/SpiffWorkflow/bpmn/specs/BpmnSpecMixin.py @@ -89,6 +89,9 @@ class BpmnSpecMixin(TaskSpec): def _on_complete_hook(self, my_task): + if isinstance(my_task.parent.task_spec, BpmnSpecMixin): + my_task.parent.task_spec._child_complete_hook(my_task) + if self.io_specification is not None and len(self.io_specification.data_outputs) > 0: data = {} for var in self.io_specification.data_outputs: @@ -105,8 +108,6 @@ class BpmnSpecMixin(TaskSpec): my_task.data.pop(obj.name, None) super(BpmnSpecMixin, self)._on_complete_hook(my_task) - if isinstance(my_task.parent.task_spec, BpmnSpecMixin): - my_task.parent.task_spec._child_complete_hook(my_task) def _child_complete_hook(self, child_task): pass diff --git a/SpiffWorkflow/bpmn/specs/ScriptTask.py b/SpiffWorkflow/bpmn/specs/ScriptTask.py index 1332e95a..e00f7039 100644 --- a/SpiffWorkflow/bpmn/specs/ScriptTask.py +++ b/SpiffWorkflow/bpmn/specs/ScriptTask.py @@ -18,7 +18,6 @@ # 02110-1301 USA from .BpmnSpecMixin import BpmnSpecMixin -from ...task import TaskState from ...specs.Simple import Simple @@ -30,13 +29,8 @@ class ScriptEngineTask(Simple, BpmnSpecMixin): pass def _run_hook(self, task): - try: - self._execute(task) - super(ScriptEngineTask, self)._run_hook(task) - except Exception as exc: - task._set_state(TaskState.WAITING) - raise exc - return True + return self._execute(task) + class ScriptTask(ScriptEngineTask): @@ -54,5 +48,4 @@ class ScriptTask(ScriptEngineTask): return 'Script Task' def _execute(self, task): - task.workflow.script_engine.execute(task, self.script) - + return task.workflow.script_engine.execute(task, self.script) diff --git a/SpiffWorkflow/bpmn/specs/SubWorkflowTask.py b/SpiffWorkflow/bpmn/specs/SubWorkflowTask.py index aa429624..9ba45393 100644 --- a/SpiffWorkflow/bpmn/specs/SubWorkflowTask.py +++ b/SpiffWorkflow/bpmn/specs/SubWorkflowTask.py @@ -27,15 +27,17 @@ class SubWorkflowTask(BpmnSpecMixin): def _on_subworkflow_completed(self, subworkflow, my_task): self.update_data(my_task, subworkflow) - my_task._set_state(TaskState.READY) def _update_hook(self, my_task): wf = my_task.workflow._get_outermost_workflow(my_task) - if my_task.id not in wf.subprocesses: + subprocess = wf.subprocesses.get(my_task.id) + 
if subprocess is None: super()._update_hook(my_task) self.create_workflow(my_task) self.start_workflow(my_task) my_task._set_state(TaskState.WAITING) + else: + return subprocess.is_completed() def _on_cancel(self, my_task): subworkflow = my_task.workflow.get_subprocess(my_task) @@ -44,7 +46,11 @@ class SubWorkflowTask(BpmnSpecMixin): def copy_data(self, my_task, subworkflow): # There is only one copy of any given data object, so it should be updated immediately - subworkflow.data = my_task.workflow.data + # Doing this is actually a little problematic, because it gives parent processes access to + # data objects defined in subprocesses. + # But our data management is already hopelessly messed up and in dire needs of reconsideration + if len(subworkflow.spec.data_objects) > 0: + subworkflow.data = my_task.workflow.data start = subworkflow.get_tasks_from_spec_name('Start', workflow=subworkflow) start[0].set_data(**my_task.data) diff --git a/SpiffWorkflow/bpmn/specs/events/IntermediateEvent.py b/SpiffWorkflow/bpmn/specs/events/IntermediateEvent.py index 5afcfe7e..2eb43e9a 100644 --- a/SpiffWorkflow/bpmn/specs/events/IntermediateEvent.py +++ b/SpiffWorkflow/bpmn/specs/events/IntermediateEvent.py @@ -67,9 +67,7 @@ class _BoundaryEventParent(Simple, BpmnSpecMixin): return 'Boundary Event Parent' def _run_hook(self, my_task): - - # Clear any events that our children might have received and - # wait for new events + # Clear any events that our children might have received and wait for new events for child in my_task.children: if isinstance(child.task_spec, BoundaryEvent): child.task_spec.event_definition.reset(child) @@ -77,7 +75,6 @@ class _BoundaryEventParent(Simple, BpmnSpecMixin): return True def _child_complete_hook(self, child_task): - # If the main child completes, or a cancelling event occurs, cancel any unfinished children if child_task.task_spec == self.main_child_task_spec or child_task.task_spec.cancel_activity: for sibling in child_task.parent.children: @@ -85,11 +82,8 @@ class _BoundaryEventParent(Simple, BpmnSpecMixin): continue if sibling.task_spec == self.main_child_task_spec or not sibling._is_finished(): sibling.cancel() - for t in child_task.workflow._get_waiting_tasks(): - t.task_spec._update(t) def _predict_hook(self, my_task): - # Events attached to the main task might occur my_task._sync_children(self.outputs, state=TaskState.MAYBE) # The main child's state is based on this task's state @@ -120,12 +114,6 @@ class BoundaryEvent(CatchingEvent): # Boundary events should only be caught while waiting return super(BoundaryEvent, self).catches(my_task, event_definition, correlations) and my_task.state == TaskState.WAITING - def catch(self, my_task, event_definition): - super(BoundaryEvent, self).catch(my_task, event_definition) - # Would love to get rid of this statement and manage in the workflow - # However, it is not really compatible with how boundary events work. - my_task.run() - class EventBasedGateway(CatchingEvent): diff --git a/SpiffWorkflow/bpmn/specs/events/event_definitions.py b/SpiffWorkflow/bpmn/specs/events/event_definitions.py index 4f6c1322..d2cbd462 100644 --- a/SpiffWorkflow/bpmn/specs/events/event_definitions.py +++ b/SpiffWorkflow/bpmn/specs/events/event_definitions.py @@ -75,7 +75,8 @@ class EventDefinition(object): # a particular process, but this at least provides a mechanism for distinguishing # between processes and subprocesses. 
if self.external and outer_workflow != workflow: - outer_workflow.catch(event, correlations) + top = workflow._get_outermost_workflow() + top.catch(event, correlations) else: workflow.catch(event) diff --git a/SpiffWorkflow/serializer/dict.py b/SpiffWorkflow/serializer/dict.py index df1939c9..d9108dbf 100644 --- a/SpiffWorkflow/serializer/dict.py +++ b/SpiffWorkflow/serializer/dict.py @@ -21,7 +21,7 @@ import pickle from base64 import b64encode, b64decode from ..workflow import Workflow from ..util.impl import get_class -from ..task import Task +from ..task import Task, TaskState from ..operators import (Attrib, PathAttrib, Equal, NotEqual, Operator, GreaterThan, LessThan, Match) from ..specs.base import TaskSpec from ..specs.AcquireMutex import AcquireMutex @@ -604,11 +604,18 @@ class DictionarySerializer(Serializer): workflow.spec = wf_spec workflow.task_tree = self.deserialize_task(workflow, s_state['task_tree'], reset_specs) - # Re-connect parents + # Re-connect parents and update states if necessary tasklist = workflow.get_tasks() + root = workflow.get_tasks_from_spec_name('Root')[0] + update_state = root.state != TaskState.COMPLETED for task in tasklist: if task.parent is not None: task.parent = workflow.get_task_from_id(task.parent, tasklist) + if update_state: + if task.state == 32: + task.state = TaskState.COMPLETED + elif task.state == 64: + task.state = TaskState.CANCELLED if workflow.last_task is not None: workflow.last_task = workflow.get_task_from_id(s_state['last_task'],tasklist) diff --git a/SpiffWorkflow/specs/base.py b/SpiffWorkflow/specs/base.py index 4fc736b0..7cb63739 100644 --- a/SpiffWorkflow/specs/base.py +++ b/SpiffWorkflow/specs/base.py @@ -302,15 +302,24 @@ class TaskSpec(object): :rtype: boolean or None :returns: the value returned by the task spec's run method. """ - result = self._run_hook(my_task) - # Run user code, if any. - if self.ready_event.emit(my_task.workflow, my_task): - # Assign variables, if so requested. - for assignment in self.post_assign: - assignment.assign(my_task, my_task) + # I'm not sure I like setting the state here. I'd like to handle it in `task` like + # the other transitions, and allow task specific error handling behavior. + # Having a task return a boolean indicating success (or None if it should just wait + # because the task is running) works well for scripts, but not for other types + # This is the easiest way of dealing with all other errors. + try: + result = self._run_hook(my_task) + # Run user code, if any. + if self.ready_event.emit(my_task.workflow, my_task): + # Assign variables, if so requested. 
+ for assignment in self.post_assign: + assignment.assign(my_task, my_task) - self.finished_event.emit(my_task.workflow, my_task) - return result + self.finished_event.emit(my_task.workflow, my_task) + return result + except Exception as exc: + my_task._set_state(TaskState.ERROR) + raise exc def _run_hook(self, my_task): """ @@ -371,6 +380,13 @@ class TaskSpec(object): """ pass + def _on_error(self, my_task): + self._on_error_hook(my_task) + + def _on_error_hook(self, my_task): + """Can be overridden for task specific error handling""" + pass + @abstractmethod def serialize(self, serializer, **kwargs): """ diff --git a/SpiffWorkflow/spiff/parser/process.py b/SpiffWorkflow/spiff/parser/process.py index 1dd61a71..203078e1 100644 --- a/SpiffWorkflow/spiff/parser/process.py +++ b/SpiffWorkflow/spiff/parser/process.py @@ -1,7 +1,7 @@ import os from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser -from SpiffWorkflow.bpmn.parser.BpmnParser import BpmnValidator, full_tag +from SpiffWorkflow.bpmn.parser.BpmnParser import BpmnValidator, full_tag, ValidationException from SpiffWorkflow.bpmn.specs.events.EndEvent import EndEvent from SpiffWorkflow.bpmn.specs.events.StartEvent import StartEvent @@ -53,3 +53,9 @@ class SpiffBpmnParser(BpmnDmnParser): full_tag('receiveTask'): (SpiffReceiveTaskParser, ReceiveTask), full_tag('businessRuleTask'): (BusinessRuleTaskParser, BusinessRuleTask) } + + def create_parser(self, node, filename=None, lane=None): + parser = self.PROCESS_PARSER_CLASS(self, node, self.namespaces, self.data_stores, filename=filename, lane=lane) + if parser.get_id() in self.process_parsers: + raise ValidationException(f'Duplicate process ID: {parser.get_id()}', node=node, file_name=filename) + self.process_parsers[parser.get_id()] = parser diff --git a/SpiffWorkflow/spiff/specs/service_task.py b/SpiffWorkflow/spiff/specs/service_task.py index c31e2fb5..c504e84b 100644 --- a/SpiffWorkflow/spiff/specs/service_task.py +++ b/SpiffWorkflow/spiff/specs/service_task.py @@ -1,5 +1,6 @@ -from copy import deepcopy import json +from copy import deepcopy + from SpiffWorkflow.bpmn.specs.ServiceTask import ServiceTask from SpiffWorkflow.exceptions import WorkflowTaskException from SpiffWorkflow.spiff.specs.spiff_task import SpiffBpmnTask @@ -42,3 +43,4 @@ class ServiceTask(SpiffBpmnTask, ServiceTask): raise wte parsed_result = json.loads(result) task.data[self._result_variable(task)] = parsed_result + return True diff --git a/SpiffWorkflow/spiff/specs/spiff_task.py b/SpiffWorkflow/spiff/specs/spiff_task.py index e3cb6c44..b5d02e96 100644 --- a/SpiffWorkflow/spiff/specs/spiff_task.py +++ b/SpiffWorkflow/spiff/specs/spiff_task.py @@ -22,7 +22,7 @@ class SpiffBpmnTask(BpmnSpecMixin): try: my_task.workflow.script_engine.execute(my_task, script) except Exception as exc: - my_task._set_state(TaskState.WAITING) + my_task._set_state(TaskState.ERROR) raise exc def get_payload(self, my_task, script, expr): diff --git a/SpiffWorkflow/spiff/specs/subworkflow_task.py b/SpiffWorkflow/spiff/specs/subworkflow_task.py index ba1c061b..fb43d8b5 100644 --- a/SpiffWorkflow/spiff/specs/subworkflow_task.py +++ b/SpiffWorkflow/spiff/specs/subworkflow_task.py @@ -22,11 +22,14 @@ class SubWorkflowTask(DefaultSubWorkflow, SpiffBpmnTask): def _update_hook(self, my_task): # Don't really like duplicating this, but we need to run SpiffBpmn update rather than the default wf = my_task.workflow._get_outermost_workflow(my_task) - if my_task.id not in wf.subprocesses: - SpiffBpmnTask._update_hook(self, my_task) + subprocess = 
wf.subprocesses.get(my_task.id) + if subprocess is None: + super()._update_hook(my_task) self.create_workflow(my_task) self.start_workflow(my_task) my_task._set_state(TaskState.WAITING) + else: + return subprocess.is_completed() def _on_complete_hook(self, my_task): SpiffBpmnTask._on_complete_hook(self, my_task) diff --git a/SpiffWorkflow/task.py b/SpiffWorkflow/task.py index 325222c4..15c9bfac 100644 --- a/SpiffWorkflow/task.py +++ b/SpiffWorkflow/task.py @@ -66,37 +66,41 @@ class TaskState: created to allow for visualizing the workflow at a time where the required decisions have not yet been made. """ - # Note: The states in this list are ordered in the sequence in which - # they may appear. Do not change. MAYBE = 1 LIKELY = 2 FUTURE = 4 WAITING = 8 READY = 16 - COMPLETED = 32 - CANCELLED = 64 + STARTED = 32 + COMPLETED = 64 + ERROR = 128 + CANCELLED = 256 - FINISHED_MASK = CANCELLED | COMPLETED - DEFINITE_MASK = FUTURE | WAITING | READY + FINISHED_MASK = CANCELLED | ERROR | COMPLETED + DEFINITE_MASK = FUTURE | WAITING | READY | STARTED PREDICTED_MASK = LIKELY | MAYBE NOT_FINISHED_MASK = PREDICTED_MASK | DEFINITE_MASK ANY_MASK = FINISHED_MASK | NOT_FINISHED_MASK -TaskStateNames = {TaskState.FUTURE: 'FUTURE', - TaskState.WAITING: 'WAITING', - TaskState.READY: 'READY', - TaskState.CANCELLED: 'CANCELLED', - TaskState.COMPLETED: 'COMPLETED', - TaskState.LIKELY: 'LIKELY', - TaskState.MAYBE: 'MAYBE'} +TaskStateNames = { + TaskState.FUTURE: 'FUTURE', + TaskState.WAITING: 'WAITING', + TaskState.READY: 'READY', + TaskState.STARTED: 'STARTED', + TaskState.CANCELLED: 'CANCELLED', + TaskState.COMPLETED: 'COMPLETED', + TaskState.ERROR: 'ERROR', + TaskState.LIKELY: 'LIKELY', + TaskState.MAYBE: 'MAYBE' +} TaskStateMasks = { - TaskState.FINISHED_MASK: 'FINISHED_MASK', - TaskState.DEFINITE_MASK: 'DEFINITE_MASK', - TaskState.PREDICTED_MASK: 'PREDICTED_MASK', - TaskState.NOT_FINISHED_MASK: 'NOT_FINISHED_MASK', - TaskState.ANY_MASK: 'ANY_MASK', - } + TaskState.FINISHED_MASK: 'FINISHED_MASK', + TaskState.DEFINITE_MASK: 'DEFINITE_MASK', + TaskState.PREDICTED_MASK: 'PREDICTED_MASK', + TaskState.NOT_FINISHED_MASK: 'NOT_FINISHED_MASK', + TaskState.ANY_MASK: 'ANY_MASK', +} class DeprecatedMetaTask(type): @@ -629,10 +633,21 @@ class Task(object, metaclass=DeprecatedMetaTask): }) metrics.debug('', extra=extra) if retval is None: - self._set_state(TaskState.WAITING) + # This state is intended to indicate a task that is not finished, but will continue + # in the background without blocking other unrelated tasks (ie on other branches). + # It is a distinct state from "waiting" so that `update` does not have to distinguish + # between tasks that can be started and tasks that have already been started. + # Spiff can manage deciding if a task can run, but if a task is set to "started", it will + # have to be tracked independently of the workflow and completed manually when it finishes + # for the time being (probably I'll add polling methods in the future, but I'm not exactly + # sure how they should work). + # I'm adding this state now because I'm adding an error state (which I think there is a + # need for) and don't want to go through the hassle of updating serialization of task states + # twice; doing this at all is going to be painful enough. + self._set_state(TaskState.STARTED) + elif retval is False: + self.error() else: - # If we add an error state, the we can move the task to COMPLETE or ERROR - # according to the return value. 
self.complete() return retval @@ -652,6 +667,10 @@ class Task(object, metaclass=DeprecatedMetaTask): self.task_spec._on_complete(self) self.workflow.last_task = self + def error(self): + self._set_state(TaskState.ERROR) + self.task_spec._on_error(self) + def trigger(self, *args): """ If recursive is True, the state is applied to the tree recursively. diff --git a/tests/SpiffWorkflow/bpmn/ApprovalsTest.py b/tests/SpiffWorkflow/bpmn/ApprovalsTest.py index 9247dec9..ef1c3c51 100644 --- a/tests/SpiffWorkflow/bpmn/ApprovalsTest.py +++ b/tests/SpiffWorkflow/bpmn/ApprovalsTest.py @@ -46,6 +46,8 @@ class ApprovalsTest(BpmnWorkflowTestCase): def testRunThroughHappy(self): self.do_next_named_step('First_Approval_Wins.Manager_Approval') + self.complete_subworkflow() + self.complete_subworkflow() self.do_next_exclusive_step('Approvals.First_Approval_Wins_Done') self.do_next_named_step('Approvals.Manager_Approval__P_') @@ -55,11 +57,15 @@ class ApprovalsTest(BpmnWorkflowTestCase): self.do_next_named_step('Parallel_Approvals_SP.Step1') self.do_next_named_step('Parallel_Approvals_SP.Manager_Approval') self.do_next_named_step('Parallel_Approvals_SP.Supervisor_Approval') + self.complete_subworkflow() + self.complete_subworkflow() self.do_next_exclusive_step('Approvals.Parallel_SP_Done') def testRunThroughHappyOtherOrders(self): self.do_next_named_step('First_Approval_Wins.Supervisor_Approval') + self.complete_subworkflow() + self.complete_subworkflow() self.do_next_exclusive_step('Approvals.First_Approval_Wins_Done') self.do_next_named_step('Approvals.Supervisor_Approval__P_') @@ -69,11 +75,15 @@ class ApprovalsTest(BpmnWorkflowTestCase): self.do_next_named_step('Parallel_Approvals_SP.Manager_Approval') self.do_next_named_step('Parallel_Approvals_SP.Step1') self.do_next_named_step('Parallel_Approvals_SP.Supervisor_Approval') + self.complete_subworkflow() + self.complete_subworkflow() self.do_next_exclusive_step('Approvals.Parallel_SP_Done') def testSaveRestore(self): self.do_next_named_step('First_Approval_Wins.Manager_Approval') + self.complete_subworkflow() + self.complete_subworkflow() self.save_restore() self.do_next_exclusive_step('Approvals.First_Approval_Wins_Done') @@ -86,12 +96,16 @@ class ApprovalsTest(BpmnWorkflowTestCase): self.do_next_named_step('Parallel_Approvals_SP.Manager_Approval') self.do_next_exclusive_step('Parallel_Approvals_SP.Step1') self.do_next_exclusive_step('Parallel_Approvals_SP.Supervisor_Approval') + self.complete_subworkflow() + self.complete_subworkflow() self.do_next_exclusive_step('Approvals.Parallel_SP_Done') def testSaveRestoreWaiting(self): self.do_next_named_step('First_Approval_Wins.Manager_Approval') self.save_restore() + self.complete_subworkflow() + self.complete_subworkflow() self.do_next_exclusive_step('Approvals.First_Approval_Wins_Done') self.save_restore() @@ -108,6 +122,8 @@ class ApprovalsTest(BpmnWorkflowTestCase): self.save_restore() self.do_next_exclusive_step('Parallel_Approvals_SP.Supervisor_Approval') self.save_restore() + self.complete_subworkflow() + self.complete_subworkflow() self.do_next_exclusive_step('Approvals.Parallel_SP_Done') diff --git a/tests/SpiffWorkflow/bpmn/BpmnWorkflowTestCase.py b/tests/SpiffWorkflow/bpmn/BpmnWorkflowTestCase.py index 5f9b797c..1896acde 100644 --- a/tests/SpiffWorkflow/bpmn/BpmnWorkflowTestCase.py +++ b/tests/SpiffWorkflow/bpmn/BpmnWorkflowTestCase.py @@ -118,6 +118,13 @@ class BpmnWorkflowTestCase(unittest.TestCase): tasks[0].set_data(**set_attribs) tasks[0].run() + def complete_subworkflow(self): + # A side effect 
of finer grained control over task execution is that tasks require more explicit intervention + # to change states. Subworkflow tasks no longer go directly to ready when the subworkflow completes. + # So they may need to be explicitly refreshed to become ready, and then run. + self.workflow.refresh_waiting_tasks() + self.workflow.do_engine_steps() + def save_restore(self): script_engine = self.workflow.script_engine diff --git a/tests/SpiffWorkflow/bpmn/CallActivityEndEventTest.py b/tests/SpiffWorkflow/bpmn/CallActivityEndEventTest.py index 2fa5adfc..0dfd32ab 100644 --- a/tests/SpiffWorkflow/bpmn/CallActivityEndEventTest.py +++ b/tests/SpiffWorkflow/bpmn/CallActivityEndEventTest.py @@ -6,6 +6,8 @@ from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine from SpiffWorkflow.bpmn.workflow import BpmnWorkflow from SpiffWorkflow.exceptions import WorkflowTaskException +from SpiffWorkflow.task import TaskState + from .BpmnWorkflowTestCase import BpmnWorkflowTestCase __author__ = 'kellym' @@ -34,13 +36,12 @@ class CallActivityTest(BpmnWorkflowTestCase): self.workflow = BpmnWorkflow(self.spec, self.subprocesses, script_engine=CustomScriptEngine()) self.workflow.do_engine_steps() + self.complete_subworkflow() self.assertTrue(self.workflow.is_completed()) self.assertIsInstance(self.workflow.script_engine, CustomScriptEngine) if save_restore: self.save_restore() - # We have to reset the script engine after deserialize. - self.workflow.script_engine = CustomScriptEngine() # Get the subworkflow sub_task = self.workflow.get_tasks_from_spec_name('Sub_Bpmn_Task')[0] @@ -54,6 +55,7 @@ class CallActivityTest(BpmnWorkflowTestCase): # data should be removed in the final output as well. self.workflow = BpmnWorkflow(self.spec, self.subprocesses) self.workflow.do_engine_steps() + self.complete_subworkflow() self.assertTrue(self.workflow.is_completed()) self.assertNotIn('remove_this_var', self.workflow.last_task.data.keys()) @@ -66,6 +68,8 @@ class CallActivityTest(BpmnWorkflowTestCase): self.assertEquals(2, len(context.exception.task_trace)) self.assertRegexpMatches(context.exception.task_trace[0], 'Create Data \(.*?call_activity_call_activity.bpmn\)') self.assertRegexpMatches(context.exception.task_trace[1], 'Get Data Call Activity \(.*?call_activity_with_error.bpmn\)') + task = self.workflow.get_tasks_from_spec_name('Sub_Bpmn_Task')[0] + self.assertEqual(task.state, TaskState.ERROR) def suite(): return unittest.TestLoader().loadTestsFromTestCase(CallActivityTest) diff --git a/tests/SpiffWorkflow/bpmn/CallActivitySubProcessPropTest.py b/tests/SpiffWorkflow/bpmn/CallActivitySubProcessPropTest.py index e540f717..7a69539b 100644 --- a/tests/SpiffWorkflow/bpmn/CallActivitySubProcessPropTest.py +++ b/tests/SpiffWorkflow/bpmn/CallActivitySubProcessPropTest.py @@ -27,6 +27,8 @@ class CallActivitySubProcessPropTest(BpmnWorkflowTestCase): def actualTest(self, save_restore=False): self.workflow.do_engine_steps() + self.complete_subworkflow() + self.complete_subworkflow() if save_restore: self.save_restore() self.assertTrue(self.workflow.is_completed()) diff --git a/tests/SpiffWorkflow/bpmn/CustomScriptTest.py b/tests/SpiffWorkflow/bpmn/CustomScriptTest.py index d2b21886..ddd5e143 100644 --- a/tests/SpiffWorkflow/bpmn/CustomScriptTest.py +++ b/tests/SpiffWorkflow/bpmn/CustomScriptTest.py @@ -38,6 +38,8 @@ class CustomInlineScriptTest(BpmnWorkflowTestCase): def actual_test(self, save_restore): if save_restore: self.save_restore() self.workflow.do_engine_steps() + self.complete_subworkflow() + 
self.complete_subworkflow() if save_restore: self.save_restore() data = self.workflow.last_task.data self.assertEqual(data['c1'], 'HELLO') @@ -49,8 +51,9 @@ class CustomInlineScriptTest(BpmnWorkflowTestCase): ready_task.data = {'custom_function': "bill"} with self.assertRaises(WorkflowTaskException) as e: self.workflow.do_engine_steps() - self.assertTrue('' in str(e.exception)) self.assertTrue('custom_function' in str(e.exception)) + task = self.workflow.get_tasks_from_spec_name('Activity_1y303ko')[0] + self.assertEqual(task.state, TaskState.ERROR) def suite(): diff --git a/tests/SpiffWorkflow/bpmn/DataObjectReferenceTest.py b/tests/SpiffWorkflow/bpmn/DataObjectReferenceTest.py index 58420290..45b52430 100644 --- a/tests/SpiffWorkflow/bpmn/DataObjectReferenceTest.py +++ b/tests/SpiffWorkflow/bpmn/DataObjectReferenceTest.py @@ -72,6 +72,9 @@ class DataObjectReferenceTest(BpmnWorkflowTestCase): self.assertNotIn('obj_1', ready_tasks[0].data) self.assertEqual(self.workflow.data['obj_1'], 'hello') + if save_restore: + self.save_restore() + # Make sure data objects are accessible inside a subprocess self.workflow.do_engine_steps() ready_tasks = self.workflow.get_ready_user_tasks() diff --git a/tests/SpiffWorkflow/bpmn/ExclusiveGatewayNoDefaultTest.py b/tests/SpiffWorkflow/bpmn/ExclusiveGatewayNoDefaultTest.py index 58fdb5d6..93e9f360 100644 --- a/tests/SpiffWorkflow/bpmn/ExclusiveGatewayNoDefaultTest.py +++ b/tests/SpiffWorkflow/bpmn/ExclusiveGatewayNoDefaultTest.py @@ -1,14 +1,11 @@ # -*- coding: utf-8 -*- - - - -import sys -import os import unittest -sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..')) + from SpiffWorkflow.bpmn.workflow import BpmnWorkflow -from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase from SpiffWorkflow.exceptions import WorkflowException +from SpiffWorkflow.task import TaskState + +from .BpmnWorkflowTestCase import BpmnWorkflowTestCase __author__ = 'essweine' @@ -25,6 +22,8 @@ class ExclusiveGatewayNoDefaultTest(BpmnWorkflowTestCase): first = self.workflow.get_tasks_from_spec_name('StartEvent_1')[0] first.data = { 'x': 1 } self.assertRaises(WorkflowException, self.workflow.do_engine_steps) + task = self.workflow.get_tasks_from_spec_name('Gateway_CheckValue')[0] + self.assertEqual(task.state, TaskState.ERROR) def suite(): return unittest.TestLoader().loadTestsFromTestCase(ExclusiveGatewayNoDefaultTest) diff --git a/tests/SpiffWorkflow/bpmn/IOSpecTest.py b/tests/SpiffWorkflow/bpmn/IOSpecTest.py index fe8dc7dd..963fa336 100644 --- a/tests/SpiffWorkflow/bpmn/IOSpecTest.py +++ b/tests/SpiffWorkflow/bpmn/IOSpecTest.py @@ -62,6 +62,8 @@ class CallActivityDataTest(BpmnWorkflowTestCase): self.assertNotIn('unused', task.data) self.complete_subprocess() + # Refreshing causes the subprocess to become ready + self.workflow.refresh_waiting_tasks() task = self.workflow.get_tasks(TaskState.READY)[0] # Originals should not change self.assertEqual(task.data['in_1'], 1) @@ -80,13 +82,11 @@ class CallActivityDataTest(BpmnWorkflowTestCase): waiting = self.workflow.get_tasks(TaskState.WAITING) def complete_subprocess(self): - # When we complete, the subworkflow task will move from WAITING to READY - waiting = self.workflow.get_tasks(TaskState.WAITING) - while len(waiting) > 0: - next_task = self.workflow.get_tasks(TaskState.READY)[0] - next_task.run() - waiting = self.workflow.get_tasks(TaskState.WAITING) - + # Complete the ready tasks in the subprocess + ready = self.workflow.get_tasks(TaskState.READY) + while len(ready) > 0: + 
ready[0].run() + ready = self.workflow.get_tasks(TaskState.READY) class IOSpecOnTaskTest(BpmnWorkflowTestCase): diff --git a/tests/SpiffWorkflow/bpmn/InclusiveGatewayTest.py b/tests/SpiffWorkflow/bpmn/InclusiveGatewayTest.py index 1b46e29d..1d3b987d 100644 --- a/tests/SpiffWorkflow/bpmn/InclusiveGatewayTest.py +++ b/tests/SpiffWorkflow/bpmn/InclusiveGatewayTest.py @@ -1,5 +1,6 @@ from SpiffWorkflow.bpmn.workflow import BpmnWorkflow from SpiffWorkflow.exceptions import WorkflowTaskException +from SpiffWorkflow.task import TaskState from .BpmnWorkflowTestCase import BpmnWorkflowTestCase @@ -26,6 +27,8 @@ class InclusiveGatewayTest(BpmnWorkflowTestCase): def testNoPathFromSecondGateway(self): self.set_data({'v': 0, 'u': -1, 'w': -1}) self.assertRaises(WorkflowTaskException, self.workflow.do_engine_steps) + task = self.workflow.get_tasks_from_spec_name('second')[0] + self.assertEqual(task.state, TaskState.ERROR) def testParallelCondition(self): self.set_data({'v': 0, 'u': 1, 'w': 1}) diff --git a/tests/SpiffWorkflow/bpmn/NestedProcessesTest.py b/tests/SpiffWorkflow/bpmn/NestedProcessesTest.py index 2fe55b77..e3eddf31 100644 --- a/tests/SpiffWorkflow/bpmn/NestedProcessesTest.py +++ b/tests/SpiffWorkflow/bpmn/NestedProcessesTest.py @@ -25,9 +25,11 @@ class NestedProcessesTest(BpmnWorkflowTestCase): self.assertEqual(1, len(self.workflow.get_tasks(TaskState.READY))) self.do_next_named_step('Action3') self.workflow.do_engine_steps() + self.complete_subworkflow() + self.complete_subworkflow() + self.complete_subworkflow() self.save_restore() - self.assertEqual( - 0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING))) + self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING))) def suite(): diff --git a/tests/SpiffWorkflow/bpmn/ParallelMultipleSplitsTest.py b/tests/SpiffWorkflow/bpmn/ParallelMultipleSplitsTest.py index d779337e..b813a195 100644 --- a/tests/SpiffWorkflow/bpmn/ParallelMultipleSplitsTest.py +++ b/tests/SpiffWorkflow/bpmn/ParallelMultipleSplitsTest.py @@ -32,6 +32,7 @@ class ParallelMultipleSplitsTest(BpmnWorkflowTestCase): self.workflow.do_engine_steps() self.do_next_named_step('SP 3 - Yes Task') self.workflow.do_engine_steps() + self.complete_subworkflow() self.do_next_named_step('Done') self.workflow.do_engine_steps() diff --git a/tests/SpiffWorkflow/bpmn/ProcessParserTest.py b/tests/SpiffWorkflow/bpmn/ProcessParserTest.py new file mode 100644 index 00000000..88d03ded --- /dev/null +++ b/tests/SpiffWorkflow/bpmn/ProcessParserTest.py @@ -0,0 +1,24 @@ +import os +import unittest + +from SpiffWorkflow.bpmn.parser.BpmnParser import BpmnParser +from SpiffWorkflow.bpmn.parser.ProcessParser import ProcessParser + +def _process_parser(bpmn_filename, process_id): + parser = BpmnParser() + bpmn_file = os.path.join(os.path.dirname(__file__), 'data', bpmn_filename) + parser.add_bpmn_file(bpmn_file) + return parser.get_process_parser(process_id) + +class ProcessParserTest(unittest.TestCase): + def testReturnsEmptyListIfNoCallActivities(self): + parser = _process_parser("no-tasks.bpmn", "no_tasks") + assert parser.called_element_ids() == [] + + def testHandlesSingleCallActivity(self): + parser = _process_parser("single_call_activity.bpmn", "Process_p4pfxhq") + assert parser.called_element_ids() == ["SingleTask_Process"] + + def testHandlesMultipleCallActivities(self): + parser = _process_parser("multiple_call_activities.bpmn", "Process_90mmqlw") + assert parser.called_element_ids() == ["Process_sypm122", "Process_diu8ta2", "Process_l14lar1"] diff --git 
a/tests/SpiffWorkflow/bpmn/PythonScriptEngineEnvironmentTest.py b/tests/SpiffWorkflow/bpmn/PythonScriptEngineEnvironmentTest.py index cb9c40c0..58290f4f 100644 --- a/tests/SpiffWorkflow/bpmn/PythonScriptEngineEnvironmentTest.py +++ b/tests/SpiffWorkflow/bpmn/PythonScriptEngineEnvironmentTest.py @@ -2,7 +2,7 @@ import json from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine -from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import BasePythonScriptEngineEnvironment +from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import BasePythonScriptEngineEnvironment, TaskDataEnvironment from SpiffWorkflow.bpmn.workflow import BpmnWorkflow from SpiffWorkflow.task import TaskState @@ -23,10 +23,19 @@ class NonTaskDataExampleEnvironment(BasePythonScriptEngineEnvironment): self.environment.update(external_methods or {}) exec(script, self.environment) self.environment = {k: v for k, v in self.environment.items() if k not in external_methods} + return True def user_defined_values(self): return {k: v for k, v in self.environment.items() if k not in self.globals} + +class AsyncScriptEnvironment(TaskDataEnvironment): + + def execute(self, script, context, external_methods=None): + super().execute(script, context, external_methods) + return None + + class PythonScriptEngineEnvironmentTest(BpmnWorkflowTestCase): def setUp(self): @@ -78,3 +87,24 @@ class PythonScriptEngineEnvironmentTest(BpmnWorkflowTestCase): task_data_len = len(json.dumps(task_data_to_check)) return task_data_len + +class StartedTaskTest(BpmnWorkflowTestCase): + + def setUp(self): + spec, subprocesses = self.load_workflow_spec('script-start.bpmn', 'Process_cozt5fu') + self.workflow = BpmnWorkflow(spec, subprocesses) + + def testStartedState(self): + script_engine_environemnt = AsyncScriptEnvironment() + script_engine = PythonScriptEngine(environment=script_engine_environemnt) + self.workflow.script_engine = script_engine + self.workflow.do_engine_steps() + script_task = self.workflow.get_tasks_from_spec_name('script')[0] + self.assertEqual(script_task.state, TaskState.STARTED) + script_task.complete() + manual_task = self.workflow.get_tasks_from_spec_name('manual')[0] + manual_task.run() + self.workflow.do_engine_steps() + end = self.workflow.get_tasks_from_spec_name('End')[0] + self.assertDictEqual(end.data, {'x': 1, 'y': 2, 'z': 3}) + self.assertTrue(self.workflow.is_completed()) diff --git a/tests/SpiffWorkflow/bpmn/ResetSubProcessTest.py b/tests/SpiffWorkflow/bpmn/ResetSubProcessTest.py index b9cd99be..61a7cc4c 100644 --- a/tests/SpiffWorkflow/bpmn/ResetSubProcessTest.py +++ b/tests/SpiffWorkflow/bpmn/ResetSubProcessTest.py @@ -35,7 +35,7 @@ class ResetSubProcessTest(BpmnWorkflowTestCase): self.workflow.do_engine_steps() top_level_task = self.workflow.get_ready_user_tasks()[0] - self.workflow.run_task_from_id(top_level_task.id) + top_level_task.run() self.workflow.do_engine_steps() task = self.workflow.get_ready_user_tasks()[0] self.save_restore() @@ -50,11 +50,11 @@ class ResetSubProcessTest(BpmnWorkflowTestCase): self.workflow.do_engine_steps() self.assertEqual(1, len(self.workflow.get_ready_user_tasks())) task = self.workflow.get_ready_user_tasks()[0] - self.workflow.run_task_from_id(task.id) + task.run() self.workflow.do_engine_steps() task = self.workflow.get_ready_user_tasks()[0] self.assertEqual(task.get_name(),'SubTask2') - self.workflow.run_task_from_id(task.id) + task.run() self.workflow.do_engine_steps() task = 
self.workflow.get_tasks_from_spec_name('Task1')[0] task.reset_token(self.workflow.last_task.data) @@ -62,19 +62,20 @@ class ResetSubProcessTest(BpmnWorkflowTestCase): self.reload_save_restore() task = self.workflow.get_ready_user_tasks()[0] self.assertEqual(task.get_name(),'Task1') - self.workflow.run_task_from_id(task.id) + task.run() self.workflow.do_engine_steps() task = self.workflow.get_ready_user_tasks()[0] self.assertEqual(task.get_name(),'Subtask2') - self.workflow.run_task_from_id(task.id) + task.run() self.workflow.do_engine_steps() task = self.workflow.get_ready_user_tasks()[0] self.assertEqual(task.get_name(),'Subtask2A') - self.workflow.run_task_from_id(task.id) + task.run() self.workflow.do_engine_steps() + self.complete_subworkflow() task = self.workflow.get_ready_user_tasks()[0] self.assertEqual(task.get_name(),'Task2') - self.workflow.run_task_from_id(task.id) + task.run() self.workflow.do_engine_steps() self.assertTrue(self.workflow.is_completed()) diff --git a/tests/SpiffWorkflow/bpmn/data/multiple_call_activities.bpmn b/tests/SpiffWorkflow/bpmn/data/multiple_call_activities.bpmn new file mode 100644 index 00000000..0ce562e6 --- /dev/null +++ b/tests/SpiffWorkflow/bpmn/data/multiple_call_activities.bpmn @@ -0,0 +1,62 @@ + + + + + Flow_0b6y930 + + + + Flow_0b6y930 + Flow_0eaeoas + + + + Flow_0eaeoas + Flow_0hqm48x + + + + Flow_0vhq9kk + + + + Flow_0hqm48x + Flow_0vhq9kk + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tests/SpiffWorkflow/bpmn/data/no-tasks.bpmn b/tests/SpiffWorkflow/bpmn/data/no-tasks.bpmn new file mode 100644 index 00000000..5299c7d3 --- /dev/null +++ b/tests/SpiffWorkflow/bpmn/data/no-tasks.bpmn @@ -0,0 +1,26 @@ + + + + + Flow_184umot + + + Flow_184umot + + + + + + + + + + + + + + + + + + diff --git a/tests/SpiffWorkflow/bpmn/data/script-start.bpmn b/tests/SpiffWorkflow/bpmn/data/script-start.bpmn new file mode 100644 index 00000000..e35cbb04 --- /dev/null +++ b/tests/SpiffWorkflow/bpmn/data/script-start.bpmn @@ -0,0 +1,87 @@ + + + + + Flow_1r4la8u + + + + Flow_1r4la8u + Flow_0rx8ly6 + Flow_0m6kw0s + + + + + Flow_0rx8ly6 + Flow_1wcrqdb + x, y = 1, 2 +z = x + y + + + Flow_0m6kw0s + Flow_0npa1g8 + + + + + Flow_1wcrqdb + Flow_0npa1g8 + Flow_1vd5x0n + + + Flow_1vd5x0n + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tests/SpiffWorkflow/bpmn/data/serialization/v1.1-task-states.json b/tests/SpiffWorkflow/bpmn/data/serialization/v1.1-task-states.json new file mode 100644 index 00000000..6fba7f4c --- /dev/null +++ b/tests/SpiffWorkflow/bpmn/data/serialization/v1.1-task-states.json @@ -0,0 +1,563 @@ +{ + "serializer_version": "1.1", + "data": {}, + "last_task": "3a4a6596-3b3d-4cd5-9724-cd1ccc263676", + "success": true, + "tasks": { + "d10fb568-6104-4a26-9081-e29c8eb42e70": { + "id": "d10fb568-6104-4a26-9081-e29c8eb42e70", + "parent": null, + "children": [ + "f0a4e631-c30a-4444-b2b1-2cdc21179c65" + ], + "last_state_change": 1681913960.451874, + "state": 32, + "task_spec": "Root", + "triggered": false, + "workflow_name": "main", + "internal_data": {}, + "data": {} + }, + "f0a4e631-c30a-4444-b2b1-2cdc21179c65": { + "id": "f0a4e631-c30a-4444-b2b1-2cdc21179c65", + "parent": "d10fb568-6104-4a26-9081-e29c8eb42e70", + "children": [ + "3468f48a-c493-4731-9484-0821d046a0bc" + ], + "last_state_change": 1681913960.4591289, + "state": 32, + "task_spec": "Start", + "triggered": false, + "workflow_name": "main", + "internal_data": {}, + "data": {} + }, + 
"3468f48a-c493-4731-9484-0821d046a0bc": { + "id": "3468f48a-c493-4731-9484-0821d046a0bc", + "parent": "f0a4e631-c30a-4444-b2b1-2cdc21179c65", + "children": [ + "0366c1b3-d55e-42e4-a6fd-db2d6a1940eb" + ], + "last_state_change": 1681913960.460862, + "state": 32, + "task_spec": "StartEvent_1", + "triggered": false, + "workflow_name": "main", + "internal_data": { + "event_fired": true + }, + "data": {} + }, + "0366c1b3-d55e-42e4-a6fd-db2d6a1940eb": { + "id": "0366c1b3-d55e-42e4-a6fd-db2d6a1940eb", + "parent": "3468f48a-c493-4731-9484-0821d046a0bc", + "children": [ + "afd334bf-f987-46b3-b6df-779562c4b4bf", + "ad8b1640-55ac-4ff5-b07e-16c0b7c92976" + ], + "last_state_change": 1681913960.4621637, + "state": 32, + "task_spec": "task_1.BoundaryEventParent", + "triggered": false, + "workflow_name": "main", + "internal_data": {}, + "data": {} + }, + "afd334bf-f987-46b3-b6df-779562c4b4bf": { + "id": "afd334bf-f987-46b3-b6df-779562c4b4bf", + "parent": "0366c1b3-d55e-42e4-a6fd-db2d6a1940eb", + "children": [ + "3a4a6596-3b3d-4cd5-9724-cd1ccc263676" + ], + "last_state_change": 1681913960.4634154, + "state": 32, + "task_spec": "task_1", + "triggered": false, + "workflow_name": "main", + "internal_data": {}, + "data": {} + }, + "3a4a6596-3b3d-4cd5-9724-cd1ccc263676": { + "id": "3a4a6596-3b3d-4cd5-9724-cd1ccc263676", + "parent": "afd334bf-f987-46b3-b6df-779562c4b4bf", + "children": [ + "8ee8a04c-e784-4419-adb4-7c7f99f72bf6", + "00c5b447-760e-47dc-a77b-cffbf893d098" + ], + "last_state_change": 1681913960.4667528, + "state": 32, + "task_spec": "Gateway_1eg2w56", + "triggered": false, + "workflow_name": "main", + "internal_data": {}, + "data": {} + }, + "8ee8a04c-e784-4419-adb4-7c7f99f72bf6": { + "id": "8ee8a04c-e784-4419-adb4-7c7f99f72bf6", + "parent": "3a4a6596-3b3d-4cd5-9724-cd1ccc263676", + "children": [ + "cee7ab8e-d50b-4681-94f2-51bbd8dd6df1" + ], + "last_state_change": 1681913960.4669333, + "state": 16, + "task_spec": "task_2", + "triggered": false, + "workflow_name": "main", + "internal_data": {}, + "data": {} + }, + "cee7ab8e-d50b-4681-94f2-51bbd8dd6df1": { + "id": "cee7ab8e-d50b-4681-94f2-51bbd8dd6df1", + "parent": "8ee8a04c-e784-4419-adb4-7c7f99f72bf6", + "children": [ + "4695356f-8db1-48f6-a8bb-d610b2da7a63" + ], + "last_state_change": 1681913960.454116, + "state": 4, + "task_spec": "Gateway_1dpu3vt", + "triggered": false, + "workflow_name": "main", + "internal_data": {}, + "data": {} + }, + "4695356f-8db1-48f6-a8bb-d610b2da7a63": { + "id": "4695356f-8db1-48f6-a8bb-d610b2da7a63", + "parent": "cee7ab8e-d50b-4681-94f2-51bbd8dd6df1", + "children": [ + "f4ab0e07-d985-412f-aca0-369bd29797e1" + ], + "last_state_change": 1681913960.45428, + "state": 4, + "task_spec": "Event_1deqprp", + "triggered": false, + "workflow_name": "main", + "internal_data": {}, + "data": {} + }, + "f4ab0e07-d985-412f-aca0-369bd29797e1": { + "id": "f4ab0e07-d985-412f-aca0-369bd29797e1", + "parent": "4695356f-8db1-48f6-a8bb-d610b2da7a63", + "children": [ + "e326e6ec-f20d-4171-8133-24b160ad3404" + ], + "last_state_change": 1681913960.4544458, + "state": 4, + "task_spec": "main.EndJoin", + "triggered": false, + "workflow_name": "main", + "internal_data": {}, + "data": {} + }, + "e326e6ec-f20d-4171-8133-24b160ad3404": { + "id": "e326e6ec-f20d-4171-8133-24b160ad3404", + "parent": "f4ab0e07-d985-412f-aca0-369bd29797e1", + "children": [], + "last_state_change": 1681913960.4546103, + "state": 4, + "task_spec": "End", + "triggered": false, + "workflow_name": "main", + "internal_data": {}, + "data": {} + }, + 
"00c5b447-760e-47dc-a77b-cffbf893d098": { + "id": "00c5b447-760e-47dc-a77b-cffbf893d098", + "parent": "3a4a6596-3b3d-4cd5-9724-cd1ccc263676", + "children": [ + "c57e4be7-670c-41aa-9740-9fff831f6ccd" + ], + "last_state_change": 1681913960.4671006, + "state": 16, + "task_spec": "task_3", + "triggered": false, + "workflow_name": "main", + "internal_data": {}, + "data": {} + }, + "c57e4be7-670c-41aa-9740-9fff831f6ccd": { + "id": "c57e4be7-670c-41aa-9740-9fff831f6ccd", + "parent": "00c5b447-760e-47dc-a77b-cffbf893d098", + "children": [ + "ee2c8edc-5d5e-4fc2-ba4b-1801d000c6c3" + ], + "last_state_change": 1681913960.454881, + "state": 4, + "task_spec": "Gateway_1dpu3vt", + "triggered": false, + "workflow_name": "main", + "internal_data": {}, + "data": {} + }, + "ee2c8edc-5d5e-4fc2-ba4b-1801d000c6c3": { + "id": "ee2c8edc-5d5e-4fc2-ba4b-1801d000c6c3", + "parent": "c57e4be7-670c-41aa-9740-9fff831f6ccd", + "children": [ + "91a213bf-bef6-40fa-b0a4-7c7f58552184" + ], + "last_state_change": 1681913960.4550629, + "state": 4, + "task_spec": "Event_1deqprp", + "triggered": false, + "workflow_name": "main", + "internal_data": {}, + "data": {} + }, + "91a213bf-bef6-40fa-b0a4-7c7f58552184": { + "id": "91a213bf-bef6-40fa-b0a4-7c7f58552184", + "parent": "ee2c8edc-5d5e-4fc2-ba4b-1801d000c6c3", + "children": [ + "8166e255-1d17-4f94-8c0e-82cefe8500fd" + ], + "last_state_change": 1681913960.4552596, + "state": 4, + "task_spec": "main.EndJoin", + "triggered": false, + "workflow_name": "main", + "internal_data": {}, + "data": {} + }, + "8166e255-1d17-4f94-8c0e-82cefe8500fd": { + "id": "8166e255-1d17-4f94-8c0e-82cefe8500fd", + "parent": "91a213bf-bef6-40fa-b0a4-7c7f58552184", + "children": [], + "last_state_change": 1681913960.455456, + "state": 4, + "task_spec": "End", + "triggered": false, + "workflow_name": "main", + "internal_data": {}, + "data": {} + }, + "ad8b1640-55ac-4ff5-b07e-16c0b7c92976": { + "id": "ad8b1640-55ac-4ff5-b07e-16c0b7c92976", + "parent": "0366c1b3-d55e-42e4-a6fd-db2d6a1940eb", + "children": [], + "last_state_change": 1681913960.463645, + "state": 64, + "task_spec": "signal", + "triggered": false, + "workflow_name": "main", + "internal_data": { + "event_fired": false + }, + "data": {} + } + }, + "root": "d10fb568-6104-4a26-9081-e29c8eb42e70", + "spec": { + "name": "main", + "description": "main", + "file": "/home/essweine/work/sartography/code/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/diagram_1.bpmn", + "task_specs": { + "Start": { + "id": "main_1", + "name": "Start", + "description": "", + "manual": false, + "internal": false, + "lookahead": 2, + "inputs": [], + "outputs": [ + "StartEvent_1" + ], + "typename": "StartTask" + }, + "main.EndJoin": { + "id": "main_2", + "name": "main.EndJoin", + "description": "", + "manual": false, + "internal": false, + "lookahead": 2, + "inputs": [ + "Event_1deqprp" + ], + "outputs": [ + "End" + ], + "typename": "_EndJoin" + }, + "End": { + "id": "main_3", + "name": "End", + "description": "", + "manual": false, + "internal": false, + "lookahead": 2, + "inputs": [ + "main.EndJoin" + ], + "outputs": [], + "typename": "Simple" + }, + "StartEvent_1": { + "id": "main_4", + "name": "StartEvent_1", + "description": null, + "manual": false, + "internal": false, + "lookahead": 2, + "inputs": [ + "Start" + ], + "outputs": [ + "task_1.BoundaryEventParent" + ], + "lane": null, + "documentation": null, + "position": { + "x": 179.0, + "y": 99.0 + }, + "data_input_associations": [], + "data_output_associations": [], + "io_specification": null, + "event_definition": { + 
"internal": false, + "external": false, + "typename": "NoneEventDefinition" + }, + "typename": "StartEvent", + "extensions": {} + }, + "task_1": { + "id": "main_5", + "name": "task_1", + "description": "Task 1", + "manual": false, + "internal": false, + "lookahead": 2, + "inputs": [ + "task_1.BoundaryEventParent" + ], + "outputs": [ + "Gateway_1eg2w56" + ], + "lane": null, + "documentation": null, + "position": { + "x": 270.0, + "y": 77.0 + }, + "data_input_associations": [], + "data_output_associations": [], + "io_specification": null, + "typename": "NoneTask", + "extensions": {} + }, + "task_1.BoundaryEventParent": { + "id": "main_6", + "name": "task_1.BoundaryEventParent", + "description": "", + "manual": false, + "internal": false, + "lookahead": 2, + "inputs": [ + "StartEvent_1" + ], + "outputs": [ + "task_1", + "signal" + ], + "lane": null, + "documentation": null, + "position": { + "x": 0, + "y": 0 + }, + "data_input_associations": [], + "data_output_associations": [], + "io_specification": null, + "main_child_task_spec": "task_1", + "typename": "_BoundaryEventParent" + }, + "signal": { + "id": "main_7", + "name": "signal", + "description": "Signal Event", + "manual": false, + "internal": false, + "lookahead": 2, + "inputs": [ + "task_1.BoundaryEventParent" + ], + "outputs": [], + "lane": null, + "documentation": null, + "position": { + "x": 302.0, + "y": 139.0 + }, + "data_input_associations": [], + "data_output_associations": [], + "io_specification": null, + "event_definition": { + "internal": true, + "external": true, + "name": "Signal_08n3u9r", + "typename": "SignalEventDefinition" + }, + "cancel_activity": true, + "typename": "BoundaryEvent", + "extensions": {} + }, + "Gateway_1eg2w56": { + "id": "main_8", + "name": "Gateway_1eg2w56", + "description": null, + "manual": false, + "internal": false, + "lookahead": 2, + "inputs": [ + "task_1" + ], + "outputs": [ + "task_2", + "task_3" + ], + "lane": null, + "documentation": null, + "position": { + "x": 425.0, + "y": 92.0 + }, + "data_input_associations": [], + "data_output_associations": [], + "io_specification": null, + "split_task": null, + "threshold": null, + "cancel": false, + "typename": "ParallelGateway", + "extensions": {} + }, + "task_2": { + "id": "main_9", + "name": "task_2", + "description": "Task 2", + "manual": false, + "internal": false, + "lookahead": 2, + "inputs": [ + "Gateway_1eg2w56" + ], + "outputs": [ + "Gateway_1dpu3vt" + ], + "lane": null, + "documentation": null, + "position": { + "x": 530.0, + "y": 77.0 + }, + "data_input_associations": [], + "data_output_associations": [], + "io_specification": null, + "typename": "NoneTask", + "extensions": {} + }, + "Gateway_1dpu3vt": { + "id": "main_10", + "name": "Gateway_1dpu3vt", + "description": null, + "manual": false, + "internal": false, + "lookahead": 2, + "inputs": [ + "task_2", + "task_3" + ], + "outputs": [ + "Event_1deqprp" + ], + "lane": null, + "documentation": null, + "position": { + "x": 685.0, + "y": 92.0 + }, + "data_input_associations": [], + "data_output_associations": [], + "io_specification": null, + "split_task": null, + "threshold": null, + "cancel": false, + "typename": "ParallelGateway", + "extensions": {} + }, + "Event_1deqprp": { + "id": "main_11", + "name": "Event_1deqprp", + "description": null, + "manual": false, + "internal": false, + "lookahead": 2, + "inputs": [ + "Gateway_1dpu3vt" + ], + "outputs": [ + "main.EndJoin" + ], + "lane": null, + "documentation": null, + "position": { + "x": 792.0, + "y": 99.0 + }, + 
"data_input_associations": [], + "data_output_associations": [], + "io_specification": null, + "event_definition": { + "internal": false, + "external": false, + "typename": "NoneEventDefinition" + }, + "typename": "EndEvent", + "extensions": {} + }, + "task_3": { + "id": "main_12", + "name": "task_3", + "description": "Task 3", + "manual": false, + "internal": false, + "lookahead": 2, + "inputs": [ + "Gateway_1eg2w56" + ], + "outputs": [ + "Gateway_1dpu3vt" + ], + "lane": null, + "documentation": null, + "position": { + "x": 530.0, + "y": 190.0 + }, + "data_input_associations": [], + "data_output_associations": [], + "io_specification": null, + "typename": "NoneTask", + "extensions": {} + }, + "Root": { + "id": "main_13", + "name": "Root", + "description": "", + "manual": false, + "internal": false, + "lookahead": 2, + "inputs": [], + "outputs": [], + "typename": "Simple" + } + }, + "io_specification": null, + "data_objects": {}, + "correlation_keys": {}, + "typename": "BpmnProcessSpec" + }, + "subprocess_specs": {}, + "subprocesses": {}, + "bpmn_messages": [], + "correlations": {} +} diff --git a/tests/SpiffWorkflow/bpmn/data/single_call_activity.bpmn b/tests/SpiffWorkflow/bpmn/data/single_call_activity.bpmn new file mode 100644 index 00000000..39611faa --- /dev/null +++ b/tests/SpiffWorkflow/bpmn/data/single_call_activity.bpmn @@ -0,0 +1,38 @@ + + + + + Flow_04380wl + + + + Flow_1io4ukf + + + + Flow_04380wl + Flow_1io4ukf + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tests/SpiffWorkflow/bpmn/events/ActionManagementTest.py b/tests/SpiffWorkflow/bpmn/events/ActionManagementTest.py index 0bd5c578..1f1da73a 100644 --- a/tests/SpiffWorkflow/bpmn/events/ActionManagementTest.py +++ b/tests/SpiffWorkflow/bpmn/events/ActionManagementTest.py @@ -50,6 +50,7 @@ class ActionManagementTest(BpmnWorkflowTestCase): self.do_next_named_step("Complete Work", choice="Done") self.workflow.do_engine_steps() + self.complete_subworkflow() self.assertTrue(self.workflow.is_completed()) @@ -91,6 +92,7 @@ class ActionManagementTest(BpmnWorkflowTestCase): self.do_next_named_step("Complete Work", choice="Done") self.workflow.do_engine_steps() + self.complete_subworkflow() self.assertTrue(self.workflow.is_completed()) diff --git a/tests/SpiffWorkflow/bpmn/events/CallActivityEscalationTest.py b/tests/SpiffWorkflow/bpmn/events/CallActivityEscalationTest.py index e8bae0e5..9ef11e41 100644 --- a/tests/SpiffWorkflow/bpmn/events/CallActivityEscalationTest.py +++ b/tests/SpiffWorkflow/bpmn/events/CallActivityEscalationTest.py @@ -80,6 +80,7 @@ class CallActivityEscalationTest(BpmnWorkflowTestCase): for task in self.workflow.get_tasks(TaskState.READY): task.set_data(should_escalate=False) self.workflow.do_engine_steps() + self.complete_subworkflow() self.save_restore() self.workflow.run_all() self.assertEqual(True, self.workflow.is_completed()) diff --git a/tests/SpiffWorkflow/bpmn/events/MessageInterruptsSpTest.py b/tests/SpiffWorkflow/bpmn/events/MessageInterruptsSpTest.py index 0b93d1e6..52d79dcc 100644 --- a/tests/SpiffWorkflow/bpmn/events/MessageInterruptsSpTest.py +++ b/tests/SpiffWorkflow/bpmn/events/MessageInterruptsSpTest.py @@ -28,6 +28,7 @@ class MessageInterruptsSpTest(BpmnWorkflowTestCase): self.do_next_exclusive_step('Do Something In a Subprocess') self.workflow.do_engine_steps() + self.complete_subworkflow() self.save_restore() self.do_next_exclusive_step('Ack Subprocess Done') @@ -35,8 +36,7 @@ class MessageInterruptsSpTest(BpmnWorkflowTestCase): self.save_restore() 
self.workflow.do_engine_steps() - self.assertEqual( - 0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING))) + self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING))) def testRunThroughInterruptSaveAndRestore(self): @@ -58,8 +58,7 @@ class MessageInterruptsSpTest(BpmnWorkflowTestCase): self.save_restore() self.workflow.do_engine_steps() - self.assertEqual( - 0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING))) + self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING))) def suite(): diff --git a/tests/SpiffWorkflow/bpmn/events/MessageInterruptsTest.py b/tests/SpiffWorkflow/bpmn/events/MessageInterruptsTest.py index bdd6d6c1..1ca3a201 100644 --- a/tests/SpiffWorkflow/bpmn/events/MessageInterruptsTest.py +++ b/tests/SpiffWorkflow/bpmn/events/MessageInterruptsTest.py @@ -30,13 +30,13 @@ class MessageInterruptsTest(BpmnWorkflowTestCase): self.save_restore() self.workflow.do_engine_steps() + self.complete_subworkflow() self.assertEqual(0, len(self.workflow.get_tasks(TaskState.WAITING))) self.save_restore() self.workflow.do_engine_steps() - self.assertEqual( - 0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING))) + self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING))) def testRunThroughMessageInterruptSaveAndRestore(self): @@ -61,9 +61,9 @@ class MessageInterruptsTest(BpmnWorkflowTestCase): self.save_restore() self.workflow.do_engine_steps() + self.complete_subworkflow() self.save_restore() - self.assertEqual( - 0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING))) + self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING))) def testRunThroughHappy(self): @@ -77,11 +77,11 @@ class MessageInterruptsTest(BpmnWorkflowTestCase): self.do_next_exclusive_step('Do Something That Takes A Long Time') self.workflow.do_engine_steps() + self.complete_subworkflow() self.assertEqual(0, len(self.workflow.get_tasks(TaskState.WAITING))) self.workflow.do_engine_steps() - self.assertEqual( - 0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING))) + self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING))) def testRunThroughMessageInterrupt(self): @@ -101,8 +101,8 @@ class MessageInterruptsTest(BpmnWorkflowTestCase): self.do_next_exclusive_step('Acknowledge Interrupt Message') self.workflow.do_engine_steps() - self.assertEqual( - 0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING))) + self.complete_subworkflow() + self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING))) def suite(): diff --git a/tests/SpiffWorkflow/bpmn/events/MessageNonInterruptTest.py b/tests/SpiffWorkflow/bpmn/events/MessageNonInterruptTest.py index 3127cc78..ce2d290e 100644 --- a/tests/SpiffWorkflow/bpmn/events/MessageNonInterruptTest.py +++ b/tests/SpiffWorkflow/bpmn/events/MessageNonInterruptTest.py @@ -31,13 +31,13 @@ class MessageNonInterruptTest(BpmnWorkflowTestCase): self.save_restore() self.workflow.do_engine_steps() + self.complete_subworkflow() self.assertEqual(0, len(self.workflow.get_tasks(TaskState.WAITING))) self.save_restore() self.workflow.do_engine_steps() - self.assertEqual( - 0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING))) + self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING))) def testRunThroughMessageInterruptSaveAndRestore(self): @@ -71,8 +71,8 @@ class MessageNonInterruptTest(BpmnWorkflowTestCase): 
self.save_restore() self.workflow.do_engine_steps() - self.assertEqual( - 0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING))) + self.complete_subworkflow() + self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING))) def testRunThroughHappy(self): @@ -87,11 +87,11 @@ class MessageNonInterruptTest(BpmnWorkflowTestCase): self.do_next_exclusive_step('Do Something That Takes A Long Time') self.workflow.do_engine_steps() + self.complete_subworkflow() self.assertEqual(0, len(self.workflow.get_tasks(TaskState.WAITING))) self.workflow.do_engine_steps() - self.assertEqual( - 0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING))) + self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING))) def testRunThroughMessageInterrupt(self): @@ -118,8 +118,8 @@ class MessageNonInterruptTest(BpmnWorkflowTestCase): self.do_next_named_step('Do Something That Takes A Long Time') self.workflow.do_engine_steps() - self.assertEqual( - 0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING))) + self.complete_subworkflow() + self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING))) def testRunThroughMessageInterruptOtherOrder(self): @@ -145,8 +145,8 @@ class MessageNonInterruptTest(BpmnWorkflowTestCase): self.do_next_named_step('Acknowledge Non-Interrupt Message') self.workflow.do_engine_steps() - self.assertEqual( - 0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING))) + self.complete_subworkflow() + self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING))) def testRunThroughMessageInterruptOtherOrderSaveAndRestore(self): @@ -177,8 +177,8 @@ class MessageNonInterruptTest(BpmnWorkflowTestCase): self.save_restore() self.workflow.do_engine_steps() - self.assertEqual( - 0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING))) + self.complete_subworkflow() + self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING))) def suite(): diff --git a/tests/SpiffWorkflow/bpmn/events/MessageNonInterruptsSpTest.py b/tests/SpiffWorkflow/bpmn/events/MessageNonInterruptsSpTest.py index a10f8e93..2297654a 100644 --- a/tests/SpiffWorkflow/bpmn/events/MessageNonInterruptsSpTest.py +++ b/tests/SpiffWorkflow/bpmn/events/MessageNonInterruptsSpTest.py @@ -28,6 +28,7 @@ class MessageNonInterruptsSpTest(BpmnWorkflowTestCase): self.do_next_exclusive_step('Do Something In a Subprocess') self.workflow.do_engine_steps() + self.complete_subworkflow() self.save_restore() self.do_next_exclusive_step('Ack Subprocess Done') @@ -35,8 +36,7 @@ class MessageNonInterruptsSpTest(BpmnWorkflowTestCase): self.save_restore() self.workflow.do_engine_steps() - self.assertEqual( - 0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING))) + self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING))) def testRunThroughMessageSaveAndRestore(self): @@ -53,6 +53,7 @@ class MessageNonInterruptsSpTest(BpmnWorkflowTestCase): self.do_next_named_step('Do Something In a Subprocess') self.workflow.do_engine_steps() + self.complete_subworkflow() self.save_restore() self.do_next_named_step('Ack Subprocess Done') @@ -64,8 +65,7 @@ class MessageNonInterruptsSpTest(BpmnWorkflowTestCase): self.save_restore() self.workflow.do_engine_steps() - self.assertEqual( - 0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING))) + self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING))) def 
testRunThroughMessageOrder2SaveAndRestore(self): @@ -81,6 +81,7 @@ class MessageNonInterruptsSpTest(BpmnWorkflowTestCase): self.workflow.catch(MessageEventDefinition('Test Message')) self.do_next_named_step('Do Something In a Subprocess') self.workflow.do_engine_steps() + self.complete_subworkflow() self.save_restore() self.do_next_named_step('Acknowledge SP Parallel Message') @@ -92,8 +93,7 @@ class MessageNonInterruptsSpTest(BpmnWorkflowTestCase): self.save_restore() self.workflow.do_engine_steps() - self.assertEqual( - 0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING))) + self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING))) def testRunThroughMessageOrder3SaveAndRestore(self): @@ -114,6 +114,7 @@ class MessageNonInterruptsSpTest(BpmnWorkflowTestCase): self.do_next_named_step('Do Something In a Subprocess') self.workflow.do_engine_steps() + self.complete_subworkflow() self.save_restore() self.do_next_named_step('Ack Subprocess Done') @@ -121,8 +122,7 @@ class MessageNonInterruptsSpTest(BpmnWorkflowTestCase): self.save_restore() self.workflow.do_engine_steps() - self.assertEqual( - 0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING))) + self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING))) def suite(): diff --git a/tests/SpiffWorkflow/bpmn/events/MessagesTest.py b/tests/SpiffWorkflow/bpmn/events/MessagesTest.py index 2535daba..3757f1c7 100644 --- a/tests/SpiffWorkflow/bpmn/events/MessagesTest.py +++ b/tests/SpiffWorkflow/bpmn/events/MessagesTest.py @@ -27,12 +27,11 @@ class MessagesTest(BpmnWorkflowTestCase): self.workflow.catch(MessageEventDefinition('Test Message')) self.assertEqual(1, len(self.workflow.get_tasks(TaskState.READY))) - self.assertEqual( - 'Test Message', self.workflow.get_tasks(TaskState.READY)[0].task_spec.description) + self.assertEqual('Test Message', self.workflow.get_tasks(TaskState.READY)[0].task_spec.description) self.workflow.do_engine_steps() - self.assertEqual( - 0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING))) + self.complete_subworkflow() + self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING))) def testRunThroughSaveAndRestore(self): @@ -52,8 +51,8 @@ class MessagesTest(BpmnWorkflowTestCase): self.save_restore() self.workflow.do_engine_steps() - self.assertEqual( - 0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING))) + self.complete_subworkflow() + self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING))) def suite(): diff --git a/tests/SpiffWorkflow/bpmn/events/MultipleThrowEventTest.py b/tests/SpiffWorkflow/bpmn/events/MultipleThrowEventTest.py index 031a1643..0b7b383f 100644 --- a/tests/SpiffWorkflow/bpmn/events/MultipleThrowEventTest.py +++ b/tests/SpiffWorkflow/bpmn/events/MultipleThrowEventTest.py @@ -19,6 +19,7 @@ class MultipleThrowEventIntermediateCatchTest(BpmnWorkflowTestCase): if save_restore: self.save_restore() self.workflow.do_engine_steps() + self.complete_subworkflow() self.assertEqual(len(self.workflow.get_waiting_tasks()), 0) self.assertEqual(self.workflow.is_completed(), True) @@ -44,4 +45,5 @@ class MultipleThrowEventStartsEventTest(BpmnWorkflowTestCase): self.assertEqual(len(ready_tasks), 1) ready_tasks[0].run() self.workflow.do_engine_steps() + self.complete_subworkflow() self.assertEqual(self.workflow.is_completed(), True) \ No newline at end of file diff --git a/tests/SpiffWorkflow/bpmn/events/NITimerDurationBoundaryTest.py 
b/tests/SpiffWorkflow/bpmn/events/NITimerDurationBoundaryTest.py index 136b3ef3..275640b0 100644 --- a/tests/SpiffWorkflow/bpmn/events/NITimerDurationBoundaryTest.py +++ b/tests/SpiffWorkflow/bpmn/events/NITimerDurationBoundaryTest.py @@ -65,7 +65,9 @@ class NITimerDurationTest(BpmnWorkflowTestCase): task.run() self.workflow.refresh_waiting_tasks() self.workflow.do_engine_steps() - self.assertEqual(self.workflow.is_completed(),True) + self.workflow.do_engine_steps() + self.complete_subworkflow() + self.assertEqual(self.workflow.is_completed(), True) self.assertEqual(self.workflow.last_task.data, {'work_done': 'Yes', 'delay_reason': 'Just Because'}) diff --git a/tests/SpiffWorkflow/bpmn/events/TransactionSubprocssTest.py b/tests/SpiffWorkflow/bpmn/events/TransactionSubprocssTest.py index de7def17..4abae178 100644 --- a/tests/SpiffWorkflow/bpmn/events/TransactionSubprocssTest.py +++ b/tests/SpiffWorkflow/bpmn/events/TransactionSubprocssTest.py @@ -25,6 +25,7 @@ class TransactionSubprocessTest(BpmnWorkflowTestCase): ready_tasks[0].update_data({'quantity': 2}) ready_tasks[0].run() self.workflow.do_engine_steps() + self.complete_subworkflow() self.assertIn('value', self.workflow.last_task.data) # Check that workflow and next task completed diff --git a/tests/SpiffWorkflow/bpmn/serializer/VersionMigrationTest.py b/tests/SpiffWorkflow/bpmn/serializer/VersionMigrationTest.py index 393b2d76..1b501b55 100644 --- a/tests/SpiffWorkflow/bpmn/serializer/VersionMigrationTest.py +++ b/tests/SpiffWorkflow/bpmn/serializer/VersionMigrationTest.py @@ -23,6 +23,10 @@ class Version_1_0_Test(BaseTestCase): self.assertEqual('Action3', ready_tasks[0].task_spec.description) ready_tasks[0].run() wf.do_engine_steps() + wf.refresh_waiting_tasks() + wf.do_engine_steps() + wf.refresh_waiting_tasks() + wf.do_engine_steps() self.assertEqual(True, wf.is_completed()) @@ -34,12 +38,16 @@ class Version_1_1_Test(BaseTestCase): wf.script_engine = PythonScriptEngine(environment=TaskDataEnvironment({"time": time})) wf.refresh_waiting_tasks() wf.do_engine_steps() + wf.refresh_waiting_tasks() + wf.do_engine_steps() self.assertTrue(wf.is_completed()) def test_convert_data_specs(self): fn = os.path.join(self.DATA_DIR, 'serialization', 'v1.1-data.json') wf = self.serializer.deserialize_json(open(fn).read()) wf.do_engine_steps() + wf.refresh_waiting_tasks() + wf.do_engine_steps() self.assertTrue(wf.is_completed()) def test_convert_exclusive_gateway(self): @@ -71,3 +79,16 @@ class Version_1_1_Test(BaseTestCase): wf.refresh_waiting_tasks() self.assertTrue(wf.is_completed()) self.assertEqual(wf.last_task.data['counter'], 20) + + def test_update_task_states(self): + fn = os.path.join(self.DATA_DIR, 'serialization', 'v1.1-task-states.json') + wf = self.serializer.deserialize_json(open(fn).read()) + start = wf.get_tasks_from_spec_name('Start')[0] + self.assertEqual(start.state, TaskState.COMPLETED) + signal = wf.get_tasks_from_spec_name('signal')[0] + self.assertEqual(signal.state, TaskState.CANCELLED) + ready_tasks = wf.get_tasks(TaskState.READY) + while len(ready_tasks) > 0: + ready_tasks[0].run() + ready_tasks = wf.get_tasks(TaskState.READY) + self.assertTrue(wf.is_completed()) diff --git a/tests/SpiffWorkflow/camunda/CallActivityMessageTest.py b/tests/SpiffWorkflow/camunda/CallActivityMessageTest.py index b5177c78..8210b6c7 100644 --- a/tests/SpiffWorkflow/camunda/CallActivityMessageTest.py +++ b/tests/SpiffWorkflow/camunda/CallActivityMessageTest.py @@ -41,7 +41,7 @@ class CallActivityMessageTest(BaseTestCase): 
current_task.update_data(step[1]) current_task.run() self.workflow.do_engine_steps() - self.workflow.refresh_waiting_tasks() + self.complete_subworkflow() if save_restore: self.save_restore() ready_tasks = self.workflow.get_tasks(TaskState.READY) self.assertEqual(self.workflow.is_completed(),True,'Expected the workflow to be complete at this point') diff --git a/tests/SpiffWorkflow/camunda/ExternalMessageBoundaryEventTest.py b/tests/SpiffWorkflow/camunda/ExternalMessageBoundaryEventTest.py index 74b5dd99..a416924a 100644 --- a/tests/SpiffWorkflow/camunda/ExternalMessageBoundaryEventTest.py +++ b/tests/SpiffWorkflow/camunda/ExternalMessageBoundaryEventTest.py @@ -50,7 +50,7 @@ class ExternalMessageBoundaryTest(BaseTestCase): self.workflow.catch(MessageEventDefinition('reset', payload='SomethingDrastic', result_var='reset_var')) ready_tasks = self.workflow.get_tasks(TaskState.READY) # The user activity was cancelled and we should continue from the boundary event - self.assertEqual(1, len(ready_tasks),'Expected to have two ready tasks') + self.assertEqual(2, len(ready_tasks), 'Expected to have two ready tasks') event = self.workflow.get_tasks_from_spec_name('Event_19detfv')[0] event.run() self.assertEqual('SomethingDrastic', event.data['reset_var']) diff --git a/tests/SpiffWorkflow/camunda/NIMessageBoundaryTest.py b/tests/SpiffWorkflow/camunda/NIMessageBoundaryTest.py index cd5120da..68d1da71 100644 --- a/tests/SpiffWorkflow/camunda/NIMessageBoundaryTest.py +++ b/tests/SpiffWorkflow/camunda/NIMessageBoundaryTest.py @@ -84,6 +84,7 @@ class NIMessageBoundaryTest(BaseTestCase): task.data['work_completed'] = 'Lots of Stuff' self.workflow.run_task_from_id(task.id) self.workflow.do_engine_steps() + self.complete_subworkflow() self.assertEqual(self.workflow.is_completed(),True) self.assertEqual(self.workflow.last_task.data,{'Event_InterruptBoundary_Response': 'Youre late!', 'flag_task': 'Yes', diff --git a/tests/SpiffWorkflow/camunda/ResetTokenSubWorkflowTest.py b/tests/SpiffWorkflow/camunda/ResetTokenSubWorkflowTest.py index f8a288e0..e444374e 100644 --- a/tests/SpiffWorkflow/camunda/ResetTokenSubWorkflowTest.py +++ b/tests/SpiffWorkflow/camunda/ResetTokenSubWorkflowTest.py @@ -51,7 +51,7 @@ class ResetTokenTestSubProcess(BaseTestCase): firsttaskid = task.id self.assertEqual(step['taskname'], task.task_spec.name) task.update_data({step['formvar']: step['answer']}) - self.workflow.run_task_from_id(task.id) + task.run() self.workflow.do_engine_steps() if save_restore: self.save_restore() @@ -75,8 +75,9 @@ class ResetTokenTestSubProcess(BaseTestCase): task = self.workflow.get_ready_user_tasks()[0] self.assertEqual(step['taskname'], task.task_spec.name) task.update_data({step['formvar']: step['answer']}) - self.workflow.run_task_from_id(task.id) + task.run() self.workflow.do_engine_steps() + self.complete_subworkflow() if save_restore: self.save_restore() diff --git a/tests/SpiffWorkflow/camunda/SubWorkflowTest.py b/tests/SpiffWorkflow/camunda/SubWorkflowTest.py index 97aed8de..930020ce 100644 --- a/tests/SpiffWorkflow/camunda/SubWorkflowTest.py +++ b/tests/SpiffWorkflow/camunda/SubWorkflowTest.py @@ -31,8 +31,9 @@ class SubWorkflowTest(BaseTestCase): task = self.workflow.get_ready_user_tasks()[0] self.assertEqual("Activity_"+answer, task.task_spec.name) task.update_data({"Field"+answer: answer}) - self.workflow.run_task_from_id(task.id) + task.run() self.workflow.do_engine_steps() + self.complete_subworkflow() if save_restore: self.save_restore() 
self.assertEqual(self.workflow.last_task.data,{'FieldA': 'A', diff --git a/tests/SpiffWorkflow/spiff/PrescriptPostscriptTest.py b/tests/SpiffWorkflow/spiff/PrescriptPostscriptTest.py index f928f80c..c44f1749 100644 --- a/tests/SpiffWorkflow/spiff/PrescriptPostscriptTest.py +++ b/tests/SpiffWorkflow/spiff/PrescriptPostscriptTest.py @@ -58,6 +58,8 @@ class PrescriptPostsciptTest(BaseTestCase): self.workflow.do_engine_steps() ex = se.exception self.assertIn("Error occurred in the Pre-Script", str(ex)) + task = self.workflow.get_tasks_from_spec_name('Activity_1iqs4li')[0] + self.assertEqual(task.state, TaskState.ERROR) def call_activity_test(self, save_restore=False): @@ -82,3 +84,4 @@ class PrescriptPostsciptTest(BaseTestCase): ready_tasks = self.workflow.get_tasks(TaskState.READY) ready_tasks[0].set_data(**data) self.workflow.do_engine_steps() + self.complete_subworkflow()
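Editor's note (not part of the patch): the test changes above converge on one driving pattern. Ready tasks are now run directly with task.run() instead of workflow.run_task_from_id(task.id), the engine is advanced with do_engine_steps(), and the BpmnWorkflowTestCase helper complete_subworkflow() (or extra refresh_waiting_tasks()/do_engine_steps() passes, as in VersionMigrationTest) is used so that call-activity subprocesses finish before completion is asserted. The v1.1-task-states.json fixture's numeric states appear to line up with the migration test's assertions (32 for the COMPLETED 'Start' task, 64 for the CANCELLED 'signal' boundary event). The sketch below is illustrative only, not code from the patch; it uses only calls that appear in the tests above and assumes a constructed BpmnWorkflow instance.

    # Illustrative sketch only -- not part of the patch above.
    from SpiffWorkflow.task import TaskState

    def run_to_completion(workflow, max_passes=100):
        """Drive a BpmnWorkflow the way the updated tests do."""
        passes = 0
        while not workflow.is_completed() and passes < max_passes:
            passes += 1
            for task in workflow.get_tasks(TaskState.READY):
                task.run()  # replaces workflow.run_task_from_id(task.id)
            workflow.do_engine_steps()
            # Timers and subprocess joins may need another pass, mirroring the
            # extra refresh_waiting_tasks()/do_engine_steps() calls and the
            # complete_subworkflow() helper calls added throughout these tests.
            workflow.refresh_waiting_tasks()
            workflow.do_engine_steps()
        return workflow.is_completed()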