Merge commit 'f14b4acb86bbece3cf9056481847b403f0a7bd54'

burnettk 2023-04-21 11:22:52 -04:00
commit 5e7df4ed1d
56 changed files with 1191 additions and 215 deletions

View File

@ -277,3 +277,4 @@ coverage.xml
.c9revisions
.idea
/venv
*~

View File

@ -45,7 +45,7 @@ tests-par:
echo " pip install unittest-parallel"; \
exit 1; \
fi
unittest-parallel --module-fixtures -vs tests/SpiffWorkflow -p \*Test.py -t .
unittest-parallel --module-fixtures -qbs tests/SpiffWorkflow -p \*Test.py -t .
.PHONY : tests-cov
tests-cov:

View File

@ -1,13 +1,11 @@
# -*- coding: utf-8 -*-
import ast
import copy
import sys
import traceback
import warnings
from .PythonScriptEngineEnvironment import TaskDataEnvironment
from ..exceptions import SpiffWorkflowException, WorkflowTaskException
from ..operators import Operator
# Copyright (C) 2020 Kelly McDonald
@ -40,10 +38,12 @@ class PythonScriptEngine(object):
"""
def __init__(self, default_globals=None, scripting_additions=None, environment=None):
if default_globals is not None or scripting_additions is not None:
warnings.warn(f'default_globals and scripting_additions are deprecated. '
f'Please provide an environment such as TaskDataEnvironment',
DeprecationWarning, stacklevel=2)
if environment is None:
environment_globals = {}
environment_globals.update(default_globals or {})
@ -51,7 +51,6 @@ class PythonScriptEngine(object):
self.environment = TaskDataEnvironment(environment_globals)
else:
self.environment = environment
self.error_tasks = {}
def validate(self, expression):
ast.parse(expression)
@ -62,12 +61,7 @@ class PythonScriptEngine(object):
return the result.
"""
try:
if isinstance(expression, Operator):
# I am assuming that this takes care of some kind of XML
# expression judging from the contents of operators.py
return expression._matches(task)
else:
return self._evaluate(expression, task.data, external_methods)
return self._evaluate(expression, task.data, external_methods)
except SpiffWorkflowException as se:
se.add_note(f"Error evaluating expression '{expression}'")
raise se
@ -75,15 +69,11 @@ class PythonScriptEngine(object):
raise WorkflowTaskException(f"Error evaluating expression '{expression}'", task=task, exception=e)
def execute(self, task, script, external_methods=None):
"""
Execute the script, within the context of the specified task
"""
"""Execute the script, within the context of the specified task."""
try:
self.check_for_overwrite(task, external_methods or {})
self._execute(script, task.data, external_methods or {})
return self._execute(script, task.data, external_methods or {})
except Exception as err:
wte = self.create_task_exec_exception(task, script, err)
self.error_tasks[task.id] = wte
raise wte
def call_service(self, operation_name, operation_params, task_data):
@ -120,21 +110,8 @@ class PythonScriptEngine(object):
error_line = script.splitlines()[line_number - 1]
return line_number, error_line
def check_for_overwrite(self, task, external_methods):
"""It's possible that someone will define a variable with the
same name as a pre-defined script, rending the script un-callable.
This results in a nearly indecipherable error. Better to fail
fast with a sensible error message."""
func_overwrites = set(self.environment.globals).intersection(task.data)
func_overwrites.update(set(external_methods).intersection(task.data))
if len(func_overwrites) > 0:
msg = f"You have task data that overwrites a predefined " \
f"function(s). Please change the following variable or " \
f"field name(s) to something else: {func_overwrites}"
raise WorkflowTaskException(msg, task=task)
def _evaluate(self, expression, context, external_methods=None):
return self.environment.evaluate(expression, context, external_methods)
def _execute(self, script, context, external_methods=None):
self.environment.execute(script, context, external_methods)
return self.environment.execute(script, context, external_methods)
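For orientation, a minimal sketch of the construction style this deprecation points to; the extra timedelta global passed in here is only illustrative:

    from datetime import timedelta
    from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
    from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment

    # Instead of PythonScriptEngine(default_globals=..., scripting_additions=...),
    # wrap any extra globals in an environment and pass that in.
    script_engine = PythonScriptEngine(environment=TaskDataEnvironment({'timedelta': timedelta}))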

View File

@ -1,6 +1,7 @@
import copy
import warnings
class BasePythonScriptEngineEnvironment:
def __init__(self, environment_globals=None):
self.globals = environment_globals or {}
@ -11,7 +12,9 @@ class BasePythonScriptEngineEnvironment:
def execute(self, script, context, external_methods=None):
raise NotImplementedError("Subclass must implement this method")
class TaskDataEnvironment(BasePythonScriptEngineEnvironment):
def evaluate(self, expression, context, external_methods=None):
my_globals = copy.copy(self.globals) # else we pollute all later evals.
self._prepare_context(context)
@ -20,6 +23,7 @@ class TaskDataEnvironment(BasePythonScriptEngineEnvironment):
return eval(expression, my_globals)
def execute(self, script, context, external_methods=None):
self.check_for_overwrite(context, external_methods or {})
my_globals = copy.copy(self.globals)
self._prepare_context(context)
my_globals.update(external_methods or {})
@ -28,12 +32,12 @@ class TaskDataEnvironment(BasePythonScriptEngineEnvironment):
exec(script, context)
finally:
self._remove_globals_and_functions_from_context(context, external_methods)
return True
def _prepare_context(self, context):
pass
def _remove_globals_and_functions_from_context(self, context,
external_methods=None):
def _remove_globals_and_functions_from_context(self, context, external_methods=None):
"""When executing a script, don't leave the globals, functions
and external methods in the context that we have modified."""
for k in list(context):
@ -43,6 +47,20 @@ class TaskDataEnvironment(BasePythonScriptEngineEnvironment):
external_methods and k in external_methods:
context.pop(k)
def check_for_overwrite(self, context, external_methods):
"""It's possible that someone will define a variable with the
same name as a pre-defined script, rendering the script un-callable.
This results in a nearly indecipherable error. Better to fail
fast with a sensible error message."""
func_overwrites = set(self.globals).intersection(context)
func_overwrites.update(set(external_methods).intersection(context))
if len(func_overwrites) > 0:
msg = f"You have task data that overwrites a predefined " \
f"function(s). Please change the following variable or " \
f"field name(s) to something else: {func_overwrites}"
raise ValueError(msg)
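A small sketch of the failure this guard turns into a sensible error; the my_function name is hypothetical:

    # 'my_function' is an engine-provided global, and the task data (context)
    # also defines a variable named 'my_function', shadowing it.
    env = TaskDataEnvironment({'my_function': lambda: 42})
    env.check_for_overwrite({'my_function': 'oops'}, external_methods={})
    # raises ValueError asking you to rename the conflicting variable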
class Box(dict):
"""
Example:

View File

@ -88,6 +88,12 @@ class ProcessParser(NodeParser):
return message_names
def called_element_ids(self):
"""
Returns a list of ids referenced by `bpmn:callActivity` nodes.
"""
return self.xpath("./bpmn:callActivity/@calledElement")
def parse_node(self, node):
"""
Parses the specified child task node, and returns the task spec. This

View File

@ -16,58 +16,64 @@ def convert_timer_expressions(dct):
message = "Unable to convert time specifications for {spec}. This most likely because the values are set during workflow execution."
# Moving this code into helper functions to quiet sonarcloud's complaints about this file.
# Don't really consider this better, but whatever.
def convert_timedate(spec):
expr = spec['event_definition'].pop('dateTime')
try:
dt = eval(expr)
if isinstance(dt, datetime):
spec['event_definition']['expression'] = f"'{dt.isoformat()}'"
spec['event_definition']['typename'] = 'TimeDateEventDefinition'
elif isinstance(dt, timedelta):
spec['event_definition']['expression'] = f"'{td_to_iso(dt)}'"
spec['event_definition']['typename'] = 'DurationTimerEventDefinition'
except:
raise VersionMigrationError(message.format(spec=spec['name']))
def convert_cycle(spec, task):
expr = spec['event_definition'].pop('cycle_definition')
try:
repeat, duration = eval(expr)
spec['event_definition']['expression'] = f"'R{repeat}/{td_to_iso(duration)}'"
if task is not None:
cycles_complete = task['data'].pop('repeat_count', 0)
start_time = task['internal_data'].pop('start_time', None)
if start_time is not None:
dt = datetime.fromisoformat(start_time)
task['internal_data']['event_value'] = {
'cycles': repeat - cycles_complete,
'next': datetime.combine(dt.date(), dt.time(), LOCALTZ).isoformat(),
'duration': duration.total_seconds(),
}
except:
raise VersionMigrationError(message.format(spec=spec['name']))
if spec['typename'] == 'StartEvent':
spec['outputs'].remove(spec['name'])
if task is not None:
children = [ dct['tasks'][c] for c in task['children'] ]
# Formerly cycles were handled by looping back and reusing the tasks so this removes the extra tasks
remove = [ c for c in children if c['task_spec'] == task['task_spec']][0]
for task_id in remove['children']:
child = dct['tasks'][task_id]
if child['task_spec'].startswith('return') or child['state'] != TaskState.COMPLETED:
dct['tasks'].pop(task_id)
else:
task['children'].append(task_id)
task['children'].remove(remove['id'])
dct['tasks'].pop(remove['id'])
has_timer = lambda ts: 'event_definition' in ts and ts['event_definition']['typename'] in [ 'CycleTimerEventDefinition', 'TimerEventDefinition']
for spec in [ ts for ts in dct['spec']['task_specs'].values() if has_timer(ts) ]:
spec['event_definition']['name'] = spec['event_definition'].pop('label')
if spec['event_definition']['typename'] == 'TimerEventDefinition':
expr = spec['event_definition'].pop('dateTime')
try:
dt = eval(expr)
if isinstance(dt, datetime):
spec['event_definition']['expression'] = f"'{dt.isoformat()}'"
spec['event_definition']['typename'] = 'TimeDateEventDefinition'
elif isinstance(dt, timedelta):
spec['event_definition']['expression'] = f"'{td_to_iso(dt)}'"
spec['event_definition']['typename'] = 'DurationTimerEventDefinition'
except:
raise VersionMigrationError(message.format(spec=spec['name']))
convert_timedate(spec)
if spec['event_definition']['typename'] == 'CycleTimerEventDefinition':
tasks = [ t for t in dct['tasks'].values() if t['task_spec'] == spec['name'] ]
task = tasks[0] if len(tasks) > 0 else None
expr = spec['event_definition'].pop('cycle_definition')
try:
repeat, duration = eval(expr)
spec['event_definition']['expression'] = f"'R{repeat}/{td_to_iso(duration)}'"
if task is not None:
cycles_complete = task['data'].pop('repeat_count', 0)
start_time = task['internal_data'].pop('start_time', None)
if start_time is not None:
dt = datetime.fromisoformat(start_time)
task['internal_data']['event_value'] = {
'cycles': repeat - cycles_complete,
'next': datetime.combine(dt.date(), dt.time(), LOCALTZ).isoformat(),
'duration': duration.total_seconds(),
}
except:
raise VersionMigrationError(message.format(spec=spec['name']))
if spec['typename'] == 'StartEvent':
spec['outputs'].remove(spec['name'])
if task is not None:
children = [ dct['tasks'][c] for c in task['children'] ]
# Formerly cycles were handled by looping back and reusing the tasks so this removes the extra tasks
remove = [ c for c in children if c['task_spec'] == task['task_spec']][0]
for task_id in remove['children']:
child = dct['tasks'][task_id]
if child['task_spec'].startswith('return') or child['state'] != TaskState.COMPLETED:
dct['tasks'].pop(task_id)
else:
task['children'].append(task_id)
task['children'].remove(remove['id'])
dct['tasks'].pop(remove['id'])
convert_cycle(spec, task)
def add_default_condition_to_cond_task_specs(dct):
@ -122,3 +128,18 @@ def remove_loop_reset(dct):
parent = dct['tasks'].get(task['parent'])
parent['children'] = [c for c in parent['children'] if c != task['id']]
dct['spec']['task_specs'].pop(spec['name'])
def update_task_states(dct):
def update(process):
for task in process['tasks'].values():
if task['state'] == 32:
task['state'] = TaskState.COMPLETED
elif task['state'] == 64:
task['state'] = TaskState.CANCELLED
root = dct['tasks'].get(dct['root'])
if root['state'] == 32:
update(dct)
for sp in dct['subprocesses'].values():
update(sp)
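For reference, only two numeric values need remapping because the enumeration is reshuffled later in this commit (32 now means STARTED and 64 now means COMPLETED); the same mapping expressed as a dict:

    OLD_TO_NEW = {
        32: TaskState.COMPLETED,  # 32 was COMPLETED in old serializations
        64: TaskState.CANCELLED,  # 64 was CANCELLED in old serializations
    }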

View File

@ -7,6 +7,7 @@ from .version_1_2 import (
create_data_objects_and_io_specs,
check_multiinstance,
remove_loop_reset,
update_task_states,
)
def from_version_1_1(old):
@ -36,6 +37,7 @@ def from_version_1_1(old):
create_data_objects_and_io_specs(new)
check_multiinstance(new)
remove_loop_reset(new)
update_task_states(new)
new['VERSION'] = "1.2"
return new
@ -53,6 +55,7 @@ def from_version_1_0(old):
attributes based on the task states.
"""
new = deepcopy(old)
new['VERSION'] = "1.1"
move_subprocesses_to_top(new)
return from_version_1_1(new)

View File

@ -248,7 +248,11 @@ class BpmnWorkflowSerializer:
subprocess_spec = top.subprocess_specs[task_spec.spec]
subprocess = self.wf_class(subprocess_spec, {}, name=task_spec.name, parent=process, deserializing=True)
subprocess_dct = top_dct['subprocesses'].get(task_id, {})
subprocess.data = self.data_converter.restore(subprocess_dct.pop('data'))
subprocess.spec.data_objects.update(process.spec.data_objects)
if len(subprocess.spec.data_objects) > 0:
subprocess.data = process.data
else:
subprocess.data = self.data_converter.restore(subprocess_dct.pop('data'))
subprocess.success = subprocess_dct.pop('success')
subprocess.task_tree = self.task_tree_from_dict(subprocess_dct, subprocess_dct.pop('root'), None, subprocess, top, top_dct)
subprocess.completed_event.connect(task_spec._on_subworkflow_completed, task)

View File

@ -89,6 +89,9 @@ class BpmnSpecMixin(TaskSpec):
def _on_complete_hook(self, my_task):
if isinstance(my_task.parent.task_spec, BpmnSpecMixin):
my_task.parent.task_spec._child_complete_hook(my_task)
if self.io_specification is not None and len(self.io_specification.data_outputs) > 0:
data = {}
for var in self.io_specification.data_outputs:
@ -105,8 +108,6 @@ class BpmnSpecMixin(TaskSpec):
my_task.data.pop(obj.name, None)
super(BpmnSpecMixin, self)._on_complete_hook(my_task)
if isinstance(my_task.parent.task_spec, BpmnSpecMixin):
my_task.parent.task_spec._child_complete_hook(my_task)
def _child_complete_hook(self, child_task):
pass

View File

@ -18,7 +18,6 @@
# 02110-1301 USA
from .BpmnSpecMixin import BpmnSpecMixin
from ...task import TaskState
from ...specs.Simple import Simple
@ -30,13 +29,8 @@ class ScriptEngineTask(Simple, BpmnSpecMixin):
pass
def _run_hook(self, task):
try:
self._execute(task)
super(ScriptEngineTask, self)._run_hook(task)
except Exception as exc:
task._set_state(TaskState.WAITING)
raise exc
return True
return self._execute(task)
class ScriptTask(ScriptEngineTask):
@ -54,5 +48,4 @@ class ScriptTask(ScriptEngineTask):
return 'Script Task'
def _execute(self, task):
task.workflow.script_engine.execute(task, self.script)
return task.workflow.script_engine.execute(task, self.script)

View File

@ -27,15 +27,17 @@ class SubWorkflowTask(BpmnSpecMixin):
def _on_subworkflow_completed(self, subworkflow, my_task):
self.update_data(my_task, subworkflow)
my_task._set_state(TaskState.READY)
def _update_hook(self, my_task):
wf = my_task.workflow._get_outermost_workflow(my_task)
if my_task.id not in wf.subprocesses:
subprocess = wf.subprocesses.get(my_task.id)
if subprocess is None:
super()._update_hook(my_task)
self.create_workflow(my_task)
self.start_workflow(my_task)
my_task._set_state(TaskState.WAITING)
else:
return subprocess.is_completed()
def _on_cancel(self, my_task):
subworkflow = my_task.workflow.get_subprocess(my_task)
@ -44,7 +46,11 @@ class SubWorkflowTask(BpmnSpecMixin):
def copy_data(self, my_task, subworkflow):
# There is only one copy of any given data object, so it should be updated immediately
subworkflow.data = my_task.workflow.data
# Doing this is actually a little problematic, because it gives parent processes access to
# data objects defined in subprocesses.
# But our data management is already hopelessly messed up and in dire need of reconsideration
if len(subworkflow.spec.data_objects) > 0:
subworkflow.data = my_task.workflow.data
start = subworkflow.get_tasks_from_spec_name('Start', workflow=subworkflow)
start[0].set_data(**my_task.data)

View File

@ -67,9 +67,7 @@ class _BoundaryEventParent(Simple, BpmnSpecMixin):
return 'Boundary Event Parent'
def _run_hook(self, my_task):
# Clear any events that our children might have received and
# wait for new events
# Clear any events that our children might have received and wait for new events
for child in my_task.children:
if isinstance(child.task_spec, BoundaryEvent):
child.task_spec.event_definition.reset(child)
@ -77,7 +75,6 @@ class _BoundaryEventParent(Simple, BpmnSpecMixin):
return True
def _child_complete_hook(self, child_task):
# If the main child completes, or a cancelling event occurs, cancel any unfinished children
if child_task.task_spec == self.main_child_task_spec or child_task.task_spec.cancel_activity:
for sibling in child_task.parent.children:
@ -85,11 +82,8 @@ class _BoundaryEventParent(Simple, BpmnSpecMixin):
continue
if sibling.task_spec == self.main_child_task_spec or not sibling._is_finished():
sibling.cancel()
for t in child_task.workflow._get_waiting_tasks():
t.task_spec._update(t)
def _predict_hook(self, my_task):
# Events attached to the main task might occur
my_task._sync_children(self.outputs, state=TaskState.MAYBE)
# The main child's state is based on this task's state
@ -120,12 +114,6 @@ class BoundaryEvent(CatchingEvent):
# Boundary events should only be caught while waiting
return super(BoundaryEvent, self).catches(my_task, event_definition, correlations) and my_task.state == TaskState.WAITING
def catch(self, my_task, event_definition):
super(BoundaryEvent, self).catch(my_task, event_definition)
# Would love to get rid of this statement and manage in the workflow
# However, it is not really compatible with how boundary events work.
my_task.run()
class EventBasedGateway(CatchingEvent):

View File

@ -75,7 +75,8 @@ class EventDefinition(object):
# a particular process, but this at least provides a mechanism for distinguishing
# between processes and subprocesses.
if self.external and outer_workflow != workflow:
outer_workflow.catch(event, correlations)
top = workflow._get_outermost_workflow()
top.catch(event, correlations)
else:
workflow.catch(event)

View File

@ -21,7 +21,7 @@ import pickle
from base64 import b64encode, b64decode
from ..workflow import Workflow
from ..util.impl import get_class
from ..task import Task
from ..task import Task, TaskState
from ..operators import (Attrib, PathAttrib, Equal, NotEqual, Operator, GreaterThan, LessThan, Match)
from ..specs.base import TaskSpec
from ..specs.AcquireMutex import AcquireMutex
@ -604,11 +604,18 @@ class DictionarySerializer(Serializer):
workflow.spec = wf_spec
workflow.task_tree = self.deserialize_task(workflow, s_state['task_tree'], reset_specs)
# Re-connect parents
# Re-connect parents and update states if necessary
tasklist = workflow.get_tasks()
root = workflow.get_tasks_from_spec_name('Root')[0]
update_state = root.state != TaskState.COMPLETED
for task in tasklist:
if task.parent is not None:
task.parent = workflow.get_task_from_id(task.parent, tasklist)
if update_state:
if task.state == 32:
task.state = TaskState.COMPLETED
elif task.state == 64:
task.state = TaskState.CANCELLED
if workflow.last_task is not None:
workflow.last_task = workflow.get_task_from_id(s_state['last_task'],tasklist)

View File

@ -302,15 +302,24 @@ class TaskSpec(object):
:rtype: boolean or None
:returns: the value returned by the task spec's run method.
"""
result = self._run_hook(my_task)
# Run user code, if any.
if self.ready_event.emit(my_task.workflow, my_task):
# Assign variables, if so requested.
for assignment in self.post_assign:
assignment.assign(my_task, my_task)
# I'm not sure I like setting the state here. I'd like to handle it in `task` like
# the other transitions, and allow task specific error handling behavior.
# Having a task return a boolean indicating success (or None if it should just wait
# because the task is running) works well for scripts, but not for other types
# This is the easiest way of dealing with all other errors.
try:
result = self._run_hook(my_task)
# Run user code, if any.
if self.ready_event.emit(my_task.workflow, my_task):
# Assign variables, if so requested.
for assignment in self.post_assign:
assignment.assign(my_task, my_task)
self.finished_event.emit(my_task.workflow, my_task)
return result
self.finished_event.emit(my_task.workflow, my_task)
return result
except Exception as exc:
my_task._set_state(TaskState.ERROR)
raise exc
def _run_hook(self, my_task):
"""
@ -371,6 +380,13 @@ class TaskSpec(object):
"""
pass
def _on_error(self, my_task):
self._on_error_hook(my_task)
def _on_error_hook(self, my_task):
"""Can be overridden for task specific error handling"""
pass
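As an illustration only (not part of this commit), a spec can hook the new error path like this; the LoggingTask name and the logging call are hypothetical:

    import logging
    from SpiffWorkflow.specs.Simple import Simple

    class LoggingTask(Simple):
        def _on_error_hook(self, my_task):
            # _on_error() calls this after the task has been moved to TaskState.ERROR,
            # so task-specific cleanup or logging can happen here.
            logging.getLogger(__name__).warning('task %s failed', my_task.task_spec.name)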
@abstractmethod
def serialize(self, serializer, **kwargs):
"""

View File

@ -1,7 +1,7 @@
import os
from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser
from SpiffWorkflow.bpmn.parser.BpmnParser import BpmnValidator, full_tag
from SpiffWorkflow.bpmn.parser.BpmnParser import BpmnValidator, full_tag, ValidationException
from SpiffWorkflow.bpmn.specs.events.EndEvent import EndEvent
from SpiffWorkflow.bpmn.specs.events.StartEvent import StartEvent
@ -53,3 +53,9 @@ class SpiffBpmnParser(BpmnDmnParser):
full_tag('receiveTask'): (SpiffReceiveTaskParser, ReceiveTask),
full_tag('businessRuleTask'): (BusinessRuleTaskParser, BusinessRuleTask)
}
def create_parser(self, node, filename=None, lane=None):
parser = self.PROCESS_PARSER_CLASS(self, node, self.namespaces, self.data_stores, filename=filename, lane=lane)
if parser.get_id() in self.process_parsers:
raise ValidationException(f'Duplicate process ID: {parser.get_id()}', node=node, file_name=filename)
self.process_parsers[parser.get_id()] = parser
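A sketch of what the new check rejects, assuming two hypothetical files that both define a process with id 'my_process':

    parser = SpiffBpmnParser()
    parser.add_bpmn_file('process_a.bpmn')       # defines process id 'my_process'
    parser.add_bpmn_file('process_a_copy.bpmn')  # also defines 'my_process'
    # raises ValidationException: Duplicate process ID: my_process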

View File

@ -1,5 +1,6 @@
from copy import deepcopy
import json
from copy import deepcopy
from SpiffWorkflow.bpmn.specs.ServiceTask import ServiceTask
from SpiffWorkflow.exceptions import WorkflowTaskException
from SpiffWorkflow.spiff.specs.spiff_task import SpiffBpmnTask
@ -42,3 +43,4 @@ class ServiceTask(SpiffBpmnTask, ServiceTask):
raise wte
parsed_result = json.loads(result)
task.data[self._result_variable(task)] = parsed_result
return True

View File

@ -22,7 +22,7 @@ class SpiffBpmnTask(BpmnSpecMixin):
try:
my_task.workflow.script_engine.execute(my_task, script)
except Exception as exc:
my_task._set_state(TaskState.WAITING)
my_task._set_state(TaskState.ERROR)
raise exc
def get_payload(self, my_task, script, expr):

View File

@ -22,11 +22,14 @@ class SubWorkflowTask(DefaultSubWorkflow, SpiffBpmnTask):
def _update_hook(self, my_task):
# Don't really like duplicating this, but we need to run SpiffBpmn update rather than the default
wf = my_task.workflow._get_outermost_workflow(my_task)
if my_task.id not in wf.subprocesses:
SpiffBpmnTask._update_hook(self, my_task)
subprocess = wf.subprocesses.get(my_task.id)
if subprocess is None:
super()._update_hook(my_task)
self.create_workflow(my_task)
self.start_workflow(my_task)
my_task._set_state(TaskState.WAITING)
else:
return subprocess.is_completed()
def _on_complete_hook(self, my_task):
SpiffBpmnTask._on_complete_hook(self, my_task)

View File

@ -66,37 +66,41 @@ class TaskState:
created to allow for visualizing the workflow at a time where
the required decisions have not yet been made.
"""
# Note: The states in this list are ordered in the sequence in which
# they may appear. Do not change.
MAYBE = 1
LIKELY = 2
FUTURE = 4
WAITING = 8
READY = 16
COMPLETED = 32
CANCELLED = 64
STARTED = 32
COMPLETED = 64
ERROR = 128
CANCELLED = 256
FINISHED_MASK = CANCELLED | COMPLETED
DEFINITE_MASK = FUTURE | WAITING | READY
FINISHED_MASK = CANCELLED | ERROR | COMPLETED
DEFINITE_MASK = FUTURE | WAITING | READY | STARTED
PREDICTED_MASK = LIKELY | MAYBE
NOT_FINISHED_MASK = PREDICTED_MASK | DEFINITE_MASK
ANY_MASK = FINISHED_MASK | NOT_FINISHED_MASK
TaskStateNames = {TaskState.FUTURE: 'FUTURE',
TaskState.WAITING: 'WAITING',
TaskState.READY: 'READY',
TaskState.CANCELLED: 'CANCELLED',
TaskState.COMPLETED: 'COMPLETED',
TaskState.LIKELY: 'LIKELY',
TaskState.MAYBE: 'MAYBE'}
TaskStateNames = {
TaskState.FUTURE: 'FUTURE',
TaskState.WAITING: 'WAITING',
TaskState.READY: 'READY',
TaskState.STARTED: 'STARTED',
TaskState.CANCELLED: 'CANCELLED',
TaskState.COMPLETED: 'COMPLETED',
TaskState.ERROR: 'ERROR',
TaskState.LIKELY: 'LIKELY',
TaskState.MAYBE: 'MAYBE'
}
TaskStateMasks = {
TaskState.FINISHED_MASK: 'FINISHED_MASK',
TaskState.DEFINITE_MASK: 'DEFINITE_MASK',
TaskState.PREDICTED_MASK: 'PREDICTED_MASK',
TaskState.NOT_FINISHED_MASK: 'NOT_FINISHED_MASK',
TaskState.ANY_MASK: 'ANY_MASK',
}
TaskState.FINISHED_MASK: 'FINISHED_MASK',
TaskState.DEFINITE_MASK: 'DEFINITE_MASK',
TaskState.PREDICTED_MASK: 'PREDICTED_MASK',
TaskState.NOT_FINISHED_MASK: 'NOT_FINISHED_MASK',
TaskState.ANY_MASK: 'ANY_MASK',
}
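Because these values are bit flags, state checks combine them with bitwise operations; a minimal illustration using the new states:

    # ERROR counts as finished but not completed.
    assert TaskState.ERROR & TaskState.FINISHED_MASK
    assert not TaskState.ERROR & TaskState.NOT_FINISHED_MASK
    # STARTED is grouped with the definite, not-yet-finished states.
    assert TaskState.STARTED & TaskState.DEFINITE_MASK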
class DeprecatedMetaTask(type):
@ -629,10 +633,21 @@ class Task(object, metaclass=DeprecatedMetaTask):
})
metrics.debug('', extra=extra)
if retval is None:
self._set_state(TaskState.WAITING)
# This state is intended to indicate a task that is not finished, but will continue
# in the background without blocking other unrelated tasks (i.e., on other branches).
# It is a distinct state from "waiting" so that `update` does not have to distinguish
# between tasks that can be started and tasks that have already been started.
# Spiff can manage deciding if a task can run, but if a task is set to "started", it will
# have to be tracked independently of the workflow and completed manually when it finishes,
# at least for the time being (I'll probably add polling methods in the future, but I'm not
# exactly sure how they should work).
# I'm adding this state now because I'm adding an error state (which I think there is a
# need for) and don't want to go through the hassle of updating serialization of task states
# twice; doing this at all is going to be painful enough.
self._set_state(TaskState.STARTED)
elif retval is False:
self.error()
else:
# If we add an error state, then we can move the task to COMPLETE or ERROR
# according to the return value.
self.complete()
return retval
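As a sketch of how calling code might deal with tasks left in the new STARTED state; my_background_job_finished is a hypothetical application-side check:

    # Tasks whose _run_hook returned None stay STARTED instead of blocking as WAITING;
    # the application tracks them and completes them once the background work is done.
    workflow.do_engine_steps()
    for task in workflow.get_tasks(TaskState.STARTED):
        if my_background_job_finished(task):
            task.complete()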
@ -652,6 +667,10 @@ class Task(object, metaclass=DeprecatedMetaTask):
self.task_spec._on_complete(self)
self.workflow.last_task = self
def error(self):
self._set_state(TaskState.ERROR)
self.task_spec._on_error(self)
def trigger(self, *args):
"""
If recursive is True, the state is applied to the tree recursively.

View File

@ -46,6 +46,8 @@ class ApprovalsTest(BpmnWorkflowTestCase):
def testRunThroughHappy(self):
self.do_next_named_step('First_Approval_Wins.Manager_Approval')
self.complete_subworkflow()
self.complete_subworkflow()
self.do_next_exclusive_step('Approvals.First_Approval_Wins_Done')
self.do_next_named_step('Approvals.Manager_Approval__P_')
@ -55,11 +57,15 @@ class ApprovalsTest(BpmnWorkflowTestCase):
self.do_next_named_step('Parallel_Approvals_SP.Step1')
self.do_next_named_step('Parallel_Approvals_SP.Manager_Approval')
self.do_next_named_step('Parallel_Approvals_SP.Supervisor_Approval')
self.complete_subworkflow()
self.complete_subworkflow()
self.do_next_exclusive_step('Approvals.Parallel_SP_Done')
def testRunThroughHappyOtherOrders(self):
self.do_next_named_step('First_Approval_Wins.Supervisor_Approval')
self.complete_subworkflow()
self.complete_subworkflow()
self.do_next_exclusive_step('Approvals.First_Approval_Wins_Done')
self.do_next_named_step('Approvals.Supervisor_Approval__P_')
@ -69,11 +75,15 @@ class ApprovalsTest(BpmnWorkflowTestCase):
self.do_next_named_step('Parallel_Approvals_SP.Manager_Approval')
self.do_next_named_step('Parallel_Approvals_SP.Step1')
self.do_next_named_step('Parallel_Approvals_SP.Supervisor_Approval')
self.complete_subworkflow()
self.complete_subworkflow()
self.do_next_exclusive_step('Approvals.Parallel_SP_Done')
def testSaveRestore(self):
self.do_next_named_step('First_Approval_Wins.Manager_Approval')
self.complete_subworkflow()
self.complete_subworkflow()
self.save_restore()
self.do_next_exclusive_step('Approvals.First_Approval_Wins_Done')
@ -86,12 +96,16 @@ class ApprovalsTest(BpmnWorkflowTestCase):
self.do_next_named_step('Parallel_Approvals_SP.Manager_Approval')
self.do_next_exclusive_step('Parallel_Approvals_SP.Step1')
self.do_next_exclusive_step('Parallel_Approvals_SP.Supervisor_Approval')
self.complete_subworkflow()
self.complete_subworkflow()
self.do_next_exclusive_step('Approvals.Parallel_SP_Done')
def testSaveRestoreWaiting(self):
self.do_next_named_step('First_Approval_Wins.Manager_Approval')
self.save_restore()
self.complete_subworkflow()
self.complete_subworkflow()
self.do_next_exclusive_step('Approvals.First_Approval_Wins_Done')
self.save_restore()
@ -108,6 +122,8 @@ class ApprovalsTest(BpmnWorkflowTestCase):
self.save_restore()
self.do_next_exclusive_step('Parallel_Approvals_SP.Supervisor_Approval')
self.save_restore()
self.complete_subworkflow()
self.complete_subworkflow()
self.do_next_exclusive_step('Approvals.Parallel_SP_Done')

View File

@ -118,6 +118,13 @@ class BpmnWorkflowTestCase(unittest.TestCase):
tasks[0].set_data(**set_attribs)
tasks[0].run()
def complete_subworkflow(self):
# A side effect of finer-grained control over task execution is that tasks require more explicit intervention
# to change states. Subworkflow tasks no longer go directly to ready when the subworkflow completes,
# so they may need to be explicitly refreshed to become ready, and then run.
self.workflow.refresh_waiting_tasks()
self.workflow.do_engine_steps()
def save_restore(self):
script_engine = self.workflow.script_engine

View File

@ -6,6 +6,8 @@ from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.exceptions import WorkflowTaskException
from SpiffWorkflow.task import TaskState
from .BpmnWorkflowTestCase import BpmnWorkflowTestCase
__author__ = 'kellym'
@ -34,13 +36,12 @@ class CallActivityTest(BpmnWorkflowTestCase):
self.workflow = BpmnWorkflow(self.spec, self.subprocesses,
script_engine=CustomScriptEngine())
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.assertTrue(self.workflow.is_completed())
self.assertIsInstance(self.workflow.script_engine, CustomScriptEngine)
if save_restore:
self.save_restore()
# We have to reset the script engine after deserialize.
self.workflow.script_engine = CustomScriptEngine()
# Get the subworkflow
sub_task = self.workflow.get_tasks_from_spec_name('Sub_Bpmn_Task')[0]
@ -54,6 +55,7 @@ class CallActivityTest(BpmnWorkflowTestCase):
# data should be removed in the final output as well.
self.workflow = BpmnWorkflow(self.spec, self.subprocesses)
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.assertTrue(self.workflow.is_completed())
self.assertNotIn('remove_this_var', self.workflow.last_task.data.keys())
@ -66,6 +68,8 @@ class CallActivityTest(BpmnWorkflowTestCase):
self.assertEquals(2, len(context.exception.task_trace))
self.assertRegexpMatches(context.exception.task_trace[0], 'Create Data \(.*?call_activity_call_activity.bpmn\)')
self.assertRegexpMatches(context.exception.task_trace[1], 'Get Data Call Activity \(.*?call_activity_with_error.bpmn\)')
task = self.workflow.get_tasks_from_spec_name('Sub_Bpmn_Task')[0]
self.assertEqual(task.state, TaskState.ERROR)
def suite():
return unittest.TestLoader().loadTestsFromTestCase(CallActivityTest)

View File

@ -27,6 +27,8 @@ class CallActivitySubProcessPropTest(BpmnWorkflowTestCase):
def actualTest(self, save_restore=False):
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.complete_subworkflow()
if save_restore:
self.save_restore()
self.assertTrue(self.workflow.is_completed())

View File

@ -38,6 +38,8 @@ class CustomInlineScriptTest(BpmnWorkflowTestCase):
def actual_test(self, save_restore):
if save_restore: self.save_restore()
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.complete_subworkflow()
if save_restore: self.save_restore()
data = self.workflow.last_task.data
self.assertEqual(data['c1'], 'HELLO')
@ -49,8 +51,9 @@ class CustomInlineScriptTest(BpmnWorkflowTestCase):
ready_task.data = {'custom_function': "bill"}
with self.assertRaises(WorkflowTaskException) as e:
self.workflow.do_engine_steps()
self.assertTrue('' in str(e.exception))
self.assertTrue('custom_function' in str(e.exception))
task = self.workflow.get_tasks_from_spec_name('Activity_1y303ko')[0]
self.assertEqual(task.state, TaskState.ERROR)
def suite():

View File

@ -72,6 +72,9 @@ class DataObjectReferenceTest(BpmnWorkflowTestCase):
self.assertNotIn('obj_1', ready_tasks[0].data)
self.assertEqual(self.workflow.data['obj_1'], 'hello')
if save_restore:
self.save_restore()
# Make sure data objects are accessible inside a subprocess
self.workflow.do_engine_steps()
ready_tasks = self.workflow.get_ready_user_tasks()

View File

@ -1,14 +1,11 @@
# -*- coding: utf-8 -*-
import sys
import os
import unittest
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..'))
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
from SpiffWorkflow.exceptions import WorkflowException
from SpiffWorkflow.task import TaskState
from .BpmnWorkflowTestCase import BpmnWorkflowTestCase
__author__ = 'essweine'
@ -25,6 +22,8 @@ class ExclusiveGatewayNoDefaultTest(BpmnWorkflowTestCase):
first = self.workflow.get_tasks_from_spec_name('StartEvent_1')[0]
first.data = { 'x': 1 }
self.assertRaises(WorkflowException, self.workflow.do_engine_steps)
task = self.workflow.get_tasks_from_spec_name('Gateway_CheckValue')[0]
self.assertEqual(task.state, TaskState.ERROR)
def suite():
return unittest.TestLoader().loadTestsFromTestCase(ExclusiveGatewayNoDefaultTest)

View File

@ -62,6 +62,8 @@ class CallActivityDataTest(BpmnWorkflowTestCase):
self.assertNotIn('unused', task.data)
self.complete_subprocess()
# Refreshing causes the subprocess to become ready
self.workflow.refresh_waiting_tasks()
task = self.workflow.get_tasks(TaskState.READY)[0]
# Originals should not change
self.assertEqual(task.data['in_1'], 1)
@ -80,13 +82,11 @@ class CallActivityDataTest(BpmnWorkflowTestCase):
waiting = self.workflow.get_tasks(TaskState.WAITING)
def complete_subprocess(self):
# When we complete, the subworkflow task will move from WAITING to READY
waiting = self.workflow.get_tasks(TaskState.WAITING)
while len(waiting) > 0:
next_task = self.workflow.get_tasks(TaskState.READY)[0]
next_task.run()
waiting = self.workflow.get_tasks(TaskState.WAITING)
# Complete the ready tasks in the subprocess
ready = self.workflow.get_tasks(TaskState.READY)
while len(ready) > 0:
ready[0].run()
ready = self.workflow.get_tasks(TaskState.READY)
class IOSpecOnTaskTest(BpmnWorkflowTestCase):

View File

@ -1,5 +1,6 @@
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.exceptions import WorkflowTaskException
from SpiffWorkflow.task import TaskState
from .BpmnWorkflowTestCase import BpmnWorkflowTestCase
@ -26,6 +27,8 @@ class InclusiveGatewayTest(BpmnWorkflowTestCase):
def testNoPathFromSecondGateway(self):
self.set_data({'v': 0, 'u': -1, 'w': -1})
self.assertRaises(WorkflowTaskException, self.workflow.do_engine_steps)
task = self.workflow.get_tasks_from_spec_name('second')[0]
self.assertEqual(task.state, TaskState.ERROR)
def testParallelCondition(self):
self.set_data({'v': 0, 'u': 1, 'w': 1})

View File

@ -25,9 +25,11 @@ class NestedProcessesTest(BpmnWorkflowTestCase):
self.assertEqual(1, len(self.workflow.get_tasks(TaskState.READY)))
self.do_next_named_step('Action3')
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.complete_subworkflow()
self.complete_subworkflow()
self.save_restore()
self.assertEqual(
0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
def suite():

View File

@ -32,6 +32,7 @@ class ParallelMultipleSplitsTest(BpmnWorkflowTestCase):
self.workflow.do_engine_steps()
self.do_next_named_step('SP 3 - Yes Task')
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.do_next_named_step('Done')
self.workflow.do_engine_steps()

View File

@ -0,0 +1,24 @@
import os
import unittest
from SpiffWorkflow.bpmn.parser.BpmnParser import BpmnParser
from SpiffWorkflow.bpmn.parser.ProcessParser import ProcessParser
def _process_parser(bpmn_filename, process_id):
parser = BpmnParser()
bpmn_file = os.path.join(os.path.dirname(__file__), 'data', bpmn_filename)
parser.add_bpmn_file(bpmn_file)
return parser.get_process_parser(process_id)
class ProcessParserTest(unittest.TestCase):
def testReturnsEmptyListIfNoCallActivities(self):
parser = _process_parser("no-tasks.bpmn", "no_tasks")
assert parser.called_element_ids() == []
def testHandlesSingleCallActivity(self):
parser = _process_parser("single_call_activity.bpmn", "Process_p4pfxhq")
assert parser.called_element_ids() == ["SingleTask_Process"]
def testHandlesMultipleCallActivities(self):
parser = _process_parser("multiple_call_activities.bpmn", "Process_90mmqlw")
assert parser.called_element_ids() == ["Process_sypm122", "Process_diu8ta2", "Process_l14lar1"]

View File

@ -2,7 +2,7 @@ import json
from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import BasePythonScriptEngineEnvironment
from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import BasePythonScriptEngineEnvironment, TaskDataEnvironment
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.task import TaskState
@ -23,10 +23,19 @@ class NonTaskDataExampleEnvironment(BasePythonScriptEngineEnvironment):
self.environment.update(external_methods or {})
exec(script, self.environment)
self.environment = {k: v for k, v in self.environment.items() if k not in external_methods}
return True
def user_defined_values(self):
return {k: v for k, v in self.environment.items() if k not in self.globals}
class AsyncScriptEnvironment(TaskDataEnvironment):
def execute(self, script, context, external_methods=None):
super().execute(script, context, external_methods)
return None
class PythonScriptEngineEnvironmentTest(BpmnWorkflowTestCase):
def setUp(self):
@ -78,3 +87,24 @@ class PythonScriptEngineEnvironmentTest(BpmnWorkflowTestCase):
task_data_len = len(json.dumps(task_data_to_check))
return task_data_len
class StartedTaskTest(BpmnWorkflowTestCase):
def setUp(self):
spec, subprocesses = self.load_workflow_spec('script-start.bpmn', 'Process_cozt5fu')
self.workflow = BpmnWorkflow(spec, subprocesses)
def testStartedState(self):
script_engine_environment = AsyncScriptEnvironment()
script_engine = PythonScriptEngine(environment=script_engine_environment)
self.workflow.script_engine = script_engine
self.workflow.do_engine_steps()
script_task = self.workflow.get_tasks_from_spec_name('script')[0]
self.assertEqual(script_task.state, TaskState.STARTED)
script_task.complete()
manual_task = self.workflow.get_tasks_from_spec_name('manual')[0]
manual_task.run()
self.workflow.do_engine_steps()
end = self.workflow.get_tasks_from_spec_name('End')[0]
self.assertDictEqual(end.data, {'x': 1, 'y': 2, 'z': 3})
self.assertTrue(self.workflow.is_completed())

View File

@ -35,7 +35,7 @@ class ResetSubProcessTest(BpmnWorkflowTestCase):
self.workflow.do_engine_steps()
top_level_task = self.workflow.get_ready_user_tasks()[0]
self.workflow.run_task_from_id(top_level_task.id)
top_level_task.run()
self.workflow.do_engine_steps()
task = self.workflow.get_ready_user_tasks()[0]
self.save_restore()
@ -50,11 +50,11 @@ class ResetSubProcessTest(BpmnWorkflowTestCase):
self.workflow.do_engine_steps()
self.assertEqual(1, len(self.workflow.get_ready_user_tasks()))
task = self.workflow.get_ready_user_tasks()[0]
self.workflow.run_task_from_id(task.id)
task.run()
self.workflow.do_engine_steps()
task = self.workflow.get_ready_user_tasks()[0]
self.assertEqual(task.get_name(),'SubTask2')
self.workflow.run_task_from_id(task.id)
task.run()
self.workflow.do_engine_steps()
task = self.workflow.get_tasks_from_spec_name('Task1')[0]
task.reset_token(self.workflow.last_task.data)
@ -62,19 +62,20 @@ class ResetSubProcessTest(BpmnWorkflowTestCase):
self.reload_save_restore()
task = self.workflow.get_ready_user_tasks()[0]
self.assertEqual(task.get_name(),'Task1')
self.workflow.run_task_from_id(task.id)
task.run()
self.workflow.do_engine_steps()
task = self.workflow.get_ready_user_tasks()[0]
self.assertEqual(task.get_name(),'Subtask2')
self.workflow.run_task_from_id(task.id)
task.run()
self.workflow.do_engine_steps()
task = self.workflow.get_ready_user_tasks()[0]
self.assertEqual(task.get_name(),'Subtask2A')
self.workflow.run_task_from_id(task.id)
task.run()
self.workflow.do_engine_steps()
self.complete_subworkflow()
task = self.workflow.get_ready_user_tasks()[0]
self.assertEqual(task.get_name(),'Task2')
self.workflow.run_task_from_id(task.id)
task.run()
self.workflow.do_engine_steps()
self.assertTrue(self.workflow.is_completed())

View File

@ -0,0 +1,62 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_96f6665" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
<bpmn:process id="Process_90mmqlw" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_0b6y930</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_0b6y930" sourceRef="StartEvent_1" targetRef="Activity_185dcnc" />
<bpmn:callActivity id="Activity_185dcnc" calledElement="Process_sypm122">
<bpmn:incoming>Flow_0b6y930</bpmn:incoming>
<bpmn:outgoing>Flow_0eaeoas</bpmn:outgoing>
</bpmn:callActivity>
<bpmn:sequenceFlow id="Flow_0eaeoas" sourceRef="Activity_185dcnc" targetRef="Activity_0rsppq2" />
<bpmn:callActivity id="Activity_0rsppq2" calledElement="Process_diu8ta2">
<bpmn:incoming>Flow_0eaeoas</bpmn:incoming>
<bpmn:outgoing>Flow_0hqm48x</bpmn:outgoing>
</bpmn:callActivity>
<bpmn:sequenceFlow id="Flow_0hqm48x" sourceRef="Activity_0rsppq2" targetRef="Activity_0d5dido" />
<bpmn:endEvent id="Event_0yflyfj">
<bpmn:incoming>Flow_0vhq9kk</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_0vhq9kk" sourceRef="Activity_0d5dido" targetRef="Event_0yflyfj" />
<bpmn:callActivity id="Activity_0d5dido" calledElement="Process_l14lar1">
<bpmn:incoming>Flow_0hqm48x</bpmn:incoming>
<bpmn:outgoing>Flow_0vhq9kk</bpmn:outgoing>
</bpmn:callActivity>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_90mmqlw">
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1xwhx08_di" bpmnElement="Activity_185dcnc">
<dc:Bounds x="270" y="137" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0c85c6h_di" bpmnElement="Activity_0rsppq2">
<dc:Bounds x="430" y="137" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0yflyfj_di" bpmnElement="Event_0yflyfj">
<dc:Bounds x="752" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1d46rm4_di" bpmnElement="Activity_0d5dido">
<dc:Bounds x="590" y="137" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_0b6y930_di" bpmnElement="Flow_0b6y930">
<di:waypoint x="215" y="177" />
<di:waypoint x="270" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0eaeoas_di" bpmnElement="Flow_0eaeoas">
<di:waypoint x="370" y="177" />
<di:waypoint x="430" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0hqm48x_di" bpmnElement="Flow_0hqm48x">
<di:waypoint x="530" y="177" />
<di:waypoint x="590" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0vhq9kk_di" bpmnElement="Flow_0vhq9kk">
<di:waypoint x="690" y="177" />
<di:waypoint x="752" y="177" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -0,0 +1,26 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_96f6665" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
<bpmn:process id="no_tasks" name="No Tasks" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_184umot</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:endEvent id="Event_0qq9il3">
<bpmn:incoming>Flow_184umot</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_184umot" sourceRef="StartEvent_1" targetRef="Event_0qq9il3" />
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="no_tasks">
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0qq9il3_di" bpmnElement="Event_0qq9il3">
<dc:Bounds x="272" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_184umot_di" bpmnElement="Flow_184umot">
<di:waypoint x="215" y="177" />
<di:waypoint x="272" y="177" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -0,0 +1,87 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_96f6665" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
<bpmn:process id="Process_cozt5fu" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_1r4la8u</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_1r4la8u" sourceRef="StartEvent_1" targetRef="Gateway_1b4ue4p" />
<bpmn:parallelGateway id="Gateway_1b4ue4p">
<bpmn:incoming>Flow_1r4la8u</bpmn:incoming>
<bpmn:outgoing>Flow_0rx8ly6</bpmn:outgoing>
<bpmn:outgoing>Flow_0m6kw0s</bpmn:outgoing>
</bpmn:parallelGateway>
<bpmn:sequenceFlow id="Flow_0rx8ly6" sourceRef="Gateway_1b4ue4p" targetRef="script" />
<bpmn:sequenceFlow id="Flow_0m6kw0s" sourceRef="Gateway_1b4ue4p" targetRef="manual" />
<bpmn:scriptTask id="script" name="script">
<bpmn:incoming>Flow_0rx8ly6</bpmn:incoming>
<bpmn:outgoing>Flow_1wcrqdb</bpmn:outgoing>
<bpmn:script>x, y = 1, 2
z = x + y</bpmn:script>
</bpmn:scriptTask>
<bpmn:manualTask id="manual" name="manual">
<bpmn:incoming>Flow_0m6kw0s</bpmn:incoming>
<bpmn:outgoing>Flow_0npa1g8</bpmn:outgoing>
</bpmn:manualTask>
<bpmn:sequenceFlow id="Flow_1wcrqdb" sourceRef="script" targetRef="Gateway_0lxkg29" />
<bpmn:sequenceFlow id="Flow_0npa1g8" sourceRef="manual" targetRef="Gateway_0lxkg29" />
<bpmn:parallelGateway id="Gateway_0lxkg29">
<bpmn:incoming>Flow_1wcrqdb</bpmn:incoming>
<bpmn:incoming>Flow_0npa1g8</bpmn:incoming>
<bpmn:outgoing>Flow_1vd5x0n</bpmn:outgoing>
</bpmn:parallelGateway>
<bpmn:endEvent id="Event_05ezfux">
<bpmn:incoming>Flow_1vd5x0n</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_1vd5x0n" sourceRef="Gateway_0lxkg29" targetRef="Event_05ezfux" />
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_cozt5fu">
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Gateway_0wzobnw_di" bpmnElement="Gateway_1b4ue4p">
<dc:Bounds x="265" y="152" width="50" height="50" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1ufpwld_di" bpmnElement="script">
<dc:Bounds x="370" y="137" width="100" height="80" />
<bpmndi:BPMNLabel />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0q6915b_di" bpmnElement="manual">
<dc:Bounds x="370" y="250" width="100" height="80" />
<bpmndi:BPMNLabel />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Gateway_1rol38p_di" bpmnElement="Gateway_0lxkg29">
<dc:Bounds x="535" y="265" width="50" height="50" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_05ezfux_di" bpmnElement="Event_05ezfux">
<dc:Bounds x="632" y="272" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_1r4la8u_di" bpmnElement="Flow_1r4la8u">
<di:waypoint x="215" y="177" />
<di:waypoint x="265" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0rx8ly6_di" bpmnElement="Flow_0rx8ly6">
<di:waypoint x="315" y="177" />
<di:waypoint x="370" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0m6kw0s_di" bpmnElement="Flow_0m6kw0s">
<di:waypoint x="290" y="202" />
<di:waypoint x="290" y="290" />
<di:waypoint x="370" y="290" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1wcrqdb_di" bpmnElement="Flow_1wcrqdb">
<di:waypoint x="470" y="177" />
<di:waypoint x="560" y="177" />
<di:waypoint x="560" y="265" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0npa1g8_di" bpmnElement="Flow_0npa1g8">
<di:waypoint x="470" y="290" />
<di:waypoint x="535" y="290" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1vd5x0n_di" bpmnElement="Flow_1vd5x0n">
<di:waypoint x="585" y="290" />
<di:waypoint x="632" y="290" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -0,0 +1,563 @@
{
"serializer_version": "1.1",
"data": {},
"last_task": "3a4a6596-3b3d-4cd5-9724-cd1ccc263676",
"success": true,
"tasks": {
"d10fb568-6104-4a26-9081-e29c8eb42e70": {
"id": "d10fb568-6104-4a26-9081-e29c8eb42e70",
"parent": null,
"children": [
"f0a4e631-c30a-4444-b2b1-2cdc21179c65"
],
"last_state_change": 1681913960.451874,
"state": 32,
"task_spec": "Root",
"triggered": false,
"workflow_name": "main",
"internal_data": {},
"data": {}
},
"f0a4e631-c30a-4444-b2b1-2cdc21179c65": {
"id": "f0a4e631-c30a-4444-b2b1-2cdc21179c65",
"parent": "d10fb568-6104-4a26-9081-e29c8eb42e70",
"children": [
"3468f48a-c493-4731-9484-0821d046a0bc"
],
"last_state_change": 1681913960.4591289,
"state": 32,
"task_spec": "Start",
"triggered": false,
"workflow_name": "main",
"internal_data": {},
"data": {}
},
"3468f48a-c493-4731-9484-0821d046a0bc": {
"id": "3468f48a-c493-4731-9484-0821d046a0bc",
"parent": "f0a4e631-c30a-4444-b2b1-2cdc21179c65",
"children": [
"0366c1b3-d55e-42e4-a6fd-db2d6a1940eb"
],
"last_state_change": 1681913960.460862,
"state": 32,
"task_spec": "StartEvent_1",
"triggered": false,
"workflow_name": "main",
"internal_data": {
"event_fired": true
},
"data": {}
},
"0366c1b3-d55e-42e4-a6fd-db2d6a1940eb": {
"id": "0366c1b3-d55e-42e4-a6fd-db2d6a1940eb",
"parent": "3468f48a-c493-4731-9484-0821d046a0bc",
"children": [
"afd334bf-f987-46b3-b6df-779562c4b4bf",
"ad8b1640-55ac-4ff5-b07e-16c0b7c92976"
],
"last_state_change": 1681913960.4621637,
"state": 32,
"task_spec": "task_1.BoundaryEventParent",
"triggered": false,
"workflow_name": "main",
"internal_data": {},
"data": {}
},
"afd334bf-f987-46b3-b6df-779562c4b4bf": {
"id": "afd334bf-f987-46b3-b6df-779562c4b4bf",
"parent": "0366c1b3-d55e-42e4-a6fd-db2d6a1940eb",
"children": [
"3a4a6596-3b3d-4cd5-9724-cd1ccc263676"
],
"last_state_change": 1681913960.4634154,
"state": 32,
"task_spec": "task_1",
"triggered": false,
"workflow_name": "main",
"internal_data": {},
"data": {}
},
"3a4a6596-3b3d-4cd5-9724-cd1ccc263676": {
"id": "3a4a6596-3b3d-4cd5-9724-cd1ccc263676",
"parent": "afd334bf-f987-46b3-b6df-779562c4b4bf",
"children": [
"8ee8a04c-e784-4419-adb4-7c7f99f72bf6",
"00c5b447-760e-47dc-a77b-cffbf893d098"
],
"last_state_change": 1681913960.4667528,
"state": 32,
"task_spec": "Gateway_1eg2w56",
"triggered": false,
"workflow_name": "main",
"internal_data": {},
"data": {}
},
"8ee8a04c-e784-4419-adb4-7c7f99f72bf6": {
"id": "8ee8a04c-e784-4419-adb4-7c7f99f72bf6",
"parent": "3a4a6596-3b3d-4cd5-9724-cd1ccc263676",
"children": [
"cee7ab8e-d50b-4681-94f2-51bbd8dd6df1"
],
"last_state_change": 1681913960.4669333,
"state": 16,
"task_spec": "task_2",
"triggered": false,
"workflow_name": "main",
"internal_data": {},
"data": {}
},
"cee7ab8e-d50b-4681-94f2-51bbd8dd6df1": {
"id": "cee7ab8e-d50b-4681-94f2-51bbd8dd6df1",
"parent": "8ee8a04c-e784-4419-adb4-7c7f99f72bf6",
"children": [
"4695356f-8db1-48f6-a8bb-d610b2da7a63"
],
"last_state_change": 1681913960.454116,
"state": 4,
"task_spec": "Gateway_1dpu3vt",
"triggered": false,
"workflow_name": "main",
"internal_data": {},
"data": {}
},
"4695356f-8db1-48f6-a8bb-d610b2da7a63": {
"id": "4695356f-8db1-48f6-a8bb-d610b2da7a63",
"parent": "cee7ab8e-d50b-4681-94f2-51bbd8dd6df1",
"children": [
"f4ab0e07-d985-412f-aca0-369bd29797e1"
],
"last_state_change": 1681913960.45428,
"state": 4,
"task_spec": "Event_1deqprp",
"triggered": false,
"workflow_name": "main",
"internal_data": {},
"data": {}
},
"f4ab0e07-d985-412f-aca0-369bd29797e1": {
"id": "f4ab0e07-d985-412f-aca0-369bd29797e1",
"parent": "4695356f-8db1-48f6-a8bb-d610b2da7a63",
"children": [
"e326e6ec-f20d-4171-8133-24b160ad3404"
],
"last_state_change": 1681913960.4544458,
"state": 4,
"task_spec": "main.EndJoin",
"triggered": false,
"workflow_name": "main",
"internal_data": {},
"data": {}
},
"e326e6ec-f20d-4171-8133-24b160ad3404": {
"id": "e326e6ec-f20d-4171-8133-24b160ad3404",
"parent": "f4ab0e07-d985-412f-aca0-369bd29797e1",
"children": [],
"last_state_change": 1681913960.4546103,
"state": 4,
"task_spec": "End",
"triggered": false,
"workflow_name": "main",
"internal_data": {},
"data": {}
},
"00c5b447-760e-47dc-a77b-cffbf893d098": {
"id": "00c5b447-760e-47dc-a77b-cffbf893d098",
"parent": "3a4a6596-3b3d-4cd5-9724-cd1ccc263676",
"children": [
"c57e4be7-670c-41aa-9740-9fff831f6ccd"
],
"last_state_change": 1681913960.4671006,
"state": 16,
"task_spec": "task_3",
"triggered": false,
"workflow_name": "main",
"internal_data": {},
"data": {}
},
"c57e4be7-670c-41aa-9740-9fff831f6ccd": {
"id": "c57e4be7-670c-41aa-9740-9fff831f6ccd",
"parent": "00c5b447-760e-47dc-a77b-cffbf893d098",
"children": [
"ee2c8edc-5d5e-4fc2-ba4b-1801d000c6c3"
],
"last_state_change": 1681913960.454881,
"state": 4,
"task_spec": "Gateway_1dpu3vt",
"triggered": false,
"workflow_name": "main",
"internal_data": {},
"data": {}
},
"ee2c8edc-5d5e-4fc2-ba4b-1801d000c6c3": {
"id": "ee2c8edc-5d5e-4fc2-ba4b-1801d000c6c3",
"parent": "c57e4be7-670c-41aa-9740-9fff831f6ccd",
"children": [
"91a213bf-bef6-40fa-b0a4-7c7f58552184"
],
"last_state_change": 1681913960.4550629,
"state": 4,
"task_spec": "Event_1deqprp",
"triggered": false,
"workflow_name": "main",
"internal_data": {},
"data": {}
},
"91a213bf-bef6-40fa-b0a4-7c7f58552184": {
"id": "91a213bf-bef6-40fa-b0a4-7c7f58552184",
"parent": "ee2c8edc-5d5e-4fc2-ba4b-1801d000c6c3",
"children": [
"8166e255-1d17-4f94-8c0e-82cefe8500fd"
],
"last_state_change": 1681913960.4552596,
"state": 4,
"task_spec": "main.EndJoin",
"triggered": false,
"workflow_name": "main",
"internal_data": {},
"data": {}
},
"8166e255-1d17-4f94-8c0e-82cefe8500fd": {
"id": "8166e255-1d17-4f94-8c0e-82cefe8500fd",
"parent": "91a213bf-bef6-40fa-b0a4-7c7f58552184",
"children": [],
"last_state_change": 1681913960.455456,
"state": 4,
"task_spec": "End",
"triggered": false,
"workflow_name": "main",
"internal_data": {},
"data": {}
},
"ad8b1640-55ac-4ff5-b07e-16c0b7c92976": {
"id": "ad8b1640-55ac-4ff5-b07e-16c0b7c92976",
"parent": "0366c1b3-d55e-42e4-a6fd-db2d6a1940eb",
"children": [],
"last_state_change": 1681913960.463645,
"state": 64,
"task_spec": "signal",
"triggered": false,
"workflow_name": "main",
"internal_data": {
"event_fired": false
},
"data": {}
}
},
"root": "d10fb568-6104-4a26-9081-e29c8eb42e70",
"spec": {
"name": "main",
"description": "main",
"file": "/home/essweine/work/sartography/code/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/diagram_1.bpmn",
"task_specs": {
"Start": {
"id": "main_1",
"name": "Start",
"description": "",
"manual": false,
"internal": false,
"lookahead": 2,
"inputs": [],
"outputs": [
"StartEvent_1"
],
"typename": "StartTask"
},
"main.EndJoin": {
"id": "main_2",
"name": "main.EndJoin",
"description": "",
"manual": false,
"internal": false,
"lookahead": 2,
"inputs": [
"Event_1deqprp"
],
"outputs": [
"End"
],
"typename": "_EndJoin"
},
"End": {
"id": "main_3",
"name": "End",
"description": "",
"manual": false,
"internal": false,
"lookahead": 2,
"inputs": [
"main.EndJoin"
],
"outputs": [],
"typename": "Simple"
},
"StartEvent_1": {
"id": "main_4",
"name": "StartEvent_1",
"description": null,
"manual": false,
"internal": false,
"lookahead": 2,
"inputs": [
"Start"
],
"outputs": [
"task_1.BoundaryEventParent"
],
"lane": null,
"documentation": null,
"position": {
"x": 179.0,
"y": 99.0
},
"data_input_associations": [],
"data_output_associations": [],
"io_specification": null,
"event_definition": {
"internal": false,
"external": false,
"typename": "NoneEventDefinition"
},
"typename": "StartEvent",
"extensions": {}
},
"task_1": {
"id": "main_5",
"name": "task_1",
"description": "Task 1",
"manual": false,
"internal": false,
"lookahead": 2,
"inputs": [
"task_1.BoundaryEventParent"
],
"outputs": [
"Gateway_1eg2w56"
],
"lane": null,
"documentation": null,
"position": {
"x": 270.0,
"y": 77.0
},
"data_input_associations": [],
"data_output_associations": [],
"io_specification": null,
"typename": "NoneTask",
"extensions": {}
},
"task_1.BoundaryEventParent": {
"id": "main_6",
"name": "task_1.BoundaryEventParent",
"description": "",
"manual": false,
"internal": false,
"lookahead": 2,
"inputs": [
"StartEvent_1"
],
"outputs": [
"task_1",
"signal"
],
"lane": null,
"documentation": null,
"position": {
"x": 0,
"y": 0
},
"data_input_associations": [],
"data_output_associations": [],
"io_specification": null,
"main_child_task_spec": "task_1",
"typename": "_BoundaryEventParent"
},
"signal": {
"id": "main_7",
"name": "signal",
"description": "Signal Event",
"manual": false,
"internal": false,
"lookahead": 2,
"inputs": [
"task_1.BoundaryEventParent"
],
"outputs": [],
"lane": null,
"documentation": null,
"position": {
"x": 302.0,
"y": 139.0
},
"data_input_associations": [],
"data_output_associations": [],
"io_specification": null,
"event_definition": {
"internal": true,
"external": true,
"name": "Signal_08n3u9r",
"typename": "SignalEventDefinition"
},
"cancel_activity": true,
"typename": "BoundaryEvent",
"extensions": {}
},
"Gateway_1eg2w56": {
"id": "main_8",
"name": "Gateway_1eg2w56",
"description": null,
"manual": false,
"internal": false,
"lookahead": 2,
"inputs": [
"task_1"
],
"outputs": [
"task_2",
"task_3"
],
"lane": null,
"documentation": null,
"position": {
"x": 425.0,
"y": 92.0
},
"data_input_associations": [],
"data_output_associations": [],
"io_specification": null,
"split_task": null,
"threshold": null,
"cancel": false,
"typename": "ParallelGateway",
"extensions": {}
},
"task_2": {
"id": "main_9",
"name": "task_2",
"description": "Task 2",
"manual": false,
"internal": false,
"lookahead": 2,
"inputs": [
"Gateway_1eg2w56"
],
"outputs": [
"Gateway_1dpu3vt"
],
"lane": null,
"documentation": null,
"position": {
"x": 530.0,
"y": 77.0
},
"data_input_associations": [],
"data_output_associations": [],
"io_specification": null,
"typename": "NoneTask",
"extensions": {}
},
"Gateway_1dpu3vt": {
"id": "main_10",
"name": "Gateway_1dpu3vt",
"description": null,
"manual": false,
"internal": false,
"lookahead": 2,
"inputs": [
"task_2",
"task_3"
],
"outputs": [
"Event_1deqprp"
],
"lane": null,
"documentation": null,
"position": {
"x": 685.0,
"y": 92.0
},
"data_input_associations": [],
"data_output_associations": [],
"io_specification": null,
"split_task": null,
"threshold": null,
"cancel": false,
"typename": "ParallelGateway",
"extensions": {}
},
"Event_1deqprp": {
"id": "main_11",
"name": "Event_1deqprp",
"description": null,
"manual": false,
"internal": false,
"lookahead": 2,
"inputs": [
"Gateway_1dpu3vt"
],
"outputs": [
"main.EndJoin"
],
"lane": null,
"documentation": null,
"position": {
"x": 792.0,
"y": 99.0
},
"data_input_associations": [],
"data_output_associations": [],
"io_specification": null,
"event_definition": {
"internal": false,
"external": false,
"typename": "NoneEventDefinition"
},
"typename": "EndEvent",
"extensions": {}
},
"task_3": {
"id": "main_12",
"name": "task_3",
"description": "Task 3",
"manual": false,
"internal": false,
"lookahead": 2,
"inputs": [
"Gateway_1eg2w56"
],
"outputs": [
"Gateway_1dpu3vt"
],
"lane": null,
"documentation": null,
"position": {
"x": 530.0,
"y": 190.0
},
"data_input_associations": [],
"data_output_associations": [],
"io_specification": null,
"typename": "NoneTask",
"extensions": {}
},
"Root": {
"id": "main_13",
"name": "Root",
"description": "",
"manual": false,
"internal": false,
"lookahead": 2,
"inputs": [],
"outputs": [],
"typename": "Simple"
}
},
"io_specification": null,
"data_objects": {},
"correlation_keys": {},
"typename": "BpmnProcessSpec"
},
"subprocess_specs": {},
"subprocesses": {},
"bpmn_messages": [],
"correlations": {}
}

View File

@@ -0,0 +1,38 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_96f6665" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
<bpmn:process id="Process_p4pfxhq" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_04380wl</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_04380wl" sourceRef="StartEvent_1" targetRef="Activity_0q1z4d4" />
<bpmn:endEvent id="Event_05ktmeu">
<bpmn:incoming>Flow_1io4ukf</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_1io4ukf" sourceRef="Activity_0q1z4d4" targetRef="Event_05ktmeu" />
<bpmn:callActivity id="Activity_0q1z4d4" calledElement="SingleTask_Process">
<bpmn:incoming>Flow_04380wl</bpmn:incoming>
<bpmn:outgoing>Flow_1io4ukf</bpmn:outgoing>
</bpmn:callActivity>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_p4pfxhq">
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_05ktmeu_di" bpmnElement="Event_05ktmeu">
<dc:Bounds x="432" y="159" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0tmeu68_di" bpmnElement="Activity_0q1z4d4">
<dc:Bounds x="270" y="137" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_04380wl_di" bpmnElement="Flow_04380wl">
<di:waypoint x="215" y="177" />
<di:waypoint x="270" y="177" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1io4ukf_di" bpmnElement="Flow_1io4ukf">
<di:waypoint x="370" y="177" />
<di:waypoint x="432" y="177" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@@ -50,6 +50,7 @@ class ActionManagementTest(BpmnWorkflowTestCase):
self.do_next_named_step("Complete Work", choice="Done")
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.assertTrue(self.workflow.is_completed())
@@ -91,6 +92,7 @@ class ActionManagementTest(BpmnWorkflowTestCase):
self.do_next_named_step("Complete Work", choice="Done")
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.assertTrue(self.workflow.is_completed())

View File

@@ -80,6 +80,7 @@ class CallActivityEscalationTest(BpmnWorkflowTestCase):
for task in self.workflow.get_tasks(TaskState.READY):
task.set_data(should_escalate=False)
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.save_restore()
self.workflow.run_all()
self.assertEqual(True, self.workflow.is_completed())

View File

@@ -28,6 +28,7 @@ class MessageInterruptsSpTest(BpmnWorkflowTestCase):
self.do_next_exclusive_step('Do Something In a Subprocess')
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.save_restore()
self.do_next_exclusive_step('Ack Subprocess Done')
@@ -35,8 +36,7 @@ class MessageInterruptsSpTest(BpmnWorkflowTestCase):
self.save_restore()
self.workflow.do_engine_steps()
self.assertEqual(
0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
def testRunThroughInterruptSaveAndRestore(self):
@@ -58,8 +58,7 @@ class MessageInterruptsSpTest(BpmnWorkflowTestCase):
self.save_restore()
self.workflow.do_engine_steps()
self.assertEqual(
0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
def suite():

View File

@@ -30,13 +30,13 @@ class MessageInterruptsTest(BpmnWorkflowTestCase):
self.save_restore()
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.assertEqual(0, len(self.workflow.get_tasks(TaskState.WAITING)))
self.save_restore()
self.workflow.do_engine_steps()
self.assertEqual(
0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
def testRunThroughMessageInterruptSaveAndRestore(self):
@@ -61,9 +61,9 @@ class MessageInterruptsTest(BpmnWorkflowTestCase):
self.save_restore()
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.save_restore()
self.assertEqual(
0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
def testRunThroughHappy(self):
@@ -77,11 +77,11 @@ class MessageInterruptsTest(BpmnWorkflowTestCase):
self.do_next_exclusive_step('Do Something That Takes A Long Time')
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.assertEqual(0, len(self.workflow.get_tasks(TaskState.WAITING)))
self.workflow.do_engine_steps()
self.assertEqual(
0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
def testRunThroughMessageInterrupt(self):
@@ -101,8 +101,8 @@ class MessageInterruptsTest(BpmnWorkflowTestCase):
self.do_next_exclusive_step('Acknowledge Interrupt Message')
self.workflow.do_engine_steps()
self.assertEqual(
0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
self.complete_subworkflow()
self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
def suite():

View File

@@ -31,13 +31,13 @@ class MessageNonInterruptTest(BpmnWorkflowTestCase):
self.save_restore()
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.assertEqual(0, len(self.workflow.get_tasks(TaskState.WAITING)))
self.save_restore()
self.workflow.do_engine_steps()
self.assertEqual(
0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
def testRunThroughMessageInterruptSaveAndRestore(self):
@@ -71,8 +71,8 @@ class MessageNonInterruptTest(BpmnWorkflowTestCase):
self.save_restore()
self.workflow.do_engine_steps()
self.assertEqual(
0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
self.complete_subworkflow()
self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
def testRunThroughHappy(self):
@@ -87,11 +87,11 @@ class MessageNonInterruptTest(BpmnWorkflowTestCase):
self.do_next_exclusive_step('Do Something That Takes A Long Time')
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.assertEqual(0, len(self.workflow.get_tasks(TaskState.WAITING)))
self.workflow.do_engine_steps()
self.assertEqual(
0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
def testRunThroughMessageInterrupt(self):
@@ -118,8 +118,8 @@ class MessageNonInterruptTest(BpmnWorkflowTestCase):
self.do_next_named_step('Do Something That Takes A Long Time')
self.workflow.do_engine_steps()
self.assertEqual(
0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
self.complete_subworkflow()
self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
def testRunThroughMessageInterruptOtherOrder(self):
@@ -145,8 +145,8 @@ class MessageNonInterruptTest(BpmnWorkflowTestCase):
self.do_next_named_step('Acknowledge Non-Interrupt Message')
self.workflow.do_engine_steps()
self.assertEqual(
0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
self.complete_subworkflow()
self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
def testRunThroughMessageInterruptOtherOrderSaveAndRestore(self):
@@ -177,8 +177,8 @@ class MessageNonInterruptTest(BpmnWorkflowTestCase):
self.save_restore()
self.workflow.do_engine_steps()
self.assertEqual(
0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
self.complete_subworkflow()
self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
def suite():

View File

@@ -28,6 +28,7 @@ class MessageNonInterruptsSpTest(BpmnWorkflowTestCase):
self.do_next_exclusive_step('Do Something In a Subprocess')
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.save_restore()
self.do_next_exclusive_step('Ack Subprocess Done')
@@ -35,8 +36,7 @@ class MessageNonInterruptsSpTest(BpmnWorkflowTestCase):
self.save_restore()
self.workflow.do_engine_steps()
self.assertEqual(
0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
def testRunThroughMessageSaveAndRestore(self):
@@ -53,6 +53,7 @@ class MessageNonInterruptsSpTest(BpmnWorkflowTestCase):
self.do_next_named_step('Do Something In a Subprocess')
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.save_restore()
self.do_next_named_step('Ack Subprocess Done')
@@ -64,8 +65,7 @@ class MessageNonInterruptsSpTest(BpmnWorkflowTestCase):
self.save_restore()
self.workflow.do_engine_steps()
self.assertEqual(
0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
def testRunThroughMessageOrder2SaveAndRestore(self):
@@ -81,6 +81,7 @@ class MessageNonInterruptsSpTest(BpmnWorkflowTestCase):
self.workflow.catch(MessageEventDefinition('Test Message'))
self.do_next_named_step('Do Something In a Subprocess')
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.save_restore()
self.do_next_named_step('Acknowledge SP Parallel Message')
@@ -92,8 +93,7 @@ class MessageNonInterruptsSpTest(BpmnWorkflowTestCase):
self.save_restore()
self.workflow.do_engine_steps()
self.assertEqual(
0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
def testRunThroughMessageOrder3SaveAndRestore(self):
@@ -114,6 +114,7 @@ class MessageNonInterruptsSpTest(BpmnWorkflowTestCase):
self.do_next_named_step('Do Something In a Subprocess')
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.save_restore()
self.do_next_named_step('Ack Subprocess Done')
@@ -121,8 +122,7 @@ class MessageNonInterruptsSpTest(BpmnWorkflowTestCase):
self.save_restore()
self.workflow.do_engine_steps()
self.assertEqual(
0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
def suite():

View File

@@ -27,12 +27,11 @@ class MessagesTest(BpmnWorkflowTestCase):
self.workflow.catch(MessageEventDefinition('Test Message'))
self.assertEqual(1, len(self.workflow.get_tasks(TaskState.READY)))
self.assertEqual(
'Test Message', self.workflow.get_tasks(TaskState.READY)[0].task_spec.description)
self.assertEqual('Test Message', self.workflow.get_tasks(TaskState.READY)[0].task_spec.description)
self.workflow.do_engine_steps()
self.assertEqual(
0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
self.complete_subworkflow()
self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
def testRunThroughSaveAndRestore(self):
@@ -52,8 +51,8 @@ class MessagesTest(BpmnWorkflowTestCase):
self.save_restore()
self.workflow.do_engine_steps()
self.assertEqual(
0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
self.complete_subworkflow()
self.assertEqual(0, len(self.workflow.get_tasks(TaskState.READY | TaskState.WAITING)))
def suite():

View File

@@ -19,6 +19,7 @@ class MultipleThrowEventIntermediateCatchTest(BpmnWorkflowTestCase):
if save_restore:
self.save_restore()
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.assertEqual(len(self.workflow.get_waiting_tasks()), 0)
self.assertEqual(self.workflow.is_completed(), True)
@@ -44,4 +45,5 @@ class MultipleThrowEventStartsEventTest(BpmnWorkflowTestCase):
self.assertEqual(len(ready_tasks), 1)
ready_tasks[0].run()
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.assertEqual(self.workflow.is_completed(), True)

View File

@@ -65,7 +65,9 @@ class NITimerDurationTest(BpmnWorkflowTestCase):
task.run()
self.workflow.refresh_waiting_tasks()
self.workflow.do_engine_steps()
self.assertEqual(self.workflow.is_completed(),True)
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.assertEqual(self.workflow.is_completed(), True)
self.assertEqual(self.workflow.last_task.data, {'work_done': 'Yes', 'delay_reason': 'Just Because'})

View File

@@ -25,6 +25,7 @@ class TransactionSubprocessTest(BpmnWorkflowTestCase):
ready_tasks[0].update_data({'quantity': 2})
ready_tasks[0].run()
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.assertIn('value', self.workflow.last_task.data)
# Check that workflow and next task completed

View File

@@ -23,6 +23,10 @@ class Version_1_0_Test(BaseTestCase):
self.assertEqual('Action3', ready_tasks[0].task_spec.description)
ready_tasks[0].run()
wf.do_engine_steps()
wf.refresh_waiting_tasks()
wf.do_engine_steps()
wf.refresh_waiting_tasks()
wf.do_engine_steps()
self.assertEqual(True, wf.is_completed())
@@ -34,12 +38,16 @@ class Version_1_1_Test(BaseTestCase):
wf.script_engine = PythonScriptEngine(environment=TaskDataEnvironment({"time": time}))
wf.refresh_waiting_tasks()
wf.do_engine_steps()
wf.refresh_waiting_tasks()
wf.do_engine_steps()
self.assertTrue(wf.is_completed())
def test_convert_data_specs(self):
fn = os.path.join(self.DATA_DIR, 'serialization', 'v1.1-data.json')
wf = self.serializer.deserialize_json(open(fn).read())
wf.do_engine_steps()
wf.refresh_waiting_tasks()
wf.do_engine_steps()
self.assertTrue(wf.is_completed())
def test_convert_exclusive_gateway(self):
@@ -71,3 +79,16 @@ class Version_1_1_Test(BaseTestCase):
wf.refresh_waiting_tasks()
self.assertTrue(wf.is_completed())
self.assertEqual(wf.last_task.data['counter'], 20)
def test_update_task_states(self):
fn = os.path.join(self.DATA_DIR, 'serialization', 'v1.1-task-states.json')
wf = self.serializer.deserialize_json(open(fn).read())
start = wf.get_tasks_from_spec_name('Start')[0]
self.assertEqual(start.state, TaskState.COMPLETED)
signal = wf.get_tasks_from_spec_name('signal')[0]
self.assertEqual(signal.state, TaskState.CANCELLED)
ready_tasks = wf.get_tasks(TaskState.READY)
while len(ready_tasks) > 0:
ready_tasks[0].run()
ready_tasks = wf.get_tasks(TaskState.READY)
self.assertTrue(wf.is_completed())

View File

@@ -41,7 +41,7 @@ class CallActivityMessageTest(BaseTestCase):
current_task.update_data(step[1])
current_task.run()
self.workflow.do_engine_steps()
self.workflow.refresh_waiting_tasks()
self.complete_subworkflow()
if save_restore: self.save_restore()
ready_tasks = self.workflow.get_tasks(TaskState.READY)
self.assertEqual(self.workflow.is_completed(),True,'Expected the workflow to be complete at this point')

View File

@@ -50,7 +50,7 @@ class ExternalMessageBoundaryTest(BaseTestCase):
self.workflow.catch(MessageEventDefinition('reset', payload='SomethingDrastic', result_var='reset_var'))
ready_tasks = self.workflow.get_tasks(TaskState.READY)
# The user activity was cancelled and we should continue from the boundary event
self.assertEqual(1, len(ready_tasks),'Expected to have two ready tasks')
self.assertEqual(2, len(ready_tasks), 'Expected to have two ready tasks')
event = self.workflow.get_tasks_from_spec_name('Event_19detfv')[0]
event.run()
self.assertEqual('SomethingDrastic', event.data['reset_var'])

View File

@@ -84,6 +84,7 @@ class NIMessageBoundaryTest(BaseTestCase):
task.data['work_completed'] = 'Lots of Stuff'
self.workflow.run_task_from_id(task.id)
self.workflow.do_engine_steps()
self.complete_subworkflow()
self.assertEqual(self.workflow.is_completed(),True)
self.assertEqual(self.workflow.last_task.data,{'Event_InterruptBoundary_Response': 'Youre late!',
'flag_task': 'Yes',

View File

@@ -51,7 +51,7 @@ class ResetTokenTestSubProcess(BaseTestCase):
firsttaskid = task.id
self.assertEqual(step['taskname'], task.task_spec.name)
task.update_data({step['formvar']: step['answer']})
self.workflow.run_task_from_id(task.id)
task.run()
self.workflow.do_engine_steps()
if save_restore:
self.save_restore()
@@ -75,8 +75,9 @@ class ResetTokenTestSubProcess(BaseTestCase):
task = self.workflow.get_ready_user_tasks()[0]
self.assertEqual(step['taskname'], task.task_spec.name)
task.update_data({step['formvar']: step['answer']})
self.workflow.run_task_from_id(task.id)
task.run()
self.workflow.do_engine_steps()
self.complete_subworkflow()
if save_restore:
self.save_restore()

View File

@@ -31,8 +31,9 @@ class SubWorkflowTest(BaseTestCase):
task = self.workflow.get_ready_user_tasks()[0]
self.assertEqual("Activity_"+answer, task.task_spec.name)
task.update_data({"Field"+answer: answer})
self.workflow.run_task_from_id(task.id)
task.run()
self.workflow.do_engine_steps()
self.complete_subworkflow()
if save_restore: self.save_restore()
self.assertEqual(self.workflow.last_task.data,{'FieldA': 'A',

View File

@@ -58,6 +58,8 @@ class PrescriptPostsciptTest(BaseTestCase):
self.workflow.do_engine_steps()
ex = se.exception
self.assertIn("Error occurred in the Pre-Script", str(ex))
task = self.workflow.get_tasks_from_spec_name('Activity_1iqs4li')[0]
self.assertEqual(task.state, TaskState.ERROR)
def call_activity_test(self, save_restore=False):
@@ -82,3 +84,4 @@ class PrescriptPostsciptTest(BaseTestCase):
ready_tasks = self.workflow.get_tasks(TaskState.READY)
ready_tasks[0].set_data(**data)
self.workflow.do_engine_steps()
self.complete_subworkflow()