Merge commit '35fd8ffc0f9c1bf9cbd346464419b6387cb5d4e9' into feature/message_fixes
commit 5f9ba556d1
@@ -21,7 +21,7 @@ import glob
import os

from lxml import etree
from lxml.etree import DocumentInvalid, LxmlError
from lxml.etree import LxmlError

from SpiffWorkflow.bpmn.specs.events.event_definitions import NoneEventDefinition

@@ -106,7 +106,7 @@ class BpmnParser(object):
        full_tag('endEvent'): (EndEventParser, EndEvent),
        full_tag('userTask'): (TaskParser, UserTask),
        full_tag('task'): (TaskParser, NoneTask),
        full_tag('subProcess'): (SubWorkflowParser, CallActivity),
        full_tag('subProcess'): (SubWorkflowParser, SubWorkflowTask),
        full_tag('manualTask'): (TaskParser, ManualTask),
        full_tag('exclusiveGateway'): (ConditionalGatewayParser, ExclusiveGateway),
        full_tag('parallelGateway'): (GatewayParser, ParallelGateway),
@@ -222,8 +222,7 @@ class BpmnParser(object):
        for correlation in bpmn.xpath('.//bpmn:correlationProperty', namespaces=self.namespaces):
            correlation_identifier = correlation.attrib.get("id")
            if correlation_identifier is None:
                raise ValidationException(
                    "Correlation identifier is missing from bpmn xml" )
                raise ValidationException("Correlation identifier is missing from bpmn xml")
            correlation_property_retrieval_expressions = correlation.xpath(
                "//bpmn:correlationPropertyRetrievalExpression", namespaces = self.namespaces)
            if not correlation_property_retrieval_expressions:
@@ -260,9 +259,6 @@ class BpmnParser(object):
        self.process_parsers[parser.get_id()] = parser
        self.process_parsers_by_name[parser.get_name()] = parser

    def get_dependencies(self):
        return self.process_dependencies

    def get_process_dependencies(self):
        return self.process_dependencies

@@ -18,7 +18,8 @@
# 02110-1301 USA

from .ValidationException import ValidationException
from ..specs.BpmnProcessSpec import BpmnProcessSpec, BpmnDataSpecification
from ..specs.BpmnProcessSpec import BpmnProcessSpec
from ..specs.data_spec import DataObject
from .node_parser import NodeParser
from .util import first

@@ -45,6 +46,7 @@ class ProcessParser(NodeParser):
        self.lane = lane
        self.spec = None
        self.process_executable = self.is_executable()
        self.inherited_data_objects = {}

    def get_name(self):
        """
@@ -94,7 +96,7 @@ class ProcessParser(NodeParser):
        if not node_parser or not spec_class:
            raise ValidationException("There is no support implemented for this task type.",
                                      node=node, file_name=self.filename)
        np = node_parser(self, spec_class, node, lane=self.lane)
        np = node_parser(self, spec_class, node, self.nsmap, lane=self.lane)
        task_spec = np.parse_node()
        return task_spec

@@ -106,21 +108,24 @@ class ProcessParser(NodeParser):
            raise ValidationException("No start event found", node=self.node, file_name=self.filename)
        self.spec = BpmnProcessSpec(name=self.get_id(), description=self.get_name(), filename=self.filename)

        # Check for an IO Specification.
        io_spec = first(self.xpath('./bpmn:ioSpecification'))
        if io_spec is not None:
            data_parser = DataSpecificationParser(io_spec, filename=self.filename)
            self.spec.data_inputs, self.spec.data_outputs = data_parser.parse_io_spec()
        self.spec.data_objects.update(self.inherited_data_objects)

        # Get the data objects
        for obj in self.xpath('./bpmn:dataObject'):
            data_parser = DataSpecificationParser(obj, filename=self.filename)
            data_object = data_parser.parse_data_object()
            data_object = self.parse_data_object(obj)
            self.spec.data_objects[data_object.name] = data_object

        # Check for an IO Specification.
        io_spec = first(self.xpath('./bpmn:ioSpecification'))
        if io_spec is not None:
            self.spec.io_specification = self.parse_io_spec()

        for node in start_node_list:
            self.parse_node(node)

    def parse_data_object(self, obj):
        return DataObject(obj.get('id'), obj.get('name'))

    def get_spec(self):
        """
        Parse this process (if it has not already been parsed), and return the
@@ -129,17 +134,3 @@ class ProcessParser(NodeParser):
        if self.spec is None:
            self._parse()
        return self.spec


class DataSpecificationParser(NodeParser):

    def parse_io_spec(self):
        inputs, outputs = [], []
        for elem in self.xpath('./bpmn:dataInput'):
            inputs.append(BpmnDataSpecification(elem.get('id'), elem.get('name')))
        for elem in self.xpath('./bpmn:dataOutput'):
            outputs.append(BpmnDataSpecification(elem.get('id'), elem.get('name')))
        return inputs, outputs

    def parse_data_object(self):
        return BpmnDataSpecification(self.node.get('id'), self.node.get('name'))

@@ -17,27 +17,18 @@
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA

import sys
import traceback
from .ValidationException import ValidationException
from ..specs.NoneTask import NoneTask
from ..specs.ScriptTask import ScriptTask
from ..specs.UserTask import UserTask
from ..specs.events.IntermediateEvent import _BoundaryEventParent
from ..specs.events.event_definitions import CancelEventDefinition
from ..specs.MultiInstanceTask import getDynamicMIClass
from ..specs.SubWorkflowTask import CallActivity, TransactionSubprocess, SubWorkflowTask
from ..specs.MultiInstanceTask import StandardLoopTask, SequentialMultiInstanceTask, ParallelMultiInstanceTask
from ..specs.SubWorkflowTask import TransactionSubprocess
from ..specs.ExclusiveGateway import ExclusiveGateway
from ..specs.InclusiveGateway import InclusiveGateway
from ...dmn.specs.BusinessRuleTask import BusinessRuleTask
from ...operators import Attrib, PathAttrib
from .util import one, first
from ..specs.data_spec import TaskDataReference

from .util import one
from .node_parser import NodeParser

STANDARDLOOPCOUNT = '25'

CAMUNDA_MODEL_NS = 'http://camunda.org/schema/1.0/bpmn'


class TaskParser(NodeParser):
    """
@@ -48,6 +39,12 @@ class TaskParser(NodeParser):
    outgoing transitions, once the child tasks have all been parsed.
    """

    # I hate myself for this. I wanted to at least relegate it to the top-level
    # parser where the rest of the similar nonsense is, but it's inaccessible here.
    STANDARD_LOOP_CLASS = StandardLoopTask
    PARALLEL_MI_CLASS = ParallelMultiInstanceTask
    SEQUENTIAL_MI_CLASS = SequentialMultiInstanceTask

    def __init__(self, process_parser, spec_class, node, nsmap=None, lane=None):
        """
        Constructor.
@@ -63,64 +60,97 @@ class TaskParser(NodeParser):
        self.spec_class = spec_class
        self.spec = self.process_parser.spec

    def _set_multiinstance_attributes(self, is_sequential, expanded, loop_count,
            loop_task=False, element_var=None, collection=None, completion_condition=None):
    # This should be replaced with its own task parser (though I'm not sure how feasible this is given
    # the current parser achitecture). We should also consider separate classes for loop vs
    # multiinstance because having all these optional attributes is a nightmare
    def _copy_task_attrs(self, original):

        if not isinstance(self.task, (NoneTask, UserTask, BusinessRuleTask, ScriptTask, CallActivity, SubWorkflowTask)):
            raise ValidationException(
                f'Unsupported MultiInstance Task: {self.task.__class__}',
                node=self.node,
                file_name=self.filename)
        self.task.inputs = original.inputs
        self.task.outputs = original.outputs
        self.task.io_specification = original.io_specification
        self.task.data_input_associations = original.data_input_associations
        self.task.data_output_associations = original.data_output_associations
        self.task.description = original.description

        self.task.loopTask = loop_task
        self.task.isSequential = is_sequential
        self.task.expanded = expanded
        # make dot notation compatible with bmpmn path notation.
        self.task.times = PathAttrib(loop_count.replace('.', '/')) if loop_count.find('.') > 0 else Attrib(loop_count)
        self.task.elementVar = element_var
        self.task.collection = collection
        self.task.completioncondition = completion_condition
        original.inputs = [self.task]
        original.outputs = []
        original.io_specification = None
        original.data_input_associations = []
        original.data_output_associations = []
        original.name = f'{original.name} [child]'
        self.task.task_spec = original.name
        self.spec.task_specs[original.name] = original

        self.task.prevtaskclass = self.task.__module__ + "." + self.task.__class__.__name__
        newtaskclass = getDynamicMIClass(self.get_id(),self.task.__class__)
        self.task.__class__ = newtaskclass
    def _add_loop_task(self, loop_characteristics):

    def _detect_multiinstance(self):
        maximum = loop_characteristics.attrib.get('loopMaximum')
        if maximum is not None:
            maximum = int(maximum)
        condition = self.xpath('./bpmn:standardLoopCharacteristics/bpmn:loopCondition')
        condition = condition[0].text if len(condition) > 0 else None
        test_before = loop_characteristics.get('testBefore', 'false') == 'true'
        if maximum is None and condition is None:
            self.raise_validation_exception('A loopMaximum or loopCondition must be specified for Loop Tasks')

        multiinstance_element = first(self.xpath('./bpmn:multiInstanceLoopCharacteristics'))
        if multiinstance_element is not None:
            is_sequential = multiinstance_element.get('isSequential') == 'true'
        original = self.spec.task_specs.pop(self.task.name)
        self.task = self.STANDARD_LOOP_CLASS(self.spec, original.name, '', maximum, condition, test_before)
        self._copy_task_attrs(original)

            element_var_text = multiinstance_element.attrib.get('{' + CAMUNDA_MODEL_NS + '}elementVariable')
            collection_text = multiinstance_element.attrib.get('{' + CAMUNDA_MODEL_NS + '}collection')
    def _add_multiinstance_task(self, loop_characteristics):

        sequential = loop_characteristics.get('isSequential') == 'true'
        prefix = 'bpmn:multiInstanceLoopCharacteristics'
        cardinality = self.xpath(f'./{prefix}/bpmn:loopCardinality')
        loop_input = self.xpath(f'./{prefix}/bpmn:loopDataInputRef')
        if len(cardinality) == 0 and len(loop_input) == 0:
            self.raise_validation_exception("A multiinstance task must specify a cardinality or a loop input data reference")
        elif len(cardinality) > 0 and len(loop_input) > 0:
            self.raise_validation_exception("A multiinstance task must specify exactly one of cardinality or loop input data reference")
        cardinality = cardinality[0].text if len(cardinality) > 0 else None

            loop_cardinality = first(self.xpath('./bpmn:multiInstanceLoopCharacteristics/bpmn:loopCardinality'))
            if loop_cardinality is not None:
                loop_count = loop_cardinality.text
            elif collection_text is not None:
                loop_count = collection_text
        loop_input = loop_input[0].text if len(loop_input) > 0 else None
        if loop_input is not None:
            if self.task.io_specification is not None:
                try:
                    loop_input = [v for v in self.task.io_specification.data_inputs if v.name == loop_input][0]
                except:
                    self.raise_validation_exception('The loop input data reference is missing from the IO specification')
            else:
                loop_count = '1'
                loop_input = TaskDataReference(loop_input)

            if collection_text is not None:
                collection = PathAttrib(collection_text.replace('.', '/')) if collection_text.find('.') > 0 else Attrib(collection_text)
        input_item = self.xpath(f'./{prefix}/bpmn:inputDataItem')
        input_item = self.create_data_spec(input_item[0], TaskDataReference) if len(input_item) > 0 else None

        loop_output = self.xpath(f'./{prefix}/bpmn:loopDataOutputRef')
        loop_output = loop_output[0].text if len(loop_output) > 0 else None
        if loop_output is not None:
            if self.task.io_specification is not None:
                try:
                    refs = set(self.task.io_specification.data_inputs + self.task.io_specification.data_outputs)
                    loop_output = [v for v in refs if v.name == loop_output][0]
                except:
                    self.raise_validation_exception('The loop output data reference is missing from the IO specification')
            else:
                collection = None
                loop_output = TaskDataReference(loop_output)

            completion_condition = first(self.xpath('./bpmn:multiInstanceLoopCharacteristics/bpmn:completionCondition'))
            if completion_condition is not None:
                completion_condition = completion_condition.text
        output_item = self.xpath(f'./{prefix}/bpmn:outputDataItem')
        output_item = self.create_data_spec(output_item[0], TaskDataReference) if len(output_item) > 0 else None

            self._set_multiinstance_attributes(is_sequential, 1, loop_count,
                element_var=element_var_text,
                collection=collection,
                completion_condition=completion_condition)
        condition = self.xpath(f'./{prefix}/bpmn:completionCondition')
        condition = condition[0].text if len(condition) > 0 else None

        elif len(self.xpath('./bpmn:standardLoopCharacteristics')) > 0:
            self._set_multiinstance_attributes(True, 25, STANDARDLOOPCOUNT, loop_task=True)
        original = self.spec.task_specs.pop(self.task.name)
        params = {
            'task_spec': '',
            'cardinality': cardinality,
            'data_input': loop_input,
            'data_output':loop_output,
            'input_item': input_item,
            'output_item': output_item,
            'condition': condition,
        }
        if sequential:
            self.task = self.SEQUENTIAL_MI_CLASS(self.spec, original.name, **params)
        else:
            self.task = self.PARALLEL_MI_CLASS(self.spec, original.name, **params)
        self._copy_task_attrs(original)

    def _add_boundary_event(self, children):

@@ -133,9 +163,7 @@ class TaskParser(NodeParser):
            child = self.process_parser.parse_node(event)
            if isinstance(child.event_definition, CancelEventDefinition) \
                    and not isinstance(self.task, TransactionSubprocess):
                raise ValidationException('Cancel Events may only be used with transactions',
                    node=self.node,
                    file_name=self.filename)
                self.raise_validation_exception('Cancel Events may only be used with transactions')
            parent.connect(child)
        return parent

@@ -153,7 +181,17 @@ class TaskParser(NodeParser):
            self.task.data_input_associations = self.parse_incoming_data_references()
            self.task.data_output_associations = self.parse_outgoing_data_references()

            self._detect_multiinstance()
            io_spec = self.xpath('./bpmn:ioSpecification')
            if len(io_spec) > 0:
                self.task.io_specification = self.parse_io_spec()

            loop_characteristics = self.xpath('./bpmn:standardLoopCharacteristics')
            if len(loop_characteristics) > 0:
                self._add_loop_task(loop_characteristics[0])

            mi_loop_characteristics = self.xpath('./bpmn:multiInstanceLoopCharacteristics')
            if len(mi_loop_characteristics) > 0:
                self._add_multiinstance_task(mi_loop_characteristics[0])

            boundary_event_nodes = self.doc_xpath('.//bpmn:boundaryEvent[@attachedToRef="%s"]' % self.get_id())
            if boundary_event_nodes:
@@ -164,21 +202,14 @@ class TaskParser(NodeParser):
            children = []
            outgoing = self.doc_xpath('.//bpmn:sequenceFlow[@sourceRef="%s"]' % self.get_id())
            if len(outgoing) > 1 and not self.handles_multiple_outgoing():
                raise ValidationException(
                    'Multiple outgoing flows are not supported for '
                    'tasks of type',
                    node=self.node,
                    file_name=self.filename)
                self.raise_validation_exception('Multiple outgoing flows are not supported for tasks of type')
            for sequence_flow in outgoing:
                target_ref = sequence_flow.get('targetRef')
                try:
                    target_node = one(self.doc_xpath('.//bpmn:*[@id="%s"]'% target_ref))
                except:
                    raise ValidationException(
                        'When looking for a task spec, we found two items, '
                        'perhaps a form has the same ID? (%s)' % target_ref,
                        node=self.node,
                        file_name=self.filename)
                    self.raise_validation_exception('When looking for a task spec, we found two items, '
                        'perhaps a form has the same ID? (%s)' % target_ref)

                c = self.process_parser.parse_node(target_node)
                position = c.position
@@ -233,3 +264,4 @@ class TaskParser(NodeParser):
        outgoing sequence flows.
        """
        return False

@@ -19,8 +19,6 @@ from ..specs.events.event_definitions import (
    NoneEventDefinition
)


CAMUNDA_MODEL_NS = 'http://camunda.org/schema/1.0/bpmn'
CANCEL_EVENT_XPATH = './/bpmn:cancelEventDefinition'
ERROR_EVENT_XPATH = './/bpmn:errorEventDefinition'
ESCALATION_EVENT_XPATH = './/bpmn:escalationEventDefinition'
@@ -29,6 +27,7 @@ MESSAGE_EVENT_XPATH = './/bpmn:messageEventDefinition'
SIGNAL_EVENT_XPATH = './/bpmn:signalEventDefinition'
TIMER_EVENT_XPATH = './/bpmn:timerEventDefinition'


class EventDefinitionParser(TaskParser):
    """This class provvides methods for parsing different event definitions."""

@@ -1,14 +1,13 @@
from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException
from SpiffWorkflow.bpmn.specs.data_spec import TaskDataReference, BpmnIoSpecification
from .util import first

DEFAULT_NSMAP = {
    'bpmn': 'http://www.omg.org/spec/BPMN/20100524/MODEL',
    'bpmndi': 'http://www.omg.org/spec/BPMN/20100524/DI',
    'dc': 'http://www.omg.org/spec/DD/20100524/DC',
}

CAMUNDA_MODEL_NS = 'http://camunda.org/schema/1.0/bpmn'

class NodeParser:

@@ -30,6 +29,12 @@ class NodeParser:
        root = self.node.getroottree().getroot()
        return self._xpath(root, xpath, extra_ns)

    def attribute(self, attribute, namespace=None, node=None):
        if node is None:
            node = self.node
        prefix = '{' + self.nsmap.get(namespace or 'bpmn') + '}'
        return node.attrib.get(f'{prefix}{attribute}')

    def parse_condition(self, sequence_flow):
        expression = first(self._xpath(sequence_flow, './/bpmn:conditionExpression'))
        return expression.text if expression is not None else None
@@ -41,7 +46,7 @@ class NodeParser:

    def parse_incoming_data_references(self):
        specs = []
        for name in self.xpath('.//bpmn:dataInputAssociation/bpmn:sourceRef'):
        for name in self.xpath('./bpmn:dataInputAssociation/bpmn:sourceRef'):
            ref = first(self.doc_xpath(f".//bpmn:dataObjectReference[@id='{name.text}']"))
            if ref is not None and ref.get('dataObjectRef') in self.process_parser.spec.data_objects:
                specs.append(self.process_parser.spec.data_objects[ref.get('dataObjectRef')])
@@ -51,7 +56,7 @@ class NodeParser:

    def parse_outgoing_data_references(self):
        specs = []
        for name in self.xpath('.//bpmn:dataOutputAssociation/bpmn:targetRef'):
        for name in self.xpath('./bpmn:dataOutputAssociation/bpmn:targetRef'):
            ref = first(self.doc_xpath(f".//bpmn:dataObjectReference[@id='{name.text}']"))
            if ref is not None and ref.get('dataObjectRef') in self.process_parser.spec.data_objects:
                specs.append(self.process_parser.spec.data_objects[ref.get('dataObjectRef')])
@@ -59,13 +64,29 @@ class NodeParser:
                raise ValidationException(f'Cannot resolve dataOutputAssociation {name}', self.node, self.filename)
        return specs

    def parse_io_spec(self):
        data_refs = {}
        for elem in self.xpath('./bpmn:ioSpecification/bpmn:dataInput'):
            ref = self.create_data_spec(elem, TaskDataReference)
            data_refs[ref.name] = ref
        for elem in self.xpath('./bpmn:ioSpecification/bpmn:dataOutput'):
            ref = self.create_data_spec(elem, TaskDataReference)
            data_refs[ref.name] = ref

        inputs, outputs = [], []
        for ref in self.xpath('./bpmn:ioSpecification/bpmn:inputSet/bpmn:dataInputRefs'):
            if ref.text in data_refs:
                inputs.append(data_refs[ref.text])
        for ref in self.xpath('./bpmn:ioSpecification/bpmn:outputSet/bpmn:dataOutputRefs'):
            if ref.text in data_refs:
                outputs.append(data_refs[ref.text])
        return BpmnIoSpecification(inputs, outputs)

    def create_data_spec(self, item, cls):
        return cls(item.attrib.get('id'), item.attrib.get('name'))

    def parse_extensions(self, node=None):
        extensions = {}
        extra_ns = {'camunda': CAMUNDA_MODEL_NS}
        extension_nodes = self.xpath('.//bpmn:extensionElements/camunda:properties/camunda:property', extra_ns)
        for ex_node in extension_nodes:
            extensions[ex_node.get('name')] = ex_node.get('value')
        return extensions
        return {}

    def _get_lane(self):
        noderef = first(self.doc_xpath(f".//bpmn:flowNodeRef[text()='{self.get_id()}']"))
@@ -84,3 +105,6 @@ class NodeParser:
        else:
            nsmap = self.nsmap
        return node.xpath(xpath, namespaces=nsmap)

    def raise_validation_exception(self, message):
        raise ValidationException(message, self.node, self.filename)

@@ -16,13 +16,9 @@
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA

from lxml import etree

from .ValidationException import ValidationException
from .TaskParser import TaskParser
from .util import one, DEFAULT_NSMAP

CAMUNDA_MODEL_NS = 'http://camunda.org/schema/1.0/bpmn'
from .util import one


class GatewayParser(TaskParser):
@@ -61,30 +57,24 @@ class SubprocessParser:
        workflow_start_event = task_parser.xpath('./bpmn:startEvent')
        workflow_end_event = task_parser.xpath('./bpmn:endEvent')
        if len(workflow_start_event) != 1:
            raise ValidationException(
                'Multiple Start points are not allowed in SubWorkflow Task',
            raise ValidationException('Multiple Start points are not allowed in SubWorkflow Task',
                node=task_parser.node,
                file_name=task_parser.filename)
        if len(workflow_end_event) == 0:
            raise ValidationException(
                'A SubWorkflow Must contain an End event',
            raise ValidationException('A SubWorkflow Must contain an End event',
                node=task_parser.node,
                file_name=task_parser.filename)

        nsmap = DEFAULT_NSMAP.copy()
        nsmap['camunda'] = "http://camunda.org/schema/1.0/bpmn"
        nsmap['di'] = "http://www.omg.org/spec/DD/20100524/DI"

        # Create wrapper xml for the subworkflow
        for ns, val in nsmap.items():
            etree.register_namespace(ns, val)

        task_parser.process_parser.parser.create_parser(
            task_parser.node,
            filename=task_parser.filename,
            lane=task_parser.lane
        )
        return task_parser.node.get('id')
        spec_id = task_parser.node.get('id')
        # This parser makes me want to cry
        spec_parser = task_parser.process_parser.parser.process_parsers[spec_id]
        spec_parser.inherited_data_objects.update(task_parser.process_parser.spec.data_objects)
        return spec_id

    @staticmethod
    def get_call_activity_spec(task_parser):

@@ -0,0 +1,29 @@
from ..specs.data_spec import DataObject, TaskDataReference, BpmnIoSpecification
from .helpers.spec import BpmnSpecConverter, BpmnDataSpecificationConverter


class BpmnDataObjectConverter(BpmnDataSpecificationConverter):
    def __init__(self, registry):
        super().__init__(DataObject, registry)


class TaskDataReferenceConverter(BpmnDataSpecificationConverter):
    def __init__(self, registry):
        super().__init__(TaskDataReference, registry)


class IOSpecificationConverter(BpmnSpecConverter):
    def __init__(self, registry):
        super().__init__(BpmnIoSpecification, registry)

    def to_dict(self, spec):
        return {
            'data_inputs': [self.registry.convert(item) for item in spec.data_inputs],
            'data_outputs': [self.registry.convert(item) for item in spec.data_outputs],
        }

    def from_dict(self, dct):
        return BpmnIoSpecification(
            data_inputs=[self.registry.restore(item) for item in dct['data_inputs']],
            data_outputs=[self.registry.restore(item) for item in dct['data_outputs']],
        )
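
A minimal usage sketch of the converters above (not part of this commit); 'registry' and 'spec' are assumed to be an existing converter registry and an already-parsed BpmnIoSpecification:

# Sketch only, not part of this commit; 'registry' and 'spec' are assumed to exist.
converter = IOSpecificationConverter(registry)
dct = converter.to_dict(spec)        # {'data_inputs': [...], 'data_outputs': [...]}
restored = converter.from_dict(dct)  # back to a BpmnIoSpecification
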
@@ -144,10 +144,10 @@ class TaskSpecConverter(BpmnSpecConverter):
        return {
            'lane': spec.lane,
            'documentation': spec.documentation,
            'loopTask': spec.loopTask,
            'position': spec.position,
            'data_input_associations': [ self.registry.convert(obj) for obj in spec.data_input_associations ],
            'data_output_associations': [ self.registry.convert(obj) for obj in spec.data_output_associations ],
            'io_specification': self.registry.convert(spec.io_specification),
        }

    def get_join_attributes(self, spec):
@@ -174,6 +174,21 @@ class TaskSpecConverter(BpmnSpecConverter):
        """
        return {'spec': spec.spec}

    def get_standard_loop_attributes(self, spec):
        """Extracts attributes for standard loop tasks.

        :param spec: the task spec to be converted

        Returns:
            a dictionary of standard loop task spec attributes
        """
        return {
            'task_spec': spec.task_spec,
            'maximum': spec.maximum,
            'condition': spec.condition,
            'test_before': spec.test_before,
        }

    def task_spec_from_dict(self, dct, include_data=False):
        """
        Creates a task spec based on the supplied dictionary. It handles setting the default
@@ -204,9 +219,9 @@ class TaskSpecConverter(BpmnSpecConverter):
        if isinstance(spec, BpmnSpecMixin):
            spec.documentation = dct.pop('documentation', None)
            spec.lane = dct.pop('lane', None)
            spec.loopTask = dct.pop('loopTask', False)
            spec.data_input_associations = self.registry.restore(dct.pop('data_input_associations', []))
            spec.data_output_associations = self.registry.restore(dct.pop('data_output_associations', []))
            spec.io_specification = self.registry.restore(dct.pop('io_specification', None))

        return spec

@@ -0,0 +1,4 @@
from SpiffWorkflow.exceptions import WorkflowException

class VersionMigrationError(WorkflowException):
    pass
@@ -0,0 +1,34 @@
def move_subprocesses_to_top(dct):
    subprocesses = dict((sp, { 'tasks': {}, 'root': None, 'data': {}, 'success': True }) for sp in dct['subprocesses'])

    # Move the tasks out of the top-level
    for sp, task_ids in dct['subprocesses'].items():
        for task_id in task_ids:
            if task_id in dct['tasks']:
                subprocesses[sp]['tasks'][task_id] = dct['tasks'].pop(task_id)
                if subprocesses[sp]['root'] is None:
                    subprocesses[sp]['root'] = task_id
                    subprocesses[sp]['tasks'][task_id]['parent'] = None

    # Fix up th task and workflow states
    waiting = []
    for sp in subprocesses:
        completed = sorted(
            [t for t in subprocesses[sp]['tasks'].values() if t['state'] in [32, 64] ],
            key=lambda t: t['last_state_change']
        )
        if len(completed) > 0:
            subprocesses[sp]['last_task'] = completed[-1]
        # If there are uncompleted tasks, set the subworkflow task state to waiting
        if len(completed) < len(subprocesses[sp]['tasks']):
            waiting.append(sp)

    # Check the top level and all subprocesses for waiting tasks
    # Also remove any children that are no longer in the tree
    for sp in [dct] + list(subprocesses.values()):
        for task_id, task in sp['tasks'].items():
            if task_id in waiting:
                task['state'] = 8
            task['children'] = [ c for c in task['children'] if c in sp['tasks'] ]

    dct['subprocesses'] = subprocesses
@@ -0,0 +1,113 @@
from datetime import datetime, timedelta

from SpiffWorkflow.task import TaskState
from SpiffWorkflow.bpmn.specs.events.event_definitions import LOCALTZ

from .exceptions import VersionMigrationError

def td_to_iso(td):
    total = td.total_seconds()
    v1, seconds = total // 60, total % 60
    v2, minutes = v1 // 60, v1 % 60
    days, hours = v2 // 24, v2 % 60
    return f"P{days:.0f}DT{hours:.0f}H{minutes:.0f}M{seconds}S"

def convert_timer_expressions(dct):

    message = "Unable to convert time specifications for {spec}. This most likely because the values are set during workflow execution."

    has_timer = lambda ts: 'event_definition' in ts and ts['event_definition']['typename'] in [ 'CycleTimerEventDefinition', 'TimerEventDefinition']
    for spec in [ ts for ts in dct['spec']['task_specs'].values() if has_timer(ts) ]:
        spec['event_definition']['name'] = spec['event_definition'].pop('label')
        if spec['event_definition']['typename'] == 'TimerEventDefinition':
            expr = spec['event_definition'].pop('dateTime')
            try:
                dt = eval(expr)
                if isinstance(dt, datetime):
                    spec['event_definition']['expression'] = f"'{dt.isoformat()}'"
                    spec['event_definition']['typename'] = 'TimeDateEventDefinition'
                elif isinstance(dt, timedelta):
                    spec['event_definition']['expression'] = f"'{td_to_iso(dt)}'"
                    spec['event_definition']['typename'] = 'DurationTimerEventDefinition'
            except:
                raise VersionMigrationError(message.format(spec=spec['name']))

        if spec['event_definition']['typename'] == 'CycleTimerEventDefinition':

            tasks = [ t for t in dct['tasks'].values() if t['task_spec'] == spec['name'] ]
            task = tasks[0] if len(tasks) > 0 else None

            expr = spec['event_definition'].pop('cycle_definition')
            try:
                repeat, duration = eval(expr)
                spec['event_definition']['expression'] = f"'R{repeat}/{td_to_iso(duration)}'"
                if task is not None:
                    cycles_complete = task['data'].pop('repeat_count', 0)
                    start_time = task['internal_data'].pop('start_time', None)
                    if start_time is not None:
                        dt = datetime.fromisoformat(start_time)
                        task['internal_data']['event_value'] = {
                            'cycles': repeat - cycles_complete,
                            'next': datetime.combine(dt.date(), dt.time(), LOCALTZ).isoformat(),
                            'duration': duration.total_seconds(),
                        }
            except:
                raise VersionMigrationError(message.format(spec=spec['name']))

            if spec['typename'] == 'StartEvent':
                spec['outputs'].remove(spec['name'])
                if task is not None:
                    children = [ dct['tasks'][c] for c in task['children'] ]
                    # Formerly cycles were handled by looping back and reusing the tasks so this removes the extra tasks
                    remove = [ c for c in children if c['task_spec'] == task['task_spec']][0]
                    for task_id in remove['children']:
                        child = dct['tasks'][task_id]
                        if child['task_spec'].startswith('return') or child['state'] != TaskState.COMPLETED:
                            dct['tasks'].pop(task_id)
                        else:
                            task['children'].append(task_id)
                    task['children'].remove(remove['id'])
                    dct['tasks'].pop(remove['id'])

def add_default_condition_to_cond_task_specs(dct):

    for spec in [ts for ts in dct['spec']['task_specs'].values() if ts['typename'] == 'ExclusiveGateway']:
        if (None, spec['default_task_spec']) not in spec['cond_task_specs']:
            spec['cond_task_specs'].append({'condition': None, 'task_spec': spec['default_task_spec']})

def create_data_objects_and_io_specs(dct):

    def update_data_specs(spec):
        for obj in spec.get('data_objects', {}).values():
            obj['typename'] = 'DataObject'
        data_inputs = spec.pop('data_inputs', [])
        data_outputs = spec.pop('data_outputs', [])
        if len(data_outputs) > 0 or len(data_outputs) > 0:
            for item in data_inputs:
                item['typename'] = 'TaskDataReference'
            for item in data_outputs:
                item['typename'] = 'TaskDataReference'
            io_spec = {
                'typename': 'BpmnIoSpecification',
                'data_inputs': data_inputs,
                'data_outputs': data_outputs,
            }
            spec['io_specification'] = io_spec
        else:
            spec['io_specification'] = None

    update_data_specs(dct['spec'])
    for sp in dct['subprocess_specs'].values():
        update_data_specs(sp)

    for spec in dct['spec']['task_specs'].values():
        for item in spec.get('data_input_associations', {}):
            item['typename'] = 'DataObject'
        for item in spec.get('data_output_associations', {}):
            item['typename'] = 'DataObject'

def check_multiinstance(dct):

    specs = [ spec for spec in dct['spec']['task_specs'].values() if 'prevtaskclass' in spec ]
    if len(specs) > 0:
        raise VersionMigrationError("This workflow cannot be migrated because it contains MultiInstance Tasks")
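
A worked example of the duration conversion above (a sketch, not part of this commit): td_to_iso renders a timedelta as the ISO 8601 duration string that the new timer event definitions store.

# Sketch only, not part of this commit.
from datetime import timedelta

td_to_iso(timedelta(minutes=90))   # -> "P0DT1H30M0.0S"
# convert_timer_expressions would store this as expression "'P0DT1H30M0.0S'" on a
# DurationTimerEventDefinition, after renaming the definition's 'label' key to 'name'.
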
@@ -0,0 +1,55 @@
from copy import deepcopy

from .version_1_1 import move_subprocesses_to_top
from .version_1_2 import (
    convert_timer_expressions,
    add_default_condition_to_cond_task_specs,
    create_data_objects_and_io_specs,
    check_multiinstance,
)

def from_version_1_1(old):
    """
    Upgrade v1.1 serialization to v1.2.

    Expressions in timer event definitions have been converted from python expressions to
    ISO 8601 expressions.

    Cycle timers no longer connect back to themselves. New children are created from a single
    tasks rather than reusing previously executed tasks.

    All conditions (including the default) are included in the conditions for gateways.

    Data inputs and outputs on process specs were moved inside a BPMNIOSpecification, and
    are now TaskDataReferences; BpmnDataSpecifications that referred to Data Objects are
    now DataObjects.
    """
    new = deepcopy(old)
    convert_timer_expressions(new)
    add_default_condition_to_cond_task_specs(new)
    create_data_objects_and_io_specs(new)
    check_multiinstance(new)
    new['VERSION'] = "1.2"
    return new

def from_version_1_0(old):
    """
    Upgrade v1.0 serializations to v1.1.

    Starting with Spiff 1.1.8, subworkflows are no longer integrated in main task tree. When
    a subworkflow (a subprocess, transaction, or call activity) is reached, a subprocss is
    added to the top level workflow and the task enters a waiting state until the workflow
    completes.

    To make the serialization backwards compatible, we delete the tasks from the main workflow
    task list and add them to the appropriate subprocess and recreate the remaining subprocess
    attributes based on the task states.
    """
    new = deepcopy(old)
    move_subprocesses_to_top(new)
    return from_version_1_1(new)

MIGRATIONS = {
    '1.0': from_version_1_0,
    '1.1': from_version_1_1,
}
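
A minimal sketch (not part of this commit) of how a caller might apply the MIGRATIONS table above to an old serialization, assuming the dict carries the serializer version under 'VERSION':

# Sketch only, not part of this commit.
def upgrade_if_needed(dct):
    version = dct.get('VERSION')
    if version in MIGRATIONS:            # '1.0' and '1.1' have upgrade paths
        dct = MIGRATIONS[version](dct)   # each migration returns an upgraded copy
    return dct                           # dct['VERSION'] is '1.2' afterwards
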
@@ -1,95 +1,14 @@
from .helpers.spec import WorkflowSpecConverter, BpmnDataSpecificationConverter

from ..specs.BpmnProcessSpec import BpmnProcessSpec
from ..specs.MultiInstanceTask import MultiInstanceTask, getDynamicMIClass
from ..specs.MultiInstanceTask import MultiInstanceTask
from ..specs.events.IntermediateEvent import _BoundaryEventParent
from ..specs.BpmnProcessSpec import BpmnDataSpecification


class BpmnDataObjectConverter(BpmnDataSpecificationConverter):
    def __init__(self, registry, typename=None):
        super().__init__(BpmnDataSpecification, registry, typename)

from .helpers.spec import WorkflowSpecConverter

class BpmnProcessSpecConverter(WorkflowSpecConverter):

    def __init__(self, registry):
        super().__init__(BpmnProcessSpec, registry)

    def multi_instance_to_dict(self, spec):

        # This is a hot mess, but I don't know how else to deal with the dynamically
        # generated classes. Why do we use them?
        classname = spec.prevtaskclass.split('.')[-1]
        # Bypass the automatic selection of a conversion function
        # This returns the partial function that was created on register for the original task type.
        # The second argument is the function that would be called by `convert`.
        conversion = self.registry.convert_to_dict[classname]
        func = conversion.args[1]
        # We can just call it directly and add the typename manually
        dct = func(spec)
        dct['typename'] = classname
        dct.update({
            'times': self.registry.convert(spec.times) if spec.times is not None else None,
            'elementVar': spec.elementVar,
            'collection': self.registry.convert(spec.collection) if spec.collection is not None else None,
            # These are not defined in the constructor, but added by the parser, or somewhere else inappropriate
            'completioncondition': spec.completioncondition,
            'prevtaskclass': spec.prevtaskclass,
            'isSequential': spec.isSequential,
        })
        # Also from the parser, but not always present.
        if hasattr(spec, 'expanded'):
            dct['expanded'] = spec.expanded
        return dct

    def multiinstance_from_dict(self, dct):

        # The restore function removes items from the dictionary.
        # We need the original so that we can restore everything without enumerating all
        # possibiliies in this function.
        attrs = list(dct.keys())
        attrs.remove('typename')
        attrs.remove('wf_spec')
        # These need to be restored here
        attrs.remove('times')
        attrs.remove('collection')
        # If only I'd done this right in the DMN converter I wouldn't have to pollute this on with
        # task specific cases.
        if 'decision_table' in attrs:
            attrs.remove('decision_table')
            attrs.append('dmnEngine')

        # Terrible ugly hack
        registered = dict((name, c) for c, name in self.registry.typenames.items())
        # First get the dynamic class
        cls = getDynamicMIClass(dct['name'], registered[dct['typename']])
        # Restore the task according to the original task spec, so that its attributes can be converted
        # recursively
        original = self.registry.restore(dct.copy())
        # But this task has the wrong class, so delete it from the spec
        del dct['wf_spec'].task_specs[original.name]

        # Create a new class using the dynamic class
        task_spec = cls(**dct)
        task_spec.times = self.registry.restore(dct['times']) if dct['times'] is not None else None
        task_spec.collection = self.registry.restore(dct['collection']) if dct['collection'] is not None else None
        # Now copy everything else, from the temporary task spec if possible, otherwise the dict
        for attr in attrs:
            # If the original task has the attr, use the converted value
            if hasattr(original, attr):
                task_spec.__dict__[attr] = original.__dict__[attr]
            else:
                task_spec.__dict__[attr] = self.registry.restore(dct[attr])

        # Handle adding any remaining attributes from the original task type that might not be
        # present in the restored version (for example attributes added since last serialized)
        for attr in original.__dict__:
            if not hasattr(task_spec, attr):
                task_spec.__dict__[attr] = original.__dict__[attr]

        return task_spec

    def convert_task_spec_extensions(self, task_spec, dct):
        # Extensions will be moved out of the base parser, but since we currently add them to some
        # indeterminate set of tasks, we'll just check all the tasks for them here.
@@ -107,16 +26,12 @@ class BpmnProcessSpecConverter(WorkflowSpecConverter):
            'description': spec.description,
            'file': spec.file,
            'task_specs': {},
            'data_inputs': [ self.registry.convert(obj) for obj in spec.data_inputs ],
            'data_outputs': [ self.registry.convert(obj) for obj in spec.data_outputs ],
            'io_specification': self.registry.convert(spec.io_specification),
            'data_objects': dict([ (name, self.registry.convert(obj)) for name, obj in spec.data_objects.items() ]),
            'correlation_keys': spec.correlation_keys,
        }
        for name, task_spec in spec.task_specs.items():
            if isinstance(task_spec, MultiInstanceTask):
                task_dict = self.multi_instance_to_dict(task_spec)
            else:
                task_dict = self.registry.convert(task_spec)
            task_dict = self.registry.convert(task_spec)
            self.convert_task_spec_extensions(task_spec, task_dict)
            dct['task_specs'][name] = task_dict

@@ -135,8 +50,7 @@ class BpmnProcessSpecConverter(WorkflowSpecConverter):
        del spec.task_specs[f'{spec.name}.EndJoin']

        # Add the data specs
        spec.data_inputs = [ self.registry.restore(obj_dct) for obj_dct in dct.pop('data_inputs', []) ]
        spec.data_outputs = [ self.registry.restore(obj_dct) for obj_dct in dct.pop('data_outputs', []) ]
        spec.io_specification = self.registry.restore(dct.pop('io_specification', None))
        # fixme: This conditional can be removed in the next release, just avoiding invalid a potential
        # serialization issue for some users caught between official releases.
        if isinstance(dct.get('data_objects', {}), dict):
@@ -152,11 +66,7 @@ class BpmnProcessSpecConverter(WorkflowSpecConverter):
            # IMO storing the workflow spec on the task spec is a TERRIBLE idea, but that's
            # how this thing works.
            task_dict['wf_spec'] = spec
            # Ugh.
            if 'prevtaskclass' in task_dict:
                task_spec = self.multiinstance_from_dict(task_dict)
            else:
                task_spec = self.registry.restore(task_dict)
            task_spec = self.registry.restore(task_dict)
            if name == 'Start':
                spec.start = task_spec
            self.restore_task_spec_extensions(task_dict, task_spec)

@@ -10,7 +10,8 @@ from ..specs.NoneTask import NoneTask
from ..specs.UserTask import UserTask
from ..specs.ManualTask import ManualTask
from ..specs.ScriptTask import ScriptTask
from ..specs.SubWorkflowTask import CallActivity, TransactionSubprocess
from ..specs.MultiInstanceTask import StandardLoopTask, SequentialMultiInstanceTask, ParallelMultiInstanceTask
from ..specs.SubWorkflowTask import CallActivity, TransactionSubprocess, SubWorkflowTask
from ..specs.ExclusiveGateway import ExclusiveGateway
from ..specs.InclusiveGateway import InclusiveGateway
from ..specs.ParallelGateway import ParallelGateway
@@ -109,6 +110,49 @@ class ScriptTaskConverter(BpmnTaskSpecConverter):
        return dct


class StandardLoopTaskConverter(BpmnTaskSpecConverter):

    def __init__(self, registry):
        super().__init__(StandardLoopTask, registry)

    def to_dict(self, spec):
        dct = self.get_default_attributes(spec)
        dct.update(self.get_bpmn_attributes(spec))
        dct.update(self.get_standard_loop_attributes(spec))
        return dct


class MultiInstanceTaskConverter(BpmnTaskSpecConverter):

    def to_dict(self, spec):
        dct = self.get_default_attributes(spec)
        dct.update(self.get_bpmn_attributes(spec))
        dct['task_spec'] = spec.task_spec
        dct['cardinality'] = spec.cardinality
        dct['data_input'] = self.registry.convert(spec.data_input)
        dct['data_output'] = self.registry.convert(spec.data_output)
        dct['input_item'] = self.registry.convert(spec.input_item)
        dct['output_item'] = self.registry.convert(spec.output_item)
        dct['condition'] = spec.condition
        return dct

    def from_dict(self, dct):
        dct['data_input'] = self.registry.restore(dct['data_input'])
        dct['data_output'] = self.registry.restore(dct['data_output'])
        dct['input_item'] = self.registry.restore(dct['input_item'])
        dct['output_item'] = self.registry.restore(dct['output_item'])
        return self.task_spec_from_dict(dct)


class ParallelMultiInstanceTaskConverter(MultiInstanceTaskConverter):
    def __init__(self, registry):
        super().__init__(ParallelMultiInstanceTask, registry)

class SequentialMultiInstanceTaskConverter(MultiInstanceTaskConverter):
    def __init__(self, registry):
        super().__init__(SequentialMultiInstanceTask, registry)


class BoundaryEventParentConverter(BpmnTaskSpecConverter):

    def __init__(self, registry):
@@ -120,7 +164,10 @@ class BoundaryEventParentConverter(BpmnTaskSpecConverter):
        return dct


class SubprocessConverter(BpmnTaskSpecConverter):
class SubWorkflowConverter(BpmnTaskSpecConverter):

    def __init__(self, cls, registry):
        super().__init__(cls, registry)

    def to_dict(self, spec):
        dct = super().to_dict(spec)
@@ -131,17 +178,17 @@ class SubprocessConverter(BpmnTaskSpecConverter):
        dct['subworkflow_spec'] = dct.pop('spec')
        return self.task_spec_from_dict(dct)

class SubprocessTaskConverter(SubWorkflowConverter):
    def __init__(self, registry):
        super().__init__(SubWorkflowTask, registry)

class CallActivityTaskConverter(SubprocessConverter):
class CallActivityTaskConverter(SubWorkflowConverter):
    def __init__(self, registry):
        super().__init__(CallActivity, registry)
        self.wf_class = BpmnWorkflow


class TransactionSubprocessTaskConverter(SubprocessConverter):
class TransactionSubprocessTaskConverter(SubWorkflowConverter):
    def __init__(self, registry):
        super().__init__(TransactionSubprocess, registry)
        self.wf_class = BpmnWorkflow


class ConditionalGatewayConverter(BpmnTaskSpecConverter):
@@ -275,6 +322,10 @@ DEFAULT_TASK_SPEC_CONVERTER_CLASSES = [
    UserTaskConverter,
    ManualTaskConverter,
    ScriptTaskConverter,
    StandardLoopTaskConverter,
    ParallelMultiInstanceTaskConverter,
    SequentialMultiInstanceTaskConverter,
    SubprocessTaskConverter,
    CallActivityTaskConverter,
    TransactionSubprocessTaskConverter,
    StartEventConverter,

@@ -1,148 +0,0 @@
from copy import deepcopy
from datetime import datetime, timedelta

from SpiffWorkflow.exceptions import WorkflowException
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.bpmn.specs.events.event_definitions import LOCALTZ

class VersionMigrationError(WorkflowException):
    pass

def version_1_1_to_1_2(old):
    """
    Upgrade v1.1 serialization to v1.2.

    Expressions in timer event definitions have been converted from python expressions to
    ISO 8601 expressions.

    Cycle timers no longer connect back to themselves. New children are created from a single
    tasks rather than reusing previously executed tasks.

    All conditions (including the default) are included in the conditions for gateways.
    """
    new = deepcopy(old)

    def td_to_iso(td):
        total = td.total_seconds()
        v1, seconds = total // 60, total % 60
        v2, minutes = v1 // 60, v1 % 60
        days, hours = v2 // 24, v2 % 60
        return f"P{days:.0f}DT{hours:.0f}H{minutes:.0f}M{seconds}S"

    message = "Unable to convert time specifications for {spec}. This most likely because the values are set during workflow execution."

    has_timer = lambda ts: 'event_definition' in ts and ts['event_definition']['typename'] in [ 'CycleTimerEventDefinition', 'TimerEventDefinition']
    for spec in [ ts for ts in new['spec']['task_specs'].values() if has_timer(ts) ]:
        spec['event_definition']['name'] = spec['event_definition'].pop('label')
        if spec['event_definition']['typename'] == 'TimerEventDefinition':
            expr = spec['event_definition'].pop('dateTime')
            try:
                dt = eval(expr)
                if isinstance(dt, datetime):
                    spec['event_definition']['expression'] = f"'{dt.isoformat()}'"
                    spec['event_definition']['typename'] = 'TimeDateEventDefinition'
                elif isinstance(dt, timedelta):
                    spec['event_definition']['expression'] = f"'{td_to_iso(dt)}'"
                    spec['event_definition']['typename'] = 'DurationTimerEventDefinition'
            except:
                raise VersionMigrationError(message.format(spec=spec['name']))

        if spec['event_definition']['typename'] == 'CycleTimerEventDefinition':

            tasks = [ t for t in new['tasks'].values() if t['task_spec'] == spec['name'] ]
            task = tasks[0] if len(tasks) > 0 else None

            expr = spec['event_definition'].pop('cycle_definition')
            try:
                repeat, duration = eval(expr)
                spec['event_definition']['expression'] = f"'R{repeat}/{td_to_iso(duration)}'"
                if task is not None:
                    cycles_complete = task['data'].pop('repeat_count', 0)
                    start_time = task['internal_data'].pop('start_time', None)
                    if start_time is not None:
                        dt = datetime.fromisoformat(start_time)
                        task['internal_data']['event_value'] = {
                            'cycles': repeat - cycles_complete,
                            'next': datetime.combine(dt.date(), dt.time(), LOCALTZ).isoformat(),
                            'duration': duration.total_seconds(),
                        }
            except:
                raise VersionMigrationError(message.format(spec=spec['name']))

            if spec['typename'] == 'StartEvent':
                spec['outputs'].remove(spec['name'])
                if task is not None:
                    children = [ new['tasks'][c] for c in task['children'] ]
                    # Formerly cycles were handled by looping back and reusing the tasks so this removes the extra tasks
                    remove = [ c for c in children if c['task_spec'] == task['task_spec']][0]
                    for task_id in remove['children']:
                        child = new['tasks'][task_id]
                        if child['task_spec'].startswith('return') or child['state'] != TaskState.COMPLETED:
                            new['tasks'].pop(task_id)
                        else:
                            task['children'].append(task_id)
                    task['children'].remove(remove['id'])
                    new['tasks'].pop(remove['id'])

    for spec in [ts for ts in new['spec']['task_specs'].values() if ts['typename'] == 'ExclusiveGateway']:
        if (None, spec['default_task_spec']) not in spec['cond_task_specs']:
            spec['cond_task_specs'].append((None, spec['default_task_spec']))

    new['VERSION'] = "1.2"
    return new

def version_1_0_to_1_1(old):
    """
    Upgrade v1.0 serializations to v1.1.

    Starting with Spiff 1.1.8, subworkflows are no longer integrated in main task tree. When
    a subworkflow (a subprocess, transaction, or call activity) is reached, a subprocss is
    added to the top level workflow and the task enters a waiting state until the workflow
    completes.

    To make the serialization backwards compatible, we delete the tasks from the main workflow
    task list and add them to the appropriate subprocess and recreate the remaining subprocess
    attributes based on the task states.
    """
    new = deepcopy(old)
    subprocesses = dict((sp, { 'tasks': {}, 'root': None, 'data': {}, 'success': True }) for sp in new['subprocesses'])

    # Move the tasks out of the top-level
    for sp, task_ids in new['subprocesses'].items():
        for task_id in task_ids:
            if task_id in new['tasks']:
                subprocesses[sp]['tasks'][task_id] = new['tasks'].pop(task_id)
                if subprocesses[sp]['root'] is None:
                    subprocesses[sp]['root'] = task_id
                    subprocesses[sp]['tasks'][task_id]['parent'] = None

    # Fix up th task and workflow states
    waiting = []
    for sp in subprocesses:
        completed = sorted(
            [t for t in subprocesses[sp]['tasks'].values() if t['state'] in [32, 64] ],
            key=lambda t: t['last_state_change']
        )
        if len(completed) > 0:
            subprocesses[sp]['last_task'] = completed[-1]
        # If there are uncompleted tasks, set the subworkflow task state to waiting
        if len(completed) < len(subprocesses[sp]['tasks']):
            waiting.append(sp)

    # Check the top level and all subprocesses for waiting tasks
    # Also remove any children that are no longer in the tree
    for sp in [new] + list(subprocesses.values()):
        for task_id, task in sp['tasks'].items():
            if task_id in waiting:
                task['state'] = 8
            task['children'] = [ c for c in task['children'] if c in sp['tasks'] ]

    new['subprocesses'] = subprocesses
    new['VERSION'] = "1.1"
    return version_1_1_to_1_2(new)


MIGRATIONS = {
    '1.0': version_1_0_to_1_1,
    '1.1': version_1_1_to_1_2,
}

@@ -7,17 +7,18 @@ from ..workflow import BpmnMessage, BpmnWorkflow
from ..specs.SubWorkflowTask import SubWorkflowTask
from ...task import Task

from .version_migration import MIGRATIONS
from .migration.version_migration import MIGRATIONS
from .helpers.registry import DefaultRegistry
from .helpers.dictionary import DictionaryConverter

from .process_spec import BpmnProcessSpecConverter, BpmnDataObjectConverter
from .process_spec import BpmnProcessSpecConverter
from .data_spec import BpmnDataObjectConverter, TaskDataReferenceConverter, IOSpecificationConverter
from .task_spec import DEFAULT_TASK_SPEC_CONVERTER_CLASSES
from .event_definition import DEFAULT_EVENT_CONVERTERS

DEFAULT_SPEC_CONFIG = {
    'process': BpmnProcessSpecConverter,
    'data_specs': [BpmnDataObjectConverter],
    'data_specs': [IOSpecificationConverter, BpmnDataObjectConverter, TaskDataReferenceConverter],
    'task_specs': DEFAULT_TASK_SPEC_CONVERTER_CLASSES,
    'event_definitions': DEFAULT_EVENT_CONVERTERS,
}

@ -16,18 +16,12 @@
|
|||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
||||
|
||||
from copy import deepcopy
|
||||
import logging
|
||||
|
||||
from SpiffWorkflow.bpmn.exceptions import WorkflowDataException
|
||||
from ...task import TaskState
|
||||
from .UnstructuredJoin import UnstructuredJoin
|
||||
from ...specs.Simple import Simple
|
||||
from ...specs.WorkflowSpec import WorkflowSpec
|
||||
|
||||
|
||||
data_log = logging.getLogger('spiff.data')
|
||||
|
||||
class _EndJoin(UnstructuredJoin):
|
||||
|
||||
def _check_threshold_unstructured(self, my_task, force=False):
|
||||
|
@ -58,48 +52,6 @@ class _EndJoin(UnstructuredJoin):
|
|||
my_task.workflow.data.update(my_task.data)
|
||||
|
||||
|
||||
class BpmnDataSpecification:
|
||||
|
||||
def __init__(self, name, description=None):
|
||||
"""
|
||||
:param name: the name of the task (the BPMN ID)
|
||||
:param description: the task description (the BPMN name)
|
||||
"""
|
||||
self.name = name
|
||||
self.description = description or name
|
||||
# In the future, we can add schemas defining the objects here.
|
||||
|
||||
def get(self, my_task):
|
||||
"""Copy a value form the workflow data to the task data."""
|
||||
if self.name not in my_task.workflow.data:
|
||||
message = f"Data object '{self.name}' " \
|
||||
f"does not exist and can not be read."
|
||||
raise WorkflowDataException(message, my_task, data_input=self)
|
||||
my_task.data[self.name] = deepcopy(my_task.workflow.data[self.name])
|
||||
|
||||
def set(self, my_task):
|
||||
"""Copy a value from the task data to the workflow data"""
|
||||
if self.name not in my_task.data:
|
||||
message = f"A Data Object '{self.name}' " \
|
||||
f"could not be set, it does not exist in the task data"
|
||||
raise WorkflowDataException(message, my_task, data_output=self)
|
||||
my_task.workflow.data[self.name] = deepcopy(my_task.data[self.name])
|
||||
del my_task.data[self.name]
|
||||
data_log.info(f'Set workflow variable {self.name}', extra=my_task.log_info())
|
||||
|
||||
def copy(self, source, destination, data_input=False, data_output=False):
|
||||
"""Copy a value from one task to another."""
|
||||
if self.name not in source.data:
|
||||
message = f"'{self.name}' was not found in the task data"
|
||||
raise WorkflowDataException(
|
||||
message,
|
||||
source,
|
||||
data_input=self if data_input else None,
|
||||
data_output=self if data_output else None,
|
||||
)
|
||||
destination.data[self.name] = deepcopy(source.data[self.name])
|
||||
|
||||
|
||||
class BpmnProcessSpec(WorkflowSpec):
|
||||
"""
|
||||
This class represents the specification of a BPMN process workflow. This
|
||||
|
@@ -116,11 +68,9 @@ class BpmnProcessSpec(WorkflowSpec):
|
|||
"""
|
||||
super(BpmnProcessSpec, self).__init__(name=name, filename=filename)
|
||||
self.end = _EndJoin(self, '%s.EndJoin' % (self.name))
|
||||
end = Simple(self, 'End')
|
||||
end.follow(self.end)
|
||||
self.end.connect(Simple(self, 'End'))
|
||||
self.svg = svg
|
||||
self.description = description
|
||||
self.data_inputs = []
|
||||
self.data_outputs = []
|
||||
self.io_specification = None
|
||||
self.data_objects = {}
|
||||
self.correlation_keys = {}
|
||||
|
|
|
@@ -17,6 +17,7 @@
|
|||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
||||
|
||||
from ..exceptions import WorkflowDataException
|
||||
from ...operators import Operator
|
||||
from ...specs.base import TaskSpec
|
||||
|
||||
|
@@ -48,21 +49,15 @@ class BpmnSpecMixin(TaskSpec):
|
|||
super(BpmnSpecMixin, self).__init__(wf_spec, name, **kwargs)
|
||||
self.lane = lane
|
||||
self.position = position or {'x': 0, 'y': 0}
|
||||
self.loopTask = False
|
||||
self.documentation = None
|
||||
self.data_input_associations = []
|
||||
self.data_output_associations = []
|
||||
self.io_specification = None
|
||||
|
||||
@property
|
||||
def spec_type(self):
|
||||
return 'BPMN Task'
|
||||
|
||||
def is_loop_task(self):
|
||||
"""
|
||||
Returns true if this task is a BPMN looping task
|
||||
"""
|
||||
return self.loopTask
|
||||
|
||||
def connect_outgoing_if(self, condition, taskspec):
|
||||
"""
|
||||
Connect this task spec to the indicated child, if the condition
|
||||
|
@@ -74,13 +69,34 @@ class BpmnSpecMixin(TaskSpec):
|
|||
else:
|
||||
self.connect_if(_BpmnCondition(condition), taskspec)
|
||||
|
||||
def _on_ready_hook(self, my_task):
|
||||
super()._on_ready_hook(my_task)
|
||||
def _update_hook(self, my_task):
|
||||
|
||||
super()._update_hook(my_task)
|
||||
# This copies data from data objects
|
||||
for obj in self.data_input_associations:
|
||||
obj.get(my_task)
|
||||
|
||||
# If an IO spec was given, require all inputs are present, and remove all other inputs.
|
||||
if self.io_specification is not None:
|
||||
data = {}
|
||||
for var in self.io_specification.data_inputs:
|
||||
if var.name not in my_task.data:
|
||||
raise WorkflowDataException(f"Missing data input", task=my_task, data_input=var)
|
||||
data[var.name] = my_task.data[var.name]
|
||||
my_task.data = data
|
||||
|
||||
return True
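# For example, a task whose IO specification declares a single data input 'x' and whose
# incoming task data is {'x': 1, 'y': 2} starts with task data of exactly {'x': 1}.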
|
||||
|
||||
def _on_complete_hook(self, my_task):
|
||||
|
||||
if self.io_specification is not None:
|
||||
data = {}
|
||||
for var in self.io_specification.data_outputs:
|
||||
if var.name not in my_task.data:
|
||||
raise WorkflowDataException(f"Missing data ouput", task=my_task, data_output=var)
|
||||
data[var.name] = my_task.data[var.name]
|
||||
my_task.data = data
|
||||
|
||||
for obj in self.data_output_associations:
|
||||
obj.set(my_task)
|
||||
|
||||
|
|
|
@@ -17,469 +17,303 @@
|
|||
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
|
||||
# 02110-1301 USA
|
||||
|
||||
import copy
|
||||
from builtins import range
|
||||
from uuid import uuid4
|
||||
import re
|
||||
from copy import deepcopy
|
||||
from collections.abc import Iterable, Sequence, Mapping, MutableSequence, MutableMapping
|
||||
|
||||
from .SubWorkflowTask import SubWorkflowTask, CallActivity
|
||||
from .ParallelGateway import ParallelGateway
|
||||
from .ScriptTask import ScriptTask
|
||||
from .ExclusiveGateway import ExclusiveGateway
|
||||
from ...dmn.specs.BusinessRuleTask import BusinessRuleTask
|
||||
from ...exceptions import WorkflowTaskException
|
||||
from ...operators import valueof, is_number
|
||||
from ...specs.SubWorkflow import SubWorkflow
|
||||
from ...specs.base import TaskSpec
|
||||
from ...util.impl import get_class
|
||||
from ...task import Task, TaskState
|
||||
from ...task import TaskState
|
||||
from ...util.deep_merge import DeepMerge
|
||||
from ..exceptions import WorkflowDataException
|
||||
from .BpmnSpecMixin import BpmnSpecMixin
|
||||
|
||||
|
||||
def gendict(path, d):
|
||||
if len(path) == 0:
|
||||
return d
|
||||
else:
|
||||
return gendict(path[:-1], {path[-1]: d})
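# For example, gendict(['a', 'b', 'c'], 1) returns {'a': {'b': {'c': 1}}}; this is how a
# collection name such as 'a/b/c' is expanded back into nested task data below.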
|
||||
class LoopTask(BpmnSpecMixin):
|
||||
|
||||
class MultiInstanceTask(TaskSpec):
|
||||
"""
|
||||
When executed, this task performs a split on the current task.
|
||||
The number of outgoing tasks depends on the runtime value of a
|
||||
specified data field.
|
||||
If more than one input is connected, the task performs an implicit
|
||||
multi merge.
|
||||
|
||||
This task has one or more inputs and may have any number of outputs.
|
||||
"""
|
||||
|
||||
def __init__(self, wf_spec, name, times, **kwargs):
|
||||
def process_children(self, my_task):
|
||||
"""
|
||||
Constructor.
|
||||
|
||||
:type wf_spec: WorkflowSpec
|
||||
:param wf_spec: A reference to the workflow specification.
|
||||
:type name: str
|
||||
:param name: The name of the task spec.
|
||||
:type times: int or :class:`SpiffWorkflow.operators.Term`
|
||||
:param times: The number of tasks to create.
|
||||
:type kwargs: dict
|
||||
:param kwargs: See :class:`SpiffWorkflow.specs.TaskSpec`.
|
||||
Handle any newly completed children and update merged tasks.
|
||||
Returns a boolean indicating whether there is a child currently running
|
||||
"""
|
||||
if times is None:
|
||||
raise ValueError('times argument is required')
|
||||
self.times = times
|
||||
merged = my_task.internal_data.get('merged') or []
|
||||
child_running = False
|
||||
for child in filter(lambda c: c.task_spec.name == self.task_spec, my_task.children):
|
||||
if child._has_state(TaskState.FINISHED_MASK) and str(child.id) not in merged:
|
||||
self.child_completed_action(my_task, child)
|
||||
merged.append(str(child.id))
|
||||
elif not child._has_state(TaskState.FINISHED_MASK):
|
||||
child_running = True
|
||||
my_task.internal_data['merged'] = merged
|
||||
return child_running
|
||||
|
||||
# We don't really pass these things in (we should), but putting them here to document that they exist
|
||||
self.loopTask = kwargs.get('loopTask', False)
|
||||
self.isSequential = kwargs.get('isSequential', False)
|
||||
self.expanded = kwargs.get('expanded', 1)
|
||||
self.elementVar = kwargs.get('element_var')
|
||||
self.collection = kwargs.get('collection')
|
||||
def child_completed_action(self, my_task, child):
|
||||
raise NotImplementedError
|
||||
|
||||
self.multiInstance = True
|
||||
|
||||
TaskSpec.__init__(self, wf_spec, name, **kwargs)
|
||||
class StandardLoopTask(LoopTask):
|
||||
|
||||
def _find_my_task(self, task):
|
||||
for thetask in task.workflow.task_tree:
|
||||
if thetask.thread_id != task.thread_id:
|
||||
continue
|
||||
if thetask.task_spec == self:
|
||||
return thetask
|
||||
return None
|
||||
def __init__(self, wf_spec, name, task_spec, maximum, condition, test_before, **kwargs):
|
||||
super().__init__(wf_spec, name, **kwargs)
|
||||
self.task_spec = task_spec
|
||||
self.maximum = maximum
|
||||
self.condition = condition
|
||||
self.test_before = test_before
|
||||
|
||||
def _on_trigger(self, task_spec):
|
||||
"""
|
||||
May be called after execute() was already completed to create an
|
||||
additional outbound task.
|
||||
"""
|
||||
def _update_hook(self, my_task):
|
||||
|
||||
# Find a Task for this TaskSpec.
|
||||
|
||||
my_task = self._find_my_task(task_spec)
|
||||
if my_task._has_state(TaskState.COMPLETED):
|
||||
state = TaskState.READY
|
||||
super()._update_hook(my_task)
|
||||
child_running = self.process_children(my_task)
|
||||
if child_running:
|
||||
# We're in the middle of an iteration; we're not done and we can't create a new task
|
||||
return False
|
||||
elif self.loop_complete(my_task):
|
||||
# No children running and one of the completion conditions has been met; done
|
||||
return True
|
||||
else:
|
||||
state = TaskState.FUTURE
|
||||
for output in self.outputs:
|
||||
new_task = my_task._add_child(output, state)
|
||||
new_task.triggered = True
|
||||
output._predict(new_task)
|
||||
# Execute again
|
||||
if my_task.state != TaskState.WAITING:
|
||||
my_task._set_state(TaskState.WAITING)
|
||||
task_spec = my_task.workflow.spec.task_specs[self.task_spec]
|
||||
child = my_task._add_child(task_spec, TaskState.READY)
|
||||
child.data = deepcopy(my_task.data)
|
||||
|
||||
def _get_loop_completion(self,my_task):
|
||||
if not self.completioncondition == None:
|
||||
terminate = my_task.workflow.script_engine.evaluate(my_task,self.completioncondition)
|
||||
if terminate:
|
||||
my_task.terminate_current_loop = True
|
||||
return terminate
|
||||
return False
|
||||
def child_completed_action(self, my_task, child):
|
||||
DeepMerge.merge(my_task.data, child.data)
|
||||
|
||||
def _get_count(self, my_task):
|
||||
"""
|
||||
self.times has the text entered in the BPMN model.
|
||||
It could be just a number - in this case return the number
|
||||
it could be a variable name - so we get the variable value from my_task
|
||||
the variable could be a number (text representation??) - in this case return the integer value of the number
|
||||
it could be a list of records - in this case return the cardinality of the list
|
||||
it could be a dict with a bunch of keys - in this case return the cardinality of the keys
|
||||
"""
|
||||
|
||||
if is_number(self.times.name):
|
||||
return int(self.times.name)
|
||||
variable = valueof(my_task, self.times, 1) # look for variable in context, if we don't find it, default to 1
|
||||
|
||||
if is_number(variable):
|
||||
return int(variable)
|
||||
if isinstance(variable,list):
|
||||
return len(variable)
|
||||
if isinstance(variable,dict):
|
||||
return len(variable.keys())
|
||||
return 1 # we shouldn't ever get here, but just in case return a sane value.
|
||||
|
||||
def _get_predicted_outputs(self, my_task):
|
||||
split_n = self._get_count(my_task)
|
||||
|
||||
# Predict the outputs.
|
||||
outputs = []
|
||||
for i in range(split_n):
|
||||
outputs += self.outputs
|
||||
|
||||
return outputs
|
||||
|
||||
def _build_gateway_name(self,position):
|
||||
"""
|
||||
Build a unique name for each task - need to be the
|
||||
same over save/restore of the workflow spec.
|
||||
"""
|
||||
return 'Gateway_for_' + str(self.name) + "_" + position
|
||||
|
||||
def _make_new_gateway(self,my_task,suffix,descr):
|
||||
gw_spec = ParallelGateway(self._wf_spec,
|
||||
self._build_gateway_name(suffix),
|
||||
triggered=False,
|
||||
description=descr)
|
||||
gw = Task(my_task.workflow, task_spec=gw_spec)
|
||||
return gw_spec,gw
|
||||
|
||||
def _add_gateway(self, my_task):
|
||||
""" Generate parallel gateway tasks on either side of the current task.
|
||||
This emulates a standard BPMN pattern of having parallel tasks between
|
||||
two parallel gateways.
|
||||
Once we have set up the gateways, we write a note into our internal data so that
|
||||
we don't do it again.
|
||||
"""
|
||||
# Expand this
|
||||
# A-> ME -> C
|
||||
# into this
|
||||
# A -> GW_start -> ME -> GW_end -> C
|
||||
# where GW is a parallel gateway
|
||||
|
||||
|
||||
# check to see if we have already done this, this code gets called multiple times
|
||||
# as we build the tree
|
||||
if my_task.parent.task_spec.name[:11] == 'Gateway_for':
|
||||
return
|
||||
|
||||
# build the gateway specs and the tasks.
|
||||
# Spiff wants a distinct spec for each task
|
||||
# that it has in the workflow or it will throw an error
|
||||
start_gw_spec, start_gw = self._make_new_gateway(my_task,'start','Begin Gateway')
|
||||
end_gw_spec, end_gw = self._make_new_gateway(my_task,'end','End Gateway')
|
||||
|
||||
# Set up the parent task and insert it into the workflow
|
||||
|
||||
# remove the current task spec from the parent, it will be replaced with the new construct.
|
||||
my_task.parent.task_spec.outputs = [x for x in my_task.parent.task_spec.outputs if x != my_task.task_spec]
|
||||
|
||||
# in the case that our parent is a gateway with a default route,
|
||||
# we need to ensure that the default route is empty
|
||||
# so that connect can set it up properly
|
||||
if hasattr(my_task.parent.task_spec,'default_task_spec') and \
|
||||
my_task.parent.task_spec.default_task_spec == my_task.task_spec.name:
|
||||
my_task.parent.task_spec.default_task_spec = None
|
||||
my_task.parent.task_spec.connect(start_gw_spec)
|
||||
elif isinstance(my_task.parent.task_spec, ExclusiveGateway):
|
||||
for cond, name in [ (cond, name) for cond, name in my_task.parent.task_spec.cond_task_specs\
|
||||
if name == my_task.task_spec.name]:
|
||||
my_task.parent.task_spec.cond_task_specs.remove((cond, name))
|
||||
my_task.parent.task_spec.cond_task_specs.append((cond, start_gw_spec.name))
|
||||
start_gw_spec.inputs.append(my_task.parent.task_spec)
|
||||
def loop_complete(self, my_task):
|
||||
merged = my_task.internal_data.get('merged') or []
|
||||
if not self.test_before and len(merged) == 0:
|
||||
# "test before" isn't really compatible our execution model in a transparent way
|
||||
# This guarantees that the task will run at least once if test_before is False
|
||||
return False
|
||||
else:
|
||||
my_task.parent.task_spec.outputs.append(start_gw_spec)
|
||||
start_gw_spec.inputs.append(my_task.parent.task_spec)
|
||||
max_complete = self.maximum is not None and len(merged) >= self.maximum
|
||||
cond_complete = self.condition is not None and my_task.workflow.script_engine.evaluate(my_task, self.condition)
|
||||
return max_complete or cond_complete
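# For example, with test_before False, no completion condition and maximum == 3, this
# returns False until three iterations have been merged, so the loop body runs exactly
# three times.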
|
||||
|
||||
# get a list of all siblings and replace myself with the new gateway task
|
||||
# in the parent task
|
||||
newchildren = []
|
||||
for child in my_task.parent.children:
|
||||
if child == my_task:
|
||||
newchildren.append(start_gw)
|
||||
|
||||
class MultiInstanceTask(LoopTask):
|
||||
|
||||
def __init__(self, wf_spec, name, task_spec, cardinality=None, data_input=None,
|
||||
data_output=None, input_item=None, output_item=None, condition=None,
|
||||
**kwargs):
|
||||
|
||||
super().__init__(wf_spec, name, **kwargs)
|
||||
self.task_spec = task_spec
|
||||
self.cardinality = cardinality
|
||||
self.data_input = data_input
|
||||
self.data_output = data_output
|
||||
self.input_item = input_item
|
||||
self.output_item = output_item
|
||||
self.condition = condition
|
||||
|
||||
def child_completed_action(self, my_task, child):
|
||||
"""This merges child data into this task's data."""
|
||||
|
||||
if self.data_output is not None and self.output_item is not None:
|
||||
if self.output_item.name not in child.data:
|
||||
self.raise_data_exception("Expected an output item", child)
|
||||
item = child.data[self.output_item.name]
|
||||
key_or_index = child.internal_data.get('key_or_index')
|
||||
data_output = my_task.data[self.data_output.name]
|
||||
data_input = my_task.data[self.data_input.name] if self.data_input is not None else None
|
||||
if isinstance(data_output, Mapping) or data_input is data_output:
|
||||
data_output[key_or_index] = item
|
||||
else:
|
||||
newchildren.append(child)
|
||||
my_task.parent.children = newchildren
|
||||
|
||||
# update the gateway's parent to be my parent
|
||||
start_gw.parent = my_task.parent
|
||||
# update my parent to be the gateway
|
||||
my_task.parent = start_gw
|
||||
start_gw_spec.connect(self)
|
||||
start_gw.children = [my_task]
|
||||
|
||||
# transfer my outputs to the ending gateway and set up the
|
||||
# child parent links
|
||||
end_gw_spec.outputs = self.outputs.copy()
|
||||
self.connect(end_gw_spec)
|
||||
self.outputs = [end_gw_spec]
|
||||
end_gw.parent = my_task
|
||||
my_task.children = [end_gw]
|
||||
|
||||
def multiinstance_info(self, my_task):
|
||||
split_n = self._get_count(my_task)
|
||||
|
||||
runtimes = int(my_task._get_internal_data('runtimes', 1)) # set a default if not already run
|
||||
loop = False
|
||||
parallel = False
|
||||
sequential = False
|
||||
|
||||
if my_task.task_spec.loopTask:
|
||||
loop = True
|
||||
elif my_task.task_spec.isSequential:
|
||||
sequential = True
|
||||
data_output.append(item)
|
||||
else:
|
||||
parallel = True
|
||||
DeepMerge.merge(my_task.data, child.data)
|
||||
|
||||
return {'is_looping': loop,
|
||||
'is_sequential_mi': sequential,
|
||||
'is_parallel_mi': parallel,
|
||||
'mi_count': split_n,
|
||||
'mi_index': runtimes}
|
||||
def create_child(self, my_task, item, key_or_index=None):
|
||||
|
||||
task_spec = my_task.workflow.spec.task_specs[self.task_spec]
|
||||
child = my_task._add_child(task_spec, TaskState.WAITING)
|
||||
child.data = deepcopy(my_task.data)
|
||||
if self.input_item is not None:
|
||||
child.data[self.input_item.name] = deepcopy(item)
|
||||
if key_or_index is not None:
|
||||
child.internal_data['key_or_index'] = key_or_index
|
||||
child.task_spec._update(child)
|
||||
|
||||
def _make_new_child_task(self,my_task,x):
|
||||
# here we generate a distinct copy of our original task each
|
||||
# parallel instance, and hook them up into the task tree
|
||||
new_child = copy.copy(my_task)
|
||||
new_child.id = uuid4()
|
||||
# I think we will need to update both each variable's
|
||||
# internal data and the copy of the public data to get the
|
||||
# variables correct
|
||||
new_child.internal_data = copy.deepcopy(my_task.internal_data)
|
||||
def check_completion_condition(self, my_task):
|
||||
|
||||
new_child.internal_data[
|
||||
'runtimes'] = x + 2 # working with base 1 and we already have one done
|
||||
merged = my_task.internal_data.get('merged', [])
|
||||
if len(merged) > 0:
|
||||
last_child = [c for c in my_task.children if str(c.id) == merged[-1]][0]
|
||||
return my_task.workflow.script_engine.evaluate(last_child, self.condition)
|
||||
|
||||
new_child.data = copy.deepcopy(my_task.data)
|
||||
new_child.data[self.elementVar] = self._get_current_var(my_task,
|
||||
x + 2)
|
||||
def init_data_output_with_input_data(self, my_task, input_data):
|
||||
|
||||
new_child.children = [] # these will be updated later
|
||||
# in the case of parallel, the children list will get updated during the predict loop
|
||||
return new_child
|
||||
|
||||
def _expand_sequential(self,my_task,split_n):
|
||||
# this should be only for SMI and not looping tasks -
|
||||
# we need to patch up the children and make sure they chain correctly
|
||||
# this is different from PMI because the children all link together, not to
|
||||
# the gateways on both ends.
|
||||
# first let's check for a task in the task spec tree
|
||||
|
||||
# we have to jump through some hoops to determine if we have already
|
||||
# expanded this properly as we may have a cardinality that may change
|
||||
# and this code gets run a bunch of times.
|
||||
expanded = getattr(self, 'expanded', 1)
|
||||
if split_n >= expanded:
|
||||
setattr(self, 'expanded', split_n)
|
||||
|
||||
if not (expanded == split_n):
|
||||
|
||||
# Initialize based on current task
|
||||
my_task_copy = copy.copy(my_task)
|
||||
current_task = my_task
|
||||
current_task_spec = self
|
||||
proto_task_spec = copy.copy(self)
|
||||
|
||||
# Essentially we are expanding like this:
|
||||
# A -> B0 -> C
|
||||
# A -> B0 -> B1 -> B2 -> C
|
||||
# each new child has the last child we created as its parent
|
||||
# and the outputs of what B0 had previously.
|
||||
# this has to be done for both the task and the task spec.
|
||||
|
||||
for x in range(split_n - expanded):
|
||||
# create Bx from Bx-1
|
||||
new_child = self._make_new_child_task(my_task,x)
|
||||
# set children of Bx = children of B0
|
||||
new_child.children = copy.copy(my_task_copy.children)
|
||||
# all of C's parents should be Bx
|
||||
for child in new_child.children:
|
||||
child.parent = new_child
|
||||
# create a new task spec for this new task and update it
|
||||
new_task_spec = self._make_new_task_spec(proto_task_spec, my_task, x)
|
||||
new_child.task_spec = new_task_spec
|
||||
new_child._set_state(TaskState.MAYBE)
|
||||
|
||||
# update task spec inputs and outputs like we did for the task
|
||||
current_task_spec.outputs = [new_task_spec]
|
||||
new_task_spec.inputs = [current_task_spec]
|
||||
current_task.children = [new_child]
|
||||
# update the parent of the new task
|
||||
new_child.parent = current_task
|
||||
# set up variables for next pass.
|
||||
current_task = new_child
|
||||
current_task_spec = new_task_spec
|
||||
|
||||
def _expand_parallel(self,my_task,split_n):
|
||||
# add a parallel gateway on either side of this task
|
||||
self._add_gateway(my_task)
|
||||
# we use the child count of the parallel gateway to determine
|
||||
# if we have expanded this or not. Children of the gateway we just created
|
||||
# should match the split level provided by the multiinstance
|
||||
|
||||
for x in range(split_n - len(my_task.parent.children)):
|
||||
new_child = self._make_new_child_task(my_task,x)
|
||||
new_task_spec = self._make_new_task_spec(my_task.task_spec, my_task, x)
|
||||
new_child.task_spec = new_task_spec
|
||||
# patch up the right hand side gateway
|
||||
self.outputs[0].inputs.append(new_task_spec)
|
||||
# patch up the left hand side gateway task and task_spec
|
||||
my_task.parent.children.append(new_child)
|
||||
my_task.parent.task_spec.outputs.append(new_task_spec)
|
||||
|
||||
def _make_new_task_spec(self,proto_task_spec,my_task,suffix):
|
||||
|
||||
new_task_spec = copy.copy(proto_task_spec)
|
||||
new_task_spec.name = new_task_spec.name + "_%d" % suffix
|
||||
new_task_spec.id = str(new_task_spec.id) + "_%d" % suffix
|
||||
my_task.workflow.spec.task_specs[new_task_spec.name] = new_task_spec # add to registry
|
||||
return new_task_spec
|
||||
|
||||
def _predict_hook(self, my_task):
|
||||
|
||||
split_n = self._get_count(my_task)
|
||||
runtimes = int(my_task._get_internal_data('runtimes', 1)) # set a default if not already run
|
||||
|
||||
my_task._set_internal_data(splits=split_n, runtimes=runtimes)
|
||||
if not self.elementVar:
|
||||
self.elementVar = my_task.task_spec.name + "_CurrentVar"
|
||||
|
||||
my_task.data[self.elementVar] = copy.copy(self._get_current_var(my_task, runtimes))
|
||||
|
||||
# Create the outgoing tasks.
|
||||
outputs = []
|
||||
# In the special case that this is a Parallel multiInstance, we need
|
||||
# to expand the children in the middle. This method gets called
|
||||
# during every pass through the tree, so we need to wait until our
|
||||
# real cardinality gets updated to expand the tree.
|
||||
if (not self.isSequential):
|
||||
self._expand_parallel(my_task,split_n)
|
||||
|
||||
elif not self.loopTask:
|
||||
self._expand_sequential(my_task,split_n)
|
||||
|
||||
outputs += self.outputs
|
||||
if my_task._is_definite():
|
||||
my_task._sync_children(outputs, TaskState.FUTURE)
|
||||
name = self.data_output.name
|
||||
if name not in my_task.data:
|
||||
if isinstance(input_data, (MutableMapping, MutableSequence)):
|
||||
# We can use the same class if it implements __setitem__
|
||||
my_task.data[name] = input_data.__class__()
|
||||
elif isinstance(input_data, Mapping):
|
||||
# If we have a map without __setitem__, use a dict
|
||||
my_task.data[name] = dict()
|
||||
else:
|
||||
# For all other types, we'll append to a list
|
||||
my_task.data[name] = list()
|
||||
else:
|
||||
my_task._sync_children(outputs, TaskState.LIKELY)
|
||||
output_data = my_task.data[self.data_output.name]
|
||||
if not isinstance(output_data, (MutableSequence, MutableMapping)):
|
||||
self.raise_data_exception("Only a mutable map (dict) or sequence (list) can be used for output", my_task)
|
||||
if input_data is not output_data and not isinstance(output_data, Mapping) and len(output_data) > 0:
|
||||
self.raise_data_exception(
|
||||
"If the input is not being updated in place, the output must be empty or it must be a map (dict)", my_task)
|
||||
|
||||
def _handle_special_cases(self, my_task):
|
||||
classes = [BusinessRuleTask, ScriptTask, SubWorkflowTask, SubWorkflow, CallActivity]
|
||||
classes = {x.__module__ + "." + x.__name__: x for x in classes}
|
||||
terminate = self._get_loop_completion(my_task)
|
||||
if my_task.task_spec.prevtaskclass in classes.keys() and not terminate:
|
||||
super()._on_complete_hook(my_task)
|
||||
def init_data_output_with_cardinality(self, my_task):
|
||||
|
||||
def _check_inputs(self, my_task):
|
||||
if self.collection is None:
|
||||
return
|
||||
# look for variable in context, if we don't find it, default to 1
|
||||
variable = valueof(my_task, self.times, 1)
|
||||
if self.times.name == self.collection.name and type(variable) == type([]):
|
||||
raise WorkflowTaskException(
|
||||
'If we are updating a collection, then the collection must be a dictionary.',
|
||||
task=my_task)
|
||||
|
||||
def _get_current_var(self, my_task, pos):
|
||||
variable = valueof(my_task, self.times, 1)
|
||||
if is_number(variable):
|
||||
return pos
|
||||
if isinstance(variable,list) and len(variable) >= pos:
|
||||
return variable[pos - 1]
|
||||
elif isinstance(variable,dict) and len(list(variable.keys())) >= pos:
|
||||
return variable[list(variable.keys())[pos - 1]]
|
||||
else:
|
||||
return pos
|
||||
|
||||
def _merge_element_variable(self, my_task, collect, runtimes):
|
||||
if self.collection is not None and self.times.name == self.collection.name:
|
||||
# Update an existing collection (we used the collection as the cardinality)
|
||||
keys = list(collect.keys())
|
||||
if len(keys) < runtimes:
|
||||
msg = f"There is a mismatch between runtimes and the number " \
|
||||
f"items in the collection, please check for empty " \
|
||||
f"collection {self.collection.name}."
|
||||
raise WorkflowTaskException(msg, task=my_task)
|
||||
runtimesvar = keys[runtimes - 1]
|
||||
else:
|
||||
# Use an integer (for arrays)
|
||||
runtimesvar = runtimes
|
||||
|
||||
if self.elementVar in my_task.data and isinstance(my_task.data[self.elementVar], dict):
|
||||
collect[str(runtimesvar)] = DeepMerge.merge(
|
||||
collect.get(runtimesvar, {}),
|
||||
copy.copy(my_task.data[self.elementVar])
|
||||
name = self.data_output.name
|
||||
if name not in my_task.data:
|
||||
my_task.data[name] = list()
|
||||
elif not isinstance(my_task.data[name], MutableMapping) and len(my_task.data[name]) > 0:
|
||||
self.raise_data_exception(
|
||||
"If loop cardinality is specificied, the output must be a map (dict) or empty sequence (list)",
|
||||
my_task
|
||||
)
|
||||
|
||||
def _update_sibling_data(self, my_task, runtimes, runcount, colvarname, collect):
|
||||
def raise_data_exception(self, message, my_task):
|
||||
raise WorkflowDataException(message, my_task, data_input=self.data_input, data_output=self.data_output)
|
||||
|
||||
if (runtimes < runcount) and not my_task.terminate_current_loop and self.loopTask:
|
||||
my_task._set_state(TaskState.READY)
|
||||
my_task._set_internal_data(runtimes=runtimes + 1)
|
||||
my_task.data[self.elementVar] = self._get_current_var(my_task, runtimes + 1)
|
||||
|
||||
class SequentialMultiInstanceTask(MultiInstanceTask):
|
||||
|
||||
def _update_hook(self, my_task):
|
||||
|
||||
if my_task.state != TaskState.WAITING:
|
||||
super()._update_hook(my_task)
|
||||
|
||||
child_running = self.process_children(my_task)
|
||||
if child_running:
|
||||
return False
|
||||
if self.condition is not None and self.check_completion_condition(my_task):
|
||||
return True
|
||||
else:
|
||||
my_task.data.pop(self.elementVar, None)
|
||||
return self.add_next_child(my_task)
|
||||
|
||||
for task in my_task.parent.children:
|
||||
task.data = DeepMerge.merge(
|
||||
task.data,
|
||||
gendict(colvarname.split('/'), collect)
|
||||
)
|
||||
def add_next_child(self, my_task):
|
||||
|
||||
def _on_complete_hook(self, my_task):
|
||||
# do special stuff for non-user tasks
|
||||
self._handle_special_cases(my_task)
|
||||
self.__iteration_complete(my_task)
|
||||
|
||||
def __iteration_complete(self, my_task):
|
||||
|
||||
# this is all about updating the collection for a MI
|
||||
self._check_inputs(my_task)
|
||||
|
||||
# initialize
|
||||
runcount = self._get_count(my_task)
|
||||
runtimes = int(my_task._get_internal_data('runtimes', 1))
|
||||
|
||||
if self.collection is not None:
|
||||
colvarname = self.collection.name
|
||||
if self.data_input is not None:
|
||||
key_or_index, item = self.get_next_input_item(my_task)
|
||||
else:
|
||||
colvarname = my_task.task_spec.name
|
||||
key_or_index, item = self.get_next_index(my_task)
|
||||
|
||||
collect = valueof(my_task, self.collection, {})
|
||||
if item is not None:
|
||||
if my_task.state != TaskState.WAITING:
|
||||
my_task._set_state(TaskState.WAITING)
|
||||
self.create_child(my_task, item, key_or_index)
|
||||
else:
|
||||
return True
|
||||
|
||||
self._merge_element_variable(my_task, collect, runtimes)
|
||||
def get_next_input_item(self, my_task):
|
||||
|
||||
self._update_sibling_data(my_task, runtimes, runcount, colvarname, collect)
|
||||
input_data = my_task.data[self.data_input.name]
|
||||
remaining = my_task.internal_data.get('remaining')
|
||||
|
||||
# please see MultiInstance code for previous version
|
||||
outputs = []
|
||||
outputs += self.outputs
|
||||
if remaining is None:
|
||||
remaining = self.init_remaining_items(my_task)
|
||||
if self.data_output is not None:
|
||||
self.init_data_output_with_input_data(my_task, input_data)
|
||||
|
||||
if not isinstance(my_task.task_spec,SubWorkflowTask):
|
||||
my_task._sync_children(outputs, TaskState.FUTURE)
|
||||
if len(remaining) > 0:
|
||||
if isinstance(input_data, (Mapping, Sequence)):
|
||||
# In this case, we want to preserve a key or index
|
||||
# We definitely need it if the output is a map, or if we're updating a sequence in place
|
||||
key_or_index, item = remaining[0], input_data[remaining[0]]
|
||||
else:
|
||||
key_or_index, item = None, remaining[0]
|
||||
my_task.internal_data['remaining'] = remaining[1:]
|
||||
return key_or_index, item
|
||||
else:
|
||||
return None, None
|
||||
|
||||
def init_remaining_items(self, my_task):
|
||||
|
||||
if self.data_input.name not in my_task.data:
|
||||
self.raise_data_exception("Missing data input for multiinstance task", my_task)
|
||||
input_data = my_task.data[self.data_input.name]
|
||||
|
||||
# This is internal bookkeeping, so we know where we are; we get the actual items when we create the task
|
||||
if isinstance(input_data, Sequence):
|
||||
# For lists, keep track of the index
|
||||
remaining = [idx for idx in range(len(input_data))]
|
||||
elif isinstance(input_data, Mapping):
|
||||
# For dicts, use the keys
|
||||
remaining = [key for key in input_data]
|
||||
elif isinstance(input_data, Iterable):
|
||||
# Otherwise, just copy the objects as a last resort
|
||||
remaining = [val for val in input_data]
|
||||
else:
|
||||
self.raise_data_exception("Multiinstance data input must be iterable", my_task)
|
||||
return remaining
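# For example, a data input of ['a', 'b', 'c'] yields remaining == [0, 1, 2] (indices),
# while {'x': 1, 'y': 2} yields ['x', 'y'] (keys); the items themselves are looked up
# again when each child task is created.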
|
||||
|
||||
def get_next_index(self, my_task):
|
||||
|
||||
current = my_task.internal_data.get('current')
|
||||
if current is None:
|
||||
current = 0
|
||||
if self.data_output is not None:
|
||||
self.init_data_output_with_cardinality(my_task)
|
||||
|
||||
cardinality = my_task.internal_data.get('cardinality')
|
||||
if cardinality is None:
|
||||
# In case the evaluated expression changes during execution
|
||||
cardinality = my_task.workflow.script_engine.evaluate(my_task, self.cardinality)
|
||||
my_task.internal_data['cardinality'] = cardinality
|
||||
|
||||
if current < cardinality:
|
||||
# If using loop cardinality, no data input was specified, so use the index as the "item"
|
||||
my_task.internal_data['current'] = current + 1
|
||||
return None, current
|
||||
else:
|
||||
return None, None
|
||||
|
||||
|
||||
def getDynamicMIClass(id,prevclass):
|
||||
id = re.sub('(.+)_[0-9]$','\\1',id)
|
||||
return type(id + '_class', (
|
||||
MultiInstanceTask, prevclass), {})
|
||||
class ParallelMultiInstanceTask(MultiInstanceTask):
|
||||
|
||||
def _update_hook(self, my_task):
|
||||
|
||||
if my_task.state != TaskState.WAITING:
|
||||
super()._update_hook(my_task)
|
||||
self.create_children(my_task)
|
||||
|
||||
child_running = self.process_children(my_task)
|
||||
if self.condition is not None and self.check_completion_condition(my_task):
|
||||
for child in my_task.children:
|
||||
if child.task_spec.name == self.task_spec and child.state != TaskState.COMPLETED:
|
||||
child.cancel()
|
||||
return True
|
||||
return not child_running
|
||||
|
||||
def create_children(self, my_task):
|
||||
|
||||
data_input = my_task.data[self.data_input.name] if self.data_input is not None else None
|
||||
if data_input is not None:
|
||||
# We have to preserve the key or index for maps/sequences, in case we're updating in place, or the output is a mapping
|
||||
if isinstance(data_input, Mapping):
|
||||
children = data_input.items()
|
||||
elif isinstance(data_input, Sequence):
|
||||
children = enumerate(data_input)
|
||||
else:
|
||||
# We can use other iterables as inputs, but key or index isn't meaningful
|
||||
children = ((None, item) for item in data_input)
|
||||
else:
|
||||
# For tasks specifying the cardinality, use the index as the "item"
|
||||
cardinality = my_task.workflow.script_engine.evaluate(my_task, self.cardinality)
|
||||
children = ((None, idx) for idx in range(cardinality))
|
||||
|
||||
if not my_task.internal_data.get('started', False):
|
||||
|
||||
if self.data_output is not None:
|
||||
if self.data_input is not None:
|
||||
self.init_data_output_with_input_data(my_task, my_task.data[self.data_input.name])
|
||||
else:
|
||||
self.init_data_output_with_cardinality(my_task)
|
||||
|
||||
my_task._set_state(TaskState.WAITING)
|
||||
for key_or_index, item in children:
|
||||
self.create_child(my_task, item, key_or_index)
|
||||
|
||||
my_task.internal_data['started'] = True
|
||||
else:
|
||||
return len(my_task.internal_data.get('merged', [])) == len(children)
|
||||
|
|
|
@@ -25,62 +25,44 @@ class SubWorkflowTask(BpmnSpecMixin):
|
|||
def spec_type(self):
|
||||
return 'Subprocess'
|
||||
|
||||
def _on_ready_before_hook(self, my_task):
|
||||
subworkflow = my_task.workflow.create_subprocess(my_task, self.spec, self.name)
|
||||
subworkflow.completed_event.connect(self._on_subworkflow_completed, my_task)
|
||||
subworkflow.data = deepcopy(my_task.workflow.data)
|
||||
|
||||
def _on_ready_hook(self, my_task):
|
||||
|
||||
super()._on_ready_hook(my_task)
|
||||
self.start_workflow(my_task)
|
||||
|
||||
def _on_subworkflow_completed(self, subworkflow, my_task):
|
||||
|
||||
if len(subworkflow.spec.data_outputs) == 0:
|
||||
# Copy all workflow data if no outputs are specified
|
||||
my_task.data = deepcopy(subworkflow.last_task.data)
|
||||
else:
|
||||
end = subworkflow.get_tasks_from_spec_name('End', workflow=subworkflow)
|
||||
# Otherwise only copy data with the specified names
|
||||
for var in subworkflow.spec.data_outputs:
|
||||
try:
|
||||
var.copy(end[0], my_task, data_output=True)
|
||||
except WorkflowDataException as wde:
|
||||
wde.add_note("A Data Output was not provided as promised.")
|
||||
raise wde
|
||||
|
||||
self.update_data(my_task, subworkflow)
|
||||
my_task._set_state(TaskState.READY)
|
||||
|
||||
def _update_hook(self, my_task):
|
||||
wf = my_task.workflow._get_outermost_workflow(my_task)
|
||||
if my_task.id not in wf.subprocesses:
|
||||
return super()._update_hook(my_task)
|
||||
super()._update_hook(my_task)
|
||||
self.create_workflow(my_task)
|
||||
return True
|
||||
|
||||
def _on_cancel(self, my_task):
|
||||
subworkflow = my_task.workflow.get_subprocess(my_task)
|
||||
if subworkflow is not None:
|
||||
subworkflow.cancel()
|
||||
|
||||
def start_workflow(self, my_task):
|
||||
|
||||
subworkflow = my_task.workflow.get_subprocess(my_task)
|
||||
def copy_data(self, my_task, subworkflow):
|
||||
# There is only one copy of any given data object, so it should be updated immediately
|
||||
subworkflow.data = my_task.workflow.data
|
||||
start = subworkflow.get_tasks_from_spec_name('Start', workflow=subworkflow)
|
||||
start[0].set_data(**my_task.data)
|
||||
|
||||
if len(subworkflow.spec.data_inputs) == 0:
|
||||
# Copy all task data into start task if no inputs specified
|
||||
start[0].set_data(**my_task.data)
|
||||
else:
|
||||
# Otherwise copy only task data with the specified names
|
||||
for var in subworkflow.spec.data_inputs:
|
||||
try:
|
||||
var.copy(my_task, start[0], data_input=True)
|
||||
except WorkflowDataException as wde:
|
||||
wde.add_note("You are missing a required Data Input for a call activity.")
|
||||
raise wde
|
||||
def update_data(self, my_task, subworkflow):
|
||||
my_task.data = deepcopy(subworkflow.last_task.data)
|
||||
|
||||
def create_workflow(self, my_task):
|
||||
subworkflow = my_task.workflow.create_subprocess(my_task, self.spec, self.name)
|
||||
subworkflow.completed_event.connect(self._on_subworkflow_completed, my_task)
|
||||
|
||||
def start_workflow(self, my_task):
|
||||
subworkflow = my_task.workflow.get_subprocess(my_task)
|
||||
self.copy_data(my_task, subworkflow)
|
||||
for child in subworkflow.task_tree.children:
|
||||
child.task_spec._update(child)
|
||||
|
||||
my_task._set_state(TaskState.WAITING)
|
||||
|
||||
def task_will_set_children_future(self, my_task):
|
||||
|
@@ -92,6 +74,40 @@ class CallActivity(SubWorkflowTask):
|
|||
def __init__(self, wf_spec, name, subworkflow_spec, **kwargs):
|
||||
super(CallActivity, self).__init__(wf_spec, name, subworkflow_spec, False, **kwargs)
|
||||
|
||||
def copy_data(self, my_task, subworkflow):
|
||||
|
||||
start = subworkflow.get_tasks_from_spec_name('Start', workflow=subworkflow)
|
||||
if subworkflow.spec.io_specification is None or len(subworkflow.spec.io_specification.data_inputs) == 0:
|
||||
# Copy all task data into start task if no inputs specified
|
||||
start[0].set_data(**my_task.data)
|
||||
else:
|
||||
# Otherwise copy only task data with the specified names
|
||||
for var in subworkflow.spec.io_specification.data_inputs:
|
||||
if var.name not in my_task.data:
|
||||
raise WorkflowDataException(
|
||||
"You are missing a required Data Input for a call activity.",
|
||||
task=my_task,
|
||||
data_input=var,
|
||||
)
|
||||
start[0].data[var.name] = my_task.data[var.name]
|
||||
|
||||
def update_data(self, my_task, subworkflow):
|
||||
|
||||
if subworkflow.spec.io_specification is None:
|
||||
# Copy all workflow data if no outputs are specified
|
||||
my_task.data = deepcopy(subworkflow.last_task.data)
|
||||
else:
|
||||
end = subworkflow.get_tasks_from_spec_name('End', workflow=subworkflow)
|
||||
# Otherwise only copy data with the specified names
|
||||
for var in subworkflow.spec.io_specification.data_outputs:
|
||||
if var.name not in end[0].data:
|
||||
raise WorkflowDataException(
|
||||
f"The Data Output was not available in the subprocess output.",
|
||||
task=my_task,
|
||||
data_output=var,
|
||||
)
|
||||
my_task.data[var.name] = end[0].data[var.name]
|
||||
|
||||
@property
|
||||
def spec_type(self):
|
||||
return 'Call Activity'
|
||||
|
|
|
@@ -0,0 +1,57 @@
|
|||
import logging
|
||||
from copy import deepcopy
|
||||
|
||||
from SpiffWorkflow.bpmn.exceptions import WorkflowDataException
|
||||
|
||||
data_log = logging.getLogger('spiff.data')
|
||||
|
||||
|
||||
class BpmnDataSpecification:
|
||||
|
||||
def __init__(self, name, description=None):
|
||||
"""
|
||||
:param name: the variable (the BPMN ID)
|
||||
:param description: a human readable name (the BPMN name)
|
||||
"""
|
||||
self.name = name
|
||||
self.description = description or name
|
||||
# In the future, we can add schemas defining the objects here.
|
||||
|
||||
def get(self, my_task, **kwargs):
|
||||
raise NotImplementedError
|
||||
|
||||
def set(self, my_task, **kwargs):
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class BpmnIoSpecification:
|
||||
|
||||
def __init__(self, data_inputs, data_outputs):
|
||||
self.data_inputs = data_inputs
|
||||
self.data_outputs = data_outputs
|
||||
|
||||
|
||||
class DataObject(BpmnDataSpecification):
|
||||
"""Copy data between process variables and tasks"""
|
||||
|
||||
def get(self, my_task):
|
||||
"""Copy a value form the workflow data to the task data."""
|
||||
if self.name not in my_task.workflow.data:
|
||||
message = f"The data object could not be read; '{self.name}' does not exist in the process."
|
||||
raise WorkflowDataException(message, my_task, data_input=self)
|
||||
my_task.data[self.name] = deepcopy(my_task.workflow.data[self.name])
|
||||
data_log.info(f'Read workflow variable {self.name}', extra=my_task.log_info())
|
||||
|
||||
def set(self, my_task):
|
||||
"""Copy a value from the task data to the workflow data"""
|
||||
if self.name not in my_task.data:
|
||||
message = f"A data object could not be set; '{self.name}' not exist in the task."
|
||||
raise WorkflowDataException(message, my_task, data_output=self)
|
||||
my_task.workflow.data[self.name] = deepcopy(my_task.data[self.name])
|
||||
del my_task.data[self.name]
|
||||
data_log.info(f'Set workflow variable {self.name}', extra=my_task.log_info())
|
||||
|
||||
|
||||
class TaskDataReference(BpmnDataSpecification):
|
||||
"""A representation of task data that can be used in a BPMN diagram"""
|
||||
pass
|
|
@@ -50,6 +50,7 @@ class CatchingEvent(Simple, BpmnSpecMixin):
|
|||
|
||||
def _update_hook(self, my_task):
|
||||
|
||||
super()._update_hook(my_task)
|
||||
# None events don't propagate, so as soon as we're ready, we fire our event
|
||||
if isinstance(self.event_definition, NoneEventDefinition):
|
||||
my_task._set_internal_data(event_fired=True)
|
||||
|
@@ -72,6 +73,15 @@ class CatchingEvent(Simple, BpmnSpecMixin):
|
|||
self.event_definition.reset(my_task)
|
||||
super(CatchingEvent, self)._on_complete_hook(my_task)
|
||||
|
||||
# This fixes the problem of boundary events remaining cancelled if the task is reused.
|
||||
# It pains me to add these methods, but unless we can get rid of the loop reset task we're stuck
|
||||
|
||||
def task_should_set_children_future(self, my_task):
|
||||
return True
|
||||
|
||||
def task_will_set_children_future(self, my_task):
|
||||
my_task.internal_data = {}
|
||||
|
||||
|
||||
class ThrowingEvent(Simple, BpmnSpecMixin):
|
||||
"""Base Task Spec for Throwing Event nodes."""
|
||||
|
|
|
@@ -74,7 +74,7 @@ class BpmnWorkflow(Workflow):
|
|||
self.__script_engine = engine
|
||||
|
||||
def create_subprocess(self, my_task, spec_name, name):
|
||||
|
||||
# This creates a subprocess for an existing task
|
||||
workflow = self._get_outermost_workflow(my_task)
|
||||
subprocess = BpmnWorkflow(
|
||||
workflow.subprocess_specs[spec_name], name=name,
|
||||
|
@@ -91,15 +91,16 @@ class BpmnWorkflow(Workflow):
|
|||
workflow = self._get_outermost_workflow(my_task)
|
||||
return workflow.subprocesses.get(my_task.id)
|
||||
|
||||
def add_subprocess(self, spec_name, name):
|
||||
|
||||
def connect_subprocess(self, spec_name, name):
|
||||
# This creates a new task associated with a process when an event that kicks off a process is received
|
||||
new = CallActivity(self.spec, name, spec_name)
|
||||
self.spec.start.connect(new)
|
||||
task = Task(self, new)
|
||||
task._ready()
|
||||
start = self.get_tasks_from_spec_name('Start', workflow=self)[0]
|
||||
start.children.append(task)
|
||||
task.parent = start
|
||||
# This (indirectly) calls create_subprocess
|
||||
task.task_spec._update(task)
|
||||
return self.subprocesses[task.id]
|
||||
|
||||
def _get_outermost_workflow(self, task=None):
|
||||
|
@@ -114,7 +115,7 @@ class BpmnWorkflow(Workflow):
|
|||
start = sp.get_tasks_from_spec_name(task_spec.name)
|
||||
if len(start) and start[0].state == TaskState.WAITING:
|
||||
return sp
|
||||
return self.add_subprocess(wf_spec.name, f'{wf_spec.name}_{len(self.subprocesses)}')
|
||||
return self.connect_subprocess(wf_spec.name, f'{wf_spec.name}_{len(self.subprocesses)}')
|
||||
|
||||
def catch(self, event_definition, correlations=None):
|
||||
"""
|
||||
|
|
|
@@ -1,16 +1,37 @@
|
|||
from ..specs.UserTask import UserTask
|
||||
from ..parser.task_spec import UserTaskParser
|
||||
from ...bpmn.parser.BpmnParser import full_tag
|
||||
|
||||
from SpiffWorkflow.bpmn.parser.BpmnParser import full_tag, DEFAULT_NSMAP
|
||||
|
||||
from SpiffWorkflow.bpmn.specs.ManualTask import ManualTask
|
||||
from SpiffWorkflow.bpmn.specs.NoneTask import NoneTask
|
||||
from SpiffWorkflow.bpmn.specs.ScriptTask import ScriptTask
|
||||
from SpiffWorkflow.bpmn.specs.SubWorkflowTask import CallActivity, TransactionSubprocess
|
||||
|
||||
from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser
|
||||
from SpiffWorkflow.dmn.specs.BusinessRuleTask import BusinessRuleTask
|
||||
from SpiffWorkflow.camunda.parser.task_spec import BusinessRuleTaskParser
|
||||
from SpiffWorkflow.camunda.specs.UserTask import UserTask
|
||||
from SpiffWorkflow.camunda.parser.task_spec import (
|
||||
CamundaTaskParser,
|
||||
BusinessRuleTaskParser,
|
||||
UserTaskParser,
|
||||
CallActivityParser,
|
||||
SubWorkflowParser,
|
||||
ScriptTaskParser,
|
||||
CAMUNDA_MODEL_NS
|
||||
)
|
||||
|
||||
from SpiffWorkflow.bpmn.specs.events.StartEvent import StartEvent
|
||||
from SpiffWorkflow.bpmn.specs.events.EndEvent import EndEvent
|
||||
from SpiffWorkflow.bpmn.specs.events.IntermediateEvent import IntermediateThrowEvent, IntermediateCatchEvent, BoundaryEvent
|
||||
from .event_parsers import CamundaStartEventParser, CamundaEndEventParser, \
|
||||
CamundaIntermediateCatchEventParser, CamundaIntermediateThrowEventParser, CamundaBoundaryEventParser
|
||||
from .event_parsers import (
|
||||
CamundaStartEventParser,
|
||||
CamundaEndEventParser,
|
||||
CamundaIntermediateCatchEventParser,
|
||||
CamundaIntermediateThrowEventParser,
|
||||
CamundaBoundaryEventParser,
|
||||
)
|
||||
|
||||
NSMAP = DEFAULT_NSMAP.copy()
|
||||
NSMAP['camunda'] = CAMUNDA_MODEL_NS
|
||||
|
||||
|
||||
class CamundaParser(BpmnDmnParser):
|
||||
|
@@ -23,4 +44,13 @@ class CamundaParser(BpmnDmnParser):
|
|||
full_tag('intermediateThrowEvent'): (CamundaIntermediateThrowEventParser, IntermediateThrowEvent),
|
||||
full_tag('boundaryEvent'): (CamundaBoundaryEventParser, BoundaryEvent),
|
||||
full_tag('businessRuleTask'): (BusinessRuleTaskParser, BusinessRuleTask),
|
||||
full_tag('task'): (CamundaTaskParser, NoneTask),
|
||||
full_tag('manualTask'): (CamundaTaskParser, ManualTask),
|
||||
full_tag('scriptTask'): (ScriptTaskParser, ScriptTask),
|
||||
full_tag('subProcess'): (SubWorkflowParser, CallActivity),
|
||||
full_tag('callActivity'): (CallActivityParser, CallActivity),
|
||||
full_tag('transaction'): (SubWorkflowParser, TransactionSubprocess),
|
||||
}
|
||||
|
||||
def __init__(self, namespaces=None, validator=None):
|
||||
super().__init__(namespaces=namespaces or NSMAP, validator=validator)
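A hedged usage sketch (the file name and process id are placeholders, and the calls
assume the base BpmnParser's add_bpmn_file/get_spec interface):

parser = CamundaParser()
parser.add_bpmn_file('diagram.bpmn')
spec = parser.get_spec('my_process')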
|
|
@@ -5,9 +5,6 @@ from SpiffWorkflow.camunda.specs.events.event_definitions import MessageEventDef
|
|||
from SpiffWorkflow.bpmn.parser.util import one
|
||||
|
||||
|
||||
CAMUNDA_MODEL_NS = 'http://camunda.org/schema/1.0/bpmn'
|
||||
|
||||
|
||||
class CamundaEventDefinitionParser(EventDefinitionParser):
|
||||
|
||||
def parse_message_event(self, message_event):
|
||||
|
@@ -22,8 +19,8 @@ class CamundaEventDefinitionParser(EventDefinitionParser):
|
|||
name = message_event.getparent().get('name')
|
||||
correlations = {}
|
||||
|
||||
payload = message_event.attrib.get('{' + CAMUNDA_MODEL_NS + '}expression')
|
||||
result_var = message_event.attrib.get('{' + CAMUNDA_MODEL_NS + '}resultVariable')
|
||||
payload = self.attribute('expression', 'camunda', message_event)
|
||||
result_var = self.attribute('resultVariable', 'camunda', message_event)
|
||||
return MessageEventDefinition(name, correlations, payload, result_var)
|
||||
|
||||
|
||||
|
|
|
@@ -1,22 +1,69 @@
|
|||
from ...camunda.specs.UserTask import Form, FormField, EnumFormField
|
||||
|
||||
from SpiffWorkflow.bpmn.specs.data_spec import TaskDataReference
|
||||
from SpiffWorkflow.bpmn.parser.util import one
|
||||
from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException
|
||||
from SpiffWorkflow.bpmn.parser.TaskParser import TaskParser
|
||||
from SpiffWorkflow.bpmn.parser.node_parser import DEFAULT_NSMAP
|
||||
from SpiffWorkflow.bpmn.parser.task_parsers import SubprocessParser
|
||||
|
||||
from SpiffWorkflow.dmn.specs.BusinessRuleTask import BusinessRuleTask
|
||||
|
||||
from SpiffWorkflow.camunda.specs.multiinstance_task import SequentialMultiInstanceTask, ParallelMultiInstanceTask
|
||||
|
||||
CAMUNDA_MODEL_NS = 'http://camunda.org/schema/1.0/bpmn'
|
||||
|
||||
|
||||
class CamundaTaskParser(TaskParser):
|
||||
|
||||
class BusinessRuleTaskParser(TaskParser):
|
||||
def parse_extensions(self, node=None):
|
||||
extensions = {}
|
||||
extension_nodes = self.xpath('.//bpmn:extensionElements/camunda:properties/camunda:property')
|
||||
for ex_node in extension_nodes:
|
||||
extensions[ex_node.get('name')] = ex_node.get('value')
|
||||
return extensions
|
||||
|
||||
def _add_multiinstance_task(self, loop_characteristics):
|
||||
|
||||
sequential = loop_characteristics.get('isSequential') == 'true'
|
||||
prefix = 'bpmn:multiInstanceLoopCharacteristics'
|
||||
|
||||
cardinality = self.xpath(f'./{prefix}/bpmn:loopCardinality')
|
||||
cardinality = cardinality[0].text if len(cardinality) > 0 else None
|
||||
collection = self.attribute('collection', 'camunda', loop_characteristics)
|
||||
if cardinality is None and collection is None:
|
||||
self.raise_validation_exception('A multiinstance task must specify a cardinality or a collection')
|
||||
|
||||
element_var = self.attribute('elementVariable', 'camunda', loop_characteristics)
|
||||
condition = self.xpath(f'./{prefix}/bpmn:completionCondition')
|
||||
condition = condition[0].text if len(condition) > 0 else None
|
||||
|
||||
original = self.spec.task_specs.pop(self.task.name)
|
||||
|
||||
# We won't include the data input, because sometimes it is the collection, and other times it
|
||||
# is the cardinality. The old MI task evaluated the cardinality at run time and treated it like
|
||||
# a cardinality if it evaluated to an int, and as the data input if it evaluated to a collection.
|
||||
# I highly doubt that this is the way Camunda worked then, and I know that's not how it works
|
||||
# now, and I think we should ultimately replace this with something that corresponds to how
|
||||
# Camunda actually handles things; however, for the time being, I am just going to try to
|
||||
# replicate the old behavior as closely as possible.
|
||||
# In our subclassed MI task, we'll update the BPMN multiinstance attributes when the task starts.
|
||||
params = {
|
||||
'task_spec': '',
|
||||
'cardinality': cardinality,
|
||||
'data_output': TaskDataReference(collection) if collection is not None else None,
|
||||
'output_item': TaskDataReference(element_var) if element_var is not None else None,
|
||||
'condition': condition,
|
||||
}
|
||||
if sequential:
|
||||
self.task = SequentialMultiInstanceTask(self.spec, original.name, **params)
|
||||
else:
|
||||
self.task = ParallelMultiInstanceTask(self.spec, original.name, **params)
|
||||
self._copy_task_attrs(original)
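# For example, loop characteristics declared as
#   <bpmn:multiInstanceLoopCharacteristics isSequential="true"
#       camunda:collection="items" camunda:elementVariable="item"/>
# replace the original spec with a SequentialMultiInstanceTask whose data_output and
# output_item are TaskDataReferences for 'items' and 'item'.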
|
||||
|
||||
|
||||
class BusinessRuleTaskParser(CamundaTaskParser):
|
||||
dmn_debug = None
|
||||
|
||||
def __init__(self, process_parser, spec_class, node, lane=None):
|
||||
nsmap = DEFAULT_NSMAP.copy()
|
||||
nsmap.update({'camunda': CAMUNDA_MODEL_NS})
|
||||
super(BusinessRuleTaskParser, self).__init__(process_parser, spec_class, node, nsmap, lane)
|
||||
|
||||
def create_task(self):
|
||||
decision_ref = self.get_decision_ref(self.node)
|
||||
return BusinessRuleTask(self.spec, self.get_task_spec_name(),
|
||||
|
@@ -29,26 +76,9 @@ class BusinessRuleTaskParser(TaskParser):
|
|||
def get_decision_ref(node):
|
||||
return node.attrib['{' + CAMUNDA_MODEL_NS + '}decisionRef']
|
||||
|
||||
def _on_trigger(self, my_task):
|
||||
pass
|
||||
|
||||
def serialize(self, serializer, **kwargs):
|
||||
pass
|
||||
|
||||
@classmethod
|
||||
def deserialize(cls, serializer, wf_spec, s_state, **kwargs):
|
||||
pass
|
||||
|
||||
|
||||
class UserTaskParser(TaskParser):
|
||||
"""
|
||||
Base class for parsing User Tasks
|
||||
"""
|
||||
|
||||
def __init__(self, process_parser, spec_class, node, lane=None):
|
||||
nsmap = DEFAULT_NSMAP.copy()
|
||||
nsmap.update({'camunda': CAMUNDA_MODEL_NS})
|
||||
super(UserTaskParser, self).__init__(process_parser, spec_class, node, nsmap, lane)
|
||||
class UserTaskParser(CamundaTaskParser):
|
||||
"""Base class for parsing User Tasks"""
|
||||
|
||||
def create_task(self):
|
||||
form = self.get_form()
|
||||
|
@@ -62,7 +92,7 @@ class UserTaskParser(TaskParser):
|
|||
details from that form and construct a form model from it. """
|
||||
form = Form()
|
||||
try:
|
||||
form.key = self.node.attrib['{' + CAMUNDA_MODEL_NS + '}formKey']
|
||||
form.key = self.attribute('formKey', 'camunda')
|
||||
except KeyError:
|
||||
return form
|
||||
for xml_field in self.xpath('.//camunda:formData/camunda:formField'):
|
||||
|
@@ -76,12 +106,13 @@ class UserTaskParser(TaskParser):
|
|||
field.label = xml_field.get('label')
|
||||
field.default_value = xml_field.get('defaultValue')
|
||||
|
||||
prefix = '{' + self.nsmap.get('camunda') + '}'
|
||||
for child in xml_field:
|
||||
if child.tag == '{' + CAMUNDA_MODEL_NS + '}properties':
|
||||
if child.tag == f'{prefix}properties':
|
||||
for p in child:
|
||||
field.add_property(p.get('id'), p.get('value'))
|
||||
|
||||
if child.tag == '{' + CAMUNDA_MODEL_NS + '}validation':
|
||||
if child.tag == f'{prefix}validation':
|
||||
for v in child:
|
||||
field.add_validation(v.get('name'), v.get('config'))
|
||||
|
||||
|
@@ -92,6 +123,59 @@ class UserTaskParser(TaskParser):
|
|||
field = EnumFormField()
|
||||
|
||||
for child in xml_field:
|
||||
if child.tag == '{' + CAMUNDA_MODEL_NS + '}value':
|
||||
if child.tag == '{' + self.nsmap.get('camunda') + '}value':
|
||||
field.add_option(child.get('id'), child.get('name'))
|
||||
return field
|
||||
|
||||
|
||||
# These classes need to be able to use the overridden _add_multiinstance_task method
|
||||
# so they have to inherit from CamundaTaskParser. Therefore, the parsers have to just
|
||||
# be copied, because both they and the CamundaTaskParser inherit from the base task
|
||||
# parser. I am looking forward to the day when I can replace all of this with
|
||||
# something sane and sensible.
|
||||
|
||||
class SubWorkflowParser(CamundaTaskParser):
|
||||
|
||||
def create_task(self):
|
||||
subworkflow_spec = SubprocessParser.get_subprocess_spec(self)
|
||||
return self.spec_class(
|
||||
self.spec, self.get_task_spec_name(), subworkflow_spec,
|
||||
lane=self.lane, position=self.position,
|
||||
description=self.node.get('name', None))
|
||||
|
||||
|
||||
class CallActivityParser(CamundaTaskParser):
|
||||
"""Parses a CallActivity node."""
|
||||
|
||||
def create_task(self):
|
||||
subworkflow_spec = SubprocessParser.get_call_activity_spec(self)
|
||||
return self.spec_class(
|
||||
self.spec, self.get_task_spec_name(), subworkflow_spec,
|
||||
lane=self.lane, position=self.position,
|
||||
description=self.node.get('name', None))
|
||||
|
||||
|
||||
class ScriptTaskParser(TaskParser):
|
||||
"""
|
||||
Parses a script task
|
||||
"""
|
||||
|
||||
def create_task(self):
|
||||
script = self.get_script()
|
||||
return self.spec_class(self.spec, self.get_task_spec_name(), script,
|
||||
lane=self.lane,
|
||||
position=self.position,
|
||||
description=self.node.get('name', None))
|
||||
|
||||
def get_script(self):
|
||||
"""
|
||||
Gets the script content from the node. A subclass can override this
|
||||
method, if the script needs to be pre-parsed. The result of this call
|
||||
will be passed to the Script Engine for execution.
|
||||
"""
|
||||
try:
|
||||
return one(self.xpath('.//bpmn:script')).text
|
||||
except AssertionError as ae:
|
||||
raise ValidationException(
|
||||
f"Invalid Script Task. No Script Provided. {ae}",
|
||||
node=self.node, file_name=self.filename)
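
The get_script docstring above marks this method as an override point. A minimal sketch of a subclass that pre-parses the script before it reaches the script engine could look like the following; the class name and the textwrap-based cleanup are illustrative assumptions, not part of SpiffWorkflow, and it relies on the ScriptTaskParser defined above.

import textwrap

class DedentingScriptTaskParser(ScriptTaskParser):
    # Hypothetical subclass: normalizes the indentation of the bpmn:script body
    # before the result is handed to the script engine for execution.
    def get_script(self):
        raw = super().get_script()           # reuse the base lookup and validation
        return textwrap.dedent(raw).strip()  # the pre-parsing step the docstring allows for
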
|
||||
|
|
|
@ -1,15 +1,25 @@
|
|||
from copy import deepcopy
|
||||
|
||||
from SpiffWorkflow.bpmn.serializer.workflow import DEFAULT_SPEC_CONFIG
|
||||
from SpiffWorkflow.bpmn.serializer.task_spec import UserTaskConverter as DefaultUserTaskConverter
|
||||
from SpiffWorkflow.bpmn.serializer.task_spec import (
|
||||
UserTaskConverter as DefaultUserTaskConverter,
|
||||
ParallelMultiInstanceTaskConverter as DefaultParallelMIConverter,
|
||||
SequentialMultiInstanceTaskConverter as DefaultSequentialMIConverter,
|
||||
)
|
||||
from SpiffWorkflow.bpmn.serializer.event_definition import MessageEventDefinitionConverter as DefaultMessageEventConverter
|
||||
|
||||
from .task_spec import UserTaskConverter
|
||||
|
||||
from .task_spec import UserTaskConverter, ParallelMultiInstanceTaskConverter, SequentialMultiInstanceTaskConverter
|
||||
from .event_definition import MessageEventDefinitionConverter
|
||||
|
||||
|
||||
CAMUNDA_SPEC_CONFIG = deepcopy(DEFAULT_SPEC_CONFIG)
|
||||
CAMUNDA_SPEC_CONFIG['task_specs'].remove(DefaultUserTaskConverter)
|
||||
CAMUNDA_SPEC_CONFIG['task_specs'].append(UserTaskConverter)
|
||||
CAMUNDA_SPEC_CONFIG['task_specs'].remove(DefaultParallelMIConverter)
|
||||
CAMUNDA_SPEC_CONFIG['task_specs'].append(ParallelMultiInstanceTaskConverter)
|
||||
CAMUNDA_SPEC_CONFIG['task_specs'].remove(DefaultSequentialMIConverter)
|
||||
CAMUNDA_SPEC_CONFIG['task_specs'].append(SequentialMultiInstanceTaskConverter)
|
||||
|
||||
CAMUNDA_SPEC_CONFIG['event_definitions'].remove(DefaultMessageEventConverter)
|
||||
CAMUNDA_SPEC_CONFIG['event_definitions'].append(MessageEventDefinitionConverter)
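
A usage sketch for the config assembled above, assuming BpmnWorkflowSerializer.configure_workflow_spec_converter accepts a spec-config dict of this shape (as in recent SpiffWorkflow releases); treat the exact calls as assumptions rather than guarantees.

from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer

# Build a task-spec converter from the Camunda config and hand it to the serializer.
wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter(CAMUNDA_SPEC_CONFIG)
serializer = BpmnWorkflowSerializer(wf_spec_converter)
# serializer.serialize_json(workflow) / serializer.deserialize_json(dump) would then round-trip
# workflows whose specs use the Camunda converters registered above.
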
|
||||
|
|
|
@ -1,6 +1,8 @@
|
|||
from ...bpmn.serializer.helpers.spec import TaskSpecConverter
|
||||
from SpiffWorkflow.bpmn.serializer.helpers.spec import TaskSpecConverter
|
||||
from SpiffWorkflow.bpmn.serializer.task_spec import MultiInstanceTaskConverter
|
||||
|
||||
from ..specs.UserTask import UserTask, Form
|
||||
from SpiffWorkflow.camunda.specs.UserTask import UserTask, Form
|
||||
from SpiffWorkflow.camunda.specs.multiinstance_task import ParallelMultiInstanceTask, SequentialMultiInstanceTask
|
||||
|
||||
class UserTaskConverter(TaskSpecConverter):
|
||||
|
||||
|
@ -32,3 +34,12 @@ class UserTaskConverter(TaskSpecConverter):
|
|||
new['options'] = [ opt.__dict__ for opt in field.options ]
|
||||
dct['fields'].append(new)
|
||||
return dct
|
||||
|
||||
|
||||
class ParallelMultiInstanceTaskConverter(MultiInstanceTaskConverter):
|
||||
def __init__(self, registry):
|
||||
super().__init__(ParallelMultiInstanceTask, registry)
|
||||
|
||||
class SequentialMultiInstanceTaskConverter(MultiInstanceTaskConverter):
|
||||
def __init__(self, registry):
|
||||
super().__init__(SequentialMultiInstanceTask, registry)
|
|
@ -0,0 +1,51 @@
|
|||
from SpiffWorkflow.task import TaskState
|
||||
from SpiffWorkflow.bpmn.specs.BpmnSpecMixin import BpmnSpecMixin
|
||||
from SpiffWorkflow.bpmn.specs.data_spec import TaskDataReference
|
||||
|
||||
from SpiffWorkflow.bpmn.specs.MultiInstanceTask import (
|
||||
SequentialMultiInstanceTask as BpmnSequentialMITask,
|
||||
ParallelMultiInstanceTask as BpmnParallelMITask,
|
||||
)
|
||||
|
||||
# This is an abomination, but I don't see any other way to replicate the older MI functionality
|
||||
|
||||
def update_task_spec(my_task):
|
||||
|
||||
task_spec = my_task.task_spec
|
||||
if my_task.state != TaskState.WAITING:
|
||||
# We have to fix up our state before we can run the parent update, but we still need
|
||||
# to inherit our parent data.
|
||||
BpmnSpecMixin._update_hook(task_spec, my_task)
|
||||
my_task._set_state(TaskState.WAITING)
|
||||
|
||||
if task_spec.cardinality is None:
|
||||
# Use the same collection for input and output
|
||||
task_spec.data_input = TaskDataReference(task_spec.data_output.name)
|
||||
task_spec.input_item = TaskDataReference(task_spec.output_item.name)
|
||||
else:
|
||||
cardinality = my_task.workflow.script_engine.evaluate(my_task, task_spec.cardinality)
|
||||
if not isinstance(cardinality, int):
|
||||
# The input data was supplied via "cardinality"
|
||||
# We'll use the same reference for input and output item
|
||||
task_spec.data_input = TaskDataReference(task_spec.cardinality)
|
||||
task_spec.input_item = TaskDataReference(task_spec.output_item.name) if task_spec.output_item is not None else None
|
||||
task_spec.cardinality = None
|
||||
else:
|
||||
# This will be the index
|
||||
task_spec.input_item = TaskDataReference(task_spec.output_item.name) if task_spec.output_item is not None else None
|
||||
|
||||
|
||||
class SequentialMultiInstanceTask(BpmnSequentialMITask):
|
||||
|
||||
def _update_hook(self, my_task):
|
||||
update_task_spec(my_task)
|
||||
return super()._update_hook(my_task)
|
||||
|
||||
|
||||
class ParallelMultiInstanceTask(BpmnParallelMITask):
|
||||
|
||||
def _update_hook(self, my_task):
|
||||
if not my_task.internal_data.get('started', False):
|
||||
update_task_spec(my_task)
|
||||
self.create_children(my_task)
|
||||
return super()._update_hook(my_task)
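
A worked illustration of the branch in update_task_spec above, in plain Python and independent of SpiffWorkflow: a cardinality that evaluates to an integer stays a count, while any other value is reinterpreted as the name of the input collection. The function and names here are only a sketch of that decision.

def interpret_camunda_cardinality(cardinality_value):
    # Sketch of the decision made in update_task_spec: returns the
    # (cardinality, data_input_name) pair the task spec would end up with.
    if isinstance(cardinality_value, int):
        # A real count: keep it; no input collection is implied.
        return cardinality_value, None
    # Anything else: the expression actually named the input collection.
    return None, str(cardinality_value)

assert interpret_camunda_cardinality(3) == (3, None)
assert interpret_camunda_cardinality('input_data') == (None, 'input_data')
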
|
|
@ -86,6 +86,5 @@ class BpmnDmnParser(BpmnParser):
|
|||
def _find_dependencies(self, process):
|
||||
super()._find_dependencies(process)
|
||||
parser_cls, cls = self._get_parser_class(full_tag('businessRuleTask'))
|
||||
for business_rule in process.xpath('.//bpmn:businessRuleTask',namespaces=self.namespaces):
|
||||
for business_rule in process.xpath('.//bpmn:businessRuleTask', namespaces=self.namespaces):
|
||||
self.dmn_dependencies.add(parser_cls.get_decision_ref(business_rule))
|
||||
|
||||
|
|
|
@ -48,6 +48,7 @@ class AcquireMutex(TaskSpec):
|
|||
self.mutex = mutex
|
||||
|
||||
def _update_hook(self, my_task):
|
||||
super()._update_hook(my_task)
|
||||
mutex = my_task.workflow._get_mutex(self.mutex)
|
||||
if mutex.testandset():
|
||||
self.entered_event.emit(my_task.workflow, my_task)
|
||||
|
|
|
@ -245,6 +245,7 @@ class Celery(TaskSpec):
|
|||
return False
|
||||
|
||||
def _update_hook(self, my_task):
|
||||
super()._update_hook(my_task)
|
||||
if not self._start(my_task):
|
||||
if not my_task._has_state(TaskState.WAITING):
|
||||
my_task._set_state(TaskState.WAITING)
|
||||
|
|
|
@ -71,10 +71,11 @@ class Execute(TaskSpec):
|
|||
return False
|
||||
|
||||
def _update_hook(self, my_task):
|
||||
super()._update_hook(my_task)
|
||||
if not self._start(my_task):
|
||||
my_task._set_state(TaskState.WAITING)
|
||||
else:
|
||||
return super(Execute, self)._update_hook(my_task)
|
||||
return True
|
||||
|
||||
def serialize(self, serializer):
|
||||
return serializer.serialize_execute(self)
|
||||
|
|
|
@ -52,6 +52,7 @@ class Gate(TaskSpec):
|
|||
self.context = context
|
||||
|
||||
def _update_hook(self, my_task):
|
||||
super()._update_hook(my_task)
|
||||
context_task = my_task.workflow.get_task_spec_from_name(self.context)
|
||||
root_task = my_task.workflow.task_tree
|
||||
for task in root_task._find_any(context_task):
|
||||
|
|
|
@ -217,6 +217,7 @@ class Join(TaskSpec):
|
|||
|
||||
def _update_hook(self, my_task):
|
||||
# Check whether enough incoming branches have completed.
|
||||
my_task._inherit_data()
|
||||
may_fire, waiting_tasks = self._start(my_task)
|
||||
if may_fire:
|
||||
# If this is a cancelling join, cancel all incoming branches except for the one that just completed.
|
||||
|
|
|
@ -124,6 +124,8 @@ class SubWorkflow(TaskSpec):
|
|||
my_task._set_state(TaskState.WAITING)
|
||||
|
||||
def _update_hook(self, my_task):
|
||||
|
||||
super()._update_hook(my_task)
|
||||
subworkflow = my_task._get_internal_data('subworkflow')
|
||||
if subworkflow is None:
|
||||
# On the first update, we have to create the subworkflow
|
||||
|
|
|
@ -99,6 +99,8 @@ class ThreadMerge(Join):
|
|||
return False
|
||||
|
||||
def _update_hook(self, my_task):
|
||||
|
||||
my_task._inherit_data()
|
||||
if not self._start(my_task):
|
||||
my_task._set_state(TaskState.WAITING)
|
||||
return
|
||||
|
|
|
@ -51,6 +51,9 @@ class Transform(TaskSpec):
|
|||
self.transforms = transforms
|
||||
|
||||
def _update_hook(self, my_task):
|
||||
|
||||
super()._update_hook(my_task)
|
||||
|
||||
if self.transforms:
|
||||
for transform in self.transforms:
|
||||
logger.debug('Execute transform', extra=my_task.log_info({'transform': transform}))
|
||||
|
|
|
@ -205,27 +205,11 @@ class TaskSpec(object):
|
|||
self.outputs.append(taskspec)
|
||||
taskspec._connect_notify(self)
|
||||
|
||||
def follow(self, taskspec):
|
||||
"""
|
||||
Make this task follow the provided one. In other words, this task is
|
||||
added to the given task outputs.
|
||||
|
||||
This is an alias to connect, just easier to understand when reading
|
||||
code - ex: my_task.follow(the_other_task)
|
||||
Adding it after being confused by .connect one time too many!
|
||||
|
||||
:type taskspec: TaskSpec
|
||||
:param taskspec: The task to follow.
|
||||
"""
|
||||
taskspec.connect(self)
|
||||
|
||||
def test(self):
|
||||
"""
|
||||
Checks whether all required attributes are set. Throws an exception
|
||||
if an error was detected.
|
||||
"""
|
||||
# if self.id is None:
|
||||
# raise WorkflowException(self, 'TaskSpec is not yet instantiated.')
|
||||
if len(self.inputs) < 1:
|
||||
raise WorkflowException(self, 'No input task connected.')
|
||||
|
||||
|
@ -272,7 +256,6 @@ class TaskSpec(object):
|
|||
state of this task in the workflow. For example, if a predecessor
|
||||
completes, it makes sure to call this method so we can react.
|
||||
"""
|
||||
my_task._inherit_data()
|
||||
if my_task._is_predicted():
|
||||
self._predict(my_task)
|
||||
self.entered_event.emit(my_task.workflow, my_task)
|
||||
|
@ -282,8 +265,10 @@ class TaskSpec(object):
|
|||
def _update_hook(self, my_task):
|
||||
"""
|
||||
This method should decide whether the task should run now or wait.
|
||||
Tasks can also optionally choose not to inherit data.
|
||||
Returning True will cause the task to go into READY.
|
||||
"""
|
||||
my_task._inherit_data()
|
||||
return True
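
Read concretely, that contract lets a spec defer itself. A hypothetical subclass that waits for a flag in the task data could follow the same pattern as the Gate and Execute specs in this change; the class name and the 'go' key are assumptions for illustration, and TaskSpec/TaskState are assumed to be available in this module as they are in the other spec files.

class WaitForFlag(TaskSpec):
    # Hypothetical spec: stays WAITING until a 'go' value appears in the task data.
    def _update_hook(self, my_task):
        super()._update_hook(my_task)          # inherit data from the parent task
        if my_task.data.get('go'):
            return True                        # True -> the task will move to READY
        my_task._set_state(TaskState.WAITING)  # otherwise keep the task waiting
        return False
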
|
||||
|
||||
def _on_ready(self, my_task):
|
||||
|
|
|
@ -5,12 +5,18 @@ from SpiffWorkflow.bpmn.parser.TaskParser import TaskParser
|
|||
from SpiffWorkflow.bpmn.parser.task_parsers import SubprocessParser
|
||||
from SpiffWorkflow.bpmn.parser.util import xpath_eval
|
||||
|
||||
from SpiffWorkflow.spiff.specs.multiinstance_task import StandardLoopTask, ParallelMultiInstanceTask, SequentialMultiInstanceTask
|
||||
|
||||
SPIFFWORKFLOW_MODEL_NS = 'http://spiffworkflow.org/bpmn/schema/1.0/core'
|
||||
SPIFFWORKFLOW_MODEL_PREFIX = 'spiffworkflow'
|
||||
|
||||
|
||||
class SpiffTaskParser(TaskParser):
|
||||
|
||||
STANDARD_LOOP_CLASS = StandardLoopTask
|
||||
PARALLEL_MI_CLASS = ParallelMultiInstanceTask
|
||||
SEQUENTIAL_MI_CLASS = SequentialMultiInstanceTask
|
||||
|
||||
def parse_extensions(self, node=None):
|
||||
if node is None:
|
||||
node = self.node
|
||||
|
@ -81,6 +87,12 @@ class SpiffTaskParser(TaskParser):
|
|||
operator['parameters'] = parameters
|
||||
return operator
|
||||
|
||||
def _copy_task_attrs(self, original):
|
||||
# I am so disappointed I have to do this.
|
||||
super()._copy_task_attrs(original)
|
||||
self.task.prescript = original.prescript
|
||||
self.task.postscript = original.postscript
|
||||
|
||||
def create_task(self):
|
||||
# The main task parser already calls this, and even sets an attribute, but
|
||||
# 1. It calls it after creating the task so I don't have access to it here yet and
|
||||
|
|
|
@ -16,6 +16,7 @@ from SpiffWorkflow.bpmn.serializer.task_spec import (
|
|||
ParallelGatewayConverter,
|
||||
ExclusiveGatewayConverter,
|
||||
InclusiveGatewayConverter,
|
||||
StandardLoopTaskConverter,
|
||||
)
|
||||
|
||||
from .task_spec import (
|
||||
|
@ -26,9 +27,11 @@ from .task_spec import (
|
|||
ReceiveTaskConverter,
|
||||
ScriptTaskConverter,
|
||||
ServiceTaskConverter,
|
||||
SubWorkflowTaskConverter,
|
||||
SubprocessTaskConverter,
|
||||
TransactionSubprocessConverter,
|
||||
CallActivityTaskConverter,
|
||||
ParallelMultiInstanceTaskConverter,
|
||||
SequentialMultiInstanceTaskConverter,
|
||||
)
|
||||
|
||||
from SpiffWorkflow.bpmn.serializer.event_definition import MessageEventDefinitionConverter as DefaultMessageEventDefinitionConverter
|
||||
|
@ -57,9 +60,12 @@ SPIFF_SPEC_CONFIG['task_specs'] = [
|
|||
ReceiveTaskConverter,
|
||||
ScriptTaskConverter,
|
||||
ServiceTaskConverter,
|
||||
SubWorkflowTaskConverter,
|
||||
SubprocessTaskConverter,
|
||||
TransactionSubprocessConverter,
|
||||
CallActivityTaskConverter,
|
||||
StandardLoopTaskConverter,
|
||||
ParallelMultiInstanceTaskConverter,
|
||||
SequentialMultiInstanceTaskConverter,
|
||||
]
|
||||
SPIFF_SPEC_CONFIG['event_definitions'].remove(DefaultMessageEventDefinitionConverter)
|
||||
SPIFF_SPEC_CONFIG['event_definitions'].append(MessageEventDefinitionConverter)
|
|
@ -1,4 +1,5 @@
|
|||
from SpiffWorkflow.bpmn.serializer.helpers.spec import TaskSpecConverter
|
||||
from SpiffWorkflow.bpmn.serializer.task_spec import MultiInstanceTaskConverter
|
||||
|
||||
from SpiffWorkflow.spiff.specs.none_task import NoneTask
|
||||
from SpiffWorkflow.spiff.specs.manual_task import ManualTask
|
||||
|
@ -7,6 +8,7 @@ from SpiffWorkflow.spiff.specs.script_task import ScriptTask
|
|||
from SpiffWorkflow.spiff.specs.service_task import ServiceTask
|
||||
from SpiffWorkflow.spiff.specs.subworkflow_task import SubWorkflowTask, TransactionSubprocess, CallActivity
|
||||
from SpiffWorkflow.spiff.specs.events.event_types import SendTask, ReceiveTask
|
||||
from SpiffWorkflow.spiff.specs.multiinstance_task import StandardLoopTask, ParallelMultiInstanceTask, SequentialMultiInstanceTask
|
||||
|
||||
|
||||
class SpiffBpmnTaskConverter(TaskSpecConverter):
|
||||
|
@ -91,7 +93,7 @@ class ServiceTaskConverter(SpiffBpmnTaskConverter):
|
|||
return self.task_spec_from_dict(dct)
|
||||
|
||||
|
||||
class SubprocessTaskConverter(SpiffBpmnTaskConverter):
|
||||
class SubWorkflowTaskConverter(SpiffBpmnTaskConverter):
|
||||
|
||||
def to_dict(self, spec):
|
||||
dct = super().to_dict(spec)
|
||||
|
@ -102,14 +104,42 @@ class SubprocessTaskConverter(SpiffBpmnTaskConverter):
|
|||
dct['subworkflow_spec'] = dct.pop('spec')
|
||||
return super().task_spec_from_dict(dct)
|
||||
|
||||
class SubWorkflowTaskConverter(SubprocessTaskConverter):
|
||||
class SubprocessTaskConverter(SubWorkflowTaskConverter):
|
||||
def __init__(self, registry):
|
||||
super().__init__(SubWorkflowTask, registry)
|
||||
|
||||
class TransactionSubprocessConverter(SubprocessTaskConverter):
|
||||
class TransactionSubprocessConverter(SubWorkflowTaskConverter):
|
||||
def __init__(self, registry):
|
||||
super().__init__(TransactionSubprocess, registry)
|
||||
|
||||
class CallActivityTaskConverter(SubprocessTaskConverter):
|
||||
class CallActivityTaskConverter(SubWorkflowTaskConverter):
|
||||
def __init__(self, registry):
|
||||
super().__init__(CallActivity, registry)
|
||||
|
||||
|
||||
class StandardLoopTaskConverter(SpiffBpmnTaskConverter):
|
||||
|
||||
def __init__(self, registry):
|
||||
super().__init__(StandardLoopTask, registry)
|
||||
|
||||
def to_dict(self, spec):
|
||||
dct = self.get_default_attributes(spec)
|
||||
dct.update(self.get_bpmn_attributes(spec))
|
||||
dct.update(self.get_standard_loop_attributes(spec))
|
||||
return dct
|
||||
|
||||
|
||||
class SpiffMultiInstanceConverter(MultiInstanceTaskConverter, SpiffBpmnTaskConverter):
|
||||
|
||||
def to_dict(self, spec):
|
||||
dct = MultiInstanceTaskConverter.to_dict(self, spec)
|
||||
dct.update(SpiffBpmnTaskConverter.to_dict(self, spec))
|
||||
return dct
|
||||
|
||||
class ParallelMultiInstanceTaskConverter(SpiffMultiInstanceConverter):
|
||||
def __init__(self, registry):
|
||||
super().__init__(ParallelMultiInstanceTask, registry)
|
||||
|
||||
class SequentialMultiInstanceTaskConverter(SpiffMultiInstanceConverter):
|
||||
def __init__(self, registry):
|
||||
super().__init__(SequentialMultiInstanceTask, registry)
|
|
@ -0,0 +1,15 @@
|
|||
from SpiffWorkflow.bpmn.specs.MultiInstanceTask import (
|
||||
StandardLoopTask as BpmnStandardLoopTask,
|
||||
ParallelMultiInstanceTask as BpmnParallelMITask,
|
||||
SequentialMultiInstanceTask as BpmnSequentialMITask,
|
||||
)
|
||||
from .spiff_task import SpiffBpmnTask
|
||||
|
||||
class StandardLoopTask(BpmnStandardLoopTask, SpiffBpmnTask):
|
||||
pass
|
||||
|
||||
class ParallelMultiInstanceTask(BpmnParallelMITask, SpiffBpmnTask):
|
||||
pass
|
||||
|
||||
class SequentialMultiInstanceTask(BpmnSequentialMITask, SpiffBpmnTask):
|
||||
pass
|
|
@ -34,14 +34,15 @@ class SpiffBpmnTask(BpmnSpecMixin):
|
|||
my_task._set_state(TaskState.WAITING)
|
||||
raise exc
|
||||
|
||||
def _on_ready_hook(self, my_task):
|
||||
super()._on_ready_hook(my_task)
|
||||
def _update_hook(self, my_task):
|
||||
super()._update_hook(my_task)
|
||||
if self.prescript is not None:
|
||||
try:
|
||||
self.execute_script(my_task, self.prescript)
|
||||
except SpiffWorkflowException as se:
|
||||
se.add_note("Error occurred in the Pre-Script")
|
||||
raise se
|
||||
return True
|
||||
|
||||
def _on_complete_hook(self, my_task):
|
||||
if self.postscript is not None:
|
||||
|
|
|
@ -1,7 +1,12 @@
|
|||
from SpiffWorkflow.bpmn.specs.SubWorkflowTask import SubWorkflowTask, TransactionSubprocess, CallActivity
|
||||
from SpiffWorkflow.bpmn.specs.SubWorkflowTask import (
|
||||
SubWorkflowTask as DefaultSubWorkflow,
|
||||
TransactionSubprocess as DefaultTransaction,
|
||||
CallActivity as DefaultCallActivity,
|
||||
)
|
||||
from SpiffWorkflow.spiff.specs.spiff_task import SpiffBpmnTask
|
||||
|
||||
class SubWorkflowTask(SubWorkflowTask, SpiffBpmnTask):
|
||||
|
||||
class SubWorkflowTask(DefaultSubWorkflow, SpiffBpmnTask):
|
||||
|
||||
def __init__(self, wf_spec, name, subworkflow_spec, transaction=False, **kwargs):
|
||||
|
||||
|
@ -13,9 +18,13 @@ class SubWorkflowTask(SubWorkflowTask, SpiffBpmnTask):
|
|||
self.in_assign = []
|
||||
self.out_assign = []
|
||||
|
||||
def _on_ready_hook(self, my_task):
|
||||
SpiffBpmnTask._on_ready_hook(self, my_task)
|
||||
self.start_workflow(my_task)
|
||||
def _update_hook(self, my_task):
|
||||
# Don't really like duplicating this, but we need to run SpiffBpmn update rather than the default
|
||||
wf = my_task.workflow._get_outermost_workflow(my_task)
|
||||
if my_task.id not in wf.subprocesses:
|
||||
SpiffBpmnTask._update_hook(self, my_task)
|
||||
self.create_workflow(my_task)
|
||||
return True
|
||||
|
||||
def _on_complete_hook(self, my_task):
|
||||
SpiffBpmnTask._on_complete_hook(self, my_task)
|
||||
|
@ -25,7 +34,7 @@ class SubWorkflowTask(SubWorkflowTask, SpiffBpmnTask):
|
|||
return 'Subprocess'
|
||||
|
||||
|
||||
class TransactionSubprocess(SubWorkflowTask, TransactionSubprocess):
|
||||
class TransactionSubprocess(SubWorkflowTask, DefaultTransaction):
|
||||
|
||||
def __init__(self, wf_spec, name, subworkflow_spec, transaction=True, **kwargs):
|
||||
|
||||
|
@ -40,7 +49,7 @@ class TransactionSubprocess(SubWorkflowTask, TransactionSubprocess):
|
|||
return 'Transactional Subprocess'
|
||||
|
||||
|
||||
class CallActivity(SubWorkflowTask, CallActivity):
|
||||
class CallActivity(SubWorkflowTask, DefaultCallActivity):
|
||||
|
||||
def __init__(self, wf_spec, name, subworkflow_spec, **kwargs):
|
||||
|
||||
|
|
|
@ -31,20 +31,6 @@ metrics = logging.getLogger('spiff.metrics')
|
|||
data_log = logging.getLogger('spiff.data')
|
||||
|
||||
|
||||
def updateDotDict(dct,dotted_path,value):
|
||||
parts = dotted_path.split(".")
|
||||
path_len = len(parts)
|
||||
root = dct
|
||||
for i, key in enumerate(parts):
|
||||
if (i + 1) < path_len:
|
||||
if key not in dct:
|
||||
dct[key] = {}
|
||||
dct = dct[key]
|
||||
else:
|
||||
dct[key] = value
|
||||
return root
|
||||
|
||||
|
||||
class TaskState:
|
||||
"""
|
||||
|
||||
|
@ -247,16 +233,11 @@ class Task(object, metaclass=DeprecatedMetaTask):
|
|||
self.id = uuid4()
|
||||
self.thread_id = self.__class__.thread_id_pool
|
||||
self.data = {}
|
||||
self.terminate_current_loop = False
|
||||
self.internal_data = {}
|
||||
self.mi_collect_data = {}
|
||||
self.last_state_change = time.time()
|
||||
if parent is not None:
|
||||
self.parent._child_added_notify(self)
|
||||
|
||||
# TODO: get rid of this stuff
|
||||
self.last_state_change = time.time()
|
||||
self.state_history = [state]
|
||||
|
||||
@property
|
||||
def state(self):
|
||||
return self._state
|
||||
|
@ -277,7 +258,6 @@ class Task(object, metaclass=DeprecatedMetaTask):
|
|||
if value != self.state:
|
||||
logger.info(f'State change to {TaskStateNames[value]}', extra=self.log_info())
|
||||
self.last_state_change = time.time()
|
||||
self.state_history.append(value)
|
||||
self._state = value
|
||||
else:
|
||||
logger.debug(f'State set to {TaskStateNames[value]}', extra=self.log_info())
|
||||
|
@ -301,11 +281,6 @@ class Task(object, metaclass=DeprecatedMetaTask):
|
|||
})
|
||||
return extra
|
||||
|
||||
def update_data_var(self, fieldid, value):
|
||||
model = {}
|
||||
updateDotDict(model,fieldid, value)
|
||||
self.update_data(model)
|
||||
|
||||
def update_data(self, data):
|
||||
"""
|
||||
If the task.data needs to be updated from a UserTask form or
|
||||
|
@ -316,45 +291,6 @@ class Task(object, metaclass=DeprecatedMetaTask):
|
|||
self.data = DeepMerge.merge(self.data, data)
|
||||
data_log.info('Data update', extra=self.log_info())
|
||||
|
||||
def task_info(self):
|
||||
"""
|
||||
Returns a dictionary of information about the current task, so that
|
||||
we can give hints to the user about what kind of task we are working
|
||||
with such as a looping task or a Parallel MultiInstance task
|
||||
:returns: dictionary
|
||||
"""
|
||||
default = {'is_looping': False,
|
||||
'is_sequential_mi': False,
|
||||
'is_parallel_mi': False,
|
||||
'mi_count': 0,
|
||||
'mi_index': 0}
|
||||
|
||||
miInfo = getattr(self.task_spec, "multiinstance_info", None)
|
||||
if callable(miInfo):
|
||||
return miInfo(self)
|
||||
else:
|
||||
return default
|
||||
|
||||
def terminate_loop(self):
|
||||
"""
|
||||
Used in the case that we are working with a BPMN 'loop' task.
|
||||
The task will loop, repeatedly asking for input until terminate_loop
|
||||
is called on the task
|
||||
"""
|
||||
if self.is_looping():
|
||||
self.terminate_current_loop = True
|
||||
else:
|
||||
raise WorkflowException('The method terminate_loop should only be called in the case of a BPMN Loop Task',
|
||||
task_spec=self)
|
||||
|
||||
def is_looping(self):
|
||||
"""Returns true if this is a looping task."""
|
||||
islooping = getattr(self.task_spec, "is_loop_task", None)
|
||||
if callable(islooping):
|
||||
return self.task_spec.is_loop_task()
|
||||
else:
|
||||
return False
|
||||
|
||||
def set_children_future(self):
|
||||
"""
|
||||
for a parallel gateway, we need to set up our
|
||||
|
@ -362,25 +298,17 @@ class Task(object, metaclass=DeprecatedMetaTask):
|
|||
the inputs - otherwise our child process never gets marked as
|
||||
'READY'
|
||||
"""
|
||||
|
||||
if not self.task_spec.task_should_set_children_future(self):
|
||||
return
|
||||
|
||||
self.task_spec.task_will_set_children_future(self)
|
||||
|
||||
# now we set this one to execute
|
||||
|
||||
self._set_state(TaskState.MAYBE)
|
||||
self._sync_children(self.task_spec.outputs)
|
||||
for child in self.children:
|
||||
child.set_children_future()
|
||||
|
||||
def find_children_by_name(self,name):
|
||||
"""
|
||||
for debugging
|
||||
"""
|
||||
return [x for x in self.workflow.task_tree if x.task_spec.name == name]
|
||||
|
||||
def reset_token(self, data, reset_data=False):
|
||||
"""
|
||||
Resets the token to this task. This should allow a trip 'back in time'
|
||||
|
@ -660,12 +588,6 @@ class Task(object, metaclass=DeprecatedMetaTask):
|
|||
def get_description(self):
|
||||
return str(self.task_spec.description)
|
||||
|
||||
def get_state(self):
|
||||
"""
|
||||
Returns this Task's state.
|
||||
"""
|
||||
return self.state
|
||||
|
||||
def get_state_name(self):
|
||||
"""
|
||||
Returns a textual representation of this Task's state.
|
||||
|
|
|
@ -231,13 +231,6 @@ class Workflow(object):
|
|||
return True
|
||||
return False
|
||||
|
||||
def cancel_notify(self):
|
||||
self.task_tree.internal_data['cancels'] = self.task_tree.internal_data.get('cancels', {})
|
||||
self.task_tree.internal_data['cancels']['TokenReset'] = True
|
||||
self.refresh_waiting_tasks()
|
||||
self.do_engine_steps()
|
||||
self.task_tree.internal_data['cancels'] = {}
|
||||
|
||||
def get_tasks(self, state=TaskState.ANY_MASK):
|
||||
"""
|
||||
Returns a list of Task objects with the given state.
|
||||
|
|
|
@ -1,12 +1,9 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
import sys
|
||||
import unittest
|
||||
import re
|
||||
import os.path
|
||||
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
|
||||
|
||||
from SpiffWorkflow.task import Task, TaskState, updateDotDict
|
||||
from SpiffWorkflow.task import Task, TaskState
|
||||
from SpiffWorkflow.specs.WorkflowSpec import WorkflowSpec
|
||||
from SpiffWorkflow.specs.Simple import Simple
|
||||
|
||||
|
@ -15,10 +12,6 @@ class MockWorkflow(object):
|
|||
def __init__(self, spec):
|
||||
self.spec = spec
|
||||
|
||||
class UpdateDotDictTest(unittest.TestCase):
|
||||
def test_update(self):
|
||||
res = updateDotDict({}, 'some.thing.here', 'avalue')
|
||||
self.assertEqual(res, {'some':{'thing': {'here': 'avalue'}}})
|
||||
|
||||
class TaskTest(unittest.TestCase):
|
||||
|
||||
|
@ -85,8 +78,7 @@ class TaskTest(unittest.TestCase):
|
|||
|
||||
def suite():
|
||||
taskSuite = unittest.TestLoader().loadTestsFromTestCase(TaskTest)
|
||||
updateDotSuite = unittest.TestLoader().loadTestsFromTestCase(UpdateDotDictTest)
|
||||
return unittest.TestSuite([taskSuite, updateDotSuite])
|
||||
return unittest.TestSuite([taskSuite])
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.TextTestRunner(verbosity=2).run(suite())
|
||||
|
|
|
@ -1,42 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
|
||||
import unittest
|
||||
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
|
||||
from SpiffWorkflow.exceptions import WorkflowException
|
||||
from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
|
||||
|
||||
__author__ = 'kellym'
|
||||
|
||||
|
||||
class AntiLoopTaskTest(BpmnWorkflowTestCase):
|
||||
"""The example bpmn is actually a MultiInstance. It should not report that it is a looping task and
|
||||
it should fail when we try to terminate the loop"""
|
||||
|
||||
def setUp(self):
|
||||
spec, subprocesses = self.load_workflow_spec('bpmnAntiLoopTask.bpmn','LoopTaskTest')
|
||||
self.workflow = BpmnWorkflow(spec, subprocesses)
|
||||
|
||||
def testRunThroughHappy(self):
|
||||
|
||||
self.workflow.do_engine_steps()
|
||||
ready_tasks = self.workflow.get_ready_user_tasks()
|
||||
self.assertTrue(len(ready_tasks) ==1)
|
||||
self.assertFalse(ready_tasks[0].task_spec.is_loop_task())
|
||||
try:
|
||||
ready_tasks[0].terminate_loop()
|
||||
self.fail("Terminate Loop should throw an error when called on a non-loop MultiInstance")
|
||||
except WorkflowException as ex:
|
||||
self.assertTrue(
|
||||
'The method terminate_loop should only be called in the case of a BPMN Loop Task' in (
|
||||
'%r' % ex),
|
||||
'\'The method terminate_loop should only be called in the case of a BPMN Loop Task\' should be a substring of error message: \'%r\'' % ex)
|
||||
|
||||
|
||||
|
||||
|
||||
def suite():
|
||||
return unittest.TestLoader().loadTestsFromTestCase(AntiLoopTaskTest)
|
||||
if __name__ == '__main__':
|
||||
unittest.TextTestRunner(verbosity=2).run(suite())
|
|
@ -8,7 +8,6 @@ from SpiffWorkflow.bpmn.parser.BpmnParser import BpmnValidator
|
|||
from SpiffWorkflow.task import TaskState
|
||||
|
||||
from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer, DEFAULT_SPEC_CONFIG
|
||||
from SpiffWorkflow.bpmn.serializer.task_spec import UserTaskConverter
|
||||
from .BpmnLoaderForTests import TestUserTaskConverter, TestBpmnParser
|
||||
|
||||
__author__ = 'matth'
|
||||
|
@ -121,6 +120,7 @@ class BpmnWorkflowTestCase(unittest.TestCase):
|
|||
|
||||
def save_restore(self):
|
||||
|
||||
script_engine = self.workflow.script_engine
|
||||
before_state = self._get_workflow_state(do_steps=False)
|
||||
before_dump = self.workflow.get_dump()
|
||||
# Check that we can actually convert this to JSON
|
||||
|
@ -133,6 +133,7 @@ class BpmnWorkflowTestCase(unittest.TestCase):
|
|||
self.assertEqual(before_dump, after_dump)
|
||||
self.assertEqual(before_state, after_state)
|
||||
self.workflow = after
|
||||
self.workflow.script_engine = script_engine
|
||||
|
||||
def restore(self, state):
|
||||
self.workflow = self.serializer.workflow_from_dict(state)
|
||||
|
|
|
@ -72,11 +72,15 @@ class DataObjectReferenceTest(BpmnWorkflowTestCase):
|
|||
self.assertNotIn('obj_1', ready_tasks[0].data)
|
||||
self.assertEqual(self.workflow.data['obj_1'], 'hello')
|
||||
|
||||
# Make sure data objects can be copied in and out of a subprocess
|
||||
# Make sure data objects are accessible inside a subprocess
|
||||
self.workflow.do_engine_steps()
|
||||
ready_tasks = self.workflow.get_ready_user_tasks()
|
||||
self.assertEqual(ready_tasks[0].data['obj_1'], 'hello')
|
||||
ready_tasks[0].data['obj_1'] = 'hello again'
|
||||
ready_tasks[0].complete()
|
||||
self.workflow.do_engine_steps()
|
||||
sp = self.workflow.get_tasks_from_spec_name('subprocess')[0]
|
||||
# It was copied out
|
||||
self.assertNotIn('obj_1', sp.data)
|
||||
# The update should persist in the main process
|
||||
self.assertEqual(self.workflow.data['obj_1'], 'hello again')
|
||||
|
|
|
@ -1,38 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
|
||||
import sys
|
||||
import os
|
||||
import unittest
|
||||
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..'))
|
||||
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
|
||||
from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
|
||||
|
||||
__author__ = 'matth'
|
||||
|
||||
|
||||
class ExclusiveGatewayIntoMultiInstanceTest(BpmnWorkflowTestCase):
|
||||
"""In the example BPMN Diagram we set x = 0, then we have an
|
||||
exclusive gateway that should skip over a parallel multi-instance
|
||||
class, so it should run straight through and complete without issue."""
|
||||
|
||||
def setUp(self):
|
||||
spec, subprocesses = self.load_workflow_spec('exclusive_into_multi.bpmn','ExclusiveToMulti')
|
||||
self.workflow = BpmnWorkflow(spec, subprocesses)
|
||||
|
||||
def testRunThroughHappy(self):
|
||||
|
||||
|
||||
self.workflow.do_engine_steps()
|
||||
self.assertTrue(self.workflow.is_completed())
|
||||
|
||||
def testSaveRestore(self):
|
||||
|
||||
self.workflow.do_engine_steps()
|
||||
self.assertTrue(self.workflow.is_completed())
|
||||
|
||||
def suite():
|
||||
return unittest.TestLoader().loadTestsFromTestCase(ExclusiveGatewayIntoMultiInstanceTest)
|
||||
if __name__ == '__main__':
|
||||
unittest.TextTestRunner(verbosity=2).run(suite())
|
|
@ -1,59 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
|
||||
import sys
|
||||
import os
|
||||
import unittest
|
||||
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..'))
|
||||
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
|
||||
from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
|
||||
|
||||
__author__ = 'matth'
|
||||
|
||||
|
||||
class ExclusiveGatewayNonDefaultPathIntoMultiTest(BpmnWorkflowTestCase):
|
||||
"""In the example BPMN Diagram we require that "Yes" or "No" be specified
|
||||
in a user task and check that a multiinstance can follow a non-default
|
||||
path.
|
||||
"""
|
||||
|
||||
def setUp(self):
|
||||
spec, subprocesses = self.load_workflow_spec('exclusive_non_default_path_into_multi.bpmn','ExclusiveNonDefaultMulti')
|
||||
self.workflow = BpmnWorkflow(spec, subprocesses)
|
||||
|
||||
def load_workflow1_spec(self):
|
||||
return
|
||||
|
||||
def testRunThroughHappy(self):
|
||||
|
||||
|
||||
self.workflow.do_engine_steps()
|
||||
|
||||
# Set initial array size to 3 in the first user form.
|
||||
task = self.workflow.get_ready_user_tasks()[0]
|
||||
self.assertEqual("DoStuff", task.task_spec.name)
|
||||
task.update_data({"morestuff": 'Yes'})
|
||||
self.workflow.complete_task_from_id(task.id)
|
||||
self.workflow.do_engine_steps()
|
||||
|
||||
for i in range(3):
|
||||
task = self.workflow.get_ready_user_tasks()[0]
|
||||
if i == 0:
|
||||
self.assertEqual("GetMoreStuff", task.task_spec.name)
|
||||
else:
|
||||
self.assertEqual("GetMoreStuff_%d"%(i-1), task.task_spec.name)
|
||||
|
||||
|
||||
task.update_data({"stuff.addstuff": "Stuff %d"%i})
|
||||
self.workflow.complete_task_from_id(task.id)
|
||||
self.workflow.do_engine_steps()
|
||||
|
||||
self.assertTrue(self.workflow.is_completed())
|
||||
|
||||
|
||||
def suite():
|
||||
return unittest.TestLoader().loadTestsFromTestCase(ExclusiveGatewayNonDefaultPathIntoMultiTest)
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.TextTestRunner(verbosity=2).run(suite())
|
|
@ -25,9 +25,6 @@ class CallActivityDataTest(BpmnWorkflowTestCase):
|
|||
|
||||
with self.assertRaises(WorkflowDataException) as exc:
|
||||
self.advance_to_subprocess()
|
||||
self.assertEqual("'in_2' was not found in the task data. "
|
||||
"You are missing a required Data Input for a call activity.",
|
||||
str(exc.exception))
|
||||
self.assertEqual(exc.exception.data_input.name,'in_2')
|
||||
|
||||
def testCallActivityMissingOutput(self):
|
||||
|
@ -43,10 +40,7 @@ class CallActivityDataTest(BpmnWorkflowTestCase):
|
|||
|
||||
with self.assertRaises(WorkflowDataException) as exc:
|
||||
self.complete_subprocess()
|
||||
|
||||
self.assertEqual("'out_2' was not found in the task data. A Data Output was not provided as promised.",
|
||||
str(exc.exception))
|
||||
self.assertEqual(exc.exception.data_output.name,'out_2')
|
||||
self.assertEqual(exc.exception.data_output.name, 'out_2')
|
||||
|
||||
def actual_test(self, save_restore=False):
|
||||
|
||||
|
@ -92,3 +86,44 @@ class CallActivityDataTest(BpmnWorkflowTestCase):
|
|||
next_task = self.workflow.get_tasks(TaskState.READY)[0]
|
||||
next_task.complete()
|
||||
waiting = self.workflow.get_tasks(TaskState.WAITING)
|
||||
|
||||
|
||||
class IOSpecOnTaskTest(BpmnWorkflowTestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.spec, self.subprocesses = self.load_workflow_spec('io_spec_on_task.bpmn', 'main')
|
||||
|
||||
def testIOSpecOnTask(self):
|
||||
self.actual_test()
|
||||
|
||||
def testIOSpecOnTaskSaveRestore(self):
|
||||
self.actual_test(True)
|
||||
|
||||
def testIOSpecOnTaskMissingInput(self):
|
||||
self.workflow = BpmnWorkflow(self.spec, self.subprocesses)
|
||||
set_data = self.workflow.spec.task_specs['set_data']
|
||||
set_data.script = """in_1, unused = 1, True"""
|
||||
with self.assertRaises(WorkflowDataException) as exc:
|
||||
self.workflow.do_engine_steps()
|
||||
self.assertEqual(exc.exception.data_input.name, 'in_2')
|
||||
|
||||
def testIOSpecOnTaskMissingOutput(self):
|
||||
self.workflow = BpmnWorkflow(self.spec, self.subprocesses)
|
||||
self.workflow.do_engine_steps()
|
||||
task = self.workflow.get_tasks_from_spec_name('any_task')[0]
|
||||
task.data.update({'out_1': 1})
|
||||
with self.assertRaises(WorkflowDataException) as exc:
|
||||
task.complete()
|
||||
self.assertEqual(exc.exception.data_output.name, 'out_2')
|
||||
|
||||
def actual_test(self, save_restore=False):
|
||||
self.workflow = BpmnWorkflow(self.spec, self.subprocesses)
|
||||
self.workflow.do_engine_steps()
|
||||
if save_restore:
|
||||
self.save_restore()
|
||||
task = self.workflow.get_tasks_from_spec_name('any_task')[0]
|
||||
self.assertDictEqual(task.data, {'in_1': 1, 'in_2': 'hello world'})
|
||||
task.data.update({'out_1': 1, 'out_2': 'bye', 'extra': True})
|
||||
task.complete()
|
||||
self.workflow.do_engine_steps()
|
||||
self.assertDictEqual(self.workflow.last_task.data, {'out_1': 1, 'out_2': 'bye'})
|
||||
|
|
|
@ -1,60 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
import unittest
|
||||
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
|
||||
from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
|
||||
|
||||
__author__ = 'kellym'
|
||||
|
||||
|
||||
class LoopTaskTest(BpmnWorkflowTestCase):
|
||||
"""The example bpmn diagram has a single task with a loop cardinality of 5.
|
||||
It should repeat 5 times before termination."""
|
||||
|
||||
def setUp(self):
|
||||
spec, subprocesses = self.load_workflow_spec('bpmnLoopTask.bpmn','LoopTaskTest')
|
||||
self.workflow = BpmnWorkflow(spec, subprocesses)
|
||||
|
||||
def testRunThroughHappy(self):
|
||||
|
||||
for i in range(5):
|
||||
self.workflow.do_engine_steps()
|
||||
ready_tasks = self.workflow.get_ready_user_tasks()
|
||||
self.assertTrue(len(ready_tasks) ==1)
|
||||
self.assertTrue(ready_tasks[0].task_spec.is_loop_task())
|
||||
self.assertFalse(self.workflow.is_completed())
|
||||
last_task = self.workflow.last_task
|
||||
|
||||
self.do_next_exclusive_step('Activity_TestLoop')
|
||||
|
||||
ready_tasks = self.workflow.get_ready_user_tasks()
|
||||
self.assertTrue(len(ready_tasks) ==1)
|
||||
ready_tasks[0].terminate_loop()
|
||||
self.do_next_exclusive_step('Activity_TestLoop')
|
||||
self.workflow.do_engine_steps()
|
||||
self.assertTrue(self.workflow.is_completed())
|
||||
|
||||
|
||||
def testSaveRestore(self):
|
||||
|
||||
for i in range(5):
|
||||
self.save_restore()
|
||||
self.workflow.do_engine_steps()
|
||||
ready_tasks = self.workflow.get_ready_user_tasks()
|
||||
self.assertTrue(len(ready_tasks) ==1)
|
||||
self.assertTrue(ready_tasks[0].task_spec.is_loop_task())
|
||||
self.assertFalse(self.workflow.is_completed())
|
||||
self.do_next_exclusive_step('Activity_TestLoop')
|
||||
|
||||
ready_tasks = self.workflow.get_ready_user_tasks()
|
||||
self.assertTrue(len(ready_tasks) ==1)
|
||||
ready_tasks[0].terminate_loop()
|
||||
self.do_next_exclusive_step('Activity_TestLoop')
|
||||
self.workflow.do_engine_steps()
|
||||
self.assertTrue(self.workflow.is_completed())
|
||||
|
||||
|
||||
def suite():
|
||||
return unittest.TestLoader().loadTestsFromTestCase(LoopTaskTest)
|
||||
if __name__ == '__main__':
|
||||
unittest.TextTestRunner(verbosity=2).run(suite())
|
|
@ -1,54 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
import sys
|
||||
import os
|
||||
import unittest
|
||||
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..'))
|
||||
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
|
||||
from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
|
||||
|
||||
__author__ = 'matth'
|
||||
|
||||
|
||||
class MultiInstanceCondTest(BpmnWorkflowTestCase):
|
||||
"""The example bpmn diagram has a single task set to be a parallel
|
||||
multi-instance with a loop cardinality of 5.
|
||||
It should repeat 5 times before termination, and it should
|
||||
have a navigation list with 7 items in it - one for start, one for end,
|
||||
and five items for the repeating section. """
|
||||
|
||||
def setUp(self):
|
||||
spec, subprocesses = self.load_workflow_spec('MultiInstanceParallelTaskCond.bpmn', 'MultiInstance')
|
||||
self.workflow = BpmnWorkflow(spec, subprocesses)
|
||||
|
||||
def load_workflow1_spec(self):
|
||||
return
|
||||
|
||||
def testRunThroughHappy(self):
|
||||
self.actualTest()
|
||||
|
||||
def testSaveRestore(self):
|
||||
self.actualTest(True)
|
||||
|
||||
def actualTest(self, save_restore=False):
|
||||
|
||||
self.workflow.do_engine_steps()
|
||||
self.assertEqual(1, len(self.workflow.get_ready_user_tasks()))
|
||||
task = self.workflow.get_ready_user_tasks()[0]
|
||||
task.data['collection'] = {'a':{'a':'test'},
|
||||
'b':{'b':'test'}}
|
||||
self.workflow.complete_task_from_id(task.id)
|
||||
self.workflow.do_engine_steps()
|
||||
|
||||
for task in self.workflow.get_ready_user_tasks():
|
||||
self.assertFalse(self.workflow.is_completed())
|
||||
self.workflow.complete_task_from_id(task.id)
|
||||
if save_restore:
|
||||
self.save_restore()
|
||||
self.workflow.do_engine_steps()
|
||||
self.assertTrue(self.workflow.is_completed())
|
||||
|
||||
def suite():
|
||||
return unittest.TestLoader().loadTestsFromTestCase(MultiInstanceCondTest)
|
||||
if __name__ == '__main__':
|
||||
unittest.TextTestRunner(verbosity=2).run(suite())
|
|
@ -1,51 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
import sys
|
||||
import os
|
||||
import unittest
|
||||
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..'))
|
||||
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
|
||||
from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
|
||||
|
||||
__author__ = 'matth'
|
||||
|
||||
|
||||
class MultiInstanceTest(BpmnWorkflowTestCase):
|
||||
"""The example bpmn diagram has a single task set to be a parallel
|
||||
multi-instance with a loop cardinality of 5.
|
||||
It should repeat 5 times before termination, and it should
|
||||
have a navigation list with 7 items in it - one for start, one for end,
|
||||
and five items for the repeating section. """
|
||||
|
||||
def setUp(self):
|
||||
spec, subprocesses = self.load_workflow_spec('MultiInstanceParallelTask.bpmn', 'MultiInstance')
|
||||
self.workflow = BpmnWorkflow(spec, subprocesses)
|
||||
|
||||
def load_workflow1_spec(self):
|
||||
return
|
||||
|
||||
def testRunThroughHappy(self):
|
||||
self.actualTest()
|
||||
|
||||
def testSaveRestore(self):
|
||||
self.actualTest(True)
|
||||
|
||||
def actualTest(self, save_restore=False):
|
||||
self.workflow.do_engine_steps()
|
||||
self.assertEqual(1, len(self.workflow.get_ready_user_tasks()))
|
||||
task = self.workflow.get_ready_user_tasks()[0]
|
||||
task.data['collection'] = [1,2,3,4,5]
|
||||
self.workflow.complete_task_from_id(task.id)
|
||||
self.workflow.do_engine_steps()
|
||||
for task in self.workflow.get_ready_user_tasks():
|
||||
self.assertFalse(self.workflow.is_completed())
|
||||
self.workflow.complete_task_from_id(task.id)
|
||||
if save_restore:
|
||||
self.save_restore()
|
||||
self.workflow.do_engine_steps()
|
||||
self.assertTrue(self.workflow.is_completed())
|
||||
|
||||
def suite():
|
||||
return unittest.TestLoader().loadTestsFromTestCase(MultiInstanceTest)
|
||||
if __name__ == '__main__':
|
||||
unittest.TextTestRunner(verbosity=2).run(suite())
|
|
@ -1,46 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
import sys
|
||||
import os
|
||||
import unittest
|
||||
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..'))
|
||||
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
|
||||
from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
|
||||
|
||||
__author__ = 'matth'
|
||||
|
||||
|
||||
class MultiInstanceTest(BpmnWorkflowTestCase):
|
||||
"""The example bpmn diagram has a single task with a loop cardinality of 5.
|
||||
It should repeat 5 times before termination."""
|
||||
|
||||
def setUp(self):
|
||||
spec, subprocesses = self.load_workflow_spec('bpmnMultiUserTask.bpmn','MultiInstance')
|
||||
self.workflow = BpmnWorkflow(spec, subprocesses)
|
||||
|
||||
|
||||
def testRunThroughHappy(self):
|
||||
|
||||
for i in range(5):
|
||||
self.workflow.do_engine_steps()
|
||||
self.assertFalse(self.workflow.is_completed())
|
||||
self.do_next_exclusive_step('Activity_Loop')
|
||||
|
||||
self.workflow.do_engine_steps()
|
||||
self.assertTrue(self.workflow.is_completed())
|
||||
|
||||
def testSaveRestore(self):
|
||||
|
||||
for i in range(5):
|
||||
self.save_restore()
|
||||
self.workflow.do_engine_steps()
|
||||
self.assertFalse(self.workflow.is_completed())
|
||||
self.do_next_exclusive_step('Activity_Loop')
|
||||
|
||||
self.workflow.do_engine_steps()
|
||||
self.assertTrue(self.workflow.is_completed())
|
||||
|
||||
def suite():
|
||||
return unittest.TestLoader().loadTestsFromTestCase(MultiInstanceTest)
|
||||
if __name__ == '__main__':
|
||||
unittest.TextTestRunner(verbosity=2).run(suite())
|
|
@ -0,0 +1,233 @@
|
|||
from SpiffWorkflow.task import TaskState
|
||||
from SpiffWorkflow.bpmn.exceptions import WorkflowDataException
|
||||
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
|
||||
from SpiffWorkflow.bpmn.specs.data_spec import TaskDataReference
|
||||
from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException
|
||||
|
||||
from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
|
||||
|
||||
|
||||
class BaseTestCase(BpmnWorkflowTestCase):
|
||||
|
||||
def set_io_and_run_workflow(self, data, data_input=None, data_output=None, save_restore=False):
|
||||
|
||||
start = self.workflow.get_tasks_from_spec_name('Start')[0]
|
||||
start.data = data
|
||||
|
||||
any_task = self.workflow.get_tasks_from_spec_name('any_task')[0]
|
||||
any_task.task_spec.data_input = TaskDataReference(data_input) if data_input is not None else None
|
||||
any_task.task_spec.data_output = TaskDataReference(data_output) if data_output is not None else None
|
||||
|
||||
self.workflow.do_engine_steps()
|
||||
ready_tasks = self.workflow.get_ready_user_tasks()
|
||||
self.assertEqual(len(ready_tasks), 3)
|
||||
while len(ready_tasks) > 0:
|
||||
task = ready_tasks[0]
|
||||
self.assertEqual(task.task_spec.name, 'any_task [child]')
|
||||
self.assertIn('input_item', task.data)
|
||||
task.data['output_item'] = task.data['input_item'] * 2
|
||||
task.complete()
|
||||
if save_restore:
|
||||
self.save_restore()
|
||||
ready_tasks = self.workflow.get_ready_user_tasks()
|
||||
self.workflow.refresh_waiting_tasks()
|
||||
self.workflow.do_engine_steps()
|
||||
self.assertTrue(self.workflow.is_completed())
|
||||
|
||||
def run_workflow_with_condition(self, data):
|
||||
|
||||
start = self.workflow.get_tasks_from_spec_name('Start')[0]
|
||||
start.data = data
|
||||
|
||||
task = self.workflow.get_tasks_from_spec_name('any_task')[0]
|
||||
task.task_spec.condition = "input_item == 2"
|
||||
|
||||
self.workflow.do_engine_steps()
|
||||
ready_tasks = self.workflow.get_ready_user_tasks()
|
||||
self.assertEqual(len(ready_tasks), 3)
|
||||
task = [t for t in ready_tasks if t.data['input_item'] == 2][0]
|
||||
task.data['output_item'] = task.data['input_item'] * 2
|
||||
task.complete()
|
||||
self.workflow.do_engine_steps()
|
||||
self.workflow.refresh_waiting_tasks()
|
||||
|
||||
self.assertTrue(self.workflow.is_completed())
|
||||
self.assertEqual(len([ t for t in ready_tasks if t.state == TaskState.CANCELLED]), 2)
|
||||
|
||||
|
||||
class ParallellMultiInstanceExistingOutputTest(BaseTestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.spec, subprocess = self.load_workflow_spec('parallel_multiinstance_loop_input.bpmn', 'main')
|
||||
self.workflow = BpmnWorkflow(self.spec)
|
||||
|
||||
def testListWithDictOutput(self):
|
||||
data = {
|
||||
'input_data': [1, 2, 3],
|
||||
'output_data': {},
|
||||
}
|
||||
self.set_io_and_run_workflow(data, data_input='input_data', data_output='output_data')
|
||||
self.assertDictEqual(self.workflow.data, {
|
||||
'input_data': [1, 2, 3],
|
||||
'output_data': {0: 2, 1: 4, 2: 6},
|
||||
})
|
||||
|
||||
def testDictWithListOutput(self):
|
||||
data = {
|
||||
'input_data': {'a': 1, 'b': 2, 'c': 3},
|
||||
'output_data': [],
|
||||
}
|
||||
self.set_io_and_run_workflow(data, data_input='input_data', data_output='output_data')
|
||||
self.assertDictEqual(self.workflow.data, {
|
||||
'input_data': {'a': 1, 'b': 2, 'c': 3},
|
||||
'output_data': [2, 4, 6],
|
||||
})
|
||||
|
||||
def testNonEmptyOutput(self):
|
||||
with self.assertRaises(WorkflowDataException) as exc:
|
||||
data = {
|
||||
'input_data': [1, 2, 3],
|
||||
'output_data': [1, 2, 3],
|
||||
}
|
||||
self.set_io_and_run_workflow(data, data_input='input_data', data_output='output_data')
|
||||
self.assertEqual(exc.exception.message,
|
||||
"If the input is not being updated in place, the output must be empty or it must be a map (dict)")
|
||||
|
||||
def testInvalidOutputType(self):
|
||||
with self.assertRaises(WorkflowDataException) as exc:
|
||||
data = {
|
||||
'input_data': set([1, 2, 3]),
|
||||
'output_data': set(),
|
||||
}
|
||||
self.set_io_and_run_workflow(data, data_input='input_data', data_output='output_data')
|
||||
self.assertEqual(exc.exception.message, "Only a mutable map (dict) or sequence (list) can be used for output")
|
||||
|
||||
|
||||
class ParallelMultiInstanceNewOutputTest(BaseTestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.spec, subprocess = self.load_workflow_spec('parallel_multiinstance_loop_input.bpmn', 'main')
|
||||
self.workflow = BpmnWorkflow(self.spec)
|
||||
|
||||
def testList(self):
|
||||
data = {'input_data': [1, 2, 3]}
|
||||
self.set_io_and_run_workflow(data, data_input='input_data', data_output='output_data')
|
||||
self.assertDictEqual(self.workflow.data, {
|
||||
'input_data': [1, 2, 3],
|
||||
'output_data': [2, 4, 6]
|
||||
})
|
||||
|
||||
def testListSaveRestore(self):
|
||||
data = {'input_data': [1, 2, 3]}
|
||||
self.set_io_and_run_workflow(data, data_input='input_data', data_output='output_data', save_restore=True)
|
||||
self.assertDictEqual(self.workflow.data, {
|
||||
'input_data': [1, 2, 3],
|
||||
'output_data': [2, 4, 6]
|
||||
})
|
||||
|
||||
def testDict(self):
|
||||
data = {'input_data': {'a': 1, 'b': 2, 'c': 3} }
|
||||
self.set_io_and_run_workflow(data, data_input='input_data', data_output='output_data')
|
||||
self.assertDictEqual(self.workflow.data, {
|
||||
'input_data': {'a': 1, 'b': 2, 'c': 3},
|
||||
'output_data': {'a': 2, 'b': 4, 'c': 6}
|
||||
})
|
||||
|
||||
def testDictSaveRestore(self):
|
||||
data = {'input_data': {'a': 1, 'b': 2, 'c': 3} }
|
||||
self.set_io_and_run_workflow(data, data_input='input_data', data_output='output_data', save_restore=True)
|
||||
self.assertDictEqual(self.workflow.data, {
|
||||
'input_data': {'a': 1, 'b': 2, 'c': 3},
|
||||
'output_data': {'a': 2, 'b': 4, 'c': 6}
|
||||
})
|
||||
|
||||
def testSet(self):
|
||||
data = {'input_data': set([1, 2, 3])}
|
||||
self.set_io_and_run_workflow(data, data_input='input_data', data_output='output_data')
|
||||
self.assertDictEqual(self.workflow.data, {
|
||||
'input_data': set([1, 2, 3]),
|
||||
'output_data': [2, 4, 6]
|
||||
})
|
||||
|
||||
def testEmptyCollection(self):
|
||||
|
||||
start = self.workflow.get_tasks_from_spec_name('Start')[0]
|
||||
start.data = {'input_data': []}
|
||||
self.workflow.do_engine_steps()
|
||||
self.assertTrue(self.workflow.is_completed())
|
||||
self.assertDictEqual(self.workflow.data, {'input_data': [], 'output_data': []})
|
||||
|
||||
def testCondition(self):
|
||||
self.run_workflow_with_condition({'input_data': [1, 2, 3]})
|
||||
self.assertDictEqual(self.workflow.data, {
|
||||
'input_data': [1, 2, 3],
|
||||
'output_data': [4]
|
||||
})
|
||||
|
||||
|
||||
class ParallelMultiInstanceUpdateInputTest(BaseTestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.spec, subprocess = self.load_workflow_spec('parallel_multiinstance_loop_input.bpmn', 'main')
|
||||
self.workflow = BpmnWorkflow(self.spec)
|
||||
|
||||
def testList(self):
|
||||
data = { 'input_data': [1, 2, 3]}
|
||||
self.set_io_and_run_workflow(data, data_input='input_data', data_output='input_data')
|
||||
self.assertDictEqual(self.workflow.data, {'input_data': [2, 4, 6]})
|
||||
|
||||
def testDict(self):
|
||||
data = { 'input_data': {'a': 1, 'b': 2, 'c': 3}}
|
||||
self.set_io_and_run_workflow(data, data_input='input_data', data_output='input_data')
|
||||
self.assertDictEqual(self.workflow.data, {'input_data': {'a': 2, 'b': 4, 'c': 6}})
|
||||
|
||||
|
||||
class ParallelMultiInstanceWithCardinality(BaseTestCase):
|
||||
|
||||
def setUp(self) -> None:
|
||||
self.spec, subprocess = self.load_workflow_spec('parallel_multiinstance_cardinality.bpmn', 'main')
|
||||
self.workflow = BpmnWorkflow(self.spec)
|
||||
|
||||
def testCardinality(self):
|
||||
self.set_io_and_run_workflow({}, data_output='output_data')
|
||||
self.assertDictEqual(self.workflow.data, {'output_data': [0, 2, 4]})
|
||||
|
||||
def testCardinalitySaveRestore(self):
|
||||
self.set_io_and_run_workflow({}, data_output='output_data', save_restore=True)
|
||||
self.assertDictEqual(self.workflow.data, {'output_data': [0, 2, 4]})
|
||||
|
||||
def testCondition(self):
|
||||
self.run_workflow_with_condition({})
|
||||
self.assertDictEqual(self.workflow.data, {
|
||||
'output_data': [4]
|
||||
})
|
||||
|
||||
|
||||
class ParallelMultiInstanceTaskTest(BpmnWorkflowTestCase):
|
||||
|
||||
def check_reference(self, reference, name):
|
||||
self.assertIsInstance(reference, TaskDataReference)
|
||||
self.assertEqual(reference.name, name)
|
||||
|
||||
def testParseInputOutput(self):
|
||||
spec, subprocess = self.load_workflow_spec('parallel_multiinstance_loop_input.bpmn', 'main')
|
||||
workflow = BpmnWorkflow(spec)
|
||||
task_spec = workflow.get_tasks_from_spec_name('any_task')[0].task_spec
|
||||
self.check_reference(task_spec.data_input, 'input_data')
|
||||
self.check_reference(task_spec.data_output, 'output_data')
|
||||
self.check_reference(task_spec.input_item, 'input_item')
|
||||
self.check_reference(task_spec.output_item, 'output_item')
|
||||
self.assertIsNone(task_spec.cardinality)
|
||||
|
||||
def testParseCardinality(self):
|
||||
spec, subprocess = self.load_workflow_spec('parallel_multiinstance_cardinality.bpmn', 'main')
|
||||
workflow = BpmnWorkflow(spec)
|
||||
task_spec = workflow.get_tasks_from_spec_name('any_task')[0].task_spec
|
||||
self.assertIsNone(task_spec.data_input)
|
||||
self.assertEqual(task_spec.cardinality, '3')
|
||||
|
||||
def testInvalidBpmn(self):
|
||||
with self.assertRaises(ValidationException) as exc:
|
||||
spec, subprocess = self.load_workflow_spec('parallel_multiinstance_invalid.bpmn', 'main')
|
||||
self.assertEqual(exc.exception.message,
|
||||
'A multiinstance task must specify exactly one of cardinality or loop input data reference.')
|
|
@ -1,16 +1,12 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
import sys
|
||||
import os
|
||||
import unittest
|
||||
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..'))
|
||||
|
||||
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
|
||||
from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
|
||||
|
||||
__author__ = 'matth'
|
||||
|
||||
|
||||
class MultiInstanceTest(BpmnWorkflowTestCase):
|
||||
class ParallelOrderTest(BpmnWorkflowTestCase):
|
||||
"""The example bpmn diagram has a 4 parallel workflows, this
|
||||
verifies that the parallel tasks have a natural order that follows
|
||||
the visual layout of the diagram, rather than just the order in which
|
||||
|
@ -33,6 +29,6 @@ class MultiInstanceTest(BpmnWorkflowTestCase):
|
|||
|
||||
|
||||
def suite():
|
||||
return unittest.TestLoader().loadTestsFromTestCase(MultiInstanceTest)
|
||||
return unittest.TestLoader().loadTestsFromTestCase(ParallelOrderTest)
|
||||
if __name__ == '__main__':
|
||||
unittest.TextTestRunner(verbosity=2).run(suite())
|
||||
|
|
|
@ -1,25 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
|
||||
import unittest
|
||||
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
|
||||
from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
|
||||
|
||||
__author__ = 'leashys'
|
||||
|
||||
|
||||
class ParallelWithScriptTest(BpmnWorkflowTestCase):
|
||||
|
||||
def setUp(self):
|
||||
spec, subprocesses = self.load_workflow_spec('ParallelWithScript.bpmn', 'ParallelWithScript')
|
||||
self.workflow = BpmnWorkflow(spec, subprocesses)
|
||||
|
||||
def testRunThroughParallel(self):
|
||||
self.workflow.do_engine_steps()
|
||||
# TODO: what to assert here?
|
||||
|
||||
def suite():
|
||||
return unittest.TestLoader().loadTestsFromTestCase(ParallelWithScriptTest)
|
||||
if __name__ == '__main__':
|
||||
unittest.TextTestRunner(verbosity=2).run(suite())
|
|
@@ -13,8 +13,8 @@ class ParserTest(unittest.TestCase):
        bpmn_file = os.path.join(os.path.dirname(__file__), 'data', 'io_spec.bpmn')
        parser.add_bpmn_file(bpmn_file)
        spec = parser.get_spec('subprocess')
        self.assertEqual(len(spec.data_inputs), 2)
        self.assertEqual(len(spec.data_outputs), 2)
        self.assertEqual(len(spec.io_specification.data_inputs), 2)
        self.assertEqual(len(spec.io_specification.data_outputs), 2)

    def testDataReferences(self):

@@ -0,0 +1,28 @@

from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.task import TaskState

from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase

class ResetTimerTest(BpmnWorkflowTestCase):

    def test_timer(self):
        spec, subprocess = self.load_workflow_spec('reset_timer.bpmn', 'main')
        self.workflow = BpmnWorkflow(spec, subprocess)
        self.workflow.do_engine_steps()
        task_1 = self.workflow.get_tasks_from_spec_name('task_1')[0]
        timer = self.workflow.get_tasks_from_spec_name('timer')[0]
        original_timer = timer.internal_data.get('event_value')
        # This returns us to the task
        task_1.data['modify'] = True
        task_1.complete()
        self.workflow.do_engine_steps()
        # The timer should be waiting and the time should have been updated
        self.assertEqual(task_1.state, TaskState.READY)
        self.assertEqual(timer.state, TaskState.WAITING)
        self.assertGreater(timer.internal_data.get('event_value'), original_timer)
        task_1.data['modify'] = False
        task_1.complete()
        self.workflow.do_engine_steps()
        self.assertEqual(timer.state, TaskState.CANCELLED)
        self.assertTrue(self.workflow.is_completed())

@@ -0,0 +1,213 @@
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.bpmn.exceptions import WorkflowDataException
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.bpmn.specs.data_spec import TaskDataReference

from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase


class BaseTestCase(BpmnWorkflowTestCase):

    def set_io_and_run_workflow(self, data, data_input=None, data_output=None, save_restore=False):

        start = self.workflow.get_tasks_from_spec_name('Start')[0]
        start.data = data

        any_task = self.workflow.get_tasks_from_spec_name('any_task')[0]
        any_task.task_spec.data_input = TaskDataReference(data_input) if data_input is not None else None
        any_task.task_spec.data_output = TaskDataReference(data_output) if data_output is not None else None

        self.workflow.do_engine_steps()
        self.workflow.refresh_waiting_tasks()
        ready_tasks = self.workflow.get_ready_user_tasks()

        while len(ready_tasks) > 0:
            self.assertEqual(len(ready_tasks), 1)
            task = ready_tasks[0]
            self.assertEqual(task.task_spec.name, 'any_task [child]')
            self.assertIn('input_item', task.data)
            task.data['output_item'] = task.data['input_item'] * 2
            task.complete()
            if save_restore:
                self.save_restore()
            ready_tasks = self.workflow.get_ready_user_tasks()

        self.workflow.do_engine_steps()
        children = self.workflow.get_tasks_from_spec_name('any_task [child]')
        self.assertEqual(len(children), 3)
        self.assertTrue(self.workflow.is_completed())

    def run_workflow_with_condition(self, data, condition):

        start = self.workflow.get_tasks_from_spec_name('Start')[0]
        start.data = data

        task = self.workflow.get_tasks_from_spec_name('any_task')[0]
        task.task_spec.condition = condition

        self.workflow.do_engine_steps()
        self.workflow.refresh_waiting_tasks()
        ready_tasks = self.workflow.get_ready_user_tasks()

        while len(ready_tasks) > 0:
            ready = ready_tasks[0]
            self.assertEqual(ready.task_spec.name, 'any_task [child]')
            self.assertIn('input_item', ready.data)
            ready.data['output_item'] = ready.data['input_item'] * 2
            ready.complete()
            self.workflow.do_engine_steps()
            self.workflow.refresh_waiting_tasks()
            ready_tasks = self.workflow.get_ready_user_tasks()

        self.workflow.do_engine_steps()
        children = self.workflow.get_tasks_from_spec_name('any_task [child]')
        self.assertEqual(len(children), 2)
        self.assertTrue(self.workflow.is_completed())


class SequentialMultiInstanceExistingOutputTest(BaseTestCase):

    def setUp(self):
        self.spec, subprocess = self.load_workflow_spec('sequential_multiinstance_loop_input.bpmn', 'main')
        self.workflow = BpmnWorkflow(self.spec)

    def testListWithDictOutput(self):
        data = {
            'input_data': [1, 2, 3],
            'output_data': {},
        }
        self.set_io_and_run_workflow(data, data_input='input_data', data_output='output_data')
        self.assertDictEqual(self.workflow.data, {
            'input_data': [1, 2, 3],
            'output_data': {0: 2, 1: 4, 2: 6},
        })

    def testDictWithListOutput(self):
        data = {
            'input_data': {'a': 1, 'b': 2, 'c': 3},
            'output_data': [],
        }
        self.set_io_and_run_workflow(data, data_input='input_data', data_output='output_data')
        self.assertDictEqual(self.workflow.data, {
            'input_data': {'a': 1, 'b': 2, 'c': 3},
            'output_data': [2, 4, 6],
        })

    def testNonEmptyOutput(self):
        with self.assertRaises(WorkflowDataException) as exc:
            data = {
                'input_data': [1, 2, 3],
                'output_data': [1, 2, 3],
            }
            self.set_io_and_run_workflow(data, data_input='input_data', data_output='output_data')
        self.assertEqual(exc.exception.message,
            "If the input is not being updated in place, the output must be empty or it must be a map (dict)")

    def testInvalidOutputType(self):
        with self.assertRaises(WorkflowDataException) as exc:
            data = {
                'input_data': set([1, 2, 3]),
                'output_data': set(),
            }
            self.set_io_and_run_workflow(data, data_input='input_data', data_output='output_data')
        self.assertEqual(exc.exception.message, "Only a mutable map (dict) or sequence (list) can be used for output")


class SequentialMultiInstanceNewOutputTest(BaseTestCase):

    def setUp(self):
        self.spec, subprocess = self.load_workflow_spec('sequential_multiinstance_loop_input.bpmn', 'main')
        self.workflow = BpmnWorkflow(self.spec)

    def testList(self):
        data = {'input_data': [1, 2, 3]}
        self.set_io_and_run_workflow(data, data_input='input_data', data_output='output_data')
        self.assertDictEqual(self.workflow.data, {
            'input_data': [1, 2, 3],
            'output_data': [2, 4, 6]
        })

    def testListSaveRestore(self):
        data = {'input_data': [1, 2, 3]}
        self.set_io_and_run_workflow(data, data_input='input_data', data_output='output_data', save_restore=True)
        self.assertDictEqual(self.workflow.data, {
            'input_data': [1, 2, 3],
            'output_data': [2, 4, 6]
        })

    def testDict(self):
        data = {'input_data': {'a': 1, 'b': 2, 'c': 3} }
        self.set_io_and_run_workflow(data, data_input='input_data', data_output='output_data')
        self.assertDictEqual(self.workflow.data, {
            'input_data': {'a': 1, 'b': 2, 'c': 3},
            'output_data': {'a': 2, 'b': 4, 'c': 6}
        })

    def testDictSaveRestore(self):
        data = {'input_data': {'a': 1, 'b': 2, 'c': 3} }
        self.set_io_and_run_workflow(data, data_input='input_data', data_output='output_data', save_restore=True)
        self.assertDictEqual(self.workflow.data, {
            'input_data': {'a': 1, 'b': 2, 'c': 3},
            'output_data': {'a': 2, 'b': 4, 'c': 6}
        })

    def testSet(self):
        data = {'input_data': set([1, 2, 3])}
        self.set_io_and_run_workflow(data, data_input='input_data', data_output='output_data')
        self.assertDictEqual(self.workflow.data, {
            'input_data': set([1, 2, 3]),
            'output_data': [2, 4, 6]
        })

    def testEmptyCollection(self):

        start = self.workflow.get_tasks_from_spec_name('Start')[0]
        start.data = {'input_data': []}
        self.workflow.do_engine_steps()
        self.assertTrue(self.workflow.is_completed())
        self.assertDictEqual(self.workflow.data, {'input_data': [], 'output_data': []})

    def testCondition(self):
        self.run_workflow_with_condition({'input_data': [1, 2, 3]}, "input_item == 2")
        self.assertDictEqual(self.workflow.data, {
            'input_data': [1, 2, 3],
            'output_data': [2, 4]
        })


class SequentialMultiInstanceUpdateInputTest(BaseTestCase):

    def setUp(self):
        self.spec, subprocess = self.load_workflow_spec('sequential_multiinstance_loop_input.bpmn', 'main')
        self.workflow = BpmnWorkflow(self.spec)

    def testList(self):
        data = { 'input_data': [1, 2, 3]}
        self.set_io_and_run_workflow(data, data_input='input_data', data_output='input_data')
        self.assertDictEqual(self.workflow.data, {'input_data': [2, 4, 6]})

    def testDict(self):
        data = { 'input_data': {'a': 1, 'b': 2, 'c': 3}}
        self.set_io_and_run_workflow(data, data_input='input_data', data_output='input_data')
        self.assertDictEqual(self.workflow.data, {'input_data': {'a': 2, 'b': 4, 'c': 6}})


class SequentialMultiInstanceWithCardinality(BaseTestCase):

    def setUp(self) -> None:
        self.spec, subprocess = self.load_workflow_spec('sequential_multiinstance_cardinality.bpmn', 'main')
        self.workflow = BpmnWorkflow(self.spec)

    def testCardinality(self):
        self.set_io_and_run_workflow({}, data_output='output_data')
        self.assertDictEqual(self.workflow.data, {'output_data': [0, 2, 4]})

    def testCardinalitySaveRestore(self):
        self.set_io_and_run_workflow({}, data_output='output_data', save_restore=True)
        self.assertDictEqual(self.workflow.data, {'output_data': [0, 2, 4]})

    def testCondition(self):
        self.run_workflow_with_condition({}, "input_item == 1")
        self.assertDictEqual(self.workflow.data, {
            'output_data': [0, 2]
        })

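The helper above drives each child of the sequential multi-instance task one at a time; outside the unittest harness the same loop looks roughly like the following sketch. The fixture path 'data/sequential_multiinstance_loop_input.bpmn' and the process name 'main' are taken from the tests above; the sketch itself is an illustration, not part of the commit.

import os

from SpiffWorkflow.bpmn.parser.BpmnParser import BpmnParser
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.bpmn.specs.data_spec import TaskDataReference

# Parse the fixture and build a workflow, mirroring what load_workflow_spec does above.
parser = BpmnParser()
parser.add_bpmn_file(os.path.join('data', 'sequential_multiinstance_loop_input.bpmn'))
workflow = BpmnWorkflow(parser.get_spec('main'))

# Seed the loop input collection and point the task at the collections by name,
# exactly as set_io_and_run_workflow does with TaskDataReference.
workflow.get_tasks_from_spec_name('Start')[0].data = {'input_data': [1, 2, 3]}
any_task = workflow.get_tasks_from_spec_name('any_task')[0]
any_task.task_spec.data_input = TaskDataReference('input_data')
any_task.task_spec.data_output = TaskDataReference('output_data')

workflow.do_engine_steps()
workflow.refresh_waiting_tasks()
ready = workflow.get_ready_user_tasks()
while ready:
    child = ready[0]  # sequential: exactly one 'any_task [child]' is ready at a time
    child.data['output_item'] = child.data['input_item'] * 2
    child.complete()
    ready = workflow.get_ready_user_tasks()
workflow.do_engine_steps()
# workflow.data now holds 'output_data': [2, 4, 6] alongside the untouched input.
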
@@ -0,0 +1,62 @@
import os

from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.bpmn.parser.BpmnParser import BpmnParser, ValidationException
from .BpmnWorkflowTestCase import BpmnWorkflowTestCase

class StandardLoopTest(BpmnWorkflowTestCase):

    def setUp(self):
        spec, subprocesses = self.load_workflow_spec('standard_loop.bpmn','main', validate=False)
        # This spec has a loop task with loopMaximum = 3 and loopCondition = 'done'
        self.workflow = BpmnWorkflow(spec, subprocesses)

    def testLoopMaximum(self):

        start = self.workflow.get_tasks_from_spec_name('StartEvent_1')
        start[0].data['done'] = False
        for idx in range(3):
            self.workflow.do_engine_steps()
            self.workflow.refresh_waiting_tasks()
            ready_tasks = self.workflow.get_ready_user_tasks()
            self.assertEqual(len(ready_tasks), 1)
            ready_tasks[0].data[str(idx)] = True
            ready_tasks[0].complete()

        self.workflow.do_engine_steps()
        self.assertTrue(self.workflow.is_completed())

    def testLoopCondition(self):

        start = self.workflow.get_tasks_from_spec_name('StartEvent_1')
        start[0].data['done'] = False

        self.workflow.do_engine_steps()
        self.workflow.refresh_waiting_tasks()
        ready_tasks = self.workflow.get_ready_user_tasks()
        self.assertEqual(len(ready_tasks), 1)
        ready_tasks[0].data['done'] = True
        ready_tasks[0].complete()

        self.workflow.do_engine_steps()
        self.assertTrue(self.workflow.is_completed())

    def testSkipLoop(self):

        # This is called "skip loop" because I thought "testTestBefore" was a terrible name
        start = self.workflow.get_tasks_from_spec_name('StartEvent_1')
        start[0].data['done'] = True
        self.workflow.do_engine_steps()
        self.workflow.refresh_waiting_tasks()
        self.workflow.do_engine_steps()
        self.assertTrue(self.workflow.is_completed())


class ParseStandardLoop(BpmnWorkflowTestCase):

    def testParseStandardLoop(self):
        parser = BpmnParser()
        # This process has neither a loop condition nor a loop maximum
        bpmn_file = os.path.join(os.path.dirname(__file__), 'data', 'standard_loop_invalid.bpmn')
        parser.add_bpmn_file(bpmn_file)
        self.assertRaises(ValidationException, parser.get_spec, 'main')

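For quick reference, the parser-level check exercised by ParseStandardLoop can be reproduced outside the test case as well. This is a sketch under the same assumptions (BpmnParser and the invalid fixture named in the test above); it is not part of the commit.

import os

from SpiffWorkflow.bpmn.parser.BpmnParser import BpmnParser, ValidationException

parser = BpmnParser()
# The fixture defines a standard loop with neither a loop condition nor a
# loop maximum, so building the spec should fail.
parser.add_bpmn_file(os.path.join('data', 'standard_loop_invalid.bpmn'))
try:
    parser.get_spec('main')
except ValidationException as exc:
    print(exc)  # validation happens when the spec is built, not when the file is added
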
@@ -1,51 +0,0 @@
# -*- coding: utf-8 -*-

import unittest

from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase

__author__ = 'matth'


class SubWorkflowMultiTest(BpmnWorkflowTestCase):

    expected_data = {
        'a': {'name': 'Apple_edit',
              'new_info': 'Adding this!'},
        'b': {'name': 'Bubble_edit',
              'new_info': 'Adding this!'},
        'c': {'name': 'Crap, I should write better code_edit',
              'new_info': 'Adding this!'}
    }

    def testSequential(self):
        spec, subprocesses = self.load_workflow_spec('sub_workflow_multi.bpmn', 'ScriptTest')
        self.workflow = BpmnWorkflow(spec, subprocesses)
        self.workflow.do_engine_steps()

        data = self.workflow.last_task.data
        self.assertEqual(data['my_collection'], self.expected_data)

    def testParallel(self):
        spec, subprocesses = self.load_workflow_spec('sub_workflow_multi_parallel.bpmn', 'ScriptTest')
        self.workflow = BpmnWorkflow(spec, subprocesses)
        self.workflow.do_engine_steps()

        data = self.workflow.last_task.data
        self.assertEqual(data['my_collection'], self.expected_data)

    def testWrapped(self):
        spec, subprocesses = self.load_workflow_spec('sub_within_sub_multi.bpmn', 'ScriptTest')
        self.workflow = BpmnWorkflow(spec, subprocesses)
        self.workflow.do_engine_steps()

        data = self.workflow.last_task.data
        self.assertEqual(self.expected_data, data['my_collection'])



def suite():
    return unittest.TestLoader().loadTestsFromTestCase(SubWorkflowMultiTest)
if __name__ == '__main__':
    unittest.TextTestRunner(verbosity=2).run(suite())

@@ -1,59 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_17fwemw" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.7.0">
|
||||
<bpmn:process id="MultiInstance" isExecutable="true">
|
||||
<bpmn:startEvent id="StartEvent_1" name="StartEvent_1">
|
||||
<bpmn:outgoing>Flow_0t6p1sb</bpmn:outgoing>
|
||||
</bpmn:startEvent>
|
||||
<bpmn:sequenceFlow id="Flow_0t6p1sb" sourceRef="StartEvent_1" targetRef="Activity_088tnzu" />
|
||||
<bpmn:endEvent id="Event_End" name="Event_End">
|
||||
<bpmn:incoming>Flow_0ugjw69</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
<bpmn:sequenceFlow id="Flow_0ugjw69" sourceRef="Activity_Loop" targetRef="Event_End" />
|
||||
<bpmn:userTask id="Activity_Loop" name="Activity_Loop">
|
||||
<bpmn:incoming>Flow_0ds4mp0</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_0ugjw69</bpmn:outgoing>
|
||||
<bpmn:multiInstanceLoopCharacteristics>
|
||||
<bpmn:loopCardinality xsi:type="bpmn:tFormalExpression">collection</bpmn:loopCardinality>
|
||||
</bpmn:multiInstanceLoopCharacteristics>
|
||||
</bpmn:userTask>
|
||||
<bpmn:task id="Activity_088tnzu" name="Setup">
|
||||
<bpmn:incoming>Flow_0t6p1sb</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_0ds4mp0</bpmn:outgoing>
|
||||
</bpmn:task>
|
||||
<bpmn:sequenceFlow id="Flow_0ds4mp0" sourceRef="Activity_088tnzu" targetRef="Activity_Loop" />
|
||||
</bpmn:process>
|
||||
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
|
||||
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="MultiInstance">
|
||||
<bpmndi:BPMNEdge id="Flow_0ugjw69_di" bpmnElement="Flow_0ugjw69">
|
||||
<di:waypoint x="480" y="117" />
|
||||
<di:waypoint x="582" y="117" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_0t6p1sb_di" bpmnElement="Flow_0t6p1sb">
|
||||
<di:waypoint x="208" y="117" />
|
||||
<di:waypoint x="230" y="117" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNShape id="Event_1g0pmib_di" bpmnElement="Event_End">
|
||||
<dc:Bounds x="582" y="99" width="36" height="36" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="575" y="142" width="54" height="14" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
|
||||
<dc:Bounds x="172" y="99" width="36" height="36" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="159" y="142" width="64" height="14" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_1iyilui_di" bpmnElement="Activity_Loop">
|
||||
<dc:Bounds x="380" y="77" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_088tnzu_di" bpmnElement="Activity_088tnzu">
|
||||
<dc:Bounds x="230" y="77" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNEdge id="Flow_0ds4mp0_di" bpmnElement="Flow_0ds4mp0">
|
||||
<di:waypoint x="330" y="117" />
|
||||
<di:waypoint x="380" y="117" />
|
||||
</bpmndi:BPMNEdge>
|
||||
</bpmndi:BPMNPlane>
|
||||
</bpmndi:BPMNDiagram>
|
||||
</bpmn:definitions>
|
|
@@ -1,145 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" id="Definitions_17fwemw" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="4.11.1">
|
||||
<bpmn:process id="MultiInstance" isExecutable="true">
|
||||
<bpmn:startEvent id="StartEvent_1" name="StartEvent_1">
|
||||
<bpmn:outgoing>Flow_0t6p1sb</bpmn:outgoing>
|
||||
</bpmn:startEvent>
|
||||
<bpmn:sequenceFlow id="Flow_0t6p1sb" sourceRef="StartEvent_1" targetRef="Activity_088tnzu" />
|
||||
<bpmn:endEvent id="Event_End" name="Event_End">
|
||||
<bpmn:incoming>Flow_0ugjw69</bpmn:incoming>
|
||||
<bpmn:incoming>Flow_1oo4mpj</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
<bpmn:sequenceFlow id="Flow_0ugjw69" sourceRef="Activity_Loop" targetRef="Event_End" />
|
||||
<bpmn:userTask id="Activity_Loop" name="Activity_Loop">
|
||||
<bpmn:incoming>Flow_0u92n7b</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_0ugjw69</bpmn:outgoing>
|
||||
<bpmn:multiInstanceLoopCharacteristics camunda:collection="collection" camunda:elementVariable="x" />
|
||||
</bpmn:userTask>
|
||||
<bpmn:task id="Activity_088tnzu" name="Setup">
|
||||
<bpmn:incoming>Flow_0t6p1sb</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_0ds4mp0</bpmn:outgoing>
|
||||
</bpmn:task>
|
||||
<bpmn:sequenceFlow id="Flow_0ds4mp0" sourceRef="Activity_088tnzu" targetRef="Gateway_08wnx3s" />
|
||||
<bpmn:exclusiveGateway id="Gateway_07go3pk" name="Filled Collection" default="Flow_0u92n7b">
|
||||
<bpmn:incoming>Flow_1sx7n9u</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_1oo4mpj</bpmn:outgoing>
|
||||
<bpmn:outgoing>Flow_0u92n7b</bpmn:outgoing>
|
||||
</bpmn:exclusiveGateway>
|
||||
<bpmn:sequenceFlow id="Flow_1oo4mpj" name="EmptyCollection" sourceRef="Gateway_07go3pk" targetRef="Event_End">
|
||||
<bpmn:conditionExpression xsi:type="bpmn:tFormalExpression">len(collection.keys())==0</bpmn:conditionExpression>
|
||||
</bpmn:sequenceFlow>
|
||||
<bpmn:sequenceFlow id="Flow_0u92n7b" name="Default" sourceRef="Gateway_07go3pk" targetRef="Activity_Loop" />
|
||||
<bpmn:sequenceFlow id="Flow_0io0g18" sourceRef="Activity_0flre28" targetRef="Gateway_1cn7vsp" />
|
||||
<bpmn:exclusiveGateway id="Gateway_08wnx3s" name="Always skip" default="Flow_1dah8xt">
|
||||
<bpmn:incoming>Flow_0ds4mp0</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_1dah8xt</bpmn:outgoing>
|
||||
<bpmn:outgoing>Flow_0i1bv5g</bpmn:outgoing>
|
||||
</bpmn:exclusiveGateway>
|
||||
<bpmn:sequenceFlow id="Flow_1dah8xt" name="Flow A" sourceRef="Gateway_08wnx3s" targetRef="Activity_0flre28" />
|
||||
<bpmn:manualTask id="Activity_0flre28" name="Do something">
|
||||
<bpmn:incoming>Flow_1dah8xt</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_0io0g18</bpmn:outgoing>
|
||||
</bpmn:manualTask>
|
||||
<bpmn:exclusiveGateway id="Gateway_1cn7vsp" default="Flow_1sx7n9u">
|
||||
<bpmn:incoming>Flow_0io0g18</bpmn:incoming>
|
||||
<bpmn:incoming>Flow_0i1bv5g</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_1sx7n9u</bpmn:outgoing>
|
||||
</bpmn:exclusiveGateway>
|
||||
<bpmn:sequenceFlow id="Flow_1sx7n9u" sourceRef="Gateway_1cn7vsp" targetRef="Gateway_07go3pk" />
|
||||
<bpmn:sequenceFlow id="Flow_0i1bv5g" name="Flow B" sourceRef="Gateway_08wnx3s" targetRef="Gateway_1cn7vsp">
|
||||
<bpmn:conditionExpression xsi:type="bpmn:tFormalExpression">1==1</bpmn:conditionExpression>
|
||||
</bpmn:sequenceFlow>
|
||||
</bpmn:process>
|
||||
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
|
||||
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="MultiInstance">
|
||||
<bpmndi:BPMNEdge id="Flow_0i1bv5g_di" bpmnElement="Flow_0i1bv5g">
|
||||
<di:waypoint x="370" y="142" />
|
||||
<di:waypoint x="370" y="280" />
|
||||
<di:waypoint x="560" y="280" />
|
||||
<di:waypoint x="560" y="142" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="448" y="262" width="34" height="14" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_1sx7n9u_di" bpmnElement="Flow_1sx7n9u">
|
||||
<di:waypoint x="585" y="117" />
|
||||
<di:waypoint x="615" y="117" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_1dah8xt_di" bpmnElement="Flow_1dah8xt">
|
||||
<di:waypoint x="395" y="117" />
|
||||
<di:waypoint x="430" y="117" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="395" y="99" width="35" height="14" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_0io0g18_di" bpmnElement="Flow_0io0g18">
|
||||
<di:waypoint x="530" y="117" />
|
||||
<di:waypoint x="535" y="117" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_0u92n7b_di" bpmnElement="Flow_0u92n7b">
|
||||
<di:waypoint x="665" y="117" />
|
||||
<di:waypoint x="710" y="117" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="670" y="99" width="35" height="14" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_1oo4mpj_di" bpmnElement="Flow_1oo4mpj">
|
||||
<di:waypoint x="640" y="142" />
|
||||
<di:waypoint x="640" y="330" />
|
||||
<di:waypoint x="930" y="330" />
|
||||
<di:waypoint x="930" y="135" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="745" y="312" width="80" height="14" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_0ds4mp0_di" bpmnElement="Flow_0ds4mp0">
|
||||
<di:waypoint x="330" y="117" />
|
||||
<di:waypoint x="345" y="117" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_0ugjw69_di" bpmnElement="Flow_0ugjw69">
|
||||
<di:waypoint x="810" y="117" />
|
||||
<di:waypoint x="912" y="117" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_0t6p1sb_di" bpmnElement="Flow_0t6p1sb">
|
||||
<di:waypoint x="208" y="117" />
|
||||
<di:waypoint x="230" y="117" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
|
||||
<dc:Bounds x="172" y="99" width="36" height="36" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="159" y="142" width="64" height="14" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Event_1g0pmib_di" bpmnElement="Event_End">
|
||||
<dc:Bounds x="912" y="99" width="36" height="36" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="903" y="75" width="54" height="14" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_1iyilui_di" bpmnElement="Activity_Loop">
|
||||
<dc:Bounds x="710" y="77" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_088tnzu_di" bpmnElement="Activity_088tnzu">
|
||||
<dc:Bounds x="230" y="77" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Gateway_07go3pk_di" bpmnElement="Gateway_07go3pk" isMarkerVisible="true">
|
||||
<dc:Bounds x="615" y="92" width="50" height="50" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="602" y="62" width="78" height="14" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Gateway_08wnx3s_di" bpmnElement="Gateway_08wnx3s" isMarkerVisible="true">
|
||||
<dc:Bounds x="345" y="92" width="50" height="50" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="341" y="62" width="58" height="14" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_0ow7pu7_di" bpmnElement="Activity_0flre28">
|
||||
<dc:Bounds x="430" y="77" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Gateway_1cn7vsp_di" bpmnElement="Gateway_1cn7vsp" isMarkerVisible="true">
|
||||
<dc:Bounds x="535" y="92" width="50" height="50" />
|
||||
</bpmndi:BPMNShape>
|
||||
</bpmndi:BPMNPlane>
|
||||
</bpmndi:BPMNDiagram>
|
||||
</bpmn:definitions>
|
|
@@ -1,117 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_196qfv1" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="4.0.0">
|
||||
<bpmn:process id="ParallelWithScript" name="A" isExecutable="true">
|
||||
<bpmn:startEvent id="StartEvent_1">
|
||||
<bpmn:outgoing>Flow_1swtnkk</bpmn:outgoing>
|
||||
</bpmn:startEvent>
|
||||
<bpmn:sequenceFlow id="Flow_1swtnkk" sourceRef="StartEvent_1" targetRef="Gateway_1" />
|
||||
<bpmn:sequenceFlow id="Flow_1ukvcj0" sourceRef="Gateway_1" targetRef="user_task_A" />
|
||||
<bpmn:sequenceFlow id="Flow_188f01l" sourceRef="Gateway_1" targetRef="user_task_B" />
|
||||
<bpmn:sequenceFlow id="Flow_1empxbr" sourceRef="Gateway_1" targetRef="script_task_C" />
|
||||
<bpmn:sequenceFlow id="Flow_1m1yz1x" sourceRef="script_task_C" targetRef="user_task_C" />
|
||||
<bpmn:sequenceFlow id="Flow_0ykkbts" sourceRef="user_task_B" targetRef="Gateway_2" />
|
||||
<bpmn:sequenceFlow id="Flow_0lmf2gd" sourceRef="user_task_A" targetRef="Gateway_2" />
|
||||
<bpmn:sequenceFlow id="Flow_0954wrk" sourceRef="user_task_C" targetRef="Gateway_2" />
|
||||
<bpmn:scriptTask id="script_task_C" name="Script">
|
||||
<bpmn:incoming>Flow_1empxbr</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_1m1yz1x</bpmn:outgoing>
|
||||
<bpmn:script># do nothing</bpmn:script>
|
||||
</bpmn:scriptTask>
|
||||
<bpmn:endEvent id="Event_0exe5n0">
|
||||
<bpmn:incoming>Flow_04k0ue9</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
<bpmn:sequenceFlow id="Flow_04k0ue9" sourceRef="Gateway_2" targetRef="Event_0exe5n0" />
|
||||
<bpmn:parallelGateway id="Gateway_1">
|
||||
<bpmn:incoming>Flow_1swtnkk</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_1ukvcj0</bpmn:outgoing>
|
||||
<bpmn:outgoing>Flow_188f01l</bpmn:outgoing>
|
||||
<bpmn:outgoing>Flow_1empxbr</bpmn:outgoing>
|
||||
</bpmn:parallelGateway>
|
||||
<bpmn:parallelGateway id="Gateway_2">
|
||||
<bpmn:incoming>Flow_0ykkbts</bpmn:incoming>
|
||||
<bpmn:incoming>Flow_0lmf2gd</bpmn:incoming>
|
||||
<bpmn:incoming>Flow_0954wrk</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_04k0ue9</bpmn:outgoing>
|
||||
</bpmn:parallelGateway>
|
||||
<bpmn:manualTask id="user_task_A" name="Task A">
|
||||
<bpmn:incoming>Flow_1ukvcj0</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_0lmf2gd</bpmn:outgoing>
|
||||
</bpmn:manualTask>
|
||||
<bpmn:manualTask id="user_task_B" name="Task B">
|
||||
<bpmn:incoming>Flow_188f01l</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_0ykkbts</bpmn:outgoing>
|
||||
</bpmn:manualTask>
|
||||
<bpmn:manualTask id="user_task_C" name="Task C">
|
||||
<bpmn:incoming>Flow_1m1yz1x</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_0954wrk</bpmn:outgoing>
|
||||
</bpmn:manualTask>
|
||||
</bpmn:process>
|
||||
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
|
||||
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="ParallelWithScript">
|
||||
<bpmndi:BPMNEdge id="Flow_1swtnkk_di" bpmnElement="Flow_1swtnkk">
|
||||
<di:waypoint x="188" y="117" />
|
||||
<di:waypoint x="435" y="117" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_1ukvcj0_di" bpmnElement="Flow_1ukvcj0">
|
||||
<di:waypoint x="485" y="117" />
|
||||
<di:waypoint x="640" y="117" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_188f01l_di" bpmnElement="Flow_188f01l">
|
||||
<di:waypoint x="460" y="142" />
|
||||
<di:waypoint x="460" y="230" />
|
||||
<di:waypoint x="640" y="230" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_1empxbr_di" bpmnElement="Flow_1empxbr">
|
||||
<di:waypoint x="460" y="142" />
|
||||
<di:waypoint x="460" y="340" />
|
||||
<di:waypoint x="510" y="340" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_1m1yz1x_di" bpmnElement="Flow_1m1yz1x">
|
||||
<di:waypoint x="610" y="340" />
|
||||
<di:waypoint x="640" y="340" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_0ykkbts_di" bpmnElement="Flow_0ykkbts">
|
||||
<di:waypoint x="740" y="230" />
|
||||
<di:waypoint x="865" y="230" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_0lmf2gd_di" bpmnElement="Flow_0lmf2gd">
|
||||
<di:waypoint x="740" y="117" />
|
||||
<di:waypoint x="890" y="117" />
|
||||
<di:waypoint x="890" y="205" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_0954wrk_di" bpmnElement="Flow_0954wrk">
|
||||
<di:waypoint x="740" y="340" />
|
||||
<di:waypoint x="890" y="340" />
|
||||
<di:waypoint x="890" y="255" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_04k0ue9_di" bpmnElement="Flow_04k0ue9">
|
||||
<di:waypoint x="915" y="230" />
|
||||
<di:waypoint x="1052" y="230" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNShape id="Event_0exe5n0_di" bpmnElement="Event_0exe5n0">
|
||||
<dc:Bounds x="1052" y="212" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Gateway_1f2ua0v_di" bpmnElement="Gateway_1">
|
||||
<dc:Bounds x="435" y="92" width="50" height="50" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Gateway_06epsj9_di" bpmnElement="Gateway_2">
|
||||
<dc:Bounds x="865" y="205" width="50" height="50" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_1uxd70j_di" bpmnElement="user_task_A">
|
||||
<dc:Bounds x="640" y="77" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_1cr04li_di" bpmnElement="user_task_B">
|
||||
<dc:Bounds x="640" y="190" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_0ze74eq_di" bpmnElement="script_task_C">
|
||||
<dc:Bounds x="510" y="300" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_1mznorb_di" bpmnElement="user_task_C">
|
||||
<dc:Bounds x="640" y="300" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
|
||||
<dc:Bounds x="152" y="99" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
</bpmndi:BPMNPlane>
|
||||
</bpmndi:BPMNDiagram>
|
||||
</bpmn:definitions>
|
|
@@ -1,47 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" id="Definitions_0lhdj7m" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.7.0">
|
||||
<bpmn:process id="LoopTaskTest" isExecutable="true">
|
||||
<bpmn:startEvent id="StartEvent_1">
|
||||
<bpmn:outgoing>Flow_0q33jmj</bpmn:outgoing>
|
||||
</bpmn:startEvent>
|
||||
<bpmn:userTask id="Activity_TestLoop" name="Loop till user says we are done" camunda:formKey="LoopForm">
|
||||
<bpmn:documentation>Enter Name for member {{ Activity_TestLoop_CurrentVar }}</bpmn:documentation>
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="FormField_FirstName" label="Enter First Name" type="string" />
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>Flow_0q33jmj</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_13213ce</bpmn:outgoing>
|
||||
<bpmn:multiInstanceLoopCharacteristics isSequential="true">
|
||||
<bpmn:loopCardinality xsi:type="bpmn:tFormalExpression">5</bpmn:loopCardinality>
|
||||
</bpmn:multiInstanceLoopCharacteristics>
|
||||
</bpmn:userTask>
|
||||
<bpmn:sequenceFlow id="Flow_0q33jmj" sourceRef="StartEvent_1" targetRef="Activity_TestLoop" />
|
||||
<bpmn:endEvent id="Event_0l4x230">
|
||||
<bpmn:incoming>Flow_13213ce</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
<bpmn:sequenceFlow id="Flow_13213ce" sourceRef="Activity_TestLoop" targetRef="Event_0l4x230" />
|
||||
</bpmn:process>
|
||||
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
|
||||
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="LoopTaskTest">
|
||||
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
|
||||
<dc:Bounds x="179" y="99" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_0kugfe7_di" bpmnElement="Activity_TestLoop">
|
||||
<dc:Bounds x="270" y="77" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNEdge id="Flow_0q33jmj_di" bpmnElement="Flow_0q33jmj">
|
||||
<di:waypoint x="215" y="117" />
|
||||
<di:waypoint x="270" y="117" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNShape id="Event_1tmau7e_di" bpmnElement="Event_0l4x230">
|
||||
<dc:Bounds x="472" y="99" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNEdge id="Flow_13213ce_di" bpmnElement="Flow_13213ce">
|
||||
<di:waypoint x="370" y="117" />
|
||||
<di:waypoint x="472" y="117" />
|
||||
</bpmndi:BPMNEdge>
|
||||
</bpmndi:BPMNPlane>
|
||||
</bpmndi:BPMNDiagram>
|
||||
</bpmn:definitions>
|
|
@@ -1,45 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_0lhdj7m" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.7.0">
|
||||
<bpmn:process id="LoopTaskTest" isExecutable="true">
|
||||
<bpmn:startEvent id="StartEvent_1">
|
||||
<bpmn:outgoing>Flow_0q33jmj</bpmn:outgoing>
|
||||
</bpmn:startEvent>
|
||||
<bpmn:userTask id="Activity_TestLoop" name="Loop till user says we are done" camunda:formKey="LoopForm">
|
||||
<bpmn:documentation>Enter Name for member {{ Activity_TestLoop_CurrentVar }}</bpmn:documentation>
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="FormField_FirstName" label="Enter First Name" type="string" />
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>Flow_0q33jmj</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_13213ce</bpmn:outgoing>
|
||||
<bpmn:standardLoopCharacteristics />
|
||||
</bpmn:userTask>
|
||||
<bpmn:sequenceFlow id="Flow_0q33jmj" sourceRef="StartEvent_1" targetRef="Activity_TestLoop" />
|
||||
<bpmn:endEvent id="Event_0l4x230">
|
||||
<bpmn:incoming>Flow_13213ce</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
<bpmn:sequenceFlow id="Flow_13213ce" sourceRef="Activity_TestLoop" targetRef="Event_0l4x230" />
|
||||
</bpmn:process>
|
||||
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
|
||||
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="LoopTaskTest">
|
||||
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
|
||||
<dc:Bounds x="179" y="99" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_0kugfe7_di" bpmnElement="Activity_TestLoop">
|
||||
<dc:Bounds x="270" y="77" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNEdge id="Flow_0q33jmj_di" bpmnElement="Flow_0q33jmj">
|
||||
<di:waypoint x="215" y="117" />
|
||||
<di:waypoint x="270" y="117" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNShape id="Event_1tmau7e_di" bpmnElement="Event_0l4x230">
|
||||
<dc:Bounds x="472" y="99" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNEdge id="Flow_13213ce_di" bpmnElement="Flow_13213ce">
|
||||
<di:waypoint x="370" y="117" />
|
||||
<di:waypoint x="472" y="117" />
|
||||
</bpmndi:BPMNEdge>
|
||||
</bpmndi:BPMNPlane>
|
||||
</bpmndi:BPMNDiagram>
|
||||
</bpmn:definitions>
|
|
@@ -1,49 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_17fwemw" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.7.0">
|
||||
<bpmn:process id="MultiInstance" isExecutable="true">
|
||||
<bpmn:startEvent id="StartEvent_1" name="StartEvent_1">
|
||||
<bpmn:outgoing>Flow_0t6p1sb</bpmn:outgoing>
|
||||
</bpmn:startEvent>
|
||||
<bpmn:sequenceFlow id="Flow_0t6p1sb" sourceRef="StartEvent_1" targetRef="Activity_Loop" />
|
||||
<bpmn:endEvent id="Event_End" name="Event_End">
|
||||
<bpmn:incoming>Flow_0ugjw69</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
<bpmn:sequenceFlow id="Flow_0ugjw69" sourceRef="Activity_Loop" targetRef="Event_End" />
|
||||
<bpmn:userTask id="Activity_Loop" name="Activity_Loop">
|
||||
<bpmn:incoming>Flow_0t6p1sb</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_0ugjw69</bpmn:outgoing>
|
||||
<bpmn:multiInstanceLoopCharacteristics isSequential="true">
|
||||
<bpmn:loopCardinality xsi:type="bpmn:tFormalExpression">5</bpmn:loopCardinality>
|
||||
</bpmn:multiInstanceLoopCharacteristics>
|
||||
</bpmn:userTask>
|
||||
</bpmn:process>
|
||||
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
|
||||
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="MultiInstance">
|
||||
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
|
||||
<dc:Bounds x="179" y="112" width="36" height="36" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="166" y="155" width="64" height="14" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNEdge id="Flow_0t6p1sb_di" bpmnElement="Flow_0t6p1sb">
|
||||
<di:waypoint x="215" y="130" />
|
||||
<di:waypoint x="248" y="130" />
|
||||
<di:waypoint x="248" y="117" />
|
||||
<di:waypoint x="280" y="117" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNShape id="Event_1g0pmib_di" bpmnElement="Event_End">
|
||||
<dc:Bounds x="492" y="99" width="36" height="36" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="485" y="142" width="54" height="14" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNEdge id="Flow_0ugjw69_di" bpmnElement="Flow_0ugjw69">
|
||||
<di:waypoint x="380" y="117" />
|
||||
<di:waypoint x="492" y="117" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNShape id="Activity_1iyilui_di" bpmnElement="Activity_Loop">
|
||||
<dc:Bounds x="280" y="77" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
</bpmndi:BPMNPlane>
|
||||
</bpmndi:BPMNDiagram>
|
||||
</bpmn:definitions>
|
|
@@ -11,6 +11,7 @@
    <bpmn:dataObjectReference id="DataObjectReference_17fhr1j" name="Data" dataObjectRef="obj_1" />
    <bpmn:dataObjectReference id="DataObjectReference_0pztwm3" name="Data" dataObjectRef="obj_1" />
    <bpmn:dataObjectReference id="DataObjectReference_0cm8dnh" name="Data" dataObjectRef="obj_1" />
    <bpmn:dataObjectReference id="DataObjectReference_1dn9eoi" name="Data" dataObjectRef="obj_1" />
    <bpmn:endEvent id="Event_0qw1yr0">
      <bpmn:incoming>Flow_19pyf8s</bpmn:incoming>
    </bpmn:endEvent>

@@ -38,17 +39,20 @@
    <bpmn:subProcess id="subprocess" name="Subprocess">
      <bpmn:incoming>Flow_1tnu3ej</bpmn:incoming>
      <bpmn:outgoing>Flow_19pyf8s</bpmn:outgoing>
      <bpmn:property id="Property_1q5wp77" name="__targetRef_placeholder" />
      <bpmn:dataInputAssociation id="DataInputAssociation_0w2qahx">
        <bpmn:sourceRef>DataObjectReference_0cm8dnh</bpmn:sourceRef>
        <bpmn:targetRef>Property_1q5wp77</bpmn:targetRef>
      </bpmn:dataInputAssociation>
      <bpmn:startEvent id="Event_1wuwx2f">
        <bpmn:outgoing>Flow_0yx8lkz</bpmn:outgoing>
      </bpmn:startEvent>
      <bpmn:task id="placeholder">
        <bpmn:incoming>Flow_0yx8lkz</bpmn:incoming>
        <bpmn:outgoing>Flow_0rk4i35</bpmn:outgoing>
        <bpmn:property id="Property_1q5wp77" name="__targetRef_placeholder" />
        <bpmn:dataInputAssociation id="DataInputAssociation_0w2qahx">
          <bpmn:sourceRef>DataObjectReference_0cm8dnh</bpmn:sourceRef>
          <bpmn:targetRef>Property_1q5wp77</bpmn:targetRef>
        </bpmn:dataInputAssociation>
        <bpmn:dataOutputAssociation id="DataOutputAssociation_164qpaq">
          <bpmn:targetRef>DataObjectReference_1dn9eoi</bpmn:targetRef>
        </bpmn:dataOutputAssociation>
      </bpmn:task>
      <bpmn:sequenceFlow id="Flow_0yx8lkz" sourceRef="Event_1wuwx2f" targetRef="placeholder" />
      <bpmn:endEvent id="Event_1qcnmnt">

@@ -1,83 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_19d41bq" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.7.0">
|
||||
<bpmn:process id="ExclusiveToMulti" isExecutable="true">
|
||||
<bpmn:startEvent id="StartEvent_1">
|
||||
<bpmn:outgoing>Flow_163toj3</bpmn:outgoing>
|
||||
</bpmn:startEvent>
|
||||
<bpmn:scriptTask id="Script_Set_x_to_0" name="x = 0">
|
||||
<bpmn:incoming>Flow_163toj3</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_1rakb4c</bpmn:outgoing>
|
||||
<bpmn:script>x = 0</bpmn:script>
|
||||
</bpmn:scriptTask>
|
||||
<bpmn:sequenceFlow id="Flow_163toj3" sourceRef="StartEvent_1" targetRef="Script_Set_x_to_0" />
|
||||
<bpmn:exclusiveGateway id="Gateway_0zdq5of" default="Flow_0340se7">
|
||||
<bpmn:incoming>Flow_1rakb4c</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_04bjhw6</bpmn:outgoing>
|
||||
<bpmn:outgoing>Flow_0340se7</bpmn:outgoing>
|
||||
</bpmn:exclusiveGateway>
|
||||
<bpmn:sequenceFlow id="Flow_1rakb4c" sourceRef="Script_Set_x_to_0" targetRef="Gateway_0zdq5of" />
|
||||
<bpmn:sequenceFlow id="Flow_04bjhw6" name="x is not 0" sourceRef="Gateway_0zdq5of" targetRef="Activity_1j43xon">
|
||||
<bpmn:conditionExpression xsi:type="bpmn:tFormalExpression">x != 0</bpmn:conditionExpression>
|
||||
</bpmn:sequenceFlow>
|
||||
<bpmn:sequenceFlow id="Flow_073oado" sourceRef="Activity_1j43xon" targetRef="Event_1n4p05n" />
|
||||
<bpmn:sequenceFlow id="Flow_0340se7" name="x is 0" sourceRef="Gateway_0zdq5of" targetRef="Event_1n4p05n" />
|
||||
<bpmn:userTask id="Activity_1j43xon" name="Some Multiinstance">
|
||||
<bpmn:incoming>Flow_04bjhw6</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_073oado</bpmn:outgoing>
|
||||
<bpmn:multiInstanceLoopCharacteristics>
|
||||
<bpmn:loopCardinality xsi:type="bpmn:tFormalExpression">1</bpmn:loopCardinality>
|
||||
</bpmn:multiInstanceLoopCharacteristics>
|
||||
</bpmn:userTask>
|
||||
<bpmn:endEvent id="Event_1n4p05n">
|
||||
<bpmn:incoming>Flow_073oado</bpmn:incoming>
|
||||
<bpmn:incoming>Flow_0340se7</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
</bpmn:process>
|
||||
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
|
||||
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="ExclusiveToMulti">
|
||||
<bpmndi:BPMNEdge id="Flow_163toj3_di" bpmnElement="Flow_163toj3">
|
||||
<di:waypoint x="215" y="207" />
|
||||
<di:waypoint x="260" y="207" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_1rakb4c_di" bpmnElement="Flow_1rakb4c">
|
||||
<di:waypoint x="360" y="207" />
|
||||
<di:waypoint x="405" y="207" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_04bjhw6_di" bpmnElement="Flow_04bjhw6">
|
||||
<di:waypoint x="455" y="207" />
|
||||
<di:waypoint x="520" y="207" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="457" y="189" width="45" height="14" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_073oado_di" bpmnElement="Flow_073oado">
|
||||
<di:waypoint x="620" y="207" />
|
||||
<di:waypoint x="702" y="207" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_0340se7_di" bpmnElement="Flow_0340se7">
|
||||
<di:waypoint x="430" y="182" />
|
||||
<di:waypoint x="430" y="100" />
|
||||
<di:waypoint x="720" y="100" />
|
||||
<di:waypoint x="720" y="189" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="563" y="82" width="26" height="14" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
|
||||
<dc:Bounds x="179" y="189" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_1cmafjf_di" bpmnElement="Script_Set_x_to_0">
|
||||
<dc:Bounds x="260" y="167" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Gateway_0zdq5of_di" bpmnElement="Gateway_0zdq5of" isMarkerVisible="true">
|
||||
<dc:Bounds x="405" y="182" width="50" height="50" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_05dssc6_di" bpmnElement="Activity_1j43xon">
|
||||
<dc:Bounds x="520" y="167" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Event_0if1cvv_di" bpmnElement="Event_1n4p05n">
|
||||
<dc:Bounds x="702" y="189" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
</bpmndi:BPMNPlane>
|
||||
</bpmndi:BPMNDiagram>
|
||||
</bpmn:definitions>
|
|
@@ -1,97 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:modeler="http://camunda.org/schema/modeler/1.0" id="Definitions_0m3hv47" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="4.11.1" modeler:executionPlatform="Camunda Platform" modeler:executionPlatformVersion="7.15.0">
|
||||
<bpmn:process id="ExclusiveNonDefaultMulti" isExecutable="true">
|
||||
<bpmn:startEvent id="StartEvent">
|
||||
<bpmn:outgoing>Flow_0rqubl2</bpmn:outgoing>
|
||||
</bpmn:startEvent>
|
||||
<bpmn:userTask id="DoStuff" name="Do Stuff?" camunda:formKey="morestuffform">
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="morestuff" label="Do we need to do more stuff?" type="string" />
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>Flow_0rqubl2</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_02orejl</bpmn:outgoing>
|
||||
</bpmn:userTask>
|
||||
<bpmn:exclusiveGateway id="CheckResponse" name="Check Response">
|
||||
<bpmn:incoming>Flow_02orejl</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_Yes</bpmn:outgoing>
|
||||
<bpmn:outgoing>Flow_No</bpmn:outgoing>
|
||||
</bpmn:exclusiveGateway>
|
||||
<bpmn:endEvent id="EndEvent">
|
||||
<bpmn:incoming>Flow_No</bpmn:incoming>
|
||||
<bpmn:incoming>Flow_0pud9db</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
<bpmn:userTask id="GetMoreStuff" name="Add More Stuff">
|
||||
<bpmn:extensionElements>
|
||||
<camunda:formData>
|
||||
<camunda:formField id="stuff.addstuff" label="Add More Stuff" type="string" />
|
||||
</camunda:formData>
|
||||
</bpmn:extensionElements>
|
||||
<bpmn:incoming>Flow_Yes</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_0pud9db</bpmn:outgoing>
|
||||
<bpmn:multiInstanceLoopCharacteristics camunda:collection="collectstuff" camunda:elementVariable="stuff">
|
||||
<bpmn:loopCardinality xsi:type="bpmn:tFormalExpression">3</bpmn:loopCardinality>
|
||||
</bpmn:multiInstanceLoopCharacteristics>
|
||||
</bpmn:userTask>
|
||||
<bpmn:sequenceFlow id="Flow_0rqubl2" sourceRef="StartEvent" targetRef="DoStuff" />
|
||||
<bpmn:sequenceFlow id="Flow_02orejl" sourceRef="DoStuff" targetRef="CheckResponse" />
|
||||
<bpmn:sequenceFlow id="Flow_Yes" name="Yes" sourceRef="CheckResponse" targetRef="GetMoreStuff">
|
||||
<bpmn:conditionExpression xsi:type="bpmn:tFormalExpression">morestuff == 'Yes'</bpmn:conditionExpression>
|
||||
</bpmn:sequenceFlow>
|
||||
<bpmn:sequenceFlow id="Flow_No" name="No" sourceRef="CheckResponse" targetRef="EndEvent">
|
||||
<bpmn:conditionExpression xsi:type="bpmn:tFormalExpression">morestuff == 'No'</bpmn:conditionExpression>
|
||||
</bpmn:sequenceFlow>
|
||||
<bpmn:sequenceFlow id="Flow_0pud9db" sourceRef="GetMoreStuff" targetRef="EndEvent" />
|
||||
</bpmn:process>
|
||||
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
|
||||
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="ExclusiveNonDefaultMulti">
|
||||
<bpmndi:BPMNEdge id="Flow_0rqubl2_di" bpmnElement="Flow_0rqubl2">
|
||||
<di:waypoint x="158" y="130" />
|
||||
<di:waypoint x="203" y="130" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_02orejl_di" bpmnElement="Flow_02orejl">
|
||||
<di:waypoint x="303" y="130" />
|
||||
<di:waypoint x="348" y="130" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_1ecte1a_di" bpmnElement="Flow_Yes">
|
||||
<di:waypoint x="398" y="130" />
|
||||
<di:waypoint x="463" y="130" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="421" y="112" width="19" height="14" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_0tsq42b_di" bpmnElement="Flow_No">
|
||||
<di:waypoint x="373" y="155" />
|
||||
<di:waypoint x="373" y="253" />
|
||||
<di:waypoint x="653" y="253" />
|
||||
<di:waypoint x="653" y="148" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="506" y="235" width="15" height="14" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_0pud9db_di" bpmnElement="Flow_0pud9db">
|
||||
<di:waypoint x="563" y="130" />
|
||||
<di:waypoint x="635" y="130" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNShape id="Event_0sv6yoe_di" bpmnElement="StartEvent">
|
||||
<dc:Bounds x="122" y="112" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_0qp7zvb_di" bpmnElement="DoStuff">
|
||||
<dc:Bounds x="203" y="90" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Gateway_0ur3pbx_di" bpmnElement="CheckResponse" isMarkerVisible="true">
|
||||
<dc:Bounds x="348" y="105" width="50" height="50" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="331" y="75" width="84" height="14" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Event_1komr8a_di" bpmnElement="EndEvent">
|
||||
<dc:Bounds x="635" y="112" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_1ch5bce_di" bpmnElement="GetMoreStuff">
|
||||
<dc:Bounds x="463" y="90" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
</bpmndi:BPMNPlane>
|
||||
</bpmndi:BPMNDiagram>
|
||||
</bpmn:definitions>
|
|
@@ -13,8 +13,8 @@
         as a required set of inputs or outputs that must all
         appear together -->
      <inputSet name="Inputs" id="INS_1">
        <dataInputRefs>id_1</dataInputRefs>
        <dataInputRefs>id_2</dataInputRefs>
        <dataInputRefs>in_1</dataInputRefs>
        <dataInputRefs>in_2</dataInputRefs>
      </inputSet>
      <outputSet name="Outputs" id="OUTS_1">
        <dataOutputRefs>out_1</dataOutputRefs>

@@ -85,4 +85,4 @@
      </bpmndi:BPMNShape>
    </bpmndi:BPMNPlane>
  </bpmndi:BPMNDiagram>
</definitions>
</definitions>

@@ -0,0 +1,66 @@
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:modeler="http://camunda.org/schema/modeler/1.0" id="Definitions_1bprarj" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="4.11.1" modeler:executionPlatform="Camunda Platform" modeler:executionPlatformVersion="7.15.0">
|
||||
<bpmn:process id="main" isExecutable="true">
|
||||
<bpmn:startEvent id="StartEvent_1">
|
||||
<bpmn:outgoing>Flow_0zbeoq1</bpmn:outgoing>
|
||||
</bpmn:startEvent>
|
||||
<bpmn:sequenceFlow id="Flow_0zbeoq1" sourceRef="StartEvent_1" targetRef="set_data" />
|
||||
<bpmn:scriptTask id="set_data" name="Set Data">
|
||||
<bpmn:incoming>Flow_0zbeoq1</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_16rr3p3</bpmn:outgoing>
|
||||
<bpmn:script>in_1, in_2, unused = 1, "hello world", True
|
||||
</bpmn:script>
|
||||
</bpmn:scriptTask>
|
||||
<bpmn:sequenceFlow id="Flow_16rr3p3" sourceRef="set_data" targetRef="any_task" />
|
||||
<bpmn:manualTask id="any_task" name="Any Task">
|
||||
<bpmn:incoming>Flow_16rr3p3</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_1woo38x</bpmn:outgoing>
|
||||
<bpmn:ioSpecification>
|
||||
<bpmn:dataInput id="in_1" name="input 1" />
|
||||
<bpmn:dataInput id="in_2" name="input 2" />
|
||||
<bpmn:dataOutput id="out_1" name="output 1" />
|
||||
<bpmn:dataOutput id="out_2" name="output 2" />
|
||||
<bpmn:inputSet id="input_set" name="Inputs">
|
||||
<bpmn:dataInputRefs>in_1</bpmn:dataInputRefs>
|
||||
<bpmn:dataInputRefs>in_2</bpmn:dataInputRefs>
|
||||
</bpmn:inputSet>
|
||||
<bpmn:outputSet id="output_set" name="Outputs">
|
||||
<bpmn:dataOutputRefs>out_1</bpmn:dataOutputRefs>
|
||||
<bpmn:dataOutputRefs>out_2</bpmn:dataOutputRefs>
|
||||
</bpmn:outputSet>
|
||||
</bpmn:ioSpecification>
|
||||
</bpmn:manualTask>
|
||||
<bpmn:endEvent id="Event_1nbxxx5">
|
||||
<bpmn:incoming>Flow_1woo38x</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
<bpmn:sequenceFlow id="Flow_1woo38x" sourceRef="any_task" targetRef="Event_1nbxxx5" />
|
||||
</bpmn:process>
|
||||
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
|
||||
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="main">
|
||||
<bpmndi:BPMNEdge id="Flow_1woo38x_di" bpmnElement="Flow_1woo38x">
|
||||
<di:waypoint x="530" y="117" />
|
||||
<di:waypoint x="592" y="117" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_16rr3p3_di" bpmnElement="Flow_16rr3p3">
|
||||
<di:waypoint x="370" y="117" />
|
||||
<di:waypoint x="430" y="117" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_0zbeoq1_di" bpmnElement="Flow_0zbeoq1">
|
||||
<di:waypoint x="215" y="117" />
|
||||
<di:waypoint x="270" y="117" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
|
||||
<dc:Bounds x="179" y="99" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_0w8jd8z_di" bpmnElement="set_data">
|
||||
<dc:Bounds x="270" y="77" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_0fltcc2_di" bpmnElement="any_task">
|
||||
<dc:Bounds x="430" y="77" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Event_1nbxxx5_di" bpmnElement="Event_1nbxxx5">
|
||||
<dc:Bounds x="592" y="99" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
</bpmndi:BPMNPlane>
|
||||
</bpmndi:BPMNDiagram>
|
||||
</bpmn:definitions>
|
|
@@ -0,0 +1,44 @@
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:modeler="http://camunda.org/schema/modeler/1.0" id="Definitions_0zetnjn" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="4.11.1" modeler:executionPlatform="Camunda Platform" modeler:executionPlatformVersion="7.15.0">
|
||||
<bpmn:process id="main" isExecutable="true">
|
||||
<bpmn:startEvent id="StartEvent_1">
|
||||
<bpmn:outgoing>Flow_0m77cxj</bpmn:outgoing>
|
||||
</bpmn:startEvent>
|
||||
<bpmn:task id="any_task" name="Any Task">
|
||||
<bpmn:incoming>Flow_0m77cxj</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_1jbp2el</bpmn:outgoing>
|
||||
<bpmn:multiInstanceLoopCharacteristics>
|
||||
<bpmn:loopCardinality xsi:type="bpmn:tFormalExpression">3</bpmn:loopCardinality>
|
||||
<bpmn:loopDataOutputRef>output_data</bpmn:loopDataOutputRef>
|
||||
<bpmn:inputDataItem id="input_item" name="input item" />
|
||||
<bpmn:outputDataItem id="output_item" name="output item" />
|
||||
</bpmn:multiInstanceLoopCharacteristics>
|
||||
</bpmn:task>
|
||||
<bpmn:sequenceFlow id="Flow_0m77cxj" sourceRef="StartEvent_1" targetRef="any_task" />
|
||||
<bpmn:endEvent id="Event_1xk7z3g">
|
||||
<bpmn:incoming>Flow_1jbp2el</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
<bpmn:sequenceFlow id="Flow_1jbp2el" sourceRef="any_task" targetRef="Event_1xk7z3g" />
|
||||
</bpmn:process>
|
||||
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
|
||||
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="main">
|
||||
<bpmndi:BPMNEdge id="Flow_1jbp2el_di" bpmnElement="Flow_1jbp2el">
|
||||
<di:waypoint x="370" y="117" />
|
||||
<di:waypoint x="452" y="117" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_0m77cxj_di" bpmnElement="Flow_0m77cxj">
|
||||
<di:waypoint x="215" y="117" />
|
||||
<di:waypoint x="270" y="117" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
|
||||
<dc:Bounds x="179" y="99" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_1jay5wu_di" bpmnElement="any_task">
|
||||
<dc:Bounds x="270" y="77" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Event_1xk7z3g_di" bpmnElement="Event_1xk7z3g">
|
||||
<dc:Bounds x="452" y="99" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
</bpmndi:BPMNPlane>
|
||||
</bpmndi:BPMNDiagram>
|
||||
</bpmn:definitions>
|
|
@ -0,0 +1,45 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:modeler="http://camunda.org/schema/modeler/1.0" id="Definitions_0zetnjn" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="4.11.1" modeler:executionPlatform="Camunda Platform" modeler:executionPlatformVersion="7.15.0">
  <bpmn:process id="main" isExecutable="true">
    <bpmn:startEvent id="StartEvent_1">
      <bpmn:outgoing>Flow_0m77cxj</bpmn:outgoing>
    </bpmn:startEvent>
    <bpmn:task id="any_task" name="Any Task">
      <bpmn:incoming>Flow_0m77cxj</bpmn:incoming>
      <bpmn:outgoing>Flow_1jbp2el</bpmn:outgoing>
      <bpmn:multiInstanceLoopCharacteristics>
        <bpmn:loopCardinality xsi:type="bpmn:tFormalExpression">3</bpmn:loopCardinality>
        <bpmn:loopDataInputRef>input_data</bpmn:loopDataInputRef>
        <bpmn:loopDataOutputRef>output_data</bpmn:loopDataOutputRef>
        <bpmn:inputDataItem id="input_item" name="input item" />
        <bpmn:outputDataItem id="output_item" name="output item" />
      </bpmn:multiInstanceLoopCharacteristics>
    </bpmn:task>
    <bpmn:sequenceFlow id="Flow_0m77cxj" sourceRef="StartEvent_1" targetRef="any_task" />
    <bpmn:endEvent id="Event_1xk7z3g">
      <bpmn:incoming>Flow_1jbp2el</bpmn:incoming>
    </bpmn:endEvent>
    <bpmn:sequenceFlow id="Flow_1jbp2el" sourceRef="any_task" targetRef="Event_1xk7z3g" />
  </bpmn:process>
  <bpmndi:BPMNDiagram id="BPMNDiagram_1">
    <bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="main">
      <bpmndi:BPMNEdge id="Flow_1jbp2el_di" bpmnElement="Flow_1jbp2el">
        <di:waypoint x="370" y="117" />
        <di:waypoint x="452" y="117" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNEdge id="Flow_0m77cxj_di" bpmnElement="Flow_0m77cxj">
        <di:waypoint x="215" y="117" />
        <di:waypoint x="270" y="117" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
        <dc:Bounds x="179" y="99" width="36" height="36" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Activity_1jay5wu_di" bpmnElement="any_task">
        <dc:Bounds x="270" y="77" width="100" height="80" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Event_1xk7z3g_di" bpmnElement="Event_1xk7z3g">
        <dc:Bounds x="452" y="99" width="36" height="36" />
      </bpmndi:BPMNShape>
    </bpmndi:BPMNPlane>
  </bpmndi:BPMNDiagram>
</bpmn:definitions>
@ -0,0 +1,44 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:modeler="http://camunda.org/schema/modeler/1.0" id="Definitions_0zetnjn" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="4.11.1" modeler:executionPlatform="Camunda Platform" modeler:executionPlatformVersion="7.15.0">
  <bpmn:process id="main" isExecutable="true">
    <bpmn:startEvent id="StartEvent_1">
      <bpmn:outgoing>Flow_0m77cxj</bpmn:outgoing>
    </bpmn:startEvent>
    <bpmn:task id="any_task" name="Any Task">
      <bpmn:incoming>Flow_0m77cxj</bpmn:incoming>
      <bpmn:outgoing>Flow_1jbp2el</bpmn:outgoing>
      <bpmn:multiInstanceLoopCharacteristics>
        <bpmn:loopDataInputRef>input_data</bpmn:loopDataInputRef>
        <bpmn:loopDataOutputRef>output_data</bpmn:loopDataOutputRef>
        <bpmn:inputDataItem id="input_item" name="input item" />
        <bpmn:outputDataItem id="output_item" name="output item" />
      </bpmn:multiInstanceLoopCharacteristics>
    </bpmn:task>
    <bpmn:sequenceFlow id="Flow_0m77cxj" sourceRef="StartEvent_1" targetRef="any_task" />
    <bpmn:endEvent id="Event_1xk7z3g">
      <bpmn:incoming>Flow_1jbp2el</bpmn:incoming>
    </bpmn:endEvent>
    <bpmn:sequenceFlow id="Flow_1jbp2el" sourceRef="any_task" targetRef="Event_1xk7z3g" />
  </bpmn:process>
  <bpmndi:BPMNDiagram id="BPMNDiagram_1">
    <bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="main">
      <bpmndi:BPMNEdge id="Flow_1jbp2el_di" bpmnElement="Flow_1jbp2el">
        <di:waypoint x="390" y="117" />
        <di:waypoint x="462" y="117" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNEdge id="Flow_0m77cxj_di" bpmnElement="Flow_0m77cxj">
        <di:waypoint x="215" y="117" />
        <di:waypoint x="290" y="117" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
        <dc:Bounds x="179" y="99" width="36" height="36" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Activity_1jay5wu_di" bpmnElement="any_task">
        <dc:Bounds x="290" y="77" width="100" height="80" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Event_1xk7z3g_di" bpmnElement="Event_1xk7z3g">
        <dc:Bounds x="462" y="99" width="36" height="36" />
      </bpmndi:BPMNShape>
    </bpmndi:BPMNPlane>
  </bpmndi:BPMNDiagram>
</bpmn:definitions>
@ -0,0 +1,104 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:modeler="http://camunda.org/schema/modeler/1.0" id="Definitions_1svhxil" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="4.11.1" modeler:executionPlatform="Camunda Platform" modeler:executionPlatformVersion="7.15.0">
  <bpmn:process id="main" isExecutable="true">
    <bpmn:startEvent id="StartEvent_1">
      <bpmn:outgoing>Flow_0j648np</bpmn:outgoing>
    </bpmn:startEvent>
    <bpmn:exclusiveGateway id="Gateway_1hq5zma" default="Flow_13cp5nc">
      <bpmn:incoming>Flow_0j648np</bpmn:incoming>
      <bpmn:incoming>modify</bpmn:incoming>
      <bpmn:outgoing>Flow_13cp5nc</bpmn:outgoing>
    </bpmn:exclusiveGateway>
    <bpmn:sequenceFlow id="Flow_0j648np" sourceRef="StartEvent_1" targetRef="Gateway_1hq5zma" />
    <bpmn:task id="task_1" name="Task 1">
      <bpmn:incoming>Flow_13cp5nc</bpmn:incoming>
      <bpmn:outgoing>Flow_1r81vou</bpmn:outgoing>
    </bpmn:task>
    <bpmn:sequenceFlow id="Flow_13cp5nc" sourceRef="Gateway_1hq5zma" targetRef="task_1" />
    <bpmn:task id="task_2" name="Task 2">
      <bpmn:incoming>Flow_0m5s7t9</bpmn:incoming>
      <bpmn:outgoing>Flow_0p7c88x</bpmn:outgoing>
    </bpmn:task>
    <bpmn:endEvent id="Event_07pdq0w">
      <bpmn:incoming>Flow_1gm7381</bpmn:incoming>
      <bpmn:incoming>Flow_0p7c88x</bpmn:incoming>
    </bpmn:endEvent>
    <bpmn:boundaryEvent id="timer" attachedToRef="task_1">
      <bpmn:outgoing>Flow_0m5s7t9</bpmn:outgoing>
      <bpmn:timerEventDefinition id="TimerEventDefinition_0hu2ovu">
        <bpmn:timeDuration xsi:type="bpmn:tFormalExpression">"PT60S"</bpmn:timeDuration>
      </bpmn:timerEventDefinition>
    </bpmn:boundaryEvent>
    <bpmn:sequenceFlow id="Flow_0m5s7t9" sourceRef="timer" targetRef="task_2" />
    <bpmn:exclusiveGateway id="Gateway_123uzx5" default="Flow_1gm7381">
      <bpmn:incoming>Flow_1r81vou</bpmn:incoming>
      <bpmn:outgoing>modify</bpmn:outgoing>
      <bpmn:outgoing>Flow_1gm7381</bpmn:outgoing>
    </bpmn:exclusiveGateway>
    <bpmn:sequenceFlow id="Flow_1r81vou" sourceRef="task_1" targetRef="Gateway_123uzx5" />
    <bpmn:sequenceFlow id="modify" name="Modify " sourceRef="Gateway_123uzx5" targetRef="Gateway_1hq5zma">
      <bpmn:conditionExpression xsi:type="bpmn:tFormalExpression">modify</bpmn:conditionExpression>
    </bpmn:sequenceFlow>
    <bpmn:sequenceFlow id="Flow_1gm7381" sourceRef="Gateway_123uzx5" targetRef="Event_07pdq0w" />
    <bpmn:sequenceFlow id="Flow_0p7c88x" sourceRef="task_2" targetRef="Event_07pdq0w" />
  </bpmn:process>
  <bpmndi:BPMNDiagram id="BPMNDiagram_1">
    <bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="main">
      <bpmndi:BPMNEdge id="Flow_0j648np_di" bpmnElement="Flow_0j648np">
        <di:waypoint x="215" y="197" />
        <di:waypoint x="265" y="197" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNEdge id="Flow_13cp5nc_di" bpmnElement="Flow_13cp5nc">
        <di:waypoint x="315" y="197" />
        <di:waypoint x="370" y="197" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNEdge id="Flow_0m5s7t9_di" bpmnElement="Flow_0m5s7t9">
        <di:waypoint x="420" y="255" />
        <di:waypoint x="420" y="300" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNEdge id="Flow_1r81vou_di" bpmnElement="Flow_1r81vou">
        <di:waypoint x="470" y="197" />
        <di:waypoint x="525" y="197" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNEdge id="Flow_1l30w6o_di" bpmnElement="modify">
        <di:waypoint x="550" y="172" />
        <di:waypoint x="550" y="100" />
        <di:waypoint x="290" y="100" />
        <di:waypoint x="290" y="172" />
        <bpmndi:BPMNLabel>
          <dc:Bounds x="404" y="82" width="33" height="27" />
        </bpmndi:BPMNLabel>
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNEdge id="Flow_1gm7381_di" bpmnElement="Flow_1gm7381">
        <di:waypoint x="575" y="197" />
        <di:waypoint x="632" y="197" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNEdge id="Flow_0p7c88x_di" bpmnElement="Flow_0p7c88x">
        <di:waypoint x="470" y="340" />
        <di:waypoint x="650" y="340" />
        <di:waypoint x="650" y="215" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
        <dc:Bounds x="179" y="179" width="36" height="36" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Gateway_1hq5zma_di" bpmnElement="Gateway_1hq5zma" isMarkerVisible="true">
        <dc:Bounds x="265" y="172" width="50" height="50" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Activity_1f3jg2c_di" bpmnElement="task_1">
        <dc:Bounds x="370" y="157" width="100" height="80" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Activity_1r0ra56_di" bpmnElement="task_2">
        <dc:Bounds x="370" y="300" width="100" height="80" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Gateway_123uzx5_di" bpmnElement="Gateway_123uzx5" isMarkerVisible="true">
        <dc:Bounds x="525" y="172" width="50" height="50" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Event_07pdq0w_di" bpmnElement="Event_07pdq0w">
        <dc:Bounds x="632" y="179" width="36" height="36" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Event_1g1bbcs_di" bpmnElement="timer">
        <dc:Bounds x="402" y="219" width="36" height="36" />
      </bpmndi:BPMNShape>
    </bpmndi:BPMNPlane>
  </bpmndi:BPMNDiagram>
</bpmn:definitions>
@ -0,0 +1,44 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:modeler="http://camunda.org/schema/modeler/1.0" id="Definitions_0zetnjn" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="4.11.1" modeler:executionPlatform="Camunda Platform" modeler:executionPlatformVersion="7.15.0">
  <bpmn:process id="main" isExecutable="true">
    <bpmn:startEvent id="StartEvent_1">
      <bpmn:outgoing>Flow_0m77cxj</bpmn:outgoing>
    </bpmn:startEvent>
    <bpmn:task id="any_task" name="Any Task">
      <bpmn:incoming>Flow_0m77cxj</bpmn:incoming>
      <bpmn:outgoing>Flow_1jbp2el</bpmn:outgoing>
      <bpmn:multiInstanceLoopCharacteristics isSequential="true">
        <bpmn:loopCardinality>3</bpmn:loopCardinality>
        <bpmn:loopDataOutputRef>output_data</bpmn:loopDataOutputRef>
        <bpmn:inputDataItem id="input_item" name="input item" />
        <bpmn:outputDataItem id="output_item" name="output item" />
      </bpmn:multiInstanceLoopCharacteristics>
    </bpmn:task>
    <bpmn:sequenceFlow id="Flow_0m77cxj" sourceRef="StartEvent_1" targetRef="any_task" />
    <bpmn:endEvent id="Event_1xk7z3g">
      <bpmn:incoming>Flow_1jbp2el</bpmn:incoming>
    </bpmn:endEvent>
    <bpmn:sequenceFlow id="Flow_1jbp2el" sourceRef="any_task" targetRef="Event_1xk7z3g" />
  </bpmn:process>
  <bpmndi:BPMNDiagram id="BPMNDiagram_1">
    <bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="main">
      <bpmndi:BPMNEdge id="Flow_1jbp2el_di" bpmnElement="Flow_1jbp2el">
        <di:waypoint x="390" y="117" />
        <di:waypoint x="462" y="117" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNEdge id="Flow_0m77cxj_di" bpmnElement="Flow_0m77cxj">
        <di:waypoint x="215" y="117" />
        <di:waypoint x="290" y="117" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
        <dc:Bounds x="179" y="99" width="36" height="36" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Activity_1jay5wu_di" bpmnElement="any_task">
        <dc:Bounds x="290" y="77" width="100" height="80" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Event_1xk7z3g_di" bpmnElement="Event_1xk7z3g">
        <dc:Bounds x="462" y="99" width="36" height="36" />
      </bpmndi:BPMNShape>
    </bpmndi:BPMNPlane>
  </bpmndi:BPMNDiagram>
</bpmn:definitions>
@ -0,0 +1,44 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:modeler="http://camunda.org/schema/modeler/1.0" id="Definitions_0zetnjn" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="4.11.1" modeler:executionPlatform="Camunda Platform" modeler:executionPlatformVersion="7.15.0">
  <bpmn:process id="main" isExecutable="true">
    <bpmn:startEvent id="StartEvent_1">
      <bpmn:outgoing>Flow_0m77cxj</bpmn:outgoing>
    </bpmn:startEvent>
    <bpmn:task id="any_task" name="Any Task">
      <bpmn:incoming>Flow_0m77cxj</bpmn:incoming>
      <bpmn:outgoing>Flow_1jbp2el</bpmn:outgoing>
      <bpmn:multiInstanceLoopCharacteristics isSequential="true">
        <bpmn:loopDataInputRef>input_data</bpmn:loopDataInputRef>
        <bpmn:loopDataOutputRef>output_data</bpmn:loopDataOutputRef>
        <bpmn:inputDataItem id="input_item" name="input item" />
        <bpmn:outputDataItem id="output_item" name="output item" />
      </bpmn:multiInstanceLoopCharacteristics>
    </bpmn:task>
    <bpmn:sequenceFlow id="Flow_0m77cxj" sourceRef="StartEvent_1" targetRef="any_task" />
    <bpmn:endEvent id="Event_1xk7z3g">
      <bpmn:incoming>Flow_1jbp2el</bpmn:incoming>
    </bpmn:endEvent>
    <bpmn:sequenceFlow id="Flow_1jbp2el" sourceRef="any_task" targetRef="Event_1xk7z3g" />
  </bpmn:process>
  <bpmndi:BPMNDiagram id="BPMNDiagram_1">
    <bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="main">
      <bpmndi:BPMNEdge id="Flow_1jbp2el_di" bpmnElement="Flow_1jbp2el">
        <di:waypoint x="390" y="117" />
        <di:waypoint x="462" y="117" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNEdge id="Flow_0m77cxj_di" bpmnElement="Flow_0m77cxj">
        <di:waypoint x="215" y="117" />
        <di:waypoint x="290" y="117" />
      </bpmndi:BPMNEdge>
      <bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
        <dc:Bounds x="179" y="99" width="36" height="36" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Activity_1jay5wu_di" bpmnElement="any_task">
        <dc:Bounds x="290" y="77" width="100" height="80" />
      </bpmndi:BPMNShape>
      <bpmndi:BPMNShape id="Event_1xk7z3g_di" bpmnElement="Event_1xk7z3g">
        <dc:Bounds x="462" y="99" width="36" height="36" />
      </bpmndi:BPMNShape>
    </bpmndi:BPMNPlane>
  </bpmndi:BPMNDiagram>
</bpmn:definitions>
@ -0,0 +1,731 @@
{
|
||||
"serializer_version": "1.1",
|
||||
"data": {
|
||||
"obj_1": "object 1"
|
||||
},
|
||||
"last_task": "9a4925a1-a152-428e-a764-b24d81b3cfdd",
|
||||
"success": true,
|
||||
"tasks": {
|
||||
"4a2e2ad3-ad4b-4168-800d-71e31f33e225": {
|
||||
"id": "4a2e2ad3-ad4b-4168-800d-71e31f33e225",
|
||||
"parent": null,
|
||||
"children": [
|
||||
"b666abf3-1e97-49c0-94b3-6e0f1a5573ec"
|
||||
],
|
||||
"last_state_change": 1675380199.2266004,
|
||||
"state": 32,
|
||||
"task_spec": "Root",
|
||||
"triggered": false,
|
||||
"workflow_name": "parent",
|
||||
"internal_data": {},
|
||||
"data": {}
|
||||
},
|
||||
"b666abf3-1e97-49c0-94b3-6e0f1a5573ec": {
|
||||
"id": "b666abf3-1e97-49c0-94b3-6e0f1a5573ec",
|
||||
"parent": "4a2e2ad3-ad4b-4168-800d-71e31f33e225",
|
||||
"children": [
|
||||
"c066bd8f-894d-4b24-b724-8c63fb15bdbf"
|
||||
],
|
||||
"last_state_change": 1675380199.2330534,
|
||||
"state": 32,
|
||||
"task_spec": "Start",
|
||||
"triggered": false,
|
||||
"workflow_name": "parent",
|
||||
"internal_data": {},
|
||||
"data": {}
|
||||
},
|
||||
"c066bd8f-894d-4b24-b724-8c63fb15bdbf": {
|
||||
"id": "c066bd8f-894d-4b24-b724-8c63fb15bdbf",
|
||||
"parent": "b666abf3-1e97-49c0-94b3-6e0f1a5573ec",
|
||||
"children": [
|
||||
"9a4925a1-a152-428e-a764-b24d81b3cfdd"
|
||||
],
|
||||
"last_state_change": 1675380199.2362425,
|
||||
"state": 32,
|
||||
"task_spec": "Event_0xiw3t6",
|
||||
"triggered": false,
|
||||
"workflow_name": "parent",
|
||||
"internal_data": {
|
||||
"event_fired": true
|
||||
},
|
||||
"data": {}
|
||||
},
|
||||
"9a4925a1-a152-428e-a764-b24d81b3cfdd": {
|
||||
"id": "9a4925a1-a152-428e-a764-b24d81b3cfdd",
|
||||
"parent": "c066bd8f-894d-4b24-b724-8c63fb15bdbf",
|
||||
"children": [
|
||||
"dcd54745-3143-4b4d-b557-f9c8ce9c7e71"
|
||||
],
|
||||
"last_state_change": 1675380199.238688,
|
||||
"state": 32,
|
||||
"task_spec": "Activity_0haob58",
|
||||
"triggered": false,
|
||||
"workflow_name": "parent",
|
||||
"internal_data": {},
|
||||
"data": {
|
||||
"in_1": 1,
|
||||
"in_2": "hello world",
|
||||
"unused": true
|
||||
}
|
||||
},
|
||||
"dcd54745-3143-4b4d-b557-f9c8ce9c7e71": {
|
||||
"id": "dcd54745-3143-4b4d-b557-f9c8ce9c7e71",
|
||||
"parent": "9a4925a1-a152-428e-a764-b24d81b3cfdd",
|
||||
"children": [
|
||||
"3a537753-2578-4f33-914c-5e840d2f9612"
|
||||
],
|
||||
"last_state_change": 1675380199.243926,
|
||||
"state": 8,
|
||||
"task_spec": "Activity_1wdjypm",
|
||||
"triggered": false,
|
||||
"workflow_name": "parent",
|
||||
"internal_data": {},
|
||||
"data": {
|
||||
"in_1": 1,
|
||||
"in_2": "hello world",
|
||||
"unused": true
|
||||
}
|
||||
},
|
||||
"3a537753-2578-4f33-914c-5e840d2f9612": {
|
||||
"id": "3a537753-2578-4f33-914c-5e840d2f9612",
|
||||
"parent": "dcd54745-3143-4b4d-b557-f9c8ce9c7e71",
|
||||
"children": [
|
||||
"f4e68050-fdc7-48e6-b0e0-7c71aa7ff3df"
|
||||
],
|
||||
"last_state_change": 1675380199.2281342,
|
||||
"state": 4,
|
||||
"task_spec": "Event_1q277cc",
|
||||
"triggered": false,
|
||||
"workflow_name": "parent",
|
||||
"internal_data": {},
|
||||
"data": {}
|
||||
},
|
||||
"f4e68050-fdc7-48e6-b0e0-7c71aa7ff3df": {
|
||||
"id": "f4e68050-fdc7-48e6-b0e0-7c71aa7ff3df",
|
||||
"parent": "3a537753-2578-4f33-914c-5e840d2f9612",
|
||||
"children": [
|
||||
"04c50af7-cf65-4cd7-a25e-af506581de7c"
|
||||
],
|
||||
"last_state_change": 1675380199.2283747,
|
||||
"state": 4,
|
||||
"task_spec": "parent.EndJoin",
|
||||
"triggered": false,
|
||||
"workflow_name": "parent",
|
||||
"internal_data": {},
|
||||
"data": {}
|
||||
},
|
||||
"04c50af7-cf65-4cd7-a25e-af506581de7c": {
|
||||
"id": "04c50af7-cf65-4cd7-a25e-af506581de7c",
|
||||
"parent": "f4e68050-fdc7-48e6-b0e0-7c71aa7ff3df",
|
||||
"children": [],
|
||||
"last_state_change": 1675380199.2286203,
|
||||
"state": 4,
|
||||
"task_spec": "End",
|
||||
"triggered": false,
|
||||
"workflow_name": "parent",
|
||||
"internal_data": {},
|
||||
"data": {}
|
||||
}
|
||||
},
|
||||
"root": "4a2e2ad3-ad4b-4168-800d-71e31f33e225",
|
||||
"spec": {
|
||||
"name": "parent",
|
||||
"description": "Parent Process",
|
||||
"file": "/home/essweine/work/sartography/code/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/io_spec_parent_data_obj.bpmn",
|
||||
"task_specs": {
|
||||
"Start": {
|
||||
"id": "parent_1",
|
||||
"name": "Start",
|
||||
"description": "",
|
||||
"manual": false,
|
||||
"internal": false,
|
||||
"lookahead": 2,
|
||||
"inputs": [],
|
||||
"outputs": [
|
||||
"Event_0xiw3t6"
|
||||
],
|
||||
"typename": "StartTask"
|
||||
},
|
||||
"parent.EndJoin": {
|
||||
"id": "parent_2",
|
||||
"name": "parent.EndJoin",
|
||||
"description": "",
|
||||
"manual": false,
|
||||
"internal": false,
|
||||
"lookahead": 2,
|
||||
"inputs": [
|
||||
"Event_1q277cc"
|
||||
],
|
||||
"outputs": [
|
||||
"End"
|
||||
],
|
||||
"typename": "_EndJoin"
|
||||
},
|
||||
"End": {
|
||||
"id": "parent_3",
|
||||
"name": "End",
|
||||
"description": "",
|
||||
"manual": false,
|
||||
"internal": false,
|
||||
"lookahead": 2,
|
||||
"inputs": [
|
||||
"parent.EndJoin"
|
||||
],
|
||||
"outputs": [],
|
||||
"typename": "Simple"
|
||||
},
|
||||
"Event_0xiw3t6": {
|
||||
"id": "parent_4",
|
||||
"name": "Event_0xiw3t6",
|
||||
"description": null,
|
||||
"manual": false,
|
||||
"internal": false,
|
||||
"lookahead": 2,
|
||||
"inputs": [
|
||||
"Start"
|
||||
],
|
||||
"outputs": [
|
||||
"Activity_0haob58"
|
||||
],
|
||||
"lane": null,
|
||||
"documentation": null,
|
||||
"loopTask": false,
|
||||
"position": {
|
||||
"x": 152.0,
|
||||
"y": 102.0
|
||||
},
|
||||
"outgoing_sequence_flows": {
|
||||
"Activity_0haob58": {
|
||||
"id": "Flow_00qjfvu",
|
||||
"name": null,
|
||||
"documentation": null,
|
||||
"target_task_spec": "Activity_0haob58",
|
||||
"typename": "SequenceFlow"
|
||||
}
|
||||
},
|
||||
"outgoing_sequence_flows_by_id": {
|
||||
"Flow_00qjfvu": {
|
||||
"id": "Flow_00qjfvu",
|
||||
"name": null,
|
||||
"documentation": null,
|
||||
"target_task_spec": "Activity_0haob58",
|
||||
"typename": "SequenceFlow"
|
||||
}
|
||||
},
|
||||
"data_input_associations": [],
|
||||
"data_output_associations": [],
|
||||
"event_definition": {
|
||||
"internal": false,
|
||||
"external": false,
|
||||
"typename": "NoneEventDefinition"
|
||||
},
|
||||
"typename": "StartEvent",
|
||||
"extensions": {}
|
||||
},
|
||||
"Activity_0haob58": {
|
||||
"id": "parent_5",
|
||||
"name": "Activity_0haob58",
|
||||
"description": "Set Data",
|
||||
"manual": false,
|
||||
"internal": false,
|
||||
"lookahead": 2,
|
||||
"inputs": [
|
||||
"Event_0xiw3t6"
|
||||
],
|
||||
"outputs": [
|
||||
"Activity_1wdjypm"
|
||||
],
|
||||
"lane": null,
|
||||
"documentation": null,
|
||||
"loopTask": false,
|
||||
"position": {
|
||||
"x": 240.0,
|
||||
"y": 80.0
|
||||
},
|
||||
"outgoing_sequence_flows": {
|
||||
"Activity_1wdjypm": {
|
||||
"id": "Flow_0aj70uj",
|
||||
"name": null,
|
||||
"documentation": null,
|
||||
"target_task_spec": "Activity_1wdjypm",
|
||||
"typename": "SequenceFlow"
|
||||
}
|
||||
},
|
||||
"outgoing_sequence_flows_by_id": {
|
||||
"Flow_0aj70uj": {
|
||||
"id": "Flow_0aj70uj",
|
||||
"name": null,
|
||||
"documentation": null,
|
||||
"target_task_spec": "Activity_1wdjypm",
|
||||
"typename": "SequenceFlow"
|
||||
}
|
||||
},
|
||||
"data_input_associations": [],
|
||||
"data_output_associations": [
|
||||
{
|
||||
"name": "obj_1",
|
||||
"description": "obj_1",
|
||||
"typename": "BpmnDataSpecification"
|
||||
}
|
||||
],
|
||||
"script": "in_1, in_2, unused = 1, \"hello world\", True\nobj_1='object 1'",
|
||||
"typename": "ScriptTask",
|
||||
"extensions": {}
|
||||
},
|
||||
"Activity_1wdjypm": {
|
||||
"id": "parent_6",
|
||||
"name": "Activity_1wdjypm",
|
||||
"description": "Update Data",
|
||||
"manual": false,
|
||||
"internal": false,
|
||||
"lookahead": 2,
|
||||
"inputs": [
|
||||
"Activity_0haob58"
|
||||
],
|
||||
"outputs": [
|
||||
"Event_1q277cc"
|
||||
],
|
||||
"lane": null,
|
||||
"documentation": null,
|
||||
"loopTask": false,
|
||||
"position": {
|
||||
"x": 400.0,
|
||||
"y": 80.0
|
||||
},
|
||||
"outgoing_sequence_flows": {
|
||||
"Event_1q277cc": {
|
||||
"id": "Flow_1uel76w",
|
||||
"name": null,
|
||||
"documentation": null,
|
||||
"target_task_spec": "Event_1q277cc",
|
||||
"typename": "SequenceFlow"
|
||||
}
|
||||
},
|
||||
"outgoing_sequence_flows_by_id": {
|
||||
"Flow_1uel76w": {
|
||||
"id": "Flow_1uel76w",
|
||||
"name": null,
|
||||
"documentation": null,
|
||||
"target_task_spec": "Event_1q277cc",
|
||||
"typename": "SequenceFlow"
|
||||
}
|
||||
},
|
||||
"data_input_associations": [],
|
||||
"data_output_associations": [],
|
||||
"spec": "subprocess",
|
||||
"typename": "CallActivity",
|
||||
"extensions": {}
|
||||
},
|
||||
"Event_1q277cc": {
|
||||
"id": "parent_7",
|
||||
"name": "Event_1q277cc",
|
||||
"description": null,
|
||||
"manual": false,
|
||||
"internal": false,
|
||||
"lookahead": 2,
|
||||
"inputs": [
|
||||
"Activity_1wdjypm"
|
||||
],
|
||||
"outputs": [
|
||||
"parent.EndJoin"
|
||||
],
|
||||
"lane": null,
|
||||
"documentation": null,
|
||||
"loopTask": false,
|
||||
"position": {
|
||||
"x": 562.0,
|
||||
"y": 102.0
|
||||
},
|
||||
"outgoing_sequence_flows": {
|
||||
"parent.EndJoin": {
|
||||
"id": "Event_1q277cc.ToEndJoin",
|
||||
"name": null,
|
||||
"documentation": null,
|
||||
"target_task_spec": "parent.EndJoin",
|
||||
"typename": "SequenceFlow"
|
||||
}
|
||||
},
|
||||
"outgoing_sequence_flows_by_id": {
|
||||
"Event_1q277cc.ToEndJoin": {
|
||||
"id": "Event_1q277cc.ToEndJoin",
|
||||
"name": null,
|
||||
"documentation": null,
|
||||
"target_task_spec": "parent.EndJoin",
|
||||
"typename": "SequenceFlow"
|
||||
}
|
||||
},
|
||||
"data_input_associations": [],
|
||||
"data_output_associations": [],
|
||||
"event_definition": {
|
||||
"internal": false,
|
||||
"external": false,
|
||||
"typename": "NoneEventDefinition"
|
||||
},
|
||||
"typename": "EndEvent",
|
||||
"extensions": {}
|
||||
},
|
||||
"Root": {
|
||||
"id": "parent_8",
|
||||
"name": "Root",
|
||||
"description": "",
|
||||
"manual": false,
|
||||
"internal": false,
|
||||
"lookahead": 2,
|
||||
"inputs": [],
|
||||
"outputs": [],
|
||||
"typename": "Simple"
|
||||
}
|
||||
},
|
||||
"data_inputs": [],
|
||||
"data_outputs": [],
|
||||
"data_objects": {
|
||||
"obj_1": {
|
||||
"name": "obj_1",
|
||||
"description": "obj_1",
|
||||
"typename": "BpmnDataSpecification"
|
||||
}
|
||||
},
|
||||
"correlation_keys": {},
|
||||
"typename": "BpmnProcessSpec"
|
||||
},
|
||||
"subprocess_specs": {
|
||||
"subprocess": {
|
||||
"name": "subprocess",
|
||||
"description": "subprocess",
|
||||
"file": "/home/essweine/work/sartography/code/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/io_spec.bpmn",
|
||||
"task_specs": {
|
||||
"Start": {
|
||||
"id": "subprocess_1",
|
||||
"name": "Start",
|
||||
"description": "",
|
||||
"manual": false,
|
||||
"internal": false,
|
||||
"lookahead": 2,
|
||||
"inputs": [],
|
||||
"outputs": [
|
||||
"Event_1rtivo5"
|
||||
],
|
||||
"typename": "StartTask"
|
||||
},
|
||||
"subprocess.EndJoin": {
|
||||
"id": "subprocess_2",
|
||||
"name": "subprocess.EndJoin",
|
||||
"description": "",
|
||||
"manual": false,
|
||||
"internal": false,
|
||||
"lookahead": 2,
|
||||
"inputs": [
|
||||
"Event_0pgucu1"
|
||||
],
|
||||
"outputs": [
|
||||
"End"
|
||||
],
|
||||
"typename": "_EndJoin"
|
||||
},
|
||||
"End": {
|
||||
"id": "subprocess_3",
|
||||
"name": "End",
|
||||
"description": "",
|
||||
"manual": false,
|
||||
"internal": false,
|
||||
"lookahead": 2,
|
||||
"inputs": [
|
||||
"subprocess.EndJoin"
|
||||
],
|
||||
"outputs": [],
|
||||
"typename": "Simple"
|
||||
},
|
||||
"Event_1rtivo5": {
|
||||
"id": "subprocess_4",
|
||||
"name": "Event_1rtivo5",
|
||||
"description": null,
|
||||
"manual": false,
|
||||
"internal": false,
|
||||
"lookahead": 2,
|
||||
"inputs": [
|
||||
"Start"
|
||||
],
|
||||
"outputs": [
|
||||
"Activity_04d94ee"
|
||||
],
|
||||
"lane": null,
|
||||
"documentation": null,
|
||||
"loopTask": false,
|
||||
"position": {
|
||||
"x": 232.0,
|
||||
"y": 252.0
|
||||
},
|
||||
"outgoing_sequence_flows": {
|
||||
"Activity_04d94ee": {
|
||||
"id": "Flow_0n038fc",
|
||||
"name": null,
|
||||
"documentation": null,
|
||||
"target_task_spec": "Activity_04d94ee",
|
||||
"typename": "SequenceFlow"
|
||||
}
|
||||
},
|
||||
"outgoing_sequence_flows_by_id": {
|
||||
"Flow_0n038fc": {
|
||||
"id": "Flow_0n038fc",
|
||||
"name": null,
|
||||
"documentation": null,
|
||||
"target_task_spec": "Activity_04d94ee",
|
||||
"typename": "SequenceFlow"
|
||||
}
|
||||
},
|
||||
"data_input_associations": [],
|
||||
"data_output_associations": [],
|
||||
"event_definition": {
|
||||
"internal": false,
|
||||
"external": false,
|
||||
"typename": "NoneEventDefinition"
|
||||
},
|
||||
"typename": "StartEvent",
|
||||
"extensions": {}
|
||||
},
|
||||
"Activity_04d94ee": {
|
||||
"id": "subprocess_5",
|
||||
"name": "Activity_04d94ee",
|
||||
"description": "Task 1",
|
||||
"manual": false,
|
||||
"internal": false,
|
||||
"lookahead": 2,
|
||||
"inputs": [
|
||||
"Event_1rtivo5"
|
||||
],
|
||||
"outputs": [
|
||||
"Event_0pgucu1"
|
||||
],
|
||||
"lane": null,
|
||||
"documentation": null,
|
||||
"loopTask": false,
|
||||
"position": {
|
||||
"x": 320.0,
|
||||
"y": 230.0
|
||||
},
|
||||
"outgoing_sequence_flows": {
|
||||
"Event_0pgucu1": {
|
||||
"id": "Flow_1d3l0mt",
|
||||
"name": null,
|
||||
"documentation": null,
|
||||
"target_task_spec": "Event_0pgucu1",
|
||||
"typename": "SequenceFlow"
|
||||
}
|
||||
},
|
||||
"outgoing_sequence_flows_by_id": {
|
||||
"Flow_1d3l0mt": {
|
||||
"id": "Flow_1d3l0mt",
|
||||
"name": null,
|
||||
"documentation": null,
|
||||
"target_task_spec": "Event_0pgucu1",
|
||||
"typename": "SequenceFlow"
|
||||
}
|
||||
},
|
||||
"data_input_associations": [],
|
||||
"data_output_associations": [],
|
||||
"script": "out_1, out_2, unused = in_1 * 2, in_2.upper(), False\n ",
|
||||
"typename": "ScriptTask",
|
||||
"extensions": {}
|
||||
},
|
||||
"Event_0pgucu1": {
|
||||
"id": "subprocess_6",
|
||||
"name": "Event_0pgucu1",
|
||||
"description": null,
|
||||
"manual": false,
|
||||
"internal": false,
|
||||
"lookahead": 2,
|
||||
"inputs": [
|
||||
"Activity_04d94ee"
|
||||
],
|
||||
"outputs": [
|
||||
"subprocess.EndJoin"
|
||||
],
|
||||
"lane": null,
|
||||
"documentation": null,
|
||||
"loopTask": false,
|
||||
"position": {
|
||||
"x": 472.0,
|
||||
"y": 252.0
|
||||
},
|
||||
"outgoing_sequence_flows": {
|
||||
"subprocess.EndJoin": {
|
||||
"id": "Event_0pgucu1.ToEndJoin",
|
||||
"name": null,
|
||||
"documentation": null,
|
||||
"target_task_spec": "subprocess.EndJoin",
|
||||
"typename": "SequenceFlow"
|
||||
}
|
||||
},
|
||||
"outgoing_sequence_flows_by_id": {
|
||||
"Event_0pgucu1.ToEndJoin": {
|
||||
"id": "Event_0pgucu1.ToEndJoin",
|
||||
"name": null,
|
||||
"documentation": null,
|
||||
"target_task_spec": "subprocess.EndJoin",
|
||||
"typename": "SequenceFlow"
|
||||
}
|
||||
},
|
||||
"data_input_associations": [],
|
||||
"data_output_associations": [],
|
||||
"event_definition": {
|
||||
"internal": false,
|
||||
"external": false,
|
||||
"typename": "NoneEventDefinition"
|
||||
},
|
||||
"typename": "EndEvent",
|
||||
"extensions": {}
|
||||
},
|
||||
"Root": {
|
||||
"id": "subprocess_7",
|
||||
"name": "Root",
|
||||
"description": "",
|
||||
"manual": false,
|
||||
"internal": false,
|
||||
"lookahead": 2,
|
||||
"inputs": [],
|
||||
"outputs": [],
|
||||
"typename": "Simple"
|
||||
}
|
||||
},
|
||||
"data_inputs": [
|
||||
{
|
||||
"name": "in_1",
|
||||
"description": "input 1",
|
||||
"typename": "BpmnDataSpecification"
|
||||
},
|
||||
{
|
||||
"name": "in_2",
|
||||
"description": "input 2",
|
||||
"typename": "BpmnDataSpecification"
|
||||
}
|
||||
],
|
||||
"data_outputs": [
|
||||
{
|
||||
"name": "out_1",
|
||||
"description": "output 1",
|
||||
"typename": "BpmnDataSpecification"
|
||||
},
|
||||
{
|
||||
"name": "out_2",
|
||||
"description": "output 2",
|
||||
"typename": "BpmnDataSpecification"
|
||||
}
|
||||
],
|
||||
"data_objects": {},
|
||||
"correlation_keys": {},
|
||||
"typename": "BpmnProcessSpec"
|
||||
}
|
||||
},
|
||||
"subprocesses": {
|
||||
"dcd54745-3143-4b4d-b557-f9c8ce9c7e71": {
|
||||
"data": {
|
||||
"obj_1": "object 1"
|
||||
},
|
||||
"last_task": null,
|
||||
"success": true,
|
||||
"tasks": {
|
||||
"658250f5-54df-4300-9e0a-6e122ed17e08": {
|
||||
"id": "658250f5-54df-4300-9e0a-6e122ed17e08",
|
||||
"parent": null,
|
||||
"children": [
|
||||
"cce657f0-4534-4923-b073-0213caf8d500"
|
||||
],
|
||||
"last_state_change": 1675380199.240486,
|
||||
"state": 32,
|
||||
"task_spec": "Root",
|
||||
"triggered": false,
|
||||
"workflow_name": "Activity_1wdjypm",
|
||||
"internal_data": {},
|
||||
"data": {}
|
||||
},
|
||||
"cce657f0-4534-4923-b073-0213caf8d500": {
|
||||
"id": "cce657f0-4534-4923-b073-0213caf8d500",
|
||||
"parent": "658250f5-54df-4300-9e0a-6e122ed17e08",
|
||||
"children": [
|
||||
"d0fd83e5-e4fb-49fb-9055-27993fa09430"
|
||||
],
|
||||
"last_state_change": 1675380199.24384,
|
||||
"state": 16,
|
||||
"task_spec": "Start",
|
||||
"triggered": false,
|
||||
"workflow_name": "Activity_1wdjypm",
|
||||
"internal_data": {},
|
||||
"data": {
|
||||
"in_1": 1,
|
||||
"in_2": "hello world"
|
||||
}
|
||||
},
|
||||
"d0fd83e5-e4fb-49fb-9055-27993fa09430": {
|
||||
"id": "d0fd83e5-e4fb-49fb-9055-27993fa09430",
|
||||
"parent": "cce657f0-4534-4923-b073-0213caf8d500",
|
||||
"children": [
|
||||
"48649a1e-9fe5-48ef-8bd9-57b60ff4e98b"
|
||||
],
|
||||
"last_state_change": 1675380199.2407098,
|
||||
"state": 4,
|
||||
"task_spec": "Event_1rtivo5",
|
||||
"triggered": false,
|
||||
"workflow_name": "Activity_1wdjypm",
|
||||
"internal_data": {},
|
||||
"data": {}
|
||||
},
|
||||
"48649a1e-9fe5-48ef-8bd9-57b60ff4e98b": {
|
||||
"id": "48649a1e-9fe5-48ef-8bd9-57b60ff4e98b",
|
||||
"parent": "d0fd83e5-e4fb-49fb-9055-27993fa09430",
|
||||
"children": [
|
||||
"984b7e3e-b037-4120-a544-792d5ee91fe5"
|
||||
],
|
||||
"last_state_change": 1675380199.240903,
|
||||
"state": 4,
|
||||
"task_spec": "Activity_04d94ee",
|
||||
"triggered": false,
|
||||
"workflow_name": "Activity_1wdjypm",
|
||||
"internal_data": {},
|
||||
"data": {}
|
||||
},
|
||||
"984b7e3e-b037-4120-a544-792d5ee91fe5": {
|
||||
"id": "984b7e3e-b037-4120-a544-792d5ee91fe5",
|
||||
"parent": "48649a1e-9fe5-48ef-8bd9-57b60ff4e98b",
|
||||
"children": [
|
||||
"8b4d7999-b35b-480f-8e3d-08ad24883bce"
|
||||
],
|
||||
"last_state_change": 1675380199.2411075,
|
||||
"state": 4,
|
||||
"task_spec": "Event_0pgucu1",
|
||||
"triggered": false,
|
||||
"workflow_name": "Activity_1wdjypm",
|
||||
"internal_data": {},
|
||||
"data": {}
|
||||
},
|
||||
"8b4d7999-b35b-480f-8e3d-08ad24883bce": {
|
||||
"id": "8b4d7999-b35b-480f-8e3d-08ad24883bce",
|
||||
"parent": "984b7e3e-b037-4120-a544-792d5ee91fe5",
|
||||
"children": [
|
||||
"0ad22900-dac5-4ce8-8fcf-644849cf8827"
|
||||
],
|
||||
"last_state_change": 1675380199.2413251,
|
||||
"state": 4,
|
||||
"task_spec": "subprocess.EndJoin",
|
||||
"triggered": false,
|
||||
"workflow_name": "Activity_1wdjypm",
|
||||
"internal_data": {},
|
||||
"data": {}
|
||||
},
|
||||
"0ad22900-dac5-4ce8-8fcf-644849cf8827": {
|
||||
"id": "0ad22900-dac5-4ce8-8fcf-644849cf8827",
|
||||
"parent": "8b4d7999-b35b-480f-8e3d-08ad24883bce",
|
||||
"children": [],
|
||||
"last_state_change": 1675380199.2415493,
|
||||
"state": 4,
|
||||
"task_spec": "End",
|
||||
"triggered": false,
|
||||
"workflow_name": "Activity_1wdjypm",
|
||||
"internal_data": {},
|
||||
"data": {}
|
||||
}
|
||||
},
|
||||
"root": "658250f5-54df-4300-9e0a-6e122ed17e08"
|
||||
}
|
||||
},
|
||||
"bpmn_messages": []
|
||||
}
|
|
@ -0,0 +1,830 @@
|
|||
{
|
||||
"serializer_version": "1.1",
|
||||
"data": {},
|
||||
"last_task": "215867bf-41a3-42b3-8403-b836aabcfe6c",
|
||||
"success": true,
|
||||
"tasks": {
|
||||
"01bdb086-35cc-4897-805f-d059d1cfe682": {
|
||||
"id": "01bdb086-35cc-4897-805f-d059d1cfe682",
|
||||
"parent": null,
|
||||
"children": [
|
||||
"3846b631-5ed5-4913-9f53-f358886547cd"
|
||||
],
|
||||
"last_state_change": 1675380654.3327675,
|
||||
"state": 32,
|
||||
"task_spec": "Root",
|
||||
"triggered": false,
|
||||
"workflow_name": "lanes",
|
||||
"internal_data": {},
|
||||
"data": {}
|
||||
},
|
||||
"3846b631-5ed5-4913-9f53-f358886547cd": {
|
||||
"id": "3846b631-5ed5-4913-9f53-f358886547cd",
|
||||
"parent": "01bdb086-35cc-4897-805f-d059d1cfe682",
|
||||
"children": [
|
||||
"7f261ef4-047b-4941-a508-c24790f0a8c0"
|
||||
],
|
||||
"last_state_change": 1675380654.338419,
|
||||
"state": 32,
|
||||
"task_spec": "Start",
|
||||
"triggered": false,
|
||||
"workflow_name": "lanes",
|
||||
"internal_data": {},
|
||||
"data": {}
|
||||
},
|
||||
"7f261ef4-047b-4941-a508-c24790f0a8c0": {
|
||||
"id": "7f261ef4-047b-4941-a508-c24790f0a8c0",
|
||||
"parent": "3846b631-5ed5-4913-9f53-f358886547cd",
|
||||
"children": [
|
||||
"215867bf-41a3-42b3-8403-b836aabcfe6c"
|
||||
],
|
||||
"last_state_change": 1675380654.3411734,
|
||||
"state": 32,
|
||||
"task_spec": "StartEvent_1",
|
||||
"triggered": false,
|
||||
"workflow_name": "lanes",
|
||||
"internal_data": {
|
||||
"event_fired": true
|
||||
},
|
||||
"data": {}
|
||||
},
|
||||
"215867bf-41a3-42b3-8403-b836aabcfe6c": {
|
||||
"id": "215867bf-41a3-42b3-8403-b836aabcfe6c",
|
||||
"parent": "7f261ef4-047b-4941-a508-c24790f0a8c0",
|
||||
"children": [
|
||||
"5b0dc44b-0901-4989-8733-2dfcb82344c7"
|
||||
],
|
||||
"last_state_change": 1675380654.3452208,
|
||||
"state": 32,
|
||||
"task_spec": "Activity_A1",
|
||||
"triggered": false,
|
||||
"workflow_name": "lanes",
|
||||
"internal_data": {},
|
||||
"data": {}
|
||||
},
|
||||
"5b0dc44b-0901-4989-8733-2dfcb82344c7": {
|
||||
"id": "5b0dc44b-0901-4989-8733-2dfcb82344c7",
|
||||
"parent": "215867bf-41a3-42b3-8403-b836aabcfe6c",
|
||||
"children": [
|
||||
"bf645868-c2fc-4dfb-90c9-210f23b7f503"
|
||||
],
|
||||
"last_state_change": 1675380654.3462226,
|
||||
"state": 16,
|
||||
"task_spec": "Activity_B1",
|
||||
"triggered": false,
|
||||
"workflow_name": "lanes",
|
||||
"internal_data": {},
|
||||
"data": {}
|
||||
},
|
||||
"bf645868-c2fc-4dfb-90c9-210f23b7f503": {
|
||||
"id": "bf645868-c2fc-4dfb-90c9-210f23b7f503",
|
||||
"parent": "5b0dc44b-0901-4989-8733-2dfcb82344c7",
|
||||
"children": [
|
||||
"3825a6af-4ea6-4242-9570-411a7d080f96",
|
||||
"90b2d2bc-d222-4c96-95e5-9becd7260ef6"
|
||||
],
|
||||
"last_state_change": 1675380654.3340564,
|
||||
"state": 4,
|
||||
"task_spec": "Gateway_askQuestion",
|
||||
"triggered": false,
|
||||
"workflow_name": "lanes",
|
||||
"internal_data": {},
|
||||
"data": {}
|
||||
},
|
||||
"3825a6af-4ea6-4242-9570-411a7d080f96": {
|
||||
"id": "3825a6af-4ea6-4242-9570-411a7d080f96",
|
||||
"parent": "bf645868-c2fc-4dfb-90c9-210f23b7f503",
|
||||
"children": [
|
||||
"a00fc3d7-c0ab-46ec-a3b1-892ff78f4809"
|
||||
],
|
||||
"last_state_change": 1675380654.3344204,
|
||||
"state": 1,
|
||||
"task_spec": "Activity_A2",
|
||||
"triggered": false,
|
||||
"workflow_name": "lanes",
|
||||
"internal_data": {},
|
||||
"data": {}
|
||||
},
|
||||
"a00fc3d7-c0ab-46ec-a3b1-892ff78f4809": {
|
||||
"id": "a00fc3d7-c0ab-46ec-a3b1-892ff78f4809",
|
||||
"parent": "3825a6af-4ea6-4242-9570-411a7d080f96",
|
||||
"children": [],
|
||||
"last_state_change": 1675380654.3349297,
|
||||
"state": 1,
|
||||
"task_spec": "Implement_Feature",
|
||||
"triggered": false,
|
||||
"workflow_name": "lanes",
|
||||
"internal_data": {},
|
||||
"data": {}
|
||||
},
|
||||
"90b2d2bc-d222-4c96-95e5-9becd7260ef6": {
|
||||
"id": "90b2d2bc-d222-4c96-95e5-9becd7260ef6",
|
||||
"parent": "bf645868-c2fc-4dfb-90c9-210f23b7f503",
|
||||
"children": [
|
||||
"3eeb1f48-6bde-4921-b2b0-ae4557c09d1f"
|
||||
],
|
||||
"last_state_change": 1675380654.3346882,
|
||||
"state": 2,
|
||||
"task_spec": "Implement_Feature",
|
||||
"triggered": false,
|
||||
"workflow_name": "lanes",
|
||||
"internal_data": {},
|
||||
"data": {}
|
||||
},
|
||||
"3eeb1f48-6bde-4921-b2b0-ae4557c09d1f": {
|
||||
"id": "3eeb1f48-6bde-4921-b2b0-ae4557c09d1f",
|
||||
"parent": "90b2d2bc-d222-4c96-95e5-9becd7260ef6",
|
||||
"children": [],
|
||||
"last_state_change": 1675380654.3352633,
|
||||
"state": 2,
|
||||
"task_spec": "Activity_1uksrqx",
|
||||
"triggered": false,
|
||||
"workflow_name": "lanes",
|
||||
"internal_data": {},
|
||||
"data": {}
|
||||
}
|
||||
},
|
||||
"root": "01bdb086-35cc-4897-805f-d059d1cfe682",
|
||||
"spec": {
|
||||
"name": "lanes",
|
||||
"description": "lanes",
|
||||
"file": "/home/essweine/work/sartography/code/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/lanes.bpmn",
|
||||
"task_specs": {
|
||||
"Start": {
|
||||
"id": "lanes_1",
|
||||
"name": "Start",
|
||||
"description": "",
|
||||
"manual": false,
|
||||
"internal": false,
|
||||
"lookahead": 2,
|
||||
"inputs": [],
|
||||
"outputs": [
|
||||
"StartEvent_1"
|
||||
],
|
||||
"typename": "StartTask"
|
||||
},
|
||||
"lanes.EndJoin": {
|
||||
"id": "lanes_2",
|
||||
"name": "lanes.EndJoin",
|
||||
"description": "",
|
||||
"manual": false,
|
||||
"internal": false,
|
||||
"lookahead": 2,
|
||||
"inputs": [
|
||||
"Event_07pakcl"
|
||||
],
|
||||
"outputs": [
|
||||
"End"
|
||||
],
|
||||
"typename": "_EndJoin"
|
||||
},
|
||||
"End": {
|
||||
"id": "lanes_3",
|
||||
"name": "End",
|
||||
"description": "",
|
||||
"manual": false,
|
||||
"internal": false,
|
||||
"lookahead": 2,
|
||||
"inputs": [
|
||||
"lanes.EndJoin"
|
||||
],
|
||||
"outputs": [],
|
||||
"typename": "Simple"
|
||||
},
|
||||
"StartEvent_1": {
|
||||
"id": "lanes_4",
|
||||
"name": "StartEvent_1",
|
||||
"description": null,
|
||||
"manual": false,
|
||||
"internal": false,
|
||||
"lookahead": 2,
|
||||
"inputs": [
|
||||
"Start"
|
||||
],
|
||||
"outputs": [
|
||||
"Activity_A1"
|
||||
],
|
||||
"lane": "A",
|
||||
"documentation": null,
|
||||
"loopTask": false,
|
||||
"position": {
|
||||
"x": 219.0,
|
||||
"y": 92.0
|
||||
},
|
||||
"outgoing_sequence_flows": {
|
||||
"Activity_A1": {
|
||||
"id": "Flow_0jwejm5",
|
||||
"name": null,
|
||||
"documentation": null,
|
||||
"target_task_spec": "Activity_A1",
|
||||
"typename": "SequenceFlow"
|
||||
}
|
||||
},
|
||||
"outgoing_sequence_flows_by_id": {
|
||||
"Flow_0jwejm5": {
|
||||
"id": "Flow_0jwejm5",
|
||||
"name": null,
|
||||
"documentation": null,
|
||||
"target_task_spec": "Activity_A1",
|
||||
"typename": "SequenceFlow"
|
||||
}
|
||||
},
|
||||
"data_input_associations": [],
|
||||
"data_output_associations": [],
|
||||
"event_definition": {
|
||||
"internal": false,
|
||||
"external": false,
|
||||
"typename": "NoneEventDefinition"
|
||||
},
|
||||
"typename": "StartEvent",
|
||||
"extensions": {}
|
||||
},
|
||||
"Activity_A1": {
|
||||
"id": "lanes_5",
|
||||
"name": "Activity_A1",
|
||||
"description": "Request Feature",
|
||||
"manual": false,
|
||||
"internal": false,
|
||||
"lookahead": 2,
|
||||
"inputs": [
|
||||
"StartEvent_1"
|
||||
],
|
||||
"outputs": [
|
||||
"Activity_B1"
|
||||
],
|
||||
"lane": "A",
|
||||
"documentation": null,
|
||||
"loopTask": false,
|
||||
"position": {
|
||||
"x": 300.0,
|
||||
"y": 70.0
|
||||
},
|
||||
"outgoing_sequence_flows": {
|
||||
"Activity_B1": {
|
||||
"id": "Flow_140vffb",
|
||||
"name": null,
|
||||
"documentation": null,
|
||||
"target_task_spec": "Activity_B1",
|
||||
"typename": "SequenceFlow"
|
||||
}
|
||||
},
|
||||
"outgoing_sequence_flows_by_id": {
|
||||
"Flow_140vffb": {
|
||||
"id": "Flow_140vffb",
|
||||
"name": null,
|
||||
"documentation": null,
|
||||
"target_task_spec": "Activity_B1",
|
||||
"typename": "SequenceFlow"
|
||||
}
|
||||
},
|
||||
"data_input_associations": [],
|
||||
"data_output_associations": [],
|
||||
"typename": "UserTask",
|
||||
"extensions": {}
|
||||
},
|
||||
"Activity_B1": {
|
||||
"id": "lanes_6",
|
||||
"name": "Activity_B1",
|
||||
"description": "Clarifying Questions?",
|
||||
"manual": false,
|
||||
"internal": false,
|
||||
"lookahead": 2,
|
||||
"inputs": [
|
||||
"Activity_A1"
|
||||
],
|
||||
"outputs": [
|
||||
"Gateway_askQuestion"
|
||||
],
|
||||
"lane": "B",
|
||||
"documentation": null,
|
||||
"loopTask": false,
|
||||
"position": {
|
||||
"x": 300.0,
|
||||
"y": 210.0
|
||||
},
|
||||
"outgoing_sequence_flows": {
|
||||
"Gateway_askQuestion": {
|
||||
"id": "Flow_1k9gsm1",
|
||||
"name": null,
|
||||
"documentation": null,
|
||||
"target_task_spec": "Gateway_askQuestion",
|
||||
"typename": "SequenceFlow"
|
||||
}
|
||||
},
|
||||
"outgoing_sequence_flows_by_id": {
|
||||
"Flow_1k9gsm1": {
|
||||
"id": "Flow_1k9gsm1",
|
||||
"name": null,
|
||||
"documentation": null,
|
||||
"target_task_spec": "Gateway_askQuestion",
|
||||
"typename": "SequenceFlow"
|
||||
}
|
||||
},
|
||||
"data_input_associations": [],
|
||||
"data_output_associations": [],
|
||||
"typename": "UserTask",
|
||||
"extensions": {}
|
||||
},
|
||||
"Gateway_askQuestion": {
|
||||
"id": "lanes_7",
|
||||
"name": "Gateway_askQuestion",
|
||||
"description": "Do we need Clarifcation?",
|
||||
"manual": false,
|
||||
"internal": false,
|
||||
"lookahead": 2,
|
||||
"inputs": [
|
||||
"Activity_B1"
|
||||
],
|
||||
"outputs": [
|
||||
"Activity_A2",
|
||||
"Implement_Feature"
|
||||
],
|
||||
"lane": "B",
|
||||
"documentation": null,
|
||||
"loopTask": false,
|
||||
"position": {
|
||||
"x": 465.0,
|
||||
"y": 225.0
|
||||
},
|
||||
"outgoing_sequence_flows": {
|
||||
"Activity_A2": {
|
||||
"id": "Flow_0okhwy0",
|
||||
"name": "Yes",
|
||||
"documentation": null,
|
||||
"target_task_spec": "Activity_A2",
|
||||
"typename": "SequenceFlow"
|
||||
},
|
||||
"Implement_Feature": {
|
||||
"id": "Flow_182bqvo",
|
||||
"name": "No",
|
||||
"documentation": null,
|
||||
"target_task_spec": "Implement_Feature",
|
||||
"typename": "SequenceFlow"
|
||||
}
|
||||
},
|
||||
"outgoing_sequence_flows_by_id": {
|
||||
"Flow_0okhwy0": {
|
||||
"id": "Flow_0okhwy0",
|
||||
"name": "Yes",
|
||||
"documentation": null,
|
||||
"target_task_spec": "Activity_A2",
|
||||
"typename": "SequenceFlow"
|
||||
},
|
||||
"Flow_182bqvo": {
|
||||
"id": "Flow_182bqvo",
|
||||
"name": "No",
|
||||
"documentation": null,
|
||||
"target_task_spec": "Implement_Feature",
|
||||
"typename": "SequenceFlow"
|
||||
}
|
||||
},
|
||||
"data_input_associations": [],
|
||||
"data_output_associations": [],
|
||||
"default_task_spec": "Implement_Feature",
|
||||
"cond_task_specs": [
|
||||
{
|
||||
"condition": "NeedClarification == 'Yes'",
|
||||
"task_spec": "Activity_A2"
|
||||
}
|
||||
],
|
||||
"choice": null,
|
||||
"typename": "ExclusiveGateway",
|
||||
"extensions": {}
|
||||
},
|
||||
"Implement_Feature": {
|
||||
"id": "lanes_8",
|
||||
"name": "Implement_Feature",
|
||||
"description": "Implement Feature",
|
||||
"manual": false,
|
||||
"internal": false,
|
||||
"lookahead": 2,
|
||||
"inputs": [
|
||||
"Activity_A2",
|
||||
"Gateway_askQuestion"
|
||||
],
|
||||
"outputs": [
|
||||
"Activity_1uksrqx"
|
||||
],
|
||||
"lane": "B",
|
||||
"documentation": null,
|
||||
"loopTask": false,
|
||||
"position": {
|
||||
"x": 620.0,
|
||||
"y": 200.0
|
||||
},
|
||||
"outgoing_sequence_flows": {
|
||||
"Activity_1uksrqx": {
|
||||
"id": "Flow_0xz2oco",
|
||||
"name": null,
|
||||
"documentation": null,
|
||||
"target_task_spec": "Activity_1uksrqx",
|
||||
"typename": "SequenceFlow"
|
||||
}
|
||||
},
|
||||
"outgoing_sequence_flows_by_id": {
|
||||
"Flow_0xz2oco": {
|
||||
"id": "Flow_0xz2oco",
|
||||
"name": null,
|
||||
"documentation": null,
|
||||
"target_task_spec": "Activity_1uksrqx",
|
||||
"typename": "SequenceFlow"
|
||||
}
|
||||
},
|
||||
"data_input_associations": [],
|
||||
"data_output_associations": [],
|
||||
"typename": "ManualTask",
|
||||
"extensions": {}
|
||||
},
|
||||
"Activity_1uksrqx": {
|
||||
"id": "lanes_9",
|
||||
"name": "Activity_1uksrqx",
|
||||
"description": "Send to testing",
|
||||
"manual": false,
|
||||
"internal": false,
|
||||
"lookahead": 2,
|
||||
"inputs": [
|
||||
"Implement_Feature"
|
||||
],
|
||||
"outputs": [
|
||||
"Activity_0i0rxuw"
|
||||
],
|
||||
"lane": "C",
|
||||
"documentation": null,
|
||||
"loopTask": false,
|
||||
"position": {
|
||||
"x": 620.0,
|
||||
"y": 340.0
|
||||
},
|
||||
"outgoing_sequence_flows": {
|
||||
"Activity_0i0rxuw": {
|
||||
"id": "Flow_1cybznq",
|
||||
"name": null,
|
||||
"documentation": null,
|
||||
"target_task_spec": "Activity_0i0rxuw",
|
||||
"typename": "SequenceFlow"
|
||||
}
|
||||
},
|
||||
"outgoing_sequence_flows_by_id": {
|
||||
"Flow_1cybznq": {
|
||||
"id": "Flow_1cybznq",
|
||||
"name": null,
|
||||
"documentation": null,
|
||||
"target_task_spec": "Activity_0i0rxuw",
|
||||
"typename": "SequenceFlow"
|
||||
}
|
||||
},
|
||||
"data_input_associations": [],
|
||||
"data_output_associations": [],
|
||||
"typename": "ManualTask",
|
||||
"extensions": {}
|
||||
},
|
||||
"Activity_0i0rxuw": {
|
||||
"id": "lanes_10",
|
||||
"name": "Activity_0i0rxuw",
|
||||
"description": null,
|
||||
"manual": false,
|
||||
"internal": false,
|
||||
"lookahead": 2,
|
||||
"inputs": [
|
||||
"Activity_1uksrqx"
|
||||
],
|
||||
"outputs": [
|
||||
"Event_07pakcl"
|
||||
],
|
||||
"lane": "C",
|
||||
"documentation": null,
|
||||
"loopTask": false,
|
||||
"position": {
|
||||
"x": 760.0,
|
||||
"y": 320.0
|
||||
},
|
||||
"outgoing_sequence_flows": {
|
||||
"Event_07pakcl": {
|
||||
"id": "Flow_0e1uyol",
|
||||
"name": null,
|
||||
"documentation": null,
|
||||
"target_task_spec": "Event_07pakcl",
|
||||
"typename": "SequenceFlow"
|
||||
}
|
||||
},
|
||||
"outgoing_sequence_flows_by_id": {
|
||||
"Flow_0e1uyol": {
|
||||
"id": "Flow_0e1uyol",
|
||||
"name": null,
|
||||
"documentation": null,
|
||||
"target_task_spec": "Event_07pakcl",
|
||||
"typename": "SequenceFlow"
|
||||
}
|
||||
},
|
||||
"data_input_associations": [],
|
||||
"data_output_associations": [],
|
||||
"spec": "Activity_0i0rxuw",
|
||||
"typename": "CallActivity",
|
||||
"extensions": {}
|
||||
},
|
||||
"Event_07pakcl": {
|
||||
"id": "lanes_11",
|
||||
"name": "Event_07pakcl",
|
||||
"description": null,
|
||||
"manual": false,
|
||||
"internal": false,
|
||||
"lookahead": 2,
|
||||
"inputs": [
|
||||
"Activity_0i0rxuw"
|
||||
],
|
||||
"outputs": [
|
||||
"lanes.EndJoin"
|
||||
],
|
||||
"lane": "C",
|
||||
"documentation": null,
|
||||
"loopTask": false,
|
||||
"position": {
|
||||
"x": 1092.0,
|
||||
"y": 362.0
|
||||
},
|
||||
"outgoing_sequence_flows": {
|
||||
"lanes.EndJoin": {
|
||||
"id": "Event_07pakcl.ToEndJoin",
|
||||
"name": null,
|
||||
"documentation": null,
|
||||
"target_task_spec": "lanes.EndJoin",
|
||||
"typename": "SequenceFlow"
|
||||
}
|
||||
},
|
||||
"outgoing_sequence_flows_by_id": {
|
||||
"Event_07pakcl.ToEndJoin": {
|
||||
"id": "Event_07pakcl.ToEndJoin",
|
||||
"name": null,
|
||||
"documentation": null,
|
||||
"target_task_spec": "lanes.EndJoin",
|
||||
"typename": "SequenceFlow"
|
||||
}
|
||||
},
|
||||
"data_input_associations": [],
|
||||
"data_output_associations": [],
|
||||
"event_definition": {
|
||||
"internal": false,
|
||||
"external": false,
|
||||
"typename": "NoneEventDefinition"
|
||||
},
|
||||
"typename": "EndEvent",
|
||||
"extensions": {}
|
||||
},
|
||||
"Activity_A2": {
|
||||
"id": "lanes_12",
|
||||
"name": "Activity_A2",
|
||||
"description": "Clarify Request",
|
||||
"manual": false,
|
||||
"internal": false,
|
||||
"lookahead": 2,
|
||||
"inputs": [
|
||||
"Gateway_askQuestion"
|
||||
],
|
||||
"outputs": [
|
||||
"Implement_Feature"
|
||||
],
|
||||
"lane": "A",
|
||||
"documentation": null,
|
||||
"loopTask": false,
|
||||
"position": {
|
||||
"x": 530.0,
|
||||
"y": 70.0
|
||||
},
|
||||
"outgoing_sequence_flows": {
|
||||
"Implement_Feature": {
|
||||
"id": "Flow_17rng3c",
|
||||
"name": null,
|
||||
"documentation": null,
|
||||
"target_task_spec": "Implement_Feature",
|
||||
"typename": "SequenceFlow"
|
||||
}
|
||||
},
|
||||
"outgoing_sequence_flows_by_id": {
|
||||
"Flow_17rng3c": {
|
||||
"id": "Flow_17rng3c",
|
||||
"name": null,
|
||||
"documentation": null,
|
||||
"target_task_spec": "Implement_Feature",
|
||||
"typename": "SequenceFlow"
|
||||
}
|
||||
},
|
||||
"data_input_associations": [],
|
||||
"data_output_associations": [],
|
||||
"typename": "UserTask",
|
||||
"extensions": {}
|
||||
},
|
||||
"Root": {
|
||||
"id": "lanes_13",
|
||||
"name": "Root",
|
||||
"description": "",
|
||||
"manual": false,
|
||||
"internal": false,
|
||||
"lookahead": 2,
|
||||
"inputs": [],
|
||||
"outputs": [],
|
||||
"typename": "Simple"
|
||||
}
|
||||
},
|
||||
"data_inputs": [],
|
||||
"data_outputs": [],
|
||||
"data_objects": {},
|
||||
"correlation_keys": {},
|
||||
"typename": "BpmnProcessSpec"
|
||||
},
|
||||
"subprocess_specs": {
|
||||
"Activity_0i0rxuw": {
|
||||
"name": "Activity_0i0rxuw",
|
||||
"description": "Activity_0i0rxuw",
|
||||
"file": "/home/essweine/work/sartography/code/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/lanes.bpmn",
|
||||
"task_specs": {
|
||||
"Start": {
|
||||
"id": "Activity_0i0rxuw_1",
|
||||
"name": "Start",
|
||||
"description": "",
|
||||
"manual": false,
|
||||
"internal": false,
|
||||
"lookahead": 2,
|
||||
"inputs": [],
|
||||
"outputs": [
|
||||
"Event_0lbloj7"
|
||||
],
|
||||
"typename": "StartTask"
|
||||
},
|
||||
"Activity_0i0rxuw.EndJoin": {
|
||||
"id": "Activity_0i0rxuw_2",
|
||||
"name": "Activity_0i0rxuw.EndJoin",
|
||||
"description": "",
|
||||
"manual": false,
|
||||
"internal": false,
|
||||
"lookahead": 2,
|
||||
"inputs": [
|
||||
"Event_1vz21ww"
|
||||
],
|
||||
"outputs": [
|
||||
"End"
|
||||
],
|
||||
"typename": "_EndJoin"
|
||||
},
|
||||
"End": {
|
||||
"id": "Activity_0i0rxuw_3",
|
||||
"name": "End",
|
||||
"description": "",
|
||||
"manual": false,
|
||||
"internal": false,
|
||||
"lookahead": 2,
|
||||
"inputs": [
|
||||
"Activity_0i0rxuw.EndJoin"
|
||||
],
|
||||
"outputs": [],
|
||||
"typename": "Simple"
|
||||
},
|
||||
"Event_0lbloj7": {
|
||||
"id": "Activity_0i0rxuw_4",
|
||||
"name": "Event_0lbloj7",
|
||||
"description": null,
|
||||
"manual": false,
|
||||
"internal": false,
|
||||
"lookahead": 2,
|
||||
"inputs": [
|
||||
"Start"
|
||||
],
|
||||
"outputs": [
|
||||
"SubProcessTask"
|
||||
],
|
||||
"lane": "C",
|
||||
"documentation": null,
|
||||
"loopTask": false,
|
||||
"position": {
|
||||
"x": 782.0,
|
||||
"y": 362.0
|
||||
},
|
||||
"outgoing_sequence_flows": {
|
||||
"SubProcessTask": {
|
||||
"id": "Flow_086ghyu",
|
||||
"name": null,
|
||||
"documentation": null,
|
||||
"target_task_spec": "SubProcessTask",
|
||||
"typename": "SequenceFlow"
|
||||
}
|
||||
},
|
||||
"outgoing_sequence_flows_by_id": {
|
||||
"Flow_086ghyu": {
|
||||
"id": "Flow_086ghyu",
|
||||
"name": null,
|
||||
"documentation": null,
|
||||
"target_task_spec": "SubProcessTask",
|
||||
"typename": "SequenceFlow"
|
||||
}
|
||||
},
|
||||
"data_input_associations": [],
|
||||
"data_output_associations": [],
|
||||
"event_definition": {
|
||||
"internal": false,
|
||||
"external": false,
|
||||
"typename": "NoneEventDefinition"
|
||||
},
|
||||
"typename": "StartEvent",
|
||||
"extensions": {}
|
||||
},
|
||||
"SubProcessTask": {
|
||||
"id": "Activity_0i0rxuw_5",
|
||||
"name": "SubProcessTask",
|
||||
"description": "SubProcessTask",
|
||||
"manual": false,
|
||||
"internal": false,
|
||||
"lookahead": 2,
|
||||
"inputs": [
|
||||
"Event_0lbloj7"
|
||||
],
|
||||
"outputs": [
|
||||
"Event_1vz21ww"
|
||||
],
|
||||
"lane": "C",
|
||||
"documentation": null,
|
||||
"loopTask": false,
|
||||
"position": {
|
||||
"x": 850.0,
|
||||
"y": 340.0
|
||||
},
|
||||
"outgoing_sequence_flows": {
|
||||
"Event_1vz21ww": {
|
||||
"id": "Flow_1jw6qrj",
|
||||
"name": null,
|
||||
"documentation": null,
|
||||
"target_task_spec": "Event_1vz21ww",
|
||||
"typename": "SequenceFlow"
|
||||
}
|
||||
},
|
||||
"outgoing_sequence_flows_by_id": {
|
||||
"Flow_1jw6qrj": {
|
||||
"id": "Flow_1jw6qrj",
|
||||
"name": null,
|
||||
"documentation": null,
|
||||
"target_task_spec": "Event_1vz21ww",
|
||||
"typename": "SequenceFlow"
|
||||
}
|
||||
},
|
||||
"data_input_associations": [],
|
||||
"data_output_associations": [],
|
||||
"typename": "ManualTask",
|
||||
"extensions": {}
|
||||
},
|
||||
"Event_1vz21ww": {
|
||||
"id": "Activity_0i0rxuw_6",
|
||||
"name": "Event_1vz21ww",
|
||||
"description": null,
|
||||
"manual": false,
|
||||
"internal": false,
|
||||
"lookahead": 2,
|
||||
"inputs": [
|
||||
"SubProcessTask"
|
||||
],
|
||||
"outputs": [
|
||||
"Activity_0i0rxuw.EndJoin"
|
||||
],
|
||||
"lane": "C",
|
||||
"documentation": null,
|
||||
"loopTask": false,
|
||||
"position": {
|
||||
"x": 982.0,
|
||||
"y": 362.0
|
||||
},
|
||||
"outgoing_sequence_flows": {
|
||||
"Activity_0i0rxuw.EndJoin": {
|
||||
"id": "Event_1vz21ww.ToEndJoin",
|
||||
"name": null,
|
||||
"documentation": null,
|
||||
"target_task_spec": "Activity_0i0rxuw.EndJoin",
|
||||
"typename": "SequenceFlow"
|
||||
}
|
||||
},
|
||||
"outgoing_sequence_flows_by_id": {
|
||||
"Event_1vz21ww.ToEndJoin": {
|
||||
"id": "Event_1vz21ww.ToEndJoin",
|
||||
"name": null,
|
||||
"documentation": null,
|
||||
"target_task_spec": "Activity_0i0rxuw.EndJoin",
|
||||
"typename": "SequenceFlow"
|
||||
}
|
||||
},
|
||||
"data_input_associations": [],
|
||||
"data_output_associations": [],
|
||||
"event_definition": {
|
||||
"internal": false,
|
||||
"external": false,
|
||||
"typename": "NoneEventDefinition"
|
||||
},
|
||||
"typename": "EndEvent",
|
||||
"extensions": {}
|
||||
}
|
||||
},
|
||||
"data_inputs": [],
|
||||
"data_outputs": [],
|
||||
"data_objects": {},
|
||||
"correlation_keys": {},
|
||||
"typename": "BpmnProcessSpec"
|
||||
}
|
||||
},
|
||||
"subprocesses": {},
|
||||
"bpmn_messages": []
|
||||
}
|
|
@ -0,0 +1,350 @@
|
|||
{
|
||||
"serializer_version": "1.1",
|
||||
"data": {},
|
||||
"last_task": "ca089728-9745-4d50-8fbc-f2f7234dec8f",
|
||||
"success": true,
|
||||
"tasks": {
|
||||
"fa4b8656-22a2-467e-8fb0-9b1d8f1f6da6": {
|
||||
"id": "fa4b8656-22a2-467e-8fb0-9b1d8f1f6da6",
|
||||
"parent": null,
|
||||
"children": [
|
||||
"ccf50f31-880b-406a-9e61-2f3d42f39d70"
|
||||
],
|
||||
"last_state_change": 1676389310.7311432,
|
||||
"state": 32,
|
||||
"task_spec": "Root",
|
||||
"triggered": false,
|
||||
"workflow_name": "main",
|
||||
"internal_data": {},
|
||||
"data": {}
|
||||
},
|
||||
"ccf50f31-880b-406a-9e61-2f3d42f39d70": {
|
||||
"id": "ccf50f31-880b-406a-9e61-2f3d42f39d70",
|
||||
"parent": "fa4b8656-22a2-467e-8fb0-9b1d8f1f6da6",
|
||||
"children": [
|
||||
"ca089728-9745-4d50-8fbc-f2f7234dec8f"
|
||||
],
|
||||
"last_state_change": 1676389310.735502,
|
||||
"state": 32,
|
||||
"task_spec": "Start",
|
||||
"triggered": false,
|
||||
"workflow_name": "main",
|
||||
"internal_data": {},
|
||||
"data": {
|
||||
"input_data": [
|
||||
1,
|
||||
2,
|
||||
3
|
||||
]
|
||||
}
|
||||
},
|
||||
"ca089728-9745-4d50-8fbc-f2f7234dec8f": {
|
||||
"id": "ca089728-9745-4d50-8fbc-f2f7234dec8f",
|
||||
"parent": "ccf50f31-880b-406a-9e61-2f3d42f39d70",
|
||||
"children": [
|
||||
"513dba6b-7017-48df-a1e0-7a2c57a1042c"
|
||||
],
|
||||
"last_state_change": 1676389310.739117,
|
||||
"state": 32,
|
||||
"task_spec": "StartEvent_1",
|
||||
"triggered": false,
|
||||
"workflow_name": "main",
|
||||
"internal_data": {
|
||||
"event_fired": true
|
||||
},
|
||||
"data": {
|
||||
"input_data": [
|
||||
1,
|
||||
2,
|
||||
3
|
||||
]
|
||||
}
|
||||
},
|
||||
"513dba6b-7017-48df-a1e0-7a2c57a1042c": {
|
||||
"id": "513dba6b-7017-48df-a1e0-7a2c57a1042c",
|
||||
"parent": "ca089728-9745-4d50-8fbc-f2f7234dec8f",
|
||||
"children": [
|
||||
"638ea876-beb2-4fd6-9dc3-5fd528d7cfb9"
|
||||
],
|
||||
"last_state_change": 1676389310.7412922,
|
||||
"state": 16,
|
||||
"task_spec": "Gateway_for_any_task_start",
|
||||
"triggered": false,
|
||||
"workflow_name": "main",
|
||||
"internal_data": {},
|
||||
"data": {
|
||||
"input_data": [
|
||||
1,
|
||||
2,
|
||||
3
|
||||
]
|
||||
}
|
||||
},
|
||||
"638ea876-beb2-4fd6-9dc3-5fd528d7cfb9": {
|
||||
"id": "638ea876-beb2-4fd6-9dc3-5fd528d7cfb9",
|
||||
"parent": "513dba6b-7017-48df-a1e0-7a2c57a1042c",
|
||||
"children": [
|
||||
"ec145fea-d068-4401-9f6c-6903cf153b23"
|
||||
],
|
||||
"last_state_change": 1676389310.7315657,
|
||||
"state": 4,
|
||||
"task_spec": "any_task",
|
||||
"triggered": false,
|
||||
"workflow_name": "main",
|
||||
"internal_data": {
|
||||
"splits": 1,
|
||||
"runtimes": 1
|
||||
},
|
||||
"data": {
|
||||
"item": 1
|
||||
}
|
||||
},
|
||||
"ec145fea-d068-4401-9f6c-6903cf153b23": {
|
||||
"id": "ec145fea-d068-4401-9f6c-6903cf153b23",
|
||||
"parent": "638ea876-beb2-4fd6-9dc3-5fd528d7cfb9",
|
||||
"children": [
|
||||
"eccb7e2f-4b23-4b75-b9fb-e3b3a335574f"
|
||||
],
|
||||
"last_state_change": 1676389310.7325432,
|
||||
"state": 1,
|
||||
"task_spec": "Gateway_for_any_task_end",
|
||||
"triggered": false,
|
||||
"workflow_name": "main",
|
||||
"internal_data": {},
|
||||
"data": {}
|
||||
},
|
||||
"eccb7e2f-4b23-4b75-b9fb-e3b3a335574f": {
|
||||
"id": "eccb7e2f-4b23-4b75-b9fb-e3b3a335574f",
|
||||
"parent": "ec145fea-d068-4401-9f6c-6903cf153b23",
|
||||
"children": [],
|
||||
"last_state_change": 1676389310.732967,
|
||||
"state": 1,
|
||||
"task_spec": "Event_0a6d9t5",
|
||||
"triggered": false,
|
||||
"workflow_name": "main",
|
||||
"internal_data": {},
|
||||
"data": {}
|
||||
}
|
||||
},
|
||||
"root": "fa4b8656-22a2-467e-8fb0-9b1d8f1f6da6",
|
||||
"spec": {
|
||||
"name": "main",
|
||||
"description": "main",
|
||||
"file": "/home/essweine/work/sartography/code/SpiffWorkflow/tests/SpiffWorkflow/bpmn/data/diagram_1.bpmn",
|
||||
"task_specs": {
|
||||
"Start": {
|
||||
"id": "main_1",
|
||||
"name": "Start",
|
||||
"description": "",
|
||||
"manual": false,
|
||||
"internal": false,
|
||||
"lookahead": 2,
|
||||
"inputs": [],
|
||||
"outputs": [
|
||||
"StartEvent_1"
|
||||
],
|
||||
"typename": "StartTask"
|
||||
},
|
||||
"main.EndJoin": {
|
||||
"id": "main_2",
|
||||
"name": "main.EndJoin",
|
||||
"description": "",
|
||||
"manual": false,
|
||||
"internal": false,
|
||||
"lookahead": 2,
|
||||
"inputs": [
|
||||
"Event_0a6d9t5"
|
||||
],
|
||||
"outputs": [
|
||||
"End"
|
||||
],
|
||||
"typename": "_EndJoin"
|
||||
},
|
||||
"End": {
|
||||
"id": "main_3",
|
||||
"name": "End",
|
||||
"description": "",
|
||||
"manual": false,
|
||||
"internal": false,
|
||||
"lookahead": 2,
|
||||
"inputs": [
|
||||
"main.EndJoin"
|
||||
],
|
||||
"outputs": [],
|
||||
"typename": "Simple"
|
||||
},
|
||||
"StartEvent_1": {
|
||||
"id": "main_4",
|
||||
"name": "StartEvent_1",
|
||||
"description": null,
|
||||
"manual": false,
|
||||
"internal": false,
|
||||
"lookahead": 2,
|
||||
"inputs": [
|
||||
"Start"
|
||||
],
|
||||
"outputs": [
|
||||
"Gateway_for_any_task_start"
|
||||
],
|
||||
"lane": null,
|
||||
"documentation": null,
|
||||
"loopTask": false,
|
||||
"position": {
|
||||
"x": 179.0,
|
||||
"y": 99.0
|
||||
},
|
||||
"data_input_associations": [],
|
||||
"data_output_associations": [],
|
||||
"event_definition": {
|
||||
"internal": false,
|
||||
"external": false,
|
||||
"typename": "NoneEventDefinition"
|
||||
},
|
||||
"typename": "StartEvent",
|
||||
"extensions": {}
|
||||
},
|
||||
"any_task": {
|
||||
"id": "main_5",
|
||||
"name": "any_task",
|
||||
"description": "Any Task",
|
||||
"manual": false,
|
||||
"internal": false,
|
||||
"lookahead": 2,
|
||||
"inputs": [
|
||||
"StartEvent_1",
|
||||
"Gateway_for_any_task_start"
|
||||
],
|
||||
"outputs": [
|
||||
"Gateway_for_any_task_end"
|
||||
],
|
||||
"lane": null,
|
||||
"documentation": null,
|
||||
"loopTask": false,
|
||||
"position": {
|
||||
"x": 270.0,
|
||||
"y": 77.0
|
||||
},
|
||||
"data_input_associations": [],
|
||||
"data_output_associations": [],
|
||||
"typename": "NoneTask",
|
||||
"times": {
|
||||
"name": "input_data",
|
||||
"typename": "Attrib"
|
||||
},
|
||||
"elementVar": "item",
|
||||
"collection": {
|
||||
"name": "output_data",
|
||||
"typename": "Attrib"
|
||||
},
|
||||
"completioncondition": null,
|
||||
"prevtaskclass": "SpiffWorkflow.bpmn.specs.NoneTask.NoneTask",
|
||||
"isSequential": false,
|
||||
"expanded": 1,
|
||||
"extensions": {}
|
||||
},
|
||||
"Event_0a6d9t5": {
|
||||
"id": "main_6",
|
||||
"name": "Event_0a6d9t5",
|
||||
"description": null,
|
||||
"manual": false,
|
||||
"internal": false,
|
||||
"lookahead": 2,
|
||||
"inputs": [
|
||||
"any_task"
|
||||
],
|
||||
"outputs": [
|
||||
"main.EndJoin"
|
||||
],
|
||||
"lane": null,
|
||||
"documentation": null,
|
||||
"loopTask": false,
|
||||
"position": {
|
||||
"x": 432.0,
|
||||
"y": 99.0
|
||||
},
|
||||
"data_input_associations": [],
|
||||
"data_output_associations": [],
|
||||
"event_definition": {
|
||||
"internal": false,
|
||||
"external": false,
|
||||
"typename": "NoneEventDefinition"
|
||||
},
|
||||
"typename": "EndEvent",
|
||||
"extensions": {}
|
||||
},
|
||||
"Root": {
|
||||
"id": "main_7",
|
||||
"name": "Root",
|
||||
"description": "",
|
||||
"manual": false,
|
||||
"internal": false,
|
||||
"lookahead": 2,
|
||||
"inputs": [],
|
||||
"outputs": [],
|
||||
"typename": "Simple"
|
||||
},
|
||||
"Gateway_for_any_task_start": {
|
||||
"id": "main_8",
|
||||
"name": "Gateway_for_any_task_start",
|
||||
"description": "Begin Gateway",
|
||||
"manual": false,
|
||||
"internal": false,
|
||||
"lookahead": 2,
|
||||
"inputs": [
|
||||
"StartEvent_1"
|
||||
],
|
||||
"outputs": [
|
||||
"any_task"
|
||||
],
|
||||
"lane": null,
|
||||
"documentation": null,
|
||||
"loopTask": false,
|
||||
"position": {
|
||||
"x": 0,
|
||||
"y": 0
|
||||
},
|
||||
"data_input_associations": [],
|
||||
"data_output_associations": [],
|
||||
"split_task": null,
|
||||
"threshold": null,
|
||||
"cancel": false,
|
||||
"typename": "ParallelGateway"
|
||||
},
|
||||
"Gateway_for_any_task_end": {
|
||||
"id": "main_9",
|
||||
"name": "Gateway_for_any_task_end",
|
||||
"description": "End Gateway",
|
||||
"manual": false,
|
||||
"internal": false,
|
||||
"lookahead": 2,
|
||||
"inputs": [
|
||||
"any_task"
|
||||
],
|
||||
"outputs": [
|
||||
"Event_0a6d9t5"
|
||||
],
|
||||
"lane": null,
|
||||
"documentation": null,
|
||||
"loopTask": false,
|
||||
"position": {
|
||||
"x": 0,
|
||||
"y": 0
|
||||
},
|
||||
"data_input_associations": [],
|
||||
"data_output_associations": [],
|
||||
"split_task": null,
|
||||
"threshold": null,
|
||||
"cancel": false,
|
||||
"typename": "ParallelGateway"
|
||||
}
|
||||
},
|
||||
"data_inputs": [],
|
||||
"data_outputs": [],
|
||||
"data_objects": {},
|
||||
"correlation_keys": {},
|
||||
"typename": "BpmnProcessSpec"
|
||||
},
|
||||
"subprocess_specs": {},
|
||||
"subprocesses": {},
|
||||
"bpmn_messages": []
|
||||
}
|
|
@ -0,0 +1,41 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:modeler="http://camunda.org/schema/modeler/1.0" id="Definitions_0zetnjn" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="4.11.1" modeler:executionPlatform="Camunda Platform" modeler:executionPlatformVersion="7.15.0">
|
||||
<bpmn:process id="main" isExecutable="true">
|
||||
<bpmn:startEvent id="StartEvent_1">
|
||||
<bpmn:outgoing>Flow_0m77cxj</bpmn:outgoing>
|
||||
</bpmn:startEvent>
|
||||
<bpmn:task id="any_task" name="Any Task">
|
||||
<bpmn:incoming>Flow_0m77cxj</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_1jbp2el</bpmn:outgoing>
|
||||
<bpmn:standardLoopCharacteristics testBefore="true" loopMaximum="3">
|
||||
<bpmn:loopCondition>done</bpmn:loopCondition>
|
||||
</bpmn:standardLoopCharacteristics>
|
||||
</bpmn:task>
|
||||
<bpmn:sequenceFlow id="Flow_0m77cxj" sourceRef="StartEvent_1" targetRef="any_task" />
|
||||
<bpmn:endEvent id="Event_1xk7z3g">
|
||||
<bpmn:incoming>Flow_1jbp2el</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
<bpmn:sequenceFlow id="Flow_1jbp2el" sourceRef="any_task" targetRef="Event_1xk7z3g" />
|
||||
</bpmn:process>
|
||||
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
|
||||
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="main">
|
||||
<bpmndi:BPMNEdge id="Flow_1jbp2el_di" bpmnElement="Flow_1jbp2el">
|
||||
<di:waypoint x="370" y="117" />
|
||||
<di:waypoint x="432" y="117" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_0m77cxj_di" bpmnElement="Flow_0m77cxj">
|
||||
<di:waypoint x="215" y="117" />
|
||||
<di:waypoint x="270" y="117" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
|
||||
<dc:Bounds x="179" y="99" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_1jay5wu_di" bpmnElement="any_task">
|
||||
<dc:Bounds x="270" y="77" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Event_1xk7z3g_di" bpmnElement="Event_1xk7z3g">
|
||||
<dc:Bounds x="432" y="99" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
</bpmndi:BPMNPlane>
|
||||
</bpmndi:BPMNDiagram>
|
||||
</bpmn:definitions>
|
|
@ -0,0 +1,39 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:modeler="http://camunda.org/schema/modeler/1.0" id="Definitions_0zetnjn" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="4.11.1" modeler:executionPlatform="Camunda Platform" modeler:executionPlatformVersion="7.15.0">
|
||||
<bpmn:process id="main" isExecutable="true">
|
||||
<bpmn:startEvent id="StartEvent_1">
|
||||
<bpmn:outgoing>Flow_0m77cxj</bpmn:outgoing>
|
||||
</bpmn:startEvent>
|
||||
<bpmn:task id="any_task" name="Any Task">
|
||||
<bpmn:incoming>Flow_0m77cxj</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_1jbp2el</bpmn:outgoing>
|
||||
<bpmn:standardLoopCharacteristics />
|
||||
</bpmn:task>
|
||||
<bpmn:sequenceFlow id="Flow_0m77cxj" sourceRef="StartEvent_1" targetRef="any_task" />
|
||||
<bpmn:endEvent id="Event_1xk7z3g">
|
||||
<bpmn:incoming>Flow_1jbp2el</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
<bpmn:sequenceFlow id="Flow_1jbp2el" sourceRef="any_task" targetRef="Event_1xk7z3g" />
|
||||
</bpmn:process>
|
||||
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
|
||||
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="main">
|
||||
<bpmndi:BPMNEdge id="Flow_1jbp2el_di" bpmnElement="Flow_1jbp2el">
|
||||
<di:waypoint x="370" y="117" />
|
||||
<di:waypoint x="432" y="117" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_0m77cxj_di" bpmnElement="Flow_0m77cxj">
|
||||
<di:waypoint x="215" y="117" />
|
||||
<di:waypoint x="270" y="117" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
|
||||
<dc:Bounds x="179" y="99" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_1jay5wu_di" bpmnElement="any_task">
|
||||
<dc:Bounds x="270" y="77" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Event_1xk7z3g_di" bpmnElement="Event_1xk7z3g">
|
||||
<dc:Bounds x="432" y="99" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
</bpmndi:BPMNPlane>
|
||||
</bpmndi:BPMNDiagram>
|
||||
</bpmn:definitions>
|
|
@ -1,129 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" id="Definitions_015ooho" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="4.10.0">
|
||||
<bpmn:process id="Process_1l85e0n" name="ScriptTest" isExecutable="true">
|
||||
<bpmn:startEvent id="StartEvent_1">
|
||||
<bpmn:outgoing>Flow_0dsbqk4</bpmn:outgoing>
|
||||
</bpmn:startEvent>
|
||||
<bpmn:sequenceFlow id="Flow_0dsbqk4" sourceRef="StartEvent_1" targetRef="Activity_0umlasr" />
|
||||
<bpmn:endEvent id="Event_12boxg0">
|
||||
<bpmn:incoming>Flow_18e9qgr</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
<bpmn:subProcess id="MyOuterSubProcess" name="MyOuterSubProcess">
|
||||
<bpmn:incoming>Flow_1ona7kk</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_18e9qgr</bpmn:outgoing>
|
||||
<bpmn:multiInstanceLoopCharacteristics isSequential="true" camunda:collection="my_collection" camunda:elementVariable="my_var" />
|
||||
<bpmn:endEvent id="outer_end" name="outer_end">
|
||||
<bpmn:incoming>Flow_05tjul5</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
<bpmn:startEvent id="outer_start" name="outer_start">
|
||||
<bpmn:outgoing>Flow_1pc1vib</bpmn:outgoing>
|
||||
</bpmn:startEvent>
|
||||
<bpmn:subProcess id="MyInnerSubProcess" name="MyInnerSubProcess">
|
||||
<bpmn:incoming>Flow_1pc1vib</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_05tjul5</bpmn:outgoing>
|
||||
<bpmn:startEvent id="inner_start" name="inner_start">
|
||||
<bpmn:outgoing>Flow_0hikak1</bpmn:outgoing>
|
||||
</bpmn:startEvent>
|
||||
<bpmn:scriptTask id="SubProcessScript" name="SubProcessScript">
|
||||
<bpmn:incoming>Flow_0hikak1</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_0oby5rd</bpmn:outgoing>
|
||||
<bpmn:script>my_var['new_info'] = "Adding this!"
|
||||
my_var['name'] = my_var['name'] + "_edit"
|
||||
</bpmn:script>
|
||||
</bpmn:scriptTask>
|
||||
<bpmn:sequenceFlow id="Flow_0hikak1" sourceRef="inner_start" targetRef="SubProcessScript" />
|
||||
<bpmn:endEvent id="inner_end" name="inner_end">
|
||||
<bpmn:incoming>Flow_0oby5rd</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
<bpmn:sequenceFlow id="Flow_0oby5rd" sourceRef="SubProcessScript" targetRef="inner_end" />
|
||||
</bpmn:subProcess>
|
||||
<bpmn:sequenceFlow id="Flow_1pc1vib" sourceRef="outer_start" targetRef="MyInnerSubProcess" />
|
||||
<bpmn:sequenceFlow id="Flow_05tjul5" sourceRef="MyInnerSubProcess" targetRef="outer_end" />
|
||||
</bpmn:subProcess>
|
||||
<bpmn:sequenceFlow id="Flow_18e9qgr" sourceRef="MyOuterSubProcess" targetRef="Event_12boxg0" />
|
||||
<bpmn:sequenceFlow id="Flow_1ona7kk" sourceRef="Activity_0umlasr" targetRef="MyOuterSubProcess" />
|
||||
<bpmn:scriptTask id="Activity_0umlasr" name="init">
|
||||
<bpmn:incoming>Flow_0dsbqk4</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_1ona7kk</bpmn:outgoing>
|
||||
<bpmn:script>my_collection = {
|
||||
'a':{'name':'Apple'},
|
||||
'b':{'name':'Bubble'},
|
||||
'c':{'name':'Crap, I should write better code'}
|
||||
}</bpmn:script>
|
||||
</bpmn:scriptTask>
|
||||
</bpmn:process>
|
||||
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
|
||||
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_1l85e0n">
|
||||
<bpmndi:BPMNEdge id="Flow_0dsbqk4_di" bpmnElement="Flow_0dsbqk4">
|
||||
<di:waypoint x="215" y="177" />
|
||||
<di:waypoint x="250" y="177" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_18e9qgr_di" bpmnElement="Flow_18e9qgr">
|
||||
<di:waypoint x="1110" y="177" />
|
||||
<di:waypoint x="1182" y="177" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_1ona7kk_di" bpmnElement="Flow_1ona7kk">
|
||||
<di:waypoint x="350" y="177" />
|
||||
<di:waypoint x="430" y="177" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
|
||||
<dc:Bounds x="179" y="159" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_18x5yaj_di" bpmnElement="Activity_0umlasr">
|
||||
<dc:Bounds x="250" y="137" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Event_12boxg0_di" bpmnElement="Event_12boxg0">
|
||||
<dc:Bounds x="1182" y="159" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_16u5jzz_di" bpmnElement="MyOuterSubProcess" isExpanded="true">
|
||||
<dc:Bounds x="430" y="77" width="680" height="283" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNEdge id="Flow_1pc1vib_di" bpmnElement="Flow_1pc1vib">
|
||||
<di:waypoint x="518" y="177" />
|
||||
<di:waypoint x="600" y="177" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_05tjul5_di" bpmnElement="Flow_05tjul5">
|
||||
<di:waypoint x="950" y="177" />
|
||||
<di:waypoint x="1002" y="177" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNShape id="Event_1u4mcv3_di" bpmnElement="outer_start">
|
||||
<dc:Bounds x="482" y="159" width="36" height="36" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="474" y="202" width="53" height="14" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Event_0y42ecd_di" bpmnElement="outer_end">
|
||||
<dc:Bounds x="1002" y="159" width="36" height="36" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="996" y="202" width="50" height="14" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_0f3yfji_di" bpmnElement="MyInnerSubProcess" isExpanded="true">
|
||||
<dc:Bounds x="600" y="120" width="350" height="200" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNEdge id="Flow_0hikak1_di" bpmnElement="Flow_0hikak1">
|
||||
<di:waypoint x="658" y="220" />
|
||||
<di:waypoint x="730" y="220" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_0oby5rd_di" bpmnElement="Flow_0oby5rd">
|
||||
<di:waypoint x="830" y="220" />
|
||||
<di:waypoint x="892" y="220" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNShape id="Activity_1v1rg9x_di" bpmnElement="SubProcessScript">
|
||||
<dc:Bounds x="730" y="180" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Event_0tdns2c_di" bpmnElement="inner_end">
|
||||
<dc:Bounds x="892" y="202" width="36" height="36" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="886" y="245" width="49" height="14" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Event_0557238_di" bpmnElement="inner_start">
|
||||
<dc:Bounds x="622" y="202" width="36" height="36" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="614" y="245" width="52" height="14" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNShape>
|
||||
</bpmndi:BPMNPlane>
|
||||
</bpmndi:BPMNDiagram>
|
||||
</bpmn:definitions>
|
|
@ -1,93 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" id="Definitions_015ooho" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="4.10.0">
|
||||
<bpmn:process id="Process_1l85e0n" name="ScriptTest" isExecutable="true">
|
||||
<bpmn:startEvent id="StartEvent_1">
|
||||
<bpmn:outgoing>Flow_0dsbqk4</bpmn:outgoing>
|
||||
</bpmn:startEvent>
|
||||
<bpmn:sequenceFlow id="Flow_0dsbqk4" sourceRef="StartEvent_1" targetRef="Activity_0umlasr" />
|
||||
<bpmn:endEvent id="Event_12boxg0">
|
||||
<bpmn:incoming>Flow_18e9qgr</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
<bpmn:subProcess id="MySubProcess" name="MySubProcess">
|
||||
<bpmn:incoming>Flow_1ona7kk</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_18e9qgr</bpmn:outgoing>
|
||||
<bpmn:multiInstanceLoopCharacteristics isSequential="true" camunda:collection="my_collection" camunda:elementVariable="my_var" />
|
||||
<bpmn:scriptTask id="SubProcessScript" name="SubProcessScript">
|
||||
<bpmn:incoming>Flow_14l2ton</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_06gypww</bpmn:outgoing>
|
||||
<bpmn:script>my_var['new_info'] = "Adding this!"
|
||||
my_var['name'] = my_var['name'] + "_edit"</bpmn:script>
|
||||
</bpmn:scriptTask>
|
||||
<bpmn:endEvent id="MySubProcessEnd" name="MySubProcessEnd">
|
||||
<bpmn:incoming>Flow_06gypww</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
<bpmn:startEvent id="MySubProcessStart" name="MySubProcessStart">
|
||||
<bpmn:outgoing>Flow_14l2ton</bpmn:outgoing>
|
||||
</bpmn:startEvent>
|
||||
<bpmn:sequenceFlow id="Flow_06gypww" sourceRef="SubProcessScript" targetRef="MySubProcessEnd" />
|
||||
<bpmn:sequenceFlow id="Flow_14l2ton" sourceRef="MySubProcessStart" targetRef="SubProcessScript" />
|
||||
</bpmn:subProcess>
|
||||
<bpmn:sequenceFlow id="Flow_18e9qgr" sourceRef="MySubProcess" targetRef="Event_12boxg0" />
|
||||
<bpmn:sequenceFlow id="Flow_1ona7kk" sourceRef="Activity_0umlasr" targetRef="MySubProcess" />
|
||||
<bpmn:scriptTask id="Activity_0umlasr" name="init">
|
||||
<bpmn:incoming>Flow_0dsbqk4</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_1ona7kk</bpmn:outgoing>
|
||||
<bpmn:script>my_collection = {
|
||||
'a':{'name':'Apple'},
|
||||
'b':{'name':'Bubble'},
|
||||
'c':{'name':'Crap, I should write better code'}
|
||||
}</bpmn:script>
|
||||
</bpmn:scriptTask>
|
||||
</bpmn:process>
|
||||
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
|
||||
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_1l85e0n">
|
||||
<bpmndi:BPMNEdge id="Flow_1ona7kk_di" bpmnElement="Flow_1ona7kk">
|
||||
<di:waypoint x="350" y="177" />
|
||||
<di:waypoint x="430" y="177" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_18e9qgr_di" bpmnElement="Flow_18e9qgr">
|
||||
<di:waypoint x="940" y="177" />
|
||||
<di:waypoint x="1032" y="177" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_0dsbqk4_di" bpmnElement="Flow_0dsbqk4">
|
||||
<di:waypoint x="215" y="177" />
|
||||
<di:waypoint x="250" y="177" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
|
||||
<dc:Bounds x="179" y="159" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Event_12boxg0_di" bpmnElement="Event_12boxg0">
|
||||
<dc:Bounds x="1032" y="159" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_16u5jzz_di" bpmnElement="MySubProcess" isExpanded="true">
|
||||
<dc:Bounds x="430" y="77" width="510" height="200" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNEdge id="Flow_14l2ton_di" bpmnElement="Flow_14l2ton">
|
||||
<di:waypoint x="518" y="177" />
|
||||
<di:waypoint x="640" y="177" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_06gypww_di" bpmnElement="Flow_06gypww">
|
||||
<di:waypoint x="740" y="177" />
|
||||
<di:waypoint x="862" y="177" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNShape id="Activity_1v1rg9x_di" bpmnElement="SubProcessScript">
|
||||
<dc:Bounds x="640" y="137" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Event_0y42ecd_di" bpmnElement="MySubProcessEnd">
|
||||
<dc:Bounds x="862" y="159" width="36" height="36" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="839" y="202" width="82" height="27" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Event_1u4mcv3_di" bpmnElement="MySubProcessStart">
|
||||
<dc:Bounds x="482" y="159" width="36" height="36" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="458" y="202" width="85" height="27" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_18x5yaj_di" bpmnElement="Activity_0umlasr">
|
||||
<dc:Bounds x="250" y="137" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
</bpmndi:BPMNPlane>
|
||||
</bpmndi:BPMNDiagram>
|
||||
</bpmn:definitions>
|
|
@ -1,59 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" id="Definitions_015ooho" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.7.0">
|
||||
<bpmn:process id="Process_1l85e0n" name="ScriptTest" isExecutable="true">
|
||||
<bpmn:startEvent id="StartEvent_1">
|
||||
<bpmn:outgoing>Flow_0dsbqk4</bpmn:outgoing>
|
||||
</bpmn:startEvent>
|
||||
<bpmn:sequenceFlow id="Flow_0dsbqk4" sourceRef="StartEvent_1" targetRef="Activity_16giml8" />
|
||||
<bpmn:endEvent id="Event_12boxg0">
|
||||
<bpmn:incoming>Flow_1lbqsop</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
<bpmn:scriptTask id="Activity_1kkxlz7" name="Second Script">
|
||||
<bpmn:incoming>Flow_0n1o8w6</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_1lbqsop</bpmn:outgoing>
|
||||
<bpmn:multiInstanceLoopCharacteristics isSequential="true" camunda:collection="coll" camunda:elementVariable="a">
|
||||
<bpmn:loopCardinality xsi:type="bpmn:tFormalExpression">5</bpmn:loopCardinality>
|
||||
<bpmn:completionCondition xsi:type="bpmn:tFormalExpression">done==True</bpmn:completionCondition>
|
||||
</bpmn:multiInstanceLoopCharacteristics>
|
||||
<bpmn:script>x = {'a':a}
|
||||
if a==3:
|
||||
done=True
|
||||
a=x</bpmn:script>
|
||||
</bpmn:scriptTask>
|
||||
<bpmn:sequenceFlow id="Flow_1lbqsop" sourceRef="Activity_1kkxlz7" targetRef="Event_12boxg0" />
|
||||
<bpmn:sequenceFlow id="Flow_0n1o8w6" sourceRef="Activity_16giml8" targetRef="Activity_1kkxlz7" />
|
||||
<bpmn:scriptTask id="Activity_16giml8" name="init">
|
||||
<bpmn:incoming>Flow_0dsbqk4</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_0n1o8w6</bpmn:outgoing>
|
||||
<bpmn:script>done=False</bpmn:script>
|
||||
</bpmn:scriptTask>
|
||||
</bpmn:process>
|
||||
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
|
||||
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_1l85e0n">
|
||||
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
|
||||
<dc:Bounds x="152" y="109" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNEdge id="Flow_0dsbqk4_di" bpmnElement="Flow_0dsbqk4">
|
||||
<di:waypoint x="188" y="127" />
|
||||
<di:waypoint x="250" y="127" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNShape id="Event_12boxg0_di" bpmnElement="Event_12boxg0">
|
||||
<dc:Bounds x="632" y="109" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_1v1rg9x_di" bpmnElement="Activity_1kkxlz7">
|
||||
<dc:Bounds x="440" y="87" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNEdge id="Flow_1lbqsop_di" bpmnElement="Flow_1lbqsop">
|
||||
<di:waypoint x="540" y="127" />
|
||||
<di:waypoint x="632" y="127" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_0n1o8w6_di" bpmnElement="Flow_0n1o8w6">
|
||||
<di:waypoint x="350" y="127" />
|
||||
<di:waypoint x="440" y="127" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNShape id="Activity_0fx0yfe_di" bpmnElement="Activity_16giml8">
|
||||
<dc:Bounds x="250" y="87" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
</bpmndi:BPMNPlane>
|
||||
</bpmndi:BPMNDiagram>
|
||||
</bpmn:definitions>
|
|
@ -1,93 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" id="Definitions_015ooho" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="4.10.0">
|
||||
<bpmn:process id="Process_1l85e0n" name="ScriptTest" isExecutable="true">
|
||||
<bpmn:startEvent id="StartEvent_1">
|
||||
<bpmn:outgoing>Flow_0dsbqk4</bpmn:outgoing>
|
||||
</bpmn:startEvent>
|
||||
<bpmn:sequenceFlow id="Flow_0dsbqk4" sourceRef="StartEvent_1" targetRef="Activity_0umlasr" />
|
||||
<bpmn:endEvent id="Event_12boxg0">
|
||||
<bpmn:incoming>Flow_18e9qgr</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
<bpmn:subProcess id="MySubProcess" name="MySubProcess">
|
||||
<bpmn:incoming>Flow_1ona7kk</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_18e9qgr</bpmn:outgoing>
|
||||
<bpmn:multiInstanceLoopCharacteristics camunda:collection="my_collection" camunda:elementVariable="my_var" />
|
||||
<bpmn:scriptTask id="SubProcessScript" name="SubProcessScript">
|
||||
<bpmn:incoming>Flow_14l2ton</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_06gypww</bpmn:outgoing>
|
||||
<bpmn:script>my_var['new_info'] = "Adding this!"
|
||||
my_var['name'] = my_var['name'] + "_edit"</bpmn:script>
|
||||
</bpmn:scriptTask>
|
||||
<bpmn:endEvent id="MySubProcessEnd" name="MySubProcessEnd">
|
||||
<bpmn:incoming>Flow_06gypww</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
<bpmn:startEvent id="MySubProcessStart" name="MySubProcessStart">
|
||||
<bpmn:outgoing>Flow_14l2ton</bpmn:outgoing>
|
||||
</bpmn:startEvent>
|
||||
<bpmn:sequenceFlow id="Flow_06gypww" sourceRef="SubProcessScript" targetRef="MySubProcessEnd" />
|
||||
<bpmn:sequenceFlow id="Flow_14l2ton" sourceRef="MySubProcessStart" targetRef="SubProcessScript" />
|
||||
</bpmn:subProcess>
|
||||
<bpmn:sequenceFlow id="Flow_18e9qgr" sourceRef="MySubProcess" targetRef="Event_12boxg0" />
|
||||
<bpmn:sequenceFlow id="Flow_1ona7kk" sourceRef="Activity_0umlasr" targetRef="MySubProcess" />
|
||||
<bpmn:scriptTask id="Activity_0umlasr" name="init">
|
||||
<bpmn:incoming>Flow_0dsbqk4</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_1ona7kk</bpmn:outgoing>
|
||||
<bpmn:script>my_collection = {
|
||||
'a':{'name':'Apple'},
|
||||
'b':{'name':'Bubble'},
|
||||
'c':{'name':'Crap, I should write better code'}
|
||||
}</bpmn:script>
|
||||
</bpmn:scriptTask>
|
||||
</bpmn:process>
|
||||
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
|
||||
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_1l85e0n">
|
||||
<bpmndi:BPMNEdge id="Flow_1ona7kk_di" bpmnElement="Flow_1ona7kk">
|
||||
<di:waypoint x="350" y="177" />
|
||||
<di:waypoint x="430" y="177" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_18e9qgr_di" bpmnElement="Flow_18e9qgr">
|
||||
<di:waypoint x="940" y="177" />
|
||||
<di:waypoint x="1032" y="177" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_0dsbqk4_di" bpmnElement="Flow_0dsbqk4">
|
||||
<di:waypoint x="215" y="177" />
|
||||
<di:waypoint x="250" y="177" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
|
||||
<dc:Bounds x="179" y="159" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Event_12boxg0_di" bpmnElement="Event_12boxg0">
|
||||
<dc:Bounds x="1032" y="159" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_16u5jzz_di" bpmnElement="MySubProcess" isExpanded="true">
|
||||
<dc:Bounds x="430" y="77" width="510" height="200" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNEdge id="Flow_14l2ton_di" bpmnElement="Flow_14l2ton">
|
||||
<di:waypoint x="518" y="177" />
|
||||
<di:waypoint x="640" y="177" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_06gypww_di" bpmnElement="Flow_06gypww">
|
||||
<di:waypoint x="740" y="177" />
|
||||
<di:waypoint x="862" y="177" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNShape id="Activity_1v1rg9x_di" bpmnElement="SubProcessScript">
|
||||
<dc:Bounds x="640" y="137" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Event_0y42ecd_di" bpmnElement="MySubProcessEnd">
|
||||
<dc:Bounds x="862" y="159" width="36" height="36" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="839" y="202" width="82" height="27" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Event_1u4mcv3_di" bpmnElement="MySubProcessStart">
|
||||
<dc:Bounds x="482" y="159" width="36" height="36" />
|
||||
<bpmndi:BPMNLabel>
|
||||
<dc:Bounds x="458" y="202" width="85" height="27" />
|
||||
</bpmndi:BPMNLabel>
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_18x5yaj_di" bpmnElement="Activity_0umlasr">
|
||||
<dc:Bounds x="250" y="137" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
</bpmndi:BPMNPlane>
|
||||
</bpmndi:BPMNDiagram>
|
||||
</bpmn:definitions>
|
|
@ -1,79 +0,0 @@
|
|||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:modeler="http://camunda.org/schema/modeler/1.0" id="Definitions_0r1c9o8" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="4.11.1" modeler:executionPlatform="Camunda Platform" modeler:executionPlatformVersion="7.15.0">
|
||||
<bpmn:collaboration id="Collaboration_1i7mjwg">
|
||||
<bpmn:participant id="Participant_0up2p6u" name="Participant 1" processRef="Proc_1" />
|
||||
<bpmn:participant id="Participant_0jlaump" name="Participant 2" processRef="Proc_2" />
|
||||
</bpmn:collaboration>
|
||||
<bpmn:process id="Proc_1" name="Process 1" isExecutable="true">
|
||||
<bpmn:startEvent id="StartEvent_1">
|
||||
<bpmn:outgoing>Flow_1fumg40</bpmn:outgoing>
|
||||
</bpmn:startEvent>
|
||||
<bpmn:sequenceFlow id="Flow_1fumg40" sourceRef="StartEvent_1" targetRef="Activity_15qpnpw" />
|
||||
<bpmn:endEvent id="Event_192zvak">
|
||||
<bpmn:incoming>Flow_1sfcxwo</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
<bpmn:sequenceFlow id="Flow_1sfcxwo" sourceRef="Activity_15qpnpw" targetRef="Event_192zvak" />
|
||||
<bpmn:userTask id="Activity_15qpnpw" name="Process 1 Task">
|
||||
<bpmn:incoming>Flow_1fumg40</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_1sfcxwo</bpmn:outgoing>
|
||||
</bpmn:userTask>
|
||||
</bpmn:process>
|
||||
<bpmn:process id="Proc_2" isExecutable="true">
|
||||
<bpmn:startEvent id="Event_03ne9sv">
|
||||
<bpmn:outgoing>Flow_0ptjvq1</bpmn:outgoing>
|
||||
</bpmn:startEvent>
|
||||
<bpmn:sequenceFlow id="Flow_0ptjvq1" sourceRef="Event_03ne9sv" targetRef="Activity_15qii7z" />
|
||||
<bpmn:endEvent id="Event_10ar29a">
|
||||
<bpmn:incoming>Flow_12xe6lg</bpmn:incoming>
|
||||
</bpmn:endEvent>
|
||||
<bpmn:sequenceFlow id="Flow_12xe6lg" sourceRef="Activity_15qii7z" targetRef="Event_10ar29a" />
|
||||
<bpmn:userTask id="Activity_15qii7z" name="Process 2 Task">
|
||||
<bpmn:incoming>Flow_0ptjvq1</bpmn:incoming>
|
||||
<bpmn:outgoing>Flow_12xe6lg</bpmn:outgoing>
|
||||
</bpmn:userTask>
|
||||
</bpmn:process>
|
||||
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
|
||||
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Collaboration_1i7mjwg">
|
||||
<bpmndi:BPMNShape id="Participant_0up2p6u_di" bpmnElement="Participant_0up2p6u" isHorizontal="true">
|
||||
<dc:Bounds x="120" y="52" width="400" height="250" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNEdge id="Flow_1fumg40_di" bpmnElement="Flow_1fumg40">
|
||||
<di:waypoint x="215" y="177" />
|
||||
<di:waypoint x="270" y="177" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_1sfcxwo_di" bpmnElement="Flow_1sfcxwo">
|
||||
<di:waypoint x="370" y="177" />
|
||||
<di:waypoint x="432" y="177" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
|
||||
<dc:Bounds x="179" y="159" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Event_192zvak_di" bpmnElement="Event_192zvak">
|
||||
<dc:Bounds x="432" y="159" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_1loi9tx_di" bpmnElement="Activity_15qpnpw">
|
||||
<dc:Bounds x="270" y="137" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Participant_0jlaump_di" bpmnElement="Participant_0jlaump" isHorizontal="true">
|
||||
<dc:Bounds x="120" y="340" width="400" height="250" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNEdge id="Flow_0ptjvq1_di" bpmnElement="Flow_0ptjvq1">
|
||||
<di:waypoint x="218" y="470" />
|
||||
<di:waypoint x="270" y="470" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNEdge id="Flow_12xe6lg_di" bpmnElement="Flow_12xe6lg">
|
||||
<di:waypoint x="370" y="470" />
|
||||
<di:waypoint x="422" y="470" />
|
||||
</bpmndi:BPMNEdge>
|
||||
<bpmndi:BPMNShape id="Event_03ne9sv_di" bpmnElement="Event_03ne9sv">
|
||||
<dc:Bounds x="182" y="452" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Event_10ar29a_di" bpmnElement="Event_10ar29a">
|
||||
<dc:Bounds x="422" y="452" width="36" height="36" />
|
||||
</bpmndi:BPMNShape>
|
||||
<bpmndi:BPMNShape id="Activity_0xvun11_di" bpmnElement="Activity_15qii7z">
|
||||
<dc:Bounds x="270" y="430" width="100" height="80" />
|
||||
</bpmndi:BPMNShape>
|
||||
</bpmndi:BPMNPlane>
|
||||
</bpmndi:BPMNDiagram>
|
||||
</bpmn:definitions>
|
|
@ -8,7 +8,7 @@ from SpiffWorkflow.task import TaskState
|
|||
|
||||
from ..BpmnWorkflowTestCase import BpmnWorkflowTestCase
|
||||
|
||||
class EventBsedGatewayTest(BpmnWorkflowTestCase):
|
||||
class EventBasedGatewayTest(BpmnWorkflowTestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.spec, self.subprocesses = self.load_workflow_spec('event-gateway.bpmn', 'Process_0pvx19v')
|
||||
|
|
|
@ -29,17 +29,17 @@ class TransactionSubprocessTest(BpmnWorkflowTestCase):
|
|||
|
||||
# Check that workflow and next task completed
|
||||
subprocess = self.workflow.get_tasks_from_spec_name('Subprocess')[0]
|
||||
self.assertEqual(subprocess.get_state(), TaskState.COMPLETED)
|
||||
self.assertEqual(subprocess.state, TaskState.COMPLETED)
|
||||
print_task = self.workflow.get_tasks_from_spec_name("Activity_Print_Data")[0]
|
||||
self.assertEqual(print_task.get_state(), TaskState.COMPLETED)
|
||||
self.assertEqual(print_task.state, TaskState.COMPLETED)
|
||||
|
||||
# Check that the boundary events were cancelled
|
||||
cancel_task = self.workflow.get_tasks_from_spec_name("Catch_Cancel_Event")[0]
|
||||
self.assertEqual(cancel_task.get_state(), TaskState.CANCELLED)
|
||||
self.assertEqual(cancel_task.state, TaskState.CANCELLED)
|
||||
error_1_task = self.workflow.get_tasks_from_spec_name("Catch_Error_1")[0]
|
||||
self.assertEqual(error_1_task.get_state(), TaskState.CANCELLED)
|
||||
self.assertEqual(error_1_task.state, TaskState.CANCELLED)
|
||||
error_none_task = self.workflow.get_tasks_from_spec_name("Catch_Error_None")[0]
|
||||
self.assertEqual(error_none_task.get_state(), TaskState.CANCELLED)
|
||||
self.assertEqual(error_none_task.state, TaskState.CANCELLED)
|
||||
|
||||
|
||||
def testSubworkflowCancelEvent(self):
|
||||
|
@ -56,13 +56,13 @@ class TransactionSubprocessTest(BpmnWorkflowTestCase):
|
|||
|
||||
# Check that we completed the Cancel Task
|
||||
cancel_task = self.workflow.get_tasks_from_spec_name("Cancel_Action")[0]
|
||||
self.assertEqual(cancel_task.get_state(), TaskState.COMPLETED)
|
||||
self.assertEqual(cancel_task.state, TaskState.COMPLETED)
|
||||
|
||||
# And cancelled the remaining tasks
|
||||
error_1_task = self.workflow.get_tasks_from_spec_name("Catch_Error_1")[0]
|
||||
self.assertEqual(error_1_task.get_state(), TaskState.CANCELLED)
|
||||
self.assertEqual(error_1_task.state, TaskState.CANCELLED)
|
||||
error_none_task = self.workflow.get_tasks_from_spec_name("Catch_Error_None")[0]
|
||||
self.assertEqual(error_none_task.get_state(), TaskState.CANCELLED)
|
||||
self.assertEqual(error_none_task.state, TaskState.CANCELLED)
|
||||
|
||||
# We should not have this task, as we followed the 'cancel branch'
|
||||
print_task = self.workflow.get_tasks_from_spec_name("Activity_Print_Data")
|
||||
|
@ -87,13 +87,13 @@ class TransactionSubprocessTest(BpmnWorkflowTestCase):
|
|||
|
||||
# The cancel boundary event should be cancelled
|
||||
cancel_task = self.workflow.get_tasks_from_spec_name("Catch_Cancel_Event")[0]
|
||||
self.assertEqual(cancel_task.get_state(), TaskState.CANCELLED)
|
||||
self.assertEqual(cancel_task.state, TaskState.CANCELLED)
|
||||
|
||||
# We should catch the None Error, but not Error 1
|
||||
error_none_task = self.workflow.get_tasks_from_spec_name("Catch_Error_None")[0]
|
||||
self.assertEqual(error_none_task.get_state(), TaskState.COMPLETED)
|
||||
self.assertEqual(error_none_task.state, TaskState.COMPLETED)
|
||||
error_1_task = self.workflow.get_tasks_from_spec_name("Catch_Error_1")[0]
|
||||
self.assertEqual(error_1_task.get_state(), TaskState.CANCELLED)
|
||||
self.assertEqual(error_1_task.state, TaskState.CANCELLED)
|
||||
|
||||
# Make sure this branch didn't get followed
|
||||
print_task = self.workflow.get_tasks_from_spec_name("Activity_Print_Data")
|
||||
|
@ -117,9 +117,9 @@ class TransactionSubprocessTest(BpmnWorkflowTestCase):
|
|||
|
||||
# Both boundary events should complete
|
||||
error_none_task = self.workflow.get_tasks_from_spec_name("Catch_Error_None")[0]
|
||||
self.assertEqual(error_none_task.get_state(), TaskState.COMPLETED)
|
||||
self.assertEqual(error_none_task.state, TaskState.COMPLETED)
|
||||
error_1_task = self.workflow.get_tasks_from_spec_name("Catch_Error_1")[0]
|
||||
self.assertEqual(error_1_task.get_state(), TaskState.COMPLETED)
|
||||
self.assertEqual(error_1_task.state, TaskState.COMPLETED)
|
||||
|
||||
print_task = self.workflow.get_tasks_from_spec_name("Activity_Print_Data")
|
||||
self.assertEqual(len(print_task), 0)
|
||||
|
|
|
@ -4,15 +4,16 @@ import time
|
|||
from SpiffWorkflow.task import TaskState
|
||||
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
|
||||
from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment
|
||||
from SpiffWorkflow.bpmn.serializer.migration.exceptions import VersionMigrationError
|
||||
|
||||
from .BaseTestCase import BaseTestCase
|
||||
|
||||
|
||||
class VersionMigrationTest(BaseTestCase):
|
||||
class Version_1_0_Test(BaseTestCase):
|
||||
|
||||
SERIALIZER_VERSION = "1.2"
|
||||
|
||||
def test_convert_1_0_to_1_1(self):
|
||||
def test_convert_subprocess(self):
|
||||
# The serialization used here comes from NestedSubprocessTest saved at line 25 with version 1.0
|
||||
fn = os.path.join(self.DATA_DIR, 'serialization', 'v1.0.json')
|
||||
wf = self.serializer.deserialize_json(open(fn).read())
|
||||
|
@ -23,10 +24,38 @@ class VersionMigrationTest(BaseTestCase):
|
|||
wf.do_engine_steps()
|
||||
self.assertEqual(True, wf.is_completed())
|
||||
|
||||
def test_convert_1_1_to_1_2(self):
|
||||
fn = os.path.join(self.DATA_DIR, 'serialization', 'v1-1.json')
|
||||
|
||||
class Version_1_1_Test(BaseTestCase):
|
||||
|
||||
def test_timers(self):
|
||||
fn = os.path.join(self.DATA_DIR, 'serialization', 'v1.1-timers.json')
|
||||
wf = self.serializer.deserialize_json(open(fn).read())
|
||||
wf.script_engine = PythonScriptEngine(environment=TaskDataEnvironment({"time": time}))
|
||||
wf.refresh_waiting_tasks()
|
||||
wf.do_engine_steps()
|
||||
self.assertTrue(wf.is_completed())
|
||||
|
||||
def test_convert_data_specs(self):
|
||||
fn = os.path.join(self.DATA_DIR, 'serialization', 'v1.1-data.json')
|
||||
wf = self.serializer.deserialize_json(open(fn).read())
|
||||
wf.do_engine_steps()
|
||||
self.assertTrue(wf.is_completed())
|
||||
|
||||
def test_convert_exclusive_gateway(self):
|
||||
fn = os.path.join(self.DATA_DIR, 'serialization', 'v1.1-gateways.json')
|
||||
wf = self.serializer.deserialize_json(open(fn).read())
|
||||
wf.do_engine_steps()
|
||||
task = wf.get_tasks_from_spec_name('Gateway_askQuestion')[0]
|
||||
self.assertEqual(len(task.task_spec.cond_task_specs), 2)
|
||||
ready_task = wf.get_ready_user_tasks()[0]
|
||||
ready_task.data['NeedClarification'] = 'Yes'
|
||||
ready_task.complete()
|
||||
wf.do_engine_steps()
|
||||
ready_task = wf.get_ready_user_tasks()[0]
|
||||
self.assertEqual(ready_task.task_spec.name, 'Activity_A2')
|
||||
|
||||
def test_check_multiinstance(self):
|
||||
fn = os.path.join(self.DATA_DIR, 'serialization', 'v1.1-multi.json')
|
||||
with self.assertRaises(VersionMigrationError) as ctx:
|
||||
wf = self.serializer.deserialize_json(open(fn).read())
|
||||
self.assertEqual(ctx.exception.message, "This workflow cannot be migrated because it contains MultiInstance Tasks")
|
|
@ -1,60 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
|
||||
import unittest
|
||||
|
||||
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
|
||||
|
||||
from tests.SpiffWorkflow.camunda.BaseTestCase import BaseTestCase
|
||||
|
||||
__author__ = 'matth'
|
||||
|
||||
class DefaultGatewayPMITest(BaseTestCase):
|
||||
"""The example bpmn diagram tests both a set cardinality from user input
|
||||
as well as looping over an existing array."""
|
||||
|
||||
def setUp(self):
|
||||
spec, subprocesses = self.load_workflow_spec('default_gateway_pmi.bpmn', 'DefaultGateway')
|
||||
self.workflow = BpmnWorkflow(spec, subprocesses)
|
||||
self.workflow.do_engine_steps()
|
||||
|
||||
def testRunThroughHappy(self):
|
||||
self.actual_test(False)
|
||||
|
||||
def testRunThroughSaveRestore(self):
|
||||
self.actual_test(True)
|
||||
|
||||
def actual_test(self, save_restore=False):
|
||||
|
||||
# Set initial array size to 3 in the first user form.
|
||||
task = self.workflow.get_ready_user_tasks()[0]
|
||||
self.assertEqual("DoStuff", task.task_spec.name)
|
||||
task.update_data({"morestuff": 'Yep'})
|
||||
self.workflow.complete_task_from_id(task.id)
|
||||
self.workflow.do_engine_steps()
|
||||
if save_restore: self.save_restore()
|
||||
|
||||
# Set the names of the 3 family members.
|
||||
for i in range(3):
|
||||
task = self.workflow.get_ready_user_tasks()[0]
|
||||
if i == 0:
|
||||
self.assertEqual("GetMoreStuff", task.task_spec.name)
|
||||
else:
|
||||
self.assertEqual("GetMoreStuff_%d"%(i-1), task.task_spec.name)
|
||||
|
||||
|
||||
task.update_data({"stuff.addstuff": "Stuff %d"%i})
|
||||
self.workflow.complete_task_from_id(task.id)
|
||||
if save_restore: self.save_restore()
|
||||
self.workflow.do_engine_steps()
|
||||
|
||||
if save_restore: self.save_restore()
|
||||
self.assertTrue(self.workflow.is_completed())
|
||||
|
||||
|
||||
|
||||
def suite():
|
||||
return unittest.TestLoader().loadTestsFromTestCase(DefaultGatewayPMITest)
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.TextTestRunner(verbosity=2).run(suite())
|
|
@ -1,68 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
|
||||
import unittest
|
||||
|
||||
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
|
||||
|
||||
from tests.SpiffWorkflow.camunda.BaseTestCase import BaseTestCase
|
||||
|
||||
__author__ = 'matth'
|
||||
|
||||
|
||||
class ExclusiveGatewayPMITest(BaseTestCase):
|
||||
"""The example bpmn diagram tests both a set cardinality from user input
|
||||
as well as looping over an existing array."""
|
||||
|
||||
def setUp(self):
|
||||
spec, subprocesses = self.load_workflow_spec('default_gateway_pmi.bpmn', 'DefaultGateway')
|
||||
self.workflow = BpmnWorkflow(spec, subprocesses)
|
||||
|
||||
def testRunThroughHappy(self):
|
||||
self.actual_test(False)
|
||||
|
||||
def testRunThroughSaveRestore(self):
|
||||
self.actual_test(True)
|
||||
|
||||
def testRunThroughHappyNo(self):
|
||||
self.actual_test(False,'No')
|
||||
|
||||
def testRunThroughSaveRestoreNo(self):
|
||||
self.actual_test(True,'No')
|
||||
|
||||
def actual_test(self, save_restore=False,response='Yes'):
|
||||
|
||||
self.workflow.do_engine_steps()
|
||||
|
||||
# Set initial array size to 3 in the first user form.
|
||||
task = self.workflow.get_ready_user_tasks()[0]
|
||||
self.assertEqual("DoStuff", task.task_spec.name)
|
||||
task.update_data({"morestuff": response})
|
||||
self.workflow.complete_task_from_id(task.id)
|
||||
self.workflow.do_engine_steps()
|
||||
if save_restore: self.save_restore()
|
||||
|
||||
# Set the names of the 3 family members.
|
||||
if response == 'Yes':
|
||||
for i in range(3):
|
||||
task = self.workflow.get_ready_user_tasks()[0]
|
||||
if i == 0:
|
||||
self.assertEqual("GetMoreStuff", task.task_spec.name)
|
||||
else:
|
||||
self.assertEqual("GetMoreStuff_%d"%(i-1), task.task_spec.name)
|
||||
|
||||
|
||||
task.update_data({"stuff.addstuff": "Stuff %d"%i})
|
||||
self.workflow.complete_task_from_id(task.id)
|
||||
if save_restore: self.save_restore()
|
||||
self.workflow.do_engine_steps()
|
||||
|
||||
if save_restore: self.save_restore()
|
||||
self.assertTrue(self.workflow.is_completed())
|
||||
|
||||
|
||||
|
||||
def suite():
|
||||
return unittest.TestLoader().loadTestsFromTestCase(ExclusiveGatewayPMITest)
|
||||
|
||||
if __name__ == '__main__':
|
||||
unittest.TextTestRunner(verbosity=2).run(suite())
|