Merge branch 'main' into feature/more_better_errors

Dan Funk 2023-02-03 13:21:48 -05:00 committed by GitHub
commit dd692cf671
125 changed files with 2772 additions and 4530 deletions

View File

@@ -266,8 +266,8 @@ class FeelLikeScriptEngine(PythonScriptEngine):
provide a specialised subclass that parses and executes the scripts /
expressions in a mini-language of your own.
"""
def __init__(self):
super().__init__()
def __init__(self, environment=None):
super().__init__(environment=environment)
def validate(self, expression):
super().validate(self.patch_expression(expression))
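With this change the FEEL-like engine simply forwards an optional environment to the base engine. A minimal usage sketch (the TaskDataEnvironment import path is inferred from the relative import shown in the next file, and the extra global is an illustrative assumption):

    from datetime import timedelta
    from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment

    # Hypothetical: expose timedelta to FEEL-like expressions through the new environment.
    engine = FeelLikeScriptEngine(environment=TaskDataEnvironment({'timedelta': timedelta}))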

View File

@@ -3,7 +3,9 @@ import ast
import copy
import sys
import traceback
import warnings
from .PythonScriptEngineEnvironment import TaskDataEnvironment
from ..exceptions import SpiffWorkflowException, WorkflowTaskException
from ..operators import Operator
@@ -26,66 +28,6 @@ from ..operators import Operator
# 02110-1301 USA
class Box(dict):
"""
Example:
m = Box({'first_name': 'Eduardo'}, last_name='Pool', age=24, sports=['Soccer'])
"""
def __init__(self, *args, **kwargs):
super(Box, self).__init__(*args, **kwargs)
for arg in args:
if isinstance(arg, dict):
for k, v in arg.items():
if isinstance(v, dict):
self[k] = Box(v)
else:
self[k] = v
if kwargs:
for k, v in kwargs.items():
if isinstance(v, dict):
self[k] = Box(v)
else:
self[k] = v
def __deepcopy__(self, memodict=None):
if memodict is None:
memodict = {}
my_copy = Box()
for k, v in self.items():
my_copy[k] = copy.deepcopy(v)
return my_copy
def __getattr__(self, attr):
try:
output = self[attr]
except:
raise AttributeError(
"Dictionary has no attribute '%s' " % str(attr))
return output
def __setattr__(self, key, value):
self.__setitem__(key, value)
def __setitem__(self, key, value):
super(Box, self).__setitem__(key, value)
self.__dict__.update({key: value})
def __getstate__(self):
return self.__dict__
def __setstate__(self, state):
self.__init__(state)
def __delattr__(self, item):
self.__delitem__(item)
def __delitem__(self, key):
super(Box, self).__delitem__(key)
del self.__dict__[key]
class PythonScriptEngine(object):
"""
This should serve as a base for all scripting & expression evaluation
@@ -97,10 +39,18 @@ class PythonScriptEngine(object):
expressions in a different way.
"""
def __init__(self, default_globals=None, scripting_additions=None):
self.globals = default_globals or {}
self.globals.update(scripting_additions or {})
def __init__(self, default_globals=None, scripting_additions=None, environment=None):
if default_globals is not None or scripting_additions is not None:
warnings.warn(f'default_globals and scripting_additions are deprecated. '
f'Please provide an environment such as TaskDataEnvironment',
DeprecationWarning, stacklevel=2)
if environment is None:
environment_globals = {}
environment_globals.update(default_globals or {})
environment_globals.update(scripting_additions or {})
self.environment = TaskDataEnvironment(environment_globals)
else:
self.environment = environment
self.error_tasks = {}
def validate(self, expression):
@@ -175,7 +125,7 @@ class PythonScriptEngine(object):
same name as a pre-defined script, rendering the script un-callable.
This results in a nearly indecipherable error. Better to fail
fast with a sensible error message."""
func_overwrites = set(self.globals).intersection(task.data)
func_overwrites = set(self.environment.globals).intersection(task.data)
func_overwrites.update(set(external_methods).intersection(task.data))
if len(func_overwrites) > 0:
msg = f"You have task data that overwrites a predefined " \
@@ -183,45 +133,8 @@ class PythonScriptEngine(object):
f"field name(s) to something else: {func_overwrites}"
raise WorkflowTaskException(msg, task=task)
def convert_to_box(self, data):
if isinstance(data, dict):
for key, value in data.items():
if not isinstance(value, Box):
data[key] = self.convert_to_box(value)
return Box(data)
if isinstance(data, list):
for idx, value in enumerate(data):
data[idx] = self.convert_to_box(value)
return data
return data
def _evaluate(self, expression, context, external_methods=None):
globals = copy.copy(self.globals) # else we pollute all later evals.
self.convert_to_box(context)
globals.update(external_methods or {})
globals.update(context)
return eval(expression, globals)
return self.environment.evaluate(expression, context, external_methods)
def _execute(self, script, context, external_methods=None):
my_globals = copy.copy(self.globals)
self.convert_to_box(context)
my_globals.update(external_methods or {})
context.update(my_globals)
try:
exec(script, context)
finally:
self.remove_globals_and_functions_from_context(context,
external_methods)
def remove_globals_and_functions_from_context(self, context,
external_methods=None):
"""When executing a script, don't leave the globals, functions
and external methods in the context that we have modified."""
for k in list(context):
if k == "__builtins__" or \
hasattr(context[k], '__call__') or \
k in self.globals or \
external_methods and k in external_methods:
context.pop(k)
self.environment.execute(script, context, external_methods)
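The net effect of this refactor is that evaluation and execution state now live in an environment object rather than in the engine itself. A hedged migration sketch (my_function is a placeholder for whatever callables were previously injected):

    # Deprecated style: still accepted, but now emits a DeprecationWarning.
    engine = PythonScriptEngine(scripting_additions={'my_function': my_function})

    # New style: wrap the same globals in a TaskDataEnvironment and pass it in.
    engine = PythonScriptEngine(environment=TaskDataEnvironment({'my_function': my_function}))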

View File

@@ -0,0 +1,122 @@
import copy
import warnings
class BasePythonScriptEngineEnvironment:
def __init__(self, environment_globals=None):
self.globals = environment_globals or {}
def evaluate(self, expression, context, external_methods=None):
raise NotImplementedError("Subclass must implement this method")
def execute(self, script, context, external_methods=None):
raise NotImplementedError("Subclass must implement this method")
class TaskDataEnvironment(BasePythonScriptEngineEnvironment):
def evaluate(self, expression, context, external_methods=None):
my_globals = copy.copy(self.globals) # else we pollute all later evals.
self._prepare_context(context)
my_globals.update(external_methods or {})
my_globals.update(context)
return eval(expression, my_globals)
def execute(self, script, context, external_methods=None):
my_globals = copy.copy(self.globals)
self._prepare_context(context)
my_globals.update(external_methods or {})
context.update(my_globals)
try:
exec(script, context)
finally:
self._remove_globals_and_functions_from_context(context, external_methods)
def _prepare_context(self, context):
pass
def _remove_globals_and_functions_from_context(self, context,
external_methods=None):
"""When executing a script, don't leave the globals, functions
and external methods in the context that we have modified."""
for k in list(context):
if k == "__builtins__" or \
hasattr(context[k], '__call__') or \
k in self.globals or \
external_methods and k in external_methods:
context.pop(k)
class Box(dict):
"""
Example:
m = Box({'first_name': 'Eduardo'}, last_name='Pool', age=24, sports=['Soccer'])
"""
def __init__(self, *args, **kwargs):
warnings.warn('The usage of Box has been deprecated.', DeprecationWarning, stacklevel=2)
super(Box, self).__init__(*args, **kwargs)
for arg in args:
if isinstance(arg, dict):
for k, v in arg.items():
if isinstance(v, dict):
self[k] = Box(v)
else:
self[k] = v
if kwargs:
for k, v in kwargs.items():
if isinstance(v, dict):
self[k] = Box(v)
else:
self[k] = v
def __deepcopy__(self, memodict=None):
if memodict is None:
memodict = {}
my_copy = Box()
for k, v in self.items():
my_copy[k] = copy.deepcopy(v)
return my_copy
def __getattr__(self, attr):
try:
output = self[attr]
except:
raise AttributeError(
"Dictionary has no attribute '%s' " % str(attr))
return output
def __setattr__(self, key, value):
self.__setitem__(key, value)
def __setitem__(self, key, value):
super(Box, self).__setitem__(key, value)
self.__dict__.update({key: value})
def __getstate__(self):
return self.__dict__
def __setstate__(self, state):
self.__init__(state)
def __delattr__(self, item):
self.__delitem__(item)
def __delitem__(self, key):
super(Box, self).__delitem__(key)
del self.__dict__[key]
@classmethod
def convert_to_box(cls, data):
if isinstance(data, dict):
for key, value in data.items():
if not isinstance(value, Box):
data[key] = cls.convert_to_box(value)
return Box(data)
if isinstance(data, list):
for idx, value in enumerate(data):
data[idx] = cls.convert_to_box(value)
return data
return data
class BoxedTaskDataEnvironment(TaskDataEnvironment):
def _prepare_context(self, context):
Box.convert_to_box(context)
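Box itself is deprecated, but BoxedTaskDataEnvironment keeps dot-notation access to task data available for existing workflows. A short opt-in sketch (PythonScriptEngine comes from the sibling module shown above):

    engine = PythonScriptEngine(environment=BoxedTaskDataEnvironment())
    # Scripts can then use attribute access such as customer.name instead of
    # customer['name'], at the cost of the DeprecationWarning emitted by Box.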

View File

@@ -179,11 +179,8 @@ class BpmnParser(object):
Add all filenames in the given list to the parser's set.
"""
for filename in filenames:
f = open(filename, 'r')
try:
with open(filename, 'r') as f:
self.add_bpmn_xml(etree.parse(f), filename=filename)
finally:
f.close()
def add_bpmn_xml(self, bpmn, filename=None):
"""

View File

@@ -1,348 +0,0 @@
from functools import partial
from uuid import UUID
from datetime import datetime, timedelta
from SpiffWorkflow.bpmn.specs.BpmnProcessSpec import BpmnDataSpecification
from .dictionary import DictionaryConverter
from ..specs.events.event_definitions import (
NoneEventDefinition,
MultipleEventDefinition,
SignalEventDefinition,
MessageEventDefinition,
CorrelationProperty,
TimeDateEventDefinition,
DurationTimerEventDefinition,
CycleTimerEventDefinition,
ErrorEventDefinition,
EscalationEventDefinition,
CancelEventDefinition,
TerminateEventDefinition,
NamedEventDefinition
)
from ..specs.BpmnSpecMixin import BpmnSpecMixin
from ...operators import Attrib, PathAttrib
class BpmnDataConverter(DictionaryConverter):
"""
The default converter for task and workflow data. It allows some commonly used python objects
to be converted to a form that can be serialized with JSON.
It also serves as a simple example for anyone who needs custom data serialization. If you have
custom objects or python objects not included here in your workflow/task data, then you should
replace or extend this with one that can handle the contents of your workflow.
"""
def __init__(self):
super().__init__()
self.register(UUID, lambda v: { 'value': str(v) }, lambda v: UUID(v['value']))
self.register(datetime, lambda v: { 'value': v.isoformat() }, lambda v: datetime.fromisoformat(v['value']))
self.register(timedelta, lambda v: { 'days': v.days, 'seconds': v.seconds }, lambda v: timedelta(**v))
def convert(self, obj):
self.clean(obj)
return super().convert(obj)
def clean(self, obj):
# This removes functions and other callables from task data.
# By default we don't want to serialize these
if isinstance(obj, dict):
items = [ (k, v) for k, v in obj.items() ]
for key, value in items:
if callable(value):
del obj[key]
class BpmnDataSpecificationConverter:
@staticmethod
def to_dict(data_spec):
return { 'name': data_spec.name, 'description': data_spec.description }
@staticmethod
def from_dict(dct):
return BpmnDataSpecification(**dct)
class BpmnTaskSpecConverter(DictionaryConverter):
"""
This is the base Task Spec Converter.
It contains methods for parsing generic and BPMN task spec attributes.
If you have extended any of the BPMN tasks with custom functionality, you'll need to
implement a converter for those task spec types. You'll need to implement the `to_dict` and
`from_dict` methods on any inheriting classes.
The default task spec converters are in `task_converters`; the `camunda` and `dmn`
serialization packages contain other examples.
"""
def __init__(self, spec_class, data_converter, typename=None):
"""The default task spec converter. This will generally be registered with a workflow
spec converter.
Task specs can contain arbitrary data, though none of the default BPMN tasks do. We
may remove this functionality in the future. Therefore, the data_converter can be
`None`; if this is the case, task spec attributes that can contain arbitrary data will be
ignored.
:param spec_class: the class defining the task type
:param data_converter: a converter for custom data (can be None)
:param typename: an optional typename for the object registration
"""
super().__init__()
self.spec_class = spec_class
self.data_converter = data_converter
self.typename = typename if typename is not None else spec_class.__name__
event_definitions = [
NoneEventDefinition,
CancelEventDefinition,
TerminateEventDefinition,
SignalEventDefinition,
MessageEventDefinition,
ErrorEventDefinition,
EscalationEventDefinition,
TimeDateEventDefinition,
DurationTimerEventDefinition,
CycleTimerEventDefinition,
MultipleEventDefinition
]
for event_definition in event_definitions:
self.register(
event_definition,
self.event_definition_to_dict,
partial(self.event_defintion_from_dict, event_definition)
)
self.register(Attrib, self.attrib_to_dict, partial(self.attrib_from_dict, Attrib))
self.register(PathAttrib, self.attrib_to_dict, partial(self.attrib_from_dict, PathAttrib))
self.register(BpmnDataSpecification, BpmnDataSpecificationConverter.to_dict, BpmnDataSpecificationConverter.from_dict)
def to_dict(self, spec):
"""
The convert method that will be called when a Task Spec Converter is registered with a
Workflow Spec Converter.
"""
raise NotImplementedError
def from_dict(self, dct):
"""
The restore method that will be called when a Task Spec Converter is registered with a
Workflow Spec Converter.
"""
raise NotImplementedError
def get_default_attributes(self, spec):
"""Extracts the default Spiff attributes from a task spec.
:param spec: the task spec to be converted
Returns:
a dictionary of standard task spec attributes
"""
dct = {
'id': spec.id,
'name': spec.name,
'description': spec.description,
'manual': spec.manual,
'internal': spec.internal,
'lookahead': spec.lookahead,
'inputs': [task.name for task in spec.inputs],
'outputs': [task.name for task in spec.outputs],
}
# This stuff is also all defined in the base task spec, but can contain data, so we need
# our data serializer. I think we should try to get this stuff out of the base task spec.
if self.data_converter is not None:
dct['data'] = self.data_converter.convert(spec.data)
dct['defines'] = self.data_converter.convert(spec.defines)
dct['pre_assign'] = self.data_converter.convert(spec.pre_assign)
dct['post_assign'] = self.data_converter.convert(spec.post_assign)
return dct
def get_bpmn_attributes(self, spec):
"""Extracts the attributes added by the `BpmnSpecMixin` class.
:param spec: the task spec to be converted
Returns:
a dictionary of BPMN task spec attributes
"""
return {
'lane': spec.lane,
'documentation': spec.documentation,
'loopTask': spec.loopTask,
'position': spec.position,
'data_input_associations': [ self.convert(obj) for obj in spec.data_input_associations ],
'data_output_associations': [ self.convert(obj) for obj in spec.data_output_associations ],
}
def get_join_attributes(self, spec):
"""Extracts attributes for task specs that inherit from `Join`.
:param spec: the task spec to be converted
Returns:
a dictionary of `Join` task spec attributes
"""
return {
'split_task': spec.split_task,
'threshold': spec.threshold,
'cancel': spec.cancel_remaining,
}
def get_subworkflow_attributes(self, spec):
"""Extracts attributes for task specs that inherit from `SubWorkflowTask`.
:param spec: the task spec to be converted
Returns:
a dictionary of subworkflow task spec attributes
"""
return {'spec': spec.spec}
def task_spec_from_dict(self, dct):
"""
Creates a task spec based on the supplied dictionary. It handles setting the default
task spec attributes as well as attributes added by `BpmnSpecMixin`.
:param dct: the dictionary to create the task spec from
Returns:
a restored task spec
"""
internal = dct.pop('internal')
inputs = dct.pop('inputs')
outputs = dct.pop('outputs')
spec = self.spec_class(**dct)
spec.internal = internal
spec.inputs = inputs
spec.outputs = outputs
spec.id = dct['id']
if self.data_converter is not None:
spec.data = self.data_converter.restore(dct.get('data', {}))
spec.defines = self.data_converter.restore(dct.get('defines', {}))
spec.pre_assign = self.data_converter.restore(dct.get('pre_assign', {}))
spec.post_assign = self.data_converter.restore(dct.get('post_assign', {}))
if isinstance(spec, BpmnSpecMixin):
spec.documentation = dct.pop('documentation', None)
spec.lane = dct.pop('lane', None)
spec.loopTask = dct.pop('loopTask', False)
spec.data_input_associations = self.restore(dct.pop('data_input_associations', []))
spec.data_output_associations = self.restore(dct.pop('data_output_associations', []))
return spec
def event_definition_to_dict(self, event_definition):
"""
Converts a BPMN event definition to a dict. It will not typically be called directly,
but via `convert` and will convert any event type supported by Spiff.
:param event_definition: the event_definition to be converted.
Returns:
a dictionary representation of an event definition
"""
dct = {'internal': event_definition.internal, 'external': event_definition.external}
if isinstance(event_definition, NamedEventDefinition):
dct['name'] = event_definition.name
if isinstance(event_definition, MessageEventDefinition):
dct['correlation_properties'] = [prop.__dict__ for prop in event_definition.correlation_properties]
if isinstance(event_definition, (TimeDateEventDefinition, DurationTimerEventDefinition, CycleTimerEventDefinition)):
dct['name'] = event_definition.name
dct['expression'] = event_definition.expression
if isinstance(event_definition, ErrorEventDefinition):
dct['error_code'] = event_definition.error_code
if isinstance(event_definition, EscalationEventDefinition):
dct['escalation_code'] = event_definition.escalation_code
if isinstance(event_definition, MultipleEventDefinition):
dct['event_definitions'] = [self.convert(e) for e in event_definition.event_definitions]
dct['parallel'] = event_definition.parallel
return dct
def event_defintion_from_dict(self, definition_class, dct):
"""Restores an event definition. It will not typically be called directly, but via
`restore` and will restore any BPMN event type supported by Spiff.
:param definition_class: the class that will be used to create the object
:param dct: the event definition attributes
Returns:
an `EventDefinition` object
"""
internal, external = dct.pop('internal'), dct.pop('external')
if 'correlation_properties' in dct:
dct['correlation_properties'] = [CorrelationProperty(**prop) for prop in dct['correlation_properties']]
if 'event_definitions' in dct:
dct['event_definitions'] = [self.restore(d) for d in dct['event_definitions']]
event_definition = definition_class(**dct)
event_definition.internal = internal
event_definition.external = external
return event_definition
def attrib_to_dict(self, attrib):
return { 'name': attrib.name }
def attrib_from_dict(self, attrib_class, dct):
return attrib_class(dct['name'])
class BpmnWorkflowSpecConverter(DictionaryConverter):
"""
This is the base converter for a BPMN workflow spec.
It will register converters for the task spec types contained in the workflow, as well as
the workflow spec class itself.
This class can be extended if you implement a custom workflow spec type. See the converter
in `workflow_spec_converter` for an example.
"""
def __init__(self, spec_class, task_spec_converters, data_converter=None):
"""
Converter for a BPMN workflow spec class.
The `to_dict` and `from_dict` methods of the given task spec converter classes will
be registered, so that they can be restored automatically.
The data_converter is applied to task *spec* data, not task data, and may be `None`. See
`BpmnTaskSpecConverter` for more discussion.
:param spec_class: the workflow spec class
:param task_spec_converters: a list of `BpmnTaskSpecConverter` classes
:param data_converter: an optional data converter
"""
super().__init__()
self.spec_class = spec_class
self.data_converter = data_converter
self.register(spec_class, self.to_dict, self.from_dict)
for converter in task_spec_converters:
self.register(converter.spec_class, converter.to_dict, converter.from_dict, converter.typename)
self.register(BpmnDataSpecification, BpmnDataSpecificationConverter.to_dict, BpmnDataSpecificationConverter.from_dict)
def to_dict(self, spec):
"""
The convert method that will be called when a Workflow Spec Converter is registered with a
Workflow Converter.
"""
raise NotImplementedError
def from_dict(self, dct):
"""
The restore method that will be called when a Workflow Spec Converter is registered with a
Workflow Converter.
"""
raise NotImplementedError

View File

@@ -0,0 +1,127 @@
from .helpers.spec import EventDefinitionConverter
from ..specs.events.event_definitions import (
CancelEventDefinition,
ErrorEventDefinition,
EscalationEventDefinition,
MessageEventDefinition,
NoneEventDefinition,
SignalEventDefinition,
TerminateEventDefinition,
TimeDateEventDefinition,
DurationTimerEventDefinition,
CycleTimerEventDefinition,
MultipleEventDefinition,
)
class CancelEventDefinitionConverter(EventDefinitionConverter):
def __init__(self, registry):
super().__init__(CancelEventDefinition, registry)
class ErrorEventDefinitionConverter(EventDefinitionConverter):
def __init__(self, registry):
super().__init__(ErrorEventDefinition, registry)
def to_dict(self, event_definition):
dct = super().to_dict(event_definition)
dct['error_code'] = event_definition.error_code
return dct
class EscalationEventDefinitionConverter(EventDefinitionConverter):
def __init__(self, registry):
super().__init__(EscalationEventDefinition, registry)
def to_dict(self, event_definition):
dct = super().to_dict(event_definition)
dct['escalation_code'] = event_definition.escalation_code
return dct
class MessageEventDefinitionConverter(EventDefinitionConverter):
def __init__(self, registry):
super().__init__(MessageEventDefinition, registry)
def to_dict(self, event_definition):
dct = super().to_dict(event_definition)
dct['correlation_properties'] = self.correlation_properties_to_dict(event_definition.correlation_properties)
return dct
def from_dict(self, dct):
dct['correlation_properties'] = self.correlation_properties_from_dict(dct['correlation_properties'])
event_definition = super().from_dict(dct)
return event_definition
class NoneEventDefinitionConverter(EventDefinitionConverter):
def __init__(self, registry):
super().__init__(NoneEventDefinition, registry)
class SignalEventDefinitionConverter(EventDefinitionConverter):
def __init__(self, registry):
super().__init__(SignalEventDefinition, registry)
class TerminateEventDefinitionConverter(EventDefinitionConverter):
def __init__(self, registry):
super().__init__(TerminateEventDefinition, registry)
class TimerEventDefinitionConverter(EventDefinitionConverter):
def to_dict(self, event_definition):
dct = super().to_dict(event_definition)
dct['expression'] = event_definition.expression
return dct
class TimeDateEventDefinitionConverter(TimerEventDefinitionConverter):
def __init__(self, registry):
super().__init__(TimeDateEventDefinition, registry)
class DurationTimerEventDefinitionConverter(TimerEventDefinitionConverter):
def __init__(self, registry):
super().__init__(DurationTimerEventDefinition, registry)
class CycleTimerEventDefinitionConverter(TimerEventDefinitionConverter):
def __init__(self, registry):
super().__init__(CycleTimerEventDefinition, registry)
class MultipleEventDefinitionConverter(EventDefinitionConverter):
def __init__(self, registry):
super().__init__(MultipleEventDefinition, registry)
def to_dict(self, event_definition):
dct = super().to_dict(event_definition)
dct['parallel'] = event_definition.parallel
dct['event_definitions'] = [self.registry.convert(e) for e in event_definition.event_definitions]
return dct
def from_dict(self, dct):
events = dct.pop('event_definitions')
event_definition = super().from_dict(dct)
event_definition.event_definitions = [self.registry.restore(d) for d in events]
return event_definition
DEFAULT_EVENT_CONVERTERS = [
CancelEventDefinitionConverter,
ErrorEventDefinitionConverter,
EscalationEventDefinitionConverter,
MessageEventDefinitionConverter,
NoneEventDefinitionConverter,
SignalEventDefinitionConverter,
TerminateEventDefinitionConverter,
TimeDateEventDefinitionConverter,
DurationTimerEventDefinitionConverter,
CycleTimerEventDefinitionConverter,
MultipleEventDefinitionConverter,
]
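Each of these converters registers its event definition class as a side effect of construction (see BpmnSpecConverter.__init__ in helpers/spec further down), so assembling the defaults only requires instantiating each class against a shared registry. A minimal wiring sketch, assuming the DefaultRegistry added in helpers/registry:

    registry = DefaultRegistry()
    for converter_class in DEFAULT_EVENT_CONVERTERS:
        converter_class(registry)   # registration happens inside the constructor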

View File

@@ -2,28 +2,28 @@ from functools import partial
class DictionaryConverter:
"""
This is a base class used to convert BPMN specs, workflows, tasks, and data to
dictionaries of JSON-serializable objects. Actual serialization is done as the
This is a base class used to convert BPMN specs, workflows, tasks, and (optionally)
data to dictionaries of JSON-serializable objects. Actual serialization is done as the
very last step by other classes.
This class allows you to register to_dict and from_dict functions for non-JSON-
This class allows you to register `to_dict` and `from_dict` functions for non-JSON-
serializable objects.
When an object is passed into `convert`, it will call the supplied to_dict
When an object is passed into `convert`, it will call the supplied `to_dict`
function on any classes that have been registered. The supplied to_dict function
must return a dictionary. The object's `typename` will be added to this dictionary
by the converter.
The (unqualified) class name will be used as the `typename` if one is not supplied.
You can optionally supply your own names (you'll need to do this if you need to
identically named classes in multiple packages).
You can optionally supply your own names (you'll need to do this if you use identically
named classes in multiple packages).
When a dictionary is passed into `restore`, it will be checked for a `typename` key.
If a registered `typename` is found, the supplied from_dict function will be
If a registered `typename` is found, the supplied `from_dict` function will be
called. Unrecognized objects will be returned as-is.
For a simple example of how to use this class, see the `BpmnDataConverter` in
`bpmn_converters`.
`registry`.
"""
def __init__(self):
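In practice, registration pairs a class with its serialization functions. A short sketch against this interface (Coordinate is a made-up custom type, not part of SpiffWorkflow):

    class Coordinate:
        def __init__(self, x, y):
            self.x, self.y = x, y

    converter = DictionaryConverter()
    converter.register(
        Coordinate,
        lambda c: {'x': c.x, 'y': c.y},    # to_dict: must return a dictionary
        lambda dct: Coordinate(**dct),     # from_dict: rebuilds the object
    )
    dct = converter.convert(Coordinate(1, 2))   # {'x': 1, 'y': 2, 'typename': 'Coordinate'}
    obj = converter.restore(dct)                # a Coordinate instance again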

View File

@@ -0,0 +1,33 @@
from uuid import UUID
from datetime import datetime, timedelta
from .dictionary import DictionaryConverter
class DefaultRegistry(DictionaryConverter):
"""
The default converter for task and workflow data. It allows some commonly used python objects
to be converted to a form that can be serialized with JSON.
It also serves as a simple example for anyone who needs custom data serialization. If you have
custom objects or python objects not included here in your workflow/task data, then you should
replace or extend this with one that can handle the contents of your workflow.
"""
def __init__(self):
super().__init__()
self.register(UUID, lambda v: { 'value': str(v) }, lambda v: UUID(v['value']))
self.register(datetime, lambda v: { 'value': v.isoformat() }, lambda v: datetime.fromisoformat(v['value']))
self.register(timedelta, lambda v: { 'days': v.days, 'seconds': v.seconds }, lambda v: timedelta(**v))
def convert(self, obj):
self.clean(obj)
return super().convert(obj)
def clean(self, obj):
# This removes functions and other callables from task data.
# By default we don't want to serialize these
if isinstance(obj, dict):
items = [ (k, v) for k, v in obj.items() ]
for key, value in items:
if callable(value):
del obj[key]
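DefaultRegistry can be extended the same way for additional data types that appear in task or workflow data. A hedged sketch for decimal values, mirroring the UUID registration above (the string representation is an illustrative choice):

    from decimal import Decimal

    registry = DefaultRegistry()
    registry.register(
        Decimal,
        lambda v: {'value': str(v)},
        lambda dct: Decimal(dct['value']),
    )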

View File

@@ -0,0 +1,248 @@
from functools import partial
from ...specs.BpmnSpecMixin import BpmnSpecMixin
from ...specs.events.event_definitions import NamedEventDefinition, TimerEventDefinition
from ...specs.events.event_definitions import CorrelationProperty
from ....operators import Attrib, PathAttrib
class BpmnSpecConverter:
"""The base class for conversion of BPMN spec classes.
In general, most classes that extend this would simply take an existing registry as an
argument and automatically supply the class along with the implementations of the
conversion functions `to_dict` and `from_dict`.
The operation of the spec converter is a little opaque, but hopefully makes sense with a
little explanation.
The registry is a `DictionaryConverter` that registers conversion methods by class. It can be
pre-populated with methods for custom data (though this is not required) and is passed into
each of these subclasses. When a subclass of this one gets instantiated, it adds itself
to this registry.
This seems a little bit backwards -- the registry is using the subclass, so it seems like we
ought to pass the subclass to the registry. However, there is a lot of interdependence across
the spec classes, so this doesn't work that well in practice -- most classes need to know about
all the other classes, and this was the most concise way I could think of to make that happen.
The goal is to be able to replace almost any spec class at the top level without requiring classes that
use it to reimplement their conversion mechanisms. So, for example, it is not necessary to
re-implement all event-based task spec conversions just because the
`MessageEventDefinition` was modified.
"""
def __init__(self, spec_class, registry, typename=None):
"""Constructor for a BPMN spec.
:param spec_class: the class of the spec the subclass provides conversions for
:param registry: a registry of conversions to which this one should be added
:param typename: the name of the class as it will appear in the serialization
"""
self.spec_class = spec_class
self.registry = registry
self.typename = typename if typename is not None else spec_class.__name__
self.registry.register(spec_class, self.to_dict, self.from_dict, self.typename)
def to_dict(self, spec):
raise NotImplementedError
def from_dict(self, dct):
raise NotImplementedError
class BpmnDataSpecificationConverter(BpmnSpecConverter):
"""This is the base Data Spec converter.
Currently the only use is Data Objects.
"""
def to_dict(self, data_spec):
return { 'name': data_spec.name, 'description': data_spec.description }
def from_dict(self, dct):
return self.spec_class(**dct)
class EventDefinitionConverter(BpmnSpecConverter):
"""This is the base Event Defintiion Converter.
It provides conversions for the great majority of BPMN events as-is, and contains
one custom method for serializing Correlation Properties (as Message Event Definitions
are likely to be the most commonly extended event definition spec).
"""
def to_dict(self, event_definition):
dct = {'internal': event_definition.internal, 'external': event_definition.external}
if isinstance(event_definition, (NamedEventDefinition, TimerEventDefinition)):
dct['name'] = event_definition.name
return dct
def from_dict(self, dct):
internal, external = dct.pop('internal'), dct.pop('external')
event_definition = self.spec_class(**dct)
event_definition.internal = internal
event_definition.external = external
return event_definition
def correlation_properties_to_dict(self, props):
return [prop.__dict__ for prop in props]
def correlation_properties_from_dict(self, props):
return [CorrelationProperty(**prop) for prop in props]
class TaskSpecConverter(BpmnSpecConverter):
"""
This is the base Task Spec Converter.
It contains methods for parsing generic and BPMN task spec attributes.
If you have extended any of the BPMN tasks with custom functionality, you'll need to
implement a converter for those task spec types. You'll need to implement the `to_dict` and
`from_dict` methods on any inheriting classes.
The default task spec converters are in the `task`, `process_spec`, and `event_definitions`
modules of this package; the `camunda`,`dmn`, and `spiff` serialization packages contain other
examples.
"""
def get_default_attributes(self, spec, include_data=False):
"""Extracts the default Spiff attributes from a task spec.
:param spec: the task spec to be converted
Returns:
a dictionary of standard task spec attributes
"""
dct = {
'id': spec.id,
'name': spec.name,
'description': spec.description,
'manual': spec.manual,
'internal': spec.internal,
'lookahead': spec.lookahead,
'inputs': [task.name for task in spec.inputs],
'outputs': [task.name for task in spec.outputs],
}
# This stuff is also all defined in the base task spec, but can contain data, so we need
# our data serializer. I think we should try to get this stuff out of the base task spec.
if include_data:
dct['data'] = self.registry.convert(spec.data)
dct['defines'] = self.registry.convert(spec.defines)
dct['pre_assign'] = self.registry.convert(spec.pre_assign)
dct['post_assign'] = self.registry.convert(spec.post_assign)
return dct
def get_bpmn_attributes(self, spec):
"""Extracts the attributes added by the `BpmnSpecMixin` class.
:param spec: the task spec to be converted
Returns:
a dictionary of BPMN task spec attributes
"""
return {
'lane': spec.lane,
'documentation': spec.documentation,
'loopTask': spec.loopTask,
'position': spec.position,
'data_input_associations': [ self.registry.convert(obj) for obj in spec.data_input_associations ],
'data_output_associations': [ self.registry.convert(obj) for obj in spec.data_output_associations ],
}
def get_join_attributes(self, spec):
"""Extracts attributes for task specs that inherit from `Join`.
:param spec: the task spec to be converted
Returns:
a dictionary of `Join` task spec attributes
"""
return {
'split_task': spec.split_task,
'threshold': spec.threshold,
'cancel': spec.cancel_remaining,
}
def get_subworkflow_attributes(self, spec):
"""Extracts attributes for task specs that inherit from `SubWorkflowTask`.
:param spec: the task spec to be converted
Returns:
a dictionary of subworkflow task spec attributes
"""
return {'spec': spec.spec}
def task_spec_from_dict(self, dct, include_data=False):
"""
Creates a task spec based on the supplied dictionary. It handles setting the default
task spec attributes as well as attributes added by `BpmnSpecMixin`.
:param dct: the dictionary to create the task spec from
:param include_data: whether or not to include task spec data attributes
Returns:
a restored task spec
"""
internal = dct.pop('internal')
inputs = dct.pop('inputs')
outputs = dct.pop('outputs')
spec = self.spec_class(**dct)
spec.internal = internal
spec.inputs = inputs
spec.outputs = outputs
spec.id = dct['id']
if include_data:
spec.data = self.registry.restore(dct.get('data', {}))
spec.defines = self.registry.restore(dct.get('defines', {}))
spec.pre_assign = self.registry.restore(dct.get('pre_assign', {}))
spec.post_assign = self.registry.restore(dct.get('post_assign', {}))
if isinstance(spec, BpmnSpecMixin):
spec.documentation = dct.pop('documentation', None)
spec.lane = dct.pop('lane', None)
spec.loopTask = dct.pop('loopTask', False)
spec.data_input_associations = self.registry.restore(dct.pop('data_input_associations', []))
spec.data_output_associations = self.registry.restore(dct.pop('data_output_associations', []))
return spec
class WorkflowSpecConverter(BpmnSpecConverter):
"""
This is the base converter for a BPMN workflow spec.
It will register converters for the task spec types contained in the workflow, as well as
the workflow spec class itself.
This class can be extended if you implement a custom workflow spec type. See the converter
in `workflow_spec_converter` for an example.
"""
def __init__(self, spec_class, registry):
"""
Converter for a BPMN workflow spec class.
The `to_dict` and `from_dict` methods of the task spec converters that share this registry
are registered with it, so that task specs can be restored automatically.
The registry is also applied to task *spec* data, not task data; see
`TaskSpecConverter` for more discussion.
:param spec_class: the workflow spec class
:param registry: a registry of conversions to which this one should be added
"""
super().__init__(spec_class, registry)
# Leaving these as-is, as I can't imagine anyone would need or want to extend
self.registry.register(Attrib, self.attrib_to_dict, partial(self.attrib_from_dict, Attrib))
self.registry.register(PathAttrib, self.attrib_to_dict, partial(self.attrib_from_dict, PathAttrib))
def attrib_to_dict(self, attrib):
return { 'name': attrib.name }
def attrib_from_dict(self, attrib_class, dct):
return attrib_class(dct['name'])
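Putting the helpers together, a converter for a custom task spec usually follows the same shape as the defaults. A sketch assuming a hypothetical MyTask spec (a BpmnSpecMixin subclass) with one extra attribute:

    class MyTaskConverter(TaskSpecConverter):
        def __init__(self, registry):
            super().__init__(MyTask, registry)

        def to_dict(self, spec):
            dct = self.get_default_attributes(spec)
            dct.update(self.get_bpmn_attributes(spec))
            dct['my_attribute'] = spec.my_attribute    # hypothetical extra attribute
            return dct

        def from_dict(self, dct):
            my_attribute = dct.pop('my_attribute')
            spec = self.task_spec_from_dict(dct)
            spec.my_attribute = my_attribute
            return spec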

View File

@@ -1,18 +1,20 @@ from .bpmn_converters import BpmnWorkflowSpecConverter
from .bpmn_converters import BpmnWorkflowSpecConverter
from .helpers.spec import WorkflowSpecConverter, BpmnDataSpecificationConverter
from ..specs.BpmnProcessSpec import BpmnProcessSpec
from ..specs.MultiInstanceTask import MultiInstanceTask, getDynamicMIClass
from ..specs.events.IntermediateEvent import _BoundaryEventParent
from ...operators import Attrib, PathAttrib
from ...specs.WorkflowSpec import WorkflowSpec
from ..specs.BpmnProcessSpec import BpmnDataSpecification
class BpmnProcessSpecConverter(BpmnWorkflowSpecConverter):
class BpmnDataObjectConverter(BpmnDataSpecificationConverter):
def __init__(self, registry, typename=None):
super().__init__(BpmnDataSpecification, registry, typename)
def __init__(self, task_spec_converters, data_converter=None):
super().__init__(BpmnProcessSpec, task_spec_converters, data_converter)
self.register(WorkflowSpec, self.base_workflow_spec_to_dict, self.from_dict)
class BpmnProcessSpecConverter(WorkflowSpecConverter):
def __init__(self, registry):
super().__init__(BpmnProcessSpec, registry)
def multi_instance_to_dict(self, spec):
@@ -22,18 +24,15 @@ class BpmnProcessSpecConverter(BpmnWorkflowSpecConverter):
# Bypass the automatic selection of a conversion function
# This returns the partial function that was created on register for the original task type.
# The second argument is the function that would be called by `convert`.
conversion = self.convert_to_dict[classname]
conversion = self.registry.convert_to_dict[classname]
func = conversion.args[1]
# We can just call it directly and add the typename manually
dct = func(spec)
dct['typename'] = classname
# And we have to do this here, rather than in a converter
# We also have to manually apply the Attrib conversions
convert_attrib = lambda v: { 'name': v.name, 'typename': v.__class__.__name__ }
dct.update({
'times': convert_attrib(spec.times) if spec.times is not None else None,
'times': self.registry.convert(spec.times) if spec.times is not None else None,
'elementVar': spec.elementVar,
'collection': convert_attrib(spec.collection) if spec.collection is not None else None,
'collection': self.registry.convert(spec.collection) if spec.collection is not None else None,
# These are not defined in the constructor, but added by the parser, or somewhere else inappropriate
'completioncondition': spec.completioncondition,
'prevtaskclass': spec.prevtaskclass,
@@ -62,28 +61,26 @@ class BpmnProcessSpecConverter(BpmnWorkflowSpecConverter):
attrs.append('dmnEngine')
# Terrible ugly hack
registered = dict((name, c) for c, name in self.typenames.items())
registered = dict((name, c) for c, name in self.registry.typenames.items())
# First get the dynamic class
cls = getDynamicMIClass(dct['name'], registered[dct['typename']])
# Restore the task according to the original task spec, so that its attributes can be converted
# recursively
original = self.restore(dct.copy())
original = self.registry.restore(dct.copy())
# But this task has the wrong class, so delete it from the spec
del dct['wf_spec'].task_specs[original.name]
# Create a new class using the dynamic class
task_spec = cls(**dct)
# Restore the attributes that weren't recognized by the original converter
restore_attrib = lambda v: Attrib(v['name']) if v['typename'] == 'Attrib' else PathAttrib(v['name'])
task_spec.times = restore_attrib(dct['times']) if dct['times'] is not None else None
task_spec.collection = restore_attrib(dct['collection']) if dct['collection'] is not None else None
task_spec.times = self.registry.restore(dct['times']) if dct['times'] is not None else None
task_spec.collection = self.registry.restore(dct['collection']) if dct['collection'] is not None else None
# Now copy everything else, from the temporary task spec if possible, otherwise the dict
for attr in attrs:
# If the original task has the attr, use the converted value
if hasattr(original, attr):
task_spec.__dict__[attr] = original.__dict__[attr]
else:
task_spec.__dict__[attr] = self.restore(dct[attr])
task_spec.__dict__[attr] = self.registry.restore(dct[attr])
# Handle adding any remaining attributes from the original task type that might not be
# present in the restored version (for example attributes added since last serialized)
@@ -110,16 +107,16 @@ class BpmnProcessSpecConverter(BpmnWorkflowSpecConverter):
'description': spec.description,
'file': spec.file,
'task_specs': {},
'data_inputs': [ self.convert(obj) for obj in spec.data_inputs ],
'data_outputs': [ self.convert(obj) for obj in spec.data_outputs ],
'data_objects': dict([ (name, self.convert(obj)) for name, obj in spec.data_objects .items() ]),
'data_inputs': [ self.registry.convert(obj) for obj in spec.data_inputs ],
'data_outputs': [ self.registry.convert(obj) for obj in spec.data_outputs ],
'data_objects': dict([ (name, self.registry.convert(obj)) for name, obj in spec.data_objects.items() ]),
'correlation_keys': spec.correlation_keys,
}
for name, task_spec in spec.task_specs.items():
if isinstance(task_spec, MultiInstanceTask):
task_dict = self.multi_instance_to_dict(task_spec)
else:
task_dict = self.convert(task_spec)
task_dict = self.registry.convert(task_spec)
self.convert_task_spec_extensions(task_spec, task_dict)
dct['task_specs'][name] = task_dict
@@ -138,12 +135,12 @@ class BpmnProcessSpecConverter(BpmnWorkflowSpecConverter):
del spec.task_specs[f'{spec.name}.EndJoin']
# Add the data specs
spec.data_inputs = [ self.restore(obj_dct) for obj_dct in dct.pop('data_inputs', []) ]
spec.data_outputs = [ self.restore(obj_dct) for obj_dct in dct.pop('data_outputs', []) ]
spec.data_inputs = [ self.registry.restore(obj_dct) for obj_dct in dct.pop('data_inputs', []) ]
spec.data_outputs = [ self.registry.restore(obj_dct) for obj_dct in dct.pop('data_outputs', []) ]
# fixme: This conditional can be removed in the next release; it just avoids a potential
# serialization issue for some users caught between official releases.
if isinstance(dct.get('data_objects', {}), dict):
spec.data_objects = dict([ (name, self.restore(obj_dct)) for name, obj_dct in dct.pop('data_objects', {}).items() ])
spec.data_objects = dict([ (name, self.registry.restore(obj_dct)) for name, obj_dct in dct.pop('data_objects', {}).items() ])
else:
spec.data_objects = {}
@@ -159,7 +156,7 @@ class BpmnProcessSpecConverter(BpmnWorkflowSpecConverter):
if 'prevtaskclass' in task_dict:
task_spec = self.multiinstance_from_dict(task_dict)
else:
task_spec = self.restore(task_dict)
task_spec = self.registry.restore(task_dict)
if name == 'Start':
spec.start = task_spec
self.restore_task_spec_extensions(task_dict, task_spec)
@@ -172,26 +169,3 @@ class BpmnProcessSpecConverter(BpmnWorkflowSpecConverter):
task_spec.outputs = [ spec.get_task_spec_from_name(name) for name in task_spec.outputs ]
return spec
def base_workflow_spec_to_dict(self, spec):
# We should delete this method when we stop supporting the old serializer.
# It uses WorkflowSpec rather than BpmnWorkflowSpec, which does not support data objects.
# I hate copying this code here, but I am NOT putting an "if isinstance" check in the
# main method to handle a bug in the thing I'm replacing.
dct = {
'name': spec.name,
'description': spec.description,
'file': spec.file,
'task_specs': {},
}
for name, task_spec in spec.task_specs.items():
if isinstance(task_spec, MultiInstanceTask):
task_dict = self.multi_instance_to_dict(task_spec)
else:
task_dict = self.convert(task_spec)
self.convert_task_spec_extensions(task_spec, task_dict)
dct['task_specs'][name] = task_dict
return dct

View File

@@ -0,0 +1,292 @@
from .helpers.spec import TaskSpecConverter
from ...specs.StartTask import StartTask
from ...specs.Simple import Simple
from ...specs.LoopResetTask import LoopResetTask
from ..specs.BpmnProcessSpec import _EndJoin
from ..specs.BpmnSpecMixin import _BpmnCondition
from ..specs.NoneTask import NoneTask
from ..specs.UserTask import UserTask
from ..specs.ManualTask import ManualTask
from ..specs.ScriptTask import ScriptTask
from ..specs.SubWorkflowTask import CallActivity, TransactionSubprocess
from ..specs.ExclusiveGateway import ExclusiveGateway
from ..specs.InclusiveGateway import InclusiveGateway
from ..specs.ParallelGateway import ParallelGateway
from ..specs.events.StartEvent import StartEvent
from ..specs.events.EndEvent import EndEvent
from ..specs.events.IntermediateEvent import (
BoundaryEvent,
_BoundaryEventParent,
EventBasedGateway,
IntermediateCatchEvent,
IntermediateThrowEvent,
SendTask,
ReceiveTask,
)
from ..workflow import BpmnWorkflow
class DefaultTaskSpecConverter(TaskSpecConverter):
def to_dict(self, spec):
dct = self.get_default_attributes(spec)
return dct
def from_dict(self, dct):
return self.task_spec_from_dict(dct)
class SimpleTaskConverter(DefaultTaskSpecConverter):
def __init__(self, registry):
super().__init__(Simple, registry)
class StartTaskConverter(DefaultTaskSpecConverter):
def __init__(self, registry):
super().__init__(StartTask, registry)
class LoopResetTaskConverter(DefaultTaskSpecConverter):
def __init__(self, registry):
super().__init__(LoopResetTask, registry)
def to_dict(self, spec):
dct = super().to_dict(spec)
dct['destination_id'] = str(spec.destination_id)
dct['destination_spec_name'] = spec.destination_spec_name
return dct
def from_dict(self, dct):
spec = self.task_spec_from_dict(dct)
spec.destination_id = self.registry.convert(spec.destination_id)
return spec
class EndJoinConverter(DefaultTaskSpecConverter):
def __init__(self, registry):
super().__init__(_EndJoin, registry)
class BpmnTaskSpecConverter(TaskSpecConverter):
def to_dict(self, spec):
dct = self.get_default_attributes(spec)
dct.update(self.get_bpmn_attributes(spec))
return dct
def from_dict(self, dct):
return self.task_spec_from_dict(dct)
class NoneTaskConverter(BpmnTaskSpecConverter):
def __init__(self, registry):
super().__init__(NoneTask, registry)
class UserTaskConverter(BpmnTaskSpecConverter):
def __init__(self, registry):
super().__init__(UserTask, registry)
class ManualTaskConverter(BpmnTaskSpecConverter):
def __init__(self, registry):
super().__init__(ManualTask, registry)
class ScriptTaskConverter(BpmnTaskSpecConverter):
def __init__(self, registry):
super().__init__(ScriptTask, registry)
def to_dict(self, spec):
dct = self.get_default_attributes(spec)
dct.update(self.get_bpmn_attributes(spec))
dct['script'] = spec.script
return dct
class BoundaryEventParentConverter(BpmnTaskSpecConverter):
def __init__(self, registry):
super().__init__(_BoundaryEventParent, registry)
def to_dict(self, spec):
dct = super().to_dict(spec)
dct['main_child_task_spec'] = spec.main_child_task_spec.name
return dct
class SubprocessConverter(BpmnTaskSpecConverter):
def to_dict(self, spec):
dct = super().to_dict(spec)
dct.update(self.get_subworkflow_attributes(spec))
return dct
def from_dict(self, dct):
dct['subworkflow_spec'] = dct.pop('spec')
return self.task_spec_from_dict(dct)
class CallActivityTaskConverter(SubprocessConverter):
def __init__(self, registry):
super().__init__(CallActivity, registry)
self.wf_class = BpmnWorkflow
class TransactionSubprocessTaskConverter(SubprocessConverter):
def __init__(self, registry):
super().__init__(TransactionSubprocess, registry)
self.wf_class = BpmnWorkflow
class ConditionalGatewayConverter(BpmnTaskSpecConverter):
def to_dict(self, spec):
dct = super().to_dict(spec)
dct['cond_task_specs'] = [ self.bpmn_condition_to_dict(cond) for cond in spec.cond_task_specs ]
dct['choice'] = spec.choice
return dct
def from_dict(self, dct):
conditions = dct.pop('cond_task_specs')
spec = self.task_spec_from_dict(dct)
spec.cond_task_specs = [ self.bpmn_condition_from_dict(cond) for cond in conditions ]
return spec
def bpmn_condition_from_dict(self, dct):
return (_BpmnCondition(dct['condition']) if dct['condition'] is not None else None, dct['task_spec'])
def bpmn_condition_to_dict(self, condition):
expr, task_spec = condition
return {
'condition': expr.args[0] if expr is not None else None,
'task_spec': task_spec
}
class ExclusiveGatewayConverter(ConditionalGatewayConverter):
def __init__(self, registry):
super().__init__(ExclusiveGateway, registry)
def to_dict(self, spec):
dct = super().to_dict(spec)
dct['default_task_spec'] = spec.default_task_spec
return dct
def from_dict(self, dct):
default_task_spec = dct.pop('default_task_spec')
spec = super().from_dict(dct)
spec.default_task_spec = default_task_spec
return spec
class InclusiveGatewayConverter(ConditionalGatewayConverter):
def __init__(self, registry):
super().__init__(InclusiveGateway, registry)
class ParallelGatewayConverter(BpmnTaskSpecConverter):
def __init__(self, registry):
super().__init__(ParallelGateway, registry)
def to_dict(self, spec):
dct = super().to_dict(spec)
dct.update(self.get_join_attributes(spec))
return dct
def from_dict(self, dct):
return self.task_spec_from_dict(dct)
class EventConverter(BpmnTaskSpecConverter):
def __init__(self, spec_class, registry):
super().__init__(spec_class, registry)
def to_dict(self, spec):
dct = super().to_dict(spec)
dct['event_definition'] = self.registry.convert(spec.event_definition)
return dct
def from_dict(self, dct):
dct['event_definition'] = self.registry.restore(dct['event_definition'])
return self.task_spec_from_dict(dct)
class StartEventConverter(EventConverter):
def __init__(self, registry):
super().__init__(StartEvent, registry)
class EndEventConverter(EventConverter):
def __init__(self, registry):
super().__init__(EndEvent, registry)
class IntermediateCatchEventConverter(EventConverter):
def __init__(self, registry):
super().__init__(IntermediateCatchEvent, registry)
class ReceiveTaskConverter(EventConverter):
def __init__(self, registry):
super().__init__(ReceiveTask, registry)
class IntermediateThrowEventConverter(EventConverter):
def __init__(self, registry):
super().__init__(IntermediateThrowEvent, registry)
class SendTaskConverter(EventConverter):
def __init__(self, registry):
super().__init__(SendTask, registry)
class BoundaryEventConverter(EventConverter):
def __init__(self, registry):
super().__init__(BoundaryEvent, registry)
def to_dict(self, spec):
dct = super().to_dict(spec)
dct['cancel_activity'] = spec.cancel_activity
return dct
class EventBasedGatewayConverter(EventConverter):
def __init__(self, registry):
super().__init__(EventBasedGateway, registry)
DEFAULT_TASK_SPEC_CONVERTER_CLASSES = [
SimpleTaskConverter,
StartTaskConverter,
EndJoinConverter,
LoopResetTaskConverter,
NoneTaskConverter,
UserTaskConverter,
ManualTaskConverter,
ScriptTaskConverter,
CallActivityTaskConverter,
TransactionSubprocessTaskConverter,
StartEventConverter,
EndEventConverter,
SendTaskConverter,
ReceiveTaskConverter,
IntermediateCatchEventConverter,
IntermediateThrowEventConverter,
EventBasedGatewayConverter,
BoundaryEventConverter,
BoundaryEventParentConverter,
ParallelGatewayConverter,
ExclusiveGatewayConverter,
InclusiveGatewayConverter,
]
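As with the event definition converters, each class in this list registers its spec type when constructed, so a registry covering all of the default BPMN task specs can be built in one pass and then used for a convert/restore round trip (some_task_spec stands in for a spec instance taken from a parsed workflow):

    registry = DefaultRegistry()
    for converter_class in DEFAULT_TASK_SPEC_CONVERTER_CLASSES:
        converter_class(registry)

    dct = registry.convert(some_task_spec)    # a dict with a 'typename' key added
    restored = registry.restore(dct)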

View File

@@ -1,323 +0,0 @@
from uuid import UUID
from .bpmn_converters import BpmnTaskSpecConverter
from ...specs.StartTask import StartTask
from ...specs.Simple import Simple
from ...specs.LoopResetTask import LoopResetTask
from ..specs.BpmnProcessSpec import _EndJoin
from ..specs.BpmnSpecMixin import _BpmnCondition
from ..specs.NoneTask import NoneTask
from ..specs.UserTask import UserTask
from ..specs.ManualTask import ManualTask
from ..specs.ScriptTask import ScriptTask
from ..specs.SubWorkflowTask import CallActivity, TransactionSubprocess
from ..specs.ExclusiveGateway import ExclusiveGateway
from ..specs.InclusiveGateway import InclusiveGateway
from ..specs.ParallelGateway import ParallelGateway
from ..specs.events.StartEvent import StartEvent
from ..specs.events.EndEvent import EndEvent
from ..specs.events.IntermediateEvent import BoundaryEvent, EventBasedGateway, IntermediateCatchEvent, IntermediateThrowEvent
from ..specs.events.IntermediateEvent import _BoundaryEventParent, SendTask, ReceiveTask
from ..workflow import BpmnWorkflow
class SimpleTaskConverter(BpmnTaskSpecConverter):
def __init__(self, data_converter=None, typename=None):
super().__init__(Simple, data_converter, typename)
def to_dict(self, spec):
dct = self.get_default_attributes(spec)
return dct
def from_dict(self, dct):
return self.task_spec_from_dict(dct)
class StartTaskConverter(BpmnTaskSpecConverter):
def __init__(self, data_converter=None, typename=None):
super().__init__(StartTask, data_converter, typename)
def to_dict(self, spec):
dct = self.get_default_attributes(spec)
return dct
def from_dict(self, dct):
return self.task_spec_from_dict(dct)
class LoopResetTaskConverter(BpmnTaskSpecConverter):
def __init__(self, data_converter=None, typename=None):
super().__init__(LoopResetTask, data_converter, typename)
def to_dict(self, spec):
dct = self.get_default_attributes(spec)
# Maybe I should add this to the base task converter, but I'm trying to keep it free of
# anything but task related conversions
dct['destination_id'] = str(spec.destination_id)
dct['destination_spec_name'] = spec.destination_spec_name
return dct
def from_dict(self, dct):
spec = self.task_spec_from_dict(dct)
spec.destination_id = UUID(spec.destination_id)
return spec
class EndJoinConverter(BpmnTaskSpecConverter):
def __init__(self, data_converter=None, typename=None):
super().__init__(_EndJoin, data_converter, typename)
def to_dict(self, spec):
dct = self.get_default_attributes(spec)
return dct
def from_dict(self, dct):
return self.task_spec_from_dict(dct)
class NoneTaskConverter(BpmnTaskSpecConverter):
def __init__(self, data_converter=None, typename=None):
super().__init__(NoneTask, data_converter, typename)
def to_dict(self, spec):
dct = self.get_default_attributes(spec)
dct.update(self.get_bpmn_attributes(spec))
return dct
def from_dict(self, dct):
return self.task_spec_from_dict(dct)
class UserTaskConverter(BpmnTaskSpecConverter):
def __init__(self, data_converter=None, typename=None):
super().__init__(UserTask, data_converter, typename)
def to_dict(self, spec):
dct = self.get_default_attributes(spec)
dct.update(self.get_bpmn_attributes(spec))
return dct
def from_dict(self, dct):
return self.task_spec_from_dict(dct)
class ManualTaskConverter(BpmnTaskSpecConverter):
def __init__(self, data_converter=None, typename=None):
super().__init__(ManualTask, data_converter, typename)
def to_dict(self, spec):
dct = self.get_default_attributes(spec)
dct.update(self.get_bpmn_attributes(spec))
return dct
def from_dict(self, dct):
return self.task_spec_from_dict(dct)
class ScriptTaskConverter(BpmnTaskSpecConverter):
def __init__(self, data_converter=None, typename=None):
super().__init__(ScriptTask, data_converter, typename)
def to_dict(self, spec):
dct = self.get_default_attributes(spec)
dct.update(self.get_bpmn_attributes(spec))
dct['script'] = spec.script
return dct
def from_dict(self, dct):
return self.task_spec_from_dict(dct)
class CallActivityTaskConverter(BpmnTaskSpecConverter):
def __init__(self, data_converter=None, typename=None):
super().__init__(CallActivity, data_converter, typename)
self.wf_class = BpmnWorkflow
def to_dict(self, spec):
dct = self.get_default_attributes(spec)
dct.update(self.get_bpmn_attributes(spec))
dct.update(self.get_subworkflow_attributes(spec))
return dct
def from_dict(self, dct):
dct['subworkflow_spec'] = dct.pop('spec')
return self.task_spec_from_dict(dct)
class TransactionSubprocessTaskConverter(BpmnTaskSpecConverter):
def __init__(self, data_converter=None, typename=None):
super().__init__(TransactionSubprocess, data_converter, typename)
self.wf_class = BpmnWorkflow
def to_dict(self, spec):
dct = self.get_default_attributes(spec)
dct.update(self.get_bpmn_attributes(spec))
dct.update(self.get_subworkflow_attributes(spec))
return dct
def from_dict(self, dct):
dct['subworkflow_spec'] = dct.pop('spec')
return self.task_spec_from_dict(dct)
class ConditionalGatewayConverter(BpmnTaskSpecConverter):
def to_dict(self, spec):
dct = self.get_default_attributes(spec)
dct.update(self.get_bpmn_attributes(spec))
dct['cond_task_specs'] = [ self.bpmn_condition_to_dict(cond) for cond in spec.cond_task_specs ]
dct['choice'] = spec.choice
return dct
def from_dict(self, dct):
conditions = dct.pop('cond_task_specs')
spec = self.task_spec_from_dict(dct)
spec.cond_task_specs = [ self.bpmn_condition_from_dict(cond) for cond in conditions ]
return spec
def bpmn_condition_from_dict(self, dct):
return (_BpmnCondition(dct['condition']) if dct['condition'] is not None else None, dct['task_spec'])
def bpmn_condition_to_dict(self, condition):
expr, task_spec = condition
return {
'condition': expr.args[0] if expr is not None else None,
'task_spec': task_spec
}
class ExclusiveGatewayConverter(ConditionalGatewayConverter):
def __init__(self, data_converter=None, typename=None):
super().__init__(ExclusiveGateway, data_converter, typename)
def to_dict(self, spec):
dct = super().to_dict(spec)
dct['default_task_spec'] = spec.default_task_spec
return dct
def from_dict(self, dct):
default_task_spec = dct.pop('default_task_spec')
spec = super().from_dict(dct)
spec.default_task_spec = default_task_spec
return spec
class InclusiveGatewayConverter(ConditionalGatewayConverter):
def __init__(self, data_converter=None, typename=None):
super().__init__(InclusiveGateway, data_converter, typename)
class ParallelGatewayConverter(BpmnTaskSpecConverter):
def __init__(self, data_converter=None, typename=None):
super().__init__(ParallelGateway, data_converter, typename)
def to_dict(self, spec):
dct = self.get_default_attributes(spec)
dct.update(self.get_bpmn_attributes(spec))
dct.update(self.get_join_attributes(spec))
return dct
def from_dict(self, dct):
return self.task_spec_from_dict(dct)
class EventConverter(BpmnTaskSpecConverter):
def __init__(self, spec_class, data_converter, typename):
super().__init__(spec_class, data_converter, typename)
def to_dict(self, spec):
dct = self.get_default_attributes(spec)
dct.update(self.get_bpmn_attributes(spec))
dct['event_definition'] = self.convert(spec.event_definition)
return dct
def from_dict(self, dct):
dct['event_definition'] = self.restore(dct['event_definition'])
return self.task_spec_from_dict(dct)
class StartEventConverter(EventConverter):
def __init__(self, data_converter=None, typename=None):
super().__init__(StartEvent, data_converter, typename)
class EndEventConverter(EventConverter):
def __init__(self, data_converter=None, typename=None):
super().__init__(EndEvent, data_converter, typename)
class IntermediateCatchEventConverter(EventConverter):
def __init__(self, data_converter=None, typename=None):
super().__init__(IntermediateCatchEvent, data_converter, typename)
class ReceiveTaskConverter(EventConverter):
def __init__(self, data_converter=None, typename=None):
super().__init__(ReceiveTask, data_converter, typename)
class IntermediateThrowEventConverter(EventConverter):
def __init__(self, data_converter=None, typename=None):
super().__init__(IntermediateThrowEvent, data_converter, typename)
class SendTaskConverter(EventConverter):
def __init__(self, data_converter=None, typename=None):
super().__init__(SendTask, data_converter, typename)
class BoundaryEventConverter(EventConverter):
def __init__(self, data_converter=None, typename=None):
super().__init__(BoundaryEvent, data_converter, typename)
def to_dict(self, spec):
dct = super().to_dict(spec)
dct['cancel_activity'] = spec.cancel_activity
return dct
class BoundaryEventParentConverter(BpmnTaskSpecConverter):
def __init__(self, data_converter=None, typename=None):
super().__init__(_BoundaryEventParent, data_converter, typename)
def to_dict(self, spec):
dct = self.get_default_attributes(spec)
dct.update(self.get_bpmn_attributes(spec))
dct['main_child_task_spec'] = spec.main_child_task_spec.name
return dct
def from_dict(self, dct):
return self.task_spec_from_dict(dct)
class EventBasedGatewayConverter(EventConverter):
def __init__(self, data_converter=None, typename=None):
super().__init__(EventBasedGateway, data_converter, typename)

View File

@ -3,34 +3,25 @@ import gzip
from copy import deepcopy
from uuid import UUID
from .version_migration import MIGRATIONS
from .bpmn_converters import BpmnDataConverter
from ..workflow import BpmnMessage, BpmnWorkflow
from ..specs.SubWorkflowTask import SubWorkflowTask
from ...task import Task
from .workflow_spec_converter import BpmnProcessSpecConverter
from .version_migration import MIGRATIONS
from .helpers.registry import DefaultRegistry
from .helpers.dictionary import DictionaryConverter
from .task_spec_converters import SimpleTaskConverter, StartTaskConverter, EndJoinConverter, LoopResetTaskConverter
from .task_spec_converters import NoneTaskConverter, UserTaskConverter, ManualTaskConverter, ScriptTaskConverter
from .task_spec_converters import CallActivityTaskConverter, TransactionSubprocessTaskConverter
from .task_spec_converters import StartEventConverter, EndEventConverter
from .task_spec_converters import IntermediateCatchEventConverter, IntermediateThrowEventConverter, EventBasedGatewayConverter
from .task_spec_converters import SendTaskConverter, ReceiveTaskConverter
from .task_spec_converters import BoundaryEventConverter, BoundaryEventParentConverter
from .task_spec_converters import ParallelGatewayConverter, ExclusiveGatewayConverter, InclusiveGatewayConverter
from .process_spec import BpmnProcessSpecConverter, BpmnDataObjectConverter
from .task_spec import DEFAULT_TASK_SPEC_CONVERTER_CLASSES
from .event_definition import DEFAULT_EVENT_CONVERTERS
DEFAULT_SPEC_CONFIG = {
'process': BpmnProcessSpecConverter,
'data_specs': [BpmnDataObjectConverter],
'task_specs': DEFAULT_TASK_SPEC_CONVERTER_CLASSES,
'event_definitions': DEFAULT_EVENT_CONVERTERS,
}
DEFAULT_TASK_SPEC_CONVERTER_CLASSES = [
SimpleTaskConverter, StartTaskConverter, EndJoinConverter, LoopResetTaskConverter,
NoneTaskConverter, UserTaskConverter, ManualTaskConverter, ScriptTaskConverter,
CallActivityTaskConverter, TransactionSubprocessTaskConverter,
StartEventConverter, EndEventConverter, SendTaskConverter, ReceiveTaskConverter,
IntermediateCatchEventConverter, IntermediateThrowEventConverter, EventBasedGatewayConverter,
BoundaryEventConverter, BoundaryEventParentConverter,
ParallelGatewayConverter, ExclusiveGatewayConverter, InclusiveGatewayConverter
]
class BpmnWorkflowSerializer:
"""
@ -39,21 +30,18 @@ class BpmnWorkflowSerializer:
The goal is to provide modular serialization capabilities.
You'll need to configure a Workflow Spec Converter with Task Spec Converters for any task types
present in your workflows. Because the Task Spec Converters also require initialization, the process
of building a Workflow Spec Converter is a little tedious; therefore, this class provides a static
method `configure_workflow_spec_converter` that can extend and/or override the default Task Spec
Converter list and return a Workflow Spec Converter that will recognize the overridden specs.
You'll need to configure a Workflow Spec Converter with converters for any task, data, or event types
present in your workflows.
If you have implemented any custom task specs, you'll need to write a converter to handle them and
provide it to this method; if you are using only the defaults, you can call this with no arguments.
If you have implemented any custom specs, you'll need to write a converter to handle them and
replace the converter from the default configuration with your own.
If your workflow contains non-JSON-serializable objects, you'll need to extend or replace the
default data converter with one that will handle them. This converter needs to implement
`convert` and `restore` methods.
Serialization occurs in two phases: the first is to convert everything in the workflow to a
dictionary containins only JSON-serializable objects and the second is dumping to JSON.
dictionary containing only JSON-serializable objects and the second is dumping to JSON.
This means that you can call the `workflow_to_dict` or `workflow_from_dict` methods separately from
conversion to JSON for further manipulation of the state, or selective serialization of only certain
@ -70,36 +58,34 @@ class BpmnWorkflowSerializer:
DEFAULT_JSON_DECODER_CLS = None
@staticmethod
def configure_workflow_spec_converter(task_spec_overrides=None, data_converter=None, version=VERSION):
def configure_workflow_spec_converter(spec_config=None, registry=None):
"""
This method can be used to add additional task spec converters to the default BPMN Process
converter.
This method can be used to create a spec converter that uses custom specs.
The task specs may contain arbitrary data, though none of the default task specs use it. We
may disallow that in the future, so we don't recommend using this capability.
The task specs may contain arbitrary data, though none of the default task specs use it. We don't
recommend that you do this, as we may disallow it in the future. However, if you have task spec data,
then you'll also need to make sure it can be serialized.
The task spec converters also take an optional typename argument; this will be included in the
serialized dictionaries so that the original class can be restored. The unqualified classname is
used if none is provided. If a class in `task_spec_overrides` conflicts with one of the
defaults, the default will be removed and the provided one will be used instead. If you need
both for some reason, you'll have to instantiate the task spec converters and workflow spec
converter yourself.
The workflow spec serializer is based on the `DictionaryConverter` in the `helpers` package. You can
create one of your own, add custom data serialization to it, and pass it in as the `registry`. The
conversion classes in the spec_config will be added to this "registry", and any classes with entries there
will be serialized/deserialized.
:param task_spec_overrides: a list of task spec converter classes
:param data_converter: an optional data converter for task spec data
See the documentation for `helpers.spec.BpmnSpecConverter` for more information about what's going
on here.
:param spec_config: a dictionary specifying how to save and restore any classes used by the spec
:param registry: a `DictionaryConverter` with conversions for custom data (if applicable)
"""
if task_spec_overrides is None:
task_spec_overrides = []
config = spec_config or DEFAULT_SPEC_CONFIG
spec_converter = registry or DictionaryConverter()
config['process'](spec_converter)
for cls in config['data_specs'] + config['task_specs'] + config['event_definitions']:
cls(spec_converter)
return spec_converter
classnames = [c.__name__ for c in task_spec_overrides]
converters = [c(data_converter=data_converter) for c in task_spec_overrides]
for c in DEFAULT_TASK_SPEC_CONVERTER_CLASSES:
if c.__name__ not in classnames:
converters.append(c(data_converter=data_converter))
return BpmnProcessSpecConverter(converters, version)
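For illustration, a minimal sketch of calling the new method with a customized configuration. It follows the `spec_config` dictionary and the test usage shown elsewhere in this change; `MyTaskConverter` is a hypothetical converter class, not part of this change.
from copy import deepcopy
from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer, DEFAULT_SPEC_CONFIG
# Start from the default configuration and register a hypothetical custom task spec converter
config = deepcopy(DEFAULT_SPEC_CONFIG)
config['task_specs'].append(MyTaskConverter)  # MyTaskConverter is assumed, for illustration only
spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter(spec_config=config)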
def __init__(self, spec_converter=None, data_converter=None, wf_class=None, version=VERSION, json_encoder_cls=DEFAULT_JSON_ENCODER_CLS, json_decoder_cls=DEFAULT_JSON_DECODER_CLS):
def __init__(self, spec_converter=None, data_converter=None, wf_class=None, version=VERSION,
json_encoder_cls=DEFAULT_JSON_ENCODER_CLS, json_decoder_cls=DEFAULT_JSON_DECODER_CLS):
"""Intializes a Workflow Serializer with the given Workflow, Task and Data Converters.
:param spec_converter: the workflow spec converter
@ -110,7 +96,7 @@ class BpmnWorkflowSerializer:
"""
super().__init__()
self.spec_converter = spec_converter if spec_converter is not None else self.configure_workflow_spec_converter()
self.data_converter = data_converter if data_converter is not None else BpmnDataConverter()
self.data_converter = data_converter if data_converter is not None else DefaultRegistry()
self.wf_class = wf_class if wf_class is not None else BpmnWorkflow
self.json_encoder_cls = json_encoder_cls
self.json_decoder_cls = json_decoder_cls
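A sketch of the two-phase serialization described in the class docstring, assuming `workflow` is an existing BpmnWorkflow and `spec_converter` was built as above. The `workflow_to_dict`/`workflow_from_dict` names come from the docstring; the exact signatures may differ, so treat this as illustrative rather than definitive.
import json
serializer = BpmnWorkflowSerializer(spec_converter)              # data_converter defaults to DefaultRegistry
dct = serializer.workflow_to_dict(workflow)                      # phase 1: reduce to a JSON-serializable dict
as_json = json.dumps(dct, cls=serializer.json_encoder_cls)       # phase 2: dump to JSON
restored = serializer.workflow_from_dict(json.loads(as_json, cls=serializer.json_decoder_cls))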

View File

@ -124,57 +124,3 @@ class BpmnProcessSpec(WorkflowSpec):
self.data_outputs = []
self.data_objects = {}
self.correlation_keys = {}
def get_all_lanes(self):
"""
Returns a set of the distinct lane names used in the process (including
called activities)
"""
done = set()
lanes = set()
def recursive_find(task_spec):
if task_spec in done:
return
done.add(task_spec)
if hasattr(task_spec, 'lane') and task_spec.lane:
lanes.add(task_spec.lane)
if hasattr(task_spec, 'spec'):
recursive_find(task_spec.spec.start)
for t in task_spec.outputs:
recursive_find(t)
recursive_find(self.start)
return lanes
def get_specs_depth_first(self):
"""
Get the specs for all processes (including called ones), in depth first
order.
"""
done = set()
specs = [self]
def recursive_find(task_spec):
if task_spec in done:
return
done.add(task_spec)
if hasattr(task_spec, 'spec'):
specs.append(task_spec.spec)
recursive_find(task_spec.spec.start)
for t in task_spec.outputs:
recursive_find(t)
recursive_find(self.start)
return specs

View File

@ -4,7 +4,6 @@ from copy import deepcopy
from SpiffWorkflow.task import TaskState
from .BpmnSpecMixin import BpmnSpecMixin
from ..exceptions import WorkflowDataException
from ...specs.base import TaskSpec
class SubWorkflowTask(BpmnSpecMixin):
@ -26,9 +25,6 @@ class SubWorkflowTask(BpmnSpecMixin):
def spec_type(self):
return 'Subprocess'
def test(self):
TaskSpec.test(self)
def _on_ready_before_hook(self, my_task):
subworkflow = my_task.workflow.create_subprocess(my_task, self.spec, self.name)
subworkflow.completed_event.connect(self._on_subworkflow_completed, my_task)
@ -41,10 +37,6 @@ class SubWorkflowTask(BpmnSpecMixin):
def _on_subworkflow_completed(self, subworkflow, my_task):
# Shouldn't this always be true?
if isinstance(my_task.parent.task_spec, BpmnSpecMixin):
my_task.parent.task_spec._child_complete_hook(my_task)
if len(subworkflow.spec.data_outputs) == 0:
# Copy all workflow data if no outputs are specified
my_task.data = deepcopy(subworkflow.last_task.data)
@ -63,14 +55,7 @@ class SubWorkflowTask(BpmnSpecMixin):
def _update_hook(self, my_task):
wf = my_task.workflow._get_outermost_workflow(my_task)
if my_task.id not in wf.subprocesses:
super()._update_hook(my_task)
def _predict_hook(self, my_task):
# The base Subworkflow task predict doesn't work with the loop reset task
BpmnSpecMixin._predict_hook(self, my_task)
def _on_complete_hook(self, my_task):
BpmnSpecMixin._on_complete_hook(self, my_task)
return super()._update_hook(my_task)
def _on_cancel(self, my_task):
subworkflow = my_task.workflow.get_subprocess(my_task)

View File

@ -121,13 +121,10 @@ class BoundaryEvent(CatchingEvent):
def catch(self, my_task, event_definition):
super(BoundaryEvent, self).catch(my_task, event_definition)
# Would love to get rid of this statement and manage in the workflow
# However, it is not really compatible with how boundary events work.
my_task.complete()
def _on_complete_hook(self, my_task):
super(BoundaryEvent, self)._on_complete_hook(my_task)
# Notify the boundary event parent as well.
my_task.parent.task_spec._child_complete_hook(my_task)
class EventBasedGateway(CatchingEvent):

View File

@ -46,24 +46,18 @@ class CatchingEvent(Simple, BpmnSpecMixin):
definition, at which point we can update our task's state.
"""
self.event_definition.catch(my_task, event_definition)
self._update_hook(my_task)
my_task._set_state(TaskState.WAITING)
def _update_hook(self, my_task):
if my_task.state == TaskState.WAITING and self.event_definition.has_fired(my_task):
my_task._ready()
super(CatchingEvent, self)._update_hook(my_task)
def _on_ready_hook(self, my_task):
# None events don't propagate, so as soon as we're ready, we fire our event
if isinstance(self.event_definition, NoneEventDefinition):
my_task._set_internal_data(event_fired=True)
# If we have not seen the event we're waiting for, enter the waiting state
if not self.event_definition.has_fired(my_task):
if self.event_definition.has_fired(my_task):
return True
else:
my_task._set_state(TaskState.WAITING)
super(CatchingEvent, self)._on_ready_hook(my_task)
def _on_complete_hook(self, my_task):

View File

@ -145,6 +145,9 @@ class BpmnWorkflow(Workflow):
for task in tasks:
task.task_spec.catch(task, event_definition)
# Move any tasks that received the message to READY
self.refresh_waiting_tasks()
# Figure out if we need to create an external message
if len(tasks) == 0 and isinstance(event_definition, MessageEventDefinition):
self.bpmn_messages.append(

View File

@ -0,0 +1,15 @@
from copy import deepcopy
from SpiffWorkflow.bpmn.serializer.workflow import DEFAULT_SPEC_CONFIG
from SpiffWorkflow.bpmn.serializer.task_spec import UserTaskConverter as DefaultUserTaskConverter
from SpiffWorkflow.bpmn.serializer.event_definition import MessageEventDefinitionConverter as DefaultMessageEventConverter
from .task_spec import UserTaskConverter
from .event_definition import MessageEventDefinitionConverter
CAMUNDA_SPEC_CONFIG = deepcopy(DEFAULT_SPEC_CONFIG)
CAMUNDA_SPEC_CONFIG['task_specs'].remove(DefaultUserTaskConverter)
CAMUNDA_SPEC_CONFIG['task_specs'].append(UserTaskConverter)
CAMUNDA_SPEC_CONFIG['event_definitions'].remove(DefaultMessageEventConverter)
CAMUNDA_SPEC_CONFIG['event_definitions'].append(MessageEventDefinitionConverter)
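A brief, hedged example of wiring this configuration into the serializer; the import path of this config module is an assumption for illustration, since the file header is not shown here.
from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer
from SpiffWorkflow.camunda.serializer.config import CAMUNDA_SPEC_CONFIG  # module path assumed
spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter(spec_config=CAMUNDA_SPEC_CONFIG)
serializer = BpmnWorkflowSerializer(spec_converter)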

View File

@ -0,0 +1,20 @@
from SpiffWorkflow.bpmn.serializer.helpers.spec import EventDefinitionConverter
from ..specs.events.event_definitions import MessageEventDefinition
class MessageEventDefinitionConverter(EventDefinitionConverter):
def __init__(self, registry):
super().__init__(MessageEventDefinition, registry)
def to_dict(self, event_definition):
dct = super().to_dict(event_definition)
dct['correlation_properties'] = self.correlation_properties_to_dict(event_definition.correlation_properties)
dct['payload'] = event_definition.payload
dct['result_var'] = event_definition.result_var
return dct
def from_dict(self, dct):
dct['correlation_properties'] = self.correlation_properties_from_dict(dct['correlation_properties'])
event_definition = super().from_dict(dct)
return event_definition

View File

@ -0,0 +1,34 @@
from ...bpmn.serializer.helpers.spec import TaskSpecConverter
from ..specs.UserTask import UserTask, Form
class UserTaskConverter(TaskSpecConverter):
def __init__(self, registry):
super().__init__(UserTask, registry)
def to_dict(self, spec):
dct = self.get_default_attributes(spec)
dct.update(self.get_bpmn_attributes(spec))
dct['form'] = self.form_to_dict(spec.form)
return dct
def from_dict(self, dct):
dct['form'] = Form(init=dct['form'])
return self.task_spec_from_dict(dct)
def form_to_dict(self, form):
dct = {'key': form.key, 'fields': []}
for field in form.fields:
new = {
'id': field.id,
'default_value': field.default_value,
'label': field.label,
'type': field.type,
'properties': [ prop.__dict__ for prop in field.properties ],
'validation': [ val.__dict__ for val in field.validation ],
}
if field.type == "enum":
new['options'] = [ opt.__dict__ for opt in field.options ]
dct['fields'].append(new)
return dct

View File

@ -1,90 +0,0 @@
from functools import partial
from SpiffWorkflow.bpmn.specs.events.StartEvent import StartEvent
from SpiffWorkflow.bpmn.specs.events.EndEvent import EndEvent
from SpiffWorkflow.bpmn.specs.events.IntermediateEvent import IntermediateThrowEvent, IntermediateCatchEvent, BoundaryEvent
from ..specs.events.event_definitions import MessageEventDefinition
from ...bpmn.serializer.bpmn_converters import BpmnTaskSpecConverter
from ..specs.UserTask import UserTask, Form
class CamundaEventConverter(BpmnTaskSpecConverter):
def __init__(self, spec_class, data_converter, typename):
super().__init__(spec_class, data_converter, typename)
self.register(
MessageEventDefinition,
self.event_definition_to_dict,
partial(self.event_defintion_from_dict, MessageEventDefinition)
)
def to_dict(self, spec):
dct = self.get_default_attributes(spec)
dct.update(self.get_bpmn_attributes(spec))
if isinstance(spec, BoundaryEvent):
dct['cancel_activity'] = spec.cancel_activity
dct['event_definition'] = self.convert(spec.event_definition)
return dct
def from_dict(self, dct):
dct['event_definition'] = self.restore(dct['event_definition'])
return self.task_spec_from_dict(dct)
def event_definition_to_dict(self, event_definition):
dct = super().event_definition_to_dict(event_definition)
if isinstance(event_definition, MessageEventDefinition):
dct['payload'] = event_definition.payload
dct['result_var'] = event_definition.result_var
return dct
class StartEventConverter(CamundaEventConverter):
def __init__(self, data_converter=None, typename=None):
super().__init__(StartEvent, data_converter, typename)
class EndEventConverter(CamundaEventConverter):
def __init__(self, data_converter=None, typename=None):
super().__init__(EndEvent, data_converter, typename)
class BoundaryEventConverter(CamundaEventConverter):
def __init__(self, data_converter=None, typename=None):
super().__init__(BoundaryEvent, data_converter, typename)
class IntermediateCatchEventConverter(CamundaEventConverter):
def __init__(self, data_converter=None, typename=None):
super().__init__(IntermediateCatchEvent, data_converter, typename)
class IntermediateThrowEventConverter(CamundaEventConverter):
def __init__(self, data_converter=None, typename=None):
super().__init__(IntermediateThrowEvent, data_converter, typename)
class UserTaskConverter(CamundaEventConverter):
def __init__(self, data_converter=None, typename=None):
super().__init__(UserTask, data_converter, typename)
def to_dict(self, spec):
dct = self.get_default_attributes(spec)
dct.update(self.get_bpmn_attributes(spec))
dct['form'] = self.form_to_dict(spec.form)
return dct
def from_dict(self, dct):
dct['form'] = Form(init=dct['form'])
return self.task_spec_from_dict(dct)
def form_to_dict(self, form):
dct = {'key': form.key, 'fields': []}
for field in form.fields:
new = {
'id': field.id,
'default_value': field.default_value,
'label': field.label,
'type': field.type,
'properties': [ prop.__dict__ for prop in field.properties ],
'validation': [ val.__dict__ for val in field.validation ],
}
if field.type == "enum":
new['options'] = [ opt.__dict__ for opt in field.options ]
dct['fields'].append(new)
return dct

View File

@ -74,11 +74,8 @@ class BpmnDmnParser(BpmnParser):
Add all filenames in the given list to the parser's set.
"""
for filename in filenames:
f = open(filename, 'r')
try:
with open(filename, 'r') as f:
self.add_dmn_xml(etree.parse(f).getroot(), filename=filename)
finally:
f.close()
def get_dependencies(self):
return self.process_dependencies.union(self.dmn_dependencies)

View File

@ -1,14 +1,14 @@
from ...bpmn.serializer.bpmn_converters import BpmnTaskSpecConverter
from ...bpmn.serializer.helpers.spec import TaskSpecConverter
from ..specs.BusinessRuleTask import BusinessRuleTask
from ..specs.model import DecisionTable, Rule, HitPolicy
from ..specs.model import Input, InputEntry, Output, OutputEntry
from ..engine.DMNEngine import DMNEngine
class BusinessRuleTaskConverter(BpmnTaskSpecConverter):
class BusinessRuleTaskConverter(TaskSpecConverter):
def __init__(self, data_converter=None, typename=None):
super().__init__(BusinessRuleTask, data_converter, typename)
def __init__(self, registry):
super().__init__(BusinessRuleTask, registry)
def to_dict(self, spec):
dct = self.get_default_attributes(spec)

View File

@ -51,9 +51,9 @@ class AcquireMutex(TaskSpec):
mutex = my_task.workflow._get_mutex(self.mutex)
if mutex.testandset():
self.entered_event.emit(my_task.workflow, my_task)
my_task._ready()
return
my_task._set_state(TaskState.WAITING)
return True
else:
my_task._set_state(TaskState.WAITING)
def serialize(self, serializer):
return serializer.serialize_acquire_mutex(self)

View File

@ -248,8 +248,8 @@ class Celery(TaskSpec):
if not self._start(my_task):
if not my_task._has_state(TaskState.WAITING):
my_task._set_state(TaskState.WAITING)
return
super(Celery, self)._update_hook(my_task)
else:
return True
def serialize(self, serializer):
return serializer.serialize_celery(self)

View File

@ -73,8 +73,8 @@ class Execute(TaskSpec):
def _update_hook(self, my_task):
if not self._start(my_task):
my_task._set_state(TaskState.WAITING)
return
super(Execute, self)._update_hook(my_task)
else:
return super(Execute, self)._update_hook(my_task)
def serialize(self, serializer):
return serializer.serialize_execute(self)

View File

@ -60,7 +60,7 @@ class Gate(TaskSpec):
if not task._has_state(TaskState.COMPLETED):
my_task._set_state(TaskState.WAITING)
return
super(Gate, self)._update_hook(my_task)
return True
def serialize(self, serializer):
return serializer.serialize_gate(self)

View File

@ -218,24 +218,16 @@ class Join(TaskSpec):
def _update_hook(self, my_task):
# Check whether enough incoming branches have completed.
may_fire, waiting_tasks = self._start(my_task)
if not may_fire:
if may_fire:
# If this is a cancelling join, cancel all incoming branches except for the one that just completed.
if self.cancel_remaining:
for task in waiting_tasks:
task.cancel()
# Update the state of our child objects.
self._do_join(my_task)
else:
my_task._set_state(TaskState.WAITING)
return
# If this is a cancelling join, cancel all incoming branches,
# except for the one that just completed.
if self.cancel_remaining:
for task in waiting_tasks:
task.cancel()
# We do NOT set the task state to COMPLETED, because in
# case all other incoming tasks get cancelled (or never reach
# the Join for other reasons, such as reaching a stub branch),
# we need to revisit it.
my_task._ready()
# Update the state of our child objects.
self._do_join(my_task)
def _do_join(self, my_task):

View File

@ -127,7 +127,7 @@ class SubWorkflow(TaskSpec):
subworkflow = my_task._get_internal_data('subworkflow')
if subworkflow is None:
# On the first update, we have to create the subworkflow
super()._update_hook(my_task)
return True
elif subworkflow.is_completed():
# Then wait until it finishes to complete
my_task.complete()

View File

@ -55,7 +55,7 @@ class Transform(TaskSpec):
for transform in self.transforms:
logger.debug(f'Execute transform', extra=my_task.log_info({'transform': transform}))
exec(transform)
super(Transform, self)._update_hook(my_task)
return True
def serialize(self, serializer):
s_state = serializer.serialize_simple(self)

View File

@ -273,27 +273,18 @@ class TaskSpec(object):
completes it makes sure to call this method so we can react.
"""
my_task._inherit_data()
# We were doing this in _update_hook, but to me that seems inconsistent with the spirit
# of the hook functions. Moving it here allows removal of some repeated calls (overridden
# hook methods still need to do these things)
if my_task._is_predicted():
self._predict(my_task)
self.entered_event.emit(my_task.workflow, my_task)
self._update_hook(my_task)
if self._update_hook(my_task):
my_task._ready()
def _update_hook(self, my_task):
"""
Typically this method should perform the following actions::
- Update the state of the corresponding task.
- Update the predictions for its successors.
Returning non-False will cause the task to go into READY.
Returning any other value will cause no action.
This method should decide whether the task should run now or needs to wait.
Returning True will cause the task to go into READY.
"""
# If this actually did what the documentation said (returned a value indicating
# that the task was ready), then a lot of things might be easier.
my_task._ready()
return True
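To make the new contract concrete, a hypothetical task spec that relies on the return value: returning True lets `_update` move the task to READY, while returning nothing leaves its state alone (here, WAITING). `WaitForFlag` and the `flag_set` key are illustrative assumptions, not part of this change.
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.specs.base import TaskSpec
class WaitForFlag(TaskSpec):
    # Hypothetical spec: becomes READY only once 'flag_set' appears in the workflow data.
    def _update_hook(self, my_task):
        if my_task.workflow.data.get('flag_set'):
            return True
        my_task._set_state(TaskState.WAITING)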
def _on_ready(self, my_task):
"""
@ -390,7 +381,9 @@ class TaskSpec(object):
my_task.workflow.last_task = my_task
self._on_complete_hook(my_task)
for child in my_task.children:
child.task_spec._update(child)
# Don't like this, but this is the most expedient way of preventing cancelled tasks from reactivation
if child.state != TaskState.CANCELLED:
child.task_spec._update(child)
my_task.workflow._task_completed_notify(my_task)
self.completed_event.emit(my_task.workflow, my_task)

View File

@ -0,0 +1,65 @@
from copy import deepcopy
from SpiffWorkflow.bpmn.serializer.workflow import DEFAULT_SPEC_CONFIG
from SpiffWorkflow.bpmn.serializer.task_spec import (
SimpleTaskConverter,
StartTaskConverter,
EndJoinConverter,
LoopResetTaskConverter,
StartEventConverter,
EndEventConverter,
IntermediateCatchEventConverter,
IntermediateThrowEventConverter,
EventBasedGatewayConverter,
BoundaryEventConverter,
BoundaryEventParentConverter,
ParallelGatewayConverter,
ExclusiveGatewayConverter,
InclusiveGatewayConverter,
)
from .task_spec import (
NoneTaskConverter,
ManualTaskConverter,
UserTaskConverter,
SendTaskConverter,
ReceiveTaskConverter,
ScriptTaskConverter,
ServiceTaskConverter,
SubWorkflowTaskConverter,
TransactionSubprocessConverter,
CallActivityTaskConverter,
)
from SpiffWorkflow.bpmn.serializer.event_definition import MessageEventDefinitionConverter as DefaultMessageEventDefinitionConverter
from .event_definition import MessageEventDefinitionConverter
SPIFF_SPEC_CONFIG = deepcopy(DEFAULT_SPEC_CONFIG)
SPIFF_SPEC_CONFIG['task_specs'] = [
SimpleTaskConverter,
StartTaskConverter,
EndJoinConverter,
LoopResetTaskConverter,
StartEventConverter,
EndEventConverter,
IntermediateCatchEventConverter,
IntermediateThrowEventConverter,
EventBasedGatewayConverter,
BoundaryEventConverter,
BoundaryEventParentConverter,
ParallelGatewayConverter,
ExclusiveGatewayConverter,
InclusiveGatewayConverter,
NoneTaskConverter,
ManualTaskConverter,
UserTaskConverter,
SendTaskConverter,
ReceiveTaskConverter,
ScriptTaskConverter,
ServiceTaskConverter,
SubWorkflowTaskConverter,
TransactionSubprocessConverter,
CallActivityTaskConverter,
]
SPIFF_SPEC_CONFIG['event_definitions'].remove(DefaultMessageEventDefinitionConverter)
SPIFF_SPEC_CONFIG['event_definitions'].append(MessageEventDefinitionConverter)

View File

@ -0,0 +1,20 @@
from SpiffWorkflow.bpmn.serializer.helpers.spec import EventDefinitionConverter
from SpiffWorkflow.spiff.specs.events.event_definitions import MessageEventDefinition
class MessageEventDefinitionConverter(EventDefinitionConverter):
def __init__(self, registry):
super().__init__(MessageEventDefinition, registry)
def to_dict(self, event_definition):
dct = super().to_dict(event_definition)
dct['correlation_properties'] = self.correlation_properties_to_dict(event_definition.correlation_properties)
dct['expression'] = event_definition.expression
dct['message_var'] = event_definition.message_var
return dct
def from_dict(self, dct):
dct['correlation_properties'] = self.correlation_properties_from_dict(dct['correlation_properties'])
event_definition = super().from_dict(dct)
return event_definition

View File

@ -0,0 +1,115 @@
from SpiffWorkflow.bpmn.serializer.helpers.spec import TaskSpecConverter
from SpiffWorkflow.spiff.specs.none_task import NoneTask
from SpiffWorkflow.spiff.specs.manual_task import ManualTask
from SpiffWorkflow.spiff.specs.user_task import UserTask
from SpiffWorkflow.spiff.specs.script_task import ScriptTask
from SpiffWorkflow.spiff.specs.service_task import ServiceTask
from SpiffWorkflow.spiff.specs.subworkflow_task import SubWorkflowTask, TransactionSubprocess, CallActivity
from SpiffWorkflow.spiff.specs.events.event_types import SendTask, ReceiveTask
class SpiffBpmnTaskConverter(TaskSpecConverter):
def to_dict(self, spec):
dct = self.get_default_attributes(spec)
dct.update(self.get_bpmn_attributes(spec))
dct['prescript'] = spec.prescript
dct['postscript'] = spec.postscript
return dct
def from_dict(self, dct):
return self.task_spec_from_dict(dct)
class NoneTaskConverter(SpiffBpmnTaskConverter):
def __init__(self, registry):
super().__init__(NoneTask, registry)
class ManualTaskConverter(SpiffBpmnTaskConverter):
def __init__(self, registry):
super().__init__(ManualTask, registry)
class UserTaskConverter(SpiffBpmnTaskConverter):
def __init__(self, registry):
super().__init__(UserTask, registry)
class SendTaskConverter(SpiffBpmnTaskConverter):
def __init__(self, registry, typename=None):
super().__init__(SendTask, registry, typename)
def to_dict(self, spec):
dct = super().to_dict(spec)
dct['event_definition'] = self.registry.convert(spec.event_definition)
return dct
def from_dict(self, dct):
dct['event_definition'] = self.registry.restore(dct['event_definition'])
return super().from_dict(dct)
class ReceiveTaskConverter(SpiffBpmnTaskConverter):
def __init__(self, registry, typename=None):
super().__init__(ReceiveTask, registry, typename)
def to_dict(self, spec):
dct = super().to_dict(spec)
dct['event_definition'] = self.registry.convert(spec.event_definition)
return dct
def from_dict(self, dct):
dct['event_definition'] = self.registry.restore(dct['event_definition'])
return super().from_dict(dct)
class ScriptTaskConverter(SpiffBpmnTaskConverter):
def __init__(self, registry):
super().__init__(ScriptTask, registry)
def to_dict(self, spec):
dct = super().to_dict(spec)
dct['script'] = spec.script
return dct
class ServiceTaskConverter(SpiffBpmnTaskConverter):
def __init__(self, registry):
super().__init__(ServiceTask, registry)
def to_dict(self, spec):
dct = super().to_dict(spec)
dct['operation_name'] = spec.operation_name
dct['operation_params'] = spec.operation_params
dct['result_variable'] = spec.result_variable
return dct
def from_dict(self, dct):
return self.task_spec_from_dict(dct)
class SubprocessTaskConverter(SpiffBpmnTaskConverter):
def to_dict(self, spec):
dct = super().to_dict(spec)
dct.update(self.get_subworkflow_attributes(spec))
return dct
def from_dict(self, dct):
dct['subworkflow_spec'] = dct.pop('spec')
return super().task_spec_from_dict(dct)
class SubWorkflowTaskConverter(SubprocessTaskConverter):
def __init__(self, registry):
super().__init__(SubWorkflowTask, registry)
class TransactionSubprocessConverter(SubprocessTaskConverter):
def __init__(self, registry):
super().__init__(TransactionSubprocess, registry)
class CallActivityTaskConverter(SubprocessTaskConverter):
def __init__(self, registry):
super().__init__(CallActivity, registry)

View File

@ -1,170 +0,0 @@
from functools import partial
from SpiffWorkflow.bpmn.serializer.bpmn_converters import BpmnTaskSpecConverter
from SpiffWorkflow.bpmn.specs.events.StartEvent import StartEvent
from SpiffWorkflow.bpmn.specs.events.EndEvent import EndEvent
from SpiffWorkflow.bpmn.specs.events.IntermediateEvent import IntermediateThrowEvent, IntermediateCatchEvent, BoundaryEvent, EventBasedGateway
from SpiffWorkflow.spiff.specs.none_task import NoneTask
from SpiffWorkflow.spiff.specs.manual_task import ManualTask
from SpiffWorkflow.spiff.specs.user_task import UserTask
from SpiffWorkflow.spiff.specs.script_task import ScriptTask
from SpiffWorkflow.spiff.specs.service_task import ServiceTask
from SpiffWorkflow.spiff.specs.subworkflow_task import SubWorkflowTask, TransactionSubprocess, CallActivity
from SpiffWorkflow.spiff.specs.events.event_types import SendTask, ReceiveTask
from SpiffWorkflow.spiff.specs.events.event_definitions import MessageEventDefinition
class SpiffBpmnTaskConverter(BpmnTaskSpecConverter):
def to_dict(self, spec):
dct = self.get_default_attributes(spec)
dct.update(self.get_bpmn_attributes(spec))
dct['prescript'] = spec.prescript
dct['postscript'] = spec.postscript
return dct
def from_dict(self, dct):
return self.task_spec_from_dict(dct)
class NoneTaskConverter(SpiffBpmnTaskConverter):
def __init__(self, data_converter=None):
super().__init__(NoneTask, data_converter)
class ManualTaskConverter(SpiffBpmnTaskConverter):
def __init__(self, data_converter=None):
super().__init__(ManualTask, data_converter)
class UserTaskConverter(SpiffBpmnTaskConverter):
def __init__(self, data_converter=None):
super().__init__(UserTask, data_converter)
class ScriptTaskConverter(SpiffBpmnTaskConverter):
def __init__(self, data_converter=None):
super().__init__(ScriptTask, data_converter)
def to_dict(self, spec):
dct = super().to_dict(spec)
dct['script'] = spec.script
return dct
class ServiceTaskConverter(SpiffBpmnTaskConverter):
def __init__(self, data_converter=None):
super().__init__(ServiceTask, data_converter)
def to_dict(self, spec):
dct = super().to_dict(spec)
dct['operation_name'] = spec.operation_name
dct['operation_params'] = spec.operation_params
dct['result_variable'] = spec.result_variable
return dct
def from_dict(self, dct):
return self.task_spec_from_dict(dct)
class SubprocessTaskConverter(SpiffBpmnTaskConverter):
def to_dict(self, spec):
dct = super().to_dict(spec)
dct.update(self.get_subworkflow_attributes(spec))
return dct
def from_dict(self, dct):
dct['subworkflow_spec'] = dct.pop('spec')
return super().task_spec_from_dict(dct)
class SubWorkflowTaskConverter(SubprocessTaskConverter):
def __init__(self, data_converter=None):
super().__init__(SubWorkflowTask, data_converter)
class TransactionSubprocessConverter(SubprocessTaskConverter):
def __init__(self, data_converter=None):
super().__init__(TransactionSubprocess, data_converter)
class CallActivityTaskConverter(SubprocessTaskConverter):
def __init__(self, data_converter=None):
super().__init__(CallActivity, data_converter)
class SpiffEventConverter(BpmnTaskSpecConverter):
def __init__(self, spec_class, data_converter, typename):
super().__init__(spec_class, data_converter, typename)
self.register(
MessageEventDefinition,
self.event_definition_to_dict,
partial(self.event_defintion_from_dict, MessageEventDefinition)
)
def to_dict(self, spec):
dct = self.get_default_attributes(spec)
dct.update(self.get_bpmn_attributes(spec))
if isinstance(spec, BoundaryEvent):
dct['cancel_activity'] = spec.cancel_activity
dct['event_definition'] = self.convert(spec.event_definition)
return dct
def from_dict(self, dct):
dct['event_definition'] = self.restore(dct['event_definition'])
return self.task_spec_from_dict(dct)
def event_definition_to_dict(self, event_definition):
dct = super().event_definition_to_dict(event_definition)
if isinstance(event_definition, MessageEventDefinition):
dct['expression'] = event_definition.expression
dct['message_var'] = event_definition.message_var
return dct
class StartEventConverter(SpiffEventConverter):
def __init__(self, data_converter=None, typename=None):
super().__init__(StartEvent, data_converter, typename)
class EndEventConverter(SpiffEventConverter):
def __init__(self, data_converter=None, typename=None):
super().__init__(EndEvent, data_converter, typename)
class BoundaryEventConverter(SpiffEventConverter):
def __init__(self, data_converter=None, typename=None):
super().__init__(BoundaryEvent, data_converter, typename)
class IntermediateCatchEventConverter(SpiffEventConverter):
def __init__(self, data_converter=None, typename=None):
super().__init__(IntermediateCatchEvent, data_converter, typename)
class IntermediateThrowEventConverter(SpiffEventConverter):
def __init__(self, data_converter=None, typename=None):
super().__init__(IntermediateThrowEvent, data_converter, typename)
class SendTaskConverter(SpiffEventConverter):
def __init__(self, data_converter=None, typename=None):
super().__init__(SendTask, data_converter, typename)
def to_dict(self, spec):
dct = super().to_dict(spec)
dct['prescript'] = spec.prescript
dct['postscript'] = spec.postscript
return dct
class ReceiveTaskConverter(SpiffEventConverter):
def __init__(self, data_converter=None, typename=None):
super().__init__(ReceiveTask, data_converter, typename)
def to_dict(self, spec):
dct = super().to_dict(spec)
dct['prescript'] = spec.prescript
dct['postscript'] = spec.postscript
return dct
class EventBasedGatewayConverter(SpiffEventConverter):
def __init__(self, data_converter=None, typename=None):
super().__init__(EventBasedGateway, data_converter, typename)

View File

@ -403,14 +403,6 @@ class Task(object, metaclass=DeprecatedMetaTask):
def __iter__(self):
return Task.Iterator(self)
def __setstate__(self, dict):
self.__dict__.update(dict)
# If unpickled in the same Python process in which a workflow
# (Task) is built through the API, we need to make sure
# that there will not be any ID collisions.
if dict['thread_id'] >= self.__class__.thread_id_pool:
self.__class__.thread_id_pool = dict['thread_id']
def _get_root(self):
"""
Returns the top level parent.
@ -752,10 +744,9 @@ class Task(object, metaclass=DeprecatedMetaTask):
has changed (e.g. from FUTURE to COMPLETED.)
"""
self._set_state(TaskState.COMPLETED)
# WHY on earth do we mark the task completed and THEN attempt to execute it.
# A sane model would have success and failure states and instead we return
# a boolean, with no systematic way of dealing with failures. This is just
# crazy!
# I am taking back my previous comment about running the task after it's completed being "CRAZY".
# Turns out that tasks are in fact supposed to be complete at this point, and I've been wrong all along
# about when tasks should actually be executed.
start = time.time()
retval = self.task_spec._on_complete(self)
extra = self.log_info({

View File

@ -1,6 +1,6 @@
import unittest
from SpiffWorkflow.bpmn.PythonScriptEngine import Box
from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import Box
class BoxDeepCopyTest(unittest.TestCase):

View File

@ -7,7 +7,7 @@ from SpiffWorkflow.bpmn.parser.TaskParser import TaskParser
from SpiffWorkflow.bpmn.parser.task_parsers import ConditionalGatewayParser
from SpiffWorkflow.bpmn.parser.util import full_tag
from SpiffWorkflow.bpmn.serializer.bpmn_converters import BpmnTaskSpecConverter
from SpiffWorkflow.bpmn.serializer.helpers.spec import TaskSpecConverter
# Many of our tests relied on the Packager to set the calledElement attribute on
# Call Activities. I've moved that code to a customized parser.
@ -35,9 +35,6 @@ class TestUserTask(UserTask):
task.set_data(choice=choice)
task.complete()
@classmethod
def deserialize(self, serializer, wf_spec, s_state):
return serializer.deserialize_generic(wf_spec, s_state, TestUserTask)
class TestExclusiveGatewayParser(ConditionalGatewayParser):
@ -47,7 +44,7 @@ class TestExclusiveGatewayParser(ConditionalGatewayParser):
return cond
return "choice == '%s'" % sequence_flow_node.get('name', None)
class TestUserTaskConverter(BpmnTaskSpecConverter):
class TestUserTaskConverter(TaskSpecConverter):
def __init__(self, data_converter=None):
super().__init__(TestUserTask, data_converter)

View File

@ -7,13 +7,16 @@ from SpiffWorkflow.bpmn.parser.BpmnParser import BpmnValidator
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer
from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer, DEFAULT_SPEC_CONFIG
from SpiffWorkflow.bpmn.serializer.task_spec import UserTaskConverter
from .BpmnLoaderForTests import TestUserTaskConverter, TestBpmnParser
__author__ = 'matth'
DEFAULT_SPEC_CONFIG['task_specs'].append(TestUserTaskConverter)
wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter([TestUserTaskConverter])
wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter(spec_config=DEFAULT_SPEC_CONFIG)
class BpmnWorkflowTestCase(unittest.TestCase):

View File

@ -4,6 +4,7 @@ import unittest
from SpiffWorkflow.exceptions import WorkflowTaskException
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
@ -17,8 +18,8 @@ class CustomBpmnScriptEngine(PythonScriptEngine):
It will execute python code read in from the bpmn. It will also make any scripts in the
scripts directory available for execution. """
def __init__(self):
augment_methods = {'custom_function': my_custom_function}
super().__init__(scripting_additions=augment_methods)
environment = TaskDataEnvironment({'custom_function': my_custom_function})
super().__init__(environment=environment)
class CustomInlineScriptTest(BpmnWorkflowTestCase):

View File

@ -3,6 +3,7 @@
import unittest
from SpiffWorkflow.bpmn.FeelLikeScriptEngine import FeelLikeScriptEngine, FeelInterval
from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import BoxedTaskDataEnvironment
from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
import datetime
@ -12,7 +13,7 @@ __author__ = 'matth'
class FeelExpressionTest(BpmnWorkflowTestCase):
def setUp(self):
self.expressionEngine = FeelLikeScriptEngine()
self.expressionEngine = FeelLikeScriptEngine(environment=BoxedTaskDataEnvironment())
def testRunThroughExpressions(self):
tests = [("string length('abcd')", 4, {}),
@ -62,7 +63,7 @@ class FeelExpressionTest(BpmnWorkflowTestCase):
]
}
x = self.expressionEngine._evaluate(
"""sum([1 for x in exclusive if x.get('ExclusiveSpaceAMComputingID',None)==None])""",
"""sum([1 for x in exclusive if x.get('ExclusiveSpaceAMComputingID',None)==None])""",
data
)
self.assertEqual(x, 1)

View File

@ -1,47 +0,0 @@
# -*- coding: utf-8 -*-
import unittest
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
__author__ = 'kellym'
class NavListExclusiveGatewayTest(BpmnWorkflowTestCase):
"""The example bpmn diagram looks roughly like this, a gateway
that leads to two different end points
[Step 1] -> <x exclusive gateway x>
-> 'False' -> [Alternate End] -> END A
-> 'True' -> [Step 2] -> END B
"""
def setUp(self):
self.spec = self.load_workflow1_spec()
def load_workflow1_spec(self):
return self.load_workflow_spec('ExclusiveGatewayMultipleEndNavigation.bpmn','ExclusiveGatewayMultipleEndNavigation')
def testRunThroughHappy(self):
self.workflow = BpmnWorkflow(self.spec)
self.workflow.do_engine_steps()
nav_list = self.workflow.get_nav_list()
self.assertEqual(6, len(nav_list))
self.assertEqual("Step 1", nav_list[0]["description"])
self.assertEqual("GatewayToEnd", nav_list[1]["description"])
self.assertEqual("False", nav_list[2]["description"])
self.assertEqual("Step End", nav_list[3]["description"])
self.assertEqual("True", nav_list[4]["description"])
self.assertEqual("Step 2", nav_list[5]["description"])
self.assertEqual(0, nav_list[0]["indent"])
def suite():
return unittest.TestLoader().loadTestsFromTestCase(NavListExclusiveGatewayTest)
if __name__ == '__main__':
unittest.TextTestRunner(verbosity=2).run(suite())

View File

@ -0,0 +1,80 @@
import json
from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import BasePythonScriptEngineEnvironment
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.task import TaskState
def example_global():
pass
class NonTaskDataExampleEnvironment(BasePythonScriptEngineEnvironment):
def __init__(self, environment_globals, environment):
self.environment = environment
self.environment.update(environment_globals)
super().__init__(environment_globals)
def evaluate(self, expression, context, external_methods=None):
pass
def execute(self, script, context, external_methods=None):
self.environment.update(context)
self.environment.update(external_methods or {})
exec(script, self.environment)
self.environment = {k: v for k, v in self.environment.items() if k not in (external_methods or {})}
def user_defined_values(self):
return {k: v for k, v in self.environment.items() if k not in self.globals}
class PythonScriptEngineEnvironmentTest(BpmnWorkflowTestCase):
def setUp(self):
spec, subprocesses = self.load_workflow_spec('task_data_size.bpmn', 'Process_ccz6oq2')
self.workflow = BpmnWorkflow(spec, subprocesses)
def testTaskDataSizeWithDefaultPythonScriptEngine(self):
self.workflow.do_engine_steps()
self.assertIn("a", self.workflow.data)
self.assertIn("b", self.workflow.data)
self.assertIn("c", self.workflow.data)
self.assertIn("d", self.workflow.data)
task_data_len = self._get_task_data_len()
d_uniques = set(self.workflow.data["d"])
d_len = len(self.workflow.data["d"])
self.assertGreater(task_data_len, 15000)
self.assertEqual(d_len, 512*3)
self.assertEqual(d_uniques, {"a", "b", "c"})
def testTaskDataSizeWithNonTaskDataEnvironmentBasedPythonScriptEngine(self):
script_engine_environment = NonTaskDataExampleEnvironment({"example_global": example_global}, {})
script_engine = PythonScriptEngine(environment=script_engine_environment)
self.workflow.script_engine = script_engine
self.workflow.do_engine_steps()
self.workflow.data.update(script_engine.environment.user_defined_values())
self.assertIn("a", self.workflow.data)
self.assertIn("b", self.workflow.data)
self.assertIn("c", self.workflow.data)
self.assertIn("d", self.workflow.data)
self.assertNotIn("example_global", self.workflow.data)
task_data_len = self._get_task_data_len()
d_uniques = set(self.workflow.data["d"])
d_len = len(self.workflow.data["d"])
self.assertEqual(task_data_len, 2)
self.assertEqual(d_len, 512*3)
self.assertEqual(d_uniques, {"a", "b", "c"})
def _get_task_data_len(self):
tasks_to_check = self.workflow.get_tasks(TaskState.FINISHED_MASK)
task_data = [task.data for task in tasks_to_check]
task_data_to_check = list(filter(len, task_data))
task_data_len = len(json.dumps(task_data_to_check))
return task_data_len

View File

@ -4,6 +4,7 @@ import datetime
import unittest
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
@ -14,10 +15,10 @@ class CustomScriptEngine(PythonScriptEngine):
It will execute python code read in from the bpmn. It will also make any scripts in the
scripts directory available for execution. """
def __init__(self):
augment_methods = {
environment = TaskDataEnvironment({
'timedelta': datetime.timedelta,
}
super().__init__(scripting_additions=augment_methods)
})
super().__init__(environment=environment)
class TooManyLoopsTest(BpmnWorkflowTestCase):

View File

@ -1,746 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" id="Definitions_06pyjz2" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.7.3">
<bpmn:process id="ComplexNavigation" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_0kcrx5l</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_0kcrx5l" sourceRef="StartEvent_1" targetRef="Step1" />
<bpmn:userTask id="Step1" name="Step 1">
<bpmn:incoming>Flow_0kcrx5l</bpmn:incoming>
<bpmn:outgoing>Flow_1seuuie</bpmn:outgoing>
</bpmn:userTask>
<bpmn:userTask id="Activity_0obertf" name="Select RO Education Department" camunda:formKey="RO_EducationDept">
<bpmn:documentation>The Study's Responsible Organization is needed in order to confirm the Department Chair. If it is the same as the Primary Investigator's Primary Department show below, we have all the information needed to determine the Department Chair.
**Primary Investigator's Primary Appointment**
***School:*** {{ pi.E0.schoolName }}
***Department:*** {{ pi.E0.deptName }}</bpmn:documentation>
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="RO_StudyDeptEducation" label="PI&#39;s Study Responsible Organization Department" type="enum">
<camunda:properties>
<camunda:property id="spreadsheet.name" value="DepartmentList-Education.xlsx" />
<camunda:property id="spreadsheet.value.column" value="Value" />
<camunda:property id="spreadsheet.label.column" value="Label" />
<camunda:property id="description" value="Type key words to find Education Department" />
</camunda:properties>
</camunda:formField>
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>Flow_12obxbo</bpmn:incoming>
<bpmn:outgoing>Flow_1y4gjsg</bpmn:outgoing>
</bpmn:userTask>
<bpmn:scriptTask id="Activity_0vmy33u" name="Update RO Data">
<bpmn:incoming>Flow_02614fd</bpmn:incoming>
<bpmn:outgoing>Flow_0c4tt8e</bpmn:outgoing>
<bpmn:script>ro.chair = {}
ro.chair.uid = RO_Chair_CID
ro.chair.name_degree = RO_Chair_Name_Degree
ro.chair.title = RO_Chair_Title
ro.chair.sig_block = RO_Chair_Sig_Block</bpmn:script>
</bpmn:scriptTask>
<bpmn:exclusiveGateway id="Gateway_0ubqopr" name="PI&#39;s Primary School / Department same as study&#39;s Responsible Organization?" default="Flow_1ni06mz">
<bpmn:incoming>Flow_1seuuie</bpmn:incoming>
<bpmn:outgoing>Flow_1ni06mz</bpmn:outgoing>
<bpmn:outgoing>Flow_1y9edqt</bpmn:outgoing>
</bpmn:exclusiveGateway>
<bpmn:exclusiveGateway id="Gateway_13vtxns" default="Flow_1oriwwz">
<bpmn:incoming>Flow_1y9edqt</bpmn:incoming>
<bpmn:outgoing>Flow_1oriwwz</bpmn:outgoing>
<bpmn:outgoing>Flow_185jvp3</bpmn:outgoing>
</bpmn:exclusiveGateway>
<bpmn:scriptTask id="Activity_08ldcxm" name="Build School List">
<bpmn:incoming>Flow_185jvp3</bpmn:incoming>
<bpmn:outgoing>Flow_1dh8c45</bpmn:outgoing>
<bpmn:script>sch_enum = []
if pi.E0.schoolAbbrv != "MD":
sch_enum_md = [
{
"value": "MD",
"label": "Medicine"
},
]
else:
sch_enum_md = []
if pi.E0.schoolAbbrv != "AS":
sch_enum_as = [
{
"value": "AS",
"label": "Arts &amp; Science"
},
]
else:
sch_enum_as = []
if pi.E0.schoolAbbrv != "CU":
sch_enum_cu = [
{
"value": "CU",
"label": "Education"
},
]
else:
sch_enum_cu = []
if pi.E0.schoolAbbrv != "NR":
sch_enum_nr = [
{
"value": "NR",
"label": "Nursing"
},
]
else:
sch_enum_nr = []
sch_enum = sch_enum_md + sch_enum_as + sch_enum_cu + sch_enum_nr
del(sch_enum_md)
del(sch_enum_as)
del(sch_enum_cu)
del(sch_enum_nr)</bpmn:script>
</bpmn:scriptTask>
<bpmn:userTask id="Activity_08pywzy" name="Select RO School" camunda:formKey="RO_School">
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="RO_StudySchool" label="Select the Responsible Organization&#39;s School" type="enum">
<camunda:properties>
<camunda:property id="data.name" value="sch_enum" />
<camunda:property id="data.value.column" value="value" />
<camunda:property id="data.label.column" value="label" />
</camunda:properties>
</camunda:formField>
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>Flow_1dh8c45</bpmn:incoming>
<bpmn:outgoing>Flow_0mf9npl</bpmn:outgoing>
</bpmn:userTask>
<bpmn:exclusiveGateway id="Gateway_12qlux1" name="School Have Departments?" default="Flow_0nzochy">
<bpmn:incoming>Flow_1oriwwz</bpmn:incoming>
<bpmn:incoming>Flow_0nmpxmc</bpmn:incoming>
<bpmn:outgoing>Flow_12obxbo</bpmn:outgoing>
<bpmn:outgoing>Flow_03s8gvx</bpmn:outgoing>
<bpmn:outgoing>Flow_0nzochy</bpmn:outgoing>
<bpmn:outgoing>Flow_0h955ao</bpmn:outgoing>
</bpmn:exclusiveGateway>
<bpmn:exclusiveGateway id="Gateway_02h27h5">
<bpmn:incoming>Flow_1y4gjsg</bpmn:incoming>
<bpmn:incoming>Flow_0lnb8jw</bpmn:incoming>
<bpmn:incoming>Flow_1fqtd41</bpmn:incoming>
<bpmn:outgoing>Flow_0a626ba</bpmn:outgoing>
</bpmn:exclusiveGateway>
<bpmn:scriptTask id="Activity_0nkgcfg" name="Reset RO Department">
<bpmn:incoming>Flow_0a626ba</bpmn:incoming>
<bpmn:outgoing>Flow_0ssrpqx</bpmn:outgoing>
<bpmn:script>if PIsPrimaryDepartmentSameAsRO.value == "diffSchool":
ro.schoolName = RO_StudySchool.label
ro.schoolAbbrv = RO_StudySchool.value
if PIsPrimaryDepartmentSameAsRO.value != "yes":
if ro.schoolAbbrv == "MD":
ro.deptName = RO_StudyDeptMedicine.label
ro.deptAbbrv = RO_StudyDeptMedicine.value
elif ro.schoolAbbrv == "AS":
ro.deptName = RO_StudyDeptArtsSciences.label
ro.deptAbbrv = RO_StudyDeptArtsSciences.value
elif ro.schoolAbbrv == "CU":
ro.deptName = RO_StudyDeptEducation.label
ro.deptAbbrv = RO_StudyDeptEducation.value
else:
ro.deptName = ""
ro.deptAbbrv = ""</bpmn:script>
</bpmn:scriptTask>
<bpmn:userTask id="Activity_16q24p2" name="Select RO Medicine Department" camunda:formKey="RO_MedicineDept">
<bpmn:documentation>The Study's Responsible Organization is needed in order to confirm the Department Chair. If it is the same as the Primary Investigator's Primary Department show below, we have all the information needed to determine the Department Chair.
**Primary Investigator's Primary Appointment**
***School:*** {{ pi.E0.schoolName }}
***Department:*** {{ pi.E0.deptName }}</bpmn:documentation>
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="RO_StudyDeptMedicine" label="PI&#39;s Study Responsible Organization Department" type="enum">
<camunda:properties>
<camunda:property id="spreadsheet.name" value="DepartmentList-Medicine.xlsx" />
<camunda:property id="spreadsheet.value.column" value="Value" />
<camunda:property id="spreadsheet.label.column" value="Label" />
<camunda:property id="description" value="Type key words to find Medicine Department" />
</camunda:properties>
</camunda:formField>
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>Flow_0nzochy</bpmn:incoming>
<bpmn:outgoing>Flow_0lnb8jw</bpmn:outgoing>
</bpmn:userTask>
<bpmn:userTask id="Activity_0nv1s23" name="Select RO A&#38;S Department" camunda:formKey="RO_AandS_Dept">
<bpmn:documentation>The Study's Responsible Organization is needed in order to confirm the Department Chair. If it is the same as the Primary Investigator's Primary Department show below, we have all the information needed to determine the Department Chair.
**Primary Investigator's Primary Appointment**
***School:*** {{ pi.E0.schoolName }}
***Department:*** {{ pi.E0.deptName }}</bpmn:documentation>
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="RO_StudyDeptArtsSciences" label="PI&#39;s Study Responsible Organization Department" type="enum">
<camunda:properties>
<camunda:property id="spreadsheet.name" value="DepartmentList-ArtsSciences.xlsx" />
<camunda:property id="spreadsheet.value.column" value="Value" />
<camunda:property id="spreadsheet.label.column" value="Label" />
<camunda:property id="description" value="Type key words to find A&#38;S Department" />
</camunda:properties>
</camunda:formField>
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>Flow_0h955ao</bpmn:incoming>
<bpmn:outgoing>Flow_1fqtd41</bpmn:outgoing>
</bpmn:userTask>
<bpmn:scriptTask id="Activity_0xa6vms" name="Reset RO School ">
<bpmn:incoming>Flow_0mf9npl</bpmn:incoming>
<bpmn:outgoing>Flow_0nmpxmc</bpmn:outgoing>
<bpmn:script>ro.schoolName = RO_StudySchool.label
ro.schoolAbbrv = RO_StudySchool.value</bpmn:script>
</bpmn:scriptTask>
<bpmn:exclusiveGateway id="Gateway_12sb0pk">
<bpmn:incoming>Flow_03s8gvx</bpmn:incoming>
<bpmn:incoming>Flow_0ssrpqx</bpmn:incoming>
<bpmn:outgoing>Flow_0tnnt3b</bpmn:outgoing>
</bpmn:exclusiveGateway>
<bpmn:sequenceFlow id="Flow_12obxbo" name="Education" sourceRef="Gateway_12qlux1" targetRef="Activity_0obertf">
<bpmn:conditionExpression xsi:type="bpmn:tFormalExpression">ro.schoolAbbrv == "CU"</bpmn:conditionExpression>
</bpmn:sequenceFlow>
<bpmn:sequenceFlow id="Flow_1y4gjsg" sourceRef="Activity_0obertf" targetRef="Gateway_02h27h5" />
<bpmn:sequenceFlow id="Flow_1ni06mz" sourceRef="Gateway_0ubqopr" targetRef="Activity_0whcncc" />
<bpmn:sequenceFlow id="Flow_0tnnt3b" sourceRef="Gateway_12sb0pk" targetRef="Activity_0whcncc" />
<bpmn:sequenceFlow id="Flow_02614fd" sourceRef="Activity_0whcncc" targetRef="Activity_0vmy33u" />
<bpmn:sequenceFlow id="Flow_1y9edqt" name="No" sourceRef="Gateway_0ubqopr" targetRef="Gateway_13vtxns">
<bpmn:conditionExpression xsi:type="bpmn:tFormalExpression">PIsPrimaryDepartmentSameAsRO.value != "yes"</bpmn:conditionExpression>
</bpmn:sequenceFlow>
<bpmn:sequenceFlow id="Flow_1oriwwz" name="Yes" sourceRef="Gateway_13vtxns" targetRef="Gateway_12qlux1" />
<bpmn:sequenceFlow id="Flow_185jvp3" name="No" sourceRef="Gateway_13vtxns" targetRef="Activity_08ldcxm">
<bpmn:conditionExpression xsi:type="bpmn:tFormalExpression">PIsPrimaryDepartmentSameAsRO.value == 'diffSchool'</bpmn:conditionExpression>
</bpmn:sequenceFlow>
<bpmn:sequenceFlow id="Flow_1dh8c45" sourceRef="Activity_08ldcxm" targetRef="Activity_08pywzy" />
<bpmn:sequenceFlow id="Flow_0mf9npl" sourceRef="Activity_08pywzy" targetRef="Activity_0xa6vms" />
<bpmn:sequenceFlow id="Flow_0nmpxmc" sourceRef="Activity_0xa6vms" targetRef="Gateway_12qlux1" />
<bpmn:sequenceFlow id="Flow_03s8gvx" name="No" sourceRef="Gateway_12qlux1" targetRef="Gateway_12sb0pk">
<bpmn:conditionExpression xsi:type="bpmn:tFormalExpression">ro.schoolAbbrv not in ["MD", "AS", "CU"]</bpmn:conditionExpression>
</bpmn:sequenceFlow>
<bpmn:sequenceFlow id="Flow_0nzochy" name="Medicine" sourceRef="Gateway_12qlux1" targetRef="Activity_16q24p2" />
<bpmn:sequenceFlow id="Flow_0h955ao" name="A&#38;S" sourceRef="Gateway_12qlux1" targetRef="Activity_0nv1s23">
<bpmn:conditionExpression xsi:type="bpmn:tFormalExpression">ro.schoolAbbrv == "AS"</bpmn:conditionExpression>
</bpmn:sequenceFlow>
<bpmn:sequenceFlow id="Flow_0lnb8jw" sourceRef="Activity_16q24p2" targetRef="Gateway_02h27h5" />
<bpmn:sequenceFlow id="Flow_1fqtd41" sourceRef="Activity_0nv1s23" targetRef="Gateway_02h27h5" />
<bpmn:sequenceFlow id="Flow_0a626ba" sourceRef="Gateway_02h27h5" targetRef="Activity_0nkgcfg" />
<bpmn:sequenceFlow id="Flow_0ssrpqx" sourceRef="Activity_0nkgcfg" targetRef="Gateway_12sb0pk" />
<bpmn:sequenceFlow id="Flow_1seuuie" sourceRef="Step1" targetRef="Gateway_0ubqopr" />
<bpmn:userTask id="Activity_0whcncc" name="Determine RO Chair">
<bpmn:incoming>Flow_1ni06mz</bpmn:incoming>
<bpmn:incoming>Flow_0tnnt3b</bpmn:incoming>
<bpmn:outgoing>Flow_02614fd</bpmn:outgoing>
</bpmn:userTask>
<bpmn:endEvent id="Event_0l7thbn" name="End">
<bpmn:documentation>temp</bpmn:documentation>
<bpmn:incoming>Flow_15xpsq8</bpmn:incoming>
<bpmn:incoming>Flow_1g7q28p</bpmn:incoming>
</bpmn:endEvent>
<bpmn:exclusiveGateway id="Gateway_0ym1uow" name="How many Primary Coordinators?" default="Flow_0ygr7cu">
<bpmn:incoming>Flow_0cqbu1f</bpmn:incoming>
<bpmn:incoming>Flow_1d4sb3d</bpmn:incoming>
<bpmn:outgoing>Flow_12oux1f</bpmn:outgoing>
<bpmn:outgoing>Flow_0ygr7cu</bpmn:outgoing>
</bpmn:exclusiveGateway>
<bpmn:userTask id="Activity_0l7vq1i" name="Update Primary Coordinator Info" camunda:formKey="SC_AccessEmails">
<bpmn:documentation>The following Primary Coordinators were entered in Protocol Builder:
{%+ for key, value in pcs.items() %}{{value.display_name}} ({{key}}){% if loop.index is lt cnt_pcs %}, {% endif %}{% endfor %}
To Save the current settings for all Primary Coordinators, select Save All.
Otherwise, edit each Coordinator as necessary and select the Save button for each.
### Please provide supplemental information for:
#### {{ pc.display_name }}
##### Title: {{ pc.title }}
##### Department: {{ pc.department }}
##### Affiliation: {{ pc.affiliation }}</bpmn:documentation>
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="pc.access" label="Should this Coordinator have full editing access in the system?" type="boolean" defaultValue="true" />
<camunda:formField id="pc.emails" label="Should this Coordinator receive automated email notifications?" type="boolean" defaultValue="true" />
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>Flow_12oux1f</bpmn:incoming>
<bpmn:outgoing>Flow_1ik148z</bpmn:outgoing>
<bpmn:multiInstanceLoopCharacteristics camunda:collection="pcs" camunda:elementVariable="pc" />
</bpmn:userTask>
<bpmn:exclusiveGateway id="Gateway_1fhu0gj" name="PI is Dept Chair?" default="Flow_05g7d16">
<bpmn:incoming>Flow_0c4tt8e</bpmn:incoming>
<bpmn:outgoing>Flow_05g7d16</bpmn:outgoing>
<bpmn:outgoing>Flow_13zasb1</bpmn:outgoing>
</bpmn:exclusiveGateway>
<bpmn:manualTask id="Activity_17ikhsk" name="Show PI is Dept Chair">
<bpmn:documentation>The PI is also the RO Chair</bpmn:documentation>
<bpmn:incoming>Flow_13zasb1</bpmn:incoming>
<bpmn:outgoing>Flow_0cqbu1f</bpmn:outgoing>
</bpmn:manualTask>
<bpmn:exclusiveGateway id="Gateway_1ykz8u2" name="How many Sub-Investigators?" default="Flow_0pdoc38">
<bpmn:incoming>Flow_0efu6u1</bpmn:incoming>
<bpmn:incoming>Flow_0a3fjzp</bpmn:incoming>
<bpmn:outgoing>Flow_0ljn2v6</bpmn:outgoing>
<bpmn:outgoing>Flow_0pdoc38</bpmn:outgoing>
</bpmn:exclusiveGateway>
<bpmn:userTask id="Activity_0rcrs1i" name="Update Sub-Investigator Info" camunda:formKey="SI_AccessEmails">
<bpmn:documentation>The following Sub-Investigators were entered in Protocol Builder:
{%+ for key, value in subs.items() %}{{value.display_name}} ({{key}}){% if loop.index is lt cnt_subs %}, {% endif %}{% endfor %}
To Save the current settings for all Sub-Investigators, select Save All.
Otherwise, edit each Sub-Investigator as necessary and select the Save button for each.
### Please provide supplemental information for:
#### {{ sub.display_name }}
##### Title: {{ sub.title }}
##### Department: {{ sub.department }}
##### Affiliation: {{ sub.affiliation }}</bpmn:documentation>
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="sub.access" label="Should this Sub-Investigator have full editing access in the system?" type="boolean" defaultValue="false" />
<camunda:formField id="sub.emails" label="Should this Sub-Investigator receive automated email notifications?" type="boolean" defaultValue="false" />
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>Flow_0ljn2v6</bpmn:incoming>
<bpmn:outgoing>Flow_07vu2b0</bpmn:outgoing>
<bpmn:multiInstanceLoopCharacteristics camunda:collection="subs" camunda:elementVariable="sub" />
</bpmn:userTask>
<bpmn:exclusiveGateway id="Gateway_1h4d4n5" name="How many Additional Coordinators?" default="Flow_0a3fjzp">
<bpmn:incoming>Flow_1ik148z</bpmn:incoming>
<bpmn:incoming>Flow_0ygr7cu</bpmn:incoming>
<bpmn:outgoing>Flow_0a3fjzp</bpmn:outgoing>
<bpmn:outgoing>Flow_0rstqv5</bpmn:outgoing>
</bpmn:exclusiveGateway>
<bpmn:userTask id="Activity_0tbvw9o" name="Update Additional Coordinator Info">
<bpmn:documentation>The following Additional Coordinators were entered in Protocol Builder:
{%+ for key, value in acs.items() %}{{value.display_name}} ({{key}}){% if loop.index is lt cnt_acs %}, {% endif %}{% endfor %}
To Save the current settings for all Additional Coordinators, select Save All.
Otherwise, edit each Coordinator as necessary and select the Save button for each.
### Please provide supplemental information for:
#### {{ ac.display_name }}
##### Title: {{ ac.title }}
##### Department: {{ ac.department }}
##### Affiliation: {{ ac.affiliation }}</bpmn:documentation>
<bpmn:incoming>Flow_0rstqv5</bpmn:incoming>
<bpmn:outgoing>Flow_0efu6u1</bpmn:outgoing>
<bpmn:multiInstanceLoopCharacteristics camunda:collection="acs" camunda:elementVariable="ac" />
</bpmn:userTask>
<bpmn:exclusiveGateway id="Gateway_0gjk91e" name="How many Additional Personnel? " default="Flow_1g7q28p">
<bpmn:incoming>Flow_0pdoc38</bpmn:incoming>
<bpmn:incoming>Flow_07vu2b0</bpmn:incoming>
<bpmn:outgoing>Flow_1g7q28p</bpmn:outgoing>
<bpmn:outgoing>Flow_0qti1ms</bpmn:outgoing>
</bpmn:exclusiveGateway>
<bpmn:userTask id="Activity_1uzsp1r" name="Update Additional Personnel Info" camunda:formKey="AP_AccessEmails">
<bpmn:documentation>The following Additional Personnel were entered in Protocol Builder:
{%+ for key, value in aps.items() %}{{value.display_name}} ({{key}}){% if loop.index is lt cnt_aps %}, {% endif %}{% endfor %}
To Save the current settings for all Additional Personnel, select Save All.
Otherwise, edit each Additional Personnel as necessary and select the Save button for each.
### Please provide supplemental information for:
#### {{ ap.display_name }}
##### Title: {{ ap.title }}
##### Department: {{ ap.department }}
##### Affiliation: {{ ap.affiliation }}</bpmn:documentation>
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="ap.access" label="Should this Additional Personnel have full editing access in the system?" type="boolean" />
<camunda:formField id="FormField_27dit3u" label="Should this Additional Personnel receive automated email notifications?" />
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>Flow_0qti1ms</bpmn:incoming>
<bpmn:outgoing>Flow_15xpsq8</bpmn:outgoing>
<bpmn:multiInstanceLoopCharacteristics camunda:collection="aps" camunda:elementVariable="ap" />
</bpmn:userTask>
<bpmn:userTask id="Activity_0otiy71" name="Update Chair Info" camunda:formKey="RO_Chair_Info">
<bpmn:documentation>***Name &amp; Degree:*** {{ RO_Chair_Name_Degree }}
***School:*** {{ RO_School }}
***Department:*** {{ RO_Department }}
***Title:*** {{ RO_Chair_Title }}
***Email:*** {{ RO_Chair_CID }}
{% if RO_Chair_CID != dc.uid %}
*Does not match the Department Chair specified in Protocol Builder, {{ dc.display_name }}*
{% endif %}</bpmn:documentation>
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="RO_ChairAccess" label="Should the Department Chair have full editing access in the system?" type="boolean" defaultValue="false" />
<camunda:formField id="RO_ChairEmails" label="Should the Department Chair receive automated email notifications?" type="boolean" defaultValue="false" />
</camunda:formData>
<camunda:properties>
<camunda:property name="display_name" value="&#34;Responsible Organization&#39;s Chair Info&#34;" />
</camunda:properties>
</bpmn:extensionElements>
<bpmn:incoming>Flow_05g7d16</bpmn:incoming>
<bpmn:outgoing>Flow_1d4sb3d</bpmn:outgoing>
</bpmn:userTask>
<bpmn:sequenceFlow id="Flow_15xpsq8" sourceRef="Activity_1uzsp1r" targetRef="Event_0l7thbn" />
<bpmn:sequenceFlow id="Flow_1g7q28p" sourceRef="Gateway_0gjk91e" targetRef="Event_0l7thbn" />
<bpmn:sequenceFlow id="Flow_0cqbu1f" sourceRef="Activity_17ikhsk" targetRef="Gateway_0ym1uow" />
<bpmn:sequenceFlow id="Flow_1d4sb3d" sourceRef="Activity_0otiy71" targetRef="Gateway_0ym1uow" />
<bpmn:sequenceFlow id="Flow_12oux1f" name="1 or more" sourceRef="Gateway_0ym1uow" targetRef="Activity_0l7vq1i" />
<bpmn:sequenceFlow id="Flow_0ygr7cu" name="None" sourceRef="Gateway_0ym1uow" targetRef="Gateway_1h4d4n5" />
<bpmn:sequenceFlow id="Flow_1ik148z" sourceRef="Activity_0l7vq1i" targetRef="Gateway_1h4d4n5" />
<bpmn:sequenceFlow id="Flow_05g7d16" name="No" sourceRef="Gateway_1fhu0gj" targetRef="Activity_0otiy71" />
<bpmn:sequenceFlow id="Flow_13zasb1" name="Yes" sourceRef="Gateway_1fhu0gj" targetRef="Activity_17ikhsk">
<bpmn:conditionExpression xsi:type="bpmn:tFormalExpression">RO_Chair_CID == pi.uid</bpmn:conditionExpression>
</bpmn:sequenceFlow>
<bpmn:sequenceFlow id="Flow_0efu6u1" sourceRef="Activity_0tbvw9o" targetRef="Gateway_1ykz8u2" />
<bpmn:sequenceFlow id="Flow_0a3fjzp" name="None" sourceRef="Gateway_1h4d4n5" targetRef="Gateway_1ykz8u2" />
<bpmn:sequenceFlow id="Flow_0ljn2v6" name="1 or more" sourceRef="Gateway_1ykz8u2" targetRef="Activity_0rcrs1i" />
<bpmn:sequenceFlow id="Flow_0pdoc38" name="None" sourceRef="Gateway_1ykz8u2" targetRef="Gateway_0gjk91e" />
<bpmn:sequenceFlow id="Flow_07vu2b0" sourceRef="Activity_0rcrs1i" targetRef="Gateway_0gjk91e" />
<bpmn:sequenceFlow id="Flow_0rstqv5" name="1 or more" sourceRef="Gateway_1h4d4n5" targetRef="Activity_0tbvw9o" />
<bpmn:sequenceFlow id="Flow_0qti1ms" sourceRef="Gateway_0gjk91e" targetRef="Activity_1uzsp1r" />
<bpmn:sequenceFlow id="Flow_0c4tt8e" sourceRef="Activity_0vmy33u" targetRef="Gateway_1fhu0gj" />
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="ComplexNavigation">
<bpmndi:BPMNEdge id="Flow_0c4tt8e_di" bpmnElement="Flow_0c4tt8e">
<di:waypoint x="1070" y="300" />
<di:waypoint x="1118" y="300" />
<di:waypoint x="1118" y="290" />
<di:waypoint x="1165" y="290" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0qti1ms_di" bpmnElement="Flow_0qti1ms">
<di:waypoint x="2625" y="290" />
<di:waypoint x="2730" y="290" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0rstqv5_di" bpmnElement="Flow_0rstqv5">
<di:waypoint x="1925" y="290" />
<di:waypoint x="2040" y="290" />
<bpmndi:BPMNLabel>
<dc:Bounds x="1959" y="272" width="48" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_07vu2b0_di" bpmnElement="Flow_07vu2b0">
<di:waypoint x="2510" y="290" />
<di:waypoint x="2575" y="290" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0pdoc38_di" bpmnElement="Flow_0pdoc38">
<di:waypoint x="2280" y="315" />
<di:waypoint x="2280" y="390" />
<di:waypoint x="2600" y="390" />
<di:waypoint x="2600" y="315" />
<bpmndi:BPMNLabel>
<dc:Bounds x="2427" y="372" width="27" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0ljn2v6_di" bpmnElement="Flow_0ljn2v6">
<di:waypoint x="2305" y="290" />
<di:waypoint x="2410" y="290" />
<bpmndi:BPMNLabel>
<dc:Bounds x="2334" y="272" width="48" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0a3fjzp_di" bpmnElement="Flow_0a3fjzp">
<di:waypoint x="1900" y="265" />
<di:waypoint x="1900" y="180" />
<di:waypoint x="2280" y="180" />
<di:waypoint x="2280" y="265" />
<bpmndi:BPMNLabel>
<dc:Bounds x="2077" y="162" width="27" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0efu6u1_di" bpmnElement="Flow_0efu6u1">
<di:waypoint x="2140" y="290" />
<di:waypoint x="2255" y="290" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_13zasb1_di" bpmnElement="Flow_13zasb1">
<di:waypoint x="1190" y="265" />
<di:waypoint x="1190" y="160" />
<bpmndi:BPMNLabel>
<dc:Bounds x="1201" y="178" width="19" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_05g7d16_di" bpmnElement="Flow_05g7d16">
<di:waypoint x="1215" y="290" />
<di:waypoint x="1350" y="290" />
<bpmndi:BPMNLabel>
<dc:Bounds x="1260" y="272" width="15" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1ik148z_di" bpmnElement="Flow_1ik148z">
<di:waypoint x="1780" y="290" />
<di:waypoint x="1875" y="290" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0ygr7cu_di" bpmnElement="Flow_0ygr7cu">
<di:waypoint x="1540" y="315" />
<di:waypoint x="1540" y="400" />
<di:waypoint x="1900" y="400" />
<di:waypoint x="1900" y="315" />
<bpmndi:BPMNLabel>
<dc:Bounds x="1716" y="383" width="27" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_12oux1f_di" bpmnElement="Flow_12oux1f">
<di:waypoint x="1565" y="290" />
<di:waypoint x="1680" y="290" />
<bpmndi:BPMNLabel>
<dc:Bounds x="1593" y="273" width="48" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1d4sb3d_di" bpmnElement="Flow_1d4sb3d">
<di:waypoint x="1450" y="290" />
<di:waypoint x="1515" y="290" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0cqbu1f_di" bpmnElement="Flow_0cqbu1f">
<di:waypoint x="1240" y="120" />
<di:waypoint x="1540" y="120" />
<di:waypoint x="1540" y="260" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1g7q28p_di" bpmnElement="Flow_1g7q28p">
<di:waypoint x="2600" y="265" />
<di:waypoint x="2600" y="200" />
<di:waypoint x="2950" y="200" />
<di:waypoint x="2950" y="272" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_15xpsq8_di" bpmnElement="Flow_15xpsq8">
<di:waypoint x="2830" y="290" />
<di:waypoint x="2932" y="290" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1seuuie_di" bpmnElement="Flow_1seuuie">
<di:waypoint x="420" y="300" />
<di:waypoint x="525" y="300" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0ssrpqx_di" bpmnElement="Flow_0ssrpqx">
<di:waypoint x="840" y="470" />
<di:waypoint x="840" y="435" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0a626ba_di" bpmnElement="Flow_0a626ba">
<di:waypoint x="840" y="605" />
<di:waypoint x="840" y="550" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1fqtd41_di" bpmnElement="Flow_1fqtd41">
<di:waypoint x="980" y="690" />
<di:waypoint x="980" y="630" />
<di:waypoint x="865" y="630" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0lnb8jw_di" bpmnElement="Flow_0lnb8jw">
<di:waypoint x="840" y="690" />
<di:waypoint x="840" y="655" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0h955ao_di" bpmnElement="Flow_0h955ao">
<di:waypoint x="865" y="840" />
<di:waypoint x="980" y="840" />
<di:waypoint x="980" y="770" />
<bpmndi:BPMNLabel>
<dc:Bounds x="989" y="793" width="23" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0nzochy_di" bpmnElement="Flow_0nzochy">
<di:waypoint x="840" y="815" />
<di:waypoint x="840" y="770" />
<bpmndi:BPMNLabel>
<dc:Bounds x="847" y="793" width="45" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_03s8gvx_di" bpmnElement="Flow_03s8gvx">
<di:waypoint x="865" y="840" />
<di:waypoint x="1070" y="840" />
<di:waypoint x="1070" y="410" />
<di:waypoint x="865" y="410" />
<bpmndi:BPMNLabel>
<dc:Bounds x="1078" y="613" width="15" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0nmpxmc_di" bpmnElement="Flow_0nmpxmc">
<di:waypoint x="840" y="910" />
<di:waypoint x="840" y="865" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0mf9npl_di" bpmnElement="Flow_0mf9npl">
<di:waypoint x="840" y="1060" />
<di:waypoint x="840" y="990" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1dh8c45_di" bpmnElement="Flow_1dh8c45">
<di:waypoint x="600" y="1100" />
<di:waypoint x="790" y="1100" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_185jvp3_di" bpmnElement="Flow_185jvp3">
<di:waypoint x="550" y="865" />
<di:waypoint x="550" y="1060" />
<bpmndi:BPMNLabel>
<dc:Bounds x="522" y="943" width="15" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1oriwwz_di" bpmnElement="Flow_1oriwwz">
<di:waypoint x="575" y="840" />
<di:waypoint x="815" y="840" />
<bpmndi:BPMNLabel>
<dc:Bounds x="611" y="823" width="19" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1y9edqt_di" bpmnElement="Flow_1y9edqt">
<di:waypoint x="550" y="325" />
<di:waypoint x="550" y="815" />
<bpmndi:BPMNLabel>
<dc:Bounds x="522" y="701" width="15" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_02614fd_di" bpmnElement="Flow_02614fd">
<di:waypoint x="890" y="300" />
<di:waypoint x="970" y="300" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0tnnt3b_di" bpmnElement="Flow_0tnnt3b">
<di:waypoint x="840" y="385" />
<di:waypoint x="840" y="340" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1ni06mz_di" bpmnElement="Flow_1ni06mz">
<di:waypoint x="575" y="300" />
<di:waypoint x="790" y="300" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1y4gjsg_di" bpmnElement="Flow_1y4gjsg">
<di:waypoint x="700" y="690" />
<di:waypoint x="700" y="630" />
<di:waypoint x="815" y="630" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_12obxbo_di" bpmnElement="Flow_12obxbo">
<di:waypoint x="815" y="840" />
<di:waypoint x="700" y="840" />
<di:waypoint x="700" y="770" />
<bpmndi:BPMNLabel>
<dc:Bounds x="705" y="793" width="49" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0kcrx5l_di" bpmnElement="Flow_0kcrx5l">
<di:waypoint x="188" y="300" />
<di:waypoint x="320" y="300" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="152" y="282" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1j808ka_di" bpmnElement="Step1">
<dc:Bounds x="320" y="260" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0obertf_di" bpmnElement="Activity_0obertf">
<dc:Bounds x="650" y="690" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0vmy33u_di" bpmnElement="Activity_0vmy33u">
<dc:Bounds x="970" y="260" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Gateway_0ubqopr_di" bpmnElement="Gateway_0ubqopr" isMarkerVisible="true">
<dc:Bounds x="525" y="275" width="50" height="50" />
<bpmndi:BPMNLabel>
<dc:Bounds x="506" y="180" width="88" height="80" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Gateway_13vtxns_di" bpmnElement="Gateway_13vtxns" isMarkerVisible="true">
<dc:Bounds x="525" y="815" width="50" height="50" />
<bpmndi:BPMNLabel>
<dc:Bounds x="1014" y="423" width="72" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_08ldcxm_di" bpmnElement="Activity_08ldcxm">
<dc:Bounds x="500" y="1060" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_08pywzy_di" bpmnElement="Activity_08pywzy">
<dc:Bounds x="790" y="1060" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Gateway_12qlux1_di" bpmnElement="Gateway_12qlux1" isMarkerVisible="true">
<dc:Bounds x="815" y="815" width="50" height="50" />
<bpmndi:BPMNLabel>
<dc:Bounds x="855" y="856" width="70" height="27" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Gateway_02h27h5_di" bpmnElement="Gateway_02h27h5" isMarkerVisible="true">
<dc:Bounds x="815" y="605" width="50" height="50" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0nkgcfg_di" bpmnElement="Activity_0nkgcfg">
<dc:Bounds x="790" y="470" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_16q24p2_di" bpmnElement="Activity_16q24p2">
<dc:Bounds x="790" y="690" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0nv1s23_di" bpmnElement="Activity_0nv1s23">
<dc:Bounds x="930" y="690" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0xa6vms_di" bpmnElement="Activity_0xa6vms">
<dc:Bounds x="790" y="910" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Gateway_12sb0pk_di" bpmnElement="Gateway_12sb0pk" isMarkerVisible="true">
<dc:Bounds x="815" y="385" width="50" height="50" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1rtabpz_di" bpmnElement="Activity_0whcncc">
<dc:Bounds x="790" y="260" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0l7thbn_di" bpmnElement="Event_0l7thbn">
<dc:Bounds x="2932" y="272" width="36" height="36" />
<bpmndi:BPMNLabel>
<dc:Bounds x="2940" y="318" width="20" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Gateway_0ym1uow_di" bpmnElement="Gateway_0ym1uow" isMarkerVisible="true">
<dc:Bounds x="1515" y="265" width="50" height="50" />
<bpmndi:BPMNLabel>
<dc:Bounds x="1545" y="309" width="70" height="40" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0l7vq1i_di" bpmnElement="Activity_0l7vq1i">
<dc:Bounds x="1680" y="250" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Gateway_1fhu0gj_di" bpmnElement="Gateway_1fhu0gj" isMarkerVisible="true">
<dc:Bounds x="1165" y="265" width="50" height="50" />
<bpmndi:BPMNLabel>
<dc:Bounds x="1148" y="322" width="84" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_17ikhsk_di" bpmnElement="Activity_17ikhsk">
<dc:Bounds x="1140" y="80" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Gateway_1ykz8u2_di" bpmnElement="Gateway_1ykz8u2" isMarkerVisible="true">
<dc:Bounds x="2255" y="265" width="50" height="50" />
<bpmndi:BPMNLabel>
<dc:Bounds x="2300" y="315" width="79" height="27" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0rcrs1i_di" bpmnElement="Activity_0rcrs1i">
<dc:Bounds x="2410" y="250" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Gateway_1h4d4n5_di" bpmnElement="Gateway_1h4d4n5" isMarkerVisible="true">
<dc:Bounds x="1875" y="265" width="50" height="50" />
<bpmndi:BPMNLabel>
<dc:Bounds x="1915" y="309" width="70" height="40" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0tbvw9o_di" bpmnElement="Activity_0tbvw9o">
<dc:Bounds x="2040" y="250" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Gateway_0gjk91e_di" bpmnElement="Gateway_0gjk91e" isMarkerVisible="true">
<dc:Bounds x="2575" y="265" width="50" height="50" />
<bpmndi:BPMNLabel>
<dc:Bounds x="2622" y="309" width="56" height="40" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1uzsp1r_di" bpmnElement="Activity_1uzsp1r">
<dc:Bounds x="2730" y="250" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0otiy71_di" bpmnElement="Activity_0otiy71">
<dc:Bounds x="1350" y="250" width="100" height="80" />
</bpmndi:BPMNShape>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -1,143 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" id="Definitions_06pyjz2" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.7.3">
<bpmn:process id="ExclusiveGatewayMultipleEndNavigation" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_0kcrx5l</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:userTask id="Jabberwocky" name="Step 2" camunda:formKey="PI_Info">
<bpmn:documentation>##### Please confirm Primary Investigator entered in Protocol Builder is correct and if so, provide additional information:
### **{{ pi.display_name }}**
***Email:*** {{ pi.email_address }}
**Primary Appointment**
***School:*** {{ pi.E0.schoolName }}
***Department:*** {{ pi.E0.deptName }}
{% if is_me_pi %}
Since you are the person entering this information, you already have access and will receive all emails.
{% endif %}</bpmn:documentation>
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="pi.experience" label="Investigator&#39;s Experience" type="textarea">
<camunda:properties>
<camunda:property id="rows" value="5" />
</camunda:properties>
</camunda:formField>
<camunda:formField id="pi.access" label="Should the Principal Investigator have full editing access in the system?" type="boolean" defaultValue="True">
<camunda:properties>
<camunda:property id="hide_expression" value="is_cu_pi" />
</camunda:properties>
</camunda:formField>
<camunda:formField id="pi.emails" label="Should the Principal Investigator receive automated email notifications?" type="boolean" defaultValue="True">
<camunda:properties>
<camunda:property id="hide_expression" value="is_cu_pi" />
</camunda:properties>
</camunda:formField>
<camunda:formField id="PIsPrimaryDepartmentSameAsRO" label="Is the PI&#39;s Primary Department the same as the study&#39;s Responsible Organization?" type="enum" defaultValue="yes">
<camunda:properties>
<camunda:property id="enum_type" value="radio" />
</camunda:properties>
<camunda:value id="yes" name="Yes" />
<camunda:value id="diffDept" name="No, it is a different Department within the same School" />
<camunda:value id="diffSchool" name="No, it is a different School" />
</camunda:formField>
</camunda:formData>
<camunda:properties>
<camunda:property name="display_name" value="pi.label" />
</camunda:properties>
</bpmn:extensionElements>
<bpmn:incoming>Flow_147b9li</bpmn:incoming>
<bpmn:outgoing>Flow_0xnj2rp</bpmn:outgoing>
</bpmn:userTask>
<bpmn:sequenceFlow id="Flow_0kcrx5l" sourceRef="StartEvent_1" targetRef="Step1" />
<bpmn:sequenceFlow id="Flow_1dcsioh" sourceRef="Step1" targetRef="Gateway" />
<bpmn:exclusiveGateway id="Gateway" name="GatewayToEnd">
<bpmn:incoming>Flow_1dcsioh</bpmn:incoming>
<bpmn:outgoing>Flow_147b9li</bpmn:outgoing>
<bpmn:outgoing>Flow_00prawo</bpmn:outgoing>
</bpmn:exclusiveGateway>
<bpmn:sequenceFlow id="Flow_147b9li" name="True" sourceRef="Gateway" targetRef="Jabberwocky">
<bpmn:conditionExpression xsi:type="bpmn:tFormalExpression">tru</bpmn:conditionExpression>
</bpmn:sequenceFlow>
<bpmn:sequenceFlow id="Flow_00prawo" name="False" sourceRef="Gateway" targetRef="StepEnd">
<bpmn:conditionExpression xsi:type="bpmn:tFormalExpression">false</bpmn:conditionExpression>
</bpmn:sequenceFlow>
<bpmn:endEvent id="Event_0npjf2p">
<bpmn:incoming>Flow_16qr5jf</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_16qr5jf" sourceRef="StepEnd" targetRef="Event_0npjf2p" />
<bpmn:userTask id="Step1" name="Step 1">
<bpmn:incoming>Flow_0kcrx5l</bpmn:incoming>
<bpmn:outgoing>Flow_1dcsioh</bpmn:outgoing>
</bpmn:userTask>
<bpmn:userTask id="StepEnd" name="Step End">
<bpmn:documentation>No PI entered in PB</bpmn:documentation>
<bpmn:incoming>Flow_00prawo</bpmn:incoming>
<bpmn:outgoing>Flow_16qr5jf</bpmn:outgoing>
</bpmn:userTask>
<bpmn:endEvent id="Event_1d1c7ov">
<bpmn:incoming>Flow_0xnj2rp</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_0xnj2rp" sourceRef="Jabberwocky" targetRef="Event_1d1c7ov" />
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="ExclusiveGatewayMultipleEndNavigation">
<bpmndi:BPMNEdge id="Flow_16qr5jf_di" bpmnElement="Flow_16qr5jf">
<di:waypoint x="740" y="150" />
<di:waypoint x="822" y="150" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_00prawo_di" bpmnElement="Flow_00prawo">
<di:waypoint x="510" y="265" />
<di:waypoint x="510" y="150" />
<di:waypoint x="640" y="150" />
<bpmndi:BPMNLabel>
<dc:Bounds x="477" y="204" width="27" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_147b9li_di" bpmnElement="Flow_147b9li">
<di:waypoint x="535" y="290" />
<di:waypoint x="640" y="290" />
<bpmndi:BPMNLabel>
<dc:Bounds x="537" y="273" width="23" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1dcsioh_di" bpmnElement="Flow_1dcsioh">
<di:waypoint x="410" y="290" />
<di:waypoint x="485" y="290" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0kcrx5l_di" bpmnElement="Flow_0kcrx5l">
<di:waypoint x="188" y="290" />
<di:waypoint x="310" y="290" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0xnj2rp_di" bpmnElement="Flow_0xnj2rp">
<di:waypoint x="740" y="290" />
<di:waypoint x="822" y="290" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="152" y="272" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Gateway_0qzf1r3_di" bpmnElement="Gateway" isMarkerVisible="true">
<dc:Bounds x="485" y="265" width="50" height="50" />
<bpmndi:BPMNLabel>
<dc:Bounds x="475" y="322" width="75" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0npjf2p_di" bpmnElement="Event_0npjf2p">
<dc:Bounds x="822" y="132" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0d622qi_di" bpmnElement="Jabberwocky">
<dc:Bounds x="640" y="250" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1j808ka_di" bpmnElement="Step1">
<dc:Bounds x="310" y="250" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0efoaut_di" bpmnElement="StepEnd">
<dc:Bounds x="640" y="110" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_1d1c7ov_di" bpmnElement="Event_1d1c7ov">
<dc:Bounds x="822" y="272" width="36" height="36" />
</bpmndi:BPMNShape>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -1,39 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_1hbo0hp" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.7.0">
<bpmn:process id="CommonActivity" name="CommonActivity_a" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_0xpz6la</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:scriptTask id="Activity_0bt6ln9" name="print(&#39;complicated common task&#39;)">
<bpmn:incoming>Flow_0xpz6la</bpmn:incoming>
<bpmn:outgoing>Flow_03yam6h</bpmn:outgoing>
<bpmn:script>print('complicated common task')</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="Flow_0xpz6la" sourceRef="StartEvent_1" targetRef="Activity_0bt6ln9" />
<bpmn:endEvent id="Event_1m1s0k4">
<bpmn:incoming>Flow_03yam6h</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_03yam6h" sourceRef="Activity_0bt6ln9" targetRef="Event_1m1s0k4" />
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="CommonActivity">
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1mbn0zk_di" bpmnElement="Activity_0bt6ln9">
<dc:Bounds x="240" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_0xpz6la_di" bpmnElement="Flow_0xpz6la">
<di:waypoint x="215" y="117" />
<di:waypoint x="240" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Event_1m1s0k4_di" bpmnElement="Event_1m1s0k4">
<dc:Bounds x="372" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_03yam6h_di" bpmnElement="Flow_03yam6h">
<di:waypoint x="340" y="117" />
<di:waypoint x="372" y="117" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -1,336 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" id="Definitions_1oogn9j" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.7.3">
<bpmn:process id="rrt" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>SequenceFlow_05ja25w</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:manualTask id="ManualTask_1ofy9yz" name="Read RRP Instructions">
<bpmn:documentation>### UNIVERSITY OF VIRGINIA RESEARCH
#### Research Ramp-up Plan
As we plan for the resumption of on-grounds research, PIs are required to develop a Research Ramp-up Plan. Please use the ramp-up guidance provided to lay out your plan(s) to manage operations while prioritizing physical distancing, staggered work shifts to reduce group size, remote work, and other exposure-reducing measures.
Plans must be submitted to the Office of Research by Monday, May ?? for consideration in the first round of approvals. Plans will then be reviewed on a rolling basis going forward.
Instructions for Submitting:
1. Add a Request for each lab space you manage in a building. If your lab spans multiple rooms or floors in a single building, one request will be required for that lab. If your lab spans multiple buildings, one request for each building will be required for that lab. The primary reason for this differentiation is that in addition to obtaining approval to restart operations, this information will also be used after start-up to assist with any contact tracing that may be needed.
2. Select each Request added and step through each form presented, responding to all required and applicable fields. You may be presented with different questions if activities in each lab differ.
3. After all forms have been completed, you will be presented with the option to create your Research Recovery Plan in Word format. Download the document and review it. If you see any corrections that need to be made, return to the corresponding form and make the correction.
4. Once the generated Research Recovery Plan is finalized, use the web site to submit it to the Office of the Vice President for Research for review.
Please submit questions on the Research Support website.</bpmn:documentation>
<bpmn:incoming>SequenceFlow_05ja25w</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_0h50bp3</bpmn:outgoing>
</bpmn:manualTask>
<bpmn:userTask id="AddInfo" name="Enter Submission Info" camunda:formKey="Submission Info">
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="ComputingID" label="UVA Computing ID of Primary Investigator" type="string">
<camunda:properties>
<camunda:property id="placeholder" value="cdr9c" />
</camunda:properties>
<camunda:validation>
<camunda:constraint name="required" config="true" />
<camunda:constraint name="description" config="You may search by entering Compuingr ID or Last Name." />
</camunda:validation>
</camunda:formField>
<camunda:formField id="LabName" label="Lab Name" type="string">
<camunda:properties>
<camunda:property id="description" value="Enter the name of the lab." />
</camunda:properties>
<camunda:validation>
<camunda:constraint name="required" config="true" />
</camunda:validation>
</camunda:formField>
<camunda:formField id="Building" type="autocomplete">
<camunda:properties>
<camunda:property id="description" value="Select the building in which the lab is located." />
<camunda:property id="enum.options.file" value="BuildingList.xls" />
<camunda:property id="enum.options.value.column" value="Value" />
<camunda:property id="enum.options.label.column" value="Building Name" />
<camunda:property id="enum.options.lookup" value="True" />
</camunda:properties>
<camunda:validation>
<camunda:constraint name="Required" config="true" />
</camunda:validation>
</camunda:formField>
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>SequenceFlow_0h50bp3</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_0bqu7pp</bpmn:outgoing>
</bpmn:userTask>
<bpmn:sequenceFlow id="SequenceFlow_0h50bp3" sourceRef="ManualTask_1ofy9yz" targetRef="AddInfo" />
<bpmn:sequenceFlow id="SequenceFlow_05ja25w" sourceRef="StartEvent_1" targetRef="ManualTask_1ofy9yz" />
<bpmn:sequenceFlow id="SequenceFlow_0bqu7pp" sourceRef="AddInfo" targetRef="LabInfo" />
<bpmn:userTask id="LabInfo" name="Enter Lab Details" camunda:formKey="Lab Details">
<bpmn:documentation>### {{ LabName }}
#### Lab details
Your response to these questions will determine if you do or do not provide additional information regarding each topic later.</bpmn:documentation>
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="isHumanResearch" label="Human Research" type="boolean">
<camunda:properties>
<camunda:property id="description" value="Does this lab&#39;s research involve human subjects?" />
<camunda:property id="required" value="true" />
</camunda:properties>
<camunda:validation>
<camunda:constraint name="required" config="true" />
</camunda:validation>
</camunda:formField>
<camunda:formField id="isAnimalUse" label="Animal Use" type="boolean">
<camunda:properties>
<camunda:property id="description" value="Do you use animals in your work?" />
<camunda:property id="required" value="true" />
</camunda:properties>
</camunda:formField>
<camunda:formField id="isSharedLab" label="Shared Lab" type="boolean">
<camunda:properties>
<camunda:property id="description" value="Is your lab shared with another researcher?" />
<camunda:property id="required" value="true" />
</camunda:properties>
</camunda:formField>
<camunda:formField id="isSharedSpace" label="Shared Space" type="boolean">
<camunda:properties>
<camunda:property id="description" value="Do you use any shared spaces with other labs?" />
<camunda:property id="required" value="true" />
</camunda:properties>
</camunda:formField>
<camunda:formField id="isGrantSupport" label="Grant Support" type="boolean">
<camunda:properties>
<camunda:property id="required" value="true" />
<camunda:property id="description" value="Are any of the studies in your lab that will be restarted supported by grants?" />
</camunda:properties>
</camunda:formField>
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>SequenceFlow_0bqu7pp</bpmn:incoming>
<bpmn:outgoing>Flow_0scfmzc</bpmn:outgoing>
</bpmn:userTask>
<bpmn:endEvent id="EndEvent_09wp7av">
<bpmn:incoming>SequenceFlow_1qtrgbv</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_1e2qi9s" sourceRef="Activity_0rv3far" targetRef="Task_1cw2y6r" />
<bpmn:manualTask id="Activity_0rv3far" name="Review Plan">
<bpmn:documentation>Review the plan, make changes if needed, and continue if ready to submit.</bpmn:documentation>
<bpmn:incoming>Flow_1b6vbkk</bpmn:incoming>
<bpmn:outgoing>Flow_1e2qi9s</bpmn:outgoing>
</bpmn:manualTask>
<bpmn:sequenceFlow id="SequenceFlow_1qtrgbv" sourceRef="Task_1cw2y6r" targetRef="EndEvent_09wp7av" />
<bpmn:scriptTask id="Task_1cw2y6r" name="Generate RRP">
<bpmn:incoming>Flow_1e2qi9s</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_1qtrgbv</bpmn:outgoing>
<bpmn:script>CompleteTemplate ResearchRecoveryPlan.docx RESEARCH_RECOVERY</bpmn:script>
</bpmn:scriptTask>
<bpmn:userTask id="UserTask_0ww2o4i" name="Enter Animal Research Info" camunda:formKey="Animal Research">
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="AnimalTimeline" label="Animal Timeline" type="textarea">
<camunda:properties>
<camunda:property id="description" value="Please describe the timeline for animal colony regeneration and what needs you will have for services from the ASC, including routine and specialty services." />
<camunda:property id="rows" value="10" />
<camunda:property id="help" value="[EHS Lab Ramp up Checklist for Laboratories](https://research.virginia.edu/sites/vpr/files/2020-05/EHS.LabRampUpChecklistForLaboratories_0_0.pdf)\n#### Animal Care\n- Communicate with your vivarium manager prior to restarting animal research.\n- Confirm inventory of controlled substances and proper documentation." />
</camunda:properties>
</camunda:formField>
<camunda:formField id="AnimalOrder" label="Animal Order" type="enum">
<camunda:properties>
<camunda:property id="description" value="When will you need to order animals again? Give time frame relative to the date of completion of this form." />
</camunda:properties>
<camunda:value id="immediately" name="Immediately" />
<camunda:value id="weeks_1to2" name="1 to 2 weeks" />
<camunda:value id="weeks_3to4" name="3 to 4 weeks" />
<camunda:value id="weeks_5to8" name="5 to 8 weeks" />
<camunda:value id="weeks_more_than_8" name="More than 8 weeks" />
</camunda:formField>
<camunda:formField id="Animal Housing Access" label="Animal Housing Access" type="enum">
<camunda:properties>
<camunda:property id="description" value="When do you anticipate entering the animal housing facility? Give an estimated time frame based on the date of resumption of research activity." />
<camunda:property id="Property_1km3ge3" />
</camunda:properties>
<camunda:value id="ASAP" name="As soon as possible" />
<camunda:value id="weeks_1to2" name="1 to 2 weeks" />
<camunda:value id="weeks_3to4" name="3 to 4 weeks" />
<camunda:value id="weeks_5to8" name="5 to 8 weeks" />
<camunda:value id="weeks_more_than_8" name="More than 8 weeks" />
</camunda:formField>
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>Flow_0so3402</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_1yi9lig</bpmn:outgoing>
</bpmn:userTask>
<bpmn:exclusiveGateway id="Gateway_191l7i1" name="Are Animals Used?">
<bpmn:incoming>Flow_0scfmzc</bpmn:incoming>
<bpmn:outgoing>Flow_0so3402</bpmn:outgoing>
<bpmn:outgoing>Flow_0141rp3</bpmn:outgoing>
</bpmn:exclusiveGateway>
<bpmn:sequenceFlow id="Flow_0so3402" name="Yes" sourceRef="Gateway_191l7i1" targetRef="UserTask_0ww2o4i">
<bpmn:conditionExpression xsi:type="bpmn:tFormalExpression">isAnimalUse == True</bpmn:conditionExpression>
</bpmn:sequenceFlow>
<bpmn:userTask id="UserTask_1cx8349" name="Enter Grant Support" camunda:formKey="GrantSupport">
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="Grants" label="Grants" type="textarea">
<camunda:properties>
<camunda:property id="rows" value="10" />
<camunda:property id="description" value="What is the status of your grant support and timelines associated with each grant?" />
<camunda:property id="help" value="Example: NIH Award R01xxxxx; project period through mm/dd/yy: Brief notes on grant status if applicable" />
</camunda:properties>
</camunda:formField>
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>Flow_1121pfu</bpmn:incoming>
<bpmn:outgoing>SequenceFlow_1b4non2</bpmn:outgoing>
</bpmn:userTask>
<bpmn:exclusiveGateway id="Gateway_06s8ygl" name="Grant support?">
<bpmn:incoming>Flow_0141rp3</bpmn:incoming>
<bpmn:incoming>SequenceFlow_1yi9lig</bpmn:incoming>
<bpmn:outgoing>Flow_1121pfu</bpmn:outgoing>
<bpmn:outgoing>SequenceFlow_1wp5zmg</bpmn:outgoing>
</bpmn:exclusiveGateway>
<bpmn:sequenceFlow id="Flow_1121pfu" name="Yes" sourceRef="Gateway_06s8ygl" targetRef="UserTask_1cx8349">
<bpmn:conditionExpression xsi:type="bpmn:tFormalExpression">isGrantSupport == True</bpmn:conditionExpression>
</bpmn:sequenceFlow>
<bpmn:exclusiveGateway id="Gateway_01p9hbs">
<bpmn:incoming>SequenceFlow_1b4non2</bpmn:incoming>
<bpmn:incoming>SequenceFlow_1wp5zmg</bpmn:incoming>
<bpmn:outgoing>Flow_1b6vbkk</bpmn:outgoing>
</bpmn:exclusiveGateway>
<bpmn:sequenceFlow id="Flow_1b6vbkk" sourceRef="Gateway_01p9hbs" targetRef="Activity_0rv3far" />
<bpmn:sequenceFlow id="SequenceFlow_1b4non2" sourceRef="UserTask_1cx8349" targetRef="Gateway_01p9hbs" />
<bpmn:sequenceFlow id="SequenceFlow_1wp5zmg" name="No" sourceRef="Gateway_06s8ygl" targetRef="Gateway_01p9hbs">
<bpmn:conditionExpression xsi:type="bpmn:tFormalExpression">isGrantSupport == False</bpmn:conditionExpression>
</bpmn:sequenceFlow>
<bpmn:sequenceFlow id="Flow_0scfmzc" sourceRef="LabInfo" targetRef="Gateway_191l7i1" />
<bpmn:sequenceFlow id="Flow_0141rp3" name="No" sourceRef="Gateway_191l7i1" targetRef="Gateway_06s8ygl">
<bpmn:conditionExpression xsi:type="bpmn:tFormalExpression">isAnimalUse == False</bpmn:conditionExpression>
</bpmn:sequenceFlow>
<bpmn:sequenceFlow id="SequenceFlow_1yi9lig" sourceRef="UserTask_0ww2o4i" targetRef="Gateway_06s8ygl" />
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="rrt">
<bpmndi:BPMNEdge id="SequenceFlow_0bqu7pp_di" bpmnElement="SequenceFlow_0bqu7pp">
<di:waypoint x="520" y="187" />
<di:waypoint x="580" y="187" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="SequenceFlow_05ja25w_di" bpmnElement="SequenceFlow_05ja25w">
<di:waypoint x="188" y="187" />
<di:waypoint x="260" y="187" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="SequenceFlow_0h50bp3_di" bpmnElement="SequenceFlow_0h50bp3">
<di:waypoint x="360" y="187" />
<di:waypoint x="420" y="187" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="SequenceFlow_1qtrgbv_di" bpmnElement="SequenceFlow_1qtrgbv">
<di:waypoint x="1710" y="187" />
<di:waypoint x="1762" y="187" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1b6vbkk_di" bpmnElement="Flow_1b6vbkk">
<di:waypoint x="1385" y="187" />
<di:waypoint x="1460" y="187" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1e2qi9s_di" bpmnElement="Flow_1e2qi9s">
<di:waypoint x="1560" y="187" />
<di:waypoint x="1610" y="187" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="SequenceFlow_1yi9lig_di" bpmnElement="SequenceFlow_1yi9lig">
<di:waypoint x="990" y="187" />
<di:waypoint x="1075" y="187" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0141rp3_di" bpmnElement="Flow_0141rp3">
<di:waypoint x="800" y="212" />
<di:waypoint x="800" y="280" />
<di:waypoint x="1100" y="280" />
<di:waypoint x="1100" y="212" />
<bpmndi:BPMNLabel>
<dc:Bounds x="936" y="263" width="15" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0so3402_di" bpmnElement="Flow_0so3402">
<di:waypoint x="825" y="187" />
<di:waypoint x="890" y="187" />
<bpmndi:BPMNLabel>
<dc:Bounds x="849" y="169" width="19" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1121pfu_di" bpmnElement="Flow_1121pfu">
<di:waypoint x="1125" y="187" />
<di:waypoint x="1190" y="187" />
<bpmndi:BPMNLabel>
<dc:Bounds x="1149" y="169" width="19" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="SequenceFlow_1b4non2_di" bpmnElement="SequenceFlow_1b4non2">
<di:waypoint x="1290" y="187" />
<di:waypoint x="1335" y="187" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="SequenceFlow_1wp5zmg_di" bpmnElement="SequenceFlow_1wp5zmg">
<di:waypoint x="1100" y="212" />
<di:waypoint x="1100" y="280" />
<di:waypoint x="1360" y="280" />
<di:waypoint x="1360" y="212" />
<bpmndi:BPMNLabel>
<dc:Bounds x="1192" y="262" width="15" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0scfmzc_di" bpmnElement="Flow_0scfmzc">
<di:waypoint x="680" y="187" />
<di:waypoint x="775" y="187" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="152" y="169" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="ManualTask_1ofy9yz_di" bpmnElement="ManualTask_1ofy9yz">
<dc:Bounds x="260" y="147" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="UserTask_0xdpoxl_di" bpmnElement="AddInfo">
<dc:Bounds x="420" y="147" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="UserTask_0nu5cww_di" bpmnElement="LabInfo">
<dc:Bounds x="580" y="147" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Gateway_191l7i1_di" bpmnElement="Gateway_191l7i1" isMarkerVisible="true">
<dc:Bounds x="775" y="162" width="50" height="50" />
<bpmndi:BPMNLabel>
<dc:Bounds x="770" y="125" width="60" height="27" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="UserTask_0ww2o4i_di" bpmnElement="UserTask_0ww2o4i">
<dc:Bounds x="890" y="147" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Gateway_06s8ygl_di" bpmnElement="Gateway_06s8ygl" isMarkerVisible="true">
<dc:Bounds x="1075" y="162" width="50" height="50" />
<bpmndi:BPMNLabel>
<dc:Bounds x="1063" y="133" width="74" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="UserTask_1cx8349_di" bpmnElement="UserTask_1cx8349">
<dc:Bounds x="1190" y="147" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1mg5lp9_di" bpmnElement="Activity_0rv3far">
<dc:Bounds x="1460" y="147" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="EndEvent_09wp7av_di" bpmnElement="EndEvent_09wp7av">
<dc:Bounds x="1762" y="169" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Gateway_01p9hbs_di" bpmnElement="Gateway_01p9hbs" isMarkerVisible="true">
<dc:Bounds x="1335" y="162" width="50" height="50" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="ScriptTask_0gacq8q_di" bpmnElement="Task_1cw2y6r">
<dc:Bounds x="1610" y="147" width="100" height="80" />
</bpmndi:BPMNShape>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -142,7 +142,7 @@
"typename":"SequenceFlow"
}
},
"typename":"TestUserTask",
"typename":"UserTask",
"extensions":{}
},
"sid-C014B4B9-889F-4EE9-9949-C89502C35CF0":{
@ -697,7 +697,7 @@
"typename":"SequenceFlow"
}
},
"typename":"TestUserTask",
"typename":"UserTask",
"extensions":{}
},
"sid-2EDAD784-7F15-486C-B805-D26EE25F8087":{
@ -906,7 +906,7 @@
"typename":"SequenceFlow"
}
},
"typename":"TestUserTask",
"typename":"UserTask",
"extensions":{}
},
"sid-BC014079-199F-4720-95CD-244B0ACB6DE1":{

View File

@ -0,0 +1,81 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_96f6665" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.0.0-dev">
<bpmn:process id="Process_ccz6oq2" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_177wrsb</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_177wrsb" sourceRef="StartEvent_1" targetRef="Activity_0tbghnr" />
<bpmn:sequenceFlow id="Flow_0eductu" sourceRef="Activity_0tbghnr" targetRef="Activity_1b4i250" />
<bpmn:endEvent id="Event_1ncs6fv">
<bpmn:incoming>Flow_0hkxb5e</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_1xryi5d" sourceRef="Activity_1b4i250" targetRef="Activity_0k6ipvz" />
<bpmn:scriptTask id="Activity_0tbghnr" name="512 a">
<bpmn:incoming>Flow_177wrsb</bpmn:incoming>
<bpmn:outgoing>Flow_0eductu</bpmn:outgoing>
<bpmn:script>a="a"*512</bpmn:script>
</bpmn:scriptTask>
<bpmn:scriptTask id="Activity_1b4i250" name="512 b">
<bpmn:incoming>Flow_0eductu</bpmn:incoming>
<bpmn:outgoing>Flow_1xryi5d</bpmn:outgoing>
<bpmn:script>b="b"*512</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="Flow_1of7r00" sourceRef="Activity_0k6ipvz" targetRef="Activity_1i88z55" />
<bpmn:scriptTask id="Activity_0k6ipvz" name="512 c">
<bpmn:incoming>Flow_1xryi5d</bpmn:incoming>
<bpmn:outgoing>Flow_1of7r00</bpmn:outgoing>
<bpmn:script>c="c"*512</bpmn:script>
</bpmn:scriptTask>
<bpmn:sequenceFlow id="Flow_0hkxb5e" sourceRef="Activity_1i88z55" targetRef="Event_1ncs6fv" />
<bpmn:scriptTask id="Activity_1i88z55" name="a+b+c">
<bpmn:incoming>Flow_1of7r00</bpmn:incoming>
<bpmn:outgoing>Flow_0hkxb5e</bpmn:outgoing>
<bpmn:script>d=a+b+c</bpmn:script>
</bpmn:scriptTask>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="Process_ccz6oq2">
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="202" y="42" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1kxr618_di" bpmnElement="Activity_0tbghnr">
<dc:Bounds x="170" y="110" width="100" height="80" />
<bpmndi:BPMNLabel />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0wp0pql_di" bpmnElement="Activity_1b4i250">
<dc:Bounds x="170" y="220" width="100" height="80" />
<bpmndi:BPMNLabel />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0975bxc_di" bpmnElement="Activity_0k6ipvz">
<dc:Bounds x="170" y="330" width="100" height="80" />
<bpmndi:BPMNLabel />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_1ncs6fv_di" bpmnElement="Event_1ncs6fv">
<dc:Bounds x="202" y="562" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1w8fetn_di" bpmnElement="Activity_1i88z55">
<dc:Bounds x="170" y="440" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_177wrsb_di" bpmnElement="Flow_177wrsb">
<di:waypoint x="220" y="78" />
<di:waypoint x="220" y="110" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0eductu_di" bpmnElement="Flow_0eductu">
<di:waypoint x="220" y="190" />
<di:waypoint x="220" y="220" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1xryi5d_di" bpmnElement="Flow_1xryi5d">
<di:waypoint x="220" y="300" />
<di:waypoint x="220" y="330" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1of7r00_di" bpmnElement="Flow_1of7r00">
<di:waypoint x="220" y="410" />
<di:waypoint x="220" y="440" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0hkxb5e_di" bpmnElement="Flow_0hkxb5e">
<di:waypoint x="220" y="520" />
<di:waypoint x="220" y="562" />
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -1,77 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" xmlns:modeler="http://camunda.org/schema/modeler/1.0" id="Definitions_1l7iuxt" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="4.10.0" modeler:executionPlatform="Camunda Platform" modeler:executionPlatformVersion="7.15.0">
<bpmn:process id="test_timer_end_event" name="test_timer_end_event" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_164sojd</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:task id="user_task" name="User Task">
<bpmn:incoming>Flow_1m2vq4v</bpmn:incoming>
<bpmn:outgoing>Flow_04tuv5z</bpmn:outgoing>
</bpmn:task>
<bpmn:sequenceFlow id="Flow_164sojd" sourceRef="StartEvent_1" targetRef="Activity_0dzjjk3" />
<bpmn:boundaryEvent id="Event_0y4hbl0" cancelActivity="false" attachedToRef="user_task">
<bpmn:outgoing>Flow_0ac4lx5</bpmn:outgoing>
<bpmn:timerEventDefinition id="TimerEventDefinition_1w16uhl">
<bpmn:timeDuration xsi:type="bpmn:tFormalExpression">timedelta(milliseconds=2)</bpmn:timeDuration>
</bpmn:timerEventDefinition>
</bpmn:boundaryEvent>
<bpmn:sequenceFlow id="Flow_0ac4lx5" sourceRef="Event_0y4hbl0" targetRef="set_variable" />
<bpmn:scriptTask id="set_variable" name="update timer_called">
<bpmn:incoming>Flow_0ac4lx5</bpmn:incoming>
<bpmn:script>timer_called = True</bpmn:script>
</bpmn:scriptTask>
<bpmn:endEvent id="final_end_event" name="End Event">
<bpmn:documentation>Some docs</bpmn:documentation>
<bpmn:incoming>Flow_04tuv5z</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_04tuv5z" sourceRef="user_task" targetRef="final_end_event" />
<bpmn:sequenceFlow id="Flow_1m2vq4v" sourceRef="Activity_0dzjjk3" targetRef="user_task" />
<bpmn:scriptTask id="Activity_0dzjjk3" name="Set timer_called">
<bpmn:incoming>Flow_164sojd</bpmn:incoming>
<bpmn:outgoing>Flow_1m2vq4v</bpmn:outgoing>
<bpmn:script>timer_called = False</bpmn:script>
</bpmn:scriptTask>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="test_timer_end_event">
<bpmndi:BPMNEdge id="Flow_164sojd_di" bpmnElement="Flow_164sojd">
<di:waypoint x="188" y="117" />
<di:waypoint x="220" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0ac4lx5_di" bpmnElement="Flow_0ac4lx5">
<di:waypoint x="420" y="175" />
<di:waypoint x="420" y="240" />
<di:waypoint x="490" y="240" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_04tuv5z_di" bpmnElement="Flow_04tuv5z">
<di:waypoint x="460" y="117" />
<di:waypoint x="542" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1m2vq4v_di" bpmnElement="Flow_1m2vq4v">
<di:waypoint x="320" y="117" />
<di:waypoint x="360" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Activity_0lhf45l_di" bpmnElement="user_task">
<dc:Bounds x="360" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1oduoqz_di" bpmnElement="set_variable">
<dc:Bounds x="490" y="200" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0olfqht_di" bpmnElement="final_end_event">
<dc:Bounds x="542" y="99" width="36" height="36" />
<bpmndi:BPMNLabel>
<dc:Bounds x="535" y="142" width="51" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="152" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_1mx687n_di" bpmnElement="Activity_0dzjjk3">
<dc:Bounds x="220" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0w4k4ro_di" bpmnElement="Event_0y4hbl0">
<dc:Bounds x="402" y="139" width="36" height="36" />
</bpmndi:BPMNShape>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -2,6 +2,7 @@ from datetime import timedelta
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment
from SpiffWorkflow.bpmn.specs.events.event_definitions import MessageEventDefinition
from SpiffWorkflow.task import TaskState
@ -11,7 +12,7 @@ class EventBsedGatewayTest(BpmnWorkflowTestCase):
def setUp(self):
self.spec, self.subprocesses = self.load_workflow_spec('event-gateway.bpmn', 'Process_0pvx19v')
self.script_engine = PythonScriptEngine(default_globals={"timedelta": timedelta})
self.script_engine = PythonScriptEngine(environment=TaskDataEnvironment({"timedelta": timedelta}))
self.workflow = BpmnWorkflow(self.spec, script_engine=self.script_engine)
def testEventBasedGateway(self):
@ -29,8 +30,8 @@ class EventBsedGatewayTest(BpmnWorkflowTestCase):
self.workflow.script_engine = self.script_engine
self.assertEqual(len(waiting_tasks), 1)
self.workflow.catch(MessageEventDefinition('message_1'))
self.workflow.refresh_waiting_tasks()
self.workflow.do_engine_steps()
self.workflow.refresh_waiting_tasks()
self.assertEqual(self.workflow.is_completed(), True)
self.assertEqual(self.workflow.get_tasks_from_spec_name('message_1_event')[0].state, TaskState.COMPLETED)
self.assertEqual(self.workflow.get_tasks_from_spec_name('message_2_event')[0].state, TaskState.CANCELLED)
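The hunks above, and the similar test changes that follow, all replace the older default_globals / scripting_additions keyword arguments with an explicit environment object. A minimal sketch of the new calling convention, using only classes already imported in these tests (variable names are illustrative, not part of the change):

from datetime import timedelta

from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment

# Helpers that scripts and expressions may call (here timedelta, as in the timer tests)
# are exposed through the environment rather than passed directly to the engine.
script_engine = PythonScriptEngine(environment=TaskDataEnvironment({"timedelta": timedelta}))

# The engine is then handed to the workflow as before, e.g.
# BpmnWorkflow(spec, script_engine=script_engine), with spec coming from a parser (not shown).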

View File

@ -5,6 +5,7 @@ import unittest
import time
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
@ -24,11 +25,11 @@ class CustomScriptEngine(PythonScriptEngine):
It will execute python code read in from the bpmn. It will also make any scripts in the
scripts directory available for execution. """
def __init__(self):
augment_methods = {
environment = TaskDataEnvironment({
'custom_function': my_custom_function,
'timedelta': datetime.timedelta,
}
super().__init__(scripting_additions=augment_methods)
})
super().__init__(environment=environment)
class TimerCycleStartTest(BpmnWorkflowTestCase):

View File

@ -5,6 +5,7 @@ import unittest
import time
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
@ -22,11 +23,11 @@ class CustomScriptEngine(PythonScriptEngine):
It will execute python code read in from the bpmn. It will also make any scripts in the
scripts directory available for execution. """
def __init__(self):
augment_methods = {
environment = TaskDataEnvironment({
'custom_function': my_custom_function,
'timedelta': datetime.timedelta,
}
super().__init__(scripting_additions=augment_methods)
})
super().__init__(environment=environment)

View File

@ -6,6 +6,7 @@ import time
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment
from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
__author__ = 'kellym'
@ -14,10 +15,10 @@ __author__ = 'kellym'
class TimerDateTest(BpmnWorkflowTestCase):
def setUp(self):
self.script_engine = PythonScriptEngine(default_globals={
self.script_engine = PythonScriptEngine(environment=TaskDataEnvironment({
"datetime": datetime.datetime,
"timedelta": datetime.timedelta,
})
}))
self.spec, self.subprocesses = self.load_workflow_spec('timer-date-start.bpmn', 'date_timer')
self.workflow = BpmnWorkflow(self.spec, self.subprocesses, script_engine=self.script_engine)

View File

@ -6,6 +6,7 @@ from datetime import timedelta
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment
from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
__author__ = 'kellym'
@ -13,7 +14,7 @@ __author__ = 'kellym'
class TimerDurationTest(BpmnWorkflowTestCase):
def setUp(self):
self.script_engine = PythonScriptEngine(default_globals={"timedelta": timedelta})
self.script_engine = PythonScriptEngine(environment=TaskDataEnvironment({"timedelta": timedelta}))
self.spec, self.subprocesses = self.load_workflow_spec('boundary_timer_on_task.bpmn', 'test_timer')
self.workflow = BpmnWorkflow(self.spec, self.subprocesses, script_engine=self.script_engine)

View File

@ -5,6 +5,7 @@ import time
from datetime import datetime, timedelta
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment
from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
__author__ = 'kellym'
@ -13,7 +14,7 @@ __author__ = 'kellym'
class TimerDurationTest(BpmnWorkflowTestCase):
def setUp(self):
self.script_engine = PythonScriptEngine(default_globals={"timedelta": timedelta})
self.script_engine = PythonScriptEngine(environment=TaskDataEnvironment({"timedelta": timedelta}))
self.spec, self.subprocesses = self.load_workflow_spec('timer.bpmn', 'timer')
self.workflow = BpmnWorkflow(self.spec, self.subprocesses, script_engine=self.script_engine)

View File

@ -4,7 +4,6 @@ import os
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.bpmn.parser.BpmnParser import BpmnParser
from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer
from tests.SpiffWorkflow.bpmn.BpmnLoaderForTests import TestUserTaskConverter
class BaseTestCase(unittest.TestCase):
@ -21,7 +20,7 @@ class BaseTestCase(unittest.TestCase):
def setUp(self):
super(BaseTestCase, self).setUp()
wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter([TestUserTaskConverter])
wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter()
self.serializer = BpmnWorkflowSerializer(wf_spec_converter, version=self.SERIALIZER_VERSION)
spec, subprocesses = self.load_workflow_spec('random_fact.bpmn', 'random_fact')
self.workflow = BpmnWorkflow(spec, subprocesses)

View File

@ -5,7 +5,6 @@ import json
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from tests.SpiffWorkflow.bpmn.BpmnLoaderForTests import TestUserTaskConverter
from .BaseTestCase import BaseTestCase
@ -71,7 +70,7 @@ class BpmnWorkflowSerializerTest(BaseTestCase):
try:
self.assertRaises(TypeError, self.serializer.serialize_json, self.workflow)
wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter([TestUserTaskConverter])
wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter()
custom_serializer = BpmnWorkflowSerializer(wf_spec_converter, version=self.SERIALIZER_VERSION,json_encoder_cls=MyJsonEncoder, json_decoder_cls=MyJsonDecoder)
serialized_workflow = custom_serializer.serialize_json(self.workflow)
finally:

View File

@ -3,9 +3,11 @@ import time
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment
from .BaseTestCase import BaseTestCase
class VersionMigrationTest(BaseTestCase):
SERIALIZER_VERSION = "1.2"
@ -24,7 +26,7 @@ class VersionMigrationTest(BaseTestCase):
def test_convert_1_1_to_1_2(self):
fn = os.path.join(self.DATA_DIR, 'serialization', 'v1-1.json')
wf = self.serializer.deserialize_json(open(fn).read())
wf.script_engine = PythonScriptEngine(default_globals={"time": time})
wf.script_engine = PythonScriptEngine(environment=TaskDataEnvironment({"time": time}))
wf.refresh_waiting_tasks()
wf.do_engine_steps()
self.assertTrue(wf.is_completed())

View File

@ -1,22 +1,20 @@
# -*- coding: utf-8 -*-
import os
from copy import deepcopy
from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer
from SpiffWorkflow.camunda.parser.CamundaParser import CamundaParser
from SpiffWorkflow.camunda.serializer.task_spec_converters import UserTaskConverter, StartEventConverter, EndEventConverter, \
IntermediateCatchEventConverter, IntermediateThrowEventConverter, BoundaryEventConverter
from SpiffWorkflow.camunda.serializer.config import CAMUNDA_SPEC_CONFIG
from SpiffWorkflow.dmn.serializer.task_spec_converters import BusinessRuleTaskConverter
from SpiffWorkflow.dmn.serializer.task_spec import BusinessRuleTaskConverter
from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
CAMUNDA_SPEC_CONFIG['task_specs'].append(BusinessRuleTaskConverter)
__author__ = 'danfunk'
wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter([
UserTaskConverter, BusinessRuleTaskConverter, StartEventConverter,
EndEventConverter, BoundaryEventConverter, IntermediateCatchEventConverter,
IntermediateThrowEventConverter])
wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter(CAMUNDA_SPEC_CONFIG)
class BaseTestCase(BpmnWorkflowTestCase):
""" Provides some basic tools for loading up and parsing camunda BPMN files """

View File

@ -16,7 +16,7 @@ class CallActivityMessageTest(BaseTestCase):
def testRunThroughHappy(self):
self.actual_test(save_restore=False)
def testThroughSaveRestore(self):
def testRunThroughSaveRestore(self):
self.actual_test(save_restore=True)
def actual_test(self, save_restore=False):

View File

@ -1,5 +1,6 @@
import unittest
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from .BaseTestCase import BaseTestCase
@ -12,8 +13,8 @@ def my_custom_function(txt):
class CustomScriptEngine(PythonScriptEngine):
def __init__(self):
augment_methods = {'my_custom_function': my_custom_function}
super().__init__(scripting_additions=augment_methods)
environment = TaskDataEnvironment({'my_custom_function': my_custom_function})
super().__init__(environment=environment)
class DMNCustomScriptTest(BaseTestCase):

View File

@ -7,6 +7,7 @@ from datetime import timedelta
from SpiffWorkflow.task import TaskState
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment
from .BaseTestCase import BaseTestCase
__author__ = 'kellym'
@ -15,7 +16,7 @@ __author__ = 'kellym'
class MessageBoundaryTest(BaseTestCase):
def setUp(self):
self.script_engine = PythonScriptEngine(default_globals={"timedelta": timedelta})
self.script_engine = PythonScriptEngine(environment=TaskDataEnvironment({"timedelta": timedelta}))
self.spec, self.subprocesses = self.load_workflow_spec('MessageBoundary.bpmn', 'Process_1kjyavs')
self.workflow = BpmnWorkflow(self.spec, self.subprocesses, script_engine=self.script_engine)

View File

@ -1,6 +1,8 @@
import unittest
from SpiffWorkflow.bpmn.workflow import BpmnWorkflow
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import BoxedTaskDataEnvironment
from .BaseTestCase import BaseTestCase
@ -10,12 +12,13 @@ class MultiInstanceDMNTest(BaseTestCase):
self.spec, subprocesses = self.load_workflow_spec(
'DMNMultiInstance.bpmn', 'Process_1', 'test_integer_decision_multi.dmn')
self.workflow = BpmnWorkflow(self.spec)
self.script_engine = PythonScriptEngine(environment=BoxedTaskDataEnvironment())
self.workflow.script_engine = self.script_engine
def testConstructor(self):
pass # this is accomplished through setup.
def testDmnHappy(self):
self.workflow = BpmnWorkflow(self.spec)
self.workflow.do_engine_steps()
self.workflow.complete_next()
self.workflow.do_engine_steps()
@ -25,16 +28,19 @@ class MultiInstanceDMNTest(BaseTestCase):
def testDmnSaveRestore(self):
self.workflow = BpmnWorkflow(self.spec)
self.save_restore()
self.workflow.script_engine = self.script_engine
self.workflow.do_engine_steps()
self.workflow.complete_next()
self.save_restore()
self.workflow.script_engine = self.script_engine
self.workflow.do_engine_steps()
self.workflow.complete_next()
self.save_restore()
self.workflow.script_engine = self.script_engine
self.workflow.do_engine_steps()
self.save_restore()
self.workflow.script_engine = self.script_engine
self.assertEqual(self.workflow.data['stuff']['E']['y'], 'D')
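The script_engine reassignment after every save_restore() suggests the custom engine is not carried through serialization and has to be re-attached to the restored workflow. A minimal, hypothetical sketch of that pattern outside the test harness (BoxedTaskDataEnvironment presumably wraps task data in the Box type used elsewhere in this changeset):

from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import BoxedTaskDataEnvironment

# Build the engine once and keep a reference to it.
script_engine = PythonScriptEngine(environment=BoxedTaskDataEnvironment())

# After deserializing a workflow (e.g. serializer.deserialize_json(...)),
# re-attach the engine before running further steps:
# workflow.script_engine = script_engine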

View File

@ -1,94 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:camunda="http://camunda.org/schema/1.0/bpmn" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_1n0u11m" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="3.7.0">
<bpmn:process id="DefaultGateway" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_1wis1un</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:userTask id="DoStuff" name="Do Stuff?" camunda:formKey="morestuffform">
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="morestuff" label="Do we need to do more stuff?" type="string" />
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>Flow_1wis1un</bpmn:incoming>
<bpmn:outgoing>Flow_144jxvd</bpmn:outgoing>
</bpmn:userTask>
<bpmn:sequenceFlow id="Flow_1wis1un" sourceRef="StartEvent_1" targetRef="DoStuff" />
<bpmn:exclusiveGateway id="Gateway_1yn93jn">
<bpmn:incoming>Flow_144jxvd</bpmn:incoming>
<bpmn:outgoing>Flow_1riszc2</bpmn:outgoing>
<bpmn:outgoing>Flow_0xdvee4</bpmn:outgoing>
</bpmn:exclusiveGateway>
<bpmn:sequenceFlow id="Flow_144jxvd" sourceRef="DoStuff" targetRef="Gateway_1yn93jn" />
<bpmn:sequenceFlow id="Flow_1riszc2" name="Yes" sourceRef="Gateway_1yn93jn" targetRef="GetMoreStuff">
<bpmn:conditionExpression xsi:type="bpmn:tFormalExpression">morestuff == 'Yes'</bpmn:conditionExpression>
</bpmn:sequenceFlow>
<bpmn:endEvent id="Event_1xfyeiq">
<bpmn:incoming>Flow_13ncefd</bpmn:incoming>
<bpmn:incoming>Flow_0xdvee4</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_13ncefd" sourceRef="GetMoreStuff" targetRef="Event_1xfyeiq" />
<bpmn:userTask id="GetMoreStuff" name="Add More Stuff">
<bpmn:extensionElements>
<camunda:formData>
<camunda:formField id="stuff.addstuff" label="Add More Stuff" type="string" />
</camunda:formData>
</bpmn:extensionElements>
<bpmn:incoming>Flow_1riszc2</bpmn:incoming>
<bpmn:outgoing>Flow_13ncefd</bpmn:outgoing>
<bpmn:multiInstanceLoopCharacteristics camunda:collection="collectstuff" camunda:elementVariable="stuff">
<bpmn:loopCardinality xsi:type="bpmn:tFormalExpression">3</bpmn:loopCardinality>
</bpmn:multiInstanceLoopCharacteristics>
</bpmn:userTask>
<bpmn:sequenceFlow id="Flow_0xdvee4" name="No" sourceRef="Gateway_1yn93jn" targetRef="Event_1xfyeiq">
<bpmn:conditionExpression xsi:type="bpmn:tFormalExpression">morestuff == 'No'</bpmn:conditionExpression>
</bpmn:sequenceFlow>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="DefaultGateway">
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_10nt4mt_di" bpmnElement="DoStuff">
<dc:Bounds x="260" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_1wis1un_di" bpmnElement="Flow_1wis1un">
<di:waypoint x="215" y="117" />
<di:waypoint x="260" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Gateway_1yn93jn_di" bpmnElement="Gateway_1yn93jn" isMarkerVisible="true">
<dc:Bounds x="405" y="92" width="50" height="50" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_144jxvd_di" bpmnElement="Flow_144jxvd">
<di:waypoint x="360" y="117" />
<di:waypoint x="405" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_1riszc2_di" bpmnElement="Flow_1riszc2">
<di:waypoint x="455" y="117" />
<di:waypoint x="520" y="117" />
<bpmndi:BPMNLabel>
<dc:Bounds x="478" y="99" width="19" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Event_1xfyeiq_di" bpmnElement="Event_1xfyeiq">
<dc:Bounds x="692" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_13ncefd_di" bpmnElement="Flow_13ncefd">
<di:waypoint x="620" y="117" />
<di:waypoint x="692" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="Activity_0msdtf4_di" bpmnElement="GetMoreStuff">
<dc:Bounds x="520" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNEdge id="Flow_0xdvee4_di" bpmnElement="Flow_0xdvee4">
<di:waypoint x="430" y="142" />
<di:waypoint x="430" y="240" />
<di:waypoint x="710" y="240" />
<di:waypoint x="710" y="135" />
<bpmndi:BPMNLabel>
<dc:Bounds x="563" y="222" width="15" height="14" />
</bpmndi:BPMNLabel>
</bpmndi:BPMNEdge>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

File diff suppressed because one or more lines are too long

(Image preview not rendered; previous file size 7.9 KiB.)

View File

@ -1,64 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<bpmn:definitions xmlns:bpmn="http://www.omg.org/spec/BPMN/20100524/MODEL" xmlns:bpmndi="http://www.omg.org/spec/BPMN/20100524/DI" xmlns:dc="http://www.omg.org/spec/DD/20100524/DC" xmlns:di="http://www.omg.org/spec/DD/20100524/DI" id="Definitions_1c9mbga" targetNamespace="http://bpmn.io/schema/bpmn" exporter="Camunda Modeler" exporterVersion="4.7.0">
<bpmn:process id="top_workflow" name="top_workflow" isExecutable="true">
<bpmn:startEvent id="StartEvent_1">
<bpmn:outgoing>Flow_1xegt6f</bpmn:outgoing>
</bpmn:startEvent>
<bpmn:sequenceFlow id="Flow_11qyfqv" sourceRef="Activity_0gjyb1c" targetRef="Activity_02eu174" />
<bpmn:sequenceFlow id="Flow_0hntmrc" sourceRef="Activity_02eu174" targetRef="Activity_0fz4sv6" />
<bpmn:endEvent id="Event_0jgpqrj">
<bpmn:incoming>Flow_0qc6vpv</bpmn:incoming>
</bpmn:endEvent>
<bpmn:sequenceFlow id="Flow_0qc6vpv" sourceRef="Activity_0fz4sv6" targetRef="Event_0jgpqrj" />
<bpmn:sequenceFlow id="Flow_1xegt6f" sourceRef="StartEvent_1" targetRef="Activity_0gjyb1c" />
<bpmn:scriptTask id="Activity_0gjyb1c" name="my_custom_function(&#39;test 1 from top workflow&#39;)">
<bpmn:incoming>Flow_1xegt6f</bpmn:incoming>
<bpmn:outgoing>Flow_11qyfqv</bpmn:outgoing>
<bpmn:script>my_custom_function('test 1 from top workflow')</bpmn:script>
</bpmn:scriptTask>
<bpmn:callActivity id="Activity_02eu174" name="Common Activity" calledElement="CommonActivity">
<bpmn:incoming>Flow_11qyfqv</bpmn:incoming>
<bpmn:outgoing>Flow_0hntmrc</bpmn:outgoing>
</bpmn:callActivity>
<bpmn:scriptTask id="Activity_0fz4sv6" name="my_custom_function(&#39;test from top workflow&#39;)">
<bpmn:incoming>Flow_0hntmrc</bpmn:incoming>
<bpmn:outgoing>Flow_0qc6vpv</bpmn:outgoing>
<bpmn:script>my_custom_function('test 2 from top workflow')</bpmn:script>
</bpmn:scriptTask>
</bpmn:process>
<bpmndi:BPMNDiagram id="BPMNDiagram_1">
<bpmndi:BPMNPlane id="BPMNPlane_1" bpmnElement="top_workflow">
<bpmndi:BPMNEdge id="Flow_1xegt6f_di" bpmnElement="Flow_1xegt6f">
<di:waypoint x="215" y="117" />
<di:waypoint x="240" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0qc6vpv_di" bpmnElement="Flow_0qc6vpv">
<di:waypoint x="640" y="117" />
<di:waypoint x="692" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_0hntmrc_di" bpmnElement="Flow_0hntmrc">
<di:waypoint x="490" y="117" />
<di:waypoint x="540" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNEdge id="Flow_11qyfqv_di" bpmnElement="Flow_11qyfqv">
<di:waypoint x="340" y="117" />
<di:waypoint x="390" y="117" />
</bpmndi:BPMNEdge>
<bpmndi:BPMNShape id="_BPMNShape_StartEvent_2" bpmnElement="StartEvent_1">
<dc:Bounds x="179" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Event_0jgpqrj_di" bpmnElement="Event_0jgpqrj">
<dc:Bounds x="692" y="99" width="36" height="36" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0py36p6_di" bpmnElement="Activity_0gjyb1c">
<dc:Bounds x="240" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0zer2pn_di" bpmnElement="Activity_02eu174">
<dc:Bounds x="390" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
<bpmndi:BPMNShape id="Activity_0mjjhco_di" bpmnElement="Activity_0fz4sv6">
<dc:Bounds x="540" y="77" width="100" height="80" />
</bpmndi:BPMNShape>
</bpmndi:BPMNPlane>
</bpmndi:BPMNDiagram>
</bpmn:definitions>

View File

@ -1,7 +1,8 @@
import unittest
from SpiffWorkflow.camunda.specs.UserTask import FormField, UserTask, Form, EnumFormField
from SpiffWorkflow.camunda.serializer.task_spec_converters import UserTaskConverter
from SpiffWorkflow.camunda.serializer.task_spec import UserTaskConverter
from SpiffWorkflow.bpmn.serializer.helpers.dictionary import DictionaryConverter
from SpiffWorkflow.specs.WorkflowSpec import WorkflowSpec
@ -53,7 +54,7 @@ class UserTaskSpecTest(unittest.TestCase):
self.form.add_field(field1)
self.form.add_field(field2)
converter = UserTaskConverter()
converter = UserTaskConverter(DictionaryConverter())
dct = converter.to_dict(self.user_spec)
self.assertEqual(dct['name'], 'userTask')
self.assertEqual(dct['form'], {

View File

@ -2,7 +2,7 @@ import os
from lxml import etree
from SpiffWorkflow.bpmn.PythonScriptEngine import Box
from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import Box
from SpiffWorkflow.dmn.engine.DMNEngine import DMNEngine
from SpiffWorkflow.dmn.parser.DMNParser import DMNParser, get_dmn_ns

View File

@ -1,14 +1,11 @@
import os
import unittest
from SpiffWorkflow.dmn.engine.DMNEngine import DMNEngine
from SpiffWorkflow.bpmn.serializer.helpers.dictionary import DictionaryConverter
from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser
from SpiffWorkflow.dmn.serializer.task_spec_converters import \
BusinessRuleTaskConverter
from SpiffWorkflow.dmn.serializer.task_spec import BusinessRuleTaskConverter
from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
from tests.SpiffWorkflow.dmn.DecisionRunner import DecisionRunner
from tests.SpiffWorkflow.dmn.python_engine.PythonDecisionRunner import \
PythonDecisionRunner
from tests.SpiffWorkflow.dmn.python_engine.PythonDecisionRunner import PythonDecisionRunner
class HitPolicyTest(BpmnWorkflowTestCase):
@ -38,8 +35,8 @@ class HitPolicyTest(BpmnWorkflowTestCase):
runner = PythonDecisionRunner(file_name)
decision_table = runner.decision_table
self.assertEqual("COLLECT", decision_table.hit_policy)
dict = BusinessRuleTaskConverter().decision_table_to_dict(decision_table)
new_table = BusinessRuleTaskConverter().decision_table_from_dict(dict)
dict = BusinessRuleTaskConverter(DictionaryConverter()).decision_table_to_dict(decision_table)
new_table = BusinessRuleTaskConverter(DictionaryConverter()).decision_table_from_dict(dict)
self.assertEqual("COLLECT", new_table.hit_policy)
def suite():

View File

@ -1,6 +1,6 @@
import unittest
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import Box
from .FeelDecisionRunner import FeelDecisionRunner
@ -19,7 +19,7 @@ class FeelDictDecisionTestClass(unittest.TestCase):
"PEANUTS": {"delicious": True},
"SPAM": {"delicious": False}
}}
PythonScriptEngine.convert_to_box(PythonScriptEngine(), data)
Box.convert_to_box(data)
res = self.runner.decide(data)
self.assertEqual(res.description, 'They are allergic to peanuts')

View File

@ -1,6 +1,6 @@
import unittest
from SpiffWorkflow.bpmn.PythonScriptEngine import Box
from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import Box
from .FeelDecisionRunner import FeelDecisionRunner

View File

@ -1,6 +1,6 @@
import unittest
from SpiffWorkflow.bpmn.PythonScriptEngine import Box
from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import Box
from .PythonDecisionRunner import PythonDecisionRunner

View File

@ -1,6 +1,6 @@
import unittest
from SpiffWorkflow.bpmn.PythonScriptEngine import Box
from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import Box
from .PythonDecisionRunner import PythonDecisionRunner

View File

@ -2,11 +2,12 @@ import datetime
from decimal import Decimal
from SpiffWorkflow.bpmn.PythonScriptEngine import PythonScriptEngine
from SpiffWorkflow.bpmn.PythonScriptEngineEnvironment import TaskDataEnvironment
from ..DecisionRunner import DecisionRunner
class PythonDecisionRunner(DecisionRunner):
def __init__(self, filename):
scripting_additions={'Decimal': Decimal, 'datetime': datetime}
super().__init__(PythonScriptEngine(scripting_additions=scripting_additions), filename, 'python_engine')
environment = TaskDataEnvironment({'Decimal': Decimal, 'datetime': datetime})
super().__init__(PythonScriptEngine(environment=environment), filename, 'python_engine')

View File

@ -1,27 +1,17 @@
# -*- coding: utf-8 -*-
import os
from copy import deepcopy
from SpiffWorkflow.spiff.parser.process import SpiffBpmnParser, VALIDATOR
from SpiffWorkflow.spiff.serializer.task_spec_converters import NoneTaskConverter, \
ManualTaskConverter, UserTaskConverter, ScriptTaskConverter, \
SubWorkflowTaskConverter, TransactionSubprocessConverter, \
CallActivityTaskConverter, \
StartEventConverter, EndEventConverter, BoundaryEventConverter, \
SendTaskConverter, ReceiveTaskConverter, \
IntermediateCatchEventConverter, IntermediateThrowEventConverter, \
ServiceTaskConverter
from SpiffWorkflow.dmn.serializer.task_spec_converters import BusinessRuleTaskConverter
from SpiffWorkflow.spiff.serializer.config import SPIFF_SPEC_CONFIG
from SpiffWorkflow.dmn.serializer.task_spec import BusinessRuleTaskConverter
from SpiffWorkflow.bpmn.serializer.workflow import BpmnWorkflowSerializer
from tests.SpiffWorkflow.bpmn.BpmnWorkflowTestCase import BpmnWorkflowTestCase
wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter([
NoneTaskConverter, ManualTaskConverter, UserTaskConverter, ScriptTaskConverter,
SubWorkflowTaskConverter, TransactionSubprocessConverter, CallActivityTaskConverter,
StartEventConverter, EndEventConverter, BoundaryEventConverter, SendTaskConverter, ReceiveTaskConverter,
IntermediateCatchEventConverter, IntermediateThrowEventConverter, BusinessRuleTaskConverter,
ServiceTaskConverter
])
SPIFF_SPEC_CONFIG['task_specs'].append(BusinessRuleTaskConverter)
wf_spec_converter = BpmnWorkflowSerializer.configure_workflow_spec_converter(SPIFF_SPEC_CONFIG)
class BaseTestCase(BpmnWorkflowTestCase):
""" Provides some basic tools for loading up and parsing Spiff extensions"""

View File

@ -10,7 +10,6 @@ set -o errtrace -o errexit -o nounset -o pipefail
for subtree in "SpiffWorkflow" \
"spiffworkflow-backend" \
"spiffworkflow-frontend" \
"flask-bpmn" \
"bpmn-js-spiffworkflow" \
"connector-proxy-demo"
do

poetry.lock generated
View File

@ -1760,7 +1760,7 @@ lxml = "*"
type = "git"
url = "https://github.com/sartography/SpiffWorkflow"
reference = "main"
resolved_reference = "98c6294f1240aee599cd98bcee58d121cb57b331"
resolved_reference = "0e61be85c47474a33037e6f398e64c96e02f13ad"
[[package]]
name = "sqlalchemy"

View File

@ -1,28 +1,43 @@
FROM ghcr.io/sartography/python:3.11
# Base image to share ENV vars that activate VENV.
FROM ghcr.io/sartography/python:3.11 AS base
ENV VIRTUAL_ENV=/app/venv
RUN python3 -m venv $VIRTUAL_ENV
ENV PATH="$VIRTUAL_ENV/bin:$PATH"
WORKDIR /app
# base plus packages needed for deployment. Could just install these in final, but then we can't cache as much.
FROM base AS deployment
RUN apt-get update \
&& apt-get clean -y \
&& apt-get install -y -q curl git-core gunicorn3 default-mysql-client \
&& rm -rf /var/lib/apt/lists/*
# Setup image for installing Python dependencies.
FROM base AS setup
RUN pip install poetry
RUN useradd _gunicorn --no-create-home --user-group
RUN apt-get update && \
apt-get install -y -q \
gcc libssl-dev \
curl git-core libpq-dev \
gunicorn3 default-mysql-client
RUN apt-get update \
&& apt-get install -y -q gcc libssl-dev libpq-dev
WORKDIR /app
# poetry install takes a long time and can be cached if dependencies don't change,
# so that's why we tolerate running it twice.
COPY pyproject.toml poetry.lock /app/
RUN poetry install --without dev
RUN set -xe \
&& apt-get remove -y gcc python3-dev libssl-dev \
&& apt-get autoremove -y \
&& apt-get clean -y \
&& rm -rf /var/lib/apt/lists/*
COPY . /app/
# run poetry install again AFTER copying the app into the image
# otherwise it does not know what the main app module is
COPY . /app
RUN poetry install --without dev
CMD ./bin/boot_server_in_docker
# Final image without setup dependencies.
FROM deployment AS final
LABEL source="https://github.com/sartography/spiff-arena"
LABEL description="Software development platform for building, running, and monitoring executable diagrams"
COPY --from=setup /app /app
CMD ["./bin/boot_server_in_docker"]

View File

@ -7,7 +7,13 @@ function error_handler() {
trap 'error_handler ${LINENO} $?' ERR
set -o errtrace -o errexit -o nounset -o pipefail
# you can get a list of users from the keycloak realm file like:
# grep '"email" :' keycloak/realm_exports/spiffworkflow-realm.json | awk -F : '{print $2}' | sed -E 's/ "//g' | sed -E 's/",//g' > s
# we keep some of these in keycloak/test_user_lists
# spiffworkflow-realm.json is a mashup of the status and sartography user lists.
user_file_with_one_email_per_line="${1:-}"
keycloak_realm="${2:-spiffworkflow}"
if [[ -z "${1:-}" ]]; then
>&2 echo "usage: $(basename "$0") [user_file_with_one_email_per_line]"

View File

@ -21,6 +21,9 @@ docker exec keycloak /opt/keycloak/bin/kc.sh export --dir "${docker_container_pa
docker cp "keycloak:${docker_container_path}" "$local_tmp_dir"
for realm in $realms ; do
if ! grep -Eq '\-realm$' <<< "$realm"; then
realm="${realm}-realm"
fi
cp "${local_tmp_dir}/hey/${realm}.json" "${script_dir}/../realm_exports/"
done

View File

@ -547,7 +547,7 @@
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "kevin@sartography.com",
"email" : "kb@sartography.com",
"credentials" : [ {
"id" : "4057e784-689d-47c0-a164-035a69e78edf",
"type" : "password",

View File

@ -854,6 +854,46 @@
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "672167fd-ae79-47a7-8429-f3bb1bd4ee55",
"createdTimestamp" : 1675349217829,
"username" : "infra1.sme",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "infra1.sme@status.im",
"credentials" : [ {
"id" : "bd5843bf-98cc-4891-ab03-693a5d69078b",
"type" : "password",
"createdDate" : 1675349217863,
"secretData" : "{\"value\":\"A78sm/+e2x/N/3A7Pk05eKhfANp+ZO9BQA3LYMwpzQ5KK2D/Ot8d1plOnqMT61rTnnCgxP8dtlA6/Ws61CMTYg==\",\"salt\":\"XOOknamJPwXD1LDj6LEodA==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "40891b68-121f-4fdb-86c0-0f52836d7e65",
"createdTimestamp" : 1675349217890,
"username" : "infra2.sme",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "infra2.sme@status.im",
"credentials" : [ {
"id" : "7e9927e2-ef7f-4247-b663-1f59147a9066",
"type" : "password",
"createdDate" : 1675349217926,
"secretData" : "{\"value\":\"j4M9u8p9FDCitGpb7JXM9JWFVGvBu7R2TOYG79c+Witl7gfWppues9fFzhlFyXgC78v6diHoQ4LwCwJGJS3loQ==\",\"salt\":\"H+i8qv6ulrBEZla/v8gDDw==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "1561518b-c327-491e-9db3-23c2b5394104",
"createdTimestamp" : 1669303773974,
@ -863,7 +903,7 @@
"emailVerified" : false,
"firstName" : "",
"lastName" : "",
"email" : "j@status.im",
"email" : "j@sartography.com",
"credentials" : [ {
"id" : "e71ec785-9133-4b7d-8015-1978379af0bb",
"type" : "password",
@ -1043,6 +1083,86 @@
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "e911fb0f-fd07-4886-acbf-d00930d293d3",
"createdTimestamp" : 1675447845512,
"username" : "legal.program-lead",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "legal.program-lead@status.im",
"credentials" : [ {
"id" : "9676d8d3-1e8c-4f5d-b5f7-49745cecf8fd",
"type" : "password",
"createdDate" : 1675447845577,
"secretData" : "{\"value\":\"vTffScfGXIjWWyDDfzo7JPiJe9VjAtrmds382EeV7N+wYNapJmLTVModkBsmGPy4TmWLc9BoysQynOaanSGi9Q==\",\"salt\":\"67ZxTEnar8aq4LZLhSNTFg==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "eff82d12-9a67-4002-b3c5-37811bd45199",
"createdTimestamp" : 1675349217585,
"username" : "legal.program-lead.sme",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "legal.program-lead.sme@status.im",
"credentials" : [ {
"id" : "933e3fc4-398a-46c3-bc4d-783ab29a0a5b",
"type" : "password",
"createdDate" : 1675349217655,
"secretData" : "{\"value\":\"x2M9khnGK+VCykoWbZKEcHNv5QMAcumqLa7+o+STJV8UYt7BobSBn7w1r3cbyYlvkgoWIglG8S2nLDFFb6hAQg==\",\"salt\":\"/lQYRrsUY1BxNUOZSKaZwA==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "4ed2b5a2-16c2-4029-ae97-d75c60f2147f",
"createdTimestamp" : 1675447845616,
"username" : "legal.project-lead",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "legal.project-lead@status.im",
"credentials" : [ {
"id" : "fd0b0d0a-8a3e-48c9-b17b-023e87057048",
"type" : "password",
"createdDate" : 1675447845652,
"secretData" : "{\"value\":\"l/DPfNBcHINV8lCf9nEyCJkFvaMGnLqcd1Y8t9taLqxb8r/ofY2ce79C19JCHDQJXRPRuCsMoobuFhhNR6aQmg==\",\"salt\":\"2ivCPrNc56396ldlwpQP6Q==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "8cd6feba-5ca6-4cfb-bc1a-a52c80595783",
"createdTimestamp" : 1675349217698,
"username" : "legal.project-lead.sme",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "legal.project-lead.sme@status.im",
"credentials" : [ {
"id" : "908f858c-d3cd-47a9-b611-a1d48f0247e5",
"type" : "password",
"createdDate" : 1675349217733,
"secretData" : "{\"value\":\"r53SXu0dp6FrSJAVLHYrfwSKPZY9OKHfHBuJDEE2DCbZiQRH77C4sZWfUwbu/6OOhTtiBEe7gz2DQpimIDY4RQ==\",\"salt\":\"+g/OXXJEMkQiahmjSylAkw==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "2a3176a0-8dd5-4223-a3e1-3cac4134e474",
"createdTimestamp" : 1674148695030,
@ -1063,6 +1183,46 @@
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "3d62ca4e-88bc-4302-89c1-8741c771147e",
"createdTimestamp" : 1675349217762,
"username" : "legal1.sme",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "legal1.sme@status.im",
"credentials" : [ {
"id" : "b774d46d-a3e8-417f-97c6-2d2102a54b0b",
"type" : "password",
"createdDate" : 1675349217799,
"secretData" : "{\"value\":\"PF21YsnIoYZLJFT/y1i2FV4OmaQj8dRsalZ9R2PK6t/jKze3ds4k+I7WVe4h2H0hMB9fo9cSQ7kt2ygxfEBheg==\",\"salt\":\"5sOkSXzRSgNz7lHfUbKzdQ==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "99ce8a54-2941-4767-8ddf-52320b3708bd",
"createdTimestamp" : 1675447085191,
"username" : "madhurya",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "madhurya@sartography.com",
"credentials" : [ {
"id" : "4fa2bf1f-188e-42e3-9633-01d436864206",
"type" : "password",
"createdDate" : 1675447085252,
"secretData" : "{\"value\":\"6ZApQ7kx4YDc5ojW9eyFiSKMz5l3/Zl5PIScHEW1gtP3lrnnWqWgwcP+8cWkKdm3im+XrZwDQHjuGjGN5Rbjyw==\",\"salt\":\"HT3fCh245v8etRFIprXsyw==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "6f5bfa09-7494-4a2f-b871-cf327048cac7",
"createdTimestamp" : 1665517010600,
@ -1185,6 +1345,86 @@
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "9f703c96-02f1-403c-b070-25feb86cfe21",
"createdTimestamp" : 1675447845811,
"username" : "ppg.ba.program-lead",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "ppg.ba.program-lead@status.im",
"credentials" : [ {
"id" : "bf74118b-b28f-4d2f-8bfa-7b9d1a8345f2",
"type" : "password",
"createdDate" : 1675447845847,
"secretData" : "{\"value\":\"wFUAB6E98gE222nCfsKe6P3kSZxeOSjhflsxon8kw/dY4ZwN0KMwvlYuNhmoptTLqDQJyqUiydmlMK0NS4JjTQ==\",\"salt\":\"YCPk4Tc3eXcoes78oLhDEg==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "81a1727b-c846-4af9-8d95-1c50b1deb0d5",
"createdTimestamp" : 1675447845879,
"username" : "ppg.ba.project-lead",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "ppg.ba.project-lead@status.im",
"credentials" : [ {
"id" : "6411830d-6015-4cf2-bac6-d49c26510319",
"type" : "password",
"createdDate" : 1675447845915,
"secretData" : "{\"value\":\"1+m8twycOEbA4X61zN7dLENqp2IxxQZrXKaf3mEuzmxouHrgxvmXudwC6DWyfjXvLm7gxWlaa4cofBFwr1idig==\",\"salt\":\"UEKUSScYv2xY+rJ8vlvF4A==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "1d4d471a-b3ef-4750-97c4-a9e64eb8f414",
"createdTimestamp" : 1675447845942,
"username" : "ppg.ba.sme",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "ppg.ba.sme@status.im",
"credentials" : [ {
"id" : "6512f88a-cbcc-4d79-be17-1d132ba11e64",
"type" : "password",
"createdDate" : 1675447845977,
"secretData" : "{\"value\":\"EErx/3vG+lh4DgrJUzkBv4cLT3sK1gS+T9KD5V/JpvJUmJpRFQqpk+YxC/nC/kTGLIpRDdCIN690T84FlOIjew==\",\"salt\":\"FPeVGnFbt9TRNiORMB5LMQ==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "2dade29f-c6dc-445b-bdf0-eed316bdb638",
"createdTimestamp" : 1675447846003,
"username" : "ppg.ba.sme1",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "ppg.ba.sme1@status.im",
"credentials" : [ {
"id" : "ccf2d138-020a-4a29-b63d-1f4d2f415639",
"type" : "password",
"createdDate" : 1675447846038,
"secretData" : "{\"value\":\"BtSJtW/8lCtyrDPTXzhsyT/32H+pOHx9thKqJV30dOEZ9wcSQbrRSHoQbXwLos+sIiA82X3wm+qObdQoD5guVQ==\",\"salt\":\"nSbgxYpVGaMz2ArmqLCN6Q==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "c3ea06ee-c497-48e6-8816-43c8ef68bd8b",
"createdTimestamp" : 1674148694747,
@ -1225,6 +1465,86 @@
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "c21c075d-9ac5-40a1-964a-c1d6ffe17257",
"createdTimestamp" : 1675447845680,
"username" : "security.program-lead",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "security.program-lead@status.im",
"credentials" : [ {
"id" : "d1401dbd-a88b-44a6-b13c-fff13ee07e0c",
"type" : "password",
"createdDate" : 1675447845718,
"secretData" : "{\"value\":\"3D76RpIFG0/ixbSBeJfCc61kyL8PvVn/khA8FOy6RLg2hrZbs1Uwl8SmplnSUll1wD5a/BoobsO7v1XW4TCvwQ==\",\"salt\":\"YtDRRmBV4SBlO/oX23r2EQ==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "ace0432f-1818-4210-8bcf-15533abfb3ce",
"createdTimestamp" : 1675349217958,
"username" : "security.program-lead.sme",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "security.program-lead.sme@status.im",
"credentials" : [ {
"id" : "602512dd-b24f-458c-9cef-7271bd8177bc",
"type" : "password",
"createdDate" : 1675349217993,
"secretData" : "{\"value\":\"vUb+t9ukHz3oHGUxaYUP34riZrshZU4c3iWpHB0OzI3y0ggCeT9xFEcmrwdkfilkKvCBJxLswlirWmgnmxZH0w==\",\"salt\":\"0hzZkDK4hPH5xgR1TpyG1Q==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "34dfacfd-24b5-414e-ac3e-9b013399aee2",
"createdTimestamp" : 1675447845747,
"username" : "security.project-lead",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "security.project-lead@status.im",
"credentials" : [ {
"id" : "cb5d8a8a-e7d0-40e4-878b-a33608cb76c8",
"type" : "password",
"createdDate" : 1675447845784,
"secretData" : "{\"value\":\"rudimVOjVwJeO/1RLuyHySEaSQMzjHqPQrh5Pmfr4L2PgP/1oDKLVB38pKOohlbTarDcbAfMHB7AFYAPn9kuIg==\",\"salt\":\"cOkkUBOx/4AVUSa3Ozsiuw==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "6272ac80-1d79-4e3c-a5c1-b31660560318",
"createdTimestamp" : 1675349218020,
"username" : "security.project-lead.sme",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "security.project-lead.sme@status.im",
"credentials" : [ {
"id" : "eb7673bf-50f1-40af-927b-162f536f6187",
"type" : "password",
"createdDate" : 1675349218054,
"secretData" : "{\"value\":\"E1eLmC7hCcv7I5X30TfMvpZv3MtHH+rVhgLrZnBJSUvsrXmRkHWScJ/POHQLwUgCLJeU/lKDP/f0TdO2PvHiow==\",\"salt\":\"dWM5XJIR7m/eZ0YlHmuC3A==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "74374cda-1516-48e5-9ef2-1fd7bcee84d3",
"createdTimestamp" : 1674148695088,
@ -1246,13 +1566,32 @@
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "487d3a85-89dd-4839-957a-c3f6d70551f6",
"createdTimestamp" : 1657115173081,
"id" : "98faab0c-d2af-4794-8491-03dad5f30c63",
"createdTimestamp" : 1675349218087,
"username" : "security1.sme",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "security1.sme@status.im",
"credentials" : [ {
"id" : "37bd6b9b-015b-4790-8a4f-883c47035bc4",
"type" : "password",
"createdDate" : 1675349218122,
"secretData" : "{\"value\":\"BJP9K4qIdnaDnE3meM2GLWMFdSJryxcZovtKDlZNaQXfSUH3X1mOJfaLXQsuTWJzSMIow8XZ5+ye47ZNabLCaQ==\",\"salt\":\"BqD7jPpdB7PzU6QTN5dpMA==\",\"additionalParameters\":{}}",
"credentialData" : "{\"hashIterations\":27500,\"algorithm\":\"pbkdf2-sha256\",\"additionalParameters\":{}}"
} ],
"disableableCredentialTypes" : [ ],
"requiredActions" : [ ],
"realmRoles" : [ "default-roles-spiffworkflow" ],
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "b768e3ef-f905-4493-976c-bc3408c04bec",
"createdTimestamp" : 1675447832524,
"username" : "service-account-spiffworkflow-backend",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "service-account@status.im",
"serviceAccountClientId" : "spiffworkflow-backend",
"credentials" : [ ],
"disableableCredentialTypes" : [ ],
@ -1264,13 +1603,12 @@
"notBefore" : 0,
"groups" : [ ]
}, {
"id" : "22de68b1-4b06-4bc2-8da6-0c577e7e62ad",
"createdTimestamp" : 1657055472800,
"id" : "b6fb214b-cb8a-4403-9308-ac6d4e13ef26",
"createdTimestamp" : 1675447832560,
"username" : "service-account-withauth",
"enabled" : true,
"totp" : false,
"emailVerified" : false,
"email" : "service-account-withauth@status.im",
"serviceAccountClientId" : "withAuth",
"credentials" : [ ],
"disableableCredentialTypes" : [ ],
@ -2514,7 +2852,7 @@
"subType" : "authenticated",
"subComponents" : { },
"config" : {
"allowed-protocol-mapper-types" : [ "oidc-usermodel-attribute-mapper", "saml-user-attribute-mapper", "oidc-usermodel-property-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-role-list-mapper", "saml-user-property-mapper", "oidc-address-mapper", "oidc-full-name-mapper" ]
"allowed-protocol-mapper-types" : [ "oidc-usermodel-property-mapper", "saml-user-property-mapper", "saml-user-attribute-mapper", "oidc-usermodel-attribute-mapper", "oidc-address-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-role-list-mapper", "oidc-full-name-mapper" ]
}
}, {
"id" : "d68e938d-dde6-47d9-bdc8-8e8523eb08cd",
@ -2532,7 +2870,7 @@
"subType" : "anonymous",
"subComponents" : { },
"config" : {
"allowed-protocol-mapper-types" : [ "oidc-full-name-mapper", "saml-role-list-mapper", "oidc-usermodel-property-mapper", "saml-user-attribute-mapper", "oidc-sha256-pairwise-sub-mapper", "saml-user-property-mapper", "oidc-usermodel-attribute-mapper", "oidc-address-mapper" ]
"allowed-protocol-mapper-types" : [ "oidc-full-name-mapper", "saml-role-list-mapper", "saml-user-attribute-mapper", "oidc-usermodel-attribute-mapper", "oidc-address-mapper", "saml-user-property-mapper", "oidc-sha256-pairwise-sub-mapper", "oidc-usermodel-property-mapper" ]
}
}, {
"id" : "3854361d-3fe5-47fb-9417-a99592e3dc5c",
@ -2622,7 +2960,7 @@
"internationalizationEnabled" : false,
"supportedLocales" : [ ],
"authenticationFlows" : [ {
"id" : "a91920d9-792e-486f-9a02-49fe00857ce5",
"id" : "cb39eda2-18c2-4b03-9d7c-672a2bd47d19",
"alias" : "Account verification options",
"description" : "Method with which to verity the existing account",
"providerId" : "basic-flow",
@ -2644,7 +2982,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "6b8f504c-39fb-4608-9223-52deb5ae0dfe",
"id" : "96d4e28f-51ad-4737-87b4-5a10484ceb8b",
"alias" : "Authentication Options",
"description" : "Authentication options.",
"providerId" : "basic-flow",
@ -2673,7 +3011,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "ac4dd6f3-43b2-4212-90eb-4df7c9a6a0bc",
"id" : "8f4c884d-93cd-4404-bc3a-1fa717b070c5",
"alias" : "Browser - Conditional OTP",
"description" : "Flow to determine if the OTP is required for the authentication",
"providerId" : "basic-flow",
@ -2695,7 +3033,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "726b4a58-cb78-4105-a34c-3e4404c74362",
"id" : "166d1879-dd61-4fb4-b4f6-0a4d69f49da8",
"alias" : "Direct Grant - Conditional OTP",
"description" : "Flow to determine if the OTP is required for the authentication",
"providerId" : "basic-flow",
@ -2717,7 +3055,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "be1b5f5d-b80f-46a6-804b-bce20e2de246",
"id" : "18cab8f9-f010-4226-a86e-8da2f1632304",
"alias" : "First broker login - Conditional OTP",
"description" : "Flow to determine if the OTP is required for the authentication",
"providerId" : "basic-flow",
@ -2739,7 +3077,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "ff5097d8-818a-4176-8512-caf9d81eb6db",
"id" : "04d8d1d1-5253-4644-b55d-8c9317818b33",
"alias" : "Handle Existing Account",
"description" : "Handle what to do if there is existing account with same email/username like authenticated identity provider",
"providerId" : "basic-flow",
@ -2761,7 +3099,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "b9ecf989-e87b-45c0-a440-bce46b473dec",
"id" : "2bf21e1d-ff7e-4d52-8be7-31355945c302",
"alias" : "Reset - Conditional OTP",
"description" : "Flow to determine if the OTP should be reset or not. Set to REQUIRED to force.",
"providerId" : "basic-flow",
@ -2783,7 +3121,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "4554310c-e125-4834-a84e-53bbec7a79d6",
"id" : "fa8636a5-9969-41a5-9fef-9c825cceb819",
"alias" : "User creation or linking",
"description" : "Flow for the existing/non-existing user alternatives",
"providerId" : "basic-flow",
@ -2806,7 +3144,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "204549aa-c931-45a2-b2f0-1a5a0c724935",
"id" : "8656a884-6645-40b5-b075-c40736e27811",
"alias" : "Verify Existing Account by Re-authentication",
"description" : "Reauthentication of existing account",
"providerId" : "basic-flow",
@ -2828,7 +3166,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "d02f58b1-6469-46ea-a348-d923b5aa9727",
"id" : "0d88d334-bfa4-4cf1-9fa3-17d0df0151d1",
"alias" : "browser",
"description" : "browser based authentication",
"providerId" : "basic-flow",
@ -2864,7 +3202,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "7ef6a658-be09-4b81-91ac-f21dc80b0841",
"id" : "9b195d67-e3e6-4983-8607-533b739ebd97",
"alias" : "clients",
"description" : "Base authentication for clients",
"providerId" : "client-flow",
@ -2900,7 +3238,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "f7f2eeab-6455-4a18-a98d-b1a5f04e35fb",
"id" : "fd0273a1-f6f4-4df1-a057-54ac4e91f4a9",
"alias" : "direct grant",
"description" : "OpenID Connect Resource Owner Grant",
"providerId" : "basic-flow",
@ -2929,7 +3267,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "c44389c2-08b2-4adb-a6e9-e41006cb20c7",
"id" : "b457cba8-ef31-473b-a481-c095b2f4eb48",
"alias" : "docker auth",
"description" : "Used by Docker clients to authenticate against the IDP",
"providerId" : "basic-flow",
@ -2944,7 +3282,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "edf00de8-8f19-4a32-98c4-15e719c1fadd",
"id" : "97519504-fd69-4c08-bd27-15d26fbc9b76",
"alias" : "first broker login",
"description" : "Actions taken after first broker login with identity provider account, which is not yet linked to any Keycloak account",
"providerId" : "basic-flow",
@ -2967,7 +3305,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "58415605-eb47-41b3-a07f-90bbbbcb9963",
"id" : "fc6a4468-1a78-410d-ac97-cf9f05814850",
"alias" : "forms",
"description" : "Username, password, otp and other auth forms.",
"providerId" : "basic-flow",
@ -2989,7 +3327,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "1eae6099-3e1e-484b-ad94-b09339affb68",
"id" : "97a25d8a-25a0-4bf4-be6d-a6f019cf3a32",
"alias" : "http challenge",
"description" : "An authentication flow based on challenge-response HTTP Authentication Schemes",
"providerId" : "basic-flow",
@ -3011,7 +3349,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "8af03739-b77a-4582-ab63-a1855ca4f637",
"id" : "671e8ec7-af31-4c54-b6bb-96ebe69881de",
"alias" : "registration",
"description" : "registration flow",
"providerId" : "basic-flow",
@ -3027,7 +3365,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "0c308998-c5ad-4cf8-ab5c-15be89cbe4d7",
"id" : "24d6aaaa-5202-4401-99c3-bb15925bd5be",
"alias" : "registration form",
"description" : "registration form",
"providerId" : "form-flow",
@ -3063,7 +3401,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "5510aa65-e78d-4d08-a3ca-31e277bc3cd0",
"id" : "f948bd43-ff05-4245-be30-a0a0dad2b7f0",
"alias" : "reset credentials",
"description" : "Reset credentials for a user if they forgot their password or something",
"providerId" : "basic-flow",
@ -3099,7 +3437,7 @@
"userSetupAllowed" : false
} ]
}, {
"id" : "b6b3e35d-8df3-487e-b2d2-9fdf524a4181",
"id" : "7e4aaea7-05ca-4aa0-b934-4c81614620a8",
"alias" : "saml ecp",
"description" : "SAML ECP Profile Authentication Flow",
"providerId" : "basic-flow",
@ -3115,13 +3453,13 @@
} ]
} ],
"authenticatorConfig" : [ {
"id" : "a2e9294b-74ce-4ea6-8372-9d9fb3d60a06",
"id" : "14ca1058-25e7-41f6-85ce-ad0bfce2c67c",
"alias" : "create unique user config",
"config" : {
"require.password.update.after.registration" : "false"
}
}, {
"id" : "de65a90c-cc4b-4bf0-8e84-756e23a504f0",
"id" : "16803de1-f7dc-4293-acde-fd0eae264377",
"alias" : "review profile config",
"config" : {
"update.profile.on.first.login" : "missing"
@ -3216,4 +3554,4 @@
"clientPolicies" : {
"policies" : [ ]
}
}
}

View File

@ -1,8 +1,13 @@
admin@spiffworkflow.org
alex@sartography.com
dan@sartography.com
kevin@sartography.com
jason@sartography.com
mike@sartography.com
daniel@sartography.com
elizabeth@sartography.com
j@sartography.com
jason@sartography.com
jon@sartography.com
kb@sartography.com
kevin@sartography.com
madhurya@sartography.com
mike@sartography.com
natalia@sartography.com

View File

@ -1,17 +1,46 @@
admin@spiffworkflow.org
amir@status.im
app.program.lead@status.im
core@status.im
dao.project.lead@status.im
desktop.program.lead@status.im
desktop.project.lead@status.im
fin1@status.im
fin@status.im
finance.lead@status.im
legal.lead@status.im
program.lead@status.im
services.lead@status.im
finance.sme@status.im
infra.sme@status.im
legal.sme@status.im
security.sme@status.im
ppg.ba@status.im
peopleops.partner@status.im
peopleops.talent@status.im
finance_user1@status.im
harmeet@status.im
infra.program-lead@status.im
infra.project-lead@status.im
dao.project.lead@status.im
desktop.project.lead@status.im
app.program.lead@status.im
desktop.program.lead@status.im
infra.sme@status.im
infra1.sme@status.im
infra2.sme@status.im
jakub@status.im
jarrad@status.im
lead1@status.im
lead@status.im
legal.lead@status.im
legal.program-lead.sme@status.im
legal.program-lead@status.im
legal.project-lead.sme@status.im
legal.project-lead@status.im
legal.sme@status.im
legal1.sme@status.im
manuchehr@status.im
peopleops.partner@status.im
peopleops.talent@status.im
ppg.ba.program-lead@status.im
ppg.ba.project-lead@status.im
ppg.ba.sme1@status.im
ppg.ba.sme@status.im
ppg.ba@status.im
program.lead@status.im
sasha@status.im
security.program-lead.sme@status.im
security.program-lead@status.im
security.project-lead.sme@status.im
security.project-lead@status.im
security.sme@status.im
security1.sme@status.im
services.lead@status.im

View File

@ -1825,7 +1825,7 @@ lxml = "*"
type = "git"
url = "https://github.com/sartography/SpiffWorkflow"
reference = "main"
resolved_reference = "98c6294f1240aee599cd98bcee58d121cb57b331"
resolved_reference = "0e61be85c47474a33037e6f398e64c96e02f13ad"
[[package]]
name = "SQLAlchemy"

View File

@ -157,6 +157,29 @@ def get_hacked_up_app_for_script() -> flask.app.Flask:
return app
def traces_sampler(sampling_context: Any) -> Any:
# always inherit the parent's sampling decision, if there is one
if sampling_context["parent_sampled"] is not None:
return sampling_context["parent_sampled"]
if "wsgi_environ" in sampling_context:
wsgi_environ = sampling_context["wsgi_environ"]
path_info = wsgi_environ.get("PATH_INFO")
request_method = wsgi_environ.get("REQUEST_METHOD")
# tasks_controller.task_submit
# this is the current pain point as of 31 jan 2023.
if (
path_info
and path_info.startswith("/v1.0/tasks/")
and request_method == "PUT"
):
return 1
# Default sample rate for all others (replaces traces_sample_rate)
return 0.01
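A minimal sketch of how traces_sampler responds to a couple of sampling contexts (the context values below are illustrative, not taken from the diff):
task_submit_context = {
    "parent_sampled": None,
    "wsgi_environ": {"PATH_INFO": "/v1.0/tasks/123/abc", "REQUEST_METHOD": "PUT"},
}
other_context = {
    "parent_sampled": None,
    "wsgi_environ": {"PATH_INFO": "/v1.0/status", "REQUEST_METHOD": "GET"},
}
assert traces_sampler(task_submit_context) == 1   # task submits are always traced
assert traces_sampler(other_context) == 0.01      # everything else is sampled at 1%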
def configure_sentry(app: flask.app.Flask) -> None:
"""Configure_sentry."""
import sentry_sdk
@ -193,5 +216,10 @@ def configure_sentry(app: flask.app.Flask) -> None:
# of transactions for performance monitoring.
# We recommend adjusting this value to less than 1(00%) in production.
traces_sample_rate=float(sentry_traces_sample_rate),
traces_sampler=traces_sampler,
# The profiles_sample_rate setting is relative to the traces_sample_rate setting.
_experiments={
"profiles_sample_rate": 1,
},
before_send=before_send,
)

View File

@ -1605,6 +1605,45 @@ paths:
schema:
$ref: "#/components/schemas/Workflow"
/process-data-file-download/{modified_process_model_identifier}/{process_instance_id}/{process_data_identifier}:
parameters:
- name: modified_process_model_identifier
in: path
required: true
description: The modified id of an existing process model
schema:
type: string
- name: process_instance_id
in: path
required: true
description: The unique id of an existing process instance.
schema:
type: integer
- name: process_data_identifier
in: path
required: true
description: The identifier of the process data.
schema:
type: string
- name: index
in: query
required: false
description: The optional index of the value if the key's value is an array
schema:
type: integer
get:
operationId: spiffworkflow_backend.routes.process_api_blueprint.process_data_file_download
summary: Download the file referenced in the process data value.
tags:
- Data Objects
responses:
"200":
description: Fetch succeeded.
content:
application/json:
schema:
$ref: "#/components/schemas/Workflow"
/send-event/{modified_process_model_identifier}/{process_instance_id}:
parameters:
- name: modified_process_model_identifier
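A hypothetical client call against the new download path (the host, model id, instance id, and data key below are assumptions; the access_token cookie fallback comes from the routes/user.py change later in this diff):
import requests

url = (
    "http://localhost:7000/v1.0/process-data-file-download/"
    "misc:my-model/42/uploaded_file?index=0"
)
response = requests.get(url, cookies={"access_token": "..."})  # token elided
with open("downloaded_file.pdf", "wb") as output_file:
    output_file.write(response.content)  # bytes decoded from the "data:" value by the backend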

View File

@ -82,13 +82,17 @@ def setup_config(app: Flask) -> None:
app.config.from_pyfile(f"{app.instance_path}/config.py", silent=True)
app.config["PERMISSIONS_FILE_FULLPATH"] = None
if app.config["SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME"]:
permissions_file_name = app.config["SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME"]
if permissions_file_name is not None:
app.config["PERMISSIONS_FILE_FULLPATH"] = os.path.join(
app.root_path,
"config",
"permissions",
app.config["SPIFFWORKFLOW_BACKEND_PERMISSIONS_FILE_NAME"],
permissions_file_name,
)
print(f"base_permissions: loaded permissions file: {permissions_file_name}")
else:
print("base_permissions: no permissions file loaded")
# unversioned (see .gitignore) config that can override everything and include secrets.
# src/spiffworkflow_backend/config/secrets.py

View File

@ -69,6 +69,8 @@ GIT_BRANCH = environ.get("GIT_BRANCH")
GIT_CLONE_URL_FOR_PUBLISHING = environ.get("GIT_CLONE_URL")
GIT_COMMIT_ON_SAVE = environ.get("GIT_COMMIT_ON_SAVE", default="false") == "true"
GIT_SSH_PRIVATE_KEY = environ.get("GIT_SSH_PRIVATE_KEY")
GIT_USERNAME = environ.get("GIT_USERNAME")
GIT_USER_EMAIL = environ.get("GIT_USER_EMAIL")
# Database Configuration
SPIFF_DATABASE_TYPE = environ.get(

View File

@ -21,6 +21,11 @@ from SpiffWorkflow.exceptions import WorkflowTaskException
from SpiffWorkflow.specs.base import TaskSpec # type: ignore
from SpiffWorkflow.task import Task # type: ignore
from spiffworkflow_backend.services.authentication_service import NotAuthorizedError
from spiffworkflow_backend.services.authentication_service import TokenInvalidError
from spiffworkflow_backend.services.authentication_service import TokenNotProvidedError
from spiffworkflow_backend.services.authentication_service import UserNotLoggedInError
api_error_blueprint = Blueprint("api_error_blueprint", __name__)
@ -169,13 +174,30 @@ def set_user_sentry_context() -> None:
set_tag("username", username)
def should_notify_sentry(exception: Exception) -> bool:
"""Determine if we should notify sentry.
We use capture_exception to send the exception to sentry, but we do not want to send:
1. ApiErrors that are just invalid tokens
2. NotAuthorizedError. We usually call check-permissions before calling an API to
make sure we'll have access, but in some cases it's more convenient to just make
the call from the frontend and handle the 403 appropriately.
"""
if isinstance(exception, ApiError):
if exception.error_code == "invalid_token":
return False
if isinstance(exception, NotAuthorizedError):
return False
return True
@api_error_blueprint.app_errorhandler(Exception) # type: ignore
def handle_exception(exception: Exception) -> flask.wrappers.Response:
"""Handles unexpected exceptions."""
set_user_sentry_context()
sentry_link = None
if not isinstance(exception, ApiError) or exception.error_code != "invalid_token":
if should_notify_sentry(exception):
id = capture_exception(exception)
if isinstance(exception, ApiError):
@ -191,22 +213,41 @@ def handle_exception(exception: Exception) -> flask.wrappers.Response:
f"https://sentry.io/{organization_slug}/{project_slug}/events/{id}"
)
# !!!NOTE!!!: do this after sentry stuff since calling logger.exception
# seems to break the sentry sdk context where we no longer get back
# an event id or send out tags like username
current_app.logger.exception(exception)
# !!!NOTE!!!: do this after sentry stuff since calling logger.exception
# seems to break the sentry sdk context where we no longer get back
# an event id or send out tags like username
current_app.logger.exception(exception)
else:
current_app.logger.warning(
f"Received exception: {exception}. Since we do not want this particular"
" exception in sentry, we cannot use logger.exception or logger.error, so"
" there will be no backtrace. see api_error.py"
)
error_code = "internal_server_error"
status_code = 500
if (
isinstance(exception, NotAuthorizedError)
or isinstance(exception, TokenNotProvidedError)
or isinstance(exception, TokenInvalidError)
):
error_code = "not_authorized"
status_code = 403
if isinstance(exception, UserNotLoggedInError):
error_code = "not_authenticated"
status_code = 401
# set api_exception like this to avoid confusing mypy
# and what type the object is
# about what type the object is
api_exception = None
if isinstance(exception, ApiError):
api_exception = exception
else:
api_exception = ApiError(
error_code="internal_server_error",
error_code=error_code,
message=f"{exception.__class__.__name__}",
sentry_link=sentry_link,
status_code=500,
status_code=status_code,
)
return make_response(jsonify(api_exception), api_exception.status_code)
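A small sketch of the behaviour encoded above; the exception instances are illustrative and their constructor arguments are assumed:
assert should_notify_sentry(ValueError("boom")) is True
assert should_notify_sentry(NotAuthorizedError("no access")) is False
assert should_notify_sentry(
    ApiError(error_code="invalid_token", message="token expired")
) is False
# NotAuthorizedError / TokenNotProvidedError / TokenInvalidError -> 403 "not_authorized"
# UserNotLoggedInError -> 401 "not_authenticated"
# anything else -> 500 "internal_server_error"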

View File

@ -1,7 +1,9 @@
"""APIs for dealing with process groups, process models, and process instances."""
import base64
import json
from typing import Any
from typing import Dict
from typing import Optional
import flask.wrappers
from flask import Blueprint
@ -81,10 +83,12 @@ def process_list() -> Any:
return SpecReferenceSchema(many=True).dump(references)
def process_data_show(
def _process_data_fetcher(
process_instance_id: int,
process_data_identifier: str,
modified_process_model_identifier: str,
download_file_data: bool,
index: Optional[int] = None,
) -> flask.wrappers.Response:
"""Process_data_show."""
process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
@ -94,6 +98,26 @@ def process_data_show(
if process_data_identifier in all_process_data:
process_data_value = all_process_data[process_data_identifier]
if process_data_value is not None and index is not None:
process_data_value = process_data_value[index]
if (
download_file_data
and isinstance(process_data_value, str)
and process_data_value.startswith("data:")
):
parts = process_data_value.split(";")
mimetype = parts[0][4:]
filename = parts[1]
base64_value = parts[2].split(",")[1]
file_contents = base64.b64decode(base64_value)
return Response(
file_contents,
mimetype=mimetype,
headers={"Content-disposition": f"attachment; filename={filename}"},
)
return make_response(
jsonify(
{
@ -105,6 +129,37 @@ def process_data_show(
)
def process_data_show(
process_instance_id: int,
process_data_identifier: str,
modified_process_model_identifier: str,
) -> flask.wrappers.Response:
"""Process_data_show."""
return _process_data_fetcher(
process_instance_id,
process_data_identifier,
modified_process_model_identifier,
False,
None,
)
def process_data_file_download(
process_instance_id: int,
process_data_identifier: str,
modified_process_model_identifier: str,
index: Optional[int] = None,
) -> flask.wrappers.Response:
"""Process_data_file_download."""
return _process_data_fetcher(
process_instance_id,
process_data_identifier,
modified_process_model_identifier,
True,
index,
)
# sample body:
# {"ref": "refs/heads/main", "repository": {"name": "sample-process-models",
# "full_name": "sartography/sample-process-models", "private": False .... }}

View File

@ -10,6 +10,7 @@ from typing import Union
import flask.wrappers
import jinja2
import sentry_sdk
from flask import current_app
from flask import g
from flask import jsonify
@ -326,13 +327,12 @@ def process_data_show(
)
def task_submit(
def task_submit_shared(
process_instance_id: int,
task_id: str,
body: Dict[str, Any],
terminate_loop: bool = False,
) -> flask.wrappers.Response:
"""Task_submit_user_data."""
principal = _find_principal_or_raise()
process_instance = _find_process_instance_by_id_or_raise(process_instance_id)
if not process_instance.can_submit_task():
@ -380,15 +380,16 @@ def task_submit(
)
)
processor.lock_process_instance("Web")
ProcessInstanceService.complete_form_task(
processor=processor,
spiff_task=spiff_task,
data=body,
user=g.user,
human_task=human_task,
)
processor.unlock_process_instance("Web")
with sentry_sdk.start_span(op="task", description="complete_form_task"):
processor.lock_process_instance("Web")
ProcessInstanceService.complete_form_task(
processor=processor,
spiff_task=spiff_task,
data=body,
user=g.user,
human_task=human_task,
)
processor.unlock_process_instance("Web")
# If we need to update all tasks, then get the next ready task and if it a multi-instance with the same
# task spec, complete that form as well.
@ -417,6 +418,19 @@ def task_submit(
return Response(json.dumps({"ok": True}), status=202, mimetype="application/json")
def task_submit(
process_instance_id: int,
task_id: str,
body: Dict[str, Any],
terminate_loop: bool = False,
) -> flask.wrappers.Response:
"""Task_submit_user_data."""
with sentry_sdk.start_span(
op="controller_action", description="tasks_controller.task_submit"
):
return task_submit_shared(process_instance_id, task_id, body, terminate_loop)
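The span nesting introduced above, shown in isolation (a sketch: the op and description strings match the diff, the body is a placeholder):
import sentry_sdk

with sentry_sdk.start_span(
    op="controller_action", description="tasks_controller.task_submit"
):
    with sentry_sdk.start_span(op="task", description="complete_form_task"):
        pass  # lock the process instance, complete the form task, then unlock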
def _get_tasks(
processes_started_by_user: bool = True,
has_lane_assignment_id: bool = True,

View File

@ -17,6 +17,7 @@ from flask import request
from werkzeug.wrappers import Response
from spiffworkflow_backend.exceptions.api_error import ApiError
from spiffworkflow_backend.helpers.api_version import V1_API_PATH_PREFIX
from spiffworkflow_backend.models.user import UserModel
from spiffworkflow_backend.services.authentication_service import AuthenticationService
from spiffworkflow_backend.services.authentication_service import (
@ -58,6 +59,10 @@ def verify_token(
if not token and "Authorization" in request.headers:
token = request.headers["Authorization"].removeprefix("Bearer ")
if not token and "access_token" in request.cookies:
if request.path.startswith(f"{V1_API_PATH_PREFIX}/process-data-file-download/"):
token = request.cookies["access_token"]
# This should never be set here but just in case
_clear_auth_tokens_from_thread_local_data()
@ -96,7 +101,7 @@ def verify_token(
)
if auth_token and "error" not in auth_token:
tld = current_app.config["THREAD_LOCAL_DATA"]
tld.new_access_token = auth_token["access_token"]
tld.new_access_token = auth_token["id_token"]
tld.new_id_token = auth_token["id_token"]
# We have the user, but this code is a bit convoluted, and will later demand
# a user_info object so it can look up the user. Sorry to leave this crap here.
@ -186,6 +191,7 @@ def set_new_access_token_in_cookie(
):
domain_for_frontend_cookie = None
# fixme - we should not be passing the access token back to the client
if hasattr(tld, "new_access_token") and tld.new_access_token:
response.set_cookie(
"access_token", tld.new_access_token, domain=domain_for_frontend_cookie
@ -254,7 +260,7 @@ def parse_id_token(token: str) -> Any:
return json.loads(decoded)
def login_return(code: str, state: str, session_state: str) -> Optional[Response]:
def login_return(code: str, state: str, session_state: str = "") -> Optional[Response]:
"""Login_return."""
state_dict = ast.literal_eval(base64.b64decode(state).decode("utf-8"))
state_redirect_url = state_dict["redirect_url"]
@ -269,12 +275,13 @@ def login_return(code: str, state: str, session_state: str) -> Optional[Response
user_model = AuthorizationService.create_user_from_sign_in(user_info)
g.user = user_model.id
g.token = auth_token_object["id_token"]
AuthenticationService.store_refresh_token(
user_model.id, auth_token_object["refresh_token"]
)
if "refresh_token" in auth_token_object:
AuthenticationService.store_refresh_token(
user_model.id, auth_token_object["refresh_token"]
)
redirect_url = state_redirect_url
tld = current_app.config["THREAD_LOCAL_DATA"]
tld.new_access_token = auth_token_object["access_token"]
tld.new_access_token = auth_token_object["id_token"]
tld.new_id_token = auth_token_object["id_token"]
return redirect(redirect_url)

View File

@ -0,0 +1,53 @@
"""Markdown_file_download_link."""
from typing import Any
from urllib.parse import unquote
from flask import current_app
from spiffworkflow_backend.models.process_model import ProcessModelInfo
from spiffworkflow_backend.models.script_attributes_context import (
ScriptAttributesContext,
)
from spiffworkflow_backend.scripts.script import Script
class GetMarkdownFileDownloadLink(Script):
"""GetMarkdownFileDownloadLink."""
@staticmethod
def requires_privileged_permissions() -> bool:
"""We have deemed this function safe to run without elevated permissions."""
return False
def get_description(self) -> str:
"""Get_description."""
return """Returns a string which is a string in markdown format."""
def run(
self,
script_attributes_context: ScriptAttributesContext,
*_args: Any,
**kwargs: Any,
) -> Any:
"""Run."""
# example input:
# "data:application/pdf;name=Harmeet_1234.pdf;base64,JV...."
process_data_identifier = kwargs["key"]
parts = kwargs["file_data"].split(";")
file_index = kwargs["file_index"]
label = unquote(parts[1].split("=")[1])
process_model_identifier = script_attributes_context.process_model_identifier
modified_process_model_identifier = (
ProcessModelInfo.modify_process_identifier_for_path_param(
process_model_identifier
)
)
process_instance_id = script_attributes_context.process_instance_id
url = current_app.config["SPIFFWORKFLOW_BACKEND_URL"]
url += (
f"/v1.0/process-data-file-download/{modified_process_model_identifier}/"
+ f"{process_instance_id}/{process_data_identifier}?index={file_index}"
)
link = f"[{label}]({url})"
return link
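A minimal sketch of the output the script builds, using the example input from the comment above (the backend URL, model, and instance id are assumptions):
from urllib.parse import unquote

file_data = "data:application/pdf;name=Harmeet_1234.pdf;base64,JV...."
label = unquote(file_data.split(";")[1].split("=")[1])   # "Harmeet_1234.pdf"
url = (
    "https://backend.example.com/v1.0/process-data-file-download/"
    "misc:my-model/42/uploaded_file?index=0"
)
print(f"[{label}]({url})")
# -> [Harmeet_1234.pdf](https://backend.example.com/v1.0/process-data-file-download/misc:my-model/42/uploaded_file?index=0)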

Some files were not shown because too many files have changed in this diff.